diff --git "a/test.json" "b/test.json" new file mode 100644--- /dev/null +++ "b/test.json" @@ -0,0 +1,2002 @@ +[ + { + "comment": "```suggestion assertThat(k2).hasSameHashCodeAs(k1); ```", + "method_body": "private void testSerialization(BlobKey.BlobType blobType) throws Exception {\nfinal BlobKey k1 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k2 = CommonTestUtils.createCopySerializable(k1);\nassertThat(k2).isEqualTo(k1);\nassertThat(k2.hashCode()).isEqualTo(k1.hashCode());\nassertThat(k1.compareTo(k2)).isZero();\n}", + "target_code": "assertThat(k2.hashCode()).isEqualTo(k1.hashCode());", + "method_body_after": "private void testSerialization(BlobKey.BlobType blobType) throws Exception {\nfinal BlobKey k1 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k2 = CommonTestUtils.createCopySerializable(k1);\nassertThat(k2).isEqualTo(k1);\nassertThat(k2).hasSameHashCodeAs(k1);\nassertThat(k1).isEqualByComparingTo(k2);\n}", + "context_before": "class BlobKeyTest {\n/** The first key array to be used during the unit tests. */\nprivate static final byte[] KEY_ARRAY_1 = new byte[BlobKey.SIZE];\n/** The second key array to be used during the unit tests. */\nprivate static final byte[] KEY_ARRAY_2 = new byte[BlobKey.SIZE];\n/** First byte array to use for the random component of a {@link BlobKey}. */\nprivate static final byte[] RANDOM_ARRAY_1 = new byte[AbstractID.SIZE];\n/** Second byte array to use for the random component of a {@link BlobKey}. */\nprivate static final byte[] RANDOM_ARRAY_2 = new byte[AbstractID.SIZE];\n/*\n* Initialize the key and random arrays.\n*/\nstatic {\nfor (int i = 0; i < KEY_ARRAY_1.length; ++i) {\nKEY_ARRAY_1[i] = (byte) i;\nKEY_ARRAY_2[i] = (byte) (i + 1);\n}\nfor (int i = 0; i < RANDOM_ARRAY_1.length; ++i) {\nRANDOM_ARRAY_1[i] = (byte) i;\nRANDOM_ARRAY_2[i] = (byte) (i + 1);\n}\n}\n@Test\nvoid testCreateKey() {\nBlobKey key = BlobKey.createKey(PERMANENT_BLOB, KEY_ARRAY_1);\nverifyType(PERMANENT_BLOB, key);\nassertThat(key.getHash()).isEqualTo(KEY_ARRAY_1);\nkey = BlobKey.createKey(TRANSIENT_BLOB, KEY_ARRAY_1);\nverifyType(TRANSIENT_BLOB, key);\nassertThat(key.getHash()).isEqualTo(KEY_ARRAY_1);\n}\n@Test\nvoid testSerializationTransient() throws Exception {\ntestSerialization(TRANSIENT_BLOB);\n}\n@Test\nvoid testSerializationPermanent() throws Exception {\ntestSerialization(PERMANENT_BLOB);\n}\n/** Tests the serialization/deserialization of BLOB keys. */\n@Test\nvoid testEqualsTransient() {\ntestEquals(TRANSIENT_BLOB);\n}\n@Test\nvoid testEqualsPermanent() {\ntestEquals(PERMANENT_BLOB);\n}\n/** Tests the {@link BlobKey\nprivate void testEquals(BlobKey.BlobType blobType) {\nfinal BlobKey k1 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k2 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k3 = BlobKey.createKey(blobType, KEY_ARRAY_2, RANDOM_ARRAY_1);\nfinal BlobKey k4 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_2);\nassertThat(k1.equals(k2)).isTrue();\nassertThat(k2.equals(k1)).isTrue();\nassertThat(k2.hashCode()).isEqualTo(k1.hashCode());\nassertThat(k1.equals(k3)).isFalse();\nassertThat(k3.equals(k1)).isFalse();\nassertThat(k1.equals(k4)).isFalse();\nassertThat(k4.equals(k1)).isFalse();\nassertThat(k1.equals(null)).isFalse();\nassertThat(k1.equals(this)).isFalse();\n}\n/** Tests the equals method. 
*/\n@Test\nvoid testEqualsDifferentBlobType() {\nfinal BlobKey k1 = BlobKey.createKey(TRANSIENT_BLOB, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k2 = BlobKey.createKey(PERMANENT_BLOB, KEY_ARRAY_1, RANDOM_ARRAY_1);\nassertThat(k1.equals(k2)).isFalse();\nassertThat(k2.equals(k1)).isFalse();\n}\n@Test\nvoid testComparesTransient() {\ntestCompares(TRANSIENT_BLOB);\n}\n@Test\nvoid testComparesPermanent() {\ntestCompares(PERMANENT_BLOB);\n}\n/** Tests the compares method. */\nprivate void testCompares(BlobKey.BlobType blobType) {\nfinal BlobKey k1 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k2 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k3 = BlobKey.createKey(blobType, KEY_ARRAY_2, RANDOM_ARRAY_1);\nfinal BlobKey k4 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_2);\nassertThat(k1.compareTo(k2)).isZero();\nassertThat(k2.compareTo(k1)).isZero();\nassertThat(k1.compareTo(k3)).isLessThan(0);\nassertThat(k1.compareTo(k4)).isLessThan(0);\nassertThat(k3.compareTo(k1)).isGreaterThan(0);\nassertThat(k4.compareTo(k1)).isGreaterThan(0);\n}\n@Test\nvoid testComparesDifferentBlobType() {\nfinal BlobKey k1 = BlobKey.createKey(TRANSIENT_BLOB, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k2 = BlobKey.createKey(PERMANENT_BLOB, KEY_ARRAY_1, RANDOM_ARRAY_1);\nassertThat(k1.compareTo(k2)).isGreaterThan(0);\nassertThat(k2.compareTo(k1)).isLessThan(0);\n}\n@Test\nvoid testStreamsTransient() throws Exception {\ntestStreams(TRANSIENT_BLOB);\n}\n@Test\nvoid testStreamsPermanent() throws Exception {\ntestStreams(PERMANENT_BLOB);\n}\n@Test\nvoid testToFromStringPermanentKey() {\ntestToFromString(BlobKey.createKey(PERMANENT_BLOB));\n}\n@Test\nvoid testToFromStringTransientKey() {\ntestToFromString(BlobKey.createKey(TRANSIENT_BLOB));\n}\nprivate void testToFromString(BlobKey blobKey) {\nfinal String stringRepresentation = blobKey.toString();\nfinal BlobKey parsedBlobKey = BlobKey.fromString(stringRepresentation);\nassertThat(blobKey).isEqualTo(parsedBlobKey);\n}\n@Test\nvoid testFromStringFailsWithWrongInput() {\nassertThatThrownBy(() -> BlobKey.fromString(\"foobar\"))\n.isInstanceOf(IllegalStateException.class);\n}\n@Test\nvoid testFromStringFailsWithInvalidBlobKeyType() {\nassertThatThrownBy(\n() ->\nBlobKey.fromString(\nString.format(\n\"x-%s-%s\",\nStringUtils.byteToHexString(KEY_ARRAY_1),\nStringUtils.byteToHexString(RANDOM_ARRAY_1))))\n.isInstanceOf(IllegalStateException.class);\n}\n/** Test the serialization/deserialization using input/output streams. 
*/\nprivate void testStreams(BlobKey.BlobType blobType) throws IOException {\nfinal BlobKey k1 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal ByteArrayOutputStream baos = new ByteArrayOutputStream(20);\nk1.writeToOutputStream(baos);\nbaos.close();\nfinal ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());\nfinal BlobKey k2 = BlobKey.readFromInputStream(bais);\nassertThat(k2).isEqualTo(k1);\n}\n/**\n* Verifies that the two given key's are different in total but share the same hash.\n*\n* @param key1 first blob key\n* @param key2 second blob key\n*/\nstatic void verifyKeyDifferentHashEquals(BlobKey key1, BlobKey key2) {\nassertThat(key1).isNotEqualTo(key2);\nassertThat(key1.getHash()).isEqualTo(key2.getHash());\n}\n/**\n* Verifies that the two given key's are different in total and also have different hashes.\n*\n* @param key1 first blob key\n* @param key2 second blob key\n*/\nstatic void verifyKeyDifferentHashDifferent(BlobKey key1, BlobKey key2) {\nassertThat(key1).isNotEqualTo(key2);\nassertThat(key1.getHash()).isNotEqualTo(key2.getHash());\n}\n/**\n* Verifies that the given key is of an expected type.\n*\n* @param expected the type the key should have\n* @param key the key to verify\n*/\nstatic void verifyType(BlobKey.BlobType expected, BlobKey key) {\nif (expected == PERMANENT_BLOB) {\nassertThat(key).isInstanceOf(PermanentBlobKey.class);\n} else {\nassertThat(key).isInstanceOf(TransientBlobKey.class);\n}\n}\n}", + "context_after": "class BlobKeyTest {\n/** The first key array to be used during the unit tests. */\nprivate static final byte[] KEY_ARRAY_1 = new byte[BlobKey.SIZE];\n/** The second key array to be used during the unit tests. */\nprivate static final byte[] KEY_ARRAY_2 = new byte[BlobKey.SIZE];\n/** First byte array to use for the random component of a {@link BlobKey}. */\nprivate static final byte[] RANDOM_ARRAY_1 = new byte[AbstractID.SIZE];\n/** Second byte array to use for the random component of a {@link BlobKey}. */\nprivate static final byte[] RANDOM_ARRAY_2 = new byte[AbstractID.SIZE];\n/*\n* Initialize the key and random arrays.\n*/\nstatic {\nfor (int i = 0; i < KEY_ARRAY_1.length; ++i) {\nKEY_ARRAY_1[i] = (byte) i;\nKEY_ARRAY_2[i] = (byte) (i + 1);\n}\nfor (int i = 0; i < RANDOM_ARRAY_1.length; ++i) {\nRANDOM_ARRAY_1[i] = (byte) i;\nRANDOM_ARRAY_2[i] = (byte) (i + 1);\n}\n}\n@Test\nvoid testCreateKey() {\nBlobKey key = BlobKey.createKey(PERMANENT_BLOB, KEY_ARRAY_1);\nverifyType(PERMANENT_BLOB, key);\nassertThat(key.getHash()).isEqualTo(KEY_ARRAY_1);\nkey = BlobKey.createKey(TRANSIENT_BLOB, KEY_ARRAY_1);\nverifyType(TRANSIENT_BLOB, key);\nassertThat(key.getHash()).isEqualTo(KEY_ARRAY_1);\n}\n@Test\nvoid testSerializationTransient() throws Exception {\ntestSerialization(TRANSIENT_BLOB);\n}\n@Test\nvoid testSerializationPermanent() throws Exception {\ntestSerialization(PERMANENT_BLOB);\n}\n/** Tests the serialization/deserialization of BLOB keys. 
*/\n@Test\nvoid testEqualsTransient() {\ntestEquals(TRANSIENT_BLOB);\n}\n@Test\nvoid testEqualsPermanent() {\ntestEquals(PERMANENT_BLOB);\n}\n/** Tests the {@link BlobKey\nprivate void testEquals(BlobKey.BlobType blobType) {\nfinal BlobKey k1 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k2 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k3 = BlobKey.createKey(blobType, KEY_ARRAY_2, RANDOM_ARRAY_1);\nfinal BlobKey k4 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_2);\nassertThat(k1).isEqualTo(k2);\nassertThat(k2).isEqualTo(k1);\nassertThat(k2).hasSameHashCodeAs(k1);\nassertThat(k1).isNotEqualTo(k3);\nassertThat(k3).isNotEqualTo(k1);\nassertThat(k1).isNotEqualTo(k4);\nassertThat(k4).isNotEqualTo(k1);\nassertThat(k1).isNotEqualTo(null);\nassertThat(k1).isNotEqualTo(this);\n}\n/** Tests the equals method. */\n@Test\nvoid testEqualsDifferentBlobType() {\nfinal BlobKey k1 = BlobKey.createKey(TRANSIENT_BLOB, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k2 = BlobKey.createKey(PERMANENT_BLOB, KEY_ARRAY_1, RANDOM_ARRAY_1);\nassertThat(k1).isNotEqualTo(k2);\nassertThat(k2).isNotEqualTo(k1);\n}\n@Test\nvoid testComparesTransient() {\ntestCompares(TRANSIENT_BLOB);\n}\n@Test\nvoid testComparesPermanent() {\ntestCompares(PERMANENT_BLOB);\n}\n/** Tests the compares method. */\nprivate void testCompares(BlobKey.BlobType blobType) {\nfinal BlobKey k1 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k2 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k3 = BlobKey.createKey(blobType, KEY_ARRAY_2, RANDOM_ARRAY_1);\nfinal BlobKey k4 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_2);\nassertThat(k1).isEqualByComparingTo(k2);\nassertThat(k2).isEqualByComparingTo(k1);\nassertThat(k1).isLessThan(k3);\nassertThat(k1).isLessThan(k4);\nassertThat(k3).isGreaterThan(k1);\nassertThat(k4).isGreaterThan(k1);\n}\n@Test\nvoid testComparesDifferentBlobType() {\nfinal BlobKey k1 = BlobKey.createKey(TRANSIENT_BLOB, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal BlobKey k2 = BlobKey.createKey(PERMANENT_BLOB, KEY_ARRAY_1, RANDOM_ARRAY_1);\nassertThat(k1).isGreaterThan(k2);\nassertThat(k2).isLessThan(k1);\n}\n@Test\nvoid testStreamsTransient() throws Exception {\ntestStreams(TRANSIENT_BLOB);\n}\n@Test\nvoid testStreamsPermanent() throws Exception {\ntestStreams(PERMANENT_BLOB);\n}\n@Test\nvoid testToFromStringPermanentKey() {\ntestToFromString(BlobKey.createKey(PERMANENT_BLOB));\n}\n@Test\nvoid testToFromStringTransientKey() {\ntestToFromString(BlobKey.createKey(TRANSIENT_BLOB));\n}\nprivate void testToFromString(BlobKey blobKey) {\nfinal String stringRepresentation = blobKey.toString();\nfinal BlobKey parsedBlobKey = BlobKey.fromString(stringRepresentation);\nassertThat(blobKey).isEqualTo(parsedBlobKey);\n}\n@Test\nvoid testFromStringFailsWithWrongInput() {\nassertThatThrownBy(() -> BlobKey.fromString(\"foobar\"))\n.isInstanceOf(IllegalStateException.class);\n}\n@Test\nvoid testFromStringFailsWithInvalidBlobKeyType() {\nassertThatThrownBy(\n() ->\nBlobKey.fromString(\nString.format(\n\"x-%s-%s\",\nStringUtils.byteToHexString(KEY_ARRAY_1),\nStringUtils.byteToHexString(RANDOM_ARRAY_1))))\n.isInstanceOf(IllegalStateException.class);\n}\n/** Test the serialization/deserialization using input/output streams. 
*/\nprivate void testStreams(BlobKey.BlobType blobType) throws IOException {\nfinal BlobKey k1 = BlobKey.createKey(blobType, KEY_ARRAY_1, RANDOM_ARRAY_1);\nfinal ByteArrayOutputStream baos = new ByteArrayOutputStream(20);\nk1.writeToOutputStream(baos);\nbaos.close();\nfinal ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());\nfinal BlobKey k2 = BlobKey.readFromInputStream(bais);\nassertThat(k2).isEqualTo(k1);\n}\n/**\n* Verifies that the two given key's are different in total but share the same hash.\n*\n* @param key1 first blob key\n* @param key2 second blob key\n*/\nstatic void verifyKeyDifferentHashEquals(BlobKey key1, BlobKey key2) {\nassertThat(key1).isNotEqualTo(key2);\nassertThat(key1.getHash()).isEqualTo(key2.getHash());\n}\n/**\n* Verifies that the two given key's are different in total and also have different hashes.\n*\n* @param key1 first blob key\n* @param key2 second blob key\n*/\nstatic void verifyKeyDifferentHashDifferent(BlobKey key1, BlobKey key2) {\nassertThat(key1).isNotEqualTo(key2);\nassertThat(key1.getHash()).isNotEqualTo(key2.getHash());\n}\n/**\n* Verifies that the given key is of an expected type.\n*\n* @param expected the type the key should have\n* @param key the key to verify\n*/\nstatic void verifyType(BlobKey.BlobType expected, BlobKey key) {\nif (expected == PERMANENT_BLOB) {\nassertThat(key).isInstanceOf(PermanentBlobKey.class);\n} else {\nassertThat(key).isInstanceOf(TransientBlobKey.class);\n}\n}\n}" + }, + { + "comment": "shall this protected by lock? otherwise during the removal, someone set the to-be-removed-volume as default or bind it to db/table?", + "method_body": "public void removeStorageVolume(String name) throws AnalysisException, DdlException {\nStorageVolume sv = getStorageVolumeByName(name);\nPreconditions.checkState(sv != null,\n\"Storage volume '%s' does not exist\", name);\nPreconditions.checkState(defaultStorageVolumeId != sv.getId(),\n\"default storage volume can not be removed\");\nPreconditions.checkState(!storageVolumeToDbs.containsKey(sv.getId())\n&& !storageVolumeToTables.containsKey(sv.getId()),\n\"Storage volume '%s' is referenced by db or table\", name);\nGlobalStateMgr.getCurrentState().getStarOSAgent().removeFileStoreByName(name);\n}", + "target_code": "Preconditions.checkState(sv != null,", + "method_body_after": "public void removeStorageVolume(String name) throws AnalysisException, DdlException {\ntry (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {\nStorageVolume sv = getStorageVolumeByName(name);\nPreconditions.checkState(sv != null,\n\"Storage volume '%s' does not exist\", name);\nPreconditions.checkState(defaultStorageVolumeId != sv.getId(),\n\"default storage volume can not be removed\");\nSet dbs = storageVolumeToDbs.get(sv.getId());\nSet tables = storageVolumeToTables.get(sv.getId());\nPreconditions.checkState(dbs == null && tables == null,\n\"Storage volume '%s' is referenced by dbs or tables, dbs: %s, tables: %s\",\nname, dbs != null ? dbs.toString() : \"[]\", tables != null ? 
tables.toString() : \"[]\");\nGlobalStateMgr.getCurrentState().getStarOSAgent().removeFileStoreByName(name);\n}\n}", + "context_before": "class SharedDataStorageVolumeMgr extends StorageVolumeMgr {\n@Override\npublic Long createStorageVolume(String name, String svType, List locations, Map params,\nOptional enabled, String comment)\nthrows AlreadyExistsException, AnalysisException, DdlException {\nif (exists(name)) {\nthrow new AlreadyExistsException(String.format(\"Storage volume '%s' already exists\", name));\n}\nStorageVolume sv = new StorageVolume(0, name, svType, locations, params, enabled.orElse(true), comment);\nreturn Long.valueOf(GlobalStateMgr.getCurrentState().getStarOSAgent().addFileStore(sv.toFileStoreInfo()));\n}\n@Override\n@Override\npublic void updateStorageVolume(String name, Map params,\nOptional enabled, String comment) throws DdlException, AnalysisException {\nStorageVolume sv = getStorageVolumeByName(name);\nPreconditions.checkState(sv != null, \"Storage volume '%s' does not exist\", name);\nif (enabled.isPresent()) {\nboolean enabledValue = enabled.get();\nif (!enabledValue) {\nPreconditions.checkState(sv.getId() != defaultStorageVolumeId, \"Default volume can not be disabled\");\n}\nsv.setEnabled(enabledValue);\n}\nif (!comment.isEmpty()) {\nsv.setComment(comment);\n}\nif (!params.isEmpty()) {\nsv.setCloudConfiguration(params);\n}\nGlobalStateMgr.getCurrentState().getStarOSAgent().updateFileStore(sv.toFileStoreInfo());\n}\n@Override\npublic void setDefaultStorageVolume(String svKey) throws AnalysisException, DdlException {\ntry (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {\nStorageVolume sv = getStorageVolumeByName(svKey);\nPreconditions.checkState(sv != null, \"Storage volume '%s' does not exist\", svKey);\nPreconditions.checkState(sv.getEnabled(), \"Storage volume '%s' is disabled\", svKey);\nthis.defaultStorageVolumeId = sv.getId();\n}\n}\n@Override\npublic boolean exists(String svKey) throws DdlException {\ntry {\nStorageVolume sv = getStorageVolumeByName(svKey);\nreturn sv != null;\n} catch (AnalysisException e) {\nthrow new DdlException(e.getMessage());\n}\n}\n@Override\npublic StorageVolume getStorageVolumeByName(String svKey) throws AnalysisException {\ntry {\nFileStoreInfo fileStoreInfo = GlobalStateMgr.getCurrentState().getStarOSAgent().getFileStoreByName(svKey);\nif (fileStoreInfo == null) {\nreturn null;\n}\nreturn StorageVolume.fromFileStoreInfo(fileStoreInfo);\n} catch (DdlException e) {\nthrow new AnalysisException(e.getMessage());\n}\n}\n@Override\npublic StorageVolume getStorageVolume(long storageVolumeId) throws AnalysisException {\nreturn null;\n}\n@Override\npublic List listStorageVolumeNames() throws DdlException {\nreturn GlobalStateMgr.getCurrentState().getStarOSAgent().listFileStore()\n.stream().map(FileStoreInfo::getFsName).collect(Collectors.toList());\n}\n}", + "context_after": "class SharedDataStorageVolumeMgr extends StorageVolumeMgr {\n@Override\npublic Long createStorageVolume(String name, String svType, List locations, Map params,\nOptional enabled, String comment)\nthrows AlreadyExistsException, AnalysisException, DdlException {\ntry (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {\nif (exists(name)) {\nthrow new AlreadyExistsException(String.format(\"Storage volume '%s' already exists\", name));\n}\nStorageVolume sv = new StorageVolume(0, name, svType, locations, params, enabled.orElse(true), comment);\nreturn 
Long.valueOf(GlobalStateMgr.getCurrentState().getStarOSAgent().addFileStore(sv.toFileStoreInfo()));\n}\n}\n@Override\n@Override\npublic void updateStorageVolume(String name, Map params,\nOptional enabled, String comment) throws DdlException, AnalysisException {\ntry (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {\nStorageVolume sv = getStorageVolumeByName(name);\nPreconditions.checkState(sv != null, \"Storage volume '%s' does not exist\", name);\nif (enabled.isPresent()) {\nboolean enabledValue = enabled.get();\nif (!enabledValue) {\nPreconditions.checkState(sv.getId() != defaultStorageVolumeId, \"Default volume can not be disabled\");\n}\nsv.setEnabled(enabledValue);\n}\nif (!comment.isEmpty()) {\nsv.setComment(comment);\n}\nif (!params.isEmpty()) {\nsv.setCloudConfiguration(params);\n}\nGlobalStateMgr.getCurrentState().getStarOSAgent().updateFileStore(sv.toFileStoreInfo());\n}\n}\n@Override\npublic void setDefaultStorageVolume(String svKey) throws AnalysisException, DdlException {\ntry (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {\nStorageVolume sv = getStorageVolumeByName(svKey);\nPreconditions.checkState(sv != null, \"Storage volume '%s' does not exist\", svKey);\nPreconditions.checkState(sv.getEnabled(), \"Storage volume '%s' is disabled\", svKey);\nthis.defaultStorageVolumeId = sv.getId();\n}\n}\n@Override\npublic boolean exists(String svKey) throws DdlException {\ntry (LockCloseable lock = new LockCloseable(rwLock.readLock())) {\ntry {\nStorageVolume sv = getStorageVolumeByName(svKey);\nreturn sv != null;\n} catch (AnalysisException e) {\nthrow new DdlException(e.getMessage());\n}\n}\n}\n@Override\npublic StorageVolume getStorageVolumeByName(String svKey) throws AnalysisException {\ntry (LockCloseable lock = new LockCloseable(rwLock.readLock())) {\ntry {\nFileStoreInfo fileStoreInfo = GlobalStateMgr.getCurrentState().getStarOSAgent().getFileStoreByName(svKey);\nif (fileStoreInfo == null) {\nreturn null;\n}\nreturn StorageVolume.fromFileStoreInfo(fileStoreInfo);\n} catch (DdlException e) {\nthrow new AnalysisException(e.getMessage());\n}\n}\n}\n@Override\npublic StorageVolume getStorageVolume(long storageVolumeId) throws AnalysisException {\nreturn null;\n}\n@Override\npublic List listStorageVolumeNames() throws DdlException {\ntry (LockCloseable lock = new LockCloseable(rwLock.readLock())) {\nreturn GlobalStateMgr.getCurrentState().getStarOSAgent().listFileStore()\n.stream().map(FileStoreInfo::getFsName).collect(Collectors.toList());\n}\n}\n}" + }, + { + "comment": "So you prefer `config.set(CoreOptions.DEFAULT_PARALLELISM, 1);` to `env.setParallelism(1)`? 
\ud83d\ude05", + "method_body": "private StreamExecutionEnvironment getExecutionEnvironment() {\nConfiguration config = new Configuration();\nconfig.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);\nconfig.set(CoreOptions.DEFAULT_PARALLELISM, 1);\nconfig.set(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL, Duration.ofMillis(42));\nfinal StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(\nconfig);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(10, Time.milliseconds(1)));\nreturn env;\n}", + "target_code": "config.set(CoreOptions.DEFAULT_PARALLELISM, 1);", + "method_body_after": "private StreamExecutionEnvironment getExecutionEnvironment() {\nConfiguration config = new Configuration();\nconfig.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);\nfinal StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(\nconfig);\nenv.setParallelism(1);\nenv.enableCheckpointing(42);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(10, Time.milliseconds(1)));\nreturn env;\n}", + "context_before": "class DataStreamBatchExecutionITCase {\nprivate static final int DEFAULT_PARALLELISM = 1;\n@ClassRule\npublic static MiniClusterWithClientResource miniClusterResource = new MiniClusterWithClientResource(\nnew MiniClusterResourceConfiguration.Builder()\n.setNumberTaskManagers(1)\n.setNumberSlotsPerTaskManager(DEFAULT_PARALLELISM)\n.build());\n/**\n* We induce a failure in the last mapper. In BATCH execution mode the part of the pipeline\n* before the key-by should not be re-executed. Only the part after that will restart. We check\n* that by suffixing the attempt number to records and asserting the correct number.\n*/\n@Test\npublic void batchFailoverWithKeyByBarrier() throws Exception {\nfinal StreamExecutionEnvironment env = getExecutionEnvironment();\nDataStreamSource source = env.fromElements(\"foo\", \"bar\");\nSingleOutputStreamOperator mapped = source\n.map(new SuffixAttemptId(\"a\"))\n.map(new SuffixAttemptId(\"b\"))\n.keyBy(in -> in)\n.map(new SuffixAttemptId(\"c\"))\n.map(new OnceFailingMapper(\"d\"));\ntry (CloseableIterator result = mapped.executeAndCollect()) {\nassertThat(\niteratorToList(result),\ncontainsInAnyOrder(\"foo-a0-b0-c1-d1\", \"bar-a0-b0-c1-d1\"));\n}\n}\n@Test\npublic void batchReduceSingleResultPerKey() throws Exception {\nStreamExecutionEnvironment env = getExecutionEnvironment();\nDataStreamSource numbers = env\n.fromSequence(0, 10);\nKeyedStream stream = numbers.keyBy(i -> i % 2);\nDataStream sums = stream.reduce(Long::sum);\ntry (CloseableIterator sumsIterator = sums.executeAndCollect()) {\nList results = CollectionUtil.iteratorToList(sumsIterator);\nassertThat(results, equalTo(Arrays.asList(\n30L, 25L\n)));\n}\n}\n@Test\npublic void batchSumSingleResultPerKey() throws Exception {\nStreamExecutionEnvironment env = getExecutionEnvironment();\nDataStreamSource numbers = env\n.fromSequence(0, 10);\nKeyedStream stream = numbers.keyBy(i -> i % 2);\nDataStream sums = stream.sum(0);\ntry (CloseableIterator sumsIterator = sums.executeAndCollect()) {\nList results = CollectionUtil.iteratorToList(sumsIterator);\nassertThat(results, equalTo(Arrays.asList(\n30L, 25L\n)));\n}\n}\n/** Adds the attempt number as a suffix. 
*/\npublic static class SuffixAttemptId extends RichMapFunction {\nprivate final String suffix;\npublic SuffixAttemptId(String suffix) {\nthis.suffix = suffix;\n}\n@Override\npublic String map(String value) {\nreturn value + \"-\" + suffix + getRuntimeContext().getAttemptNumber();\n}\n}\n/**\n* Adds the attempt number as a suffix.\n*\n*
Also fails by throwing an exception on the first attempt.\n*/\npublic static class OnceFailingMapper extends RichMapFunction {\nprivate final String suffix;\npublic OnceFailingMapper(String suffix) {\nthis.suffix = suffix;\n}\n@Override\npublic String map(String value) throws Exception {\nif (getRuntimeContext().getAttemptNumber() <= 0) {\nthrow new RuntimeException(\"FAILING\");\n}\nreturn value + \"-\" + suffix + getRuntimeContext().getAttemptNumber();\n}\n}\n}", + "context_after": "class DataStreamBatchExecutionITCase {\nprivate static final int DEFAULT_PARALLELISM = 1;\n@ClassRule\npublic static MiniClusterWithClientResource miniClusterResource = new MiniClusterWithClientResource(\nnew MiniClusterResourceConfiguration.Builder()\n.setNumberTaskManagers(1)\n.setNumberSlotsPerTaskManager(DEFAULT_PARALLELISM)\n.build());\n/**\n* We induce a failure in the last mapper. In BATCH execution mode the part of the pipeline\n* before the key-by should not be re-executed. Only the part after that will restart. We check\n* that by suffixing the attempt number to records and asserting the correct number.\n*/\n@Test\npublic void batchFailoverWithKeyByBarrier() throws Exception {\nfinal StreamExecutionEnvironment env = getExecutionEnvironment();\nDataStreamSource source = env.fromElements(\"foo\", \"bar\");\nSingleOutputStreamOperator mapped = source\n.map(new SuffixAttemptId(\"a\"))\n.map(new SuffixAttemptId(\"b\"))\n.keyBy(in -> in)\n.map(new SuffixAttemptId(\"c\"))\n.map(new OnceFailingMapper(\"d\"));\ntry (CloseableIterator result = mapped.executeAndCollect()) {\nassertThat(\niteratorToList(result),\ncontainsInAnyOrder(\"foo-a0-b0-c1-d1\", \"bar-a0-b0-c1-d1\"));\n}\n}\n@Test\npublic void batchReduceSingleResultPerKey() throws Exception {\nStreamExecutionEnvironment env = getExecutionEnvironment();\nDataStreamSource numbers = env\n.fromSequence(0, 10);\nKeyedStream stream = numbers.keyBy(i -> i % 2);\nDataStream sums = stream.reduce(Long::sum);\ntry (CloseableIterator sumsIterator = sums.executeAndCollect()) {\nList results = CollectionUtil.iteratorToList(sumsIterator);\nassertThat(results, equalTo(Arrays.asList(\n30L, 25L\n)));\n}\n}\n@Test\npublic void batchSumSingleResultPerKey() throws Exception {\nStreamExecutionEnvironment env = getExecutionEnvironment();\nDataStreamSource numbers = env\n.fromSequence(0, 10);\nKeyedStream stream = numbers.keyBy(i -> i % 2);\nDataStream sums = stream.sum(0);\ntry (CloseableIterator sumsIterator = sums.executeAndCollect()) {\nList results = CollectionUtil.iteratorToList(sumsIterator);\nassertThat(results, equalTo(Arrays.asList(\n30L, 25L\n)));\n}\n}\n/** Adds the attempt number as a suffix. */\npublic static class SuffixAttemptId extends RichMapFunction {\nprivate final String suffix;\npublic SuffixAttemptId(String suffix) {\nthis.suffix = suffix;\n}\n@Override\npublic String map(String value) {\nreturn value + \"-\" + suffix + getRuntimeContext().getAttemptNumber();\n}\n}\n/**\n* Adds the attempt number as a suffix.\n*\n*
Also fails by throwing an exception on the first attempt.\n*/\npublic static class OnceFailingMapper extends RichMapFunction {\nprivate final String suffix;\npublic OnceFailingMapper(String suffix) {\nthis.suffix = suffix;\n}\n@Override\npublic String map(String value) throws Exception {\nif (getRuntimeContext().getAttemptNumber() <= 0) {\nthrow new RuntimeException(\"FAILING\");\n}\nreturn value + \"-\" + suffix + getRuntimeContext().getAttemptNumber();\n}\n}\n}" + }, + { + "comment": "nit: inconsistent `final`. I don't see you re-setting this variable.", + "method_body": "private void checkNamingPattern(DetailAST blockCommentToken) {\nif (!BlockCommentPosition.isOnMethod(blockCommentToken)) {\nreturn;\n}\nDetailNode javadocNode = null;\ntry {\njavadocNode = DetailNodeTreeStringPrinter.parseJavadocAsDetailNode(blockCommentToken);\n} catch (IllegalArgumentException ex) {\n}\nif (javadocNode == null) {\nreturn;\n}\nfor (DetailNode node : javadocNode.getChildren()) {\nif (node.getType() != JavadocTokenTypes.JAVADOC_INLINE_TAG) {\ncontinue;\n}\nDetailNode customNameNode = JavadocUtil.findFirstToken(node, JavadocTokenTypes.CUSTOM_NAME);\nif (customNameNode == null || !CODE_SNIPPET_ANNOTATION.equals(customNameNode.getText())) {\nreturn;\n}\nDetailNode descriptionNode = JavadocUtil.findFirstToken(node, JavadocTokenTypes.DESCRIPTION);\nif (descriptionNode == null) {\nlog(node.getLineNumber(), MISSING_CODESNIPPET_TAG_MESSAGE);\nreturn;\n}\nString customDescription = JavadocUtil.findFirstToken(descriptionNode, JavadocTokenTypes.TEXT).getText();\nDetailAST methodDefToken = methodDefStack.peek();\nfinal String methodName = methodDefToken.findFirstToken(TokenTypes.IDENT).getText();\nfinal String className = classNameStack.isEmpty() ? \"\" : classNameStack.peek();\nfinal String parameters = constructParametersString(methodDefToken);\nString fullPath = packageName + \".\" + className + \".\" + methodName;\nif (parameters != null) {\nfullPath = fullPath + \"\n}\nif (customDescription == null || customDescription.isEmpty() ||\n!isNamingMatched(customDescription.toLowerCase(), fullPath.toLowerCase(), parameters)) {\nlog(node.getLineNumber(), String.format(\"Naming pattern mismatch. The @codeSnippet description \"\n+ \"''%s'' does not match ''%s''. Case Insensitive.\", customDescription, fullPath));\n}\n}\n}", + "target_code": "DetailAST methodDefToken = methodDefStack.peek();", + "method_body_after": "private void checkNamingPattern(DetailAST blockCommentToken) {\nif (!BlockCommentPosition.isOnMethod(blockCommentToken)) {\nreturn;\n}\nDetailNode javadocNode = null;\ntry {\njavadocNode = DetailNodeTreeStringPrinter.parseJavadocAsDetailNode(blockCommentToken);\n} catch (IllegalArgumentException ex) {\n}\nif (javadocNode == null) {\nreturn;\n}\nfor (DetailNode node : javadocNode.getChildren()) {\nif (node.getType() != JavadocTokenTypes.JAVADOC_INLINE_TAG) {\ncontinue;\n}\nDetailNode customNameNode = JavadocUtil.findFirstToken(node, JavadocTokenTypes.CUSTOM_NAME);\nif (customNameNode == null || !CODE_SNIPPET_ANNOTATION.equals(customNameNode.getText())) {\nreturn;\n}\nDetailNode descriptionNode = JavadocUtil.findFirstToken(node, JavadocTokenTypes.DESCRIPTION);\nif (descriptionNode == null) {\nlog(node.getLineNumber(), MISSING_CODESNIPPET_TAG_MESSAGE);\nreturn;\n}\nString customDescription = JavadocUtil.findFirstToken(descriptionNode, JavadocTokenTypes.TEXT).getText();\nfinal String methodName = methodDefToken.findFirstToken(TokenTypes.IDENT).getText();\nfinal String className = classNameStack.isEmpty() ? 
\"\" : classNameStack.peek();\nfinal String parameters = constructParametersString(methodDefToken);\nString fullPath = packageName + \".\" + className + \".\" + methodName;\nfinal String fullPathWithoutParameters = fullPath;\nif (parameters != null) {\nfullPath = fullPath + \"\n}\nif (customDescription == null || customDescription.isEmpty()\n|| !isNamingMatched(customDescription.toLowerCase(Locale.ROOT),\nfullPathWithoutParameters.toLowerCase(Locale.ROOT), parameters)) {\nlog(node.getLineNumber(), String.format(\"Naming pattern mismatch. The @codesnippet description \"\n+ \"''%s'' does not match ''%s''. Case Insensitive.\", customDescription, fullPath));\n}\n}\n}", + "context_before": "class name when leave the same token\nprivate Deque classNameStack = new ArrayDeque<>();\nprivate Deque methodDefStack = new ArrayDeque<>();\n@Override\npublic int[] getDefaultTokens() {\nreturn getRequiredTokens();\n}", + "context_after": "class name when leave the same token\nprivate Deque classNameStack = new ArrayDeque<>();\nprivate DetailAST methodDefToken = null;\n@Override\npublic int[] getDefaultTokens() {\nreturn getRequiredTokens();\n}" + }, + { + "comment": "Please fix assertion by original `getIndexes`", + "method_body": "private void assertTableMetaDataMap(final Map actual) {\nassertThat(actual.size(), is(1));\nassertThat(actual.get(\"tbl\").getColumns().size(), is(2));\nassertThat(actual.get(\"tbl\").getColumnMetaData(0), is(new ColumnMetaData(\"id\", 4, true, true, true)));\nassertThat(actual.get(\"tbl\").getColumnMetaData(1), is(new ColumnMetaData(\"name\", 12, false, false, false)));\nassertThat(actual.get(\"tbl\").getIndexes().size(), is(1));\nassertThat(actual.get(\"tbl\").getIndexMetaData(0), is(new IndexMetaData(\"id\")));\n}", + "target_code": "assertThat(actual.get(\"tbl\").getIndexMetaData(0), is(new IndexMetaData(\"id\")));", + "method_body_after": "private void assertTableMetaDataMap(final Map actual) {\nassertThat(actual.size(), is(1));\nassertThat(actual.get(\"tbl\").getColumns().size(), is(2));\nassertThat(actual.get(\"tbl\").getColumnMetaData(0), is(new ColumnMetaData(\"id\", 4, true, true, true)));\nassertThat(actual.get(\"tbl\").getColumnMetaData(1), is(new ColumnMetaData(\"name\", 12, false, false, false)));\nassertThat(actual.get(\"tbl\").getIndexes().size(), is(1));\nassertThat(actual.get(\"tbl\").getIndexes().get(\"id\"), is(new IndexMetaData(\"id\")));\n}", + "context_before": "class MySQLTableMetaDataLoaderTest {\n@BeforeClass\npublic static void setUp() {\nShardingSphereServiceLoader.register(DialectTableMetaDataLoader.class);\n}\n@Test\npublic void assertLoadWithoutExistedTables() throws SQLException {\nDataSource dataSource = mockDataSource();\nResultSet resultSet = mockTableMetaDataResultSet();\nwhen(dataSource.getConnection().prepareStatement(\n\"SELECT TABLE_NAME, COLUMN_NAME, DATA_TYPE, COLUMN_KEY, EXTRA, COLLATION_NAME FROM information_schema.columns WHERE TABLE_SCHEMA=?\").executeQuery()).thenReturn(resultSet);\nResultSet indexResultSet = mockIndexMetaDataResultSet();\nwhen(dataSource.getConnection().prepareStatement(\n\"SELECT INDEX_NAME FROM information_schema.statistics WHERE TABLE_SCHEMA=?\").executeQuery()).thenReturn(indexResultSet);\nassertTableMetaDataMap(getTableMetaDataLoader().load(dataSource, Collections.emptyList()));\n}\n@Test\npublic void assertLoadWithExistedTables() throws SQLException {\nDataSource dataSource = mockDataSource();\nResultSet resultSet = mockTableMetaDataResultSet();\nwhen(dataSource.getConnection().prepareStatement(\n\"SELECT 
TABLE_NAME, COLUMN_NAME, DATA_TYPE, COLUMN_KEY, EXTRA, COLLATION_NAME FROM information_schema.columns WHERE TABLE_SCHEMA=? AND TABLE_NAME NOT IN ('existed_tbl')\")\n.executeQuery()).thenReturn(resultSet);\nResultSet indexResultSet = mockIndexMetaDataResultSet();\nwhen(dataSource.getConnection().prepareStatement(\n\"SELECT INDEX_NAME FROM information_schema.statistics WHERE TABLE_SCHEMA=?\").executeQuery()).thenReturn(indexResultSet);\nassertTableMetaDataMap(getTableMetaDataLoader().load(dataSource, Collections.singletonList(\"existed_tbl\")));\n}\nprivate DataSource mockDataSource() throws SQLException {\nDataSource result = mock(DataSource.class, RETURNS_DEEP_STUBS);\nResultSet typeInfoResultSet = mockTypeInfoResultSet();\nwhen(result.getConnection().getMetaData().getTypeInfo()).thenReturn(typeInfoResultSet);\nreturn result;\n}\nprivate ResultSet mockTypeInfoResultSet() throws SQLException {\nResultSet result = mock(ResultSet.class);\nwhen(result.next()).thenReturn(true, true, false);\nwhen(result.getString(\"TYPE_NAME\")).thenReturn(\"int\", \"varchar\");\nwhen(result.getInt(\"DATA_TYPE\")).thenReturn(4, 12);\nreturn result;\n}\nprivate ResultSet mockTableMetaDataResultSet() throws SQLException {\nResultSet result = mock(ResultSet.class);\nwhen(result.next()).thenReturn(true, true, false);\nwhen(result.getString(\"TABLE_NAME\")).thenReturn(\"tbl\");\nwhen(result.getString(\"COLUMN_NAME\")).thenReturn(\"id\", \"name\");\nwhen(result.getString(\"DATA_TYPE\")).thenReturn(\"int\", \"varchar\");\nwhen(result.getString(\"COLUMN_KEY\")).thenReturn(\"PRI\", \"\");\nwhen(result.getString(\"EXTRA\")).thenReturn(\"auto_increment\", \"\");\nwhen(result.getString(\"COLLATION_NAME\")).thenReturn(\"utf8_general_ci\", \"utf8\");\nreturn result;\n}\nprivate ResultSet mockIndexMetaDataResultSet() throws SQLException {\nResultSet result = mock(ResultSet.class);\nwhen(result.next()).thenReturn(true, false, false);\nwhen(result.getString(\"INDEX_NAME\")).thenReturn(\"id\");\nreturn result;\n}\nprivate DialectTableMetaDataLoader getTableMetaDataLoader() {\nfor (DialectTableMetaDataLoader each : ShardingSphereServiceLoader.newServiceInstances(DialectTableMetaDataLoader.class)) {\nif (\"MySQL\".equals(each.getDatabaseType())) {\nreturn each;\n}\n}\nthrow new IllegalStateException(\"Can not find MySQLTableMetaDataLoader\");\n}\n}", + "context_after": "class MySQLTableMetaDataLoaderTest {\n@BeforeClass\npublic static void setUp() {\nShardingSphereServiceLoader.register(DialectTableMetaDataLoader.class);\n}\n@Test\npublic void assertLoadWithoutExistedTables() throws SQLException {\nDataSource dataSource = mockDataSource();\nResultSet resultSet = mockTableMetaDataResultSet();\nwhen(dataSource.getConnection().prepareStatement(\n\"SELECT TABLE_NAME, COLUMN_NAME, DATA_TYPE, COLUMN_KEY, EXTRA, COLLATION_NAME FROM information_schema.columns WHERE TABLE_SCHEMA=?\").executeQuery()).thenReturn(resultSet);\nResultSet indexResultSet = mockIndexMetaDataResultSet();\nwhen(dataSource.getConnection().prepareStatement(\n\"SELECT INDEX_NAME FROM information_schema.statistics WHERE TABLE_SCHEMA=?\").executeQuery()).thenReturn(indexResultSet);\nassertTableMetaDataMap(getTableMetaDataLoader().load(dataSource, Collections.emptyList()));\n}\n@Test\npublic void assertLoadWithExistedTables() throws SQLException {\nDataSource dataSource = mockDataSource();\nResultSet resultSet = mockTableMetaDataResultSet();\nwhen(dataSource.getConnection().prepareStatement(\n\"SELECT TABLE_NAME, COLUMN_NAME, DATA_TYPE, COLUMN_KEY, EXTRA, COLLATION_NAME 
FROM information_schema.columns WHERE TABLE_SCHEMA=? AND TABLE_NAME NOT IN ('existed_tbl')\")\n.executeQuery()).thenReturn(resultSet);\nResultSet indexResultSet = mockIndexMetaDataResultSet();\nwhen(dataSource.getConnection().prepareStatement(\n\"SELECT INDEX_NAME FROM information_schema.statistics WHERE TABLE_SCHEMA=?\").executeQuery()).thenReturn(indexResultSet);\nassertTableMetaDataMap(getTableMetaDataLoader().load(dataSource, Collections.singletonList(\"existed_tbl\")));\n}\nprivate DataSource mockDataSource() throws SQLException {\nDataSource result = mock(DataSource.class, RETURNS_DEEP_STUBS);\nResultSet typeInfoResultSet = mockTypeInfoResultSet();\nwhen(result.getConnection().getMetaData().getTypeInfo()).thenReturn(typeInfoResultSet);\nreturn result;\n}\nprivate ResultSet mockTypeInfoResultSet() throws SQLException {\nResultSet result = mock(ResultSet.class);\nwhen(result.next()).thenReturn(true, true, false);\nwhen(result.getString(\"TYPE_NAME\")).thenReturn(\"int\", \"varchar\");\nwhen(result.getInt(\"DATA_TYPE\")).thenReturn(4, 12);\nreturn result;\n}\nprivate ResultSet mockTableMetaDataResultSet() throws SQLException {\nResultSet result = mock(ResultSet.class);\nwhen(result.next()).thenReturn(true, true, false);\nwhen(result.getString(\"TABLE_NAME\")).thenReturn(\"tbl\");\nwhen(result.getString(\"COLUMN_NAME\")).thenReturn(\"id\", \"name\");\nwhen(result.getString(\"DATA_TYPE\")).thenReturn(\"int\", \"varchar\");\nwhen(result.getString(\"COLUMN_KEY\")).thenReturn(\"PRI\", \"\");\nwhen(result.getString(\"EXTRA\")).thenReturn(\"auto_increment\", \"\");\nwhen(result.getString(\"COLLATION_NAME\")).thenReturn(\"utf8_general_ci\", \"utf8\");\nreturn result;\n}\nprivate ResultSet mockIndexMetaDataResultSet() throws SQLException {\nResultSet result = mock(ResultSet.class);\nwhen(result.next()).thenReturn(true, false);\nwhen(result.getString(\"INDEX_NAME\")).thenReturn(\"id\");\nreturn result;\n}\nprivate DialectTableMetaDataLoader getTableMetaDataLoader() {\nfor (DialectTableMetaDataLoader each : ShardingSphereServiceLoader.newServiceInstances(DialectTableMetaDataLoader.class)) {\nif (\"MySQL\".equals(each.getDatabaseType())) {\nreturn each;\n}\n}\nthrow new IllegalStateException(\"Can not find MySQLTableMetaDataLoader\");\n}\n}" + }, + { + "comment": "Can we just pass a flag into `BeamSqlLikeExpression`? 
", + "method_body": "private static BeamSqlExpression getBeamSqlExpression(RexNode rexNode) {\nBeamSqlExpression ret;\nif (rexNode instanceof RexLiteral) {\nRexLiteral node = (RexLiteral) rexNode;\nSqlTypeName type = node.getTypeName();\nObject value = node.getValue();\nif (SqlTypeName.CHAR_TYPES.contains(type) && node.getValue() instanceof NlsString) {\nret = BeamSqlPrimitive.of(type, ((NlsString) value).getValue());\n} else if (isDateNode(type, value)) {\nret = BeamSqlPrimitive.of(type, new DateTime(((Calendar) value).getTimeInMillis()));\n} else {\nSqlTypeName realType = node.getType().getSqlTypeName();\nObject realValue = value;\nif (SqlTypeName.NUMERIC_TYPES.contains(type)) {\nswitch (realType) {\ncase TINYINT:\nrealValue = SqlFunctions.toByte(value);\nbreak;\ncase SMALLINT:\nrealValue = SqlFunctions.toShort(value);\nbreak;\ncase INTEGER:\nrealValue = SqlFunctions.toInt(value);\nbreak;\ncase BIGINT:\nrealValue = SqlFunctions.toLong(value);\nbreak;\ncase FLOAT:\nrealValue = SqlFunctions.toFloat(value);\nbreak;\ncase DOUBLE:\nrealValue = SqlFunctions.toDouble(value);\nbreak;\ncase DECIMAL:\nrealValue = SqlFunctions.toBigDecimal(value);\nbreak;\ndefault:\nthrow new IllegalStateException(\n\"Unsupported conversion: Attempted convert node \"\n+ node.toString()\n+ \" of type \"\n+ type\n+ \"to \"\n+ realType);\n}\n}\nret = BeamSqlPrimitive.of(realType, realValue);\n}\n} else if (rexNode instanceof RexInputRef) {\nRexInputRef node = (RexInputRef) rexNode;\nret = new BeamSqlInputRefExpression(node.getType().getSqlTypeName(), node.getIndex());\n} else if (rexNode instanceof RexCorrelVariable) {\nRexCorrelVariable correlVariable = (RexCorrelVariable) rexNode;\nret =\nnew BeamSqlCorrelVariableExpression(\ncorrelVariable.getType().getSqlTypeName(), correlVariable.id.getId());\n} else if (rexNode instanceof RexLocalRef) {\nRexLocalRef localRef = (RexLocalRef) rexNode;\nret = new BeamSqlLocalRefExpression(localRef.getType().getSqlTypeName(), localRef.getIndex());\n} else if (rexNode instanceof RexFieldAccess) {\nRexFieldAccess fieldAccessNode = (RexFieldAccess) rexNode;\nBeamSqlExpression referenceExpression = buildExpression(fieldAccessNode.getReferenceExpr());\nint nestedFieldIndex = fieldAccessNode.getField().getIndex();\nSqlTypeName nestedFieldType = fieldAccessNode.getField().getType().getSqlTypeName();\nret =\nnew BeamSqlFieldAccessExpression(referenceExpression, nestedFieldIndex, nestedFieldType);\n} else if (rexNode instanceof RexCall) {\nRexCall node = (RexCall) rexNode;\nString opName = node.op.getName();\nList subExps = new ArrayList<>();\nfor (RexNode subNode : node.getOperands()) {\nsubExps.add(buildExpression(subNode));\n}\nswitch (opName) {\ncase \"AND\":\nret = new BeamSqlAndExpression(subExps);\nbreak;\ncase \"OR\":\nret = new BeamSqlOrExpression(subExps);\nbreak;\ncase \"NOT\":\nret = new BeamSqlNotExpression(subExps);\nbreak;\ncase \"=\":\nret = new BeamSqlEqualsExpression(subExps);\nbreak;\ncase \"<>\":\nret = new BeamSqlNotEqualsExpression(subExps);\nbreak;\ncase \">\":\nret = new BeamSqlGreaterThanExpression(subExps);\nbreak;\ncase \">=\":\nret = new BeamSqlGreaterThanOrEqualsExpression(subExps);\nbreak;\ncase \"<\":\nret = new BeamSqlLessThanExpression(subExps);\nbreak;\ncase \"<=\":\nret = new BeamSqlLessThanOrEqualsExpression(subExps);\nbreak;\ncase \"LIKE\":\nret = new BeamSqlLikeExpression(subExps);\nbreak;\ncase \"NOT LIKE\":\nret = new BeamSqlNotLikeExpression(subExps);\nbreak;\ncase \"+\":\nif (SqlTypeName.NUMERIC_TYPES.contains(node.type.getSqlTypeName())) {\nret = 
new BeamSqlPlusExpression(subExps);\n} else {\nret = new BeamSqlDatetimePlusExpression(subExps);\n}\nbreak;\ncase \"-\":\nif (SqlTypeName.NUMERIC_TYPES.contains(node.type.getSqlTypeName())) {\nret = new BeamSqlMinusExpression(subExps);\n} else {\nret = new BeamSqlDatetimeMinusExpression(subExps, node.type.getSqlTypeName());\n}\nbreak;\ncase \"*\":\nif (SqlTypeName.NUMERIC_TYPES.contains(node.type.getSqlTypeName())) {\nret = new BeamSqlMultiplyExpression(subExps);\n} else {\nret = new BeamSqlIntervalMultiplyExpression(subExps);\n}\nbreak;\ncase \"/\":\ncase \"/INT\":\nret = new BeamSqlDivideExpression(subExps);\nbreak;\ncase \"MOD\":\nret = new BeamSqlModExpression(subExps);\nbreak;\ncase \"ABS\":\nret = new BeamSqlAbsExpression(subExps);\nbreak;\ncase \"ROUND\":\nret = new BeamSqlRoundExpression(subExps);\nbreak;\ncase \"LN\":\nret = new BeamSqlLnExpression(subExps);\nbreak;\ncase \"LOG10\":\nret = new BeamSqlLogExpression(subExps);\nbreak;\ncase \"EXP\":\nret = new BeamSqlExpExpression(subExps);\nbreak;\ncase \"ACOS\":\nret = new BeamSqlAcosExpression(subExps);\nbreak;\ncase \"ASIN\":\nret = new BeamSqlAsinExpression(subExps);\nbreak;\ncase \"ATAN\":\nret = new BeamSqlAtanExpression(subExps);\nbreak;\ncase \"COT\":\nret = new BeamSqlCotExpression(subExps);\nbreak;\ncase \"DEGREES\":\nret = new BeamSqlDegreesExpression(subExps);\nbreak;\ncase \"RADIANS\":\nret = new BeamSqlRadiansExpression(subExps);\nbreak;\ncase \"COS\":\nret = new BeamSqlCosExpression(subExps);\nbreak;\ncase \"SIN\":\nret = new BeamSqlSinExpression(subExps);\nbreak;\ncase \"TAN\":\nret = new BeamSqlTanExpression(subExps);\nbreak;\ncase \"SIGN\":\nret = new BeamSqlSignExpression(subExps);\nbreak;\ncase \"POWER\":\nret = new BeamSqlPowerExpression(subExps);\nbreak;\ncase \"PI\":\nret = new BeamSqlPiExpression();\nbreak;\ncase \"ATAN2\":\nret = new BeamSqlAtan2Expression(subExps);\nbreak;\ncase \"TRUNCATE\":\nret = new BeamSqlTruncateExpression(subExps);\nbreak;\ncase \"RAND\":\nret = new BeamSqlRandExpression(subExps);\nbreak;\ncase \"RAND_INTEGER\":\nret = new BeamSqlRandIntegerExpression(subExps);\nbreak;\ncase \"||\":\nret = new BeamSqlOperatorExpression(StringOperators.CONCAT, subExps);\nbreak;\ncase \"POSITION\":\nret = new BeamSqlOperatorExpression(StringOperators.POSITION, subExps);\nbreak;\ncase \"CHAR_LENGTH\":\ncase \"CHARACTER_LENGTH\":\nret = new BeamSqlOperatorExpression(StringOperators.CHAR_LENGTH, subExps);\nbreak;\ncase \"UPPER\":\nret = new BeamSqlOperatorExpression(StringOperators.UPPER, subExps);\nbreak;\ncase \"LOWER\":\nret = new BeamSqlOperatorExpression(StringOperators.LOWER, subExps);\nbreak;\ncase \"TRIM\":\nret = new BeamSqlOperatorExpression(StringOperators.TRIM, subExps);\nbreak;\ncase \"SUBSTRING\":\nret = new BeamSqlOperatorExpression(StringOperators.SUBSTRING, subExps);\nbreak;\ncase \"OVERLAY\":\nret = new BeamSqlOperatorExpression(StringOperators.OVERLAY, subExps);\nbreak;\ncase \"INITCAP\":\nret = new BeamSqlOperatorExpression(StringOperators.INIT_CAP, subExps);\nbreak;\ncase \"Reinterpret\":\nret = new BeamSqlReinterpretExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"CEIL\":\nif (SqlTypeName.NUMERIC_TYPES.contains(node.type.getSqlTypeName())) {\nret = new BeamSqlCeilExpression(subExps);\n} else {\nret = new BeamSqlOperatorExpression(DateOperators.DATETIME_CEIL, subExps);\n}\nbreak;\ncase \"FLOOR\":\nif (SqlTypeName.NUMERIC_TYPES.contains(node.type.getSqlTypeName())) {\nret = new BeamSqlFloorExpression(subExps);\n} else {\nret = new 
BeamSqlOperatorExpression(DateOperators.DATETIME_FLOOR, subExps);\n}\nbreak;\ncase \"EXTRACT_DATE\":\ncase \"EXTRACT\":\nret = new BeamSqlOperatorExpression(DateOperators.EXTRACT, subExps);\nbreak;\ncase \"LOCALTIME\":\ncase \"CURRENT_TIME\":\nret = new BeamSqlCurrentTimeExpression(subExps);\nbreak;\ncase \"CURRENT_TIMESTAMP\":\ncase \"LOCALTIMESTAMP\":\nret = new BeamSqlCurrentTimestampExpression(subExps);\nbreak;\ncase \"CURRENT_DATE\":\nret = new BeamSqlCurrentDateExpression();\nbreak;\ncase \"DATETIME_PLUS\":\nret = new BeamSqlDatetimePlusExpression(subExps);\nbreak;\ncase \"ARRAY\":\nret = new BeamSqlArrayExpression(subExps);\nbreak;\ncase \"MAP\":\nret = new BeamSqlMapExpression(subExps);\nbreak;\ncase \"ITEM\":\nswitch (subExps.get(0).getOutputType()) {\ncase MAP:\nret = new BeamSqlMapItemExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase ARRAY:\nret = new BeamSqlArrayItemExpression(subExps, node.type.getSqlTypeName());\nbreak;\ndefault:\nthrow new UnsupportedOperationException(\n\"Operator: \" + opName + \" is not supported yet\");\n}\nbreak;\ncase \"ELEMENT\":\nret = new BeamSqlSingleElementExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"CARDINALITY\":\nret = new BeamSqlCardinalityExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"DOT\":\nret = new BeamSqlDotExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"DEFAULT\":\nret = new BeamSqlDefaultExpression();\nbreak;\ncase \"CASE\":\nret = new BeamSqlCaseExpression(subExps);\nbreak;\ncase \"CAST\":\nret = new BeamSqlCastExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"IS NULL\":\nret = new BeamSqlIsNullExpression(subExps.get(0));\nbreak;\ncase \"IS NOT NULL\":\nret = new BeamSqlIsNotNullExpression(subExps.get(0));\nbreak;\ncase \"HOP\":\ncase \"TUMBLE\":\ncase \"SESSION\":\nret = new BeamSqlWindowExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"HOP_START\":\ncase \"TUMBLE_START\":\ncase \"SESSION_START\":\nret = new BeamSqlWindowStartExpression();\nbreak;\ncase \"HOP_END\":\ncase \"TUMBLE_END\":\ncase \"SESSION_END\":\nret = new BeamSqlWindowEndExpression();\nbreak;\ndefault:\nif (((RexCall) rexNode).getOperator() instanceof SqlUserDefinedFunction) {\nSqlUserDefinedFunction udf = (SqlUserDefinedFunction) ((RexCall) rexNode).getOperator();\nScalarFunctionImpl fn = (ScalarFunctionImpl) udf.getFunction();\nret =\nnew BeamSqlUdfExpression(\nfn.method, subExps, ((RexCall) rexNode).type.getSqlTypeName());\n} else {\nthrow new UnsupportedOperationException(\n\"Operator: \" + opName + \" is not supported yet\");\n}\n}\n} else {\nthrow new UnsupportedOperationException(\nString.format(\"%s is not supported yet\", rexNode.getClass().toString()));\n}\nreturn ret;\n}", + "target_code": "ret = new BeamSqlNotLikeExpression(subExps);", + "method_body_after": "private static BeamSqlExpression getBeamSqlExpression(RexNode rexNode) {\nBeamSqlExpression ret;\nif (rexNode instanceof RexLiteral) {\nRexLiteral node = (RexLiteral) rexNode;\nSqlTypeName type = node.getTypeName();\nObject value = node.getValue();\nif (SqlTypeName.CHAR_TYPES.contains(type) && node.getValue() instanceof NlsString) {\nret = BeamSqlPrimitive.of(type, ((NlsString) value).getValue());\n} else if (isDateNode(type, value)) {\nret = BeamSqlPrimitive.of(type, new DateTime(((Calendar) value).getTimeInMillis()));\n} else {\nSqlTypeName realType = node.getType().getSqlTypeName();\nObject realValue = value;\nif (SqlTypeName.NUMERIC_TYPES.contains(type)) {\nswitch (realType) {\ncase 
TINYINT:\nrealValue = SqlFunctions.toByte(value);\nbreak;\ncase SMALLINT:\nrealValue = SqlFunctions.toShort(value);\nbreak;\ncase INTEGER:\nrealValue = SqlFunctions.toInt(value);\nbreak;\ncase BIGINT:\nrealValue = SqlFunctions.toLong(value);\nbreak;\ncase FLOAT:\nrealValue = SqlFunctions.toFloat(value);\nbreak;\ncase DOUBLE:\nrealValue = SqlFunctions.toDouble(value);\nbreak;\ncase DECIMAL:\nrealValue = SqlFunctions.toBigDecimal(value);\nbreak;\ndefault:\nthrow new IllegalStateException(\n\"Unsupported conversion: Attempted convert node \"\n+ node.toString()\n+ \" of type \"\n+ type\n+ \"to \"\n+ realType);\n}\n}\nret = BeamSqlPrimitive.of(realType, realValue);\n}\n} else if (rexNode instanceof RexInputRef) {\nRexInputRef node = (RexInputRef) rexNode;\nret = new BeamSqlInputRefExpression(node.getType().getSqlTypeName(), node.getIndex());\n} else if (rexNode instanceof RexCorrelVariable) {\nRexCorrelVariable correlVariable = (RexCorrelVariable) rexNode;\nret =\nnew BeamSqlCorrelVariableExpression(\ncorrelVariable.getType().getSqlTypeName(), correlVariable.id.getId());\n} else if (rexNode instanceof RexLocalRef) {\nRexLocalRef localRef = (RexLocalRef) rexNode;\nret = new BeamSqlLocalRefExpression(localRef.getType().getSqlTypeName(), localRef.getIndex());\n} else if (rexNode instanceof RexFieldAccess) {\nRexFieldAccess fieldAccessNode = (RexFieldAccess) rexNode;\nBeamSqlExpression referenceExpression = buildExpression(fieldAccessNode.getReferenceExpr());\nint nestedFieldIndex = fieldAccessNode.getField().getIndex();\nSqlTypeName nestedFieldType = fieldAccessNode.getField().getType().getSqlTypeName();\nret =\nnew BeamSqlFieldAccessExpression(referenceExpression, nestedFieldIndex, nestedFieldType);\n} else if (rexNode instanceof RexCall) {\nRexCall node = (RexCall) rexNode;\nString opName = node.op.getName();\nList subExps = new ArrayList<>();\nfor (RexNode subNode : node.getOperands()) {\nsubExps.add(buildExpression(subNode));\n}\nswitch (opName) {\ncase \"AND\":\nret = new BeamSqlAndExpression(subExps);\nbreak;\ncase \"OR\":\nret = new BeamSqlOrExpression(subExps);\nbreak;\ncase \"NOT\":\nret = new BeamSqlNotExpression(subExps);\nbreak;\ncase \"=\":\nret = new BeamSqlEqualsExpression(subExps);\nbreak;\ncase \"<>\":\nret = new BeamSqlNotEqualsExpression(subExps);\nbreak;\ncase \">\":\nret = new BeamSqlGreaterThanExpression(subExps);\nbreak;\ncase \">=\":\nret = new BeamSqlGreaterThanOrEqualsExpression(subExps);\nbreak;\ncase \"<\":\nret = new BeamSqlLessThanExpression(subExps);\nbreak;\ncase \"<=\":\nret = new BeamSqlLessThanOrEqualsExpression(subExps);\nbreak;\ncase \"LIKE\":\nret = new BeamSqlLikeExpression(subExps);\nbreak;\ncase \"NOT LIKE\":\nret = new BeamSqlNotLikeExpression(subExps);\nbreak;\ncase \"+\":\nif (SqlTypeName.NUMERIC_TYPES.contains(node.type.getSqlTypeName())) {\nret = new BeamSqlPlusExpression(subExps);\n} else {\nret = new BeamSqlDatetimePlusExpression(subExps);\n}\nbreak;\ncase \"-\":\nif (SqlTypeName.NUMERIC_TYPES.contains(node.type.getSqlTypeName())) {\nret = new BeamSqlMinusExpression(subExps);\n} else {\nret = new BeamSqlDatetimeMinusExpression(subExps, node.type.getSqlTypeName());\n}\nbreak;\ncase \"*\":\nif (SqlTypeName.NUMERIC_TYPES.contains(node.type.getSqlTypeName())) {\nret = new BeamSqlMultiplyExpression(subExps);\n} else {\nret = new BeamSqlIntervalMultiplyExpression(subExps);\n}\nbreak;\ncase \"/\":\ncase \"/INT\":\nret = new BeamSqlDivideExpression(subExps);\nbreak;\ncase \"MOD\":\nret = new BeamSqlModExpression(subExps);\nbreak;\ncase \"ABS\":\nret = new 
BeamSqlAbsExpression(subExps);\nbreak;\ncase \"ROUND\":\nret = new BeamSqlRoundExpression(subExps);\nbreak;\ncase \"LN\":\nret = new BeamSqlLnExpression(subExps);\nbreak;\ncase \"LOG10\":\nret = new BeamSqlLogExpression(subExps);\nbreak;\ncase \"EXP\":\nret = new BeamSqlExpExpression(subExps);\nbreak;\ncase \"ACOS\":\nret = new BeamSqlAcosExpression(subExps);\nbreak;\ncase \"ASIN\":\nret = new BeamSqlAsinExpression(subExps);\nbreak;\ncase \"ATAN\":\nret = new BeamSqlAtanExpression(subExps);\nbreak;\ncase \"COT\":\nret = new BeamSqlCotExpression(subExps);\nbreak;\ncase \"DEGREES\":\nret = new BeamSqlDegreesExpression(subExps);\nbreak;\ncase \"RADIANS\":\nret = new BeamSqlRadiansExpression(subExps);\nbreak;\ncase \"COS\":\nret = new BeamSqlCosExpression(subExps);\nbreak;\ncase \"SIN\":\nret = new BeamSqlSinExpression(subExps);\nbreak;\ncase \"TAN\":\nret = new BeamSqlTanExpression(subExps);\nbreak;\ncase \"SIGN\":\nret = new BeamSqlSignExpression(subExps);\nbreak;\ncase \"POWER\":\nret = new BeamSqlPowerExpression(subExps);\nbreak;\ncase \"PI\":\nret = new BeamSqlPiExpression();\nbreak;\ncase \"ATAN2\":\nret = new BeamSqlAtan2Expression(subExps);\nbreak;\ncase \"TRUNCATE\":\nret = new BeamSqlTruncateExpression(subExps);\nbreak;\ncase \"RAND\":\nret = new BeamSqlRandExpression(subExps);\nbreak;\ncase \"RAND_INTEGER\":\nret = new BeamSqlRandIntegerExpression(subExps);\nbreak;\ncase \"||\":\nret = new BeamSqlOperatorExpression(StringOperators.CONCAT, subExps);\nbreak;\ncase \"POSITION\":\nret = new BeamSqlOperatorExpression(StringOperators.POSITION, subExps);\nbreak;\ncase \"CHAR_LENGTH\":\ncase \"CHARACTER_LENGTH\":\nret = new BeamSqlOperatorExpression(StringOperators.CHAR_LENGTH, subExps);\nbreak;\ncase \"UPPER\":\nret = new BeamSqlOperatorExpression(StringOperators.UPPER, subExps);\nbreak;\ncase \"LOWER\":\nret = new BeamSqlOperatorExpression(StringOperators.LOWER, subExps);\nbreak;\ncase \"TRIM\":\nret = new BeamSqlOperatorExpression(StringOperators.TRIM, subExps);\nbreak;\ncase \"SUBSTRING\":\nret = new BeamSqlOperatorExpression(StringOperators.SUBSTRING, subExps);\nbreak;\ncase \"OVERLAY\":\nret = new BeamSqlOperatorExpression(StringOperators.OVERLAY, subExps);\nbreak;\ncase \"INITCAP\":\nret = new BeamSqlOperatorExpression(StringOperators.INIT_CAP, subExps);\nbreak;\ncase \"Reinterpret\":\nret = new BeamSqlReinterpretExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"CEIL\":\nif (SqlTypeName.NUMERIC_TYPES.contains(node.type.getSqlTypeName())) {\nret = new BeamSqlCeilExpression(subExps);\n} else {\nret = new BeamSqlOperatorExpression(DateOperators.DATETIME_CEIL, subExps);\n}\nbreak;\ncase \"FLOOR\":\nif (SqlTypeName.NUMERIC_TYPES.contains(node.type.getSqlTypeName())) {\nret = new BeamSqlFloorExpression(subExps);\n} else {\nret = new BeamSqlOperatorExpression(DateOperators.DATETIME_FLOOR, subExps);\n}\nbreak;\ncase \"EXTRACT_DATE\":\ncase \"EXTRACT\":\nret = new BeamSqlOperatorExpression(DateOperators.EXTRACT, subExps);\nbreak;\ncase \"LOCALTIME\":\ncase \"CURRENT_TIME\":\nret = new BeamSqlCurrentTimeExpression(subExps);\nbreak;\ncase \"CURRENT_TIMESTAMP\":\ncase \"LOCALTIMESTAMP\":\nret = new BeamSqlCurrentTimestampExpression(subExps);\nbreak;\ncase \"CURRENT_DATE\":\nret = new BeamSqlCurrentDateExpression();\nbreak;\ncase \"DATETIME_PLUS\":\nret = new BeamSqlDatetimePlusExpression(subExps);\nbreak;\ncase \"ARRAY\":\nret = new BeamSqlArrayExpression(subExps);\nbreak;\ncase \"MAP\":\nret = new BeamSqlMapExpression(subExps);\nbreak;\ncase \"ITEM\":\nswitch 
(subExps.get(0).getOutputType()) {\ncase MAP:\nret = new BeamSqlMapItemExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase ARRAY:\nret = new BeamSqlArrayItemExpression(subExps, node.type.getSqlTypeName());\nbreak;\ndefault:\nthrow new UnsupportedOperationException(\n\"Operator: \" + opName + \" is not supported yet\");\n}\nbreak;\ncase \"ELEMENT\":\nret = new BeamSqlSingleElementExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"CARDINALITY\":\nret = new BeamSqlCardinalityExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"DOT\":\nret = new BeamSqlDotExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"DEFAULT\":\nret = new BeamSqlDefaultExpression();\nbreak;\ncase \"CASE\":\nret = new BeamSqlCaseExpression(subExps);\nbreak;\ncase \"CAST\":\nret = new BeamSqlCastExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"IS NULL\":\nret = new BeamSqlIsNullExpression(subExps.get(0));\nbreak;\ncase \"IS NOT NULL\":\nret = new BeamSqlIsNotNullExpression(subExps.get(0));\nbreak;\ncase \"HOP\":\ncase \"TUMBLE\":\ncase \"SESSION\":\nret = new BeamSqlWindowExpression(subExps, node.type.getSqlTypeName());\nbreak;\ncase \"HOP_START\":\ncase \"TUMBLE_START\":\ncase \"SESSION_START\":\nret = new BeamSqlWindowStartExpression();\nbreak;\ncase \"HOP_END\":\ncase \"TUMBLE_END\":\ncase \"SESSION_END\":\nret = new BeamSqlWindowEndExpression();\nbreak;\ndefault:\nif (((RexCall) rexNode).getOperator() instanceof SqlUserDefinedFunction) {\nSqlUserDefinedFunction udf = (SqlUserDefinedFunction) ((RexCall) rexNode).getOperator();\nScalarFunctionImpl fn = (ScalarFunctionImpl) udf.getFunction();\nret =\nnew BeamSqlUdfExpression(\nfn.method, subExps, ((RexCall) rexNode).type.getSqlTypeName());\n} else {\nthrow new UnsupportedOperationException(\n\"Operator: \" + opName + \" is not supported yet\");\n}\n}\n} else {\nthrow new UnsupportedOperationException(\nString.format(\"%s is not supported yet\", rexNode.getClass().toString()));\n}\nreturn ret;\n}", + "context_before": "class BeamSqlFnExecutor implements BeamSqlExpressionExecutor {\nprivate List exprs;\nprivate BeamSqlExpression filterCondition;\nprivate List projections;\npublic BeamSqlFnExecutor(RexProgram program) {\nthis.exprs =\nprogram\n.getExprList()\n.stream()\n.map(BeamSqlFnExecutor::buildExpression)\n.collect(Collectors.toList());\nthis.filterCondition =\nprogram.getCondition() == null\n? 
BeamSqlPrimitive.of(SqlTypeName.BOOLEAN, true)\n: buildExpression(program.getCondition());\nthis.projections =\nprogram\n.getProjectList()\n.stream()\n.map(BeamSqlFnExecutor::buildExpression)\n.collect(Collectors.toList());\n}\n/**\n* {@link\n* represent each {@link SqlOperator} with a corresponding {@link BeamSqlExpression}.\n*/\nstatic BeamSqlExpression buildExpression(RexNode rexNode) {\nBeamSqlExpression ret = getBeamSqlExpression(rexNode);\nif (!ret.accept()) {\nthrow new IllegalStateException(\nret.getClass().getSimpleName() + \" does not accept the operands.(\" + rexNode + \")\");\n}\nreturn ret;\n}\nprivate static boolean isDateNode(SqlTypeName type, Object value) {\nreturn (type == SqlTypeName.DATE || type == SqlTypeName.TIMESTAMP) && value instanceof Calendar;\n}\n@Override\npublic void prepare() {}\n@Override\npublic @Nullable List execute(\nRow inputRow, BoundedWindow window, BeamSqlExpressionEnvironment env) {\nfinal BeamSqlExpressionEnvironment localEnv = env.copyWithLocalRefExprs(exprs);\nboolean conditionResult = filterCondition.evaluate(inputRow, window, localEnv).getBoolean();\nif (conditionResult) {\nreturn projections\n.stream()\n.map(project -> project.evaluate(inputRow, window, localEnv).getValue())\n.collect(Collectors.toList());\n} else {\nreturn null;\n}\n}\n@Override\npublic void close() {}\n}", + "context_after": "class BeamSqlFnExecutor implements BeamSqlExpressionExecutor {\nprivate List exprs;\nprivate BeamSqlExpression filterCondition;\nprivate List projections;\npublic BeamSqlFnExecutor(RexProgram program) {\nthis.exprs =\nprogram\n.getExprList()\n.stream()\n.map(BeamSqlFnExecutor::buildExpression)\n.collect(Collectors.toList());\nthis.filterCondition =\nprogram.getCondition() == null\n? BeamSqlPrimitive.of(SqlTypeName.BOOLEAN, true)\n: buildExpression(program.getCondition());\nthis.projections =\nprogram\n.getProjectList()\n.stream()\n.map(BeamSqlFnExecutor::buildExpression)\n.collect(Collectors.toList());\n}\n/**\n* {@link\n* represent each {@link SqlOperator} with a corresponding {@link BeamSqlExpression}.\n*/\nstatic BeamSqlExpression buildExpression(RexNode rexNode) {\nBeamSqlExpression ret = getBeamSqlExpression(rexNode);\nif (!ret.accept()) {\nthrow new IllegalStateException(\nret.getClass().getSimpleName() + \" does not accept the operands.(\" + rexNode + \")\");\n}\nreturn ret;\n}\nprivate static boolean isDateNode(SqlTypeName type, Object value) {\nreturn (type == SqlTypeName.DATE || type == SqlTypeName.TIMESTAMP) && value instanceof Calendar;\n}\n@Override\npublic void prepare() {}\n@Override\npublic @Nullable List execute(\nRow inputRow, BoundedWindow window, BeamSqlExpressionEnvironment env) {\nfinal BeamSqlExpressionEnvironment localEnv = env.copyWithLocalRefExprs(exprs);\nboolean conditionResult = filterCondition.evaluate(inputRow, window, localEnv).getBoolean();\nif (conditionResult) {\nreturn projections\n.stream()\n.map(project -> project.evaluate(inputRow, window, localEnv).getValue())\n.collect(Collectors.toList());\n} else {\nreturn null;\n}\n}\n@Override\npublic void close() {}\n}" + }, + { + "comment": "Please pass the cause to log.error.", + "method_body": "public void start(final String socketPath) {\nif (!Epoll.isAvailable()) {\nlog.error(\"Epoll is unavailable, DomainSocket can't start.\");\nreturn;\n}\ntry {\nChannelFuture future = startDomainSocket(socketPath);\nfuture.addListener((ChannelFutureListener) futureParams -> {\nif (futureParams.isSuccess()) {\nlog.info(\"The listening address for DomainSocket is {}\", socketPath);\n} 
else {\nlog.error(\"DomainSocket failed to start.\");\nfutureParams.cause().printStackTrace();\n}\n});\n} catch (final InterruptedException ignored) {\nclose();\n}\n}", + "target_code": "log.error(\"DomainSocket failed to start.\");", + "method_body_after": "public void start(final String socketPath) {\nif (!Epoll.isAvailable()) {\nlog.error(\"Epoll is unavailable, DomainSocket can't start.\");\nreturn;\n}\nChannelFuture future = startDomainSocket(socketPath);\nfuture.addListener((ChannelFutureListener) futureParams -> {\nif (futureParams.isSuccess()) {\nlog.info(\"The listening address for DomainSocket is {}\", socketPath);\n} else {\nlog.error(\"DomainSocket failed to start:{}\", futureParams.cause().getMessage());\nfutureParams.cause().printStackTrace();\n}\n});\n}", + "context_before": "class ShardingSphereProxy {\nprivate EventLoopGroup bossGroup;\nprivate EventLoopGroup workerGroup;\npublic ShardingSphereProxy() {\ncreateEventLoopGroup();\nRuntime.getRuntime().addShutdownHook(new Thread(this::close));\n}\n/**\n* Start ShardingSphere-Proxy.\n*\n* @param port port\n* @param addresses addresses\n*/\n@SneakyThrows(InterruptedException.class)\npublic void start(final int port, final List addresses) {\ntry {\nList futures = startInternal(port, addresses);\naccept(futures);\n} finally {\nclose();\n}\n}\n/**\n* Start ShardingSphere-Proxy with DomainSocket.\n*\n* @param socketPath socket path\n*/\nprivate List startInternal(final int port, final List addresses) throws InterruptedException {\nServerBootstrap bootstrap = new ServerBootstrap();\ninitServerBootstrap(bootstrap);\nList futures = new ArrayList<>();\nfor (String address : addresses) {\nfutures.add(bootstrap.bind(address, port).sync());\n}\nreturn futures;\n}\nprivate ChannelFuture startDomainSocket(final String socketPath) throws InterruptedException {\nServerBootstrap bootstrap = new ServerBootstrap();\ninitServerBootstrap(bootstrap, new DomainSocketAddress(socketPath));\nreturn bootstrap.bind();\n}\nprivate void accept(final List futures) throws InterruptedException {\nlog.info(\"ShardingSphere-Proxy {} mode started successfully\", ProxyContext.getInstance().getContextManager().getInstanceContext().getModeConfiguration().getType());\nfor (ChannelFuture future : futures) {\nfuture.channel().closeFuture().sync();\n}\n}\nprivate void createEventLoopGroup() {\nbossGroup = Epoll.isAvailable() ? new EpollEventLoopGroup(1) : new NioEventLoopGroup(1);\nworkerGroup = getWorkerGroup();\n}\nprivate EventLoopGroup getWorkerGroup() {\nint workerThreads = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.PROXY_FRONTEND_EXECUTOR_SIZE);\nreturn Epoll.isAvailable() ? new EpollEventLoopGroup(workerThreads) : new NioEventLoopGroup(workerThreads);\n}\nprivate void initServerBootstrap(final ServerBootstrap bootstrap) {\nInteger backLog = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.PROXY_NETTY_BACKLOG);\nbootstrap.group(bossGroup, workerGroup)\n.channel(Epoll.isAvailable() ? 
EpollServerSocketChannel.class : NioServerSocketChannel.class)\n.option(ChannelOption.WRITE_BUFFER_WATER_MARK, new WriteBufferWaterMark(8 * 1024 * 1024, 16 * 1024 * 1024))\n.option(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)\n.option(ChannelOption.SO_REUSEADDR, true)\n.option(ChannelOption.SO_BACKLOG, backLog)\n.childOption(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)\n.childOption(ChannelOption.TCP_NODELAY, true)\n.handler(new LoggingHandler(LogLevel.INFO))\n.childHandler(new ServerHandlerInitializer(FrontDatabaseProtocolTypeFactory.getDatabaseType()));\n}\nprivate void initServerBootstrap(final ServerBootstrap bootstrap, final DomainSocketAddress localDomainSocketAddress) {\nbootstrap.group(bossGroup, workerGroup)\n.channel(EpollServerDomainSocketChannel.class)\n.localAddress(localDomainSocketAddress)\n.handler(new LoggingHandler(LogLevel.INFO))\n.childHandler(new ServerHandlerInitializer(FrontDatabaseProtocolTypeFactory.getDatabaseType()));\n}\nprivate void close() {\nif (null != bossGroup) {\nbossGroup.shutdownGracefully();\n}\nif (null != workerGroup) {\nworkerGroup.shutdownGracefully();\n}\nBackendExecutorContext.getInstance().getExecutorEngine().close();\n}\n}", + "context_after": "class ShardingSphereProxy {\nprivate EventLoopGroup bossGroup;\nprivate EventLoopGroup workerGroup;\npublic ShardingSphereProxy() {\ncreateEventLoopGroup();\nRuntime.getRuntime().addShutdownHook(new Thread(this::close));\n}\n/**\n* Start ShardingSphere-Proxy.\n*\n* @param port port\n* @param addresses addresses\n*/\n@SneakyThrows(InterruptedException.class)\npublic void start(final int port, final List addresses) {\ntry {\nList futures = startInternal(port, addresses);\naccept(futures);\n} finally {\nclose();\n}\n}\n/**\n* Start ShardingSphere-Proxy with DomainSocket.\n*\n* @param socketPath socket path\n*/\nprivate List startInternal(final int port, final List addresses) throws InterruptedException {\nServerBootstrap bootstrap = new ServerBootstrap();\ninitServerBootstrap(bootstrap);\nList futures = new ArrayList<>();\nfor (String address : addresses) {\nfutures.add(bootstrap.bind(address, port).sync());\n}\nreturn futures;\n}\nprivate ChannelFuture startDomainSocket(final String socketPath) {\nServerBootstrap bootstrap = new ServerBootstrap();\ninitServerBootstrap(bootstrap, new DomainSocketAddress(socketPath));\nreturn bootstrap.bind();\n}\nprivate void accept(final List futures) throws InterruptedException {\nlog.info(\"ShardingSphere-Proxy {} mode started successfully\", ProxyContext.getInstance().getContextManager().getInstanceContext().getModeConfiguration().getType());\nfor (ChannelFuture future : futures) {\nfuture.channel().closeFuture().sync();\n}\n}\nprivate void createEventLoopGroup() {\nbossGroup = Epoll.isAvailable() ? new EpollEventLoopGroup(1) : new NioEventLoopGroup(1);\nworkerGroup = getWorkerGroup();\n}\nprivate EventLoopGroup getWorkerGroup() {\nint workerThreads = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.PROXY_FRONTEND_EXECUTOR_SIZE);\nreturn Epoll.isAvailable() ? new EpollEventLoopGroup(workerThreads) : new NioEventLoopGroup(workerThreads);\n}\nprivate void initServerBootstrap(final ServerBootstrap bootstrap) {\nInteger backLog = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.PROXY_NETTY_BACKLOG);\nbootstrap.group(bossGroup, workerGroup)\n.channel(Epoll.isAvailable() ? 
EpollServerSocketChannel.class : NioServerSocketChannel.class)\n.option(ChannelOption.WRITE_BUFFER_WATER_MARK, new WriteBufferWaterMark(8 * 1024 * 1024, 16 * 1024 * 1024))\n.option(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)\n.option(ChannelOption.SO_REUSEADDR, true)\n.option(ChannelOption.SO_BACKLOG, backLog)\n.childOption(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)\n.childOption(ChannelOption.TCP_NODELAY, true)\n.handler(new LoggingHandler(LogLevel.INFO))\n.childHandler(new ServerHandlerInitializer(FrontDatabaseProtocolTypeFactory.getDatabaseType()));\n}\nprivate void initServerBootstrap(final ServerBootstrap bootstrap, final DomainSocketAddress localDomainSocketAddress) {\nbootstrap.group(bossGroup, workerGroup)\n.channel(EpollServerDomainSocketChannel.class)\n.localAddress(localDomainSocketAddress)\n.handler(new LoggingHandler(LogLevel.INFO))\n.childHandler(new ServerHandlerInitializer(FrontDatabaseProtocolTypeFactory.getDatabaseType()));\n}\nprivate void close() {\nbossGroup.shutdownGracefully();\nworkerGroup.shutdownGracefully();\nBackendExecutorContext.getInstance().getExecutorEngine().close();\n}\n}" + }, + { + "comment": "It could be that the first thread started runs before the synchronized block starts, causing a deadlock. I have a fix in https://github.com/apache/beam/pull/29041 that uses countdown latches.", "method_body": "public void testActiveThreadMetric() throws Exception {\nint maxThreads = 5;\nint threadExpirationSec = 60;\nBoundedQueueExecutor executor =\nnew BoundedQueueExecutor(\nmaxThreads,\nthreadExpirationSec,\nTimeUnit.SECONDS,\nmaxThreads,\n10000000,\nnew ThreadFactoryBuilder()\n.setNameFormat(\"DataflowWorkUnits-%d\")\n.setDaemon(true)\n.build());\nComputationState computationState =\nnew ComputationState(\n\"computation\",\ndefaultMapTask(Arrays.asList(makeSourceInstruction(StringUtf8Coder.of()))),\nexecutor,\nImmutableMap.of(),\nnull);\nShardedKey key1Shard1 = ShardedKey.create(ByteString.copyFromUtf8(\"key1\"), 1);\nConsumer sleepProcessWorkFn =\nunused -> {\nsynchronized (this) {\nthis.notify();\n}\nint count = 0;\nwhile (!stop) {\ncount += 1;\n}\n};\nWork m2 = createMockWork(2, sleepProcessWorkFn);\nWork m3 = createMockWork(3, sleepProcessWorkFn);\nWork m4 = createMockWork(4, sleepProcessWorkFn);\nassertEquals(0, executor.activeCount());\nassertTrue(computationState.activateWork(key1Shard1, m2));\nsynchronized (this) {\nexecutor.execute(m2, m2.getWorkItem().getSerializedSize());\nthis.wait();\nthis.wait();\n}\nassertEquals(2, executor.activeCount());\nassertTrue(computationState.activateWork(key1Shard1, m3));\nassertTrue(computationState.activateWork(key1Shard1, m4));\nsynchronized (this) {\nexecutor.execute(m3, m3.getWorkItem().getSerializedSize());\nthis.wait();\n}\nassertEquals(3, executor.activeCount());\nsynchronized (this) {\nexecutor.execute(m4, m4.getWorkItem().getSerializedSize());\nthis.wait();\n}\nassertEquals(4, executor.activeCount());\nstop = true;\nexecutor.shutdown();\n}", "target_code": "this.wait();", "method_body_after": "public void testActiveThreadMetric() throws Exception {\nint maxThreads = 5;\nint threadExpirationSec = 60;\nBoundedQueueExecutor executor =\nnew BoundedQueueExecutor(\nmaxThreads,\nthreadExpirationSec,\nTimeUnit.SECONDS,\nmaxThreads,\n10000000,\nnew ThreadFactoryBuilder()\n.setNameFormat(\"DataflowWorkUnits-%d\")\n.setDaemon(true)\n.build());\nComputationState computationState =\nnew 
ComputationState(\n\"computation\",\ndefaultMapTask(Arrays.asList(makeSourceInstruction(StringUtf8Coder.of()))),\nexecutor,\nImmutableMap.of(),\nnull);\nShardedKey key1Shard1 = ShardedKey.create(ByteString.copyFromUtf8(\"key1\"), 1);\nConsumer sleepProcessWorkFn =\nunused -> {\nsynchronized (this) {\nthis.notify();\n}\nint count = 0;\nwhile (!stop) {\ncount += 1;\n}\n};\nWork m2 = createMockWork(2, sleepProcessWorkFn);\nWork m3 = createMockWork(3, sleepProcessWorkFn);\nWork m4 = createMockWork(4, sleepProcessWorkFn);\nassertEquals(0, executor.activeCount());\nassertTrue(computationState.activateWork(key1Shard1, m2));\nsynchronized (this) {\nexecutor.execute(m2, m2.getWorkItem().getSerializedSize());\nthis.wait();\nthis.wait();\n}\nassertEquals(2, executor.activeCount());\nassertTrue(computationState.activateWork(key1Shard1, m3));\nassertTrue(computationState.activateWork(key1Shard1, m4));\nsynchronized (this) {\nexecutor.execute(m3, m3.getWorkItem().getSerializedSize());\nthis.wait();\n}\nassertEquals(3, executor.activeCount());\nsynchronized (this) {\nexecutor.execute(m4, m4.getWorkItem().getSerializedSize());\nthis.wait();\n}\nassertEquals(4, executor.activeCount());\nstop = true;\nexecutor.shutdown();\n}", + "context_before": "class StreamingDataflowWorkerTest {\nprivate static final Logger LOG = LoggerFactory.getLogger(StreamingDataflowWorkerTest.class);\nprivate static final IntervalWindow DEFAULT_WINDOW =\nnew IntervalWindow(new Instant(1234), Duration.millis(1000));\nprivate static final IntervalWindow WINDOW_AT_ZERO =\nnew IntervalWindow(new Instant(0), new Instant(1000));\nprivate static final IntervalWindow WINDOW_AT_ONE_SECOND =\nnew IntervalWindow(new Instant(1000), new Instant(2000));\nprivate static final Coder DEFAULT_WINDOW_CODER = IntervalWindow.getCoder();\nprivate static final Coder> DEFAULT_WINDOW_COLLECTION_CODER =\nCollectionCoder.of(DEFAULT_WINDOW_CODER);\nprivate static final String DEFAULT_COMPUTATION_ID = \"computation\";\nprivate static final String DEFAULT_MAP_STAGE_NAME = \"computation\";\nprivate static final String DEFAULT_MAP_SYSTEM_NAME = \"computation\";\nprivate static final String DEFAULT_OUTPUT_ORIGINAL_NAME = \"originalName\";\nprivate static final String DEFAULT_OUTPUT_SYSTEM_NAME = \"systemName\";\nprivate static final String DEFAULT_PARDO_SYSTEM_NAME = \"parDo\";\nprivate static final String DEFAULT_PARDO_ORIGINAL_NAME = \"parDoOriginalName\";\nprivate static final String DEFAULT_PARDO_USER_NAME = \"parDoUserName\";\nprivate static final String DEFAULT_PARDO_STATE_FAMILY = \"parDoStateFamily\";\nprivate static final String DEFAULT_SOURCE_SYSTEM_NAME = \"source\";\nprivate static final String DEFAULT_SOURCE_ORIGINAL_NAME = \"sourceOriginalName\";\nprivate static final String DEFAULT_SINK_SYSTEM_NAME = \"sink\";\nprivate static final String DEFAULT_SINK_ORIGINAL_NAME = \"sinkOriginalName\";\nprivate static final String DEFAULT_SOURCE_COMPUTATION_ID = \"upstream\";\nprivate static final String DEFAULT_KEY_STRING = \"key\";\nprivate static final long DEFAULT_SHARDING_KEY = 12345;\nprivate static final ByteString DEFAULT_KEY_BYTES = ByteString.copyFromUtf8(DEFAULT_KEY_STRING);\nprivate static final String DEFAULT_DATA_STRING = \"data\";\nprivate static final String DEFAULT_DESTINATION_STREAM_ID = \"out\";\nprivate static final Function EMPTY_DATA_RESPONDER =\n(GetDataRequest request) -> {\nGetDataResponse.Builder builder = GetDataResponse.newBuilder();\nfor (ComputationGetDataRequest compRequest : request.getRequestsList()) 
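Editor's note on the reviewer's deadlock concern: with `wait()`/`notify()`, a `notify()` issued before the other thread has entered `wait()` is simply lost, so if a worker thread runs its `synchronized` block first, the test's `this.wait()` can block forever. A `CountDownLatch` avoids that race, because a `countDown()` that happens before `await()` still lets `await()` return immediately. A minimal sketch of the latch-based handshake; the names are illustrative, and the actual fix is the linked PR, not this snippet:

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

// Two fake workers signal readiness via a latch instead of wait()/notify().
class LatchHandshakeDemo {
    public static void main(String[] args) throws InterruptedException {
        CountDownLatch started = new CountDownLatch(2);
        Runnable work = () -> started.countDown(); // safe even if nobody is waiting yet
        new Thread(work).start();
        new Thread(work).start();
        // Returns immediately if both countDown() calls already happened; times out
        // instead of hanging forever if they never do.
        if (!started.await(5, TimeUnit.SECONDS)) {
            throw new AssertionError("worker threads did not start in time");
        }
    }
}
```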
{\nComputationGetDataResponse.Builder compBuilder =\nbuilder.addDataBuilder().setComputationId(compRequest.getComputationId());\nfor (KeyedGetDataRequest keyRequest : compRequest.getRequestsList()) {\nKeyedGetDataResponse.Builder keyBuilder =\ncompBuilder\n.addDataBuilder()\n.setKey(keyRequest.getKey())\n.setShardingKey(keyRequest.getShardingKey());\nkeyBuilder.addAllValues(keyRequest.getValuesToFetchList());\nkeyBuilder.addAllBags(keyRequest.getBagsToFetchList());\nkeyBuilder.addAllWatermarkHolds(keyRequest.getWatermarkHoldsToFetchList());\n}\n}\nreturn builder.build();\n};\nprivate final boolean streamingEngine;\nprivate final Supplier idGenerator =\nnew Supplier() {\nprivate final AtomicLong idGenerator = new AtomicLong(1L);\n@Override\npublic Long get() {\nreturn idGenerator.getAndIncrement();\n}\n};\n@Rule public BlockingFn blockingFn = new BlockingFn();\n@Rule public TestRule restoreMDC = new RestoreDataflowLoggingMDC();\n@Rule public ErrorCollector errorCollector = new ErrorCollector();\nWorkUnitClient mockWorkUnitClient = mock(WorkUnitClient.class);\nHotKeyLogger hotKeyLogger = mock(HotKeyLogger.class);\npublic StreamingDataflowWorkerTest(Boolean streamingEngine) {\nthis.streamingEngine = streamingEngine;\n}\n@Parameterized.Parameters(name = \"{index}: [streamingEngine={0}]\")\npublic static Iterable data() {\nreturn Arrays.asList(new Object[][] {{false}, {true}});\n}\nprivate static CounterUpdate getCounter(Iterable counters, String name) {\nfor (CounterUpdate counter : counters) {\nif (counter.getNameAndKind().getName().equals(name)) {\nreturn counter;\n}\n}\nreturn null;\n}\nstatic Work createMockWork(long workToken) {\nreturn Work.create(\nWindmill.WorkItem.newBuilder().setKey(ByteString.EMPTY).setWorkToken(workToken).build(),\nInstant::now,\nCollections.emptyList(),\nwork -> {});\n}\nstatic Work createMockWork(long workToken, Consumer processWorkFn) {\nreturn Work.create(\nWindmill.WorkItem.newBuilder().setKey(ByteString.EMPTY).setWorkToken(workToken).build(),\nInstant::now,\nCollections.emptyList(),\nprocessWorkFn);\n}\nprivate byte[] intervalWindowBytes(IntervalWindow window) throws Exception {\nreturn CoderUtils.encodeToByteArray(\nDEFAULT_WINDOW_COLLECTION_CODER, Collections.singletonList(window));\n}\nprivate String keyStringForIndex(int index) {\nreturn DEFAULT_KEY_STRING + index;\n}\nprivate String dataStringForIndex(long index) {\nreturn DEFAULT_DATA_STRING + index;\n}\nprivate ParallelInstruction makeWindowingSourceInstruction(Coder coder) {\nCloudObject timerCloudObject =\nCloudObject.forClassName(\n\"com.google.cloud.dataflow.sdk.util.TimerOrElement$TimerOrElementCoder\");\nList component =\nCollections.singletonList(CloudObjects.asCloudObject(coder, /* sdkComponents= */ null));\nStructs.addList(timerCloudObject, PropertyNames.COMPONENT_ENCODINGS, component);\nCloudObject encodedCoder = CloudObject.forClassName(\"kind:windowed_value\");\nStructs.addBoolean(encodedCoder, PropertyNames.IS_WRAPPER, true);\nStructs.addList(\nencodedCoder,\nPropertyNames.COMPONENT_ENCODINGS,\nImmutableList.of(\ntimerCloudObject,\nCloudObjects.asCloudObject(IntervalWindowCoder.of(), /* sdkComponents= */ null)));\nreturn new ParallelInstruction()\n.setSystemName(DEFAULT_SOURCE_SYSTEM_NAME)\n.setOriginalName(DEFAULT_SOURCE_ORIGINAL_NAME)\n.setRead(\nnew ReadInstruction()\n.setSource(\nnew Source()\n.setSpec(CloudObject.forClass(WindowingWindmillReader.class))\n.setCodec(encodedCoder)))\n.setOutputs(\nCollections.singletonList(\nnew 
InstructionOutput()\n.setName(Long.toString(idGenerator.get()))\n.setCodec(encodedCoder)\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)));\n}\nprivate ParallelInstruction makeSourceInstruction(Coder coder) {\nreturn new ParallelInstruction()\n.setSystemName(DEFAULT_SOURCE_SYSTEM_NAME)\n.setOriginalName(DEFAULT_SOURCE_ORIGINAL_NAME)\n.setRead(\nnew ReadInstruction()\n.setSource(\nnew Source()\n.setSpec(CloudObject.forClass(UngroupedWindmillReader.class))\n.setCodec(\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(coder, IntervalWindow.getCoder()),\n/* sdkComponents= */ null))))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setName(Long.toString(idGenerator.get()))\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setCodec(\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(coder, IntervalWindow.getCoder()),\n/* sdkComponents= */ null))));\n}\nprivate ParallelInstruction makeDoFnInstruction(\nDoFn doFn,\nint producerIndex,\nCoder outputCoder,\nWindowingStrategy windowingStrategy) {\nCloudObject spec = CloudObject.forClassName(\"DoFn\");\naddString(\nspec,\nPropertyNames.SERIALIZED_FN,\nStringUtils.byteArrayToJsonString(\nSerializableUtils.serializeToByteArray(\nDoFnInfo.forFn(\ndoFn,\nwindowingStrategy /* windowing strategy */,\nnull /* side input views */,\nnull /* input coder */,\nnew TupleTag<>(PropertyNames.OUTPUT) /* main output id */,\nDoFnSchemaInformation.create(),\nCollections.emptyMap()))));\nreturn new ParallelInstruction()\n.setSystemName(DEFAULT_PARDO_SYSTEM_NAME)\n.setName(DEFAULT_PARDO_USER_NAME)\n.setOriginalName(DEFAULT_PARDO_ORIGINAL_NAME)\n.setParDo(\nnew ParDoInstruction()\n.setInput(\nnew InstructionInput()\n.setProducerInstructionIndex(producerIndex)\n.setOutputNum(0))\n.setNumOutputs(1)\n.setUserFn(spec)\n.setMultiOutputInfos(\nCollections.singletonList(new MultiOutputInfo().setTag(PropertyNames.OUTPUT))))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setName(PropertyNames.OUTPUT)\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setCodec(\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(\noutputCoder, windowingStrategy.getWindowFn().windowCoder()),\n/* sdkComponents= */ null))));\n}\nprivate ParallelInstruction makeDoFnInstruction(\nDoFn doFn, int producerIndex, Coder outputCoder) {\nWindowingStrategy windowingStrategy =\nWindowingStrategy.of(FixedWindows.of(Duration.millis(10)));\nreturn makeDoFnInstruction(doFn, producerIndex, outputCoder, windowingStrategy);\n}\nprivate ParallelInstruction makeSinkInstruction(\nString streamId,\nCoder coder,\nint producerIndex,\nCoder windowCoder) {\nCloudObject spec = CloudObject.forClass(WindmillSink.class);\naddString(spec, \"stream_id\", streamId);\nreturn new ParallelInstruction()\n.setSystemName(DEFAULT_SINK_SYSTEM_NAME)\n.setOriginalName(DEFAULT_SINK_ORIGINAL_NAME)\n.setWrite(\nnew WriteInstruction()\n.setInput(\nnew InstructionInput()\n.setProducerInstructionIndex(producerIndex)\n.setOutputNum(0))\n.setSink(\nnew Sink()\n.setSpec(spec)\n.setCodec(\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(coder, windowCoder),\n/* sdkComponents= */ null))));\n}\nprivate ParallelInstruction makeSinkInstruction(\nCoder coder, int producerIndex, Coder windowCoder) {\nreturn makeSinkInstruction(DEFAULT_DESTINATION_STREAM_ID, coder, producerIndex, windowCoder);\n}\nprivate ParallelInstruction makeSinkInstruction(Coder coder, int 
producerIndex) {\nreturn makeSinkInstruction(coder, producerIndex, IntervalWindow.getCoder());\n}\n/**\n* Returns a {@link MapTask} with the provided {@code instructions} and default values everywhere\n* else.\n*/\nprivate MapTask defaultMapTask(List instructions) {\nMapTask mapTask =\nnew MapTask()\n.setStageName(DEFAULT_MAP_STAGE_NAME)\n.setSystemName(DEFAULT_MAP_SYSTEM_NAME)\n.setInstructions(instructions);\nmapTask.setFactory(Transport.getJsonFactory());\nreturn mapTask;\n}\nprivate Windmill.GetWorkResponse buildInput(String input, byte[] metadata) throws Exception {\nWindmill.GetWorkResponse.Builder builder = Windmill.GetWorkResponse.newBuilder();\nTextFormat.merge(input, builder);\nif (metadata != null) {\nWindmill.InputMessageBundle.Builder messageBundleBuilder =\nbuilder.getWorkBuilder(0).getWorkBuilder(0).getMessageBundlesBuilder(0);\nfor (Windmill.Message.Builder messageBuilder :\nmessageBundleBuilder.getMessagesBuilderList()) {\nmessageBuilder.setMetadata(addPaneTag(PaneInfo.NO_FIRING, metadata));\n}\n}\nreturn builder.build();\n}\nprivate Windmill.GetWorkResponse buildSessionInput(\nint workToken,\nlong inputWatermark,\nlong outputWatermark,\nList inputs,\nList timers)\nthrows Exception {\nWindmill.WorkItem.Builder builder = Windmill.WorkItem.newBuilder();\nbuilder.setKey(DEFAULT_KEY_BYTES);\nbuilder.setShardingKey(DEFAULT_SHARDING_KEY);\nbuilder.setCacheToken(1);\nbuilder.setWorkToken(workToken);\nbuilder.setOutputDataWatermark(outputWatermark * 1000);\nif (!inputs.isEmpty()) {\nInputMessageBundle.Builder messageBuilder =\nWindmill.InputMessageBundle.newBuilder()\n.setSourceComputationId(DEFAULT_SOURCE_COMPUTATION_ID);\nfor (Long input : inputs) {\nmessageBuilder.addMessages(\nWindmill.Message.newBuilder()\n.setTimestamp(input)\n.setData(ByteString.copyFromUtf8(dataStringForIndex(input)))\n.setMetadata(\naddPaneTag(\nPaneInfo.NO_FIRING,\nintervalWindowBytes(\nnew IntervalWindow(\nnew Instant(input),\nnew Instant(input).plus(Duration.millis(10)))))));\n}\nbuilder.addMessageBundles(messageBuilder);\n}\nif (!timers.isEmpty()) {\nbuilder.setTimers(Windmill.TimerBundle.newBuilder().addAllTimers(timers));\n}\nreturn Windmill.GetWorkResponse.newBuilder()\n.addWork(\nWindmill.ComputationWorkItems.newBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.setInputDataWatermark(inputWatermark * 1000)\n.addWork(builder))\n.build();\n}\nprivate Windmill.GetWorkResponse makeInput(int index, long timestamp) throws Exception {\nreturn makeInput(index, timestamp, keyStringForIndex(index), DEFAULT_SHARDING_KEY);\n}\nprivate Windmill.GetWorkResponse makeInput(\nint index, long timestamp, String key, long shardingKey) throws Exception {\nreturn buildInput(\n\"work {\"\n+ \" computation_id: \\\"\"\n+ DEFAULT_COMPUTATION_ID\n+ \"\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"\"\n+ key\n+ \"\\\"\"\n+ \" sharding_key: \"\n+ shardingKey\n+ \" work_token: \"\n+ index\n+ \" cache_token: 3\"\n+ \" hot_key_info {\"\n+ \" hot_key_age_usec: 1000000\"\n+ \" }\"\n+ \" message_bundles {\"\n+ \" source_computation_id: \\\"\"\n+ DEFAULT_SOURCE_COMPUTATION_ID\n+ \"\\\"\"\n+ \" messages {\"\n+ \" timestamp: \"\n+ timestamp\n+ \" data: \\\"data\"\n+ index\n+ \"\\\"\"\n+ \" }\"\n+ \" }\"\n+ \" }\"\n+ \"}\",\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(IntervalWindow.getCoder()),\nCollections.singletonList(DEFAULT_WINDOW)));\n}\n/**\n* Returns a {@link org.apache.beam.runners.dataflow.windmill.Windmill.WorkItemCommitRequest}\n* builder parsed from the provided text format 
proto.\n*/\nprivate WorkItemCommitRequest.Builder parseCommitRequest(String output) throws Exception {\nWorkItemCommitRequest.Builder builder = Windmill.WorkItemCommitRequest.newBuilder();\nTextFormat.merge(output, builder);\nreturn builder;\n}\n/** Sets the metadata of all the contained messages in this WorkItemCommitRequest. */\nprivate WorkItemCommitRequest.Builder setMessagesMetadata(\nPaneInfo pane, byte[] windowBytes, WorkItemCommitRequest.Builder builder) throws Exception {\nif (windowBytes != null) {\nKeyedMessageBundle.Builder bundles = builder.getOutputMessagesBuilder(0).getBundlesBuilder(0);\nfor (int i = 0; i < bundles.getMessagesCount(); i++) {\nbundles.getMessagesBuilder(i).setMetadata(addPaneTag(pane, windowBytes));\n}\n}\nreturn builder;\n}\n/** Reset value update timestamps to zero. */\nprivate WorkItemCommitRequest.Builder setValuesTimestamps(WorkItemCommitRequest.Builder builder) {\nfor (int i = 0; i < builder.getValueUpdatesCount(); i++) {\nbuilder.getValueUpdatesBuilder(i).getValueBuilder().setTimestamp(0);\n}\nreturn builder;\n}\nprivate WorkItemCommitRequest.Builder makeExpectedOutput(int index, long timestamp)\nthrows Exception {\nreturn makeExpectedOutput(\nindex, timestamp, keyStringForIndex(index), DEFAULT_SHARDING_KEY, keyStringForIndex(index));\n}\nprivate WorkItemCommitRequest.Builder makeExpectedOutput(\nint index, long timestamp, String key, long shardingKey, String outKey) throws Exception {\nStringBuilder expectedCommitRequestBuilder =\ninitializeExpectedCommitRequest(key, shardingKey, index);\nappendCommitOutputMessages(expectedCommitRequestBuilder, index, timestamp, outKey);\nreturn setMessagesMetadata(\nPaneInfo.NO_FIRING,\nintervalWindowBytes(DEFAULT_WINDOW),\nparseCommitRequest(expectedCommitRequestBuilder.toString()));\n}\nprivate WorkItemCommitRequest removeDynamicFields(WorkItemCommitRequest request) {\nreturn request.toBuilder().clearPerWorkItemLatencyAttributions().build();\n}\nprivate WorkItemCommitRequest.Builder makeExpectedTruncationRequestOutput(\nint index, String key, long shardingKey, long estimatedSize) throws Exception {\nStringBuilder expectedCommitRequestBuilder =\ninitializeExpectedCommitRequest(key, shardingKey, index, false);\nappendCommitTruncationFields(expectedCommitRequestBuilder, estimatedSize);\nreturn parseCommitRequest(expectedCommitRequestBuilder.toString());\n}\nprivate StringBuilder initializeExpectedCommitRequest(\nString key, long shardingKey, int index, Boolean hasSourceBytesProcessed) {\nStringBuilder requestBuilder = new StringBuilder();\nrequestBuilder.append(\"key: \\\"\");\nrequestBuilder.append(key);\nrequestBuilder.append(\"\\\" \");\nrequestBuilder.append(\"sharding_key: \");\nrequestBuilder.append(shardingKey);\nrequestBuilder.append(\" \");\nrequestBuilder.append(\"work_token: \");\nrequestBuilder.append(index);\nrequestBuilder.append(\" \");\nrequestBuilder.append(\"cache_token: 3 \");\nif (hasSourceBytesProcessed) requestBuilder.append(\"source_bytes_processed: 0 \");\nreturn requestBuilder;\n}\nprivate StringBuilder initializeExpectedCommitRequest(String key, long shardingKey, int index) {\nreturn initializeExpectedCommitRequest(key, shardingKey, index, true);\n}\nprivate StringBuilder appendCommitOutputMessages(\nStringBuilder requestBuilder, int index, long timestamp, String outKey) {\nrequestBuilder.append(\"output_messages {\");\nrequestBuilder.append(\" destination_stream_id: \\\"\");\nrequestBuilder.append(DEFAULT_DESTINATION_STREAM_ID);\nrequestBuilder.append(\"\\\"\");\nrequestBuilder.append(\" 
bundles {\");\nrequestBuilder.append(\" key: \\\"\");\nrequestBuilder.append(outKey);\nrequestBuilder.append(\"\\\"\");\nrequestBuilder.append(\" messages {\");\nrequestBuilder.append(\" timestamp: \");\nrequestBuilder.append(timestamp);\nrequestBuilder.append(\" data: \\\"\");\nrequestBuilder.append(dataStringForIndex(index));\nrequestBuilder.append(\"\\\"\");\nrequestBuilder.append(\" metadata: \\\"\\\"\");\nrequestBuilder.append(\" }\");\nrequestBuilder.append(\" messages_ids: \\\"\\\"\");\nrequestBuilder.append(\" }\");\nrequestBuilder.append(\"}\");\nreturn requestBuilder;\n}\nprivate StringBuilder appendCommitTruncationFields(\nStringBuilder requestBuilder, long estimatedSize) {\nrequestBuilder.append(\"exceeds_max_work_item_commit_bytes: true \");\nrequestBuilder.append(\"estimated_work_item_commit_bytes: \");\nrequestBuilder.append(estimatedSize);\nreturn requestBuilder;\n}\nprivate StreamingComputationConfig makeDefaultStreamingComputationConfig(\nList instructions) {\nStreamingComputationConfig config = new StreamingComputationConfig();\nconfig.setComputationId(DEFAULT_COMPUTATION_ID);\nconfig.setSystemName(DEFAULT_MAP_SYSTEM_NAME);\nconfig.setStageName(DEFAULT_MAP_STAGE_NAME);\nconfig.setInstructions(instructions);\nreturn config;\n}\nprivate ByteString addPaneTag(PaneInfo pane, byte[] windowBytes) throws IOException {\nByteStringOutputStream output = new ByteStringOutputStream();\nPaneInfo.PaneInfoCoder.INSTANCE.encode(pane, output, Context.OUTER);\noutput.write(windowBytes);\nreturn output.toByteString();\n}\nprivate StreamingDataflowWorkerOptions createTestingPipelineOptions(\nFakeWindmillServer server, String... args) {\nList argsList = Lists.newArrayList(args);\nif (streamingEngine) {\nargsList.add(\"--experiments=enable_streaming_engine\");\n}\nStreamingDataflowWorkerOptions options =\nPipelineOptionsFactory.fromArgs(argsList.toArray(new String[0]))\n.as(StreamingDataflowWorkerOptions.class);\noptions.setAppName(\"StreamingWorkerHarnessTest\");\noptions.setJobId(\"test_job_id\");\noptions.setStreaming(true);\noptions.setWindmillServerStub(server);\noptions.setActiveWorkRefreshPeriodMillis(0);\nreturn options;\n}\nprivate StreamingDataflowWorker makeWorker(\nList instructions,\nStreamingDataflowWorkerOptions options,\nboolean publishCounters,\nSupplier clock,\nFunction executorSupplier)\nthrows Exception {\nStreamingDataflowWorker worker =\nnew StreamingDataflowWorker(\nCollections.singletonList(defaultMapTask(instructions)),\nIntrinsicMapTaskExecutorFactory.defaultFactory(),\nmockWorkUnitClient,\noptions,\npublishCounters,\nhotKeyLogger,\nclock,\nexecutorSupplier);\nworker.addStateNameMappings(\nImmutableMap.of(DEFAULT_PARDO_USER_NAME, DEFAULT_PARDO_STATE_FAMILY));\nreturn worker;\n}\nprivate StreamingDataflowWorker makeWorker(\nList instructions,\nStreamingDataflowWorkerOptions options,\nboolean publishCounters)\nthrows Exception {\nreturn makeWorker(\ninstructions,\noptions,\npublishCounters,\nInstant::now,\n(threadName) -> Executors.newSingleThreadScheduledExecutor());\n}\n@Test\npublic void testBasicHarness() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfinal int numIters = 2000;\nfor (int i = 0; i < 
numIters; ++i) {\nserver.whenGetWorkCalled().thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)));\n}\nMap result = server.waitForAndGetCommits(numIters);\nworker.stop();\nfor (int i = 0; i < numIters; ++i) {\nassertTrue(result.containsKey((long) i));\nassertEquals(\nmakeExpectedOutput(i, TimeUnit.MILLISECONDS.toMicros(i)).build(),\nremoveDynamicFields(result.get((long) i)));\n}\nverify(hotKeyLogger, atLeastOnce()).logHotKeyDetection(nullable(String.class), any());\n}\n@Test\npublic void testBasic() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setIsReady(false);\nStreamingConfigTask streamingConfig = new StreamingConfigTask();\nstreamingConfig.setStreamingComputationConfigs(\nImmutableList.of(makeDefaultStreamingComputationConfig(instructions)));\nstreamingConfig.setWindmillServiceEndpoint(\"foo\");\nWorkItem workItem = new WorkItem();\nworkItem.setStreamingConfigTask(streamingConfig);\nwhen(mockWorkUnitClient.getGlobalStreamingConfigWorkItem()).thenReturn(Optional.of(workItem));\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfinal int numIters = 2000;\nfor (int i = 0; i < numIters; ++i) {\nserver.whenGetWorkCalled().thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)));\n}\nMap result = server.waitForAndGetCommits(numIters);\nworker.stop();\nfor (int i = 0; i < numIters; ++i) {\nassertTrue(result.containsKey((long) i));\nassertEquals(\nmakeExpectedOutput(i, TimeUnit.MILLISECONDS.toMicros(i)).build(),\nremoveDynamicFields(result.get((long) i)));\n}\nverify(hotKeyLogger, atLeastOnce()).logHotKeyDetection(nullable(String.class), any());\n}\n@Test\npublic void testHotKeyLogging() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of())),\nmakeSinkInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setIsReady(false);\nStreamingConfigTask streamingConfig = new StreamingConfigTask();\nstreamingConfig.setStreamingComputationConfigs(\nImmutableList.of(makeDefaultStreamingComputationConfig(instructions)));\nstreamingConfig.setWindmillServiceEndpoint(\"foo\");\nWorkItem workItem = new WorkItem();\nworkItem.setStreamingConfigTask(streamingConfig);\nwhen(mockWorkUnitClient.getGlobalStreamingConfigWorkItem()).thenReturn(Optional.of(workItem));\nStreamingDataflowWorkerOptions options =\ncreateTestingPipelineOptions(server, \"--hotKeyLoggingEnabled=true\");\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfinal int numIters = 2000;\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i), \"key\", DEFAULT_SHARDING_KEY));\n}\nserver.waitForAndGetCommits(numIters);\nworker.stop();\nverify(hotKeyLogger, atLeastOnce())\n.logHotKeyDetection(nullable(String.class), any(), eq(\"key\"));\n}\n@Test\npublic void testHotKeyLoggingNotEnabled() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of())),\nmakeSinkInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()), 
0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setIsReady(false);\nStreamingConfigTask streamingConfig = new StreamingConfigTask();\nstreamingConfig.setStreamingComputationConfigs(\nImmutableList.of(makeDefaultStreamingComputationConfig(instructions)));\nstreamingConfig.setWindmillServiceEndpoint(\"foo\");\nWorkItem workItem = new WorkItem();\nworkItem.setStreamingConfigTask(streamingConfig);\nwhen(mockWorkUnitClient.getGlobalStreamingConfigWorkItem()).thenReturn(Optional.of(workItem));\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfinal int numIters = 2000;\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i), \"key\", DEFAULT_SHARDING_KEY));\n}\nserver.waitForAndGetCommits(numIters);\nworker.stop();\nverify(hotKeyLogger, atLeastOnce()).logHotKeyDetection(nullable(String.class), any());\n}\n@Test\npublic void testIgnoreRetriedKeys() throws Exception {\nfinal int numIters = 4;\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(blockingFn, 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(\nmakeInput(\ni, TimeUnit.MILLISECONDS.toMicros(i), keyStringForIndex(i), DEFAULT_SHARDING_KEY))\n.thenReturn(\nmakeInput(\ni + 1000,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY + 1));\n}\nBlockingFn.counter.acquire(numIters * 2);\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)))\n.thenReturn(\nmakeInput(\ni + 1000,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY + 1));\n}\nserver.waitForEmptyWorkQueue();\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(\nmakeInput(\ni + numIters,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY));\n}\nserver.waitForEmptyWorkQueue();\nBlockingFn.blocker.countDown();\nMap result = server.waitForAndGetCommits(numIters * 3);\nfor (int i = 0; i < numIters; ++i) {\nassertTrue(result.containsKey((long) i));\nassertEquals(\nmakeExpectedOutput(i, TimeUnit.MILLISECONDS.toMicros(i)).build(),\nremoveDynamicFields(result.get((long) i)));\nassertTrue(result.containsKey((long) i + 1000));\nassertEquals(\nmakeExpectedOutput(\ni + 1000,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY + 1,\nkeyStringForIndex(i))\n.build(),\nremoveDynamicFields(result.get((long) i + 1000)));\nassertTrue(result.containsKey((long) i + numIters));\nassertEquals(\nmakeExpectedOutput(\ni + numIters,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY,\nkeyStringForIndex(i))\n.build(),\nremoveDynamicFields(result.get((long) i + numIters)));\n}\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(\nmakeInput(\ni + numIters * 2,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY));\n}\nresult = 
server.waitForAndGetCommits(numIters);\nworker.stop();\nfor (int i = 0; i < numIters; ++i) {\nassertTrue(result.containsKey((long) i + numIters * 2));\nassertEquals(\nmakeExpectedOutput(\ni + numIters * 2,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY,\nkeyStringForIndex(i))\n.build(),\nremoveDynamicFields(result.get((long) i + numIters * 2)));\n}\n}\n@Test(timeout = 10000)\npublic void testNumberOfWorkerHarnessThreadsIsHonored() throws Exception {\nint expectedNumberOfThreads = 5;\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(blockingFn, 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setNumberOfWorkerHarnessThreads(expectedNumberOfThreads);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfor (int i = 0; i < expectedNumberOfThreads * 2; ++i) {\nserver.whenGetWorkCalled().thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)));\n}\nBlockingFn.counter.acquire(expectedNumberOfThreads);\nif (BlockingFn.counter.tryAcquire(500, TimeUnit.MILLISECONDS)) {\nfail(\n\"Expected number of threads \"\n+ expectedNumberOfThreads\n+ \" does not match actual \"\n+ \"number of work items processed concurrently \"\n+ BlockingFn.callCounter.get()\n+ \".\");\n}\nBlockingFn.blocker.countDown();\n}\n@Test\npublic void testKeyTokenInvalidException() throws Exception {\nif (streamingEngine) {\nreturn;\n}\nKvCoder kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nList instructions =\nArrays.asList(\nmakeSourceInstruction(kvCoder),\nmakeDoFnInstruction(new KeyTokenInvalidFn(), 0, kvCoder),\nmakeSinkInstruction(kvCoder, 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(0, 0, DEFAULT_KEY_STRING, DEFAULT_SHARDING_KEY));\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);\nworker.start();\nserver.waitForEmptyWorkQueue();\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(1, 0, DEFAULT_KEY_STRING, DEFAULT_SHARDING_KEY));\nMap result = server.waitForAndGetCommits(1);\nassertEquals(\nmakeExpectedOutput(1, 0, DEFAULT_KEY_STRING, DEFAULT_SHARDING_KEY, DEFAULT_KEY_STRING)\n.build(),\nremoveDynamicFields(result.get(1L)));\nassertEquals(1, result.size());\n}\n@Test\npublic void testKeyCommitTooLargeException() throws Exception {\nKvCoder kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nList instructions =\nArrays.asList(\nmakeSourceInstruction(kvCoder),\nmakeDoFnInstruction(new LargeCommitFn(), 0, kvCoder),\nmakeSinkInstruction(kvCoder, 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setExpectedExceptionCount(1);\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);\nworker.setMaxWorkItemCommitBytes(1000);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(1, 0, \"large_key\", DEFAULT_SHARDING_KEY))\n.thenReturn(makeInput(2, 0, \"key\", DEFAULT_SHARDING_KEY));\nserver.waitForEmptyWorkQueue();\nMap result = server.waitForAndGetCommits(1);\nassertEquals(2, result.size());\nassertEquals(\nmakeExpectedOutput(2, 0, \"key\", DEFAULT_SHARDING_KEY, 
\"key\").build(),\nremoveDynamicFields(result.get(2L)));\nassertTrue(result.containsKey(1L));\nWorkItemCommitRequest largeCommit = result.get(1L);\nassertEquals(\"large_key\", largeCommit.getKey().toStringUtf8());\nassertEquals(\nmakeExpectedTruncationRequestOutput(\n1, \"large_key\", DEFAULT_SHARDING_KEY, largeCommit.getEstimatedWorkItemCommitBytes())\n.build(),\nlargeCommit);\nassertTrue(largeCommit.getEstimatedWorkItemCommitBytes() > 1000);\nint maxTries = 10;\nwhile (--maxTries > 0) {\nworker.reportPeriodicWorkerUpdates();\nUninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS);\n}\nArgumentCaptor workItemStatusCaptor =\nArgumentCaptor.forClass(WorkItemStatus.class);\nverify(mockWorkUnitClient, atLeast(2)).reportWorkItemStatus(workItemStatusCaptor.capture());\nList capturedStatuses = workItemStatusCaptor.getAllValues();\nboolean foundErrors = false;\nfor (WorkItemStatus status : capturedStatuses) {\nif (!status.getErrors().isEmpty()) {\nassertFalse(foundErrors);\nfoundErrors = true;\nString errorMessage = status.getErrors().get(0).getMessage();\nassertThat(errorMessage, Matchers.containsString(\"KeyCommitTooLargeException\"));\n}\n}\nassertTrue(foundErrors);\n}\n@Test\npublic void testKeyChange() throws Exception {\nKvCoder kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nList instructions =\nArrays.asList(\nmakeSourceInstruction(kvCoder),\nmakeDoFnInstruction(new ChangeKeysFn(), 0, kvCoder),\nmakeSinkInstruction(kvCoder, 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nfor (int i = 0; i < 2; i++) {\nserver\n.whenGetWorkCalled()\n.thenReturn(\nmakeInput(\ni, TimeUnit.MILLISECONDS.toMicros(i), keyStringForIndex(i), DEFAULT_SHARDING_KEY))\n.thenReturn(\nmakeInput(\ni + 1000,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY + i));\n}\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);\nworker.start();\nMap result = server.waitForAndGetCommits(4);\nfor (int i = 0; i < 2; i++) {\nassertTrue(result.containsKey((long) i));\nassertEquals(\nmakeExpectedOutput(\ni,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY,\nkeyStringForIndex(i) + \"_data\" + i)\n.build(),\nremoveDynamicFields(result.get((long) i)));\nassertTrue(result.containsKey((long) i + 1000));\nassertEquals(\nmakeExpectedOutput(\ni + 1000,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY + i,\nkeyStringForIndex(i) + \"_data\" + (i + 1000))\n.build(),\nremoveDynamicFields(result.get((long) i + 1000)));\n}\n}\n@Test(timeout = 30000)\npublic void testExceptions() throws Exception {\nif (streamingEngine) {\nreturn;\n}\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(new TestExceptionFn(), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setExpectedExceptionCount(1);\nString keyString = keyStringForIndex(0);\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"\"\n+ DEFAULT_COMPUTATION_ID\n+ \"\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"\"\n+ keyString\n+ \"\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 0\"\n+ \" cache_token: 1\"\n+ \" message_bundles {\"\n+ \" source_computation_id: \\\"\"\n+ DEFAULT_SOURCE_COMPUTATION_ID\n+ \"\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: 
\\\"0\\\"\"\n+ \" }\"\n+ \" }\"\n+ \" }\"\n+ \"}\",\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(IntervalWindow.getCoder()),\nCollections.singletonList(DEFAULT_WINDOW))));\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);\nworker.start();\nserver.waitForEmptyWorkQueue();\nint maxTries = 10;\nwhile (maxTries-- > 0 && !worker.workExecutorIsEmpty()) {\nUninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS);\n}\nassertTrue(worker.workExecutorIsEmpty());\nmaxTries = 10;\nwhile (maxTries-- > 0) {\nworker.reportPeriodicWorkerUpdates();\nUninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS);\n}\nArgumentCaptor workItemStatusCaptor =\nArgumentCaptor.forClass(WorkItemStatus.class);\nverify(mockWorkUnitClient, atLeast(1)).reportWorkItemStatus(workItemStatusCaptor.capture());\nList capturedStatuses = workItemStatusCaptor.getAllValues();\nboolean foundErrors = false;\nint lastUpdateWithoutErrors = 0;\nint lastUpdateWithErrors = 0;\nfor (WorkItemStatus status : capturedStatuses) {\nif (status.getErrors().isEmpty()) {\nlastUpdateWithoutErrors++;\ncontinue;\n}\nlastUpdateWithErrors++;\nassertFalse(foundErrors);\nfoundErrors = true;\nString stacktrace = status.getErrors().get(0).getMessage();\nassertThat(stacktrace, Matchers.containsString(\"Exception!\"));\nassertThat(stacktrace, Matchers.containsString(\"Another exception!\"));\nassertThat(stacktrace, Matchers.containsString(\"processElement\"));\n}\nassertTrue(foundErrors);\nassertTrue(lastUpdateWithoutErrors > lastUpdateWithErrors);\nassertThat(server.getStatsReceived().size(), Matchers.greaterThanOrEqualTo(1));\nWindmill.ReportStatsRequest stats = server.getStatsReceived().get(0);\nassertEquals(DEFAULT_COMPUTATION_ID, stats.getComputationId());\nassertEquals(keyString, stats.getKey().toStringUtf8());\nassertEquals(0, stats.getWorkToken());\nassertEquals(1, stats.getShardingKey());\n}\n@Test\npublic void testAssignWindows() throws Exception {\nDuration gapDuration = Duration.standardSeconds(1);\nCloudObject spec = CloudObject.forClassName(\"AssignWindowsDoFn\");\nSdkComponents sdkComponents = SdkComponents.create();\nsdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);\naddString(\nspec,\nPropertyNames.SERIALIZED_FN,\nStringUtils.byteArrayToJsonString(\nWindowingStrategyTranslation.toMessageProto(\nWindowingStrategy.of(FixedWindows.of(gapDuration)), sdkComponents)\n.toByteArray()));\nParallelInstruction addWindowsInstruction =\nnew ParallelInstruction()\n.setSystemName(\"AssignWindows\")\n.setName(\"AssignWindows\")\n.setOriginalName(\"AssignWindowsOriginal\")\n.setParDo(\nnew ParDoInstruction()\n.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))\n.setNumOutputs(1)\n.setUserFn(spec))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setName(\"output\")\n.setCodec(\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(\nStringUtf8Coder.of(), IntervalWindow.getCoder()),\n/* sdkComponents= */ null))));\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\naddWindowsInstruction,\nmakeSinkInstruction(StringUtf8Coder.of(), 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nint timestamp1 = 0;\nint timestamp2 = 1000000;\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(timestamp1, timestamp1))\n.thenReturn(makeInput(timestamp2, 
timestamp2));\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);\nworker.start();\nMap result = server.waitForAndGetCommits(2);\nassertThat(\nremoveDynamicFields(result.get((long) timestamp1)),\nequalTo(\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nintervalWindowBytes(WINDOW_AT_ZERO),\nmakeExpectedOutput(timestamp1, timestamp1))\n.build()));\nassertThat(\nremoveDynamicFields(result.get((long) timestamp2)),\nequalTo(\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nintervalWindowBytes(WINDOW_AT_ONE_SECOND),\nmakeExpectedOutput(timestamp2, timestamp2))\n.build()));\n}\nprivate void verifyTimers(WorkItemCommitRequest commit, Timer... timers) {\nassertThat(commit.getOutputTimersList(), Matchers.containsInAnyOrder(timers));\n}\nprivate void verifyHolds(WorkItemCommitRequest commit, WatermarkHold... watermarkHolds) {\nassertThat(commit.getWatermarkHoldsList(), Matchers.containsInAnyOrder(watermarkHolds));\n}\nprivate Timer buildWatermarkTimer(String tagPrefix, long timestampMillis) {\nreturn buildWatermarkTimer(tagPrefix, timestampMillis, false);\n}\nprivate Timer buildWatermarkTimer(String tagPrefix, long timestampMillis, boolean delete) {\nTimer.Builder builder =\nTimer.newBuilder()\n.setTag(ByteString.copyFromUtf8(tagPrefix + \":\" + timestampMillis))\n.setType(Type.WATERMARK)\n.setStateFamily(\"MergeWindows\");\nif (!delete) {\nbuilder.setTimestamp(timestampMillis * 1000);\nbuilder.setMetadataTimestamp(timestampMillis * 1000);\n}\nreturn builder.build();\n}\nprivate WatermarkHold buildHold(String tag, long timestamp, boolean reset) {\nWatermarkHold.Builder builder =\nWatermarkHold.newBuilder()\n.setTag(ByteString.copyFromUtf8(tag))\n.setStateFamily(\"MergeWindows\");\nif (reset) {\nbuilder.setReset(true);\n}\nif (timestamp >= 0) {\nbuilder.addTimestamps(timestamp * 1000);\n}\nreturn builder.build();\n}\n@Test\npublic void testMergeWindows() throws Exception {\nCoder> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nCoder>> windowedKvCoder =\nFullWindowedValueCoder.of(kvCoder, IntervalWindow.getCoder());\nKvCoder> groupedCoder =\nKvCoder.of(StringUtf8Coder.of(), ListCoder.of(StringUtf8Coder.of()));\nCoder>>> windowedGroupedCoder =\nFullWindowedValueCoder.of(groupedCoder, IntervalWindow.getCoder());\nCloudObject spec = CloudObject.forClassName(\"MergeWindowsDoFn\");\nSdkComponents sdkComponents = SdkComponents.create();\nsdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);\naddString(\nspec,\nPropertyNames.SERIALIZED_FN,\nStringUtils.byteArrayToJsonString(\nWindowingStrategyTranslation.toMessageProto(\nWindowingStrategy.of(FixedWindows.of(Duration.standardSeconds(1)))\n.withTimestampCombiner(TimestampCombiner.EARLIEST),\nsdkComponents)\n.toByteArray()));\naddObject(\nspec,\nWorkerPropertyNames.INPUT_CODER,\nCloudObjects.asCloudObject(windowedKvCoder, /* sdkComponents= */ null));\nParallelInstruction mergeWindowsInstruction =\nnew ParallelInstruction()\n.setSystemName(\"MergeWindows-System\")\n.setName(\"MergeWindowsStep\")\n.setOriginalName(\"MergeWindowsOriginal\")\n.setParDo(\nnew ParDoInstruction()\n.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))\n.setNumOutputs(1)\n.setUserFn(spec))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setName(\"output\")\n.setCodec(\nCloudObjects.asCloudObject(\nwindowedGroupedCoder, /* sdkComponents= */ 
null))));\nList instructions =\nArrays.asList(\nmakeWindowingSourceInstruction(kvCoder),\nmergeWindowsInstruction,\nmakeSinkInstruction(groupedCoder, 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);\nMap nameMap = new HashMap<>();\nnameMap.put(\"MergeWindowsStep\", \"MergeWindows\");\nworker.addStateNameMappings(nameMap);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"\"\n+ DEFAULT_COMPUTATION_ID\n+ \"\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"\"\n+ DEFAULT_KEY_STRING\n+ \"\\\"\"\n+ \" sharding_key: \"\n+ DEFAULT_SHARDING_KEY\n+ \" cache_token: 1\"\n+ \" work_token: 1\"\n+ \" message_bundles {\"\n+ \" source_computation_id: \\\"\"\n+ DEFAULT_SOURCE_COMPUTATION_ID\n+ \"\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"\"\n+ dataStringForIndex(0)\n+ \"\\\"\"\n+ \" }\"\n+ \" }\"\n+ \" }\"\n+ \"}\",\nintervalWindowBytes(WINDOW_AT_ZERO)));\nMap result = server.waitForAndGetCommits(1);\nIterable counters = worker.buildCounters();\nString window = \"/gAAAAAAAA-joBw/\";\nString timerTagPrefix = \"/s\" + window + \"+0\";\nByteString bufferTag = ByteString.copyFromUtf8(window + \"+ubuf\");\nByteString paneInfoTag = ByteString.copyFromUtf8(window + \"+upane\");\nString watermarkDataHoldTag = window + \"+uhold\";\nString watermarkExtraHoldTag = window + \"+uextra\";\nString stateFamily = \"MergeWindows\";\nByteString bufferData = ByteString.copyFromUtf8(\"data0\");\nByteString outputData =\nByteString.copyFrom(\nnew byte[] {\n(byte) 0xff,\n(byte) 0xff,\n(byte) 0xff,\n(byte) 0xff,\n0x01,\n0x05,\n0x64,\n0x61,\n0x74,\n0x61,\n0x30,\n0x00\n});\nlong timerTimestamp = 999000L;\nWorkItemCommitRequest actualOutput = result.get(1L);\nverifyTimers(actualOutput, buildWatermarkTimer(timerTagPrefix, 999));\nassertThat(\nactualOutput.getBagUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagBag.newBuilder()\n.setTag(bufferTag)\n.setStateFamily(stateFamily)\n.addValues(bufferData)\n.build())));\nverifyHolds(actualOutput, buildHold(watermarkDataHoldTag, 0, false));\nassertEquals(0L, splitIntToLong(getCounter(counters, \"WindmillStateBytesRead\").getInteger()));\nassertEquals(\nWindmill.WorkItemCommitRequest.newBuilder(actualOutput)\n.clearCounterUpdates()\n.clearOutputMessages()\n.clearPerWorkItemLatencyAttributions()\n.build()\n.getSerializedSize(),\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesWritten\").getInteger()));\nassertEquals(\nVarInt.getLength(0L)\n+ dataStringForIndex(0).length()\n+ addPaneTag(PaneInfo.NO_FIRING, intervalWindowBytes(WINDOW_AT_ZERO)).size()\n+ 5L\n,\nsplitIntToLong(getCounter(counters, \"WindmillShuffleBytesRead\").getInteger()));\nWindmill.GetWorkResponse.Builder getWorkResponse = Windmill.GetWorkResponse.newBuilder();\ngetWorkResponse\n.addWorkBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.setInputDataWatermark(timerTimestamp + 1000)\n.addWorkBuilder()\n.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))\n.setShardingKey(DEFAULT_SHARDING_KEY)\n.setWorkToken(2)\n.setCacheToken(1)\n.getTimersBuilder()\n.addTimers(buildWatermarkTimer(timerTagPrefix, timerTimestamp));\nserver.whenGetWorkCalled().thenReturn(getWorkResponse.build());\nlong expectedBytesRead = 0L;\nWindmill.GetDataResponse.Builder dataResponse = Windmill.GetDataResponse.newBuilder();\nWindmill.KeyedGetDataResponse.Builder dataBuilder 
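/* The second GetWork response above carries only the watermark timer; this GetData response plays back the state the first work item persisted (the buffered bag value, both watermark holds, and the pane info value) so that firing the timer can emit the grouped output. */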
=\ndataResponse\n.addDataBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.addDataBuilder()\n.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))\n.setShardingKey(DEFAULT_SHARDING_KEY);\ndataBuilder\n.addBagsBuilder()\n.setTag(bufferTag)\n.setStateFamily(stateFamily)\n.addValues(bufferData);\ndataBuilder\n.addWatermarkHoldsBuilder()\n.setTag(ByteString.copyFromUtf8(watermarkDataHoldTag))\n.setStateFamily(stateFamily)\n.addTimestamps(0);\ndataBuilder\n.addWatermarkHoldsBuilder()\n.setTag(ByteString.copyFromUtf8(watermarkExtraHoldTag))\n.setStateFamily(stateFamily)\n.addTimestamps(0);\ndataBuilder\n.addValuesBuilder()\n.setTag(paneInfoTag)\n.setStateFamily(stateFamily)\n.getValueBuilder()\n.setTimestamp(0)\n.setData(ByteString.EMPTY);\nserver.whenGetDataCalled().thenReturn(dataResponse.build());\nexpectedBytesRead += dataBuilder.build().getSerializedSize();\nresult = server.waitForAndGetCommits(1);\ncounters = worker.buildCounters();\nactualOutput = result.get(2L);\nassertEquals(1, actualOutput.getOutputMessagesCount());\nassertEquals(\nDEFAULT_DESTINATION_STREAM_ID, actualOutput.getOutputMessages(0).getDestinationStreamId());\nassertEquals(\nDEFAULT_KEY_STRING,\nactualOutput.getOutputMessages(0).getBundles(0).getKey().toStringUtf8());\nassertEquals(0, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getTimestamp());\nassertEquals(\noutputData, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getData());\nByteString metadata =\nactualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getMetadata();\nInputStream inStream = metadata.newInput();\nassertEquals(\nPaneInfo.createPane(true, true, Timing.ON_TIME), PaneInfoCoder.INSTANCE.decode(inStream));\nassertEquals(\nCollections.singletonList(WINDOW_AT_ZERO),\nDEFAULT_WINDOW_COLLECTION_CODER.decode(inStream, Coder.Context.OUTER));\nassertThat(\n\"\" + actualOutput.getValueUpdatesList(),\nactualOutput.getValueUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagValue.newBuilder()\n.setTag(paneInfoTag)\n.setStateFamily(stateFamily)\n.setValue(\nWindmill.Value.newBuilder()\n.setTimestamp(Long.MAX_VALUE)\n.setData(ByteString.EMPTY))\n.build())));\nassertThat(\n\"\" + actualOutput.getBagUpdatesList(),\nactualOutput.getBagUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagBag.newBuilder()\n.setTag(bufferTag)\n.setStateFamily(stateFamily)\n.setDeleteAll(true)\n.build())));\nverifyHolds(\nactualOutput,\nbuildHold(watermarkDataHoldTag, -1, true),\nbuildHold(watermarkExtraHoldTag, -1, true));\nassertEquals(\nexpectedBytesRead,\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesRead\").getInteger()));\nassertEquals(\nWindmill.WorkItemCommitRequest.newBuilder(removeDynamicFields(actualOutput))\n.clearCounterUpdates()\n.clearOutputMessages()\n.build()\n.getSerializedSize(),\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesWritten\").getInteger()));\nassertEquals(0L, splitIntToLong(getCounter(counters, \"WindmillShuffleBytesRead\").getInteger()));\n}\n@Test\npublic void testMergeWindowsCaching() throws Exception {\nCoder> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nCoder>> windowedKvCoder =\nFullWindowedValueCoder.of(kvCoder, IntervalWindow.getCoder());\nKvCoder> groupedCoder =\nKvCoder.of(StringUtf8Coder.of(), ListCoder.of(StringUtf8Coder.of()));\nCoder>>> windowedGroupedCoder =\nFullWindowedValueCoder.of(groupedCoder, IntervalWindow.getCoder());\nCloudObject spec = CloudObject.forClassName(\"MergeWindowsDoFn\");\nSdkComponents sdkComponents = 
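/* Same MergeWindows pipeline as testMergeWindows, with a PassthroughDoFn appended; the cache stats assertions at the end check how many state lookups hit or miss the worker state cache across the two work items. */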
SdkComponents.create();\nsdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);\naddString(\nspec,\nPropertyNames.SERIALIZED_FN,\nStringUtils.byteArrayToJsonString(\nWindowingStrategyTranslation.toMessageProto(\nWindowingStrategy.of(FixedWindows.of(Duration.standardSeconds(1)))\n.withTimestampCombiner(TimestampCombiner.EARLIEST),\nsdkComponents)\n.toByteArray()));\naddObject(\nspec,\nWorkerPropertyNames.INPUT_CODER,\nCloudObjects.asCloudObject(windowedKvCoder, /* sdkComponents= */ null));\nParallelInstruction mergeWindowsInstruction =\nnew ParallelInstruction()\n.setSystemName(\"MergeWindows-System\")\n.setName(\"MergeWindowsStep\")\n.setOriginalName(\"MergeWindowsOriginal\")\n.setParDo(\nnew ParDoInstruction()\n.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))\n.setNumOutputs(1)\n.setUserFn(spec))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setName(\"output\")\n.setCodec(\nCloudObjects.asCloudObject(\nwindowedGroupedCoder, /* sdkComponents= */ null))));\nList instructions =\nArrays.asList(\nmakeWindowingSourceInstruction(kvCoder),\nmergeWindowsInstruction,\nmakeDoFnInstruction(new PassthroughDoFn(), 1, groupedCoder),\nmakeSinkInstruction(groupedCoder, 2));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);\nMap nameMap = new HashMap<>();\nnameMap.put(\"MergeWindowsStep\", \"MergeWindows\");\nworker.addStateNameMappings(nameMap);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"\"\n+ DEFAULT_COMPUTATION_ID\n+ \"\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"\"\n+ DEFAULT_KEY_STRING\n+ \"\\\"\"\n+ \" sharding_key: \"\n+ DEFAULT_SHARDING_KEY\n+ \" cache_token: 1\"\n+ \" work_token: 1\"\n+ \" is_new_key: 1\"\n+ \" message_bundles {\"\n+ \" source_computation_id: \\\"\"\n+ DEFAULT_SOURCE_COMPUTATION_ID\n+ \"\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"\"\n+ dataStringForIndex(0)\n+ \"\\\"\"\n+ \" }\"\n+ \" }\"\n+ \" }\"\n+ \"}\",\nintervalWindowBytes(WINDOW_AT_ZERO)));\nMap result = server.waitForAndGetCommits(1);\nIterable counters = worker.buildCounters();\nString window = \"/gAAAAAAAA-joBw/\";\nString timerTagPrefix = \"/s\" + window + \"+0\";\nByteString bufferTag = ByteString.copyFromUtf8(window + \"+ubuf\");\nByteString paneInfoTag = ByteString.copyFromUtf8(window + \"+upane\");\nString watermarkDataHoldTag = window + \"+uhold\";\nString watermarkExtraHoldTag = window + \"+uextra\";\nString stateFamily = \"MergeWindows\";\nByteString bufferData = ByteString.copyFromUtf8(\"data0\");\nByteString outputData =\nByteString.copyFrom(\nnew byte[] {\n(byte) 0xff,\n(byte) 0xff,\n(byte) 0xff,\n(byte) 0xff,\n0x01,\n0x05,\n0x64,\n0x61,\n0x74,\n0x61,\n0x30,\n0x00\n});\nlong timerTimestamp = 999000L;\nWorkItemCommitRequest actualOutput = result.get(1L);\nverifyTimers(actualOutput, buildWatermarkTimer(timerTagPrefix, 999));\nassertThat(\nactualOutput.getBagUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagBag.newBuilder()\n.setTag(bufferTag)\n.setStateFamily(stateFamily)\n.addValues(bufferData)\n.build())));\nverifyHolds(actualOutput, buildHold(watermarkDataHoldTag, 0, false));\nassertEquals(0L, splitIntToLong(getCounter(counters, 
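/* no state appears to be fetched from Windmill for this first work item, so the state bytes read counter checked here should be zero; is_new_key on the GetWork item additionally marks the key as previously unseen */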
\"WindmillStateBytesRead\").getInteger()));\nassertEquals(\nWindmill.WorkItemCommitRequest.newBuilder(removeDynamicFields(actualOutput))\n.clearCounterUpdates()\n.clearOutputMessages()\n.build()\n.getSerializedSize(),\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesWritten\").getInteger()));\nassertEquals(\nVarInt.getLength(0L)\n+ dataStringForIndex(0).length()\n+ addPaneTag(PaneInfo.NO_FIRING, intervalWindowBytes(WINDOW_AT_ZERO)).size()\n+ 5L\n,\nsplitIntToLong(getCounter(counters, \"WindmillShuffleBytesRead\").getInteger()));\nWindmill.GetWorkResponse.Builder getWorkResponse = Windmill.GetWorkResponse.newBuilder();\ngetWorkResponse\n.addWorkBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.setInputDataWatermark(timerTimestamp + 1000)\n.addWorkBuilder()\n.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))\n.setShardingKey(DEFAULT_SHARDING_KEY)\n.setWorkToken(2)\n.setCacheToken(1)\n.getTimersBuilder()\n.addTimers(buildWatermarkTimer(timerTagPrefix, timerTimestamp));\nserver.whenGetWorkCalled().thenReturn(getWorkResponse.build());\nlong expectedBytesRead = 0L;\nWindmill.GetDataResponse.Builder dataResponse = Windmill.GetDataResponse.newBuilder();\nWindmill.KeyedGetDataResponse.Builder dataBuilder =\ndataResponse\n.addDataBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.addDataBuilder()\n.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))\n.setShardingKey(DEFAULT_SHARDING_KEY);\ndataBuilder\n.addWatermarkHoldsBuilder()\n.setTag(ByteString.copyFromUtf8(watermarkExtraHoldTag))\n.setStateFamily(stateFamily)\n.addTimestamps(0);\ndataBuilder\n.addValuesBuilder()\n.setTag(paneInfoTag)\n.setStateFamily(stateFamily)\n.getValueBuilder()\n.setTimestamp(0)\n.setData(ByteString.EMPTY);\nserver.whenGetDataCalled().thenReturn(dataResponse.build());\nexpectedBytesRead += dataBuilder.build().getSerializedSize();\nresult = server.waitForAndGetCommits(1);\ncounters = worker.buildCounters();\nactualOutput = result.get(2L);\nassertEquals(1, actualOutput.getOutputMessagesCount());\nassertEquals(\nDEFAULT_DESTINATION_STREAM_ID, actualOutput.getOutputMessages(0).getDestinationStreamId());\nassertEquals(\nDEFAULT_KEY_STRING,\nactualOutput.getOutputMessages(0).getBundles(0).getKey().toStringUtf8());\nassertEquals(0, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getTimestamp());\nassertEquals(\noutputData, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getData());\nByteString metadata =\nactualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getMetadata();\nInputStream inStream = metadata.newInput();\nassertEquals(\nPaneInfo.createPane(true, true, Timing.ON_TIME), PaneInfoCoder.INSTANCE.decode(inStream));\nassertEquals(\nCollections.singletonList(WINDOW_AT_ZERO),\nDEFAULT_WINDOW_COLLECTION_CODER.decode(inStream, Coder.Context.OUTER));\nassertThat(\n\"\" + actualOutput.getValueUpdatesList(),\nactualOutput.getValueUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagValue.newBuilder()\n.setTag(paneInfoTag)\n.setStateFamily(stateFamily)\n.setValue(\nWindmill.Value.newBuilder()\n.setTimestamp(Long.MAX_VALUE)\n.setData(ByteString.EMPTY))\n.build())));\nassertThat(\n\"\" + actualOutput.getBagUpdatesList(),\nactualOutput.getBagUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagBag.newBuilder()\n.setTag(bufferTag)\n.setStateFamily(stateFamily)\n.setDeleteAll(true)\n.build())));\nverifyHolds(\nactualOutput,\nbuildHold(watermarkDataHoldTag, -1, true),\nbuildHold(watermarkExtraHoldTag, -1, 
true));\nassertEquals(\nexpectedBytesRead,\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesRead\").getInteger()));\nassertEquals(\nWindmill.WorkItemCommitRequest.newBuilder(removeDynamicFields(actualOutput))\n.clearCounterUpdates()\n.clearOutputMessages()\n.build()\n.getSerializedSize(),\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesWritten\").getInteger()));\nassertEquals(0L, splitIntToLong(getCounter(counters, \"WindmillShuffleBytesRead\").getInteger()));\nCacheStats stats = worker.stateCache.getCacheStats();\nLOG.info(\"cache stats {}\", stats);\nassertEquals(1, stats.hitCount());\nassertEquals(4, stats.missCount());\n}\nprivate void runMergeSessionsActions(List actions) throws Exception {\nCoder> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nCoder>> windowedKvCoder =\nFullWindowedValueCoder.of(kvCoder, IntervalWindow.getCoder());\nKvCoder> groupedCoder =\nKvCoder.of(StringUtf8Coder.of(), ListCoder.of(StringUtf8Coder.of()));\nCoder>>> windowedGroupedCoder =\nFullWindowedValueCoder.of(groupedCoder, IntervalWindow.getCoder());\nCloudObject spec = CloudObject.forClassName(\"MergeWindowsDoFn\");\nSdkComponents sdkComponents = SdkComponents.create();\nsdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);\naddString(\nspec,\nPropertyNames.SERIALIZED_FN,\nStringUtils.byteArrayToJsonString(\nWindowingStrategyTranslation.toMessageProto(\nWindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)))\n.withMode(AccumulationMode.DISCARDING_FIRED_PANES)\n.withTrigger(\nRepeatedly.forever(\nAfterWatermark.pastEndOfWindow()\n.withLateFirings(AfterPane.elementCountAtLeast(1))))\n.withAllowedLateness(Duration.standardMinutes(60)),\nsdkComponents)\n.toByteArray()));\naddObject(\nspec,\nWorkerPropertyNames.INPUT_CODER,\nCloudObjects.asCloudObject(windowedKvCoder, /* sdkComponents= */ null));\nParallelInstruction mergeWindowsInstruction =\nnew ParallelInstruction()\n.setSystemName(\"MergeWindows-System\")\n.setName(\"MergeWindowsStep\")\n.setOriginalName(\"MergeWindowsOriginal\")\n.setParDo(\nnew ParDoInstruction()\n.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))\n.setNumOutputs(1)\n.setUserFn(spec))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setName(\"output\")\n.setCodec(\nCloudObjects.asCloudObject(\nwindowedGroupedCoder, /* sdkComponents= */ null))));\nList instructions =\nArrays.asList(\nmakeWindowingSourceInstruction(kvCoder),\nmergeWindowsInstruction,\nmakeSinkInstruction(groupedCoder, 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);\nMap nameMap = new HashMap<>();\nnameMap.put(\"MergeWindowsStep\", \"MergeWindows\");\nworker.addStateNameMappings(nameMap);\nworker.start();\nserver.whenGetDataCalled().answerByDefault(EMPTY_DATA_RESPONDER);\nfor (int i = 0; i < actions.size(); ++i) {\nAction action = actions.get(i);\nserver.whenGetWorkCalled().thenReturn(action.response);\nMap result = server.waitForAndGetCommits(1);\nWorkItemCommitRequest actualOutput = result.get(i + 1L);\nassertThat(actualOutput, Matchers.not(Matchers.nullValue()));\nverifyTimers(actualOutput, action.expectedTimers);\nverifyHolds(actualOutput, action.expectedHolds);\n}\n}\n@Test\npublic void testMergeSessionWindows() throws Exception 
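/* Each Action pairs one GetWork response with the timers and watermark holds expected on the resulting commit; runMergeSessionsActions feeds the actions to the worker one at a time and verifies each commit in order. */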
{\nrunMergeSessionsActions(\nCollections.singletonList(\nnew Action(\nbuildSessionInput(\n1, 40, 0, Collections.singletonList(1L), Collections.EMPTY_LIST))\n.withHolds(\nbuildHold(\"/gAAAAAAAAAsK/+uhold\", -1, true),\nbuildHold(\"/gAAAAAAAAAsK/+uextra\", -1, true))\n.withTimers(buildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 3600010))));\nrunMergeSessionsActions(\nArrays.asList(\nnew Action(\nbuildSessionInput(\n1, 0, 0, Collections.singletonList(1L), Collections.EMPTY_LIST))\n.withHolds(buildHold(\"/gAAAAAAAAAsK/+uhold\", 10, false))\n.withTimers(\nbuildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 10),\nbuildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 3600010)),\nnew Action(\nbuildSessionInput(\n2,\n30,\n0,\nCollections.EMPTY_LIST,\nCollections.singletonList(buildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 10))))\n.withTimers(buildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 3600010))\n.withHolds(\nbuildHold(\"/gAAAAAAAAAsK/+uhold\", -1, true),\nbuildHold(\"/gAAAAAAAAAsK/+uextra\", -1, true)),\nnew Action(\nbuildSessionInput(\n3, 30, 0, Collections.singletonList(8L), Collections.EMPTY_LIST))\n.withTimers(\nbuildWatermarkTimer(\"/s/gAAAAAAAABIR/+0\", 3600017),\nbuildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 10, true),\nbuildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 3600010, true))\n.withHolds(\nbuildHold(\"/gAAAAAAAAAsK/+uhold\", -1, true),\nbuildHold(\"/gAAAAAAAAAsK/+uextra\", -1, true)),\nnew Action(\nbuildSessionInput(\n4, 30, 0, Collections.singletonList(31L), Collections.EMPTY_LIST))\n.withTimers(\nbuildWatermarkTimer(\"/s/gAAAAAAAACkK/+0\", 3600040),\nbuildWatermarkTimer(\"/s/gAAAAAAAACkK/+0\", 40))\n.withHolds(buildHold(\"/gAAAAAAAACkK/+uhold\", 40, false)),\nnew Action(buildSessionInput(5, 30, 0, Arrays.asList(17L, 23L), Collections.EMPTY_LIST))\n.withTimers(\nbuildWatermarkTimer(\"/s/gAAAAAAAACkK/+0\", 3600040, true),\nbuildWatermarkTimer(\"/s/gAAAAAAAACkK/+0\", 40, true),\nbuildWatermarkTimer(\"/s/gAAAAAAAABIR/+0\", 3600017, true),\nbuildWatermarkTimer(\"/s/gAAAAAAAABIR/+0\", 17, true),\nbuildWatermarkTimer(\"/s/gAAAAAAAACko/+0\", 40),\nbuildWatermarkTimer(\"/s/gAAAAAAAACko/+0\", 3600040))\n.withHolds(\nbuildHold(\"/gAAAAAAAACkK/+uhold\", -1, true),\nbuildHold(\"/gAAAAAAAACkK/+uextra\", -1, true),\nbuildHold(\"/gAAAAAAAAAsK/+uhold\", 40, true),\nbuildHold(\"/gAAAAAAAAAsK/+uextra\", 3600040, true)),\nnew Action(\nbuildSessionInput(\n6,\n50,\n0,\nCollections.EMPTY_LIST,\nCollections.singletonList(buildWatermarkTimer(\"/s/gAAAAAAAACko/+0\", 40))))\n.withTimers(buildWatermarkTimer(\"/s/gAAAAAAAACko/+0\", 3600040))\n.withHolds(\nbuildHold(\"/gAAAAAAAAAsK/+uhold\", -1, true),\nbuildHold(\"/gAAAAAAAAAsK/+uextra\", -1, true))));\n}\nprivate List makeUnboundedSourcePipeline() throws Exception {\nreturn makeUnboundedSourcePipeline(1, new PrintFn());\n}\nprivate List makeUnboundedSourcePipeline(\nint numMessagesPerShard,\nDoFn>, String> doFn)\nthrows Exception {\nDataflowPipelineOptions options =\nPipelineOptionsFactory.create().as(DataflowPipelineOptions.class);\noptions.setNumWorkers(1);\nCloudObject codec =\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(\nValueWithRecordId.ValueWithRecordIdCoder.of(\nKvCoder.of(VarIntCoder.of(), VarIntCoder.of())),\nGlobalWindow.Coder.INSTANCE),\n/* sdkComponents= */ null);\nreturn Arrays.asList(\nnew ParallelInstruction()\n.setSystemName(\"Read\")\n.setOriginalName(\"OriginalReadName\")\n.setRead(\nnew ReadInstruction()\n.setSource(\nCustomSources.serializeToCloudSource(\nnew TestCountingSource(numMessagesPerShard), 
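/* TestCountingSource is serialized into the Read instruction as a custom unbounded source; its output coder wraps each element in a ValueWithRecordId (see the codec above), which is why the downstream DoFn consumes ValueWithRecordId-wrapped KVs. */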
options)\n.setCodec(codec)))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setName(\"read_output\")\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setCodec(codec))),\nmakeDoFnInstruction(doFn, 0, StringUtf8Coder.of(), WindowingStrategy.globalDefault()),\nmakeSinkInstruction(StringUtf8Coder.of(), 1, GlobalWindow.Coder.INSTANCE));\n}\n@Test\npublic void testUnboundedSources() throws Exception {\nList finalizeTracker = Lists.newArrayList();\nTestCountingSource.setFinalizeTracker(finalizeTracker);\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(\nmakeUnboundedSourcePipeline(),\ncreateTestingPipelineOptions(server),\nfalse /* publishCounters */);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 1\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull));\nMap result = server.waitForAndGetCommits(1);\nIterable counters = worker.buildCounters();\nWindmill.WorkItemCommitRequest commit = result.get(1L);\nUnsignedLong finalizeId =\nUnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nassertThat(\nremoveDynamicFields(commit),\nequalTo(\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(GlobalWindow.Coder.INSTANCE),\nCollections.singletonList(GlobalWindow.INSTANCE)),\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 1 \"\n+ \"cache_token: 1 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 18 \"\n+ \"output_messages {\"\n+ \" destination_stream_id: \\\"out\\\"\"\n+ \" bundles {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"0:0\\\"\"\n+ \" }\"\n+ \" messages_ids: \\\"\\\"\"\n+ \" }\"\n+ \"} \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\"))\n.build()));\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 2\"\n+ \" cache_token: 1\"\n+ \" source_state {\"\n+ \" state: \\\"\\001\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \" } \"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncounters = worker.buildCounters();\ncommit = result.get(2L);\nfinalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nassertThat(\nremoveDynamicFields(commit),\nequalTo(\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 2 \"\n+ \"cache_token: 1 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 0 \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\")\n.build()));\nassertThat(finalizeTracker, contains(0));\nassertNull(getCounter(counters, \"dataflow_input_size-computation\"));\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000002\\\"\"\n+ \" sharding_key: 2\"\n+ \" work_token: 3\"\n+ \" cache_token: 2\"\n+ \" 
source_state {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" } \"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncounters = worker.buildCounters();\ncommit = result.get(3L);\nfinalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nassertThat(\nremoveDynamicFields(commit),\nequalTo(\nparseCommitRequest(\n\"key: \\\"0000000000000002\\\" \"\n+ \"sharding_key: 2 \"\n+ \"work_token: 3 \"\n+ \"cache_token: 2 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 0 \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\")\n.build()));\nassertNull(getCounter(counters, \"dataflow_input_size-computation\"));\n}\n@Test\npublic void testUnboundedSourcesDrain() throws Exception {\nList finalizeTracker = Lists.newArrayList();\nTestCountingSource.setFinalizeTracker(finalizeTracker);\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(\nmakeUnboundedSourcePipeline(),\ncreateTestingPipelineOptions(server),\ntrue /* publishCounters */);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 2\"\n+ \" cache_token: 3\"\n+ \" }\"\n+ \"}\",\nnull));\nMap result = server.waitForAndGetCommits(1);\nWindmill.WorkItemCommitRequest commit = result.get(2L);\nUnsignedLong finalizeId =\nUnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nassertThat(\nremoveDynamicFields(commit),\nequalTo(\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(GlobalWindow.Coder.INSTANCE),\nCollections.singletonList(GlobalWindow.INSTANCE)),\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 2 \"\n+ \"cache_token: 3 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 18 \"\n+ \"output_messages {\"\n+ \" destination_stream_id: \\\"out\\\"\"\n+ \" bundles {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"0:0\\\"\"\n+ \" }\"\n+ \" messages_ids: \\\"\\\"\"\n+ \" }\"\n+ \"} \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\"))\n.build()));\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 3\"\n+ \" cache_token: 3\"\n+ \" source_state {\"\n+ \" only_finalize: true\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \" }\"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncommit = result.get(3L);\nassertThat(\ncommit,\nequalTo(\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 3 \"\n+ \"cache_token: 3 \"\n+ \"source_state_updates {\"\n+ \" only_finalize: true\"\n+ \"} \")\n.build()));\nassertThat(finalizeTracker, contains(0));\n}\n@Test\npublic void testUnboundedSourceWorkRetry() throws Exception {\nList finalizeTracker = Lists.newArrayList();\nTestCountingSource.setFinalizeTracker(finalizeTracker);\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = 
createTestingPipelineOptions(server);\noptions.setWorkerCacheMb(0);\nStreamingDataflowWorker worker =\nmakeWorker(makeUnboundedSourcePipeline(), options, false /* publishCounters */);\nworker.start();\nWindmill.GetWorkResponse work =\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 1\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull);\nserver.whenGetWorkCalled().thenReturn(work);\nMap result = server.waitForAndGetCommits(1);\nIterable counters = worker.buildCounters();\nWindmill.WorkItemCommitRequest commit = result.get(1L);\nUnsignedLong finalizeId =\nUnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nWindmill.WorkItemCommitRequest expectedCommit =\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(GlobalWindow.Coder.INSTANCE),\nCollections.singletonList(GlobalWindow.INSTANCE)),\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 1 \"\n+ \"cache_token: 1 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 18 \"\n+ \"output_messages {\"\n+ \" destination_stream_id: \\\"out\\\"\"\n+ \" bundles {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"0:0\\\"\"\n+ \" }\"\n+ \" messages_ids: \\\"\\\"\"\n+ \" }\"\n+ \"} \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\"))\n.build();\nassertThat(removeDynamicFields(commit), equalTo(expectedCommit));\nserver.clearCommitsReceived();\nserver.whenGetWorkCalled().thenReturn(work);\nresult = server.waitForAndGetCommits(1);\ncommit = result.get(1L);\nfinalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nWindmill.WorkItemCommitRequest.Builder commitBuilder = expectedCommit.toBuilder();\ncommitBuilder\n.getSourceStateUpdatesBuilder()\n.setFinalizeIds(0, commit.getSourceStateUpdates().getFinalizeIds(0));\nexpectedCommit = commitBuilder.build();\nassertThat(removeDynamicFields(commit), equalTo(expectedCommit));\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 2\"\n+ \" cache_token: 1\"\n+ \" source_state {\"\n+ \" state: \\\"\\001\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \" } \"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncommit = result.get(2L);\nfinalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nassertThat(\nremoveDynamicFields(commit),\nequalTo(\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 2 \"\n+ \"cache_token: 1 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 0 \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\")\n.build()));\nassertThat(finalizeTracker, contains(0));\n}\n@Test\npublic void testActiveWork() throws Exception {\nBoundedQueueExecutor mockExecutor = Mockito.mock(BoundedQueueExecutor.class);\nComputationState computationState =\nnew 
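/* ComputationState admits one active work item per key at a time: activateWork schedules the first item on the executor, queues later items for the same key until completeWorkAndScheduleNextWorkForKey promotes them (note the forceExecute for m3), and rejects duplicate work tokens. The verifications below walk through exactly that protocol. */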
ComputationState(\n\"computation\",\ndefaultMapTask(Collections.singletonList(makeSourceInstruction(StringUtf8Coder.of()))),\nmockExecutor,\nImmutableMap.of(),\nnull);\nShardedKey key1 = ShardedKey.create(ByteString.copyFromUtf8(\"key1\"), 1);\nShardedKey key2 = ShardedKey.create(ByteString.copyFromUtf8(\"key2\"), 2);\nWork m1 = createMockWork(1);\nassertTrue(computationState.activateWork(key1, m1));\nMockito.verify(mockExecutor).execute(m1, m1.getWorkItem().getSerializedSize());\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1, 1);\nMockito.verifyNoMoreInteractions(mockExecutor);\nWork m2 = createMockWork(2);\nassertTrue(computationState.activateWork(key1, m2));\nMockito.verify(mockExecutor).execute(m2, m2.getWorkItem().getSerializedSize());\nWork m3 = createMockWork(3);\nassertTrue(computationState.activateWork(key1, m3));\nMockito.verifyNoMoreInteractions(mockExecutor);\nWork m4 = createMockWork(4);\nassertTrue(computationState.activateWork(key2, m4));\nMockito.verify(mockExecutor).execute(m4, m4.getWorkItem().getSerializedSize());\ncomputationState.completeWorkAndScheduleNextWorkForKey(key2, 4);\nMockito.verifyNoMoreInteractions(mockExecutor);\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1, 2);\nMockito.verify(mockExecutor).forceExecute(m3, m3.getWorkItem().getSerializedSize());\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1, 3);\nMockito.verifyNoMoreInteractions(mockExecutor);\nWork m5 = createMockWork(5);\ncomputationState.activateWork(key1, m5);\nMockito.verify(mockExecutor).execute(m5, m5.getWorkItem().getSerializedSize());\nassertFalse(computationState.activateWork(key1, m5));\nMockito.verifyNoMoreInteractions(mockExecutor);\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1, 5);\nMockito.verifyNoMoreInteractions(mockExecutor);\n}\n@Test\npublic void testActiveWorkForShardedKeys() throws Exception {\nBoundedQueueExecutor mockExecutor = Mockito.mock(BoundedQueueExecutor.class);\nComputationState computationState =\nnew ComputationState(\n\"computation\",\ndefaultMapTask(Collections.singletonList(makeSourceInstruction(StringUtf8Coder.of()))),\nmockExecutor,\nImmutableMap.of(),\nnull);\nShardedKey key1Shard1 = ShardedKey.create(ByteString.copyFromUtf8(\"key1\"), 1);\nShardedKey key1Shard2 = ShardedKey.create(ByteString.copyFromUtf8(\"key1\"), 2);\nWork m1 = createMockWork(1);\nassertTrue(computationState.activateWork(key1Shard1, m1));\nMockito.verify(mockExecutor).execute(m1, m1.getWorkItem().getSerializedSize());\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1Shard1, 1);\nMockito.verifyNoMoreInteractions(mockExecutor);\nWork m2 = createMockWork(2);\nassertTrue(computationState.activateWork(key1Shard1, m2));\nMockito.verify(mockExecutor).execute(m2, m2.getWorkItem().getSerializedSize());\nWork m3 = createMockWork(3);\nassertTrue(computationState.activateWork(key1Shard1, m3));\nMockito.verifyNoMoreInteractions(mockExecutor);\nWork m4 = createMockWork(3);\nassertFalse(computationState.activateWork(key1Shard1, m4));\nMockito.verifyNoMoreInteractions(mockExecutor);\nassertTrue(computationState.activateWork(key1Shard2, m4));\nMockito.verify(mockExecutor).execute(m4, m4.getWorkItem().getSerializedSize());\nassertFalse(computationState.activateWork(key1Shard2, m4));\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1Shard2, 3);\nMockito.verifyNoMoreInteractions(mockExecutor);\n}\n@Test\n@Ignore\npublic void testMaxThreadMetric() throws Exception {\nint maxThreads = 2;\nint threadExpiration = 60;\nBoundedQueueExecutor 
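/* Two work items sleeping roughly one second each are pushed onto a two-thread executor, so both threads should be busy concurrently; allThreadsActiveTime is then expected to report close to a full second of fully-occupied time, with 990ms leaving slack for timing jitter. */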
executor =\nnew BoundedQueueExecutor(\nmaxThreads,\nthreadExpiration,\nTimeUnit.SECONDS,\nmaxThreads,\n10000000,\nnew ThreadFactoryBuilder()\n.setNameFormat(\"DataflowWorkUnits-%d\")\n.setDaemon(true)\n.build());\nComputationState computationState =\nnew ComputationState(\n\"computation\",\ndefaultMapTask(Collections.singletonList(makeSourceInstruction(StringUtf8Coder.of()))),\nexecutor,\nImmutableMap.of(),\nnull);\nShardedKey key1Shard1 = ShardedKey.create(ByteString.copyFromUtf8(\"key1\"), 1);\nConsumer<Work> sleepProcessWorkFn =\nunused -> {\ntry {\nThread.sleep(1000);\n} catch (InterruptedException e) {\nThread.currentThread().interrupt();\n}\n};\nWork m2 = createMockWork(2, sleepProcessWorkFn);\nWork m3 = createMockWork(3, sleepProcessWorkFn);\nassertTrue(computationState.activateWork(key1Shard1, m2));\nassertTrue(computationState.activateWork(key1Shard1, m3));\nexecutor.execute(m2, m2.getWorkItem().getSerializedSize());\nexecutor.execute(m3, m3.getWorkItem().getSerializedSize());\n// Both work items sleep for about 1000ms; 990ms is a safe lower bound for the measured active time.\nlong i = 990L;\nassertTrue(executor.allThreadsActiveTime() >= i);\nexecutor.shutdown();\n}\nvolatile boolean stop = false;\n@Test\npublic void testExceptionInvalidatesCache() throws Exception {\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setExpectedExceptionCount(2);\nDataflowPipelineOptions options = createTestingPipelineOptions(server);\noptions.setNumWorkers(1);\nDataflowPipelineDebugOptions debugOptions = options.as(DataflowPipelineDebugOptions.class);\ndebugOptions.setUnboundedReaderMaxElements(1);\nCloudObject codec =\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(\nValueWithRecordId.ValueWithRecordIdCoder.of(\nKvCoder.of(VarIntCoder.of(), VarIntCoder.of())),\nGlobalWindow.Coder.INSTANCE),\n/* sdkComponents= */ null);\nTestCountingSource counter = new TestCountingSource(3).withThrowOnFirstSnapshot(true);\nList<ParallelInstruction> instructions =\nArrays.asList(\nnew ParallelInstruction()\n.setOriginalName(\"OriginalReadName\")\n.setSystemName(\"Read\")\n.setName(DEFAULT_PARDO_USER_NAME)\n.setRead(\nnew ReadInstruction()\n.setSource(\nCustomSources.serializeToCloudSource(counter, options).setCodec(codec)))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setName(\"read_output\")\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setCodec(codec))),\nmakeDoFnInstruction(\nnew TestExceptionInvalidatesCacheFn(),\n0,\nStringUtf8Coder.of(),\nWindowingStrategy.globalDefault()),\nmakeSinkInstruction(StringUtf8Coder.of(), 1, GlobalWindow.Coder.INSTANCE));\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions.as(StreamingDataflowWorkerOptions.class),\ntrue /* publishCounters */);\nworker.setRetryLocallyDelayMs(100);\nworker.start();\nfor (int i = 0; i < 3; i++) {\nByteString state;\nif (i == 0 || i == 1) {\nstate = ByteString.EMPTY;\n} else {\nstate = ByteString.copyFrom(new byte[] {42});\n}\nWindmill.GetDataResponse.Builder dataResponse = Windmill.GetDataResponse.newBuilder();\ndataResponse\n.addDataBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.addDataBuilder()\n.setKey(ByteString.copyFromUtf8(\"0000000000000001\"))\n.setShardingKey(1)\n.addValuesBuilder()\n.setTag(ByteString.copyFromUtf8(\"//+uint\"))\n.setStateFamily(DEFAULT_PARDO_STATE_FAMILY)\n.getValueBuilder()\n.setTimestamp(0)\n.setData(state);\nserver.whenGetDataCalled().thenReturn(dataResponse.build());\n}\nfor (int i = 0; i < 3; i++) {\nStringBuilder sb = new StringBuilder();\nsb.append(\"work {\\n\");\nsb.append(\" computation_id: 
\\\"computation\\\"\\n\");\nsb.append(\" input_data_watermark: 0\\n\");\nsb.append(\" work {\\n\");\nsb.append(\" key: \\\"0000000000000001\\\"\\n\");\nsb.append(\" sharding_key: 1\\n\");\nsb.append(\" work_token: \");\nsb.append(i);\nsb.append(\" cache_token: 1\");\nsb.append(\"\\n\");\nif (i > 0) {\nint previousCheckpoint = i - 1;\nsb.append(\" source_state {\\n\");\nsb.append(\" state: \\\"\");\nsb.append((char) previousCheckpoint);\nsb.append(\"\\\"\\n\");\nsb.append(\" }\\n\");\n}\nsb.append(\" }\\n\");\nsb.append(\"}\\n\");\nserver.whenGetWorkCalled().thenReturn(buildInput(sb.toString(), null));\nMap result = server.waitForAndGetCommits(1);\nWindmill.WorkItemCommitRequest commit = result.get((long) i);\nUnsignedLong finalizeId =\nUnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nsb = new StringBuilder();\nsb.append(\"key: \\\"0000000000000001\\\"\\n\");\nsb.append(\"sharding_key: 1\\n\");\nsb.append(\"work_token: \");\nsb.append(i);\nsb.append(\"\\n\");\nsb.append(\"cache_token: 1\\n\");\nsb.append(\"output_messages {\\n\");\nsb.append(\" destination_stream_id: \\\"out\\\"\\n\");\nsb.append(\" bundles {\\n\");\nsb.append(\" key: \\\"0000000000000001\\\"\\n\");\nint messageNum = i;\nsb.append(\" messages {\\n\");\nsb.append(\" timestamp: \");\nsb.append(messageNum * 1000);\nsb.append(\"\\n\");\nsb.append(\" data: \\\"0:\");\nsb.append(messageNum);\nsb.append(\"\\\"\\n\");\nsb.append(\" }\\n\");\nsb.append(\" messages_ids: \\\"\\\"\\n\");\nsb.append(\" }\\n\");\nsb.append(\"}\\n\");\nif (i == 0) {\nsb.append(\"value_updates {\\n\");\nsb.append(\" tag: \\\"\nsb.append(\" value {\\n\");\nsb.append(\" timestamp: 0\\n\");\nsb.append(\" data: \\\"\");\nsb.append((char) 42);\nsb.append(\"\\\"\\n\");\nsb.append(\" }\\n\");\nsb.append(\" state_family: \\\"parDoStateFamily\\\"\\n\");\nsb.append(\"}\\n\");\n}\nint sourceState = i;\nsb.append(\"source_state_updates {\\n\");\nsb.append(\" state: \\\"\");\nsb.append((char) sourceState);\nsb.append(\"\\\"\\n\");\nsb.append(\" finalize_ids: \");\nsb.append(finalizeId);\nsb.append(\"}\\n\");\nsb.append(\"source_watermark: \");\nsb.append((sourceState + 1) * 1000);\nsb.append(\"\\n\");\nsb.append(\"source_backlog_bytes: 7\\n\");\nassertThat(\nsetValuesTimestamps(\nremoveDynamicFields(commit)\n.toBuilder()\n.clearOutputTimers()\n.clearSourceBytesProcessed())\n.build(),\nequalTo(\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(GlobalWindow.Coder.INSTANCE),\nImmutableList.of(GlobalWindow.INSTANCE)),\nparseCommitRequest(sb.toString()))\n.build()));\n}\n}\n@Test\npublic void testHugeCommits() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(new FanoutFn(), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nserver.whenGetWorkCalled().thenReturn(makeInput(0, TimeUnit.MILLISECONDS.toMicros(0)));\nserver.waitForAndGetCommits(0);\nworker.stop();\n}\n@Test\npublic void testActiveWorkRefresh() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(new SlowDoFn(), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new 
FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nserver.whenGetWorkCalled().thenReturn(makeInput(0, TimeUnit.MILLISECONDS.toMicros(0)));\nserver.waitForAndGetCommits(1);\nworker.stop();\nassertThat(server.numGetDataRequests(), greaterThan(0));\n}\n@Test\npublic void testLatencyAttributionProtobufsPopulated() {\nFakeClock clock = new FakeClock();\nWork work = Work.create(null, clock, Collections.emptyList(), unused -> {});\nclock.sleep(Duration.millis(10));\nwork.setState(Work.State.PROCESSING);\nclock.sleep(Duration.millis(20));\nwork.setState(Work.State.READING);\nclock.sleep(Duration.millis(30));\nwork.setState(Work.State.PROCESSING);\nclock.sleep(Duration.millis(40));\nwork.setState(Work.State.COMMIT_QUEUED);\nclock.sleep(Duration.millis(50));\nwork.setState(Work.State.COMMITTING);\nclock.sleep(Duration.millis(60));\nIterator it = work.getLatencyAttributions().iterator();\nassertTrue(it.hasNext());\nLatencyAttribution lat = it.next();\nassertSame(State.QUEUED, lat.getState());\nassertEquals(10, lat.getTotalDurationMillis());\nassertTrue(it.hasNext());\nlat = it.next();\nassertSame(State.ACTIVE, lat.getState());\nassertEquals(60, lat.getTotalDurationMillis());\nassertTrue(it.hasNext());\nlat = it.next();\nassertSame(State.READING, lat.getState());\nassertEquals(30, lat.getTotalDurationMillis());\nassertTrue(it.hasNext());\nlat = it.next();\nassertSame(State.COMMITTING, lat.getState());\nassertEquals(110, lat.getTotalDurationMillis());\nassertFalse(it.hasNext());\n}\n@Test\npublic void testLatencyAttributionToQueuedState() throws Exception {\nfinal int workToken = 3232;\nFakeClock clock = new FakeClock();\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(\nnew FakeSlowDoFn(clock, Duration.millis(1000)), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\noptions.setNumberOfWorkerHarnessThreads(1);\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions,\nfalse /* publishCounters */,\nclock,\nclock::newFakeScheduledExecutor);\nworker.start();\nActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);\nserver.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(workToken + 1, 0 /* timestamp */))\n.thenReturn(makeInput(workToken, 1 /* timestamp */));\nserver.waitForAndGetCommits(2);\nworker.stop();\nassertEquals(\nawrSink.getLatencyAttributionDuration(workToken, State.QUEUED), Duration.millis(1000));\nassertEquals(awrSink.getLatencyAttributionDuration(workToken + 1, State.QUEUED), Duration.ZERO);\n}\n@Test\npublic void testLatencyAttributionToActiveState() throws Exception {\nfinal int workToken = 4242;\nFakeClock clock = new FakeClock();\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(\nnew FakeSlowDoFn(clock, Duration.millis(1000)), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = 
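/* Time only advances on the shared FakeClock, so the 1000ms spent inside FakeSlowDoFn should be attributed entirely to the ACTIVE state; the ActiveWorkRefreshSink records the latency attributions carried by heartbeat GetData requests for later inspection. */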
createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions,\nfalse /* publishCounters */,\nclock,\nclock::newFakeScheduledExecutor);\nworker.start();\nActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);\nserver.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);\nserver.whenGetWorkCalled().thenReturn(makeInput(workToken, 0 /* timestamp */));\nserver.waitForAndGetCommits(1);\nworker.stop();\nassertEquals(\nawrSink.getLatencyAttributionDuration(workToken, State.ACTIVE), Duration.millis(1000));\n}\n@Test\npublic void testLatencyAttributionToReadingState() throws Exception {\nfinal int workToken = 5454;\nFakeClock clock = new FakeClock();\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(new ReadingDoFn(), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions,\nfalse /* publishCounters */,\nclock,\nclock::newFakeScheduledExecutor);\nworker.start();\nActiveWorkRefreshSink awrSink =\nnew ActiveWorkRefreshSink(\n(request) -> {\nclock.sleep(Duration.millis(1000));\nreturn EMPTY_DATA_RESPONDER.apply(request);\n});\nserver.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);\nserver.whenGetWorkCalled().thenReturn(makeInput(workToken, 0 /* timestamp */));\nserver.waitForAndGetCommits(1);\nworker.stop();\nassertEquals(\nawrSink.getLatencyAttributionDuration(workToken, State.READING), Duration.millis(1000));\n}\n@Test\npublic void testLatencyAttributionToCommittingState() throws Exception {\nfinal int workToken = 6464;\nFakeClock clock = new FakeClock();\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver\n.whenCommitWorkCalled()\n.answerByDefault(\n(request) -> {\nclock.sleep(Duration.millis(1000));\nreturn Windmill.CommitWorkResponse.getDefaultInstance();\n});\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions,\nfalse /* publishCounters */,\nclock,\nclock::newFakeScheduledExecutor);\nworker.start();\nActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);\nserver.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);\nserver.whenGetWorkCalled().thenReturn(makeInput(workToken, TimeUnit.MILLISECONDS.toMicros(0)));\nserver.waitForAndGetCommits(1);\nworker.stop();\nassertEquals(\nawrSink.getLatencyAttributionDuration(workToken, State.COMMITTING), Duration.millis(1000));\n}\n@Test\npublic void testLatencyAttributionPopulatedInCommitRequest() throws Exception {\nfinal int workToken = 7272;\nlong dofnWaitTimeMs = 1000;\nFakeClock clock = new FakeClock();\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(\nnew FakeSlowDoFn(clock, Duration.millis(dofnWaitTimeMs)), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = 
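/* Unlike the preceding tests, which observe latency attributions on heartbeat requests, this one checks the commit request itself: the ACTIVE entry should equal the DoFn wait time, and on streaming engine an additional GET_WORK_IN_TRANSIT_TO_USER_WORKER entry is expected. */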
new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\noptions.setNumberOfWorkerHarnessThreads(1);\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions,\nfalse /* publishCounters */,\nclock,\nclock::newFakeScheduledExecutor);\nworker.start();\nActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);\nserver.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);\nserver.whenGetWorkCalled().thenReturn(makeInput(workToken, 1 /* timestamp */));\nMap workItemCommitRequest = server.waitForAndGetCommits(1);\nworker.stop();\nassertEquals(\nworkItemCommitRequest.get((long) workToken).getPerWorkItemLatencyAttributions(0),\nLatencyAttribution.newBuilder()\n.setState(State.ACTIVE)\n.setTotalDurationMillis(dofnWaitTimeMs)\n.build());\nif (streamingEngine) {\nassertEquals(\nworkItemCommitRequest.get((long) workToken).getPerWorkItemLatencyAttributions(1),\nLatencyAttribution.newBuilder()\n.setState(State.GET_WORK_IN_TRANSIT_TO_USER_WORKER)\n.setTotalDurationMillis(1000)\n.build());\n}\n}\n@Test\npublic void testLimitOnOutputBundleSize() throws Exception {\nList finalizeTracker = Lists.newArrayList();\nTestCountingSource.setFinalizeTracker(finalizeTracker);\nfinal int numMessagesInCustomSourceShard = 100000;\nfinal int inflatedSizePerMessage = 10000;\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(\nmakeUnboundedSourcePipeline(\nnumMessagesInCustomSourceShard, new InflateDoFn(inflatedSizePerMessage)),\ncreateTestingPipelineOptions(server),\nfalse /* publishCounters */);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 1\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull));\nMatcher isWithinBundleSizeLimits =\nboth(greaterThan(StreamingDataflowWorker.MAX_SINK_BYTES * 9 / 10))\n.and(lessThan(StreamingDataflowWorker.MAX_SINK_BYTES * 11 / 10));\nMap result = server.waitForAndGetCommits(1);\nWindmill.WorkItemCommitRequest commit = result.get(1L);\nassertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 2\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncommit = result.get(2L);\nassertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);\n}\n@Test\npublic void testLimitOnOutputBundleSizeWithMultipleSinks() throws Exception {\nList finalizeTracker = Lists.newArrayList();\nTestCountingSource.setFinalizeTracker(finalizeTracker);\nfinal int numMessagesInCustomSourceShard = 100000;\nfinal int inflatedSizePerMessage = 10000;\nList instructions = new ArrayList<>();\ninstructions.addAll(\nmakeUnboundedSourcePipeline(\nnumMessagesInCustomSourceShard, new InflateDoFn(inflatedSizePerMessage)));\ninstructions.add(\nmakeSinkInstruction(\nDEFAULT_DESTINATION_STREAM_ID + \"-1\",\nStringUtf8Coder.of(),\n1,\nGlobalWindow.Coder.INSTANCE));\ninstructions.add(\nmakeSinkInstruction(\nDEFAULT_DESTINATION_STREAM_ID + 
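/* second extra sink; together with the sink already inside makeUnboundedSourcePipeline, the commit output spans multiple destination streams, and the bundle size limit should hold for their combined size */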
\"-2\",\nStringUtf8Coder.of(),\n1,\nGlobalWindow.Coder.INSTANCE));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 1\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull));\nMatcher isWithinBundleSizeLimits =\nboth(greaterThan(StreamingDataflowWorker.MAX_SINK_BYTES * 9 / 10))\n.and(lessThan(StreamingDataflowWorker.MAX_SINK_BYTES * 11 / 10));\nMap result = server.waitForAndGetCommits(1);\nWindmill.WorkItemCommitRequest commit = result.get(1L);\nassertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 2\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncommit = result.get(2L);\nassertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);\n}\n@Test\npublic void testStuckCommit() throws Exception {\nif (!streamingEngine) {\nreturn;\n}\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setStuckCommitDurationMillis(2000);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nserver.setDropStreamingCommits(true);\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(10, TimeUnit.MILLISECONDS.toMicros(2), DEFAULT_KEY_STRING, 1))\n.thenReturn(makeInput(15, TimeUnit.MILLISECONDS.toMicros(3), DEFAULT_KEY_STRING, 5));\nConcurrentHashMap> droppedCommits =\nserver.waitForDroppedCommits(2);\nserver.setDropStreamingCommits(false);\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(1, TimeUnit.MILLISECONDS.toMicros(1), DEFAULT_KEY_STRING, 1));\nMap result = server.waitForAndGetCommits(1);\ndroppedCommits.values().iterator().next().accept(CommitStatus.OK);\nworker.stop();\nassertTrue(result.containsKey(1L));\nassertEquals(\nmakeExpectedOutput(\n1, TimeUnit.MILLISECONDS.toMicros(1), DEFAULT_KEY_STRING, 1, DEFAULT_KEY_STRING)\n.build(),\nremoveDynamicFields(result.get(1L)));\n}\nstatic class BlockingFn extends DoFn implements TestRule {\npublic static CountDownLatch blocker = new CountDownLatch(1);\npublic static Semaphore counter = new Semaphore(0);\npublic static AtomicInteger callCounter = new AtomicInteger(0);\n@ProcessElement\npublic void processElement(ProcessContext c) throws InterruptedException {\ncallCounter.incrementAndGet();\ncounter.release();\nblocker.await();\nc.output(c.element());\n}\n@Override\npublic Statement apply(final Statement base, final Description description) {\nreturn new Statement() {\n@Override\npublic void evaluate() throws Throwable {\nblocker = new CountDownLatch(1);\ncounter = new Semaphore(0);\ncallCounter = new AtomicInteger();\nbase.evaluate();\n}\n};\n}\n}\nstatic class KeyTokenInvalidFn extends DoFn, KV> {\nstatic boolean thrown = false;\n@ProcessElement\npublic void 
processElement(ProcessContext c) {\nif (!thrown) {\nthrown = true;\nthrow new KeyTokenInvalidException(\"key\");\n} else {\nc.output(c.element());\n}\n}\n}\nstatic class LargeCommitFn extends DoFn, KV> {\n@ProcessElement\npublic void processElement(ProcessContext c) {\nif (c.element().getKey().equals(\"large_key\")) {\nStringBuilder s = new StringBuilder();\nfor (int i = 0; i < 100; ++i) {\ns.append(\"large_commit\");\n}\nc.output(KV.of(c.element().getKey(), s.toString()));\n} else {\nc.output(c.element());\n}\n}\n}\nstatic class ChangeKeysFn extends DoFn, KV> {\n@ProcessElement\npublic void processElement(ProcessContext c) {\nKV elem = c.element();\nc.output(KV.of(elem.getKey() + \"_\" + elem.getValue(), elem.getValue()));\n}\n}\nstatic class TestExceptionFn extends DoFn {\nboolean firstTime = true;\n@ProcessElement\npublic void processElement(ProcessContext c) throws Exception {\nif (firstTime) {\nfirstTime = false;\ntry {\nthrow new Exception(\"Exception!\");\n} catch (Exception e) {\nthrow new Exception(\"Another exception!\", e);\n}\n}\n}\n}\nstatic class PassthroughDoFn\nextends DoFn>, KV>> {\n@ProcessElement\npublic void processElement(ProcessContext c) {\nc.output(c.element());\n}\n}\nstatic class Action {\nGetWorkResponse response;\nTimer[] expectedTimers = new Timer[] {};\nWatermarkHold[] expectedHolds = new WatermarkHold[] {};\npublic Action(GetWorkResponse response) {\nthis.response = response;\n}\nAction withHolds(WatermarkHold... holds) {\nthis.expectedHolds = holds;\nreturn this;\n}\nAction withTimers(Timer... timers) {\nthis.expectedTimers = timers;\nreturn this;\n}\n}\nstatic class PrintFn extends DoFn>, String> {\n@ProcessElement\npublic void processElement(ProcessContext c) {\nKV elem = c.element().getValue();\nc.output(elem.getKey() + \":\" + elem.getValue());\n}\n}\nprivate static class MockWork {\nWork create(long workToken) {\nreturn Work.create(\nWindmill.WorkItem.newBuilder().setKey(ByteString.EMPTY).setWorkToken(workToken).build(),\nInstant::now,\nCollections.emptyList(),\nwork -> {});\n}\n}\nstatic class TestExceptionInvalidatesCacheFn\nextends DoFn>, String> {\nstatic boolean thrown = false;\n@StateId(\"int\")\nprivate final StateSpec> counter = StateSpecs.value(VarIntCoder.of());\n@ProcessElement\npublic void processElement(ProcessContext c, @StateId(\"int\") ValueState state)\nthrows Exception {\nKV elem = c.element().getValue();\nif (elem.getValue() == 0) {\nLOG.error(\"**** COUNTER 0 ****\");\nassertNull(state.read());\nstate.write(42);\nassertEquals((Integer) 42, state.read());\n} else if (elem.getValue() == 1) {\nLOG.error(\"**** COUNTER 1 ****\");\nassertEquals((Integer) 42, state.read());\n} else if (elem.getValue() == 2) {\nif (!thrown) {\nLOG.error(\"**** COUNTER 2 (will throw) ****\");\nthrown = true;\nthrow new Exception(\"Exception!\");\n}\nLOG.error(\"**** COUNTER 2 (retry) ****\");\nassertEquals((Integer) 42, state.read());\n} else {\nthrow new RuntimeException(\"only expecting values [0,2]\");\n}\nc.output(elem.getKey() + \":\" + elem.getValue());\n}\n}\nprivate static class FanoutFn extends DoFn {\n@ProcessElement\npublic void processElement(ProcessContext c) {\nStringBuilder builder = new StringBuilder(1000000);\nfor (int i = 0; i < 1000000; i++) {\nbuilder.append(' ');\n}\nString largeString = builder.toString();\nfor (int i = 0; i < 3000; i++) {\nc.output(largeString);\n}\n}\n}\nprivate static class SlowDoFn extends DoFn {\n@ProcessElement\npublic void processElement(ProcessContext c) throws Exception 
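/* SlowDoFn sleeps on the real clock, unlike FakeSlowDoFn below which sleeps on the shared FakeClock; the real delay gives the active work refresh timer wall-clock time in which to fire. */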
{\nThread.sleep(1000);\nc.output(c.element());\n}\n}\nstatic class FakeClock implements Supplier<Instant> {\nprivate final PriorityQueue<Job> jobs = new PriorityQueue<>();\nprivate Instant now = Instant.now();\npublic ScheduledExecutorService newFakeScheduledExecutor(String unused) {\nreturn new FakeScheduledExecutor();\n}\n@Override\npublic synchronized Instant get() {\nreturn now;\n}\npublic synchronized void clear() {\njobs.clear();\n}\npublic synchronized void sleep(Duration duration) {\nif (duration.isShorterThan(Duration.ZERO)) {\nthrow new UnsupportedOperationException(\"Cannot sleep backwards in time\");\n}\nInstant endOfSleep = now.plus(duration);\nwhile (true) {\nJob job = jobs.peek();\nif (job == null || job.when.isAfter(endOfSleep)) {\nbreak;\n}\njobs.remove();\nnow = job.when;\njob.work.run();\n}\nnow = endOfSleep;\n}\nprivate synchronized void schedule(Duration fromNow, Runnable work) {\njobs.add(new Job(now.plus(fromNow), work));\n}\nprivate static class Job implements Comparable<Job> {\nfinal Instant when;\nfinal Runnable work;\nJob(Instant when, Runnable work) {\nthis.when = when;\nthis.work = work;\n}\n@Override\npublic int compareTo(Job job) {\nreturn when.compareTo(job.when);\n}\n}\nprivate class FakeScheduledExecutor implements ScheduledExecutorService {\n@Override\npublic boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {\nreturn true;\n}\n@Override\npublic void execute(Runnable command) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)\nthrows InterruptedException {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic <T> List<Future<T>> invokeAll(\nCollection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)\nthrows InterruptedException {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic <T> T invokeAny(Collection<? extends Callable<T>> tasks)\nthrows ExecutionException, InterruptedException {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)\nthrows ExecutionException, InterruptedException, TimeoutException {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic boolean isShutdown() {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic boolean isTerminated() {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic void shutdown() {}\n@Override\npublic List<Runnable> shutdownNow() {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic <T> Future<T> submit(Callable<T> task) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic Future<?> submit(Runnable task) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic <T> Future<T> submit(Runnable task, T result) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic <V> ScheduledFuture<V> schedule(Callable<V> callable, long delay, TimeUnit unit) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic ScheduledFuture<?> scheduleAtFixedRate(\nRunnable command, long initialDelay, long period, TimeUnit unit) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic ScheduledFuture<?> 
scheduleWithFixedDelay(\nRunnable command, long initialDelay, long delay, TimeUnit unit) {\nif (delay <= 0) {\nthrow new UnsupportedOperationException(\n\"Please supply a delay > 0 to scheduleWithFixedDelay\");\n}\nFakeClock.this.schedule(\nDuration.millis(unit.toMillis(initialDelay)),\nnew Runnable() {\n@Override\npublic void run() {\ncommand.run();\nFakeClock.this.schedule(Duration.millis(unit.toMillis(delay)), this);\n}\n});\nFakeClock.this.sleep(Duration.ZERO);\nreturn null;\n}\n}\n}\nprivate static class FakeSlowDoFn extends DoFn<String, String> {\nprivate static FakeClock clock;\nprivate final Duration sleep;\nFakeSlowDoFn(FakeClock clock, Duration sleep) {\nFakeSlowDoFn.clock = clock;\nthis.sleep = sleep;\n}\n@ProcessElement\npublic void processElement(ProcessContext c) throws Exception {\nclock.sleep(sleep);\nc.output(c.element());\n}\n}\nstatic class ActiveWorkRefreshSink {\nprivate final Function<GetDataRequest, GetDataResponse> responder;\nprivate final Map<Long, EnumMap<LatencyAttribution.State, Duration>> totalDurations =\nnew HashMap<>();\nActiveWorkRefreshSink(Function<GetDataRequest, GetDataResponse> responder) {\nthis.responder = responder;\n}\nDuration getLatencyAttributionDuration(long workToken, LatencyAttribution.State state) {\nEnumMap<LatencyAttribution.State, Duration> durations = totalDurations.get(workToken);\nreturn durations == null ? Duration.ZERO : durations.getOrDefault(state, Duration.ZERO);\n}\nboolean isActiveWorkRefresh(GetDataRequest request) {\nfor (ComputationGetDataRequest computationRequest : request.getRequestsList()) {\nif (!computationRequest.getComputationId().equals(DEFAULT_COMPUTATION_ID)) {\nreturn false;\n}\nfor (KeyedGetDataRequest keyedRequest : computationRequest.getRequestsList()) {\nif (keyedRequest.getWorkToken() == 0\n|| keyedRequest.getShardingKey() != DEFAULT_SHARDING_KEY\n|| keyedRequest.getValuesToFetchCount() != 0\n|| keyedRequest.getBagsToFetchCount() != 0\n|| keyedRequest.getTagValuePrefixesToFetchCount() != 0\n|| keyedRequest.getWatermarkHoldsToFetchCount() != 0) {\nreturn false;\n}\n}\n}\nreturn true;\n}\nGetDataResponse getData(GetDataRequest request) {\nif (!isActiveWorkRefresh(request)) {\nreturn responder.apply(request);\n}\nfor (ComputationGetDataRequest computationRequest : request.getRequestsList()) {\nfor (KeyedGetDataRequest keyedRequest : computationRequest.getRequestsList()) {\nfor (LatencyAttribution la : keyedRequest.getLatencyAttributionList()) {\nEnumMap<LatencyAttribution.State, Duration> durations =\ntotalDurations.computeIfAbsent(\nkeyedRequest.getWorkToken(),\n(Long workToken) ->\nnew EnumMap<LatencyAttribution.State, Duration>(\nLatencyAttribution.State.class));\nDuration cur = Duration.millis(la.getTotalDurationMillis());\ndurations.compute(la.getState(), (s, d) -> d == null || d.isShorterThan(cur) ? cur : d);\n}\n}\n}\nreturn EMPTY_DATA_RESPONDER.apply(request);\n}\n}\nstatic class ReadingDoFn extends DoFn<String, String> {\n@StateId(\"int\")\nprivate final StateSpec<ValueState<Integer>> counter = StateSpecs.value(VarIntCoder.of());\n@ProcessElement\npublic void processElement(ProcessContext c, @StateId(\"int\") ValueState<Integer> state) {\nstate.read();\nc.output(c.element());\n}\n}\n/** For each input element, emits a large string. 
*/\nprivate static class InflateDoFn extends DoFn<ValueWithRecordId<KV<Integer, Integer>>, String> {\nfinal int inflatedSize;\n/** For each input element, outputs a string of this length. */\nInflateDoFn(int inflatedSize) {\nthis.inflatedSize = inflatedSize;\n}\n@ProcessElement\npublic void processElement(ProcessContext c) {\nchar[] chars = new char[inflatedSize];\nArrays.fill(chars, ' ');\nc.output(new String(chars));\n}\n}\n}", "context_after": "class StreamingDataflowWorkerTest {\nprivate static final Logger LOG = LoggerFactory.getLogger(StreamingDataflowWorkerTest.class);\nprivate static final IntervalWindow DEFAULT_WINDOW =\nnew IntervalWindow(new Instant(1234), Duration.millis(1000));\nprivate static final IntervalWindow WINDOW_AT_ZERO =\nnew IntervalWindow(new Instant(0), new Instant(1000));\nprivate static final IntervalWindow WINDOW_AT_ONE_SECOND =\nnew IntervalWindow(new Instant(1000), new Instant(2000));\nprivate static final Coder<IntervalWindow> DEFAULT_WINDOW_CODER = IntervalWindow.getCoder();\nprivate static final Coder<Collection<IntervalWindow>> DEFAULT_WINDOW_COLLECTION_CODER =\nCollectionCoder.of(DEFAULT_WINDOW_CODER);\nprivate static final String DEFAULT_COMPUTATION_ID = \"computation\";\nprivate static final String DEFAULT_MAP_STAGE_NAME = \"computation\";\nprivate static final String DEFAULT_MAP_SYSTEM_NAME = \"computation\";\nprivate static final String DEFAULT_OUTPUT_ORIGINAL_NAME = \"originalName\";\nprivate static final String DEFAULT_OUTPUT_SYSTEM_NAME = \"systemName\";\nprivate static final String DEFAULT_PARDO_SYSTEM_NAME = \"parDo\";\nprivate static final String DEFAULT_PARDO_ORIGINAL_NAME = \"parDoOriginalName\";\nprivate static final String DEFAULT_PARDO_USER_NAME = \"parDoUserName\";\nprivate static final String DEFAULT_PARDO_STATE_FAMILY = \"parDoStateFamily\";\nprivate static final String DEFAULT_SOURCE_SYSTEM_NAME = \"source\";\nprivate static final String DEFAULT_SOURCE_ORIGINAL_NAME = \"sourceOriginalName\";\nprivate static final String DEFAULT_SINK_SYSTEM_NAME = \"sink\";\nprivate static final String DEFAULT_SINK_ORIGINAL_NAME = \"sinkOriginalName\";\nprivate static final String DEFAULT_SOURCE_COMPUTATION_ID = \"upstream\";\nprivate static final String DEFAULT_KEY_STRING = \"key\";\nprivate static final long DEFAULT_SHARDING_KEY = 12345;\nprivate static final ByteString DEFAULT_KEY_BYTES = ByteString.copyFromUtf8(DEFAULT_KEY_STRING);\nprivate static final String DEFAULT_DATA_STRING = \"data\";\nprivate static final String DEFAULT_DESTINATION_STREAM_ID = \"out\";\nprivate static final Function<GetDataRequest, GetDataResponse> EMPTY_DATA_RESPONDER =\n(GetDataRequest request) -> {\nGetDataResponse.Builder builder = GetDataResponse.newBuilder();\nfor (ComputationGetDataRequest compRequest : request.getRequestsList()) {\nComputationGetDataResponse.Builder compBuilder =\nbuilder.addDataBuilder().setComputationId(compRequest.getComputationId());\nfor (KeyedGetDataRequest keyRequest : compRequest.getRequestsList()) {\nKeyedGetDataResponse.Builder keyBuilder =\ncompBuilder\n.addDataBuilder()\n.setKey(keyRequest.getKey())\n.setShardingKey(keyRequest.getShardingKey());\nkeyBuilder.addAllValues(keyRequest.getValuesToFetchList());\nkeyBuilder.addAllBags(keyRequest.getBagsToFetchList());\nkeyBuilder.addAllWatermarkHolds(keyRequest.getWatermarkHoldsToFetchList());\n}\n}\nreturn builder.build();\n};\nprivate final boolean streamingEngine;\nprivate final Supplier<Long> idGenerator =\nnew Supplier<Long>() {\nprivate final AtomicLong idGenerator = new AtomicLong(1L);\n@Override\npublic Long get() {\nreturn idGenerator.getAndIncrement();\n}\n};\n@Rule public BlockingFn blockingFn = new 
BlockingFn();\n@Rule public TestRule restoreMDC = new RestoreDataflowLoggingMDC();\n@Rule public ErrorCollector errorCollector = new ErrorCollector();\nWorkUnitClient mockWorkUnitClient = mock(WorkUnitClient.class);\nHotKeyLogger hotKeyLogger = mock(HotKeyLogger.class);\npublic StreamingDataflowWorkerTest(Boolean streamingEngine) {\nthis.streamingEngine = streamingEngine;\n}\n@Parameterized.Parameters(name = \"{index}: [streamingEngine={0}]\")\npublic static Iterable<Object[]> data() {\nreturn Arrays.asList(new Object[][] {{false}, {true}});\n}\nprivate static CounterUpdate getCounter(Iterable<CounterUpdate> counters, String name) {\nfor (CounterUpdate counter : counters) {\nif (counter.getNameAndKind().getName().equals(name)) {\nreturn counter;\n}\n}\nreturn null;\n}\nstatic Work createMockWork(long workToken) {\nreturn Work.create(\nWindmill.WorkItem.newBuilder().setKey(ByteString.EMPTY).setWorkToken(workToken).build(),\nInstant::now,\nCollections.emptyList(),\nwork -> {});\n}\nstatic Work createMockWork(long workToken, Consumer<Work> processWorkFn) {\nreturn Work.create(\nWindmill.WorkItem.newBuilder().setKey(ByteString.EMPTY).setWorkToken(workToken).build(),\nInstant::now,\nCollections.emptyList(),\nprocessWorkFn);\n}\nprivate byte[] intervalWindowBytes(IntervalWindow window) throws Exception {\nreturn CoderUtils.encodeToByteArray(\nDEFAULT_WINDOW_COLLECTION_CODER, Collections.singletonList(window));\n}\nprivate String keyStringForIndex(int index) {\nreturn DEFAULT_KEY_STRING + index;\n}\nprivate String dataStringForIndex(long index) {\nreturn DEFAULT_DATA_STRING + index;\n}\nprivate ParallelInstruction makeWindowingSourceInstruction(Coder<?> coder) {\nCloudObject timerCloudObject =\nCloudObject.forClassName(\n\"com.google.cloud.dataflow.sdk.util.TimerOrElement$TimerOrElementCoder\");\nList<CloudObject> component =\nCollections.singletonList(CloudObjects.asCloudObject(coder, /* sdkComponents= */ null));\nStructs.addList(timerCloudObject, PropertyNames.COMPONENT_ENCODINGS, component);\nCloudObject encodedCoder = CloudObject.forClassName(\"kind:windowed_value\");\nStructs.addBoolean(encodedCoder, PropertyNames.IS_WRAPPER, true);\nStructs.addList(\nencodedCoder,\nPropertyNames.COMPONENT_ENCODINGS,\nImmutableList.of(\ntimerCloudObject,\nCloudObjects.asCloudObject(IntervalWindowCoder.of(), /* sdkComponents= */ null)));\nreturn new ParallelInstruction()\n.setSystemName(DEFAULT_SOURCE_SYSTEM_NAME)\n.setOriginalName(DEFAULT_SOURCE_ORIGINAL_NAME)\n.setRead(\nnew ReadInstruction()\n.setSource(\nnew Source()\n.setSpec(CloudObject.forClass(WindowingWindmillReader.class))\n.setCodec(encodedCoder)))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setName(Long.toString(idGenerator.get()))\n.setCodec(encodedCoder)\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)));\n}\nprivate ParallelInstruction makeSourceInstruction(Coder<?> coder) {\nreturn new ParallelInstruction()\n.setSystemName(DEFAULT_SOURCE_SYSTEM_NAME)\n.setOriginalName(DEFAULT_SOURCE_ORIGINAL_NAME)\n.setRead(\nnew ReadInstruction()\n.setSource(\nnew Source()\n.setSpec(CloudObject.forClass(UngroupedWindmillReader.class))\n.setCodec(\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(coder, IntervalWindow.getCoder()),\n/* sdkComponents= */ null))))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setName(Long.toString(idGenerator.get()))\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setCodec(\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(coder, 
IntervalWindow.getCoder()),\n/* sdkComponents= */ null))));\n}\nprivate ParallelInstruction makeDoFnInstruction(\nDoFn doFn,\nint producerIndex,\nCoder outputCoder,\nWindowingStrategy windowingStrategy) {\nCloudObject spec = CloudObject.forClassName(\"DoFn\");\naddString(\nspec,\nPropertyNames.SERIALIZED_FN,\nStringUtils.byteArrayToJsonString(\nSerializableUtils.serializeToByteArray(\nDoFnInfo.forFn(\ndoFn,\nwindowingStrategy /* windowing strategy */,\nnull /* side input views */,\nnull /* input coder */,\nnew TupleTag<>(PropertyNames.OUTPUT) /* main output id */,\nDoFnSchemaInformation.create(),\nCollections.emptyMap()))));\nreturn new ParallelInstruction()\n.setSystemName(DEFAULT_PARDO_SYSTEM_NAME)\n.setName(DEFAULT_PARDO_USER_NAME)\n.setOriginalName(DEFAULT_PARDO_ORIGINAL_NAME)\n.setParDo(\nnew ParDoInstruction()\n.setInput(\nnew InstructionInput()\n.setProducerInstructionIndex(producerIndex)\n.setOutputNum(0))\n.setNumOutputs(1)\n.setUserFn(spec)\n.setMultiOutputInfos(\nCollections.singletonList(new MultiOutputInfo().setTag(PropertyNames.OUTPUT))))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setName(PropertyNames.OUTPUT)\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setCodec(\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(\noutputCoder, windowingStrategy.getWindowFn().windowCoder()),\n/* sdkComponents= */ null))));\n}\nprivate ParallelInstruction makeDoFnInstruction(\nDoFn doFn, int producerIndex, Coder outputCoder) {\nWindowingStrategy windowingStrategy =\nWindowingStrategy.of(FixedWindows.of(Duration.millis(10)));\nreturn makeDoFnInstruction(doFn, producerIndex, outputCoder, windowingStrategy);\n}\nprivate ParallelInstruction makeSinkInstruction(\nString streamId,\nCoder coder,\nint producerIndex,\nCoder windowCoder) {\nCloudObject spec = CloudObject.forClass(WindmillSink.class);\naddString(spec, \"stream_id\", streamId);\nreturn new ParallelInstruction()\n.setSystemName(DEFAULT_SINK_SYSTEM_NAME)\n.setOriginalName(DEFAULT_SINK_ORIGINAL_NAME)\n.setWrite(\nnew WriteInstruction()\n.setInput(\nnew InstructionInput()\n.setProducerInstructionIndex(producerIndex)\n.setOutputNum(0))\n.setSink(\nnew Sink()\n.setSpec(spec)\n.setCodec(\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(coder, windowCoder),\n/* sdkComponents= */ null))));\n}\nprivate ParallelInstruction makeSinkInstruction(\nCoder coder, int producerIndex, Coder windowCoder) {\nreturn makeSinkInstruction(DEFAULT_DESTINATION_STREAM_ID, coder, producerIndex, windowCoder);\n}\nprivate ParallelInstruction makeSinkInstruction(Coder coder, int producerIndex) {\nreturn makeSinkInstruction(coder, producerIndex, IntervalWindow.getCoder());\n}\n/**\n* Returns a {@link MapTask} with the provided {@code instructions} and default values everywhere\n* else.\n*/\nprivate MapTask defaultMapTask(List instructions) {\nMapTask mapTask =\nnew MapTask()\n.setStageName(DEFAULT_MAP_STAGE_NAME)\n.setSystemName(DEFAULT_MAP_SYSTEM_NAME)\n.setInstructions(instructions);\nmapTask.setFactory(Transport.getJsonFactory());\nreturn mapTask;\n}\nprivate Windmill.GetWorkResponse buildInput(String input, byte[] metadata) throws Exception {\nWindmill.GetWorkResponse.Builder builder = Windmill.GetWorkResponse.newBuilder();\nTextFormat.merge(input, builder);\nif (metadata != null) {\nWindmill.InputMessageBundle.Builder messageBundleBuilder =\nbuilder.getWorkBuilder(0).getWorkBuilder(0).getMessageBundlesBuilder(0);\nfor (Windmill.Message.Builder messageBuilder 
:\nmessageBundleBuilder.getMessagesBuilderList()) {\nmessageBuilder.setMetadata(addPaneTag(PaneInfo.NO_FIRING, metadata));\n}\n}\nreturn builder.build();\n}\nprivate Windmill.GetWorkResponse buildSessionInput(\nint workToken,\nlong inputWatermark,\nlong outputWatermark,\nList inputs,\nList timers)\nthrows Exception {\nWindmill.WorkItem.Builder builder = Windmill.WorkItem.newBuilder();\nbuilder.setKey(DEFAULT_KEY_BYTES);\nbuilder.setShardingKey(DEFAULT_SHARDING_KEY);\nbuilder.setCacheToken(1);\nbuilder.setWorkToken(workToken);\nbuilder.setOutputDataWatermark(outputWatermark * 1000);\nif (!inputs.isEmpty()) {\nInputMessageBundle.Builder messageBuilder =\nWindmill.InputMessageBundle.newBuilder()\n.setSourceComputationId(DEFAULT_SOURCE_COMPUTATION_ID);\nfor (Long input : inputs) {\nmessageBuilder.addMessages(\nWindmill.Message.newBuilder()\n.setTimestamp(input)\n.setData(ByteString.copyFromUtf8(dataStringForIndex(input)))\n.setMetadata(\naddPaneTag(\nPaneInfo.NO_FIRING,\nintervalWindowBytes(\nnew IntervalWindow(\nnew Instant(input),\nnew Instant(input).plus(Duration.millis(10)))))));\n}\nbuilder.addMessageBundles(messageBuilder);\n}\nif (!timers.isEmpty()) {\nbuilder.setTimers(Windmill.TimerBundle.newBuilder().addAllTimers(timers));\n}\nreturn Windmill.GetWorkResponse.newBuilder()\n.addWork(\nWindmill.ComputationWorkItems.newBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.setInputDataWatermark(inputWatermark * 1000)\n.addWork(builder))\n.build();\n}\nprivate Windmill.GetWorkResponse makeInput(int index, long timestamp) throws Exception {\nreturn makeInput(index, timestamp, keyStringForIndex(index), DEFAULT_SHARDING_KEY);\n}\nprivate Windmill.GetWorkResponse makeInput(\nint index, long timestamp, String key, long shardingKey) throws Exception {\nreturn buildInput(\n\"work {\"\n+ \" computation_id: \\\"\"\n+ DEFAULT_COMPUTATION_ID\n+ \"\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"\"\n+ key\n+ \"\\\"\"\n+ \" sharding_key: \"\n+ shardingKey\n+ \" work_token: \"\n+ index\n+ \" cache_token: 3\"\n+ \" hot_key_info {\"\n+ \" hot_key_age_usec: 1000000\"\n+ \" }\"\n+ \" message_bundles {\"\n+ \" source_computation_id: \\\"\"\n+ DEFAULT_SOURCE_COMPUTATION_ID\n+ \"\\\"\"\n+ \" messages {\"\n+ \" timestamp: \"\n+ timestamp\n+ \" data: \\\"data\"\n+ index\n+ \"\\\"\"\n+ \" }\"\n+ \" }\"\n+ \" }\"\n+ \"}\",\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(IntervalWindow.getCoder()),\nCollections.singletonList(DEFAULT_WINDOW)));\n}\n/**\n* Returns a {@link org.apache.beam.runners.dataflow.windmill.Windmill.WorkItemCommitRequest}\n* builder parsed from the provided text format proto.\n*/\nprivate WorkItemCommitRequest.Builder parseCommitRequest(String output) throws Exception {\nWorkItemCommitRequest.Builder builder = Windmill.WorkItemCommitRequest.newBuilder();\nTextFormat.merge(output, builder);\nreturn builder;\n}\n/** Sets the metadata of all the contained messages in this WorkItemCommitRequest. */\nprivate WorkItemCommitRequest.Builder setMessagesMetadata(\nPaneInfo pane, byte[] windowBytes, WorkItemCommitRequest.Builder builder) throws Exception {\nif (windowBytes != null) {\nKeyedMessageBundle.Builder bundles = builder.getOutputMessagesBuilder(0).getBundlesBuilder(0);\nfor (int i = 0; i < bundles.getMessagesCount(); i++) {\nbundles.getMessagesBuilder(i).setMetadata(addPaneTag(pane, windowBytes));\n}\n}\nreturn builder;\n}\n/** Reset value update timestamps to zero. 
*/\nprivate WorkItemCommitRequest.Builder setValuesTimestamps(WorkItemCommitRequest.Builder builder) {\nfor (int i = 0; i < builder.getValueUpdatesCount(); i++) {\nbuilder.getValueUpdatesBuilder(i).getValueBuilder().setTimestamp(0);\n}\nreturn builder;\n}\nprivate WorkItemCommitRequest.Builder makeExpectedOutput(int index, long timestamp)\nthrows Exception {\nreturn makeExpectedOutput(\nindex, timestamp, keyStringForIndex(index), DEFAULT_SHARDING_KEY, keyStringForIndex(index));\n}\nprivate WorkItemCommitRequest.Builder makeExpectedOutput(\nint index, long timestamp, String key, long shardingKey, String outKey) throws Exception {\nStringBuilder expectedCommitRequestBuilder =\ninitializeExpectedCommitRequest(key, shardingKey, index);\nappendCommitOutputMessages(expectedCommitRequestBuilder, index, timestamp, outKey);\nreturn setMessagesMetadata(\nPaneInfo.NO_FIRING,\nintervalWindowBytes(DEFAULT_WINDOW),\nparseCommitRequest(expectedCommitRequestBuilder.toString()));\n}\nprivate WorkItemCommitRequest removeDynamicFields(WorkItemCommitRequest request) {\nreturn request.toBuilder().clearPerWorkItemLatencyAttributions().build();\n}\nprivate WorkItemCommitRequest.Builder makeExpectedTruncationRequestOutput(\nint index, String key, long shardingKey, long estimatedSize) throws Exception {\nStringBuilder expectedCommitRequestBuilder =\ninitializeExpectedCommitRequest(key, shardingKey, index, false);\nappendCommitTruncationFields(expectedCommitRequestBuilder, estimatedSize);\nreturn parseCommitRequest(expectedCommitRequestBuilder.toString());\n}\nprivate StringBuilder initializeExpectedCommitRequest(\nString key, long shardingKey, int index, Boolean hasSourceBytesProcessed) {\nStringBuilder requestBuilder = new StringBuilder();\nrequestBuilder.append(\"key: \\\"\");\nrequestBuilder.append(key);\nrequestBuilder.append(\"\\\" \");\nrequestBuilder.append(\"sharding_key: \");\nrequestBuilder.append(shardingKey);\nrequestBuilder.append(\" \");\nrequestBuilder.append(\"work_token: \");\nrequestBuilder.append(index);\nrequestBuilder.append(\" \");\nrequestBuilder.append(\"cache_token: 3 \");\nif (hasSourceBytesProcessed) requestBuilder.append(\"source_bytes_processed: 0 \");\nreturn requestBuilder;\n}\nprivate StringBuilder initializeExpectedCommitRequest(String key, long shardingKey, int index) {\nreturn initializeExpectedCommitRequest(key, shardingKey, index, true);\n}\nprivate StringBuilder appendCommitOutputMessages(\nStringBuilder requestBuilder, int index, long timestamp, String outKey) {\nrequestBuilder.append(\"output_messages {\");\nrequestBuilder.append(\" destination_stream_id: \\\"\");\nrequestBuilder.append(DEFAULT_DESTINATION_STREAM_ID);\nrequestBuilder.append(\"\\\"\");\nrequestBuilder.append(\" bundles {\");\nrequestBuilder.append(\" key: \\\"\");\nrequestBuilder.append(outKey);\nrequestBuilder.append(\"\\\"\");\nrequestBuilder.append(\" messages {\");\nrequestBuilder.append(\" timestamp: \");\nrequestBuilder.append(timestamp);\nrequestBuilder.append(\" data: \\\"\");\nrequestBuilder.append(dataStringForIndex(index));\nrequestBuilder.append(\"\\\"\");\nrequestBuilder.append(\" metadata: \\\"\\\"\");\nrequestBuilder.append(\" }\");\nrequestBuilder.append(\" messages_ids: \\\"\\\"\");\nrequestBuilder.append(\" }\");\nrequestBuilder.append(\"}\");\nreturn requestBuilder;\n}\nprivate StringBuilder appendCommitTruncationFields(\nStringBuilder requestBuilder, long estimatedSize) {\nrequestBuilder.append(\"exceeds_max_work_item_commit_bytes: true 
\");\nrequestBuilder.append(\"estimated_work_item_commit_bytes: \");\nrequestBuilder.append(estimatedSize);\nreturn requestBuilder;\n}\nprivate StreamingComputationConfig makeDefaultStreamingComputationConfig(\nList instructions) {\nStreamingComputationConfig config = new StreamingComputationConfig();\nconfig.setComputationId(DEFAULT_COMPUTATION_ID);\nconfig.setSystemName(DEFAULT_MAP_SYSTEM_NAME);\nconfig.setStageName(DEFAULT_MAP_STAGE_NAME);\nconfig.setInstructions(instructions);\nreturn config;\n}\nprivate ByteString addPaneTag(PaneInfo pane, byte[] windowBytes) throws IOException {\nByteStringOutputStream output = new ByteStringOutputStream();\nPaneInfo.PaneInfoCoder.INSTANCE.encode(pane, output, Context.OUTER);\noutput.write(windowBytes);\nreturn output.toByteString();\n}\nprivate StreamingDataflowWorkerOptions createTestingPipelineOptions(\nFakeWindmillServer server, String... args) {\nList argsList = Lists.newArrayList(args);\nif (streamingEngine) {\nargsList.add(\"--experiments=enable_streaming_engine\");\n}\nStreamingDataflowWorkerOptions options =\nPipelineOptionsFactory.fromArgs(argsList.toArray(new String[0]))\n.as(StreamingDataflowWorkerOptions.class);\noptions.setAppName(\"StreamingWorkerHarnessTest\");\noptions.setJobId(\"test_job_id\");\noptions.setStreaming(true);\noptions.setWindmillServerStub(server);\noptions.setActiveWorkRefreshPeriodMillis(0);\nreturn options;\n}\nprivate StreamingDataflowWorker makeWorker(\nList instructions,\nStreamingDataflowWorkerOptions options,\nboolean publishCounters,\nSupplier clock,\nFunction executorSupplier)\nthrows Exception {\nStreamingDataflowWorker worker =\nnew StreamingDataflowWorker(\nCollections.singletonList(defaultMapTask(instructions)),\nIntrinsicMapTaskExecutorFactory.defaultFactory(),\nmockWorkUnitClient,\noptions,\npublishCounters,\nhotKeyLogger,\nclock,\nexecutorSupplier);\nworker.addStateNameMappings(\nImmutableMap.of(DEFAULT_PARDO_USER_NAME, DEFAULT_PARDO_STATE_FAMILY));\nreturn worker;\n}\nprivate StreamingDataflowWorker makeWorker(\nList instructions,\nStreamingDataflowWorkerOptions options,\nboolean publishCounters)\nthrows Exception {\nreturn makeWorker(\ninstructions,\noptions,\npublishCounters,\nInstant::now,\n(threadName) -> Executors.newSingleThreadScheduledExecutor());\n}\n@Test\npublic void testBasicHarness() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfinal int numIters = 2000;\nfor (int i = 0; i < numIters; ++i) {\nserver.whenGetWorkCalled().thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)));\n}\nMap result = server.waitForAndGetCommits(numIters);\nworker.stop();\nfor (int i = 0; i < numIters; ++i) {\nassertTrue(result.containsKey((long) i));\nassertEquals(\nmakeExpectedOutput(i, TimeUnit.MILLISECONDS.toMicros(i)).build(),\nremoveDynamicFields(result.get((long) i)));\n}\nverify(hotKeyLogger, atLeastOnce()).logHotKeyDetection(nullable(String.class), any());\n}\n@Test\npublic void testBasic() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new 
FakeWindmillServer(errorCollector);\nserver.setIsReady(false);\nStreamingConfigTask streamingConfig = new StreamingConfigTask();\nstreamingConfig.setStreamingComputationConfigs(\nImmutableList.of(makeDefaultStreamingComputationConfig(instructions)));\nstreamingConfig.setWindmillServiceEndpoint(\"foo\");\nWorkItem workItem = new WorkItem();\nworkItem.setStreamingConfigTask(streamingConfig);\nwhen(mockWorkUnitClient.getGlobalStreamingConfigWorkItem()).thenReturn(Optional.of(workItem));\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfinal int numIters = 2000;\nfor (int i = 0; i < numIters; ++i) {\nserver.whenGetWorkCalled().thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)));\n}\nMap result = server.waitForAndGetCommits(numIters);\nworker.stop();\nfor (int i = 0; i < numIters; ++i) {\nassertTrue(result.containsKey((long) i));\nassertEquals(\nmakeExpectedOutput(i, TimeUnit.MILLISECONDS.toMicros(i)).build(),\nremoveDynamicFields(result.get((long) i)));\n}\nverify(hotKeyLogger, atLeastOnce()).logHotKeyDetection(nullable(String.class), any());\n}\n@Test\npublic void testHotKeyLogging() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of())),\nmakeSinkInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setIsReady(false);\nStreamingConfigTask streamingConfig = new StreamingConfigTask();\nstreamingConfig.setStreamingComputationConfigs(\nImmutableList.of(makeDefaultStreamingComputationConfig(instructions)));\nstreamingConfig.setWindmillServiceEndpoint(\"foo\");\nWorkItem workItem = new WorkItem();\nworkItem.setStreamingConfigTask(streamingConfig);\nwhen(mockWorkUnitClient.getGlobalStreamingConfigWorkItem()).thenReturn(Optional.of(workItem));\nStreamingDataflowWorkerOptions options =\ncreateTestingPipelineOptions(server, \"--hotKeyLoggingEnabled=true\");\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfinal int numIters = 2000;\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i), \"key\", DEFAULT_SHARDING_KEY));\n}\nserver.waitForAndGetCommits(numIters);\nworker.stop();\nverify(hotKeyLogger, atLeastOnce())\n.logHotKeyDetection(nullable(String.class), any(), eq(\"key\"));\n}\n@Test\npublic void testHotKeyLoggingNotEnabled() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of())),\nmakeSinkInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setIsReady(false);\nStreamingConfigTask streamingConfig = new StreamingConfigTask();\nstreamingConfig.setStreamingComputationConfigs(\nImmutableList.of(makeDefaultStreamingComputationConfig(instructions)));\nstreamingConfig.setWindmillServiceEndpoint(\"foo\");\nWorkItem workItem = new WorkItem();\nworkItem.setStreamingConfigTask(streamingConfig);\nwhen(mockWorkUnitClient.getGlobalStreamingConfigWorkItem()).thenReturn(Optional.of(workItem));\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters 
*/);\nworker.start();\nfinal int numIters = 2000;\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i), \"key\", DEFAULT_SHARDING_KEY));\n}\nserver.waitForAndGetCommits(numIters);\nworker.stop();\nverify(hotKeyLogger, atLeastOnce()).logHotKeyDetection(nullable(String.class), any());\n}\n@Test\npublic void testIgnoreRetriedKeys() throws Exception {\nfinal int numIters = 4;\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(blockingFn, 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(\nmakeInput(\ni, TimeUnit.MILLISECONDS.toMicros(i), keyStringForIndex(i), DEFAULT_SHARDING_KEY))\n.thenReturn(\nmakeInput(\ni + 1000,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY + 1));\n}\nBlockingFn.counter.acquire(numIters * 2);\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)))\n.thenReturn(\nmakeInput(\ni + 1000,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY + 1));\n}\nserver.waitForEmptyWorkQueue();\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(\nmakeInput(\ni + numIters,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY));\n}\nserver.waitForEmptyWorkQueue();\nBlockingFn.blocker.countDown();\nMap result = server.waitForAndGetCommits(numIters * 3);\nfor (int i = 0; i < numIters; ++i) {\nassertTrue(result.containsKey((long) i));\nassertEquals(\nmakeExpectedOutput(i, TimeUnit.MILLISECONDS.toMicros(i)).build(),\nremoveDynamicFields(result.get((long) i)));\nassertTrue(result.containsKey((long) i + 1000));\nassertEquals(\nmakeExpectedOutput(\ni + 1000,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY + 1,\nkeyStringForIndex(i))\n.build(),\nremoveDynamicFields(result.get((long) i + 1000)));\nassertTrue(result.containsKey((long) i + numIters));\nassertEquals(\nmakeExpectedOutput(\ni + numIters,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY,\nkeyStringForIndex(i))\n.build(),\nremoveDynamicFields(result.get((long) i + numIters)));\n}\nfor (int i = 0; i < numIters; ++i) {\nserver\n.whenGetWorkCalled()\n.thenReturn(\nmakeInput(\ni + numIters * 2,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY));\n}\nresult = server.waitForAndGetCommits(numIters);\nworker.stop();\nfor (int i = 0; i < numIters; ++i) {\nassertTrue(result.containsKey((long) i + numIters * 2));\nassertEquals(\nmakeExpectedOutput(\ni + numIters * 2,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY,\nkeyStringForIndex(i))\n.build(),\nremoveDynamicFields(result.get((long) i + numIters * 2)));\n}\n}\n@Test(timeout = 10000)\npublic void testNumberOfWorkerHarnessThreadsIsHonored() throws Exception {\nint expectedNumberOfThreads = 5;\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(blockingFn, 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 
0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setNumberOfWorkerHarnessThreads(expectedNumberOfThreads);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nfor (int i = 0; i < expectedNumberOfThreads * 2; ++i) {\nserver.whenGetWorkCalled().thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)));\n}\nBlockingFn.counter.acquire(expectedNumberOfThreads);\nif (BlockingFn.counter.tryAcquire(500, TimeUnit.MILLISECONDS)) {\nfail(\n\"Expected number of threads \"\n+ expectedNumberOfThreads\n+ \" does not match actual \"\n+ \"number of work items processed concurrently \"\n+ BlockingFn.callCounter.get()\n+ \".\");\n}\nBlockingFn.blocker.countDown();\n}\n@Test\npublic void testKeyTokenInvalidException() throws Exception {\nif (streamingEngine) {\nreturn;\n}\nKvCoder kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nList instructions =\nArrays.asList(\nmakeSourceInstruction(kvCoder),\nmakeDoFnInstruction(new KeyTokenInvalidFn(), 0, kvCoder),\nmakeSinkInstruction(kvCoder, 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(0, 0, DEFAULT_KEY_STRING, DEFAULT_SHARDING_KEY));\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);\nworker.start();\nserver.waitForEmptyWorkQueue();\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(1, 0, DEFAULT_KEY_STRING, DEFAULT_SHARDING_KEY));\nMap result = server.waitForAndGetCommits(1);\nassertEquals(\nmakeExpectedOutput(1, 0, DEFAULT_KEY_STRING, DEFAULT_SHARDING_KEY, DEFAULT_KEY_STRING)\n.build(),\nremoveDynamicFields(result.get(1L)));\nassertEquals(1, result.size());\n}\n@Test\npublic void testKeyCommitTooLargeException() throws Exception {\nKvCoder kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nList instructions =\nArrays.asList(\nmakeSourceInstruction(kvCoder),\nmakeDoFnInstruction(new LargeCommitFn(), 0, kvCoder),\nmakeSinkInstruction(kvCoder, 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setExpectedExceptionCount(1);\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);\nworker.setMaxWorkItemCommitBytes(1000);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(1, 0, \"large_key\", DEFAULT_SHARDING_KEY))\n.thenReturn(makeInput(2, 0, \"key\", DEFAULT_SHARDING_KEY));\nserver.waitForEmptyWorkQueue();\nMap result = server.waitForAndGetCommits(1);\nassertEquals(2, result.size());\nassertEquals(\nmakeExpectedOutput(2, 0, \"key\", DEFAULT_SHARDING_KEY, \"key\").build(),\nremoveDynamicFields(result.get(2L)));\nassertTrue(result.containsKey(1L));\nWorkItemCommitRequest largeCommit = result.get(1L);\nassertEquals(\"large_key\", largeCommit.getKey().toStringUtf8());\nassertEquals(\nmakeExpectedTruncationRequestOutput(\n1, \"large_key\", DEFAULT_SHARDING_KEY, largeCommit.getEstimatedWorkItemCommitBytes())\n.build(),\nlargeCommit);\nassertTrue(largeCommit.getEstimatedWorkItemCommitBytes() > 1000);\nint maxTries = 10;\nwhile (--maxTries > 0) {\nworker.reportPeriodicWorkerUpdates();\nUninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS);\n}\nArgumentCaptor workItemStatusCaptor =\nArgumentCaptor.forClass(WorkItemStatus.class);\nverify(mockWorkUnitClient, 
atLeast(2)).reportWorkItemStatus(workItemStatusCaptor.capture());\nList capturedStatuses = workItemStatusCaptor.getAllValues();\nboolean foundErrors = false;\nfor (WorkItemStatus status : capturedStatuses) {\nif (!status.getErrors().isEmpty()) {\nassertFalse(foundErrors);\nfoundErrors = true;\nString errorMessage = status.getErrors().get(0).getMessage();\nassertThat(errorMessage, Matchers.containsString(\"KeyCommitTooLargeException\"));\n}\n}\nassertTrue(foundErrors);\n}\n@Test\npublic void testKeyChange() throws Exception {\nKvCoder kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nList instructions =\nArrays.asList(\nmakeSourceInstruction(kvCoder),\nmakeDoFnInstruction(new ChangeKeysFn(), 0, kvCoder),\nmakeSinkInstruction(kvCoder, 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nfor (int i = 0; i < 2; i++) {\nserver\n.whenGetWorkCalled()\n.thenReturn(\nmakeInput(\ni, TimeUnit.MILLISECONDS.toMicros(i), keyStringForIndex(i), DEFAULT_SHARDING_KEY))\n.thenReturn(\nmakeInput(\ni + 1000,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY + i));\n}\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);\nworker.start();\nMap result = server.waitForAndGetCommits(4);\nfor (int i = 0; i < 2; i++) {\nassertTrue(result.containsKey((long) i));\nassertEquals(\nmakeExpectedOutput(\ni,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY,\nkeyStringForIndex(i) + \"_data\" + i)\n.build(),\nremoveDynamicFields(result.get((long) i)));\nassertTrue(result.containsKey((long) i + 1000));\nassertEquals(\nmakeExpectedOutput(\ni + 1000,\nTimeUnit.MILLISECONDS.toMicros(i),\nkeyStringForIndex(i),\nDEFAULT_SHARDING_KEY + i,\nkeyStringForIndex(i) + \"_data\" + (i + 1000))\n.build(),\nremoveDynamicFields(result.get((long) i + 1000)));\n}\n}\n@Test(timeout = 30000)\npublic void testExceptions() throws Exception {\nif (streamingEngine) {\nreturn;\n}\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(new TestExceptionFn(), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setExpectedExceptionCount(1);\nString keyString = keyStringForIndex(0);\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"\"\n+ DEFAULT_COMPUTATION_ID\n+ \"\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"\"\n+ keyString\n+ \"\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 0\"\n+ \" cache_token: 1\"\n+ \" message_bundles {\"\n+ \" source_computation_id: \\\"\"\n+ DEFAULT_SOURCE_COMPUTATION_ID\n+ \"\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"0\\\"\"\n+ \" }\"\n+ \" }\"\n+ \" }\"\n+ \"}\",\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(IntervalWindow.getCoder()),\nCollections.singletonList(DEFAULT_WINDOW))));\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);\nworker.start();\nserver.waitForEmptyWorkQueue();\nint maxTries = 10;\nwhile (maxTries-- > 0 && !worker.workExecutorIsEmpty()) {\nUninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS);\n}\nassertTrue(worker.workExecutorIsEmpty());\nmaxTries = 10;\nwhile (maxTries-- > 0) {\nworker.reportPeriodicWorkerUpdates();\nUninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS);\n}\nArgumentCaptor 
workItemStatusCaptor =\nArgumentCaptor.forClass(WorkItemStatus.class);\nverify(mockWorkUnitClient, atLeast(1)).reportWorkItemStatus(workItemStatusCaptor.capture());\nList capturedStatuses = workItemStatusCaptor.getAllValues();\nboolean foundErrors = false;\nint lastUpdateWithoutErrors = 0;\nint lastUpdateWithErrors = 0;\nfor (WorkItemStatus status : capturedStatuses) {\nif (status.getErrors().isEmpty()) {\nlastUpdateWithoutErrors++;\ncontinue;\n}\nlastUpdateWithErrors++;\nassertFalse(foundErrors);\nfoundErrors = true;\nString stacktrace = status.getErrors().get(0).getMessage();\nassertThat(stacktrace, Matchers.containsString(\"Exception!\"));\nassertThat(stacktrace, Matchers.containsString(\"Another exception!\"));\nassertThat(stacktrace, Matchers.containsString(\"processElement\"));\n}\nassertTrue(foundErrors);\nassertTrue(lastUpdateWithoutErrors > lastUpdateWithErrors);\nassertThat(server.getStatsReceived().size(), Matchers.greaterThanOrEqualTo(1));\nWindmill.ReportStatsRequest stats = server.getStatsReceived().get(0);\nassertEquals(DEFAULT_COMPUTATION_ID, stats.getComputationId());\nassertEquals(keyString, stats.getKey().toStringUtf8());\nassertEquals(0, stats.getWorkToken());\nassertEquals(1, stats.getShardingKey());\n}\n@Test\npublic void testAssignWindows() throws Exception {\nDuration gapDuration = Duration.standardSeconds(1);\nCloudObject spec = CloudObject.forClassName(\"AssignWindowsDoFn\");\nSdkComponents sdkComponents = SdkComponents.create();\nsdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);\naddString(\nspec,\nPropertyNames.SERIALIZED_FN,\nStringUtils.byteArrayToJsonString(\nWindowingStrategyTranslation.toMessageProto(\nWindowingStrategy.of(FixedWindows.of(gapDuration)), sdkComponents)\n.toByteArray()));\nParallelInstruction addWindowsInstruction =\nnew ParallelInstruction()\n.setSystemName(\"AssignWindows\")\n.setName(\"AssignWindows\")\n.setOriginalName(\"AssignWindowsOriginal\")\n.setParDo(\nnew ParDoInstruction()\n.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))\n.setNumOutputs(1)\n.setUserFn(spec))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setName(\"output\")\n.setCodec(\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(\nStringUtf8Coder.of(), IntervalWindow.getCoder()),\n/* sdkComponents= */ null))));\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\naddWindowsInstruction,\nmakeSinkInstruction(StringUtf8Coder.of(), 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nint timestamp1 = 0;\nint timestamp2 = 1000000;\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(timestamp1, timestamp1))\n.thenReturn(makeInput(timestamp2, timestamp2));\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);\nworker.start();\nMap result = server.waitForAndGetCommits(2);\nassertThat(\nremoveDynamicFields(result.get((long) timestamp1)),\nequalTo(\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nintervalWindowBytes(WINDOW_AT_ZERO),\nmakeExpectedOutput(timestamp1, timestamp1))\n.build()));\nassertThat(\nremoveDynamicFields(result.get((long) timestamp2)),\nequalTo(\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nintervalWindowBytes(WINDOW_AT_ONE_SECOND),\nmakeExpectedOutput(timestamp2, timestamp2))\n.build()));\n}\nprivate void verifyTimers(WorkItemCommitRequest commit, Timer... 
timers) {\nassertThat(commit.getOutputTimersList(), Matchers.containsInAnyOrder(timers));\n}\nprivate void verifyHolds(WorkItemCommitRequest commit, WatermarkHold... watermarkHolds) {\nassertThat(commit.getWatermarkHoldsList(), Matchers.containsInAnyOrder(watermarkHolds));\n}\nprivate Timer buildWatermarkTimer(String tagPrefix, long timestampMillis) {\nreturn buildWatermarkTimer(tagPrefix, timestampMillis, false);\n}\nprivate Timer buildWatermarkTimer(String tagPrefix, long timestampMillis, boolean delete) {\nTimer.Builder builder =\nTimer.newBuilder()\n.setTag(ByteString.copyFromUtf8(tagPrefix + \":\" + timestampMillis))\n.setType(Type.WATERMARK)\n.setStateFamily(\"MergeWindows\");\nif (!delete) {\nbuilder.setTimestamp(timestampMillis * 1000);\nbuilder.setMetadataTimestamp(timestampMillis * 1000);\n}\nreturn builder.build();\n}\nprivate WatermarkHold buildHold(String tag, long timestamp, boolean reset) {\nWatermarkHold.Builder builder =\nWatermarkHold.newBuilder()\n.setTag(ByteString.copyFromUtf8(tag))\n.setStateFamily(\"MergeWindows\");\nif (reset) {\nbuilder.setReset(true);\n}\nif (timestamp >= 0) {\nbuilder.addTimestamps(timestamp * 1000);\n}\nreturn builder.build();\n}\n@Test\npublic void testMergeWindows() throws Exception {\nCoder> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nCoder>> windowedKvCoder =\nFullWindowedValueCoder.of(kvCoder, IntervalWindow.getCoder());\nKvCoder> groupedCoder =\nKvCoder.of(StringUtf8Coder.of(), ListCoder.of(StringUtf8Coder.of()));\nCoder>>> windowedGroupedCoder =\nFullWindowedValueCoder.of(groupedCoder, IntervalWindow.getCoder());\nCloudObject spec = CloudObject.forClassName(\"MergeWindowsDoFn\");\nSdkComponents sdkComponents = SdkComponents.create();\nsdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);\naddString(\nspec,\nPropertyNames.SERIALIZED_FN,\nStringUtils.byteArrayToJsonString(\nWindowingStrategyTranslation.toMessageProto(\nWindowingStrategy.of(FixedWindows.of(Duration.standardSeconds(1)))\n.withTimestampCombiner(TimestampCombiner.EARLIEST),\nsdkComponents)\n.toByteArray()));\naddObject(\nspec,\nWorkerPropertyNames.INPUT_CODER,\nCloudObjects.asCloudObject(windowedKvCoder, /* sdkComponents= */ null));\nParallelInstruction mergeWindowsInstruction =\nnew ParallelInstruction()\n.setSystemName(\"MergeWindows-System\")\n.setName(\"MergeWindowsStep\")\n.setOriginalName(\"MergeWindowsOriginal\")\n.setParDo(\nnew ParDoInstruction()\n.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))\n.setNumOutputs(1)\n.setUserFn(spec))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setName(\"output\")\n.setCodec(\nCloudObjects.asCloudObject(\nwindowedGroupedCoder, /* sdkComponents= */ null))));\nList instructions =\nArrays.asList(\nmakeWindowingSourceInstruction(kvCoder),\nmergeWindowsInstruction,\nmakeSinkInstruction(groupedCoder, 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);\nMap nameMap = new HashMap<>();\nnameMap.put(\"MergeWindowsStep\", \"MergeWindows\");\nworker.addStateNameMappings(nameMap);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"\"\n+ DEFAULT_COMPUTATION_ID\n+ \"\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"\"\n+ 
DEFAULT_KEY_STRING\n+ \"\\\"\"\n+ \" sharding_key: \"\n+ DEFAULT_SHARDING_KEY\n+ \" cache_token: 1\"\n+ \" work_token: 1\"\n+ \" message_bundles {\"\n+ \" source_computation_id: \\\"\"\n+ DEFAULT_SOURCE_COMPUTATION_ID\n+ \"\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"\"\n+ dataStringForIndex(0)\n+ \"\\\"\"\n+ \" }\"\n+ \" }\"\n+ \" }\"\n+ \"}\",\nintervalWindowBytes(WINDOW_AT_ZERO)));\nMap result = server.waitForAndGetCommits(1);\nIterable counters = worker.buildCounters();\nString window = \"/gAAAAAAAA-joBw/\";\nString timerTagPrefix = \"/s\" + window + \"+0\";\nByteString bufferTag = ByteString.copyFromUtf8(window + \"+ubuf\");\nByteString paneInfoTag = ByteString.copyFromUtf8(window + \"+upane\");\nString watermarkDataHoldTag = window + \"+uhold\";\nString watermarkExtraHoldTag = window + \"+uextra\";\nString stateFamily = \"MergeWindows\";\nByteString bufferData = ByteString.copyFromUtf8(\"data0\");\nByteString outputData =\nByteString.copyFrom(\nnew byte[] {\n(byte) 0xff,\n(byte) 0xff,\n(byte) 0xff,\n(byte) 0xff,\n0x01,\n0x05,\n0x64,\n0x61,\n0x74,\n0x61,\n0x30,\n0x00\n});\nlong timerTimestamp = 999000L;\nWorkItemCommitRequest actualOutput = result.get(1L);\nverifyTimers(actualOutput, buildWatermarkTimer(timerTagPrefix, 999));\nassertThat(\nactualOutput.getBagUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagBag.newBuilder()\n.setTag(bufferTag)\n.setStateFamily(stateFamily)\n.addValues(bufferData)\n.build())));\nverifyHolds(actualOutput, buildHold(watermarkDataHoldTag, 0, false));\nassertEquals(0L, splitIntToLong(getCounter(counters, \"WindmillStateBytesRead\").getInteger()));\nassertEquals(\nWindmill.WorkItemCommitRequest.newBuilder(actualOutput)\n.clearCounterUpdates()\n.clearOutputMessages()\n.clearPerWorkItemLatencyAttributions()\n.build()\n.getSerializedSize(),\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesWritten\").getInteger()));\nassertEquals(\nVarInt.getLength(0L)\n+ dataStringForIndex(0).length()\n+ addPaneTag(PaneInfo.NO_FIRING, intervalWindowBytes(WINDOW_AT_ZERO)).size()\n+ 5L\n,\nsplitIntToLong(getCounter(counters, \"WindmillShuffleBytesRead\").getInteger()));\nWindmill.GetWorkResponse.Builder getWorkResponse = Windmill.GetWorkResponse.newBuilder();\ngetWorkResponse\n.addWorkBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.setInputDataWatermark(timerTimestamp + 1000)\n.addWorkBuilder()\n.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))\n.setShardingKey(DEFAULT_SHARDING_KEY)\n.setWorkToken(2)\n.setCacheToken(1)\n.getTimersBuilder()\n.addTimers(buildWatermarkTimer(timerTagPrefix, timerTimestamp));\nserver.whenGetWorkCalled().thenReturn(getWorkResponse.build());\nlong expectedBytesRead = 0L;\nWindmill.GetDataResponse.Builder dataResponse = Windmill.GetDataResponse.newBuilder();\nWindmill.KeyedGetDataResponse.Builder dataBuilder 
=\ndataResponse\n.addDataBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.addDataBuilder()\n.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))\n.setShardingKey(DEFAULT_SHARDING_KEY);\ndataBuilder\n.addBagsBuilder()\n.setTag(bufferTag)\n.setStateFamily(stateFamily)\n.addValues(bufferData);\ndataBuilder\n.addWatermarkHoldsBuilder()\n.setTag(ByteString.copyFromUtf8(watermarkDataHoldTag))\n.setStateFamily(stateFamily)\n.addTimestamps(0);\ndataBuilder\n.addWatermarkHoldsBuilder()\n.setTag(ByteString.copyFromUtf8(watermarkExtraHoldTag))\n.setStateFamily(stateFamily)\n.addTimestamps(0);\ndataBuilder\n.addValuesBuilder()\n.setTag(paneInfoTag)\n.setStateFamily(stateFamily)\n.getValueBuilder()\n.setTimestamp(0)\n.setData(ByteString.EMPTY);\nserver.whenGetDataCalled().thenReturn(dataResponse.build());\nexpectedBytesRead += dataBuilder.build().getSerializedSize();\nresult = server.waitForAndGetCommits(1);\ncounters = worker.buildCounters();\nactualOutput = result.get(2L);\nassertEquals(1, actualOutput.getOutputMessagesCount());\nassertEquals(\nDEFAULT_DESTINATION_STREAM_ID, actualOutput.getOutputMessages(0).getDestinationStreamId());\nassertEquals(\nDEFAULT_KEY_STRING,\nactualOutput.getOutputMessages(0).getBundles(0).getKey().toStringUtf8());\nassertEquals(0, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getTimestamp());\nassertEquals(\noutputData, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getData());\nByteString metadata =\nactualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getMetadata();\nInputStream inStream = metadata.newInput();\nassertEquals(\nPaneInfo.createPane(true, true, Timing.ON_TIME), PaneInfoCoder.INSTANCE.decode(inStream));\nassertEquals(\nCollections.singletonList(WINDOW_AT_ZERO),\nDEFAULT_WINDOW_COLLECTION_CODER.decode(inStream, Coder.Context.OUTER));\nassertThat(\n\"\" + actualOutput.getValueUpdatesList(),\nactualOutput.getValueUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagValue.newBuilder()\n.setTag(paneInfoTag)\n.setStateFamily(stateFamily)\n.setValue(\nWindmill.Value.newBuilder()\n.setTimestamp(Long.MAX_VALUE)\n.setData(ByteString.EMPTY))\n.build())));\nassertThat(\n\"\" + actualOutput.getBagUpdatesList(),\nactualOutput.getBagUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagBag.newBuilder()\n.setTag(bufferTag)\n.setStateFamily(stateFamily)\n.setDeleteAll(true)\n.build())));\nverifyHolds(\nactualOutput,\nbuildHold(watermarkDataHoldTag, -1, true),\nbuildHold(watermarkExtraHoldTag, -1, true));\nassertEquals(\nexpectedBytesRead,\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesRead\").getInteger()));\nassertEquals(\nWindmill.WorkItemCommitRequest.newBuilder(removeDynamicFields(actualOutput))\n.clearCounterUpdates()\n.clearOutputMessages()\n.build()\n.getSerializedSize(),\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesWritten\").getInteger()));\nassertEquals(0L, splitIntToLong(getCounter(counters, \"WindmillShuffleBytesRead\").getInteger()));\n}\n@Test\npublic void testMergeWindowsCaching() throws Exception {\nCoder> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nCoder>> windowedKvCoder =\nFullWindowedValueCoder.of(kvCoder, IntervalWindow.getCoder());\nKvCoder> groupedCoder =\nKvCoder.of(StringUtf8Coder.of(), ListCoder.of(StringUtf8Coder.of()));\nCoder>>> windowedGroupedCoder =\nFullWindowedValueCoder.of(groupedCoder, IntervalWindow.getCoder());\nCloudObject spec = CloudObject.forClassName(\"MergeWindowsDoFn\");\nSdkComponents sdkComponents = 
SdkComponents.create();\nsdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);\naddString(\nspec,\nPropertyNames.SERIALIZED_FN,\nStringUtils.byteArrayToJsonString(\nWindowingStrategyTranslation.toMessageProto(\nWindowingStrategy.of(FixedWindows.of(Duration.standardSeconds(1)))\n.withTimestampCombiner(TimestampCombiner.EARLIEST),\nsdkComponents)\n.toByteArray()));\naddObject(\nspec,\nWorkerPropertyNames.INPUT_CODER,\nCloudObjects.asCloudObject(windowedKvCoder, /* sdkComponents= */ null));\nParallelInstruction mergeWindowsInstruction =\nnew ParallelInstruction()\n.setSystemName(\"MergeWindows-System\")\n.setName(\"MergeWindowsStep\")\n.setOriginalName(\"MergeWindowsOriginal\")\n.setParDo(\nnew ParDoInstruction()\n.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))\n.setNumOutputs(1)\n.setUserFn(spec))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setName(\"output\")\n.setCodec(\nCloudObjects.asCloudObject(\nwindowedGroupedCoder, /* sdkComponents= */ null))));\nList instructions =\nArrays.asList(\nmakeWindowingSourceInstruction(kvCoder),\nmergeWindowsInstruction,\nmakeDoFnInstruction(new PassthroughDoFn(), 1, groupedCoder),\nmakeSinkInstruction(groupedCoder, 2));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);\nMap nameMap = new HashMap<>();\nnameMap.put(\"MergeWindowsStep\", \"MergeWindows\");\nworker.addStateNameMappings(nameMap);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"\"\n+ DEFAULT_COMPUTATION_ID\n+ \"\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"\"\n+ DEFAULT_KEY_STRING\n+ \"\\\"\"\n+ \" sharding_key: \"\n+ DEFAULT_SHARDING_KEY\n+ \" cache_token: 1\"\n+ \" work_token: 1\"\n+ \" is_new_key: 1\"\n+ \" message_bundles {\"\n+ \" source_computation_id: \\\"\"\n+ DEFAULT_SOURCE_COMPUTATION_ID\n+ \"\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"\"\n+ dataStringForIndex(0)\n+ \"\\\"\"\n+ \" }\"\n+ \" }\"\n+ \" }\"\n+ \"}\",\nintervalWindowBytes(WINDOW_AT_ZERO)));\nMap result = server.waitForAndGetCommits(1);\nIterable counters = worker.buildCounters();\nString window = \"/gAAAAAAAA-joBw/\";\nString timerTagPrefix = \"/s\" + window + \"+0\";\nByteString bufferTag = ByteString.copyFromUtf8(window + \"+ubuf\");\nByteString paneInfoTag = ByteString.copyFromUtf8(window + \"+upane\");\nString watermarkDataHoldTag = window + \"+uhold\";\nString watermarkExtraHoldTag = window + \"+uextra\";\nString stateFamily = \"MergeWindows\";\nByteString bufferData = ByteString.copyFromUtf8(\"data0\");\nByteString outputData =\nByteString.copyFrom(\nnew byte[] {\n(byte) 0xff,\n(byte) 0xff,\n(byte) 0xff,\n(byte) 0xff,\n0x01,\n0x05,\n0x64,\n0x61,\n0x74,\n0x61,\n0x30,\n0x00\n});\nlong timerTimestamp = 999000L;\nWorkItemCommitRequest actualOutput = result.get(1L);\nverifyTimers(actualOutput, buildWatermarkTimer(timerTagPrefix, 999));\nassertThat(\nactualOutput.getBagUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagBag.newBuilder()\n.setTag(bufferTag)\n.setStateFamily(stateFamily)\n.addValues(bufferData)\n.build())));\nverifyHolds(actualOutput, buildHold(watermarkDataHoldTag, 0, false));\nassertEquals(0L, splitIntToLong(getCounter(counters, 
\"WindmillStateBytesRead\").getInteger()));\nassertEquals(\nWindmill.WorkItemCommitRequest.newBuilder(removeDynamicFields(actualOutput))\n.clearCounterUpdates()\n.clearOutputMessages()\n.build()\n.getSerializedSize(),\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesWritten\").getInteger()));\nassertEquals(\nVarInt.getLength(0L)\n+ dataStringForIndex(0).length()\n+ addPaneTag(PaneInfo.NO_FIRING, intervalWindowBytes(WINDOW_AT_ZERO)).size()\n+ 5L\n,\nsplitIntToLong(getCounter(counters, \"WindmillShuffleBytesRead\").getInteger()));\nWindmill.GetWorkResponse.Builder getWorkResponse = Windmill.GetWorkResponse.newBuilder();\ngetWorkResponse\n.addWorkBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.setInputDataWatermark(timerTimestamp + 1000)\n.addWorkBuilder()\n.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))\n.setShardingKey(DEFAULT_SHARDING_KEY)\n.setWorkToken(2)\n.setCacheToken(1)\n.getTimersBuilder()\n.addTimers(buildWatermarkTimer(timerTagPrefix, timerTimestamp));\nserver.whenGetWorkCalled().thenReturn(getWorkResponse.build());\nlong expectedBytesRead = 0L;\nWindmill.GetDataResponse.Builder dataResponse = Windmill.GetDataResponse.newBuilder();\nWindmill.KeyedGetDataResponse.Builder dataBuilder =\ndataResponse\n.addDataBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.addDataBuilder()\n.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))\n.setShardingKey(DEFAULT_SHARDING_KEY);\ndataBuilder\n.addWatermarkHoldsBuilder()\n.setTag(ByteString.copyFromUtf8(watermarkExtraHoldTag))\n.setStateFamily(stateFamily)\n.addTimestamps(0);\ndataBuilder\n.addValuesBuilder()\n.setTag(paneInfoTag)\n.setStateFamily(stateFamily)\n.getValueBuilder()\n.setTimestamp(0)\n.setData(ByteString.EMPTY);\nserver.whenGetDataCalled().thenReturn(dataResponse.build());\nexpectedBytesRead += dataBuilder.build().getSerializedSize();\nresult = server.waitForAndGetCommits(1);\ncounters = worker.buildCounters();\nactualOutput = result.get(2L);\nassertEquals(1, actualOutput.getOutputMessagesCount());\nassertEquals(\nDEFAULT_DESTINATION_STREAM_ID, actualOutput.getOutputMessages(0).getDestinationStreamId());\nassertEquals(\nDEFAULT_KEY_STRING,\nactualOutput.getOutputMessages(0).getBundles(0).getKey().toStringUtf8());\nassertEquals(0, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getTimestamp());\nassertEquals(\noutputData, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getData());\nByteString metadata =\nactualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getMetadata();\nInputStream inStream = metadata.newInput();\nassertEquals(\nPaneInfo.createPane(true, true, Timing.ON_TIME), PaneInfoCoder.INSTANCE.decode(inStream));\nassertEquals(\nCollections.singletonList(WINDOW_AT_ZERO),\nDEFAULT_WINDOW_COLLECTION_CODER.decode(inStream, Coder.Context.OUTER));\nassertThat(\n\"\" + actualOutput.getValueUpdatesList(),\nactualOutput.getValueUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagValue.newBuilder()\n.setTag(paneInfoTag)\n.setStateFamily(stateFamily)\n.setValue(\nWindmill.Value.newBuilder()\n.setTimestamp(Long.MAX_VALUE)\n.setData(ByteString.EMPTY))\n.build())));\nassertThat(\n\"\" + actualOutput.getBagUpdatesList(),\nactualOutput.getBagUpdatesList(),\nMatchers.contains(\nMatchers.equalTo(\nWindmill.TagBag.newBuilder()\n.setTag(bufferTag)\n.setStateFamily(stateFamily)\n.setDeleteAll(true)\n.build())));\nverifyHolds(\nactualOutput,\nbuildHold(watermarkDataHoldTag, -1, true),\nbuildHold(watermarkExtraHoldTag, -1, 
true));\nassertEquals(\nexpectedBytesRead,\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesRead\").getInteger()));\nassertEquals(\nWindmill.WorkItemCommitRequest.newBuilder(removeDynamicFields(actualOutput))\n.clearCounterUpdates()\n.clearOutputMessages()\n.build()\n.getSerializedSize(),\nsplitIntToLong(getCounter(counters, \"WindmillStateBytesWritten\").getInteger()));\nassertEquals(0L, splitIntToLong(getCounter(counters, \"WindmillShuffleBytesRead\").getInteger()));\nCacheStats stats = worker.stateCache.getCacheStats();\nLOG.info(\"cache stats {}\", stats);\nassertEquals(1, stats.hitCount());\nassertEquals(4, stats.missCount());\n}\nprivate void runMergeSessionsActions(List actions) throws Exception {\nCoder> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());\nCoder>> windowedKvCoder =\nFullWindowedValueCoder.of(kvCoder, IntervalWindow.getCoder());\nKvCoder> groupedCoder =\nKvCoder.of(StringUtf8Coder.of(), ListCoder.of(StringUtf8Coder.of()));\nCoder>>> windowedGroupedCoder =\nFullWindowedValueCoder.of(groupedCoder, IntervalWindow.getCoder());\nCloudObject spec = CloudObject.forClassName(\"MergeWindowsDoFn\");\nSdkComponents sdkComponents = SdkComponents.create();\nsdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);\naddString(\nspec,\nPropertyNames.SERIALIZED_FN,\nStringUtils.byteArrayToJsonString(\nWindowingStrategyTranslation.toMessageProto(\nWindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)))\n.withMode(AccumulationMode.DISCARDING_FIRED_PANES)\n.withTrigger(\nRepeatedly.forever(\nAfterWatermark.pastEndOfWindow()\n.withLateFirings(AfterPane.elementCountAtLeast(1))))\n.withAllowedLateness(Duration.standardMinutes(60)),\nsdkComponents)\n.toByteArray()));\naddObject(\nspec,\nWorkerPropertyNames.INPUT_CODER,\nCloudObjects.asCloudObject(windowedKvCoder, /* sdkComponents= */ null));\nParallelInstruction mergeWindowsInstruction =\nnew ParallelInstruction()\n.setSystemName(\"MergeWindows-System\")\n.setName(\"MergeWindowsStep\")\n.setOriginalName(\"MergeWindowsOriginal\")\n.setParDo(\nnew ParDoInstruction()\n.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))\n.setNumOutputs(1)\n.setUserFn(spec))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setName(\"output\")\n.setCodec(\nCloudObjects.asCloudObject(\nwindowedGroupedCoder, /* sdkComponents= */ null))));\nList instructions =\nArrays.asList(\nmakeWindowingSourceInstruction(kvCoder),\nmergeWindowsInstruction,\nmakeSinkInstruction(groupedCoder, 1));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);\nMap nameMap = new HashMap<>();\nnameMap.put(\"MergeWindowsStep\", \"MergeWindows\");\nworker.addStateNameMappings(nameMap);\nworker.start();\nserver.whenGetDataCalled().answerByDefault(EMPTY_DATA_RESPONDER);\nfor (int i = 0; i < actions.size(); ++i) {\nAction action = actions.get(i);\nserver.whenGetWorkCalled().thenReturn(action.response);\nMap result = server.waitForAndGetCommits(1);\nWorkItemCommitRequest actualOutput = result.get(i + 1L);\nassertThat(actualOutput, Matchers.not(Matchers.nullValue()));\nverifyTimers(actualOutput, action.expectedTimers);\nverifyHolds(actualOutput, action.expectedHolds);\n}\n}\n@Test\npublic void testMergeSessionWindows() throws Exception 
{\nrunMergeSessionsActions(\nCollections.singletonList(\nnew Action(\nbuildSessionInput(\n1, 40, 0, Collections.singletonList(1L), Collections.EMPTY_LIST))\n.withHolds(\nbuildHold(\"/gAAAAAAAAAsK/+uhold\", -1, true),\nbuildHold(\"/gAAAAAAAAAsK/+uextra\", -1, true))\n.withTimers(buildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 3600010))));\nrunMergeSessionsActions(\nArrays.asList(\nnew Action(\nbuildSessionInput(\n1, 0, 0, Collections.singletonList(1L), Collections.EMPTY_LIST))\n.withHolds(buildHold(\"/gAAAAAAAAAsK/+uhold\", 10, false))\n.withTimers(\nbuildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 10),\nbuildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 3600010)),\nnew Action(\nbuildSessionInput(\n2,\n30,\n0,\nCollections.EMPTY_LIST,\nCollections.singletonList(buildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 10))))\n.withTimers(buildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 3600010))\n.withHolds(\nbuildHold(\"/gAAAAAAAAAsK/+uhold\", -1, true),\nbuildHold(\"/gAAAAAAAAAsK/+uextra\", -1, true)),\nnew Action(\nbuildSessionInput(\n3, 30, 0, Collections.singletonList(8L), Collections.EMPTY_LIST))\n.withTimers(\nbuildWatermarkTimer(\"/s/gAAAAAAAABIR/+0\", 3600017),\nbuildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 10, true),\nbuildWatermarkTimer(\"/s/gAAAAAAAAAsK/+0\", 3600010, true))\n.withHolds(\nbuildHold(\"/gAAAAAAAAAsK/+uhold\", -1, true),\nbuildHold(\"/gAAAAAAAAAsK/+uextra\", -1, true)),\nnew Action(\nbuildSessionInput(\n4, 30, 0, Collections.singletonList(31L), Collections.EMPTY_LIST))\n.withTimers(\nbuildWatermarkTimer(\"/s/gAAAAAAAACkK/+0\", 3600040),\nbuildWatermarkTimer(\"/s/gAAAAAAAACkK/+0\", 40))\n.withHolds(buildHold(\"/gAAAAAAAACkK/+uhold\", 40, false)),\nnew Action(buildSessionInput(5, 30, 0, Arrays.asList(17L, 23L), Collections.EMPTY_LIST))\n.withTimers(\nbuildWatermarkTimer(\"/s/gAAAAAAAACkK/+0\", 3600040, true),\nbuildWatermarkTimer(\"/s/gAAAAAAAACkK/+0\", 40, true),\nbuildWatermarkTimer(\"/s/gAAAAAAAABIR/+0\", 3600017, true),\nbuildWatermarkTimer(\"/s/gAAAAAAAABIR/+0\", 17, true),\nbuildWatermarkTimer(\"/s/gAAAAAAAACko/+0\", 40),\nbuildWatermarkTimer(\"/s/gAAAAAAAACko/+0\", 3600040))\n.withHolds(\nbuildHold(\"/gAAAAAAAACkK/+uhold\", -1, true),\nbuildHold(\"/gAAAAAAAACkK/+uextra\", -1, true),\nbuildHold(\"/gAAAAAAAAAsK/+uhold\", 40, true),\nbuildHold(\"/gAAAAAAAAAsK/+uextra\", 3600040, true)),\nnew Action(\nbuildSessionInput(\n6,\n50,\n0,\nCollections.EMPTY_LIST,\nCollections.singletonList(buildWatermarkTimer(\"/s/gAAAAAAAACko/+0\", 40))))\n.withTimers(buildWatermarkTimer(\"/s/gAAAAAAAACko/+0\", 3600040))\n.withHolds(\nbuildHold(\"/gAAAAAAAAAsK/+uhold\", -1, true),\nbuildHold(\"/gAAAAAAAAAsK/+uextra\", -1, true))));\n}\nprivate List makeUnboundedSourcePipeline() throws Exception {\nreturn makeUnboundedSourcePipeline(1, new PrintFn());\n}\nprivate List makeUnboundedSourcePipeline(\nint numMessagesPerShard,\nDoFn>, String> doFn)\nthrows Exception {\nDataflowPipelineOptions options =\nPipelineOptionsFactory.create().as(DataflowPipelineOptions.class);\noptions.setNumWorkers(1);\nCloudObject codec =\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(\nValueWithRecordId.ValueWithRecordIdCoder.of(\nKvCoder.of(VarIntCoder.of(), VarIntCoder.of())),\nGlobalWindow.Coder.INSTANCE),\n/* sdkComponents= */ null);\nreturn Arrays.asList(\nnew ParallelInstruction()\n.setSystemName(\"Read\")\n.setOriginalName(\"OriginalReadName\")\n.setRead(\nnew ReadInstruction()\n.setSource(\nCustomSources.serializeToCloudSource(\nnew TestCountingSource(numMessagesPerShard), 
options)\n.setCodec(codec)))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setName(\"read_output\")\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setCodec(codec))),\nmakeDoFnInstruction(doFn, 0, StringUtf8Coder.of(), WindowingStrategy.globalDefault()),\nmakeSinkInstruction(StringUtf8Coder.of(), 1, GlobalWindow.Coder.INSTANCE));\n}\n@Test\npublic void testUnboundedSources() throws Exception {\nList finalizeTracker = Lists.newArrayList();\nTestCountingSource.setFinalizeTracker(finalizeTracker);\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(\nmakeUnboundedSourcePipeline(),\ncreateTestingPipelineOptions(server),\nfalse /* publishCounters */);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 1\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull));\nMap result = server.waitForAndGetCommits(1);\nIterable counters = worker.buildCounters();\nWindmill.WorkItemCommitRequest commit = result.get(1L);\nUnsignedLong finalizeId =\nUnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nassertThat(\nremoveDynamicFields(commit),\nequalTo(\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(GlobalWindow.Coder.INSTANCE),\nCollections.singletonList(GlobalWindow.INSTANCE)),\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 1 \"\n+ \"cache_token: 1 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 18 \"\n+ \"output_messages {\"\n+ \" destination_stream_id: \\\"out\\\"\"\n+ \" bundles {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"0:0\\\"\"\n+ \" }\"\n+ \" messages_ids: \\\"\\\"\"\n+ \" }\"\n+ \"} \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\"))\n.build()));\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 2\"\n+ \" cache_token: 1\"\n+ \" source_state {\"\n+ \" state: \\\"\\001\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \" } \"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncounters = worker.buildCounters();\ncommit = result.get(2L);\nfinalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nassertThat(\nremoveDynamicFields(commit),\nequalTo(\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 2 \"\n+ \"cache_token: 1 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 0 \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\")\n.build()));\nassertThat(finalizeTracker, contains(0));\nassertNull(getCounter(counters, \"dataflow_input_size-computation\"));\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000002\\\"\"\n+ \" sharding_key: 2\"\n+ \" work_token: 3\"\n+ \" cache_token: 2\"\n+ \" 
source_state {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" } \"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncounters = worker.buildCounters();\ncommit = result.get(3L);\nfinalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nassertThat(\nremoveDynamicFields(commit),\nequalTo(\nparseCommitRequest(\n\"key: \\\"0000000000000002\\\" \"\n+ \"sharding_key: 2 \"\n+ \"work_token: 3 \"\n+ \"cache_token: 2 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 0 \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\")\n.build()));\nassertNull(getCounter(counters, \"dataflow_input_size-computation\"));\n}\n@Test\npublic void testUnboundedSourcesDrain() throws Exception {\nList finalizeTracker = Lists.newArrayList();\nTestCountingSource.setFinalizeTracker(finalizeTracker);\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(\nmakeUnboundedSourcePipeline(),\ncreateTestingPipelineOptions(server),\ntrue /* publishCounters */);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 2\"\n+ \" cache_token: 3\"\n+ \" }\"\n+ \"}\",\nnull));\nMap result = server.waitForAndGetCommits(1);\nWindmill.WorkItemCommitRequest commit = result.get(2L);\nUnsignedLong finalizeId =\nUnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nassertThat(\nremoveDynamicFields(commit),\nequalTo(\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(GlobalWindow.Coder.INSTANCE),\nCollections.singletonList(GlobalWindow.INSTANCE)),\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 2 \"\n+ \"cache_token: 3 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 18 \"\n+ \"output_messages {\"\n+ \" destination_stream_id: \\\"out\\\"\"\n+ \" bundles {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"0:0\\\"\"\n+ \" }\"\n+ \" messages_ids: \\\"\\\"\"\n+ \" }\"\n+ \"} \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\"))\n.build()));\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 3\"\n+ \" cache_token: 3\"\n+ \" source_state {\"\n+ \" only_finalize: true\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \" }\"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncommit = result.get(3L);\nassertThat(\ncommit,\nequalTo(\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 3 \"\n+ \"cache_token: 3 \"\n+ \"source_state_updates {\"\n+ \" only_finalize: true\"\n+ \"} \")\n.build()));\nassertThat(finalizeTracker, contains(0));\n}\n@Test\npublic void testUnboundedSourceWorkRetry() throws Exception {\nList finalizeTracker = Lists.newArrayList();\nTestCountingSource.setFinalizeTracker(finalizeTracker);\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = 
createTestingPipelineOptions(server);\noptions.setWorkerCacheMb(0);\nStreamingDataflowWorker worker =\nmakeWorker(makeUnboundedSourcePipeline(), options, false /* publishCounters */);\nworker.start();\nWindmill.GetWorkResponse work =\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 1\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull);\nserver.whenGetWorkCalled().thenReturn(work);\nMap result = server.waitForAndGetCommits(1);\nIterable counters = worker.buildCounters();\nWindmill.WorkItemCommitRequest commit = result.get(1L);\nUnsignedLong finalizeId =\nUnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nWindmill.WorkItemCommitRequest expectedCommit =\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(GlobalWindow.Coder.INSTANCE),\nCollections.singletonList(GlobalWindow.INSTANCE)),\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 1 \"\n+ \"cache_token: 1 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 18 \"\n+ \"output_messages {\"\n+ \" destination_stream_id: \\\"out\\\"\"\n+ \" bundles {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" messages {\"\n+ \" timestamp: 0\"\n+ \" data: \\\"0:0\\\"\"\n+ \" }\"\n+ \" messages_ids: \\\"\\\"\"\n+ \" }\"\n+ \"} \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\"))\n.build();\nassertThat(removeDynamicFields(commit), equalTo(expectedCommit));\nserver.clearCommitsReceived();\nserver.whenGetWorkCalled().thenReturn(work);\nresult = server.waitForAndGetCommits(1);\ncommit = result.get(1L);\nfinalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nWindmill.WorkItemCommitRequest.Builder commitBuilder = expectedCommit.toBuilder();\ncommitBuilder\n.getSourceStateUpdatesBuilder()\n.setFinalizeIds(0, commit.getSourceStateUpdates().getFinalizeIds(0));\nexpectedCommit = commitBuilder.build();\nassertThat(removeDynamicFields(commit), equalTo(expectedCommit));\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 2\"\n+ \" cache_token: 1\"\n+ \" source_state {\"\n+ \" state: \\\"\\001\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \" } \"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncommit = result.get(2L);\nfinalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nassertThat(\nremoveDynamicFields(commit),\nequalTo(\nparseCommitRequest(\n\"key: \\\"0000000000000001\\\" \"\n+ \"sharding_key: 1 \"\n+ \"work_token: 2 \"\n+ \"cache_token: 1 \"\n+ \"source_backlog_bytes: 7 \"\n+ \"source_bytes_processed: 0 \"\n+ \"source_state_updates {\"\n+ \" state: \\\"\\000\\\"\"\n+ \" finalize_ids: \"\n+ finalizeId\n+ \"} \"\n+ \"source_watermark: 1000\")\n.build()));\nassertThat(finalizeTracker, contains(0));\n}\n@Test\npublic void testActiveWork() throws Exception {\nBoundedQueueExecutor mockExecutor = Mockito.mock(BoundedQueueExecutor.class);\nComputationState computationState =\nnew 
ComputationState(\n\"computation\",\ndefaultMapTask(Collections.singletonList(makeSourceInstruction(StringUtf8Coder.of()))),\nmockExecutor,\nImmutableMap.of(),\nnull);\nShardedKey key1 = ShardedKey.create(ByteString.copyFromUtf8(\"key1\"), 1);\nShardedKey key2 = ShardedKey.create(ByteString.copyFromUtf8(\"key2\"), 2);\nWork m1 = createMockWork(1);\nassertTrue(computationState.activateWork(key1, m1));\nMockito.verify(mockExecutor).execute(m1, m1.getWorkItem().getSerializedSize());\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1, 1);\nMockito.verifyNoMoreInteractions(mockExecutor);\nWork m2 = createMockWork(2);\nassertTrue(computationState.activateWork(key1, m2));\nMockito.verify(mockExecutor).execute(m2, m2.getWorkItem().getSerializedSize());\nWork m3 = createMockWork(3);\nassertTrue(computationState.activateWork(key1, m3));\nMockito.verifyNoMoreInteractions(mockExecutor);\nWork m4 = createMockWork(4);\nassertTrue(computationState.activateWork(key2, m4));\nMockito.verify(mockExecutor).execute(m4, m4.getWorkItem().getSerializedSize());\ncomputationState.completeWorkAndScheduleNextWorkForKey(key2, 4);\nMockito.verifyNoMoreInteractions(mockExecutor);\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1, 2);\nMockito.verify(mockExecutor).forceExecute(m3, m3.getWorkItem().getSerializedSize());\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1, 3);\nMockito.verifyNoMoreInteractions(mockExecutor);\nWork m5 = createMockWork(5);\ncomputationState.activateWork(key1, m5);\nMockito.verify(mockExecutor).execute(m5, m5.getWorkItem().getSerializedSize());\nassertFalse(computationState.activateWork(key1, m5));\nMockito.verifyNoMoreInteractions(mockExecutor);\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1, 5);\nMockito.verifyNoMoreInteractions(mockExecutor);\n}\n@Test\npublic void testActiveWorkForShardedKeys() throws Exception {\nBoundedQueueExecutor mockExecutor = Mockito.mock(BoundedQueueExecutor.class);\nComputationState computationState =\nnew ComputationState(\n\"computation\",\ndefaultMapTask(Collections.singletonList(makeSourceInstruction(StringUtf8Coder.of()))),\nmockExecutor,\nImmutableMap.of(),\nnull);\nShardedKey key1Shard1 = ShardedKey.create(ByteString.copyFromUtf8(\"key1\"), 1);\nShardedKey key1Shard2 = ShardedKey.create(ByteString.copyFromUtf8(\"key1\"), 2);\nWork m1 = createMockWork(1);\nassertTrue(computationState.activateWork(key1Shard1, m1));\nMockito.verify(mockExecutor).execute(m1, m1.getWorkItem().getSerializedSize());\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1Shard1, 1);\nMockito.verifyNoMoreInteractions(mockExecutor);\nWork m2 = createMockWork(2);\nassertTrue(computationState.activateWork(key1Shard1, m2));\nMockito.verify(mockExecutor).execute(m2, m2.getWorkItem().getSerializedSize());\nWork m3 = createMockWork(3);\nassertTrue(computationState.activateWork(key1Shard1, m3));\nMockito.verifyNoMoreInteractions(mockExecutor);\nWork m4 = createMockWork(3);\nassertFalse(computationState.activateWork(key1Shard1, m4));\nMockito.verifyNoMoreInteractions(mockExecutor);\nassertTrue(computationState.activateWork(key1Shard2, m4));\nMockito.verify(mockExecutor).execute(m4, m4.getWorkItem().getSerializedSize());\nassertFalse(computationState.activateWork(key1Shard2, m4));\ncomputationState.completeWorkAndScheduleNextWorkForKey(key1Shard2, 3);\nMockito.verifyNoMoreInteractions(mockExecutor);\n}\n@Test\n@Ignore\npublic void testMaxThreadMetric() throws Exception {\nint maxThreads = 2;\nint threadExpiration = 60;\nBoundedQueueExecutor 
executor =\nnew BoundedQueueExecutor(\nmaxThreads,\nthreadExpiration,\nTimeUnit.SECONDS,\nmaxThreads,\n10000000,\nnew ThreadFactoryBuilder()\n.setNameFormat(\"DataflowWorkUnits-%d\")\n.setDaemon(true)\n.build());\nComputationState computationState =\nnew ComputationState(\n\"computation\",\ndefaultMapTask(Collections.singletonList(makeSourceInstruction(StringUtf8Coder.of()))),\nexecutor,\nImmutableMap.of(),\nnull);\nShardedKey key1Shard1 = ShardedKey.create(ByteString.copyFromUtf8(\"key1\"), 1);\nConsumer sleepProcessWorkFn =\nunused -> {\ntry {\nThread.sleep(1000);\n} catch (InterruptedException e) {\nThread.currentThread().interrupt();\n}\n};\nWork m2 = createMockWork(2, sleepProcessWorkFn);\nWork m3 = createMockWork(3, sleepProcessWorkFn);\nassertTrue(computationState.activateWork(key1Shard1, m2));\nassertTrue(computationState.activateWork(key1Shard1, m3));\nexecutor.execute(m2, m2.getWorkItem().getSerializedSize());\nexecutor.execute(m3, m3.getWorkItem().getSerializedSize());\nlong i = 990L;\nassertTrue(executor.allThreadsActiveTime() >= i);\nexecutor.shutdown();\n}\nvolatile boolean stop = false;\n@Test\npublic void testExceptionInvalidatesCache() throws Exception {\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver.setExpectedExceptionCount(2);\nDataflowPipelineOptions options = createTestingPipelineOptions(server);\noptions.setNumWorkers(1);\nDataflowPipelineDebugOptions debugOptions = options.as(DataflowPipelineDebugOptions.class);\ndebugOptions.setUnboundedReaderMaxElements(1);\nCloudObject codec =\nCloudObjects.asCloudObject(\nWindowedValue.getFullCoder(\nValueWithRecordId.ValueWithRecordIdCoder.of(\nKvCoder.of(VarIntCoder.of(), VarIntCoder.of())),\nGlobalWindow.Coder.INSTANCE),\n/* sdkComponents= */ null);\nTestCountingSource counter = new TestCountingSource(3).withThrowOnFirstSnapshot(true);\nList instructions =\nArrays.asList(\nnew ParallelInstruction()\n.setOriginalName(\"OriginalReadName\")\n.setSystemName(\"Read\")\n.setName(DEFAULT_PARDO_USER_NAME)\n.setRead(\nnew ReadInstruction()\n.setSource(\nCustomSources.serializeToCloudSource(counter, options).setCodec(codec)))\n.setOutputs(\nCollections.singletonList(\nnew InstructionOutput()\n.setName(\"read_output\")\n.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)\n.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)\n.setCodec(codec))),\nmakeDoFnInstruction(\nnew TestExceptionInvalidatesCacheFn(),\n0,\nStringUtf8Coder.of(),\nWindowingStrategy.globalDefault()),\nmakeSinkInstruction(StringUtf8Coder.of(), 1, GlobalWindow.Coder.INSTANCE));\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions.as(StreamingDataflowWorkerOptions.class),\ntrue /* publishCounters */);\nworker.setRetryLocallyDelayMs(100);\nworker.start();\nfor (int i = 0; i < 3; i++) {\nByteString state;\nif (i == 0 || i == 1) {\nstate = ByteString.EMPTY;\n} else {\nstate = ByteString.copyFrom(new byte[] {42});\n}\nWindmill.GetDataResponse.Builder dataResponse = Windmill.GetDataResponse.newBuilder();\ndataResponse\n.addDataBuilder()\n.setComputationId(DEFAULT_COMPUTATION_ID)\n.addDataBuilder()\n.setKey(ByteString.copyFromUtf8(\"0000000000000001\"))\n.setShardingKey(1)\n.addValuesBuilder()\n.setTag(ByteString.copyFromUtf8(\"\n.setStateFamily(DEFAULT_PARDO_STATE_FAMILY)\n.getValueBuilder()\n.setTimestamp(0)\n.setData(state);\nserver.whenGetDataCalled().thenReturn(dataResponse.build());\n}\nfor (int i = 0; i < 3; i++) {\nStringBuilder sb = new StringBuilder();\nsb.append(\"work {\\n\");\nsb.append(\" computation_id: 
\\\"computation\\\"\\n\");\nsb.append(\" input_data_watermark: 0\\n\");\nsb.append(\" work {\\n\");\nsb.append(\" key: \\\"0000000000000001\\\"\\n\");\nsb.append(\" sharding_key: 1\\n\");\nsb.append(\" work_token: \");\nsb.append(i);\nsb.append(\" cache_token: 1\");\nsb.append(\"\\n\");\nif (i > 0) {\nint previousCheckpoint = i - 1;\nsb.append(\" source_state {\\n\");\nsb.append(\" state: \\\"\");\nsb.append((char) previousCheckpoint);\nsb.append(\"\\\"\\n\");\nsb.append(\" }\\n\");\n}\nsb.append(\" }\\n\");\nsb.append(\"}\\n\");\nserver.whenGetWorkCalled().thenReturn(buildInput(sb.toString(), null));\nMap result = server.waitForAndGetCommits(1);\nWindmill.WorkItemCommitRequest commit = result.get((long) i);\nUnsignedLong finalizeId =\nUnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));\nsb = new StringBuilder();\nsb.append(\"key: \\\"0000000000000001\\\"\\n\");\nsb.append(\"sharding_key: 1\\n\");\nsb.append(\"work_token: \");\nsb.append(i);\nsb.append(\"\\n\");\nsb.append(\"cache_token: 1\\n\");\nsb.append(\"output_messages {\\n\");\nsb.append(\" destination_stream_id: \\\"out\\\"\\n\");\nsb.append(\" bundles {\\n\");\nsb.append(\" key: \\\"0000000000000001\\\"\\n\");\nint messageNum = i;\nsb.append(\" messages {\\n\");\nsb.append(\" timestamp: \");\nsb.append(messageNum * 1000);\nsb.append(\"\\n\");\nsb.append(\" data: \\\"0:\");\nsb.append(messageNum);\nsb.append(\"\\\"\\n\");\nsb.append(\" }\\n\");\nsb.append(\" messages_ids: \\\"\\\"\\n\");\nsb.append(\" }\\n\");\nsb.append(\"}\\n\");\nif (i == 0) {\nsb.append(\"value_updates {\\n\");\nsb.append(\" tag: \\\"\nsb.append(\" value {\\n\");\nsb.append(\" timestamp: 0\\n\");\nsb.append(\" data: \\\"\");\nsb.append((char) 42);\nsb.append(\"\\\"\\n\");\nsb.append(\" }\\n\");\nsb.append(\" state_family: \\\"parDoStateFamily\\\"\\n\");\nsb.append(\"}\\n\");\n}\nint sourceState = i;\nsb.append(\"source_state_updates {\\n\");\nsb.append(\" state: \\\"\");\nsb.append((char) sourceState);\nsb.append(\"\\\"\\n\");\nsb.append(\" finalize_ids: \");\nsb.append(finalizeId);\nsb.append(\"}\\n\");\nsb.append(\"source_watermark: \");\nsb.append((sourceState + 1) * 1000);\nsb.append(\"\\n\");\nsb.append(\"source_backlog_bytes: 7\\n\");\nassertThat(\nsetValuesTimestamps(\nremoveDynamicFields(commit)\n.toBuilder()\n.clearOutputTimers()\n.clearSourceBytesProcessed())\n.build(),\nequalTo(\nsetMessagesMetadata(\nPaneInfo.NO_FIRING,\nCoderUtils.encodeToByteArray(\nCollectionCoder.of(GlobalWindow.Coder.INSTANCE),\nImmutableList.of(GlobalWindow.INSTANCE)),\nparseCommitRequest(sb.toString()))\n.build()));\n}\n}\n@Test\npublic void testHugeCommits() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(new FanoutFn(), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nserver.whenGetWorkCalled().thenReturn(makeInput(0, TimeUnit.MILLISECONDS.toMicros(0)));\nserver.waitForAndGetCommits(0);\nworker.stop();\n}\n@Test\npublic void testActiveWorkRefresh() throws Exception {\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(new SlowDoFn(), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new 
FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nserver.whenGetWorkCalled().thenReturn(makeInput(0, TimeUnit.MILLISECONDS.toMicros(0)));\nserver.waitForAndGetCommits(1);\nworker.stop();\nassertThat(server.numGetDataRequests(), greaterThan(0));\n}\n@Test\npublic void testLatencyAttributionProtobufsPopulated() {\nFakeClock clock = new FakeClock();\nWork work = Work.create(null, clock, Collections.emptyList(), unused -> {});\nclock.sleep(Duration.millis(10));\nwork.setState(Work.State.PROCESSING);\nclock.sleep(Duration.millis(20));\nwork.setState(Work.State.READING);\nclock.sleep(Duration.millis(30));\nwork.setState(Work.State.PROCESSING);\nclock.sleep(Duration.millis(40));\nwork.setState(Work.State.COMMIT_QUEUED);\nclock.sleep(Duration.millis(50));\nwork.setState(Work.State.COMMITTING);\nclock.sleep(Duration.millis(60));\nIterator it = work.getLatencyAttributions().iterator();\nassertTrue(it.hasNext());\nLatencyAttribution lat = it.next();\nassertSame(State.QUEUED, lat.getState());\nassertEquals(10, lat.getTotalDurationMillis());\nassertTrue(it.hasNext());\nlat = it.next();\nassertSame(State.ACTIVE, lat.getState());\nassertEquals(60, lat.getTotalDurationMillis());\nassertTrue(it.hasNext());\nlat = it.next();\nassertSame(State.READING, lat.getState());\nassertEquals(30, lat.getTotalDurationMillis());\nassertTrue(it.hasNext());\nlat = it.next();\nassertSame(State.COMMITTING, lat.getState());\nassertEquals(110, lat.getTotalDurationMillis());\nassertFalse(it.hasNext());\n}\n@Test\npublic void testLatencyAttributionToQueuedState() throws Exception {\nfinal int workToken = 3232;\nFakeClock clock = new FakeClock();\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(\nnew FakeSlowDoFn(clock, Duration.millis(1000)), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\noptions.setNumberOfWorkerHarnessThreads(1);\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions,\nfalse /* publishCounters */,\nclock,\nclock::newFakeScheduledExecutor);\nworker.start();\nActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);\nserver.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(workToken + 1, 0 /* timestamp */))\n.thenReturn(makeInput(workToken, 1 /* timestamp */));\nserver.waitForAndGetCommits(2);\nworker.stop();\nassertEquals(\nawrSink.getLatencyAttributionDuration(workToken, State.QUEUED), Duration.millis(1000));\nassertEquals(awrSink.getLatencyAttributionDuration(workToken + 1, State.QUEUED), Duration.ZERO);\n}\n@Test\npublic void testLatencyAttributionToActiveState() throws Exception {\nfinal int workToken = 4242;\nFakeClock clock = new FakeClock();\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(\nnew FakeSlowDoFn(clock, Duration.millis(1000)), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = 
createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions,\nfalse /* publishCounters */,\nclock,\nclock::newFakeScheduledExecutor);\nworker.start();\nActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);\nserver.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);\nserver.whenGetWorkCalled().thenReturn(makeInput(workToken, 0 /* timestamp */));\nserver.waitForAndGetCommits(1);\nworker.stop();\nassertEquals(\nawrSink.getLatencyAttributionDuration(workToken, State.ACTIVE), Duration.millis(1000));\n}\n@Test\npublic void testLatencyAttributionToReadingState() throws Exception {\nfinal int workToken = 5454;\nFakeClock clock = new FakeClock();\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(new ReadingDoFn(), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions,\nfalse /* publishCounters */,\nclock,\nclock::newFakeScheduledExecutor);\nworker.start();\nActiveWorkRefreshSink awrSink =\nnew ActiveWorkRefreshSink(\n(request) -> {\nclock.sleep(Duration.millis(1000));\nreturn EMPTY_DATA_RESPONDER.apply(request);\n});\nserver.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);\nserver.whenGetWorkCalled().thenReturn(makeInput(workToken, 0 /* timestamp */));\nserver.waitForAndGetCommits(1);\nworker.stop();\nassertEquals(\nawrSink.getLatencyAttributionDuration(workToken, State.READING), Duration.millis(1000));\n}\n@Test\npublic void testLatencyAttributionToCommittingState() throws Exception {\nfinal int workToken = 6464;\nFakeClock clock = new FakeClock();\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nserver\n.whenCommitWorkCalled()\n.answerByDefault(\n(request) -> {\nclock.sleep(Duration.millis(1000));\nreturn Windmill.CommitWorkResponse.getDefaultInstance();\n});\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions,\nfalse /* publishCounters */,\nclock,\nclock::newFakeScheduledExecutor);\nworker.start();\nActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);\nserver.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);\nserver.whenGetWorkCalled().thenReturn(makeInput(workToken, TimeUnit.MILLISECONDS.toMicros(0)));\nserver.waitForAndGetCommits(1);\nworker.stop();\nassertEquals(\nawrSink.getLatencyAttributionDuration(workToken, State.COMMITTING), Duration.millis(1000));\n}\n@Test\npublic void testLatencyAttributionPopulatedInCommitRequest() throws Exception {\nfinal int workToken = 7272;\nlong dofnWaitTimeMs = 1000;\nFakeClock clock = new FakeClock();\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeDoFnInstruction(\nnew FakeSlowDoFn(clock, Duration.millis(dofnWaitTimeMs)), 0, StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = 
new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setActiveWorkRefreshPeriodMillis(100);\noptions.setNumberOfWorkerHarnessThreads(1);\nStreamingDataflowWorker worker =\nmakeWorker(\ninstructions,\noptions,\nfalse /* publishCounters */,\nclock,\nclock::newFakeScheduledExecutor);\nworker.start();\nActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);\nserver.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);\nserver.whenGetWorkCalled().thenReturn(makeInput(workToken, 1 /* timestamp */));\nMap workItemCommitRequest = server.waitForAndGetCommits(1);\nworker.stop();\nassertEquals(\nworkItemCommitRequest.get((long) workToken).getPerWorkItemLatencyAttributions(0),\nLatencyAttribution.newBuilder()\n.setState(State.ACTIVE)\n.setTotalDurationMillis(dofnWaitTimeMs)\n.build());\nif (streamingEngine) {\nassertEquals(\nworkItemCommitRequest.get((long) workToken).getPerWorkItemLatencyAttributions(1),\nLatencyAttribution.newBuilder()\n.setState(State.GET_WORK_IN_TRANSIT_TO_USER_WORKER)\n.setTotalDurationMillis(1000)\n.build());\n}\n}\n@Test\npublic void testLimitOnOutputBundleSize() throws Exception {\nList finalizeTracker = Lists.newArrayList();\nTestCountingSource.setFinalizeTracker(finalizeTracker);\nfinal int numMessagesInCustomSourceShard = 100000;\nfinal int inflatedSizePerMessage = 10000;\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(\nmakeUnboundedSourcePipeline(\nnumMessagesInCustomSourceShard, new InflateDoFn(inflatedSizePerMessage)),\ncreateTestingPipelineOptions(server),\nfalse /* publishCounters */);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 1\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull));\nMatcher isWithinBundleSizeLimits =\nboth(greaterThan(StreamingDataflowWorker.MAX_SINK_BYTES * 9 / 10))\n.and(lessThan(StreamingDataflowWorker.MAX_SINK_BYTES * 11 / 10));\nMap result = server.waitForAndGetCommits(1);\nWindmill.WorkItemCommitRequest commit = result.get(1L);\nassertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 2\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncommit = result.get(2L);\nassertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);\n}\n@Test\npublic void testLimitOnOutputBundleSizeWithMultipleSinks() throws Exception {\nList finalizeTracker = Lists.newArrayList();\nTestCountingSource.setFinalizeTracker(finalizeTracker);\nfinal int numMessagesInCustomSourceShard = 100000;\nfinal int inflatedSizePerMessage = 10000;\nList instructions = new ArrayList<>();\ninstructions.addAll(\nmakeUnboundedSourcePipeline(\nnumMessagesInCustomSourceShard, new InflateDoFn(inflatedSizePerMessage)));\ninstructions.add(\nmakeSinkInstruction(\nDEFAULT_DESTINATION_STREAM_ID + \"-1\",\nStringUtf8Coder.of(),\n1,\nGlobalWindow.Coder.INSTANCE));\ninstructions.add(\nmakeSinkInstruction(\nDEFAULT_DESTINATION_STREAM_ID + 
\"-2\",\nStringUtf8Coder.of(),\n1,\nGlobalWindow.Coder.INSTANCE));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorker worker =\nmakeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);\nworker.start();\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 1\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull));\nMatcher isWithinBundleSizeLimits =\nboth(greaterThan(StreamingDataflowWorker.MAX_SINK_BYTES * 9 / 10))\n.and(lessThan(StreamingDataflowWorker.MAX_SINK_BYTES * 11 / 10));\nMap result = server.waitForAndGetCommits(1);\nWindmill.WorkItemCommitRequest commit = result.get(1L);\nassertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);\nserver\n.whenGetWorkCalled()\n.thenReturn(\nbuildInput(\n\"work {\"\n+ \" computation_id: \\\"computation\\\"\"\n+ \" input_data_watermark: 0\"\n+ \" work {\"\n+ \" key: \\\"0000000000000001\\\"\"\n+ \" sharding_key: 1\"\n+ \" work_token: 2\"\n+ \" cache_token: 1\"\n+ \" }\"\n+ \"}\",\nnull));\nresult = server.waitForAndGetCommits(1);\ncommit = result.get(2L);\nassertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);\n}\n@Test\npublic void testStuckCommit() throws Exception {\nif (!streamingEngine) {\nreturn;\n}\nList instructions =\nArrays.asList(\nmakeSourceInstruction(StringUtf8Coder.of()),\nmakeSinkInstruction(StringUtf8Coder.of(), 0));\nFakeWindmillServer server = new FakeWindmillServer(errorCollector);\nStreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);\noptions.setStuckCommitDurationMillis(2000);\nStreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);\nworker.start();\nserver.setDropStreamingCommits(true);\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(10, TimeUnit.MILLISECONDS.toMicros(2), DEFAULT_KEY_STRING, 1))\n.thenReturn(makeInput(15, TimeUnit.MILLISECONDS.toMicros(3), DEFAULT_KEY_STRING, 5));\nConcurrentHashMap> droppedCommits =\nserver.waitForDroppedCommits(2);\nserver.setDropStreamingCommits(false);\nserver\n.whenGetWorkCalled()\n.thenReturn(makeInput(1, TimeUnit.MILLISECONDS.toMicros(1), DEFAULT_KEY_STRING, 1));\nMap result = server.waitForAndGetCommits(1);\ndroppedCommits.values().iterator().next().accept(CommitStatus.OK);\nworker.stop();\nassertTrue(result.containsKey(1L));\nassertEquals(\nmakeExpectedOutput(\n1, TimeUnit.MILLISECONDS.toMicros(1), DEFAULT_KEY_STRING, 1, DEFAULT_KEY_STRING)\n.build(),\nremoveDynamicFields(result.get(1L)));\n}\nstatic class BlockingFn extends DoFn implements TestRule {\npublic static CountDownLatch blocker = new CountDownLatch(1);\npublic static Semaphore counter = new Semaphore(0);\npublic static AtomicInteger callCounter = new AtomicInteger(0);\n@ProcessElement\npublic void processElement(ProcessContext c) throws InterruptedException {\ncallCounter.incrementAndGet();\ncounter.release();\nblocker.await();\nc.output(c.element());\n}\n@Override\npublic Statement apply(final Statement base, final Description description) {\nreturn new Statement() {\n@Override\npublic void evaluate() throws Throwable {\nblocker = new CountDownLatch(1);\ncounter = new Semaphore(0);\ncallCounter = new AtomicInteger();\nbase.evaluate();\n}\n};\n}\n}\nstatic class KeyTokenInvalidFn extends DoFn, KV> {\nstatic boolean thrown = false;\n@ProcessElement\npublic void 
processElement(ProcessContext c) {\nif (!thrown) {\nthrown = true;\nthrow new KeyTokenInvalidException(\"key\");\n} else {\nc.output(c.element());\n}\n}\n}\nstatic class LargeCommitFn extends DoFn, KV> {\n@ProcessElement\npublic void processElement(ProcessContext c) {\nif (c.element().getKey().equals(\"large_key\")) {\nStringBuilder s = new StringBuilder();\nfor (int i = 0; i < 100; ++i) {\ns.append(\"large_commit\");\n}\nc.output(KV.of(c.element().getKey(), s.toString()));\n} else {\nc.output(c.element());\n}\n}\n}\nstatic class ChangeKeysFn extends DoFn, KV> {\n@ProcessElement\npublic void processElement(ProcessContext c) {\nKV elem = c.element();\nc.output(KV.of(elem.getKey() + \"_\" + elem.getValue(), elem.getValue()));\n}\n}\nstatic class TestExceptionFn extends DoFn {\nboolean firstTime = true;\n@ProcessElement\npublic void processElement(ProcessContext c) throws Exception {\nif (firstTime) {\nfirstTime = false;\ntry {\nthrow new Exception(\"Exception!\");\n} catch (Exception e) {\nthrow new Exception(\"Another exception!\", e);\n}\n}\n}\n}\nstatic class PassthroughDoFn\nextends DoFn>, KV>> {\n@ProcessElement\npublic void processElement(ProcessContext c) {\nc.output(c.element());\n}\n}\nstatic class Action {\nGetWorkResponse response;\nTimer[] expectedTimers = new Timer[] {};\nWatermarkHold[] expectedHolds = new WatermarkHold[] {};\npublic Action(GetWorkResponse response) {\nthis.response = response;\n}\nAction withHolds(WatermarkHold... holds) {\nthis.expectedHolds = holds;\nreturn this;\n}\nAction withTimers(Timer... timers) {\nthis.expectedTimers = timers;\nreturn this;\n}\n}\nstatic class PrintFn extends DoFn>, String> {\n@ProcessElement\npublic void processElement(ProcessContext c) {\nKV elem = c.element().getValue();\nc.output(elem.getKey() + \":\" + elem.getValue());\n}\n}\nprivate static class MockWork {\nWork create(long workToken) {\nreturn Work.create(\nWindmill.WorkItem.newBuilder().setKey(ByteString.EMPTY).setWorkToken(workToken).build(),\nInstant::now,\nCollections.emptyList(),\nwork -> {});\n}\n}\nstatic class TestExceptionInvalidatesCacheFn\nextends DoFn>, String> {\nstatic boolean thrown = false;\n@StateId(\"int\")\nprivate final StateSpec> counter = StateSpecs.value(VarIntCoder.of());\n@ProcessElement\npublic void processElement(ProcessContext c, @StateId(\"int\") ValueState state)\nthrows Exception {\nKV elem = c.element().getValue();\nif (elem.getValue() == 0) {\nLOG.error(\"**** COUNTER 0 ****\");\nassertNull(state.read());\nstate.write(42);\nassertEquals((Integer) 42, state.read());\n} else if (elem.getValue() == 1) {\nLOG.error(\"**** COUNTER 1 ****\");\nassertEquals((Integer) 42, state.read());\n} else if (elem.getValue() == 2) {\nif (!thrown) {\nLOG.error(\"**** COUNTER 2 (will throw) ****\");\nthrown = true;\nthrow new Exception(\"Exception!\");\n}\nLOG.error(\"**** COUNTER 2 (retry) ****\");\nassertEquals((Integer) 42, state.read());\n} else {\nthrow new RuntimeException(\"only expecting values [0,2]\");\n}\nc.output(elem.getKey() + \":\" + elem.getValue());\n}\n}\nprivate static class FanoutFn extends DoFn {\n@ProcessElement\npublic void processElement(ProcessContext c) {\nStringBuilder builder = new StringBuilder(1000000);\nfor (int i = 0; i < 1000000; i++) {\nbuilder.append(' ');\n}\nString largeString = builder.toString();\nfor (int i = 0; i < 3000; i++) {\nc.output(largeString);\n}\n}\n}\nprivate static class SlowDoFn extends DoFn {\n@ProcessElement\npublic void processElement(ProcessContext c) throws Exception 
{\nThread.sleep(1000);\nc.output(c.element());\n}\n}\nstatic class FakeClock implements Supplier {\nprivate final PriorityQueue jobs = new PriorityQueue<>();\nprivate Instant now = Instant.now();\npublic ScheduledExecutorService newFakeScheduledExecutor(String unused) {\nreturn new FakeScheduledExecutor();\n}\n@Override\npublic synchronized Instant get() {\nreturn now;\n}\npublic synchronized void clear() {\njobs.clear();\n}\npublic synchronized void sleep(Duration duration) {\nif (duration.isShorterThan(Duration.ZERO)) {\nthrow new UnsupportedOperationException(\"Cannot sleep backwards in time\");\n}\nInstant endOfSleep = now.plus(duration);\nwhile (true) {\nJob job = jobs.peek();\nif (job == null || job.when.isAfter(endOfSleep)) {\nbreak;\n}\njobs.remove();\nnow = job.when;\njob.work.run();\n}\nnow = endOfSleep;\n}\nprivate synchronized void schedule(Duration fromNow, Runnable work) {\njobs.add(new Job(now.plus(fromNow), work));\n}\nprivate static class Job implements Comparable {\nfinal Instant when;\nfinal Runnable work;\nJob(Instant when, Runnable work) {\nthis.when = when;\nthis.work = work;\n}\n@Override\npublic int compareTo(Job job) {\nreturn when.compareTo(job.when);\n}\n}\nprivate class FakeScheduledExecutor implements ScheduledExecutorService {\n@Override\npublic boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {\nreturn true;\n}\n@Override\npublic void execute(Runnable command) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic List> invokeAll(Collection> tasks)\nthrows InterruptedException {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic List> invokeAll(\nCollection> tasks, long timeout, TimeUnit unit)\nthrows InterruptedException {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic T invokeAny(Collection> tasks)\nthrows ExecutionException, InterruptedException {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic T invokeAny(Collection> tasks, long timeout, TimeUnit unit)\nthrows ExecutionException, InterruptedException, TimeoutException {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic boolean isShutdown() {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic boolean isTerminated() {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic void shutdown() {}\n@Override\npublic List shutdownNow() {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic Future submit(Callable task) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic Future submit(Runnable task) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic Future submit(Runnable task, T result) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic ScheduledFuture schedule(Callable callable, long delay, TimeUnit unit) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic ScheduledFuture schedule(Runnable command, long delay, TimeUnit unit) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic ScheduledFuture scheduleAtFixedRate(\nRunnable command, long initialDelay, long period, TimeUnit unit) {\nthrow new UnsupportedOperationException(\"Not implemented yet\");\n}\n@Override\npublic ScheduledFuture 
scheduleWithFixedDelay(\nRunnable command, long initialDelay, long delay, TimeUnit unit) {\nif (delay <= 0) {\nthrow new UnsupportedOperationException(\n\"Please supply a delay > 0 to scheduleWithFixedDelay\");\n}\nFakeClock.this.schedule(\nDuration.millis(unit.toMillis(initialDelay)),\nnew Runnable() {\n@Override\npublic void run() {\ncommand.run();\nFakeClock.this.schedule(Duration.millis(unit.toMillis(delay)), this);\n}\n});\nFakeClock.this.sleep(Duration.ZERO);\nreturn null;\n}\n}\n}\nprivate static class FakeSlowDoFn extends DoFn {\nprivate static FakeClock clock;\nprivate final Duration sleep;\nFakeSlowDoFn(FakeClock clock, Duration sleep) {\nFakeSlowDoFn.clock = clock;\nthis.sleep = sleep;\n}\n@ProcessElement\npublic void processElement(ProcessContext c) throws Exception {\nclock.sleep(sleep);\nc.output(c.element());\n}\n}\nstatic class ActiveWorkRefreshSink {\nprivate final Function responder;\nprivate final Map> totalDurations =\nnew HashMap<>();\nActiveWorkRefreshSink(Function responder) {\nthis.responder = responder;\n}\nDuration getLatencyAttributionDuration(long workToken, LatencyAttribution.State state) {\nEnumMap durations = totalDurations.get(workToken);\nreturn durations == null ? Duration.ZERO : durations.getOrDefault(state, Duration.ZERO);\n}\nboolean isActiveWorkRefresh(GetDataRequest request) {\nfor (ComputationGetDataRequest computationRequest : request.getRequestsList()) {\nif (!computationRequest.getComputationId().equals(DEFAULT_COMPUTATION_ID)) {\nreturn false;\n}\nfor (KeyedGetDataRequest keyedRequest : computationRequest.getRequestsList()) {\nif (keyedRequest.getWorkToken() == 0\n|| keyedRequest.getShardingKey() != DEFAULT_SHARDING_KEY\n|| keyedRequest.getValuesToFetchCount() != 0\n|| keyedRequest.getBagsToFetchCount() != 0\n|| keyedRequest.getTagValuePrefixesToFetchCount() != 0\n|| keyedRequest.getWatermarkHoldsToFetchCount() != 0) {\nreturn false;\n}\n}\n}\nreturn true;\n}\nGetDataResponse getData(GetDataRequest request) {\nif (!isActiveWorkRefresh(request)) {\nreturn responder.apply(request);\n}\nfor (ComputationGetDataRequest computationRequest : request.getRequestsList()) {\nfor (KeyedGetDataRequest keyedRequest : computationRequest.getRequestsList()) {\nfor (LatencyAttribution la : keyedRequest.getLatencyAttributionList()) {\nEnumMap durations =\ntotalDurations.computeIfAbsent(\nkeyedRequest.getWorkToken(),\n(Long workToken) ->\nnew EnumMap(\nLatencyAttribution.State.class));\nDuration cur = Duration.millis(la.getTotalDurationMillis());\ndurations.compute(la.getState(), (s, d) -> d == null || d.isShorterThan(cur) ? cur : d);\n}\n}\n}\nreturn EMPTY_DATA_RESPONDER.apply(request);\n}\n}\nstatic class ReadingDoFn extends DoFn {\n@StateId(\"int\")\nprivate final StateSpec> counter = StateSpecs.value(VarIntCoder.of());\n@ProcessElement\npublic void processElement(ProcessContext c, @StateId(\"int\") ValueState state) {\nstate.read();\nc.output(c.element());\n}\n}\n/** For each input element, emits a large string. 
*/\nprivate static class InflateDoFn extends DoFn>, String> {\nfinal int inflatedSize;\n/** For each input elements, outputs a string of this length */\nInflateDoFn(int inflatedSize) {\nthis.inflatedSize = inflatedSize;\n}\n@ProcessElement\npublic void processElement(ProcessContext c) {\nchar[] chars = new char[inflatedSize];\nArrays.fill(chars, ' ');\nc.output(new String(chars));\n}\n}\n}" + }, + { + "comment": "I will block creating db using storage volume in shared_nothing mode in next PR.", + "method_body": "private void handleShowCreateDb() throws AnalysisException {\nShowCreateDbStmt showStmt = (ShowCreateDbStmt) stmt;\nString catalogName = showStmt.getCatalogName();\nString dbName = showStmt.getDb();\nList> rows = Lists.newArrayList();\nDatabase db;\nif (Strings.isNullOrEmpty(catalogName) || CatalogMgr.isInternalCatalog(catalogName)) {\ndb = connectContext.getGlobalStateMgr().getDb(dbName);\n} else {\ndb = GlobalStateMgr.getCurrentState().getMetadataMgr().getDb(catalogName, dbName);\n}\nMetaUtils.checkDbNullAndReport(db, showStmt.getDb());\nStringBuilder createSqlBuilder = new StringBuilder();\ncreateSqlBuilder.append(\"CREATE DATABASE `\").append(showStmt.getDb()).append(\"`\");\nif (!Strings.isNullOrEmpty(db.getLocation())) {\ncreateSqlBuilder.append(\"\\nPROPERTIES (\\\"location\\\" = \\\"\").append(db.getLocation()).append(\"\\\")\");\n}\nif (RunMode.getCurrentRunMode() == RunMode.SHARED_DATA && !Strings.isNullOrEmpty(db.getStorageVolumeId())) {\nStorageVolume sv = GlobalStateMgr.getCurrentState().getStorageVolumeMgr().getStorageVolume(db.getStorageVolumeId());\ncreateSqlBuilder.append(\"\\nPROPERTIES (\\\"storage_volume\\\" = \\\"\").append(sv.getName()).append(\"\\\")\");\n}\nrows.add(Lists.newArrayList(showStmt.getDb(), createSqlBuilder.toString()));\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}", + "target_code": "if (RunMode.getCurrentRunMode() == RunMode.SHARED_DATA && !Strings.isNullOrEmpty(db.getStorageVolumeId())) {", + "method_body_after": "private void handleShowCreateDb() throws AnalysisException {\nShowCreateDbStmt showStmt = (ShowCreateDbStmt) stmt;\nString catalogName = showStmt.getCatalogName();\nString dbName = showStmt.getDb();\nList> rows = Lists.newArrayList();\nDatabase db;\nif (Strings.isNullOrEmpty(catalogName) || CatalogMgr.isInternalCatalog(catalogName)) {\ndb = connectContext.getGlobalStateMgr().getDb(dbName);\n} else {\ndb = GlobalStateMgr.getCurrentState().getMetadataMgr().getDb(catalogName, dbName);\n}\nMetaUtils.checkDbNullAndReport(db, showStmt.getDb());\nStringBuilder createSqlBuilder = new StringBuilder();\ncreateSqlBuilder.append(\"CREATE DATABASE `\").append(showStmt.getDb()).append(\"`\");\nif (!Strings.isNullOrEmpty(db.getLocation())) {\ncreateSqlBuilder.append(\"\\nPROPERTIES (\\\"location\\\" = \\\"\").append(db.getLocation()).append(\"\\\")\");\n}\nif (!Strings.isNullOrEmpty(db.getStorageVolumeId())) {\nStorageVolume sv = GlobalStateMgr.getCurrentState().getStorageVolumeMgr().getStorageVolume(db.getStorageVolumeId());\ncreateSqlBuilder.append(\"\\nPROPERTIES (\\\"storage_volume\\\" = \\\"\").append(sv.getName()).append(\"\\\")\");\n}\nrows.add(Lists.newArrayList(showStmt.getDb(), createSqlBuilder.toString()));\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}", + "context_before": "class ShowExecutor {\nprivate static final Logger LOG = LogManager.getLogger(ShowExecutor.class);\nprivate static final List> EMPTY_SET = Lists.newArrayList();\nprivate final ConnectContext connectContext;\nprivate final 
ShowStmt stmt;\nprivate ShowResultSet resultSet;\nprivate final MetadataMgr metadataMgr;\npublic ShowExecutor(ConnectContext connectContext, ShowStmt stmt) {\nthis.connectContext = connectContext;\nthis.stmt = stmt;\nresultSet = null;\nmetadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();\n}\npublic ShowResultSet execute() throws AnalysisException, DdlException {\nif (stmt instanceof ShowMaterializedViewsStmt) {\nhandleShowMaterializedView();\n} else if (stmt instanceof ShowAuthorStmt) {\nhandleShowAuthor();\n} else if (stmt instanceof ShowProcStmt) {\nhandleShowProc();\n} else if (stmt instanceof HelpStmt) {\nhandleHelp();\n} else if (stmt instanceof ShowWarehousesStmt) {\nhandleShowWarehouses();\n} else if (stmt instanceof ShowClustersStmt) {\nhandleShowClusters();\n} else if (stmt instanceof ShowDbStmt) {\nhandleShowDb();\n} else if (stmt instanceof ShowTableStmt) {\nhandleShowTable();\n} else if (stmt instanceof ShowTableStatusStmt) {\nhandleShowTableStatus();\n} else if (stmt instanceof DescribeStmt) {\nhandleDescribe();\n} else if (stmt instanceof ShowCreateTableStmt) {\nhandleShowCreateTable();\n} else if (stmt instanceof ShowCreateDbStmt) {\nhandleShowCreateDb();\n} else if (stmt instanceof ShowProcesslistStmt) {\nhandleShowProcesslist();\n} else if (stmt instanceof ShowEnginesStmt) {\nhandleShowEngines();\n} else if (stmt instanceof ShowFunctionsStmt) {\nhandleShowFunctions();\n} else if (stmt instanceof ShowVariablesStmt) {\nhandleShowVariables();\n} else if (stmt instanceof ShowColumnStmt) {\nhandleShowColumn();\n} else if (stmt instanceof ShowLoadStmt) {\nhandleShowLoad();\n} else if (stmt instanceof ShowRoutineLoadStmt) {\nhandleShowRoutineLoad();\n} else if (stmt instanceof ShowRoutineLoadTaskStmt) {\nhandleShowRoutineLoadTask();\n} else if (stmt instanceof ShowStreamLoadStmt) {\nhandleShowStreamLoad();\n} else if (stmt instanceof ShowDeleteStmt) {\nhandleShowDelete();\n} else if (stmt instanceof ShowAlterStmt) {\nhandleShowAlter();\n} else if (stmt instanceof ShowUserPropertyStmt) {\nhandleShowUserProperty();\n} else if (stmt instanceof ShowDataStmt) {\nhandleShowData();\n} else if (stmt instanceof ShowCollationStmt) {\nhandleShowCollation();\n} else if (stmt instanceof ShowPartitionsStmt) {\nhandleShowPartitions();\n} else if (stmt instanceof ShowTabletStmt) {\nhandleShowTablet();\n} else if (stmt instanceof ShowBackupStmt) {\nhandleShowBackup();\n} else if (stmt instanceof ShowRestoreStmt) {\nhandleShowRestore();\n} else if (stmt instanceof ShowBrokerStmt) {\nhandleShowBroker();\n} else if (stmt instanceof ShowResourcesStmt) {\nhandleShowResources();\n} else if (stmt instanceof ShowExportStmt) {\nhandleShowExport();\n} else if (stmt instanceof ShowBackendsStmt) {\nhandleShowBackends();\n} else if (stmt instanceof ShowFrontendsStmt) {\nhandleShowFrontends();\n} else if (stmt instanceof ShowRepositoriesStmt) {\nhandleShowRepositories();\n} else if (stmt instanceof ShowSnapshotStmt) {\nhandleShowSnapshot();\n} else if (stmt instanceof ShowGrantsStmt) {\nhandleShowGrants();\n} else if (stmt instanceof ShowRolesStmt) {\nhandleShowRoles();\n} else if (stmt instanceof AdminShowReplicaStatusStmt) {\nhandleAdminShowTabletStatus();\n} else if (stmt instanceof AdminShowReplicaDistributionStmt) {\nhandleAdminShowTabletDistribution();\n} else if (stmt instanceof AdminShowConfigStmt) {\nhandleAdminShowConfig();\n} else if (stmt instanceof ShowSmallFilesStmt) {\nhandleShowSmallFiles();\n} else if (stmt instanceof ShowDynamicPartitionStmt) {\nhandleShowDynamicPartition();\n} 
else if (stmt instanceof ShowIndexStmt) {\nhandleShowIndex();\n} else if (stmt instanceof ShowTransactionStmt) {\nhandleShowTransaction();\n} else if (stmt instanceof ShowPluginsStmt) {\nhandleShowPlugins();\n} else if (stmt instanceof ShowSqlBlackListStmt) {\nhandleShowSqlBlackListStmt();\n} else if (stmt instanceof ShowAnalyzeJobStmt) {\nhandleShowAnalyzeJob();\n} else if (stmt instanceof ShowAnalyzeStatusStmt) {\nhandleShowAnalyzeStatus();\n} else if (stmt instanceof ShowBasicStatsMetaStmt) {\nhandleShowBasicStatsMeta();\n} else if (stmt instanceof ShowHistogramStatsMetaStmt) {\nhandleShowHistogramStatsMeta();\n} else if (stmt instanceof ShowResourceGroupStmt) {\nhandleShowResourceGroup();\n} else if (stmt instanceof ShowUserStmt) {\nhandleShowUser();\n} else if (stmt instanceof ShowCatalogsStmt) {\nhandleShowCatalogs();\n} else if (stmt instanceof ShowComputeNodesStmt) {\nhandleShowComputeNodes();\n} else if (stmt instanceof ShowAuthenticationStmt) {\nhandleShowAuthentication();\n} else if (stmt instanceof ShowCreateExternalCatalogStmt) {\nhandleShowCreateExternalCatalog();\n} else if (stmt instanceof ShowCharsetStmt) {\nhandleShowCharset();\n} else if (stmt instanceof ShowStorageVolumesStmt) {\nhandleShowStorageVolumes();\n} else if (stmt instanceof DescStorageVolumeStmt) {\nhandleDescStorageVolume();\n} else {\nhandleEmpty();\n}\nList> rows = doPredicate(stmt, stmt.getMetaData(), resultSet.getResultRows());\nreturn new ShowResultSet(resultSet.getMetaData(), rows);\n}\nprivate void handleShowAuthentication() {\nfinal ShowAuthenticationStmt showAuthenticationStmt = (ShowAuthenticationStmt) stmt;\nAuthenticationMgr authenticationManager = GlobalStateMgr.getCurrentState().getAuthenticationMgr();\nList> userAuthInfos = Lists.newArrayList();\nMap authenticationInfoMap = new HashMap<>();\nif (showAuthenticationStmt.isAll()) {\nauthenticationInfoMap.putAll(authenticationManager.getUserToAuthenticationInfo());\n} else {\nUserAuthenticationInfo userAuthenticationInfo;\nif (showAuthenticationStmt.getUserIdent() == null) {\nuserAuthenticationInfo = authenticationManager\n.getUserAuthenticationInfoByUserIdentity(connectContext.getCurrentUserIdentity());\n} else {\nuserAuthenticationInfo =\nauthenticationManager.getUserAuthenticationInfoByUserIdentity(showAuthenticationStmt.getUserIdent());\n}\nauthenticationInfoMap.put(showAuthenticationStmt.getUserIdent(), userAuthenticationInfo);\n}\nfor (Map.Entry entry : authenticationInfoMap.entrySet()) {\nUserAuthenticationInfo userAuthenticationInfo = entry.getValue();\nuserAuthInfos.add(Lists.newArrayList(\nentry.getKey().toString(),\nuserAuthenticationInfo.getPassword().length == 0 ? 
\"No\" : \"Yes\",\nuserAuthenticationInfo.getAuthPlugin(),\nuserAuthenticationInfo.getTextForAuthPlugin()));\n}\nresultSet = new ShowResultSet(showAuthenticationStmt.getMetaData(), userAuthInfos);\n}\nprivate void handleShowComputeNodes() {\nfinal ShowComputeNodesStmt showStmt = (ShowComputeNodesStmt) stmt;\nList> computeNodesInfos = ComputeNodeProcDir.getClusterComputeNodesInfos();\nresultSet = new ShowResultSet(showStmt.getMetaData(), computeNodesInfos);\n}\nprivate void handleShowMaterializedView() throws AnalysisException {\nShowMaterializedViewsStmt showMaterializedViewsStmt = (ShowMaterializedViewsStmt) stmt;\nString dbName = showMaterializedViewsStmt.getDb();\nDatabase db = GlobalStateMgr.getCurrentState().getDb(dbName);\nMetaUtils.checkDbNullAndReport(db, dbName);\nList materializedViews = Lists.newArrayList();\nList> singleTableMVs = Lists.newArrayList();\ndb.readLock();\ntry {\nPatternMatcher matcher = null;\nif (showMaterializedViewsStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showMaterializedViewsStmt.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n}\nfor (Table table : db.getTables()) {\nif (table.isMaterializedView()) {\nMaterializedView mvTable = (MaterializedView) table;\nif (matcher != null && !matcher.match(mvTable.getName())) {\ncontinue;\n}\nAtomicBoolean baseTableHasPrivilege = new AtomicBoolean(true);\nmvTable.getBaseTableInfos().forEach(baseTableInfo -> {\nTable baseTable = baseTableInfo.getTable();\nif (baseTable != null && baseTable.isNativeTableOrMaterializedView() && !PrivilegeActions.\ncheckTableAction(connectContext, baseTableInfo.getDbName(),\nbaseTableInfo.getTableName(),\nPrivilegeType.SELECT)) {\nbaseTableHasPrivilege.set(false);\n}\n});\nif (!baseTableHasPrivilege.get()) {\ncontinue;\n}\nif (!PrivilegeActions.checkAnyActionOnMaterializedView(connectContext, db.getFullName(),\nmvTable.getName())) {\ncontinue;\n}\nmaterializedViews.add(mvTable);\n} else if (Table.TableType.OLAP == table.getType()) {\nOlapTable olapTable = (OlapTable) table;\nList visibleMaterializedViews = olapTable.getVisibleIndexMetas();\nlong baseIdx = olapTable.getBaseIndexId();\nfor (MaterializedIndexMeta mvMeta : visibleMaterializedViews) {\nif (baseIdx == mvMeta.getIndexId()) {\ncontinue;\n}\nif (matcher != null && !matcher.match(olapTable.getIndexNameById(mvMeta.getIndexId()))) {\ncontinue;\n}\nsingleTableMVs.add(Pair.create(olapTable, mvMeta));\n}\n}\n}\nList> rowSets = listMaterializedViewStatus(dbName, materializedViews, singleTableMVs);\nresultSet = new ShowResultSet(stmt.getMetaData(), rowSets);\n} catch (Exception e) {\nLOG.warn(\"listMaterializedViews failed:\", e);\nthrow e;\n} finally {\ndb.readUnlock();\n}\n}\npublic static String buildCreateMVSql(OlapTable olapTable, String mv, MaterializedIndexMeta mvMeta) {\nStringBuilder originStmtBuilder = new StringBuilder(\n\"create materialized view \" + mv +\n\" as select \");\nString groupByString = \"\";\nfor (Column column : mvMeta.getSchema()) {\nif (column.isKey()) {\ngroupByString += column.getName() + \",\";\n}\n}\noriginStmtBuilder.append(groupByString);\nfor (Column column : mvMeta.getSchema()) {\nif (!column.isKey()) {\noriginStmtBuilder.append(column.getAggregationType().toString()).append(\"(\")\n.append(column.getName()).append(\")\").append(\",\");\n}\n}\noriginStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());\noriginStmtBuilder.append(\" from \").append(olapTable.getName()).append(\" group by 
\")\n.append(groupByString);\noriginStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());\nreturn originStmtBuilder.toString();\n}\npublic static List> listMaterializedViewStatus(\nString dbName,\nList materializedViews,\nList> singleTableMVs) {\nList> rowSets = Lists.newArrayList();\nMap mvNameTaskMap = Maps.newHashMap();\nif (!materializedViews.isEmpty()) {\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nTaskManager taskManager = globalStateMgr.getTaskManager();\nmvNameTaskMap = taskManager.showMVLastRefreshTaskRunStatus(dbName);\n}\nfor (MaterializedView mvTable : materializedViews) {\nlong mvId = mvTable.getId();\nTaskRunStatus taskStatus = mvNameTaskMap.get(TaskBuilder.getMvTaskName(mvId));\nArrayList resultRow = new ArrayList<>();\nresultRow.add(String.valueOf(mvId));\nresultRow.add(dbName);\nresultRow.add(mvTable.getName());\nMaterializedView.MvRefreshScheme refreshScheme = mvTable.getRefreshScheme();\nif (refreshScheme == null) {\nresultRow.add(\"UNKNOWN\");\n} else {\nresultRow.add(String.valueOf(mvTable.getRefreshScheme().getType()));\n}\nresultRow.add(String.valueOf(mvTable.isActive()));\nresultRow.add(String.valueOf(mvTable.getInactiveReason()));\nif (mvTable.getPartitionInfo() != null && mvTable.getPartitionInfo().getType() != null) {\nresultRow.add(mvTable.getPartitionInfo().getType().toString());\n} else {\nresultRow.add(\"\");\n}\nsetTaskRunStatus(resultRow, taskStatus);\nresultRow.add(String.valueOf(mvTable.getRowCount()));\nresultRow.add(mvTable.getMaterializedViewDdlStmt(true));\nrowSets.add(resultRow);\n}\nfor (Pair singleTableMV : singleTableMVs) {\nOlapTable olapTable = singleTableMV.first;\nMaterializedIndexMeta mvMeta = singleTableMV.second;\nlong mvId = mvMeta.getIndexId();\nArrayList resultRow = new ArrayList<>();\nresultRow.add(String.valueOf(mvId));\nresultRow.add(dbName);\nresultRow.add(olapTable.getIndexNameById(mvId));\nresultRow.add(\"ROLLUP\");\nresultRow.add(String.valueOf(true));\nresultRow.add(\"\");\nif (olapTable.getPartitionInfo() != null && olapTable.getPartitionInfo().getType() != null) {\nresultRow.add(olapTable.getPartitionInfo().getType().toString());\n} else {\nresultRow.add(\"\");\n}\nsetTaskRunStatus(resultRow, null);\nif (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) {\nPartition partition = olapTable.getPartitions().iterator().next();\nMaterializedIndex index = partition.getIndex(mvId);\nresultRow.add(String.valueOf(index.getRowCount()));\n} else {\nresultRow.add(String.valueOf(0L));\n}\nif (mvMeta.getOriginStmt() == null) {\nString mvName = olapTable.getIndexNameById(mvId);\nresultRow.add(buildCreateMVSql(olapTable, mvName, mvMeta));\n} else {\nresultRow.add(mvMeta.getOriginStmt().replace(\"\\n\", \"\").replace(\"\\t\", \"\")\n.replaceAll(\"[ ]+\", \" \"));\n}\nrowSets.add(resultRow);\n}\nreturn rowSets;\n}\nprivate static void setTaskRunStatus(List resultRow, TaskRunStatus taskStatus) {\nif (taskStatus != null) {\nresultRow.add(String.valueOf(taskStatus.getTaskId()));\nresultRow.add(Strings.nullToEmpty(taskStatus.getTaskName()));\nresultRow.add(String.valueOf(TimeUtils.longToTimeString(taskStatus.getCreateTime())));\nresultRow.add(String.valueOf(TimeUtils.longToTimeString(taskStatus.getFinishTime())));\nif (taskStatus.getFinishTime() > taskStatus.getCreateTime()) {\nresultRow.add(DebugUtil.DECIMAL_FORMAT_SCALE_3\n.format((taskStatus.getFinishTime() - taskStatus.getCreateTime()) / 1000D));\n} else 
{\nresultRow.add(\"0.000\");\n}\nresultRow.add(String.valueOf(taskStatus.getState()));\nMVTaskRunExtraMessage extraMessage = taskStatus.getMvTaskRunExtraMessage();\nresultRow.add(extraMessage.isForceRefresh() ? \"true\" : \"false\");\nresultRow.add(Strings.nullToEmpty(extraMessage.getPartitionStart()));\nresultRow.add(Strings.nullToEmpty(extraMessage.getPartitionEnd()));\nresultRow.add(Strings.nullToEmpty(extraMessage.getBasePartitionsToRefreshMapString()));\nresultRow.add(Strings.nullToEmpty(extraMessage.getMvPartitionsToRefreshString()));\nresultRow.add(String.valueOf(taskStatus.getErrorCode()));\nresultRow.add(Strings.nullToEmpty(taskStatus.getErrorMessage()));\n} else {\nresultRow.addAll(Collections.nCopies(13, \"\"));\n}\n}\nprivate void handleShowProcesslist() {\nShowProcesslistStmt showStmt = (ShowProcesslistStmt) stmt;\nList> rowSet = Lists.newArrayList();\nList threadInfos = connectContext.getConnectScheduler()\n.listConnection(connectContext.getQualifiedUser());\nlong nowMs = System.currentTimeMillis();\nfor (ConnectContext.ThreadInfo info : threadInfos) {\nList row = info.toRow(nowMs, showStmt.showFull());\nif (row != null) {\nrowSet.add(row);\n}\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate void handleEmpty() {\nresultSet = new ShowResultSet(stmt.getMetaData(), EMPTY_SET);\n}\nprivate void handleShowAuthor() {\nShowAuthorStmt showAuthorStmt = (ShowAuthorStmt) stmt;\nList> rowSet = Lists.newArrayList();\nresultSet = new ShowResultSet(showAuthorStmt.getMetaData(), rowSet);\n}\nprivate void handleShowEngines() {\nShowEnginesStmt showStmt = (ShowEnginesStmt) stmt;\nList> rowSet = Lists.newArrayList();\nrowSet.add(Lists.newArrayList(\"OLAP\", \"YES\", \"Default storage engine of StarRocks\", \"NO\", \"NO\", \"NO\"));\nrowSet.add(Lists.newArrayList(\"MySQL\", \"YES\", \"MySQL server which data is in it\", \"NO\", \"NO\", \"NO\"));\nrowSet.add(Lists.newArrayList(\"ELASTICSEARCH\", \"YES\", \"ELASTICSEARCH cluster which data is in it\", \"NO\", \"NO\",\n\"NO\"));\nrowSet.add(Lists.newArrayList(\"HIVE\", \"YES\", \"HIVE database which data is in it\", \"NO\", \"NO\", \"NO\"));\nrowSet.add(Lists.newArrayList(\"ICEBERG\", \"YES\", \"ICEBERG data lake which data is in it\", \"NO\", \"NO\", \"NO\"));\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate void handleShowFunctions() throws AnalysisException {\nShowFunctionsStmt showStmt = (ShowFunctionsStmt) stmt;\nList functions;\nif (showStmt.getIsBuiltin()) {\nfunctions = connectContext.getGlobalStateMgr().getBuiltinFunctions();\n} else if (showStmt.getIsGlobal()) {\nfunctions = connectContext.getGlobalStateMgr().getGlobalFunctionMgr().getFunctions();\n} else {\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\nfunctions = db.getFunctions();\n}\nList> rowSet = Lists.newArrayList();\nfor (Function function : functions) {\nList row = function.getInfo(showStmt.getIsVerbose());\nif (showStmt.getWild() == null || showStmt.like(function.functionName())) {\nif (showStmt.getIsGlobal()) {\nif (!PrivilegeActions.checkAnyActionOnGlobalFunction(connectContext, function.getFunctionId())) {\ncontinue;\n}\n} else if (!showStmt.getIsBuiltin()) {\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());\nif (!PrivilegeActions.checkAnyActionOnFunction(\nconnectContext.getCurrentUserIdentity(), connectContext.getCurrentRoleIds(),\ndb.getId(), function.getFunctionId())) 
{\ncontinue;\n}\n}\nrowSet.add(row);\n}\n}\nListComparator> comparator;\nOrderByPair orderByPair = new OrderByPair(0, false);\ncomparator = new ListComparator<>(orderByPair);\nrowSet.sort(comparator);\nList> resultRowSet = Lists.newArrayList();\nSet functionNameSet = new HashSet<>();\nfor (List row : rowSet) {\nList resultRow = Lists.newArrayList();\nif (functionNameSet.contains(row.get(0).toString())) {\ncontinue;\n}\nfor (Comparable column : row) {\nresultRow.add(column.toString());\n}\nresultRowSet.add(resultRow);\nfunctionNameSet.add(resultRow.get(0));\n}\nShowResultSetMetaData showMetaData = showStmt.getIsVerbose() ? showStmt.getMetaData() :\nShowResultSetMetaData.builder()\n.addColumn(new Column(\"Function Name\", ScalarType.createVarchar(256))).build();\nresultSet = new ShowResultSet(showMetaData, resultRowSet);\n}\nprivate void handleShowProc() throws AnalysisException {\nShowProcStmt showProcStmt = (ShowProcStmt) stmt;\nShowResultSetMetaData metaData = showProcStmt.getMetaData();\nProcNodeInterface procNode = showProcStmt.getNode();\nList> finalRows = procNode.fetchResult().getRows();\nresultSet = new ShowResultSet(metaData, finalRows);\n}\nprivate void handleShowDb() {\nShowDbStmt showDbStmt = (ShowDbStmt) stmt;\nList> rows = Lists.newArrayList();\nList dbNames;\nString catalogName;\nif (showDbStmt.getCatalogName() == null) {\ncatalogName = connectContext.getCurrentCatalog();\n} else {\ncatalogName = showDbStmt.getCatalogName();\n}\ndbNames = metadataMgr.listDbNames(catalogName);\nPatternMatcher matcher = null;\nif (showDbStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showDbStmt.getPattern(),\nCaseSensibility.DATABASE.getCaseSensibility());\n}\nSet dbNameSet = Sets.newTreeSet();\nfor (String dbName : dbNames) {\nif (matcher != null && !matcher.match(dbName)) {\ncontinue;\n}\nif (!PrivilegeActions.checkAnyActionOnOrInDb(connectContext, catalogName, dbName)) {\ncontinue;\n}\ndbNameSet.add(dbName);\n}\nfor (String dbName : dbNameSet) {\nrows.add(Lists.newArrayList(dbName));\n}\nresultSet = new ShowResultSet(showDbStmt.getMetaData(), rows);\n}\nprivate void handleShowTable() throws AnalysisException {\nShowTableStmt showTableStmt = (ShowTableStmt) stmt;\nList> rows = Lists.newArrayList();\nString catalogName = showTableStmt.getCatalogName();\nif (catalogName == null) {\ncatalogName = connectContext.getCurrentCatalog();\n}\nString dbName = showTableStmt.getDb();\nDatabase db = metadataMgr.getDb(catalogName, dbName);\nPatternMatcher matcher = null;\nif (showTableStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showTableStmt.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n}\nMap tableMap = Maps.newTreeMap();\nMetaUtils.checkDbNullAndReport(db, showTableStmt.getDb());\nif (CatalogMgr.isInternalCatalog(catalogName)) {\ndb.readLock();\ntry {\nfor (Table tbl : db.getTables()) {\nif (matcher != null && !matcher.match(tbl.getName())) {\ncontinue;\n}\nif (tbl.isView()) {\nif (!PrivilegeActions.checkAnyActionOnView(\nconnectContext, db.getFullName(), tbl.getName())) {\ncontinue;\n}\n} else if (tbl.isMaterializedView()) {\nif (!PrivilegeActions.checkAnyActionOnMaterializedView(\nconnectContext, db.getFullName(), tbl.getName())) {\ncontinue;\n}\n} else if (!PrivilegeActions.checkAnyActionOnTable(\nconnectContext, db.getFullName(), tbl.getName())) {\ncontinue;\n}\ntableMap.put(tbl.getName(), tbl.getMysqlType());\n}\n} finally {\ndb.readUnlock();\n}\n} else {\nList tableNames = metadataMgr.listTableNames(catalogName, dbName);\nfor 
(String tableName : tableNames) {\nif (matcher != null && !matcher.match(tableName)) {\ncontinue;\n}\nTable table = metadataMgr.getTable(catalogName, dbName, tableName);\nif (table == null) {\nLOG.warn(\"table {}.{}.{} does not exist\", catalogName, dbName, tableName);\ncontinue;\n}\nif (table.isView()) {\nif (!PrivilegeActions.checkAnyActionOnView(\nconnectContext, catalogName, db.getFullName(), table.getName())) {\ncontinue;\n}\n} else if (!PrivilegeActions.checkAnyActionOnTable(connectContext,\ncatalogName, dbName, tableName)) {\ncontinue;\n}\ntableMap.put(tableName, table.getMysqlType());\n}\n}\nfor (Map.Entry entry : tableMap.entrySet()) {\nif (showTableStmt.isVerbose()) {\nrows.add(Lists.newArrayList(entry.getKey(), entry.getValue()));\n} else {\nrows.add(Lists.newArrayList(entry.getKey()));\n}\n}\nresultSet = new ShowResultSet(showTableStmt.getMetaData(), rows);\n}\nprivate void handleShowTableStatus() {\nShowTableStatusStmt showStmt = (ShowTableStatusStmt) stmt;\nList> rows = Lists.newArrayList();\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());\nZoneId currentTimeZoneId = TimeUtils.getTimeZone().toZoneId();\nif (db != null) {\ndb.readLock();\ntry {\nPatternMatcher matcher = null;\nif (showStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n}\nfor (Table table : db.getTables()) {\nif (matcher != null && !matcher.match(table.getName())) {\ncontinue;\n}\nif (!PrivilegeActions.checkAnyActionOnTable(connectContext, db.getFullName(), table.getName())) {\ncontinue;\n}\nTTableInfo info = new TTableInfo();\nif (table.isNativeTableOrMaterializedView() || table.getType() == Table.TableType.OLAP_EXTERNAL) {\nInformationSchemaDataSource.genNormalTableInfo(table, info);\n} else {\nInformationSchemaDataSource.genDefaultConfigInfo(info);\n}\nList row = Lists.newArrayList();\nrow.add(table.getName());\nrow.add(table.getEngine());\nrow.add(null);\nrow.add(\"\");\nrow.add(String.valueOf(info.getTable_rows()));\nrow.add(String.valueOf(info.getAvg_row_length()));\nrow.add(String.valueOf(info.getData_length()));\nrow.add(null);\nrow.add(null);\nrow.add(null);\nrow.add(null);\nrow.add(DateUtils.formatTimeStampInSeconds(table.getCreateTime(), currentTimeZoneId));\nrow.add(DateUtils.formatTimeStampInSeconds(info.getUpdate_time(), currentTimeZoneId));\nrow.add(null);\nrow.add(InformationSchemaDataSource.UTF8_GENERAL_CI);\nrow.add(null);\nrow.add(\"\");\nrow.add(table.getDisplayComment());\nrows.add(row);\n}\n} finally {\ndb.readUnlock();\n}\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowVariables() {\nShowVariablesStmt showStmt = (ShowVariablesStmt) stmt;\nPatternMatcher matcher = null;\nif (showStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),\nCaseSensibility.VARIABLES.getCaseSensibility());\n}\nList> rows = VariableMgr.dump(showStmt.getType(), connectContext.getSessionVariable(), matcher);\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowCreateTable() throws AnalysisException {\nShowCreateTableStmt showStmt = (ShowCreateTableStmt) stmt;\nTableName tbl = showStmt.getTbl();\nString catalogName = tbl.getCatalog();\nif (catalogName == null) {\ncatalogName = connectContext.getCurrentCatalog();\n}\nif (CatalogMgr.isInternalCatalog(catalogName)) {\nshowCreateInternalCatalogTable(showStmt);\n} else {\nshowCreateExternalCatalogTable(tbl, catalogName);\n}\n}\nprivate 
void showCreateExternalCatalogTable(TableName tbl, String catalogName) {\nString dbName = tbl.getDb();\nString tableName = tbl.getTbl();\nMetadataMgr metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();\nDatabase db = metadataMgr.getDb(catalogName, dbName);\nif (db == null) {\nErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);\n}\nTable table = metadataMgr.getTable(catalogName, dbName, tableName);\nif (table == null) {\nErrorReport.reportSemanticException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);\n}\nStringBuilder createTableSql = new StringBuilder();\ncreateTableSql.append(\"CREATE TABLE \")\n.append(\"`\").append(tableName).append(\"`\")\n.append(\" (\\n\");\nList columns = table.getFullSchema().stream().map(\nthis::toMysqlDDL).collect(Collectors.toList());\ncreateTableSql.append(String.join(\",\\n\", columns))\n.append(\"\\n)\");\nif (table.getType() != JDBC && !table.isUnPartitioned()) {\ncreateTableSql.append(\"\\nPARTITION BY ( \")\n.append(String.join(\", \", table.getPartitionColumnNames()))\n.append(\" )\");\n}\nString location = null;\nif (table.isHiveTable() || table.isHudiTable()) {\nlocation = ((HiveMetaStoreTable) table).getTableLocation();\n} else if (table.isIcebergTable()) {\nlocation = ((IcebergTable) table).getTableLocation();\n} else if (table.isDeltalakeTable()) {\nlocation = ((DeltaLakeTable) table).getTableLocation();\n}\nif (!Strings.isNullOrEmpty(location)) {\ncreateTableSql.append(\"\\nPROPERTIES (\\\"location\\\" = \\\"\").append(location).append(\"\\\");\");\n}\nList> rows = Lists.newArrayList();\nrows.add(Lists.newArrayList(tableName, createTableSql.toString()));\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate String toMysqlDDL(Column column) {\nStringBuilder sb = new StringBuilder();\nsb.append(\" `\").append(column.getName()).append(\"` \");\nsb.append(column.getType().toSql());\nsb.append(\" DEFAULT NULL\");\nif (!Strings.isNullOrEmpty(column.getComment())) {\nsb.append(\" COMMENT \\\"\").append(column.getDisplayComment()).append(\"\\\"\");\n}\nreturn sb.toString();\n}\nprivate void showCreateInternalCatalogTable(ShowCreateTableStmt showStmt) throws AnalysisException {\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDb());\nList> rows = Lists.newArrayList();\ndb.readLock();\ntry {\nTable table = db.getTable(showStmt.getTable());\nif (table == null) {\nif (showStmt.getType() != ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTable());\n} else {\nfor (Table tbl : db.getTables()) {\nif (tbl.getType() == Table.TableType.OLAP) {\nOlapTable olapTable = (OlapTable) tbl;\nList visibleMaterializedViews =\nolapTable.getVisibleIndexMetas();\nfor (MaterializedIndexMeta mvMeta : visibleMaterializedViews) {\nif (olapTable.getIndexNameById(mvMeta.getIndexId()).equals(showStmt.getTable())) {\nif (mvMeta.getOriginStmt() == null) {\nString mvName = olapTable.getIndexNameById(mvMeta.getIndexId());\nrows.add(Lists.newArrayList(showStmt.getTable(), buildCreateMVSql(olapTable,\nmvName, mvMeta), \"utf8\", \"utf8_general_ci\"));\n} else {\nrows.add(Lists.newArrayList(showStmt.getTable(), mvMeta.getOriginStmt(),\n\"utf8\", \"utf8_general_ci\"));\n}\nresultSet = new ShowResultSet(ShowCreateTableStmt.getMaterializedViewMetaData(), rows);\nreturn;\n}\n}\n}\n}\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTable());\n}\n}\nList 
createTableStmt = Lists.newArrayList();\nGlobalStateMgr.getDdlStmt(table, createTableStmt, null, null, false, true /* hide password */);\nif (createTableStmt.isEmpty()) {\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\nreturn;\n}\nif (table instanceof View) {\nif (showStmt.getType() == ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),\nshowStmt.getTable(), \"MATERIALIZED VIEW\");\n}\nrows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0), \"utf8\", \"utf8_general_ci\"));\nresultSet = new ShowResultSet(ShowCreateTableStmt.getViewMetaData(), rows);\n} else if (table instanceof MaterializedView) {\nif (showStmt.getType() == ShowCreateTableStmt.CreateTableType.VIEW) {\nMaterializedView mv = (MaterializedView) table;\nString sb = \"CREATE VIEW `\" + table.getName() + \"` AS \" + mv.getViewDefineSql();\nrows.add(Lists.newArrayList(table.getName(), sb, \"utf8\", \"utf8_general_ci\"));\nresultSet = new ShowResultSet(ShowCreateTableStmt.getViewMetaData(), rows);\n} else {\nrows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));\nresultSet = new ShowResultSet(ShowCreateTableStmt.getMaterializedViewMetaData(), rows);\n}\n} else {\nif (showStmt.getType() != ShowCreateTableStmt.CreateTableType.TABLE) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),\nshowStmt.getTable(), showStmt.getType().getValue());\n}\nrows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\n} finally {\ndb.readUnlock();\n}\n}\nprivate void handleDescribe() throws AnalysisException {\nDescribeStmt describeStmt = (DescribeStmt) stmt;\nresultSet = new ShowResultSet(describeStmt.getMetaData(), describeStmt.getResultRows());\n}\nprivate void handleShowColumn() throws AnalysisException {\nShowColumnStmt showStmt = (ShowColumnStmt) stmt;\nList> rows = Lists.newArrayList();\nString catalogName = showStmt.getCatalog();\nif (catalogName == null) {\ncatalogName = connectContext.getCurrentCatalog();\n}\nString dbName = showStmt.getDb();\nDatabase db = metadataMgr.getDb(catalogName, dbName);\nMetaUtils.checkDbNullAndReport(db, showStmt.getDb());\ndb.readLock();\ntry {\nTable table = metadataMgr.getTable(catalogName, dbName, showStmt.getTable());\nif (table == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,\nshowStmt.getDb() + \".\" + showStmt.getTable());\n}\nPatternMatcher matcher = null;\nif (showStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),\nCaseSensibility.COLUMN.getCaseSensibility());\n}\nList columns = table.getBaseSchema();\nfor (Column col : columns) {\nif (matcher != null && !matcher.match(col.getName())) {\ncontinue;\n}\nfinal String columnName = col.getName();\nfinal String columnType = col.getType().canonicalName().toLowerCase();\nfinal String isAllowNull = col.isAllowNull() ? \"YES\" : \"NO\";\nfinal String isKey = col.isKey() ? \"YES\" : \"NO\";\nfinal String defaultValue = col.getMetaDefaultValue(Lists.newArrayList());\nfinal String aggType = col.getAggregationType() == null\n|| col.isAggregationTypeImplicit() ? 
\"\" : col.getAggregationType().toSql();\nif (showStmt.isVerbose()) {\nrows.add(Lists.newArrayList(columnName,\ncolumnType,\n\"\",\nisAllowNull,\nisKey,\ndefaultValue,\naggType,\n\"\",\ncol.getDisplayComment()));\n} else {\nrows.add(Lists.newArrayList(columnName,\ncolumnType,\nisAllowNull,\nisKey,\ndefaultValue,\naggType));\n}\n}\n} finally {\ndb.readUnlock();\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowIndex() throws AnalysisException {\nShowIndexStmt showStmt = (ShowIndexStmt) stmt;\nList> rows = Lists.newArrayList();\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\ndb.readLock();\ntry {\nTable table = db.getTable(showStmt.getTableName().getTbl());\nif (table == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,\ndb.getOriginName() + \".\" + showStmt.getTableName().toString());\n} else if (table instanceof OlapTable) {\nList indexes = ((OlapTable) table).getIndexes();\nfor (Index index : indexes) {\nrows.add(Lists.newArrayList(showStmt.getTableName().toString(), \"\", index.getIndexName(),\n\"\", String.join(\",\", index.getColumns()), \"\", \"\", \"\", \"\",\n\"\", index.getIndexType().name(), index.getComment()));\n}\n} else {\n}\n} finally {\ndb.readUnlock();\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleHelp() {\nHelpStmt helpStmt = (HelpStmt) stmt;\nString mark = helpStmt.getMask();\nHelpModule module = HelpModule.getInstance();\nHelpTopic topic = module.getTopic(mark);\nif (topic == null) {\nList topics = module.listTopicByKeyword(mark);\nif (topics.size() == 0) {\ntopic = null;\n} else if (topics.size() == 1) {\ntopic = module.getTopic(topics.get(0));\n} else {\nList> rows = Lists.newArrayList();\nfor (String str : topics) {\nrows.add(Lists.newArrayList(str, \"N\"));\n}\nList categories = module.listCategoryByName(mark);\nfor (String str : categories) {\nrows.add(Lists.newArrayList(str, \"Y\"));\n}\nresultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);\nreturn;\n}\n}\nif (topic != null) {\nresultSet = new ShowResultSet(helpStmt.getMetaData(), Lists.>newArrayList(\nLists.newArrayList(topic.getName(), topic.getDescription(), topic.getExample())));\n} else {\nList categories = module.listCategoryByName(mark);\nif (categories.isEmpty()) {\nresultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), EMPTY_SET);\n} else if (categories.size() > 1) {\nresultSet = new ShowResultSet(helpStmt.getCategoryMetaData(),\nLists.>newArrayList(categories));\n} else {\nList> rows = Lists.newArrayList();\nList topics = module.listTopicByCategory(categories.get(0));\nfor (String str : topics) {\nrows.add(Lists.newArrayList(str, \"N\"));\n}\nList subCategories = module.listCategoryByCategory(categories.get(0));\nfor (String str : subCategories) {\nrows.add(Lists.newArrayList(str, \"Y\"));\n}\nresultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);\n}\n}\n}\nprivate void handleShowLoad() throws AnalysisException {\nShowLoadStmt showStmt = (ShowLoadStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nlong dbId = -1;\nif (showStmt.isAll()) {\ndbId = -1;\n} else {\nDatabase db = globalStateMgr.getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\ndbId = db.getId();\n}\nSet statesValue = showStmt.getStates() == null ? 
null : showStmt.getStates().stream()\n.map(Enum::name)\n.collect(Collectors.toSet());\nList> loadInfos =\nglobalStateMgr.getLoadMgr().getLoadJobInfosByDb(dbId, showStmt.getLabelValue(),\nshowStmt.isAccurateMatch(),\nstatesValue);\nList orderByPairs = showStmt.getOrderByPairs();\nListComparator> comparator = null;\nif (orderByPairs != null) {\nOrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];\ncomparator = new ListComparator<>(orderByPairs.toArray(orderByPairArr));\n} else {\ncomparator = new ListComparator<>(0);\n}\nloadInfos.sort(comparator);\nList> rows = Lists.newArrayList();\nfor (List loadInfo : loadInfos) {\nList oneInfo = new ArrayList<>(loadInfo.size());\nfor (Comparable element : loadInfo) {\noneInfo.add(element.toString());\n}\nrows.add(oneInfo);\n}\nlong limit = showStmt.getLimit();\nlong offset = showStmt.getOffset() == -1L ? 0 : showStmt.getOffset();\nif (offset >= rows.size()) {\nrows = Lists.newArrayList();\n} else if (limit != -1L) {\nif ((limit + offset) < rows.size()) {\nrows = rows.subList((int) offset, (int) (limit + offset));\n} else {\nrows = rows.subList((int) offset, rows.size());\n}\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowRoutineLoad() throws AnalysisException {\nShowRoutineLoadStmt showRoutineLoadStmt = (ShowRoutineLoadStmt) stmt;\nList> rows = Lists.newArrayList();\nList routineLoadJobList;\ntry {\nroutineLoadJobList = GlobalStateMgr.getCurrentState().getRoutineLoadMgr()\n.getJob(showRoutineLoadStmt.getDbFullName(),\nshowRoutineLoadStmt.getName(),\nshowRoutineLoadStmt.isIncludeHistory());\n} catch (MetaNotFoundException e) {\nLOG.warn(e.getMessage(), e);\nthrow new AnalysisException(e.getMessage());\n}\nif (routineLoadJobList != null) {\nIterator iterator = routineLoadJobList.iterator();\nwhile (iterator.hasNext()) {\nRoutineLoadJob routineLoadJob = iterator.next();\ntry {\nif (!PrivilegeActions.checkAnyActionOnTable(connectContext,\nroutineLoadJob.getDbFullName(),\nroutineLoadJob.getTableName())) {\niterator.remove();\n}\n} catch (MetaNotFoundException e) {\n}\n}\n}\nif (routineLoadJobList != null) {\nRoutineLoadFunctionalExprProvider fProvider = showRoutineLoadStmt.getFunctionalExprProvider(this.connectContext);\nrows = routineLoadJobList.parallelStream()\n.filter(fProvider.getPredicateChain())\n.sorted(fProvider.getOrderComparator())\n.skip(fProvider.getSkipCount())\n.limit(fProvider.getLimitCount())\n.map(RoutineLoadJob::getShowInfo)\n.collect(Collectors.toList());\n}\nif (!Strings.isNullOrEmpty(showRoutineLoadStmt.getName()) && rows.isEmpty()) {\nthrow new AnalysisException(\"There is no running job named \" + showRoutineLoadStmt.getName()\n+ \" in db \" + showRoutineLoadStmt.getDbFullName()\n+ \". Include history? 
\" + showRoutineLoadStmt.isIncludeHistory()\n+ \", you can try `show all routine load job for job_name` if you want to list stopped and cancelled jobs\");\n}\nresultSet = new ShowResultSet(showRoutineLoadStmt.getMetaData(), rows);\n}\nprivate void handleShowRoutineLoadTask() throws AnalysisException {\nShowRoutineLoadTaskStmt showRoutineLoadTaskStmt = (ShowRoutineLoadTaskStmt) stmt;\nList> rows = Lists.newArrayList();\nRoutineLoadJob routineLoadJob;\ntry {\nroutineLoadJob =\nGlobalStateMgr.getCurrentState().getRoutineLoadMgr()\n.getJob(showRoutineLoadTaskStmt.getDbFullName(),\nshowRoutineLoadTaskStmt.getJobName());\n} catch (MetaNotFoundException e) {\nLOG.warn(e.getMessage(), e);\nthrow new AnalysisException(e.getMessage());\n}\nif (routineLoadJob == null) {\nthrow new AnalysisException(\"The job named \" + showRoutineLoadTaskStmt.getJobName() + \"does not exists \"\n+ \"or job state is stopped or cancelled\");\n}\nString dbFullName = showRoutineLoadTaskStmt.getDbFullName();\nString tableName;\ntry {\ntableName = routineLoadJob.getTableName();\n} catch (MetaNotFoundException e) {\nthrow new AnalysisException(\n\"The table metadata of job has been changed. The job will be cancelled automatically\", e);\n}\nif (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbFullName, tableName)) {\nresultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);\nreturn;\n}\nrows.addAll(routineLoadJob.getTasksShowInfo());\nresultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);\n}\nprivate void handleShowStreamLoad() throws AnalysisException {\nShowStreamLoadStmt showStreamLoadStmt = (ShowStreamLoadStmt) stmt;\nList> rows = Lists.newArrayList();\nList streamLoadTaskList;\ntry {\nstreamLoadTaskList = GlobalStateMgr.getCurrentState().getStreamLoadMgr()\n.getTask(showStreamLoadStmt.getDbFullName(),\nshowStreamLoadStmt.getName(),\nshowStreamLoadStmt.isIncludeHistory());\n} catch (MetaNotFoundException e) {\nLOG.warn(e.getMessage(), e);\nthrow new AnalysisException(e.getMessage());\n}\nif (streamLoadTaskList != null) {\nStreamLoadFunctionalExprProvider fProvider = showStreamLoadStmt.getFunctionalExprProvider(this.connectContext);\nrows = streamLoadTaskList.parallelStream()\n.filter(fProvider.getPredicateChain())\n.sorted(fProvider.getOrderComparator())\n.skip(fProvider.getSkipCount())\n.limit(fProvider.getLimitCount())\n.map(StreamLoadTask::getShowInfo)\n.collect(Collectors.toList());\n}\nif (!Strings.isNullOrEmpty(showStreamLoadStmt.getName()) && rows.isEmpty()) {\nthrow new AnalysisException(\"There is no label named \" + showStreamLoadStmt.getName()\n+ \" in db \" + showStreamLoadStmt.getDbFullName()\n+ \". Include history? 
\" + showStreamLoadStmt.isIncludeHistory());\n}\nresultSet = new ShowResultSet(showStreamLoadStmt.getMetaData(), rows);\n}\nprivate void handleShowUserProperty() throws AnalysisException {\nShowUserPropertyStmt showStmt = (ShowUserPropertyStmt) stmt;\nresultSet = new ShowResultSet(showStmt.getMetaData(), showStmt.getRows(connectContext));\n}\nprivate void handleShowDelete() throws AnalysisException {\nShowDeleteStmt showStmt = (ShowDeleteStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nDatabase db = globalStateMgr.getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\nlong dbId = db.getId();\nDeleteMgr deleteHandler = globalStateMgr.getDeleteMgr();\nList> deleteInfos = deleteHandler.getDeleteInfosByDb(dbId);\nList> rows = Lists.newArrayList();\nfor (List deleteInfo : deleteInfos) {\nList oneInfo = new ArrayList<>(deleteInfo.size());\nfor (Comparable element : deleteInfo) {\noneInfo.add(element.toString());\n}\nrows.add(oneInfo);\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowAlter() throws AnalysisException {\nShowAlterStmt showStmt = (ShowAlterStmt) stmt;\nProcNodeInterface procNodeI = showStmt.getNode();\nPreconditions.checkNotNull(procNodeI);\nList> rows;\nif (procNodeI instanceof SchemaChangeProcDir) {\nrows = ((SchemaChangeProcDir) procNodeI).fetchResultByFilter(showStmt.getFilterMap(),\nshowStmt.getOrderPairs(), showStmt.getLimitElement()).getRows();\n} else {\nrows = procNodeI.fetchResult().getRows();\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowCollation() {\nShowCollationStmt showStmt = (ShowCollationStmt) stmt;\nList> rows = Lists.newArrayList();\nList row = Lists.newArrayList();\nrow.add(\"utf8_general_ci\");\nrow.add(\"utf8\");\nrow.add(\"33\");\nrow.add(\"Yes\");\nrow.add(\"Yes\");\nrow.add(\"1\");\nrows.add(row);\nrow = Lists.newArrayList();\nrow.add(\"binary\");\nrow.add(\"binary\");\nrow.add(\"63\");\nrow.add(\"Yes\");\nrow.add(\"Yes\");\nrow.add(\"1\");\nrows.add(row);\nrow = Lists.newArrayList();\nrow.add(\"gbk_chinese_ci\");\nrow.add(\"gbk\");\nrow.add(\"28\");\nrow.add(\"Yes\");\nrow.add(\"Yes\");\nrow.add(\"1\");\nrows.add(row);\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowData() {\nShowDataStmt showStmt = (ShowDataStmt) stmt;\nString dbName = showStmt.getDbName();\nDatabase db = GlobalStateMgr.getCurrentState().getDb(dbName);\nif (db == null) {\nErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);\n}\ndb.readLock();\ntry {\nString tableName = showStmt.getTableName();\nList> totalRows = showStmt.getResultRows();\nif (tableName == null) {\nlong totalSize = 0;\nlong totalReplicaCount = 0;\nList tables = db.getTables();\nSortedSet
sortedTables = new TreeSet<>(Comparator.comparing(Table::getName));\nfor (Table table : tables) {\nif (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbName, table.getName())) {\ncontinue;\n}\nsortedTables.add(table);\n}\nfor (Table table : sortedTables) {\nif (!table.isNativeTableOrMaterializedView()) {\ncontinue;\n}\nOlapTable olapTable = (OlapTable) table;\nlong tableSize = olapTable.getDataSize();\nlong replicaCount = olapTable.getReplicaCount();\nPair tableSizePair = DebugUtil.getByteUint(tableSize);\nString readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(tableSizePair.first) + \" \"\n+ tableSizePair.second;\nList row = Arrays.asList(table.getName(), readableSize, String.valueOf(replicaCount));\ntotalRows.add(row);\ntotalSize += tableSize;\ntotalReplicaCount += replicaCount;\n}\nPair totalSizePair = DebugUtil.getByteUint(totalSize);\nString readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + \" \"\n+ totalSizePair.second;\nList total = Arrays.asList(\"Total\", readableSize, String.valueOf(totalReplicaCount));\ntotalRows.add(total);\nlong quota = db.getDataQuota();\nlong replicaQuota = db.getReplicaQuota();\nPair quotaPair = DebugUtil.getByteUint(quota);\nString readableQuota = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(quotaPair.first) + \" \"\n+ quotaPair.second;\nList quotaRow = Arrays.asList(\"Quota\", readableQuota, String.valueOf(replicaQuota));\ntotalRows.add(quotaRow);\nlong left = Math.max(0, quota - totalSize);\nlong replicaCountLeft = Math.max(0, replicaQuota - totalReplicaCount);\nPair leftPair = DebugUtil.getByteUint(left);\nString readableLeft = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(leftPair.first) + \" \"\n+ leftPair.second;\nList leftRow = Arrays.asList(\"Left\", readableLeft, String.valueOf(replicaCountLeft));\ntotalRows.add(leftRow);\n} else {\nif (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbName, tableName)) {\nErrorReport.reportSemanticException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, \"SHOW DATA\",\nconnectContext.getQualifiedUser(),\nconnectContext.getRemoteIP(),\ntableName);\n}\nTable table = db.getTable(tableName);\nif (table == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);\n}\nif (!table.isNativeTableOrMaterializedView()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, tableName);\n}\nOlapTable olapTable = (OlapTable) table;\nint i = 0;\nlong totalSize = 0;\nlong totalReplicaCount = 0;\nMap indexNames = olapTable.getIndexNameToId();\nMap sortedIndexNames = new TreeMap<>(indexNames);\nfor (Long indexId : sortedIndexNames.values()) {\nlong indexSize = 0;\nlong indexReplicaCount = 0;\nlong indexRowCount = 0;\nfor (Partition partition : olapTable.getAllPartitions()) {\nMaterializedIndex mIndex = partition.getIndex(indexId);\nindexSize += mIndex.getDataSize();\nindexReplicaCount += mIndex.getReplicaCount();\nindexRowCount += mIndex.getRowCount();\n}\nPair indexSizePair = DebugUtil.getByteUint(indexSize);\nString readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(indexSizePair.first) + \" \"\n+ indexSizePair.second;\nList row = null;\nif (i == 0) {\nrow = Arrays.asList(tableName,\nolapTable.getIndexNameById(indexId),\nreadableSize, String.valueOf(indexReplicaCount),\nString.valueOf(indexRowCount));\n} else {\nrow = Arrays.asList(\"\",\nolapTable.getIndexNameById(indexId),\nreadableSize, String.valueOf(indexReplicaCount),\nString.valueOf(indexRowCount));\n}\ntotalSize += indexSize;\ntotalReplicaCount += 
indexReplicaCount;\ntotalRows.add(row);\ni++;\n}\nPair totalSizePair = DebugUtil.getByteUint(totalSize);\nString readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + \" \"\n+ totalSizePair.second;\nList row = Arrays.asList(\"\", \"Total\", readableSize, String.valueOf(totalReplicaCount), \"\");\ntotalRows.add(row);\n}\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n} finally {\ndb.readUnlock();\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), showStmt.getResultRows());\n}\nprivate void handleShowPartitions() throws AnalysisException {\nShowPartitionsStmt showStmt = (ShowPartitionsStmt) stmt;\nProcNodeInterface procNodeI = showStmt.getNode();\nPreconditions.checkNotNull(procNodeI);\nList> rows = ((PartitionsProcDir) procNodeI).fetchResultByFilter(showStmt.getFilterMap(),\nshowStmt.getOrderByPairs(), showStmt.getLimitElement()).getRows();\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowTablet() throws AnalysisException {\nShowTabletStmt showStmt = (ShowTabletStmt) stmt;\nList> rows = Lists.newArrayList();\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nif (showStmt.isShowSingleTablet()) {\nlong tabletId = showStmt.getTabletId();\nTabletInvertedIndex invertedIndex = GlobalStateMgr.getCurrentInvertedIndex();\nTabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);\nLong dbId = tabletMeta != null ? tabletMeta.getDbId() : TabletInvertedIndex.NOT_EXIST_VALUE;\nString dbName = null;\nLong tableId = tabletMeta != null ? tabletMeta.getTableId() : TabletInvertedIndex.NOT_EXIST_VALUE;\nString tableName = null;\nLong partitionId = tabletMeta != null ? tabletMeta.getPartitionId() : TabletInvertedIndex.NOT_EXIST_VALUE;\nString partitionName = null;\nLong indexId = tabletMeta != null ? 
tabletMeta.getIndexId() : TabletInvertedIndex.NOT_EXIST_VALUE;\nString indexName = null;\nBoolean isSync = true;\ndo {\nDatabase db = globalStateMgr.getDb(dbId);\nif (db == null) {\nisSync = false;\nbreak;\n}\ndbName = db.getFullName();\ndb.readLock();\ntry {\nTable table = db.getTable(tableId);\nif (!(table instanceof OlapTable)) {\nisSync = false;\nbreak;\n}\ntableName = table.getName();\nOlapTable olapTable = (OlapTable) table;\nPartition partition = olapTable.getPartition(partitionId);\nif (partition == null) {\nisSync = false;\nbreak;\n}\npartitionName = partition.getName();\nMaterializedIndex index = partition.getIndex(indexId);\nif (index == null) {\nisSync = false;\nbreak;\n}\nindexName = olapTable.getIndexNameById(indexId);\nif (table.isCloudNativeTableOrMaterializedView()) {\nbreak;\n}\nLocalTablet tablet = (LocalTablet) index.getTablet(tabletId);\nif (tablet == null) {\nisSync = false;\nbreak;\n}\nList replicas = tablet.getImmutableReplicas();\nfor (Replica replica : replicas) {\nReplica tmp = invertedIndex.getReplica(tabletId, replica.getBackendId());\nif (tmp == null) {\nisSync = false;\nbreak;\n}\nif (tmp != replica) {\nisSync = false;\nbreak;\n}\n}\n} finally {\ndb.readUnlock();\n}\n} while (false);\nString detailCmd = String.format(\"SHOW PROC '/dbs/%d/%d/partitions/%d/%d/%d';\",\ndbId, tableId, partitionId, indexId, tabletId);\nrows.add(Lists.newArrayList(dbName, tableName, partitionName, indexName,\ndbId.toString(), tableId.toString(),\npartitionId.toString(), indexId.toString(),\nisSync.toString(), detailCmd));\n} else {\nDatabase db = globalStateMgr.getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\ndb.readLock();\ntry {\nTable table = db.getTable(showStmt.getTableName());\nif (table == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTableName());\n}\nif (!table.isNativeTableOrMaterializedView()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, showStmt.getTableName());\n}\nOlapTable olapTable = (OlapTable) table;\nlong sizeLimit = -1;\nif (showStmt.hasOffset() && showStmt.hasLimit()) {\nsizeLimit = showStmt.getOffset() + showStmt.getLimit();\n} else if (showStmt.hasLimit()) {\nsizeLimit = showStmt.getLimit();\n}\nboolean stop = false;\nCollection partitions = new ArrayList<>();\nif (showStmt.hasPartition()) {\nPartitionNames partitionNames = showStmt.getPartitionNames();\nfor (String partName : partitionNames.getPartitionNames()) {\nPartition partition = olapTable.getPartition(partName, partitionNames.isTemp());\nif (partition == null) {\nthrow new AnalysisException(\"Unknown partition: \" + partName);\n}\npartitions.add(partition);\n}\n} else {\npartitions = olapTable.getPartitions();\n}\nList> tabletInfos = new ArrayList<>();\nString indexName = showStmt.getIndexName();\nlong indexId = -1;\nif (indexName != null) {\nLong id = olapTable.getIndexIdByName(indexName);\nif (id == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getIndexName());\n}\nindexId = id;\n}\nfor (Partition partition : partitions) {\nif (stop) {\nbreak;\n}\nfor (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) {\nif (indexId > -1 && index.getId() != indexId) {\ncontinue;\n}\nif (olapTable.isCloudNativeTableOrMaterializedView()) {\nLakeTabletsProcNode procNode = new LakeTabletsProcNode(db, olapTable, index);\ntabletInfos.addAll(procNode.fetchComparableResult());\n} else {\nLocalTabletsProcDir procDir = new LocalTabletsProcDir(db, 
olapTable, index);\ntabletInfos.addAll(procDir.fetchComparableResult(\nshowStmt.getVersion(), showStmt.getBackendId(), showStmt.getReplicaState()));\n}\nif (sizeLimit > -1 && CollectionUtils.isEmpty(showStmt.getOrderByPairs())\n&& tabletInfos.size() >= sizeLimit) {\nstop = true;\nbreak;\n}\n}\n}\nList orderByPairs = showStmt.getOrderByPairs();\nListComparator> comparator;\nif (orderByPairs != null) {\nOrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];\ncomparator = new ListComparator<>(orderByPairs.toArray(orderByPairArr));\n} else {\ncomparator = new ListComparator<>(0, 1);\n}\ntabletInfos.sort(comparator);\nif (sizeLimit > -1 && tabletInfos.size() >= sizeLimit) {\ntabletInfos = tabletInfos.subList((int) showStmt.getOffset(), (int) sizeLimit);\n}\nfor (List tabletInfo : tabletInfos) {\nList oneTablet = new ArrayList<>(tabletInfo.size());\nfor (Comparable column : tabletInfo) {\noneTablet.add(column.toString());\n}\nrows.add(oneTablet);\n}\n} finally {\ndb.readUnlock();\n}\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowBroker() {\nShowBrokerStmt showStmt = (ShowBrokerStmt) stmt;\nList> rowSet = GlobalStateMgr.getCurrentState().getBrokerMgr().getBrokersInfo();\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate void handleShowResources() {\nShowResourcesStmt showStmt = (ShowResourcesStmt) stmt;\nList> rowSet = GlobalStateMgr.getCurrentState().getResourceMgr().getResourcesInfo();\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate void handleShowExport() throws AnalysisException {\nShowExportStmt showExportStmt = (ShowExportStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nDatabase db = globalStateMgr.getDb(showExportStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showExportStmt.getDbName());\nlong dbId = db.getId();\nExportMgr exportMgr = globalStateMgr.getExportMgr();\nSet states = null;\nExportJob.JobState state = showExportStmt.getJobState();\nif (state != null) {\nstates = Sets.newHashSet(state);\n}\nList> infos = exportMgr.getExportJobInfosByIdOrState(\ndbId, showExportStmt.getJobId(), states, showExportStmt.getQueryId(),\nshowExportStmt.getOrderByPairs(), showExportStmt.getLimit());\nresultSet = new ShowResultSet(showExportStmt.getMetaData(), infos);\n}\nprivate void handleShowBackends() {\nfinal ShowBackendsStmt showStmt = (ShowBackendsStmt) stmt;\nList> backendInfos = BackendsProcDir.getClusterBackendInfos();\nresultSet = new ShowResultSet(showStmt.getMetaData(), backendInfos);\n}\nprivate void handleShowFrontends() {\nfinal ShowFrontendsStmt showStmt = (ShowFrontendsStmt) stmt;\nList> infos = Lists.newArrayList();\nFrontendsProcNode.getFrontendsInfo(GlobalStateMgr.getCurrentState(), infos);\nresultSet = new ShowResultSet(showStmt.getMetaData(), infos);\n}\nprivate void handleShowRepositories() {\nfinal ShowRepositoriesStmt showStmt = (ShowRepositoriesStmt) stmt;\nList> repoInfos = GlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr().getReposInfo();\nresultSet = new ShowResultSet(showStmt.getMetaData(), repoInfos);\n}\nprivate void handleShowSnapshot() throws AnalysisException {\nfinal ShowSnapshotStmt showStmt = (ShowSnapshotStmt) stmt;\nRepository repo =\nGlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr().getRepo(showStmt.getRepoName());\nif (repo == null) {\nthrow new AnalysisException(\"Repository \" + showStmt.getRepoName() + \" does not exist\");\n}\nList> snapshotInfos = 
repo.getSnapshotInfos(showStmt.getSnapshotName(), showStmt.getTimestamp(),\nshowStmt.getSnapshotNames());\nresultSet = new ShowResultSet(showStmt.getMetaData(), snapshotInfos);\n}\nprivate void handleShowBackup() {\nShowBackupStmt showStmt = (ShowBackupStmt) stmt;\nDatabase filterDb = GlobalStateMgr.getCurrentState().getDb(showStmt.getDbName());\nList> infos = Lists.newArrayList();\nList dbs = Lists.newArrayList();\nif (filterDb == null) {\nfor (Map.Entry entry : GlobalStateMgr.getCurrentState().getIdToDb().entrySet()) {\ndbs.add(entry.getValue());\n}\n} else {\ndbs.add(filterDb);\n}\nfor (Database db : dbs) {\nAbstractJob jobI = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(db.getId());\nif (!(jobI instanceof BackupJob)) {\nresultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);\ncontinue;\n}\nBackupJob backupJob = (BackupJob) jobI;\nList tableRefs = backupJob.getTableRef();\nAtomicBoolean privilegeDeny = new AtomicBoolean(false);\ntableRefs.forEach(tableRef -> {\nTableName tableName = tableRef.getName();\nif (!PrivilegeActions.checkTableAction(connectContext, tableName.getDb(), tableName.getTbl(),\nPrivilegeType.EXPORT)) {\nprivilegeDeny.set(true);\n}\n});\nif (privilegeDeny.get()) {\nresultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);\nreturn;\n}\nList info = backupJob.getInfo();\ninfos.add(info);\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), infos);\n}\nprivate void handleShowRestore() {\nShowRestoreStmt showStmt = (ShowRestoreStmt) stmt;\nDatabase filterDb = GlobalStateMgr.getCurrentState().getDb(showStmt.getDbName());\nList> infos = Lists.newArrayList();\nList dbs = Lists.newArrayList();\nif (filterDb == null) {\nfor (Map.Entry entry : GlobalStateMgr.getCurrentState().getIdToDb().entrySet()) {\ndbs.add(entry.getValue());\n}\n} else {\ndbs.add(filterDb);\n}\nfor (Database db : dbs) {\nAbstractJob jobI = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(db.getId());\nif (!(jobI instanceof RestoreJob)) {\nresultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);\ncontinue;\n}\nRestoreJob restoreJob = (RestoreJob) jobI;\nList info = restoreJob.getInfo();\ninfos.add(info);\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), infos);\n}\nprivate String getCatalogNameById(long catalogId) throws MetaNotFoundException {\nif (CatalogMgr.isInternalCatalog(catalogId)) {\nreturn InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME;\n}\nCatalogMgr catalogMgr = GlobalStateMgr.getCurrentState().getCatalogMgr();\nOptional catalogOptional = catalogMgr.getCatalogById(catalogId);\nif (!catalogOptional.isPresent()) {\nthrow new MetaNotFoundException(\"cannot find catalog\");\n}\nreturn catalogOptional.get().getName();\n}\nprivate String getCatalogNameFromPEntry(ObjectType objectType, PrivilegeCollection.PrivilegeEntry privilegeEntry)\nthrows MetaNotFoundException {\nif (objectType.equals(ObjectType.CATALOG)) {\nCatalogPEntryObject catalogPEntryObject =\n(CatalogPEntryObject) privilegeEntry.getObject();\nif (catalogPEntryObject.getId() == PrivilegeBuiltinConstants.ALL_CATALOGS_ID) {\nreturn null;\n} else {\nreturn getCatalogNameById(catalogPEntryObject.getId());\n}\n} else if (objectType.equals(ObjectType.DATABASE)) {\nDbPEntryObject dbPEntryObject = (DbPEntryObject) privilegeEntry.getObject();\nif (dbPEntryObject.getCatalogId() == PrivilegeBuiltinConstants.ALL_CATALOGS_ID) {\nreturn null;\n}\nreturn getCatalogNameById(dbPEntryObject.getCatalogId());\n} else if (objectType.equals(ObjectType.TABLE)) {\nTablePEntryObject tablePEntryObject = 
(TablePEntryObject) privilegeEntry.getObject();\nif (tablePEntryObject.getCatalogId() == PrivilegeBuiltinConstants.ALL_CATALOGS_ID) {\nreturn null;\n}\nreturn getCatalogNameById(tablePEntryObject.getCatalogId());\n} else {\nreturn InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME;\n}\n}\nprivate List> privilegeToRowString(AuthorizationMgr authorizationManager, GrantRevokeClause userOrRoleName,\nMap>\ntypeToPrivilegeEntryList) throws PrivilegeException {\nList> infos = new ArrayList<>();\nfor (Map.Entry> typeToPrivilegeEntry\n: typeToPrivilegeEntryList.entrySet()) {\nfor (PrivilegeCollection.PrivilegeEntry privilegeEntry : typeToPrivilegeEntry.getValue()) {\nObjectType objectType = typeToPrivilegeEntry.getKey();\nString catalogName;\ntry {\ncatalogName = getCatalogNameFromPEntry(objectType, privilegeEntry);\n} catch (MetaNotFoundException e) {\ncontinue;\n}\nList info = new ArrayList<>();\ninfo.add(userOrRoleName.getRoleName() != null ?\nuserOrRoleName.getRoleName() : userOrRoleName.getUserIdentity().toString());\ninfo.add(catalogName);\nGrantPrivilegeStmt grantPrivilegeStmt = new GrantPrivilegeStmt(new ArrayList<>(), objectType.name(),\nuserOrRoleName, null, privilegeEntry.isWithGrantOption());\ngrantPrivilegeStmt.setObjectType(objectType);\nActionSet actionSet = privilegeEntry.getActionSet();\nList privList = authorizationManager.analyzeActionSet(objectType, actionSet);\ngrantPrivilegeStmt.setPrivilegeTypes(privList);\ngrantPrivilegeStmt.setObjectList(Lists.newArrayList(privilegeEntry.getObject()));\ntry {\ninfo.add(AstToSQLBuilder.toSQL(grantPrivilegeStmt));\ninfos.add(info);\n} catch (com.starrocks.sql.common.MetaNotFoundException e) {\n}\n}\n}\nreturn infos;\n}\nprivate void handleShowGrants() {\nShowGrantsStmt showStmt = (ShowGrantsStmt) stmt;\nAuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();\ntry {\nList> infos = new ArrayList<>();\nif (showStmt.getRole() != null) {\nList granteeRole = authorizationManager.getGranteeRoleDetailsForRole(showStmt.getRole());\nif (granteeRole != null) {\ninfos.add(granteeRole);\n}\nMap> typeToPrivilegeEntryList =\nauthorizationManager.getTypeToPrivilegeEntryListByRole(showStmt.getRole());\ninfos.addAll(privilegeToRowString(authorizationManager,\nnew GrantRevokeClause(null, showStmt.getRole()), typeToPrivilegeEntryList));\n} else {\nList granteeRole = authorizationManager.getGranteeRoleDetailsForUser(showStmt.getUserIdent());\nif (granteeRole != null) {\ninfos.add(granteeRole);\n}\nMap> typeToPrivilegeEntryList =\nauthorizationManager.getTypeToPrivilegeEntryListByUser(showStmt.getUserIdent());\ninfos.addAll(privilegeToRowString(authorizationManager,\nnew GrantRevokeClause(showStmt.getUserIdent(), null), typeToPrivilegeEntryList));\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), infos);\n} catch (PrivilegeException e) {\nthrow new SemanticException(e.getMessage());\n}\n}\nprivate void handleShowRoles() {\nShowRolesStmt showStmt = (ShowRolesStmt) stmt;\nList> infos = new ArrayList<>();\nAuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();\nList roles = authorizationManager.getAllRoles();\nroles.forEach(e -> infos.add(Lists.newArrayList(e,\nauthorizationManager.isBuiltinRole(e) ? 
\"true\" : \"false\",\nauthorizationManager.getRoleComment(e))));\nresultSet = new ShowResultSet(showStmt.getMetaData(), infos);\n}\nprivate void handleShowUser() {\nList> rowSet = Lists.newArrayList();\nShowUserStmt showUserStmt = (ShowUserStmt) stmt;\nif (showUserStmt.isAll()) {\nAuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();\nList users = authorizationManager.getAllUsers();\nusers.forEach(u -> rowSet.add(Lists.newArrayList(u)));\n} else {\nList row = Lists.newArrayList();\nrow.add(connectContext.getCurrentUserIdentity().toString());\nrowSet.add(row);\n}\nresultSet = new ShowResultSet(stmt.getMetaData(), rowSet);\n}\nprivate void handleAdminShowTabletStatus() throws AnalysisException {\nAdminShowReplicaStatusStmt showStmt = (AdminShowReplicaStatusStmt) stmt;\nList> results;\ntry {\nresults = MetadataViewer.getTabletStatus(showStmt);\n} catch (DdlException e) {\nthrow new AnalysisException(e.getMessage());\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), results);\n}\nprivate void handleAdminShowTabletDistribution() throws AnalysisException {\nAdminShowReplicaDistributionStmt showStmt = (AdminShowReplicaDistributionStmt) stmt;\nList> results;\ntry {\nresults = MetadataViewer.getTabletDistribution(showStmt);\n} catch (DdlException e) {\nthrow new AnalysisException(e.getMessage());\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), results);\n}\nprivate void handleAdminShowConfig() throws AnalysisException {\nAdminShowConfigStmt showStmt = (AdminShowConfigStmt) stmt;\nList> results;\ntry {\nPatternMatcher matcher = null;\nif (showStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),\nCaseSensibility.CONFIG.getCaseSensibility());\n}\nresults = ConfigBase.getConfigInfo(matcher);\nresults.sort(Comparator.comparing(o -> o.get(0)));\n} catch (DdlException e) {\nthrow new AnalysisException(e.getMessage());\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), results);\n}\nprivate void handleShowSmallFiles() throws AnalysisException {\nShowSmallFilesStmt showStmt = (ShowSmallFilesStmt) stmt;\nList> results;\ntry {\nresults = GlobalStateMgr.getCurrentState().getSmallFileMgr().getInfo(showStmt.getDbName());\n} catch (DdlException e) {\nthrow new AnalysisException(e.getMessage());\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), results);\n}\nprivate void handleShowDynamicPartition() {\nShowDynamicPartitionStmt showDynamicPartitionStmt = (ShowDynamicPartitionStmt) stmt;\nList> rows = Lists.newArrayList();\nDatabase db = connectContext.getGlobalStateMgr().getDb(showDynamicPartitionStmt.getDb());\nif (db != null) {\ndb.readLock();\ntry {\nfor (Table tbl : db.getTables()) {\nif (!(tbl instanceof OlapTable)) {\ncontinue;\n}\nDynamicPartitionScheduler dynamicPartitionScheduler =\nGlobalStateMgr.getCurrentState().getDynamicPartitionScheduler();\nOlapTable olapTable = (OlapTable) tbl;\nif (!olapTable.dynamicPartitionExists()) {\ndynamicPartitionScheduler.removeRuntimeInfo(olapTable.getName());\ncontinue;\n}\nif (!PrivilegeActions.checkAnyActionOnTable(ConnectContext.get(),\ndb.getFullName(), olapTable.getName())) {\ncontinue;\n}\nDynamicPartitionProperty dynamicPartitionProperty =\nolapTable.getTableProperty().getDynamicPartitionProperty();\nString tableName = olapTable.getName();\nint replicationNum = dynamicPartitionProperty.getReplicationNum();\nreplicationNum = (replicationNum == DynamicPartitionProperty.NOT_SET_REPLICATION_NUM) ?\nolapTable.getDefaultReplicationNum() : 
RunMode.defaultReplicationNum();\nrows.add(Lists.newArrayList(\ntableName,\nString.valueOf(dynamicPartitionProperty.getEnable()),\ndynamicPartitionProperty.getTimeUnit().toUpperCase(),\nString.valueOf(dynamicPartitionProperty.getStart()),\nString.valueOf(dynamicPartitionProperty.getEnd()),\ndynamicPartitionProperty.getPrefix(),\nString.valueOf(dynamicPartitionProperty.getBuckets()),\nString.valueOf(replicationNum),\ndynamicPartitionProperty.getStartOfInfo(),\ndynamicPartitionScheduler\n.getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_UPDATE_TIME),\ndynamicPartitionScheduler\n.getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_SCHEDULER_TIME),\ndynamicPartitionScheduler\n.getRuntimeInfo(tableName, DynamicPartitionScheduler.DYNAMIC_PARTITION_STATE),\ndynamicPartitionScheduler\n.getRuntimeInfo(tableName, DynamicPartitionScheduler.CREATE_PARTITION_MSG),\ndynamicPartitionScheduler\n.getRuntimeInfo(tableName, DynamicPartitionScheduler.DROP_PARTITION_MSG)));\n}\n} finally {\ndb.readUnlock();\n}\nresultSet = new ShowResultSet(showDynamicPartitionStmt.getMetaData(), rows);\n}\n}\nprivate void handleShowTransaction() throws AnalysisException {\nShowTransactionStmt showStmt = (ShowTransactionStmt) stmt;\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\nlong txnId = showStmt.getTxnId();\nGlobalTransactionMgr transactionMgr = GlobalStateMgr.getCurrentGlobalTransactionMgr();\nresultSet = new ShowResultSet(showStmt.getMetaData(), transactionMgr.getSingleTranInfo(db.getId(), txnId));\n}\nprivate void handleShowPlugins() {\nShowPluginsStmt pluginsStmt = (ShowPluginsStmt) stmt;\nList> rows = GlobalStateMgr.getCurrentPluginMgr().getPluginShowInfos();\nresultSet = new ShowResultSet(pluginsStmt.getMetaData(), rows);\n}\nprivate void handleShowCharset() {\nShowCharsetStmt showCharsetStmt = (ShowCharsetStmt) stmt;\nList> rows = Lists.newArrayList();\nList row = Lists.newArrayList();\nrow.add(\"utf8\");\nrow.add(\"UTF-8 Unicode\");\nrow.add(\"utf8_general_ci\");\nrow.add(\"3\");\nrows.add(row);\nresultSet = new ShowResultSet(showCharsetStmt.getMetaData(), rows);\n}\nprivate void handleShowSqlBlackListStmt() {\nShowSqlBlackListStmt showStmt = (ShowSqlBlackListStmt) stmt;\nList> rows = new ArrayList<>();\nfor (Map.Entry entry : SqlBlackList.getInstance().sqlBlackListMap.entrySet()) {\nList oneSql = new ArrayList<>();\noneSql.add(String.valueOf(entry.getValue().id));\noneSql.add(entry.getKey());\nrows.add(oneSql);\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowAnalyzeJob() {\nList jobs = connectContext.getGlobalStateMgr().getAnalyzeMgr().getAllAnalyzeJobList();\nList> rows = Lists.newArrayList();\njobs.sort(Comparator.comparing(AnalyzeJob::getId));\nfor (AnalyzeJob job : jobs) {\ntry {\nList result = ShowAnalyzeJobStmt.showAnalyzeJobs(connectContext, job);\nif (result != null) {\nrows.add(result);\n}\n} catch (MetaNotFoundException e) {\n}\n}\nrows = doPredicate(stmt, stmt.getMetaData(), rows);\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate void handleShowAnalyzeStatus() {\nList statuses = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()\n.getAnalyzeStatusMap().values());\nList> rows = Lists.newArrayList();\nstatuses.sort(Comparator.comparing(AnalyzeStatus::getId));\nfor (AnalyzeStatus status : statuses) {\ntry {\nList result = ShowAnalyzeStatusStmt.showAnalyzeStatus(connectContext, status);\nif (result != null) 
{\nrows.add(result);\n}\n} catch (MetaNotFoundException e) {\n// the referenced object no longer exists; skip this row\n}\n}\nrows = doPredicate(stmt, stmt.getMetaData(), rows);\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate void handleShowBasicStatsMeta() {\nList<BasicStatsMeta> metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()\n.getBasicStatsMetaMap().values());\nList<List<String>> rows = Lists.newArrayList();\nfor (BasicStatsMeta meta : metas) {\ntry {\nList<String> result = ShowBasicStatsMetaStmt.showBasicStatsMeta(connectContext, meta);\nif (result != null) {\nrows.add(result);\n}\n} catch (MetaNotFoundException e) {\n// the underlying table was dropped in the meantime; skip this row\n}\n}\nrows = doPredicate(stmt, stmt.getMetaData(), rows);\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate void handleShowHistogramStatsMeta() {\nList<HistogramStatsMeta> metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()\n.getHistogramStatsMetaMap().values());\nList<List<String>> rows = Lists.newArrayList();\nfor (HistogramStatsMeta meta : metas) {\ntry {\nList<String> result = ShowHistogramStatsMetaStmt.showHistogramStatsMeta(connectContext, meta);\nif (result != null) {\nrows.add(result);\n}\n} catch (MetaNotFoundException e) {\n// the underlying table was dropped in the meantime; skip this row\n}\n}\nrows = doPredicate(stmt, stmt.getMetaData(), rows);\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate void handleShowResourceGroup() throws AnalysisException {\nShowResourceGroupStmt showResourceGroupStmt = (ShowResourceGroupStmt) stmt;\nList<List<String>> rows =\nGlobalStateMgr.getCurrentState().getResourceGroupMgr().showResourceGroup(showResourceGroupStmt);\nresultSet = new ShowResultSet(showResourceGroupStmt.getMetaData(), rows);\n}\nprivate void handleShowCatalogs() {\nShowCatalogsStmt showCatalogsStmt = (ShowCatalogsStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nCatalogMgr catalogMgr = globalStateMgr.getCatalogMgr();\nList<List<String>> rowSet = catalogMgr.getCatalogsInfo().stream()\n.filter(row -> {\nif (!InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME.equals(row.get(0))) {\nreturn PrivilegeActions.checkAnyActionOnOrInCatalog(\nconnectContext.getCurrentUserIdentity(),\nconnectContext.getCurrentRoleIds(), row.get(0));\n}\nreturn true;\n}\n)\n.sorted(Comparator.comparing(o -> o.get(0))).collect(Collectors.toList());\nresultSet = new ShowResultSet(showCatalogsStmt.getMetaData(), rowSet);\n}\nprivate void handleShowWarehouses() {\nShowWarehousesStmt showStmt = (ShowWarehousesStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nWarehouseManager warehouseMgr = globalStateMgr.getWarehouseMgr();\nList<List<String>> rowSet = warehouseMgr.getWarehousesInfo().stream()\n.sorted(Comparator.comparing(o -> o.get(0))).collect(Collectors.toList());\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate void handleShowClusters() {\nShowClustersStmt showStmt = (ShowClustersStmt) stmt;\nWarehouseManager warehouseMgr = GlobalStateMgr.getCurrentWarehouseMgr();\nWarehouse warehouse = warehouseMgr.getWarehouse(showStmt.getWarehouseName());\nList<List<String>> rowSet = warehouse.getClusterInfo().stream()\n.sorted(Comparator.comparing(o -> o.get(0))).collect(Collectors.toList());\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate List<List<String>> doPredicate(ShowStmt showStmt,\nShowResultSetMetaData showResultSetMetaData,\nList<List<String>> rows) {\nPredicate predicate = showStmt.getPredicate();\nif (predicate == null) {\nreturn rows;\n}\nSlotRef slotRef = (SlotRef) predicate.getChild(0);\nStringLiteral stringLiteral = (StringLiteral) predicate.getChild(1);\nList<List<String>> returnRows = new ArrayList<>();\nBinaryPredicate binaryPredicate = (BinaryPredicate) 
predicate;\nint idx = showResultSetMetaData.getColumnIdx(slotRef.getColumnName());\nif (binaryPredicate.getOp().isEquivalence()) {\nfor (List row : rows) {\nif (row.get(idx).equals(stringLiteral.getStringValue())) {\nreturnRows.add(row);\n}\n}\n}\nreturn returnRows;\n}\nprivate void handleShowCreateExternalCatalog() throws AnalysisException {\nShowCreateExternalCatalogStmt showStmt = (ShowCreateExternalCatalogStmt) stmt;\nString catalogName = showStmt.getCatalogName();\nList> rows = Lists.newArrayList();\nif (InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME.equalsIgnoreCase(catalogName)) {\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\nreturn;\n}\nCatalog catalog = connectContext.getGlobalStateMgr().getCatalogMgr().getCatalogByName(catalogName);\nif (catalog == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_CATALOG_ERROR, catalogName);\n}\nStringBuilder createCatalogSql = new StringBuilder();\ncreateCatalogSql.append(\"CREATE EXTERNAL CATALOG \")\n.append(\"`\").append(catalogName).append(\"`\")\n.append(\"\\n\");\nString comment = catalog.getComment();\nif (comment != null) {\ncreateCatalogSql.append(\"comment \\\"\").append(catalog.getDisplayComment()).append(\"\\\"\\n\");\n}\nMap clonedConfig = new HashMap<>(catalog.getConfig());\nCloudCredentialUtil.maskCloudCredential(clonedConfig);\ncreateCatalogSql.append(\"PROPERTIES (\")\n.append(new PrintableMap<>(clonedConfig, \" = \", true, true))\n.append(\"\\n)\");\nrows.add(Lists.newArrayList(catalogName, createCatalogSql.toString()));\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate void handleShowStorageVolumes() throws DdlException {\nShowStorageVolumesStmt showStmt = (ShowStorageVolumesStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nStorageVolumeMgr storageVolumeMgr = globalStateMgr.getStorageVolumeMgr();\nList storageVolumeNames = storageVolumeMgr.listStorageVolumeNames();\nPatternMatcher matcher = null;\nList> rows = Lists.newArrayList();\nif (!showStmt.getPattern().isEmpty()) {\nmatcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n}\nPatternMatcher finalMatcher = matcher;\nstorageVolumeNames = storageVolumeNames.stream()\n.filter(storageVolumeName -> finalMatcher == null || finalMatcher.match(storageVolumeName))\n.filter(storageVolumeName -> PrivilegeActions.checkAnyActionOnStorageVolume(connectContext, storageVolumeName))\n.collect(Collectors.toList());\nfor (String storageVolumeName : storageVolumeNames) {\nrows.add(Lists.newArrayList(storageVolumeName));\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleDescStorageVolume() throws AnalysisException {\nDescStorageVolumeStmt desc = (DescStorageVolumeStmt) stmt;\nresultSet = new ShowResultSet(desc.getMetaData(), desc.getResultRows());\n}\n}", + "context_after": "class ShowExecutor {\nprivate static final Logger LOG = LogManager.getLogger(ShowExecutor.class);\nprivate static final List> EMPTY_SET = Lists.newArrayList();\nprivate final ConnectContext connectContext;\nprivate final ShowStmt stmt;\nprivate ShowResultSet resultSet;\nprivate final MetadataMgr metadataMgr;\npublic ShowExecutor(ConnectContext connectContext, ShowStmt stmt) {\nthis.connectContext = connectContext;\nthis.stmt = stmt;\nresultSet = null;\nmetadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();\n}\npublic ShowResultSet execute() throws AnalysisException, DdlException {\nif (stmt instanceof ShowMaterializedViewsStmt) 
{\nhandleShowMaterializedView();\n} else if (stmt instanceof ShowAuthorStmt) {\nhandleShowAuthor();\n} else if (stmt instanceof ShowProcStmt) {\nhandleShowProc();\n} else if (stmt instanceof HelpStmt) {\nhandleHelp();\n} else if (stmt instanceof ShowWarehousesStmt) {\nhandleShowWarehouses();\n} else if (stmt instanceof ShowClustersStmt) {\nhandleShowClusters();\n} else if (stmt instanceof ShowDbStmt) {\nhandleShowDb();\n} else if (stmt instanceof ShowTableStmt) {\nhandleShowTable();\n} else if (stmt instanceof ShowTableStatusStmt) {\nhandleShowTableStatus();\n} else if (stmt instanceof DescribeStmt) {\nhandleDescribe();\n} else if (stmt instanceof ShowCreateTableStmt) {\nhandleShowCreateTable();\n} else if (stmt instanceof ShowCreateDbStmt) {\nhandleShowCreateDb();\n} else if (stmt instanceof ShowProcesslistStmt) {\nhandleShowProcesslist();\n} else if (stmt instanceof ShowEnginesStmt) {\nhandleShowEngines();\n} else if (stmt instanceof ShowFunctionsStmt) {\nhandleShowFunctions();\n} else if (stmt instanceof ShowVariablesStmt) {\nhandleShowVariables();\n} else if (stmt instanceof ShowColumnStmt) {\nhandleShowColumn();\n} else if (stmt instanceof ShowLoadStmt) {\nhandleShowLoad();\n} else if (stmt instanceof ShowRoutineLoadStmt) {\nhandleShowRoutineLoad();\n} else if (stmt instanceof ShowRoutineLoadTaskStmt) {\nhandleShowRoutineLoadTask();\n} else if (stmt instanceof ShowStreamLoadStmt) {\nhandleShowStreamLoad();\n} else if (stmt instanceof ShowDeleteStmt) {\nhandleShowDelete();\n} else if (stmt instanceof ShowAlterStmt) {\nhandleShowAlter();\n} else if (stmt instanceof ShowUserPropertyStmt) {\nhandleShowUserProperty();\n} else if (stmt instanceof ShowDataStmt) {\nhandleShowData();\n} else if (stmt instanceof ShowCollationStmt) {\nhandleShowCollation();\n} else if (stmt instanceof ShowPartitionsStmt) {\nhandleShowPartitions();\n} else if (stmt instanceof ShowTabletStmt) {\nhandleShowTablet();\n} else if (stmt instanceof ShowBackupStmt) {\nhandleShowBackup();\n} else if (stmt instanceof ShowRestoreStmt) {\nhandleShowRestore();\n} else if (stmt instanceof ShowBrokerStmt) {\nhandleShowBroker();\n} else if (stmt instanceof ShowResourcesStmt) {\nhandleShowResources();\n} else if (stmt instanceof ShowExportStmt) {\nhandleShowExport();\n} else if (stmt instanceof ShowBackendsStmt) {\nhandleShowBackends();\n} else if (stmt instanceof ShowFrontendsStmt) {\nhandleShowFrontends();\n} else if (stmt instanceof ShowRepositoriesStmt) {\nhandleShowRepositories();\n} else if (stmt instanceof ShowSnapshotStmt) {\nhandleShowSnapshot();\n} else if (stmt instanceof ShowGrantsStmt) {\nhandleShowGrants();\n} else if (stmt instanceof ShowRolesStmt) {\nhandleShowRoles();\n} else if (stmt instanceof AdminShowReplicaStatusStmt) {\nhandleAdminShowTabletStatus();\n} else if (stmt instanceof AdminShowReplicaDistributionStmt) {\nhandleAdminShowTabletDistribution();\n} else if (stmt instanceof AdminShowConfigStmt) {\nhandleAdminShowConfig();\n} else if (stmt instanceof ShowSmallFilesStmt) {\nhandleShowSmallFiles();\n} else if (stmt instanceof ShowDynamicPartitionStmt) {\nhandleShowDynamicPartition();\n} else if (stmt instanceof ShowIndexStmt) {\nhandleShowIndex();\n} else if (stmt instanceof ShowTransactionStmt) {\nhandleShowTransaction();\n} else if (stmt instanceof ShowPluginsStmt) {\nhandleShowPlugins();\n} else if (stmt instanceof ShowSqlBlackListStmt) {\nhandleShowSqlBlackListStmt();\n} else if (stmt instanceof ShowAnalyzeJobStmt) {\nhandleShowAnalyzeJob();\n} else if (stmt instanceof ShowAnalyzeStatusStmt) 
{\nhandleShowAnalyzeStatus();\n} else if (stmt instanceof ShowBasicStatsMetaStmt) {\nhandleShowBasicStatsMeta();\n} else if (stmt instanceof ShowHistogramStatsMetaStmt) {\nhandleShowHistogramStatsMeta();\n} else if (stmt instanceof ShowResourceGroupStmt) {\nhandleShowResourceGroup();\n} else if (stmt instanceof ShowUserStmt) {\nhandleShowUser();\n} else if (stmt instanceof ShowCatalogsStmt) {\nhandleShowCatalogs();\n} else if (stmt instanceof ShowComputeNodesStmt) {\nhandleShowComputeNodes();\n} else if (stmt instanceof ShowAuthenticationStmt) {\nhandleShowAuthentication();\n} else if (stmt instanceof ShowCreateExternalCatalogStmt) {\nhandleShowCreateExternalCatalog();\n} else if (stmt instanceof ShowCharsetStmt) {\nhandleShowCharset();\n} else if (stmt instanceof ShowStorageVolumesStmt) {\nhandleShowStorageVolumes();\n} else if (stmt instanceof DescStorageVolumeStmt) {\nhandleDescStorageVolume();\n} else {\nhandleEmpty();\n}\nList<List<String>> rows = doPredicate(stmt, stmt.getMetaData(), resultSet.getResultRows());\nreturn new ShowResultSet(resultSet.getMetaData(), rows);\n}\nprivate void handleShowAuthentication() {\nfinal ShowAuthenticationStmt showAuthenticationStmt = (ShowAuthenticationStmt) stmt;\nAuthenticationMgr authenticationManager = GlobalStateMgr.getCurrentState().getAuthenticationMgr();\nList<List<String>> userAuthInfos = Lists.newArrayList();\nMap<UserIdentity, UserAuthenticationInfo> authenticationInfoMap = new HashMap<>();\nif (showAuthenticationStmt.isAll()) {\nauthenticationInfoMap.putAll(authenticationManager.getUserToAuthenticationInfo());\n} else {\nUserAuthenticationInfo userAuthenticationInfo;\nif (showAuthenticationStmt.getUserIdent() == null) {\nuserAuthenticationInfo = authenticationManager\n.getUserAuthenticationInfoByUserIdentity(connectContext.getCurrentUserIdentity());\n} else {\nuserAuthenticationInfo =\nauthenticationManager.getUserAuthenticationInfoByUserIdentity(showAuthenticationStmt.getUserIdent());\n}\nauthenticationInfoMap.put(showAuthenticationStmt.getUserIdent(), userAuthenticationInfo);\n}\nfor (Map.Entry<UserIdentity, UserAuthenticationInfo> entry : authenticationInfoMap.entrySet()) {\nUserAuthenticationInfo userAuthenticationInfo = entry.getValue();\nuserAuthInfos.add(Lists.newArrayList(\nentry.getKey().toString(),\nuserAuthenticationInfo.getPassword().length == 0 ? 
\"No\" : \"Yes\",\nuserAuthenticationInfo.getAuthPlugin(),\nuserAuthenticationInfo.getTextForAuthPlugin()));\n}\nresultSet = new ShowResultSet(showAuthenticationStmt.getMetaData(), userAuthInfos);\n}\nprivate void handleShowComputeNodes() {\nfinal ShowComputeNodesStmt showStmt = (ShowComputeNodesStmt) stmt;\nList> computeNodesInfos = ComputeNodeProcDir.getClusterComputeNodesInfos();\nresultSet = new ShowResultSet(showStmt.getMetaData(), computeNodesInfos);\n}\nprivate void handleShowMaterializedView() throws AnalysisException {\nShowMaterializedViewsStmt showMaterializedViewsStmt = (ShowMaterializedViewsStmt) stmt;\nString dbName = showMaterializedViewsStmt.getDb();\nDatabase db = GlobalStateMgr.getCurrentState().getDb(dbName);\nMetaUtils.checkDbNullAndReport(db, dbName);\nList materializedViews = Lists.newArrayList();\nList> singleTableMVs = Lists.newArrayList();\ndb.readLock();\ntry {\nPatternMatcher matcher = null;\nif (showMaterializedViewsStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showMaterializedViewsStmt.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n}\nfor (Table table : db.getTables()) {\nif (table.isMaterializedView()) {\nMaterializedView mvTable = (MaterializedView) table;\nif (matcher != null && !matcher.match(mvTable.getName())) {\ncontinue;\n}\nAtomicBoolean baseTableHasPrivilege = new AtomicBoolean(true);\nmvTable.getBaseTableInfos().forEach(baseTableInfo -> {\nTable baseTable = baseTableInfo.getTable();\nif (baseTable != null && baseTable.isNativeTableOrMaterializedView() && !PrivilegeActions.\ncheckTableAction(connectContext, baseTableInfo.getDbName(),\nbaseTableInfo.getTableName(),\nPrivilegeType.SELECT)) {\nbaseTableHasPrivilege.set(false);\n}\n});\nif (!baseTableHasPrivilege.get()) {\ncontinue;\n}\nif (!PrivilegeActions.checkAnyActionOnMaterializedView(connectContext, db.getFullName(),\nmvTable.getName())) {\ncontinue;\n}\nmaterializedViews.add(mvTable);\n} else if (Table.TableType.OLAP == table.getType()) {\nOlapTable olapTable = (OlapTable) table;\nList visibleMaterializedViews = olapTable.getVisibleIndexMetas();\nlong baseIdx = olapTable.getBaseIndexId();\nfor (MaterializedIndexMeta mvMeta : visibleMaterializedViews) {\nif (baseIdx == mvMeta.getIndexId()) {\ncontinue;\n}\nif (matcher != null && !matcher.match(olapTable.getIndexNameById(mvMeta.getIndexId()))) {\ncontinue;\n}\nsingleTableMVs.add(Pair.create(olapTable, mvMeta));\n}\n}\n}\nList> rowSets = listMaterializedViewStatus(dbName, materializedViews, singleTableMVs);\nresultSet = new ShowResultSet(stmt.getMetaData(), rowSets);\n} catch (Exception e) {\nLOG.warn(\"listMaterializedViews failed:\", e);\nthrow e;\n} finally {\ndb.readUnlock();\n}\n}\npublic static String buildCreateMVSql(OlapTable olapTable, String mv, MaterializedIndexMeta mvMeta) {\nStringBuilder originStmtBuilder = new StringBuilder(\n\"create materialized view \" + mv +\n\" as select \");\nString groupByString = \"\";\nfor (Column column : mvMeta.getSchema()) {\nif (column.isKey()) {\ngroupByString += column.getName() + \",\";\n}\n}\noriginStmtBuilder.append(groupByString);\nfor (Column column : mvMeta.getSchema()) {\nif (!column.isKey()) {\noriginStmtBuilder.append(column.getAggregationType().toString()).append(\"(\")\n.append(column.getName()).append(\")\").append(\",\");\n}\n}\noriginStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());\noriginStmtBuilder.append(\" from \").append(olapTable.getName()).append(\" group by 
\")\n.append(groupByString);\noriginStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());\nreturn originStmtBuilder.toString();\n}\npublic static List> listMaterializedViewStatus(\nString dbName,\nList materializedViews,\nList> singleTableMVs) {\nList> rowSets = Lists.newArrayList();\nMap mvNameTaskMap = Maps.newHashMap();\nif (!materializedViews.isEmpty()) {\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nTaskManager taskManager = globalStateMgr.getTaskManager();\nmvNameTaskMap = taskManager.showMVLastRefreshTaskRunStatus(dbName);\n}\nfor (MaterializedView mvTable : materializedViews) {\nlong mvId = mvTable.getId();\nTaskRunStatus taskStatus = mvNameTaskMap.get(TaskBuilder.getMvTaskName(mvId));\nArrayList resultRow = new ArrayList<>();\nresultRow.add(String.valueOf(mvId));\nresultRow.add(dbName);\nresultRow.add(mvTable.getName());\nMaterializedView.MvRefreshScheme refreshScheme = mvTable.getRefreshScheme();\nif (refreshScheme == null) {\nresultRow.add(\"UNKNOWN\");\n} else {\nresultRow.add(String.valueOf(mvTable.getRefreshScheme().getType()));\n}\nresultRow.add(String.valueOf(mvTable.isActive()));\nresultRow.add(String.valueOf(mvTable.getInactiveReason()));\nif (mvTable.getPartitionInfo() != null && mvTable.getPartitionInfo().getType() != null) {\nresultRow.add(mvTable.getPartitionInfo().getType().toString());\n} else {\nresultRow.add(\"\");\n}\nsetTaskRunStatus(resultRow, taskStatus);\nresultRow.add(String.valueOf(mvTable.getRowCount()));\nresultRow.add(mvTable.getMaterializedViewDdlStmt(true));\nrowSets.add(resultRow);\n}\nfor (Pair singleTableMV : singleTableMVs) {\nOlapTable olapTable = singleTableMV.first;\nMaterializedIndexMeta mvMeta = singleTableMV.second;\nlong mvId = mvMeta.getIndexId();\nArrayList resultRow = new ArrayList<>();\nresultRow.add(String.valueOf(mvId));\nresultRow.add(dbName);\nresultRow.add(olapTable.getIndexNameById(mvId));\nresultRow.add(\"ROLLUP\");\nresultRow.add(String.valueOf(true));\nresultRow.add(\"\");\nif (olapTable.getPartitionInfo() != null && olapTable.getPartitionInfo().getType() != null) {\nresultRow.add(olapTable.getPartitionInfo().getType().toString());\n} else {\nresultRow.add(\"\");\n}\nsetTaskRunStatus(resultRow, null);\nif (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) {\nPartition partition = olapTable.getPartitions().iterator().next();\nMaterializedIndex index = partition.getIndex(mvId);\nresultRow.add(String.valueOf(index.getRowCount()));\n} else {\nresultRow.add(String.valueOf(0L));\n}\nif (mvMeta.getOriginStmt() == null) {\nString mvName = olapTable.getIndexNameById(mvId);\nresultRow.add(buildCreateMVSql(olapTable, mvName, mvMeta));\n} else {\nresultRow.add(mvMeta.getOriginStmt().replace(\"\\n\", \"\").replace(\"\\t\", \"\")\n.replaceAll(\"[ ]+\", \" \"));\n}\nrowSets.add(resultRow);\n}\nreturn rowSets;\n}\nprivate static void setTaskRunStatus(List resultRow, TaskRunStatus taskStatus) {\nif (taskStatus != null) {\nresultRow.add(String.valueOf(taskStatus.getTaskId()));\nresultRow.add(Strings.nullToEmpty(taskStatus.getTaskName()));\nresultRow.add(String.valueOf(TimeUtils.longToTimeString(taskStatus.getCreateTime())));\nresultRow.add(String.valueOf(TimeUtils.longToTimeString(taskStatus.getFinishTime())));\nif (taskStatus.getFinishTime() > taskStatus.getCreateTime()) {\nresultRow.add(DebugUtil.DECIMAL_FORMAT_SCALE_3\n.format((taskStatus.getFinishTime() - taskStatus.getCreateTime()) / 1000D));\n} else 
{\nresultRow.add(\"0.000\");\n}\nresultRow.add(String.valueOf(taskStatus.getState()));\nMVTaskRunExtraMessage extraMessage = taskStatus.getMvTaskRunExtraMessage();\nresultRow.add(extraMessage.isForceRefresh() ? \"true\" : \"false\");\nresultRow.add(Strings.nullToEmpty(extraMessage.getPartitionStart()));\nresultRow.add(Strings.nullToEmpty(extraMessage.getPartitionEnd()));\nresultRow.add(Strings.nullToEmpty(extraMessage.getBasePartitionsToRefreshMapString()));\nresultRow.add(Strings.nullToEmpty(extraMessage.getMvPartitionsToRefreshString()));\nresultRow.add(String.valueOf(taskStatus.getErrorCode()));\nresultRow.add(Strings.nullToEmpty(taskStatus.getErrorMessage()));\n} else {\nresultRow.addAll(Collections.nCopies(13, \"\"));\n}\n}\nprivate void handleShowProcesslist() {\nShowProcesslistStmt showStmt = (ShowProcesslistStmt) stmt;\nList> rowSet = Lists.newArrayList();\nList threadInfos = connectContext.getConnectScheduler()\n.listConnection(connectContext.getQualifiedUser());\nlong nowMs = System.currentTimeMillis();\nfor (ConnectContext.ThreadInfo info : threadInfos) {\nList row = info.toRow(nowMs, showStmt.showFull());\nif (row != null) {\nrowSet.add(row);\n}\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate void handleEmpty() {\nresultSet = new ShowResultSet(stmt.getMetaData(), EMPTY_SET);\n}\nprivate void handleShowAuthor() {\nShowAuthorStmt showAuthorStmt = (ShowAuthorStmt) stmt;\nList> rowSet = Lists.newArrayList();\nresultSet = new ShowResultSet(showAuthorStmt.getMetaData(), rowSet);\n}\nprivate void handleShowEngines() {\nShowEnginesStmt showStmt = (ShowEnginesStmt) stmt;\nList> rowSet = Lists.newArrayList();\nrowSet.add(Lists.newArrayList(\"OLAP\", \"YES\", \"Default storage engine of StarRocks\", \"NO\", \"NO\", \"NO\"));\nrowSet.add(Lists.newArrayList(\"MySQL\", \"YES\", \"MySQL server which data is in it\", \"NO\", \"NO\", \"NO\"));\nrowSet.add(Lists.newArrayList(\"ELASTICSEARCH\", \"YES\", \"ELASTICSEARCH cluster which data is in it\", \"NO\", \"NO\",\n\"NO\"));\nrowSet.add(Lists.newArrayList(\"HIVE\", \"YES\", \"HIVE database which data is in it\", \"NO\", \"NO\", \"NO\"));\nrowSet.add(Lists.newArrayList(\"ICEBERG\", \"YES\", \"ICEBERG data lake which data is in it\", \"NO\", \"NO\", \"NO\"));\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate void handleShowFunctions() throws AnalysisException {\nShowFunctionsStmt showStmt = (ShowFunctionsStmt) stmt;\nList functions;\nif (showStmt.getIsBuiltin()) {\nfunctions = connectContext.getGlobalStateMgr().getBuiltinFunctions();\n} else if (showStmt.getIsGlobal()) {\nfunctions = connectContext.getGlobalStateMgr().getGlobalFunctionMgr().getFunctions();\n} else {\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\nfunctions = db.getFunctions();\n}\nList> rowSet = Lists.newArrayList();\nfor (Function function : functions) {\nList row = function.getInfo(showStmt.getIsVerbose());\nif (showStmt.getWild() == null || showStmt.like(function.functionName())) {\nif (showStmt.getIsGlobal()) {\nif (!PrivilegeActions.checkAnyActionOnGlobalFunction(connectContext, function.getFunctionId())) {\ncontinue;\n}\n} else if (!showStmt.getIsBuiltin()) {\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());\nif (!PrivilegeActions.checkAnyActionOnFunction(\nconnectContext.getCurrentUserIdentity(), connectContext.getCurrentRoleIds(),\ndb.getId(), function.getFunctionId())) 
{\ncontinue;\n}\n}\nrowSet.add(row);\n}\n}\nListComparator> comparator;\nOrderByPair orderByPair = new OrderByPair(0, false);\ncomparator = new ListComparator<>(orderByPair);\nrowSet.sort(comparator);\nList> resultRowSet = Lists.newArrayList();\nSet functionNameSet = new HashSet<>();\nfor (List row : rowSet) {\nList resultRow = Lists.newArrayList();\nif (functionNameSet.contains(row.get(0).toString())) {\ncontinue;\n}\nfor (Comparable column : row) {\nresultRow.add(column.toString());\n}\nresultRowSet.add(resultRow);\nfunctionNameSet.add(resultRow.get(0));\n}\nShowResultSetMetaData showMetaData = showStmt.getIsVerbose() ? showStmt.getMetaData() :\nShowResultSetMetaData.builder()\n.addColumn(new Column(\"Function Name\", ScalarType.createVarchar(256))).build();\nresultSet = new ShowResultSet(showMetaData, resultRowSet);\n}\nprivate void handleShowProc() throws AnalysisException {\nShowProcStmt showProcStmt = (ShowProcStmt) stmt;\nShowResultSetMetaData metaData = showProcStmt.getMetaData();\nProcNodeInterface procNode = showProcStmt.getNode();\nList> finalRows = procNode.fetchResult().getRows();\nresultSet = new ShowResultSet(metaData, finalRows);\n}\nprivate void handleShowDb() {\nShowDbStmt showDbStmt = (ShowDbStmt) stmt;\nList> rows = Lists.newArrayList();\nList dbNames;\nString catalogName;\nif (showDbStmt.getCatalogName() == null) {\ncatalogName = connectContext.getCurrentCatalog();\n} else {\ncatalogName = showDbStmt.getCatalogName();\n}\ndbNames = metadataMgr.listDbNames(catalogName);\nPatternMatcher matcher = null;\nif (showDbStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showDbStmt.getPattern(),\nCaseSensibility.DATABASE.getCaseSensibility());\n}\nSet dbNameSet = Sets.newTreeSet();\nfor (String dbName : dbNames) {\nif (matcher != null && !matcher.match(dbName)) {\ncontinue;\n}\nif (!PrivilegeActions.checkAnyActionOnOrInDb(connectContext, catalogName, dbName)) {\ncontinue;\n}\ndbNameSet.add(dbName);\n}\nfor (String dbName : dbNameSet) {\nrows.add(Lists.newArrayList(dbName));\n}\nresultSet = new ShowResultSet(showDbStmt.getMetaData(), rows);\n}\nprivate void handleShowTable() throws AnalysisException {\nShowTableStmt showTableStmt = (ShowTableStmt) stmt;\nList> rows = Lists.newArrayList();\nString catalogName = showTableStmt.getCatalogName();\nif (catalogName == null) {\ncatalogName = connectContext.getCurrentCatalog();\n}\nString dbName = showTableStmt.getDb();\nDatabase db = metadataMgr.getDb(catalogName, dbName);\nPatternMatcher matcher = null;\nif (showTableStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showTableStmt.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n}\nMap tableMap = Maps.newTreeMap();\nMetaUtils.checkDbNullAndReport(db, showTableStmt.getDb());\nif (CatalogMgr.isInternalCatalog(catalogName)) {\ndb.readLock();\ntry {\nfor (Table tbl : db.getTables()) {\nif (matcher != null && !matcher.match(tbl.getName())) {\ncontinue;\n}\nif (tbl.isView()) {\nif (!PrivilegeActions.checkAnyActionOnView(\nconnectContext, db.getFullName(), tbl.getName())) {\ncontinue;\n}\n} else if (tbl.isMaterializedView()) {\nif (!PrivilegeActions.checkAnyActionOnMaterializedView(\nconnectContext, db.getFullName(), tbl.getName())) {\ncontinue;\n}\n} else if (!PrivilegeActions.checkAnyActionOnTable(\nconnectContext, db.getFullName(), tbl.getName())) {\ncontinue;\n}\ntableMap.put(tbl.getName(), tbl.getMysqlType());\n}\n} finally {\ndb.readUnlock();\n}\n} else {\nList tableNames = metadataMgr.listTableNames(catalogName, dbName);\nfor 
(String tableName : tableNames) {\nif (matcher != null && !matcher.match(tableName)) {\ncontinue;\n}\nTable table = metadataMgr.getTable(catalogName, dbName, tableName);\nif (table == null) {\nLOG.warn(\"table {}.{}.{} does not exist\", catalogName, dbName, tableName);\ncontinue;\n}\nif (table.isView()) {\nif (!PrivilegeActions.checkAnyActionOnView(\nconnectContext, catalogName, db.getFullName(), table.getName())) {\ncontinue;\n}\n} else if (!PrivilegeActions.checkAnyActionOnTable(connectContext,\ncatalogName, dbName, tableName)) {\ncontinue;\n}\ntableMap.put(tableName, table.getMysqlType());\n}\n}\nfor (Map.Entry entry : tableMap.entrySet()) {\nif (showTableStmt.isVerbose()) {\nrows.add(Lists.newArrayList(entry.getKey(), entry.getValue()));\n} else {\nrows.add(Lists.newArrayList(entry.getKey()));\n}\n}\nresultSet = new ShowResultSet(showTableStmt.getMetaData(), rows);\n}\nprivate void handleShowTableStatus() {\nShowTableStatusStmt showStmt = (ShowTableStatusStmt) stmt;\nList> rows = Lists.newArrayList();\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());\nZoneId currentTimeZoneId = TimeUtils.getTimeZone().toZoneId();\nif (db != null) {\ndb.readLock();\ntry {\nPatternMatcher matcher = null;\nif (showStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n}\nfor (Table table : db.getTables()) {\nif (matcher != null && !matcher.match(table.getName())) {\ncontinue;\n}\nif (!PrivilegeActions.checkAnyActionOnTable(connectContext, db.getFullName(), table.getName())) {\ncontinue;\n}\nTTableInfo info = new TTableInfo();\nif (table.isNativeTableOrMaterializedView() || table.getType() == Table.TableType.OLAP_EXTERNAL) {\nInformationSchemaDataSource.genNormalTableInfo(table, info);\n} else {\nInformationSchemaDataSource.genDefaultConfigInfo(info);\n}\nList row = Lists.newArrayList();\nrow.add(table.getName());\nrow.add(table.getEngine());\nrow.add(null);\nrow.add(\"\");\nrow.add(String.valueOf(info.getTable_rows()));\nrow.add(String.valueOf(info.getAvg_row_length()));\nrow.add(String.valueOf(info.getData_length()));\nrow.add(null);\nrow.add(null);\nrow.add(null);\nrow.add(null);\nrow.add(DateUtils.formatTimeStampInSeconds(table.getCreateTime(), currentTimeZoneId));\nrow.add(DateUtils.formatTimeStampInSeconds(info.getUpdate_time(), currentTimeZoneId));\nrow.add(null);\nrow.add(InformationSchemaDataSource.UTF8_GENERAL_CI);\nrow.add(null);\nrow.add(\"\");\nrow.add(table.getDisplayComment());\nrows.add(row);\n}\n} finally {\ndb.readUnlock();\n}\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowVariables() {\nShowVariablesStmt showStmt = (ShowVariablesStmt) stmt;\nPatternMatcher matcher = null;\nif (showStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),\nCaseSensibility.VARIABLES.getCaseSensibility());\n}\nList> rows = VariableMgr.dump(showStmt.getType(), connectContext.getSessionVariable(), matcher);\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowCreateTable() throws AnalysisException {\nShowCreateTableStmt showStmt = (ShowCreateTableStmt) stmt;\nTableName tbl = showStmt.getTbl();\nString catalogName = tbl.getCatalog();\nif (catalogName == null) {\ncatalogName = connectContext.getCurrentCatalog();\n}\nif (CatalogMgr.isInternalCatalog(catalogName)) {\nshowCreateInternalCatalogTable(showStmt);\n} else {\nshowCreateExternalCatalogTable(tbl, catalogName);\n}\n}\nprivate 
void showCreateExternalCatalogTable(TableName tbl, String catalogName) {\nString dbName = tbl.getDb();\nString tableName = tbl.getTbl();\nMetadataMgr metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();\nDatabase db = metadataMgr.getDb(catalogName, dbName);\nif (db == null) {\nErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);\n}\nTable table = metadataMgr.getTable(catalogName, dbName, tableName);\nif (table == null) {\nErrorReport.reportSemanticException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);\n}\nStringBuilder createTableSql = new StringBuilder();\ncreateTableSql.append(\"CREATE TABLE \")\n.append(\"`\").append(tableName).append(\"`\")\n.append(\" (\\n\");\nList columns = table.getFullSchema().stream().map(\nthis::toMysqlDDL).collect(Collectors.toList());\ncreateTableSql.append(String.join(\",\\n\", columns))\n.append(\"\\n)\");\nif (table.getType() != JDBC && !table.isUnPartitioned()) {\ncreateTableSql.append(\"\\nPARTITION BY ( \")\n.append(String.join(\", \", table.getPartitionColumnNames()))\n.append(\" )\");\n}\nString location = null;\nif (table.isHiveTable() || table.isHudiTable()) {\nlocation = ((HiveMetaStoreTable) table).getTableLocation();\n} else if (table.isIcebergTable()) {\nlocation = ((IcebergTable) table).getTableLocation();\n} else if (table.isDeltalakeTable()) {\nlocation = ((DeltaLakeTable) table).getTableLocation();\n}\nif (!Strings.isNullOrEmpty(location)) {\ncreateTableSql.append(\"\\nPROPERTIES (\\\"location\\\" = \\\"\").append(location).append(\"\\\");\");\n}\nList> rows = Lists.newArrayList();\nrows.add(Lists.newArrayList(tableName, createTableSql.toString()));\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate String toMysqlDDL(Column column) {\nStringBuilder sb = new StringBuilder();\nsb.append(\" `\").append(column.getName()).append(\"` \");\nsb.append(column.getType().toSql());\nsb.append(\" DEFAULT NULL\");\nif (!Strings.isNullOrEmpty(column.getComment())) {\nsb.append(\" COMMENT \\\"\").append(column.getDisplayComment()).append(\"\\\"\");\n}\nreturn sb.toString();\n}\nprivate void showCreateInternalCatalogTable(ShowCreateTableStmt showStmt) throws AnalysisException {\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDb());\nList> rows = Lists.newArrayList();\ndb.readLock();\ntry {\nTable table = db.getTable(showStmt.getTable());\nif (table == null) {\nif (showStmt.getType() != ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTable());\n} else {\nfor (Table tbl : db.getTables()) {\nif (tbl.getType() == Table.TableType.OLAP) {\nOlapTable olapTable = (OlapTable) tbl;\nList visibleMaterializedViews =\nolapTable.getVisibleIndexMetas();\nfor (MaterializedIndexMeta mvMeta : visibleMaterializedViews) {\nif (olapTable.getIndexNameById(mvMeta.getIndexId()).equals(showStmt.getTable())) {\nif (mvMeta.getOriginStmt() == null) {\nString mvName = olapTable.getIndexNameById(mvMeta.getIndexId());\nrows.add(Lists.newArrayList(showStmt.getTable(), buildCreateMVSql(olapTable,\nmvName, mvMeta), \"utf8\", \"utf8_general_ci\"));\n} else {\nrows.add(Lists.newArrayList(showStmt.getTable(), mvMeta.getOriginStmt(),\n\"utf8\", \"utf8_general_ci\"));\n}\nresultSet = new ShowResultSet(ShowCreateTableStmt.getMaterializedViewMetaData(), rows);\nreturn;\n}\n}\n}\n}\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTable());\n}\n}\nList 
createTableStmt = Lists.newArrayList();\nGlobalStateMgr.getDdlStmt(table, createTableStmt, null, null, false, true /* hide password */);\nif (createTableStmt.isEmpty()) {\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\nreturn;\n}\nif (table instanceof View) {\nif (showStmt.getType() == ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),\nshowStmt.getTable(), \"MATERIALIZED VIEW\");\n}\nrows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0), \"utf8\", \"utf8_general_ci\"));\nresultSet = new ShowResultSet(ShowCreateTableStmt.getViewMetaData(), rows);\n} else if (table instanceof MaterializedView) {\nif (showStmt.getType() == ShowCreateTableStmt.CreateTableType.VIEW) {\nMaterializedView mv = (MaterializedView) table;\nString sb = \"CREATE VIEW `\" + table.getName() + \"` AS \" + mv.getViewDefineSql();\nrows.add(Lists.newArrayList(table.getName(), sb, \"utf8\", \"utf8_general_ci\"));\nresultSet = new ShowResultSet(ShowCreateTableStmt.getViewMetaData(), rows);\n} else {\nrows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));\nresultSet = new ShowResultSet(ShowCreateTableStmt.getMaterializedViewMetaData(), rows);\n}\n} else {\nif (showStmt.getType() != ShowCreateTableStmt.CreateTableType.TABLE) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),\nshowStmt.getTable(), showStmt.getType().getValue());\n}\nrows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\n} finally {\ndb.readUnlock();\n}\n}\nprivate void handleDescribe() throws AnalysisException {\nDescribeStmt describeStmt = (DescribeStmt) stmt;\nresultSet = new ShowResultSet(describeStmt.getMetaData(), describeStmt.getResultRows());\n}\nprivate void handleShowColumn() throws AnalysisException {\nShowColumnStmt showStmt = (ShowColumnStmt) stmt;\nList> rows = Lists.newArrayList();\nString catalogName = showStmt.getCatalog();\nif (catalogName == null) {\ncatalogName = connectContext.getCurrentCatalog();\n}\nString dbName = showStmt.getDb();\nDatabase db = metadataMgr.getDb(catalogName, dbName);\nMetaUtils.checkDbNullAndReport(db, showStmt.getDb());\ndb.readLock();\ntry {\nTable table = metadataMgr.getTable(catalogName, dbName, showStmt.getTable());\nif (table == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,\nshowStmt.getDb() + \".\" + showStmt.getTable());\n}\nPatternMatcher matcher = null;\nif (showStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),\nCaseSensibility.COLUMN.getCaseSensibility());\n}\nList columns = table.getBaseSchema();\nfor (Column col : columns) {\nif (matcher != null && !matcher.match(col.getName())) {\ncontinue;\n}\nfinal String columnName = col.getName();\nfinal String columnType = col.getType().canonicalName().toLowerCase();\nfinal String isAllowNull = col.isAllowNull() ? \"YES\" : \"NO\";\nfinal String isKey = col.isKey() ? \"YES\" : \"NO\";\nfinal String defaultValue = col.getMetaDefaultValue(Lists.newArrayList());\nfinal String aggType = col.getAggregationType() == null\n|| col.isAggregationTypeImplicit() ? 
\"\" : col.getAggregationType().toSql();\nif (showStmt.isVerbose()) {\nrows.add(Lists.newArrayList(columnName,\ncolumnType,\n\"\",\nisAllowNull,\nisKey,\ndefaultValue,\naggType,\n\"\",\ncol.getDisplayComment()));\n} else {\nrows.add(Lists.newArrayList(columnName,\ncolumnType,\nisAllowNull,\nisKey,\ndefaultValue,\naggType));\n}\n}\n} finally {\ndb.readUnlock();\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowIndex() throws AnalysisException {\nShowIndexStmt showStmt = (ShowIndexStmt) stmt;\nList> rows = Lists.newArrayList();\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\ndb.readLock();\ntry {\nTable table = db.getTable(showStmt.getTableName().getTbl());\nif (table == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,\ndb.getOriginName() + \".\" + showStmt.getTableName().toString());\n} else if (table instanceof OlapTable) {\nList indexes = ((OlapTable) table).getIndexes();\nfor (Index index : indexes) {\nrows.add(Lists.newArrayList(showStmt.getTableName().toString(), \"\", index.getIndexName(),\n\"\", String.join(\",\", index.getColumns()), \"\", \"\", \"\", \"\",\n\"\", index.getIndexType().name(), index.getComment()));\n}\n} else {\n}\n} finally {\ndb.readUnlock();\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleHelp() {\nHelpStmt helpStmt = (HelpStmt) stmt;\nString mark = helpStmt.getMask();\nHelpModule module = HelpModule.getInstance();\nHelpTopic topic = module.getTopic(mark);\nif (topic == null) {\nList topics = module.listTopicByKeyword(mark);\nif (topics.size() == 0) {\ntopic = null;\n} else if (topics.size() == 1) {\ntopic = module.getTopic(topics.get(0));\n} else {\nList> rows = Lists.newArrayList();\nfor (String str : topics) {\nrows.add(Lists.newArrayList(str, \"N\"));\n}\nList categories = module.listCategoryByName(mark);\nfor (String str : categories) {\nrows.add(Lists.newArrayList(str, \"Y\"));\n}\nresultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);\nreturn;\n}\n}\nif (topic != null) {\nresultSet = new ShowResultSet(helpStmt.getMetaData(), Lists.>newArrayList(\nLists.newArrayList(topic.getName(), topic.getDescription(), topic.getExample())));\n} else {\nList categories = module.listCategoryByName(mark);\nif (categories.isEmpty()) {\nresultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), EMPTY_SET);\n} else if (categories.size() > 1) {\nresultSet = new ShowResultSet(helpStmt.getCategoryMetaData(),\nLists.>newArrayList(categories));\n} else {\nList> rows = Lists.newArrayList();\nList topics = module.listTopicByCategory(categories.get(0));\nfor (String str : topics) {\nrows.add(Lists.newArrayList(str, \"N\"));\n}\nList subCategories = module.listCategoryByCategory(categories.get(0));\nfor (String str : subCategories) {\nrows.add(Lists.newArrayList(str, \"Y\"));\n}\nresultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);\n}\n}\n}\nprivate void handleShowLoad() throws AnalysisException {\nShowLoadStmt showStmt = (ShowLoadStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nlong dbId = -1;\nif (showStmt.isAll()) {\ndbId = -1;\n} else {\nDatabase db = globalStateMgr.getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\ndbId = db.getId();\n}\nSet statesValue = showStmt.getStates() == null ? 
null : showStmt.getStates().stream()\n.map(Enum::name)\n.collect(Collectors.toSet());\nList<List<Comparable>> loadInfos =\nglobalStateMgr.getLoadMgr().getLoadJobInfosByDb(dbId, showStmt.getLabelValue(),\nshowStmt.isAccurateMatch(),\nstatesValue);\nList<OrderByPair> orderByPairs = showStmt.getOrderByPairs();\nListComparator<List<Comparable>> comparator = null;\nif (orderByPairs != null) {\nOrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];\ncomparator = new ListComparator<>(orderByPairs.toArray(orderByPairArr));\n} else {\ncomparator = new ListComparator<>(0);\n}\nloadInfos.sort(comparator);\nList<List<String>> rows = Lists.newArrayList();\nfor (List<Comparable> loadInfo : loadInfos) {\nList<String> oneInfo = new ArrayList<>(loadInfo.size());\nfor (Comparable element : loadInfo) {\noneInfo.add(element.toString());\n}\nrows.add(oneInfo);\n}\nlong limit = showStmt.getLimit();\nlong offset = showStmt.getOffset() == -1L ? 0 : showStmt.getOffset();\nif (offset >= rows.size()) {\nrows = Lists.newArrayList();\n} else if (limit != -1L) {\nif ((limit + offset) < rows.size()) {\nrows = rows.subList((int) offset, (int) (limit + offset));\n} else {\nrows = rows.subList((int) offset, rows.size());\n}\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowRoutineLoad() throws AnalysisException {\nShowRoutineLoadStmt showRoutineLoadStmt = (ShowRoutineLoadStmt) stmt;\nList<List<String>> rows = Lists.newArrayList();\nList<RoutineLoadJob> routineLoadJobList;\ntry {\nroutineLoadJobList = GlobalStateMgr.getCurrentState().getRoutineLoadMgr()\n.getJob(showRoutineLoadStmt.getDbFullName(),\nshowRoutineLoadStmt.getName(),\nshowRoutineLoadStmt.isIncludeHistory());\n} catch (MetaNotFoundException e) {\nLOG.warn(e.getMessage(), e);\nthrow new AnalysisException(e.getMessage());\n}\nif (routineLoadJobList != null) {\nIterator<RoutineLoadJob> iterator = routineLoadJobList.iterator();\nwhile (iterator.hasNext()) {\nRoutineLoadJob routineLoadJob = iterator.next();\ntry {\nif (!PrivilegeActions.checkAnyActionOnTable(connectContext,\nroutineLoadJob.getDbFullName(),\nroutineLoadJob.getTableName())) {\niterator.remove();\n}\n} catch (MetaNotFoundException e) {\n// the job's table no longer exists; skip the privilege check and keep the row\n}\n}\n}\nif (routineLoadJobList != null) {\nRoutineLoadFunctionalExprProvider fProvider = showRoutineLoadStmt.getFunctionalExprProvider(this.connectContext);\nrows = routineLoadJobList.parallelStream()\n.filter(fProvider.getPredicateChain())\n.sorted(fProvider.getOrderComparator())\n.skip(fProvider.getSkipCount())\n.limit(fProvider.getLimitCount())\n.map(RoutineLoadJob::getShowInfo)\n.collect(Collectors.toList());\n}\nif (!Strings.isNullOrEmpty(showRoutineLoadStmt.getName()) && rows.isEmpty()) {\nthrow new AnalysisException(\"There is no running job named \" + showRoutineLoadStmt.getName()\n+ \" in db \" + showRoutineLoadStmt.getDbFullName()\n+ \". Include history? \" + showRoutineLoadStmt.isIncludeHistory()\n+ \", you can try `show all routine load job for job_name` if you want to list stopped and cancelled jobs\");\n}\nresultSet = new ShowResultSet(showRoutineLoadStmt.getMetaData(), rows);\n}\nprivate void handleShowRoutineLoadTask() throws AnalysisException {\nShowRoutineLoadTaskStmt showRoutineLoadTaskStmt = (ShowRoutineLoadTaskStmt) stmt;\nList<List<String>> rows = Lists.newArrayList();\nRoutineLoadJob routineLoadJob;\ntry {\nroutineLoadJob =\nGlobalStateMgr.getCurrentState().getRoutineLoadMgr()\n.getJob(showRoutineLoadTaskStmt.getDbFullName(),\nshowRoutineLoadTaskStmt.getJobName());\n} catch (MetaNotFoundException e) {\nLOG.warn(e.getMessage(), e);\nthrow new AnalysisException(e.getMessage());\n}\nif (routineLoadJob == null) {\nthrow new AnalysisException(\"The job named \" + showRoutineLoadTaskStmt.getJobName() + \" does not exist \"\n+ \"or its state is stopped or cancelled\");\n}\nString dbFullName = showRoutineLoadTaskStmt.getDbFullName();\nString tableName;\ntry {\ntableName = routineLoadJob.getTableName();\n} catch (MetaNotFoundException e) {\nthrow new AnalysisException(\n\"The table metadata of the job has been changed. The job will be cancelled automatically\", e);\n}\nif (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbFullName, tableName)) {\nresultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);\nreturn;\n}\nrows.addAll(routineLoadJob.getTasksShowInfo());\nresultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);\n}\nprivate void handleShowStreamLoad() throws AnalysisException {\nShowStreamLoadStmt showStreamLoadStmt = (ShowStreamLoadStmt) stmt;\nList<List<String>> rows = Lists.newArrayList();\nList<StreamLoadTask> streamLoadTaskList;\ntry {\nstreamLoadTaskList = GlobalStateMgr.getCurrentState().getStreamLoadMgr()\n.getTask(showStreamLoadStmt.getDbFullName(),\nshowStreamLoadStmt.getName(),\nshowStreamLoadStmt.isIncludeHistory());\n} catch (MetaNotFoundException e) {\nLOG.warn(e.getMessage(), e);\nthrow new AnalysisException(e.getMessage());\n}\nif (streamLoadTaskList != null) {\nStreamLoadFunctionalExprProvider fProvider = showStreamLoadStmt.getFunctionalExprProvider(this.connectContext);\nrows = streamLoadTaskList.parallelStream()\n.filter(fProvider.getPredicateChain())\n.sorted(fProvider.getOrderComparator())\n.skip(fProvider.getSkipCount())\n.limit(fProvider.getLimitCount())\n.map(StreamLoadTask::getShowInfo)\n.collect(Collectors.toList());\n}\nif (!Strings.isNullOrEmpty(showStreamLoadStmt.getName()) && rows.isEmpty()) {\nthrow new AnalysisException(\"There is no label named \" + showStreamLoadStmt.getName()\n+ \" in db \" + showStreamLoadStmt.getDbFullName()\n+ \". Include history? 
\" + showStreamLoadStmt.isIncludeHistory());\n}\nresultSet = new ShowResultSet(showStreamLoadStmt.getMetaData(), rows);\n}\nprivate void handleShowUserProperty() throws AnalysisException {\nShowUserPropertyStmt showStmt = (ShowUserPropertyStmt) stmt;\nresultSet = new ShowResultSet(showStmt.getMetaData(), showStmt.getRows(connectContext));\n}\nprivate void handleShowDelete() throws AnalysisException {\nShowDeleteStmt showStmt = (ShowDeleteStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nDatabase db = globalStateMgr.getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\nlong dbId = db.getId();\nDeleteMgr deleteHandler = globalStateMgr.getDeleteMgr();\nList> deleteInfos = deleteHandler.getDeleteInfosByDb(dbId);\nList> rows = Lists.newArrayList();\nfor (List deleteInfo : deleteInfos) {\nList oneInfo = new ArrayList<>(deleteInfo.size());\nfor (Comparable element : deleteInfo) {\noneInfo.add(element.toString());\n}\nrows.add(oneInfo);\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowAlter() throws AnalysisException {\nShowAlterStmt showStmt = (ShowAlterStmt) stmt;\nProcNodeInterface procNodeI = showStmt.getNode();\nPreconditions.checkNotNull(procNodeI);\nList> rows;\nif (procNodeI instanceof SchemaChangeProcDir) {\nrows = ((SchemaChangeProcDir) procNodeI).fetchResultByFilter(showStmt.getFilterMap(),\nshowStmt.getOrderPairs(), showStmt.getLimitElement()).getRows();\n} else {\nrows = procNodeI.fetchResult().getRows();\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowCollation() {\nShowCollationStmt showStmt = (ShowCollationStmt) stmt;\nList> rows = Lists.newArrayList();\nList row = Lists.newArrayList();\nrow.add(\"utf8_general_ci\");\nrow.add(\"utf8\");\nrow.add(\"33\");\nrow.add(\"Yes\");\nrow.add(\"Yes\");\nrow.add(\"1\");\nrows.add(row);\nrow = Lists.newArrayList();\nrow.add(\"binary\");\nrow.add(\"binary\");\nrow.add(\"63\");\nrow.add(\"Yes\");\nrow.add(\"Yes\");\nrow.add(\"1\");\nrows.add(row);\nrow = Lists.newArrayList();\nrow.add(\"gbk_chinese_ci\");\nrow.add(\"gbk\");\nrow.add(\"28\");\nrow.add(\"Yes\");\nrow.add(\"Yes\");\nrow.add(\"1\");\nrows.add(row);\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowData() {\nShowDataStmt showStmt = (ShowDataStmt) stmt;\nString dbName = showStmt.getDbName();\nDatabase db = GlobalStateMgr.getCurrentState().getDb(dbName);\nif (db == null) {\nErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);\n}\ndb.readLock();\ntry {\nString tableName = showStmt.getTableName();\nList> totalRows = showStmt.getResultRows();\nif (tableName == null) {\nlong totalSize = 0;\nlong totalReplicaCount = 0;\nList
tables = db.getTables();\nSortedSet
sortedTables = new TreeSet<>(Comparator.comparing(Table::getName));\nfor (Table table : tables) {\nif (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbName, table.getName())) {\ncontinue;\n}\nsortedTables.add(table);\n}\nfor (Table table : sortedTables) {\nif (!table.isNativeTableOrMaterializedView()) {\ncontinue;\n}\nOlapTable olapTable = (OlapTable) table;\nlong tableSize = olapTable.getDataSize();\nlong replicaCount = olapTable.getReplicaCount();\nPair tableSizePair = DebugUtil.getByteUint(tableSize);\nString readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(tableSizePair.first) + \" \"\n+ tableSizePair.second;\nList row = Arrays.asList(table.getName(), readableSize, String.valueOf(replicaCount));\ntotalRows.add(row);\ntotalSize += tableSize;\ntotalReplicaCount += replicaCount;\n}\nPair totalSizePair = DebugUtil.getByteUint(totalSize);\nString readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + \" \"\n+ totalSizePair.second;\nList total = Arrays.asList(\"Total\", readableSize, String.valueOf(totalReplicaCount));\ntotalRows.add(total);\nlong quota = db.getDataQuota();\nlong replicaQuota = db.getReplicaQuota();\nPair quotaPair = DebugUtil.getByteUint(quota);\nString readableQuota = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(quotaPair.first) + \" \"\n+ quotaPair.second;\nList quotaRow = Arrays.asList(\"Quota\", readableQuota, String.valueOf(replicaQuota));\ntotalRows.add(quotaRow);\nlong left = Math.max(0, quota - totalSize);\nlong replicaCountLeft = Math.max(0, replicaQuota - totalReplicaCount);\nPair leftPair = DebugUtil.getByteUint(left);\nString readableLeft = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(leftPair.first) + \" \"\n+ leftPair.second;\nList leftRow = Arrays.asList(\"Left\", readableLeft, String.valueOf(replicaCountLeft));\ntotalRows.add(leftRow);\n} else {\nif (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbName, tableName)) {\nErrorReport.reportSemanticException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, \"SHOW DATA\",\nconnectContext.getQualifiedUser(),\nconnectContext.getRemoteIP(),\ntableName);\n}\nTable table = db.getTable(tableName);\nif (table == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);\n}\nif (!table.isNativeTableOrMaterializedView()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, tableName);\n}\nOlapTable olapTable = (OlapTable) table;\nint i = 0;\nlong totalSize = 0;\nlong totalReplicaCount = 0;\nMap indexNames = olapTable.getIndexNameToId();\nMap sortedIndexNames = new TreeMap<>(indexNames);\nfor (Long indexId : sortedIndexNames.values()) {\nlong indexSize = 0;\nlong indexReplicaCount = 0;\nlong indexRowCount = 0;\nfor (Partition partition : olapTable.getAllPartitions()) {\nMaterializedIndex mIndex = partition.getIndex(indexId);\nindexSize += mIndex.getDataSize();\nindexReplicaCount += mIndex.getReplicaCount();\nindexRowCount += mIndex.getRowCount();\n}\nPair indexSizePair = DebugUtil.getByteUint(indexSize);\nString readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(indexSizePair.first) + \" \"\n+ indexSizePair.second;\nList row = null;\nif (i == 0) {\nrow = Arrays.asList(tableName,\nolapTable.getIndexNameById(indexId),\nreadableSize, String.valueOf(indexReplicaCount),\nString.valueOf(indexRowCount));\n} else {\nrow = Arrays.asList(\"\",\nolapTable.getIndexNameById(indexId),\nreadableSize, String.valueOf(indexReplicaCount),\nString.valueOf(indexRowCount));\n}\ntotalSize += indexSize;\ntotalReplicaCount += 
indexReplicaCount;\ntotalRows.add(row);\ni++;\n}\nPair totalSizePair = DebugUtil.getByteUint(totalSize);\nString readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + \" \"\n+ totalSizePair.second;\nList row = Arrays.asList(\"\", \"Total\", readableSize, String.valueOf(totalReplicaCount), \"\");\ntotalRows.add(row);\n}\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n} finally {\ndb.readUnlock();\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), showStmt.getResultRows());\n}\nprivate void handleShowPartitions() throws AnalysisException {\nShowPartitionsStmt showStmt = (ShowPartitionsStmt) stmt;\nProcNodeInterface procNodeI = showStmt.getNode();\nPreconditions.checkNotNull(procNodeI);\nList> rows = ((PartitionsProcDir) procNodeI).fetchResultByFilter(showStmt.getFilterMap(),\nshowStmt.getOrderByPairs(), showStmt.getLimitElement()).getRows();\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowTablet() throws AnalysisException {\nShowTabletStmt showStmt = (ShowTabletStmt) stmt;\nList> rows = Lists.newArrayList();\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nif (showStmt.isShowSingleTablet()) {\nlong tabletId = showStmt.getTabletId();\nTabletInvertedIndex invertedIndex = GlobalStateMgr.getCurrentInvertedIndex();\nTabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);\nLong dbId = tabletMeta != null ? tabletMeta.getDbId() : TabletInvertedIndex.NOT_EXIST_VALUE;\nString dbName = null;\nLong tableId = tabletMeta != null ? tabletMeta.getTableId() : TabletInvertedIndex.NOT_EXIST_VALUE;\nString tableName = null;\nLong partitionId = tabletMeta != null ? tabletMeta.getPartitionId() : TabletInvertedIndex.NOT_EXIST_VALUE;\nString partitionName = null;\nLong indexId = tabletMeta != null ? 
tabletMeta.getIndexId() : TabletInvertedIndex.NOT_EXIST_VALUE;\nString indexName = null;\nBoolean isSync = true;\ndo {\nDatabase db = globalStateMgr.getDb(dbId);\nif (db == null) {\nisSync = false;\nbreak;\n}\ndbName = db.getFullName();\ndb.readLock();\ntry {\nTable table = db.getTable(tableId);\nif (!(table instanceof OlapTable)) {\nisSync = false;\nbreak;\n}\ntableName = table.getName();\nOlapTable olapTable = (OlapTable) table;\nPartition partition = olapTable.getPartition(partitionId);\nif (partition == null) {\nisSync = false;\nbreak;\n}\npartitionName = partition.getName();\nMaterializedIndex index = partition.getIndex(indexId);\nif (index == null) {\nisSync = false;\nbreak;\n}\nindexName = olapTable.getIndexNameById(indexId);\nif (table.isCloudNativeTableOrMaterializedView()) {\nbreak;\n}\nLocalTablet tablet = (LocalTablet) index.getTablet(tabletId);\nif (tablet == null) {\nisSync = false;\nbreak;\n}\nList replicas = tablet.getImmutableReplicas();\nfor (Replica replica : replicas) {\nReplica tmp = invertedIndex.getReplica(tabletId, replica.getBackendId());\nif (tmp == null) {\nisSync = false;\nbreak;\n}\nif (tmp != replica) {\nisSync = false;\nbreak;\n}\n}\n} finally {\ndb.readUnlock();\n}\n} while (false);\nString detailCmd = String.format(\"SHOW PROC '/dbs/%d/%d/partitions/%d/%d/%d';\",\ndbId, tableId, partitionId, indexId, tabletId);\nrows.add(Lists.newArrayList(dbName, tableName, partitionName, indexName,\ndbId.toString(), tableId.toString(),\npartitionId.toString(), indexId.toString(),\nisSync.toString(), detailCmd));\n} else {\nDatabase db = globalStateMgr.getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\ndb.readLock();\ntry {\nTable table = db.getTable(showStmt.getTableName());\nif (table == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTableName());\n}\nif (!table.isNativeTableOrMaterializedView()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, showStmt.getTableName());\n}\nOlapTable olapTable = (OlapTable) table;\nlong sizeLimit = -1;\nif (showStmt.hasOffset() && showStmt.hasLimit()) {\nsizeLimit = showStmt.getOffset() + showStmt.getLimit();\n} else if (showStmt.hasLimit()) {\nsizeLimit = showStmt.getLimit();\n}\nboolean stop = false;\nCollection partitions = new ArrayList<>();\nif (showStmt.hasPartition()) {\nPartitionNames partitionNames = showStmt.getPartitionNames();\nfor (String partName : partitionNames.getPartitionNames()) {\nPartition partition = olapTable.getPartition(partName, partitionNames.isTemp());\nif (partition == null) {\nthrow new AnalysisException(\"Unknown partition: \" + partName);\n}\npartitions.add(partition);\n}\n} else {\npartitions = olapTable.getPartitions();\n}\nList> tabletInfos = new ArrayList<>();\nString indexName = showStmt.getIndexName();\nlong indexId = -1;\nif (indexName != null) {\nLong id = olapTable.getIndexIdByName(indexName);\nif (id == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getIndexName());\n}\nindexId = id;\n}\nfor (Partition partition : partitions) {\nif (stop) {\nbreak;\n}\nfor (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) {\nif (indexId > -1 && index.getId() != indexId) {\ncontinue;\n}\nif (olapTable.isCloudNativeTableOrMaterializedView()) {\nLakeTabletsProcNode procNode = new LakeTabletsProcNode(db, olapTable, index);\ntabletInfos.addAll(procNode.fetchComparableResult());\n} else {\nLocalTabletsProcDir procDir = new LocalTabletsProcDir(db, 
olapTable, index);\ntabletInfos.addAll(procDir.fetchComparableResult(\nshowStmt.getVersion(), showStmt.getBackendId(), showStmt.getReplicaState()));\n}\nif (sizeLimit > -1 && CollectionUtils.isEmpty(showStmt.getOrderByPairs())\n&& tabletInfos.size() >= sizeLimit) {\nstop = true;\nbreak;\n}\n}\n}\nList orderByPairs = showStmt.getOrderByPairs();\nListComparator> comparator;\nif (orderByPairs != null) {\nOrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];\ncomparator = new ListComparator<>(orderByPairs.toArray(orderByPairArr));\n} else {\ncomparator = new ListComparator<>(0, 1);\n}\ntabletInfos.sort(comparator);\nif (sizeLimit > -1 && tabletInfos.size() >= sizeLimit) {\ntabletInfos = tabletInfos.subList((int) showStmt.getOffset(), (int) sizeLimit);\n}\nfor (List tabletInfo : tabletInfos) {\nList oneTablet = new ArrayList<>(tabletInfo.size());\nfor (Comparable column : tabletInfo) {\noneTablet.add(column.toString());\n}\nrows.add(oneTablet);\n}\n} finally {\ndb.readUnlock();\n}\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowBroker() {\nShowBrokerStmt showStmt = (ShowBrokerStmt) stmt;\nList> rowSet = GlobalStateMgr.getCurrentState().getBrokerMgr().getBrokersInfo();\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate void handleShowResources() {\nShowResourcesStmt showStmt = (ShowResourcesStmt) stmt;\nList> rowSet = GlobalStateMgr.getCurrentState().getResourceMgr().getResourcesInfo();\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate void handleShowExport() throws AnalysisException {\nShowExportStmt showExportStmt = (ShowExportStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nDatabase db = globalStateMgr.getDb(showExportStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showExportStmt.getDbName());\nlong dbId = db.getId();\nExportMgr exportMgr = globalStateMgr.getExportMgr();\nSet states = null;\nExportJob.JobState state = showExportStmt.getJobState();\nif (state != null) {\nstates = Sets.newHashSet(state);\n}\nList> infos = exportMgr.getExportJobInfosByIdOrState(\ndbId, showExportStmt.getJobId(), states, showExportStmt.getQueryId(),\nshowExportStmt.getOrderByPairs(), showExportStmt.getLimit());\nresultSet = new ShowResultSet(showExportStmt.getMetaData(), infos);\n}\nprivate void handleShowBackends() {\nfinal ShowBackendsStmt showStmt = (ShowBackendsStmt) stmt;\nList> backendInfos = BackendsProcDir.getClusterBackendInfos();\nresultSet = new ShowResultSet(showStmt.getMetaData(), backendInfos);\n}\nprivate void handleShowFrontends() {\nfinal ShowFrontendsStmt showStmt = (ShowFrontendsStmt) stmt;\nList> infos = Lists.newArrayList();\nFrontendsProcNode.getFrontendsInfo(GlobalStateMgr.getCurrentState(), infos);\nresultSet = new ShowResultSet(showStmt.getMetaData(), infos);\n}\nprivate void handleShowRepositories() {\nfinal ShowRepositoriesStmt showStmt = (ShowRepositoriesStmt) stmt;\nList> repoInfos = GlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr().getReposInfo();\nresultSet = new ShowResultSet(showStmt.getMetaData(), repoInfos);\n}\nprivate void handleShowSnapshot() throws AnalysisException {\nfinal ShowSnapshotStmt showStmt = (ShowSnapshotStmt) stmt;\nRepository repo =\nGlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr().getRepo(showStmt.getRepoName());\nif (repo == null) {\nthrow new AnalysisException(\"Repository \" + showStmt.getRepoName() + \" does not exist\");\n}\nList> snapshotInfos = 
repo.getSnapshotInfos(showStmt.getSnapshotName(), showStmt.getTimestamp(),\nshowStmt.getSnapshotNames());\nresultSet = new ShowResultSet(showStmt.getMetaData(), snapshotInfos);\n}\nprivate void handleShowBackup() {\nShowBackupStmt showStmt = (ShowBackupStmt) stmt;\nDatabase filterDb = GlobalStateMgr.getCurrentState().getDb(showStmt.getDbName());\nList> infos = Lists.newArrayList();\nList dbs = Lists.newArrayList();\nif (filterDb == null) {\nfor (Map.Entry entry : GlobalStateMgr.getCurrentState().getIdToDb().entrySet()) {\ndbs.add(entry.getValue());\n}\n} else {\ndbs.add(filterDb);\n}\nfor (Database db : dbs) {\nAbstractJob jobI = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(db.getId());\nif (!(jobI instanceof BackupJob)) {\nresultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);\ncontinue;\n}\nBackupJob backupJob = (BackupJob) jobI;\nList tableRefs = backupJob.getTableRef();\nAtomicBoolean privilegeDeny = new AtomicBoolean(false);\ntableRefs.forEach(tableRef -> {\nTableName tableName = tableRef.getName();\nif (!PrivilegeActions.checkTableAction(connectContext, tableName.getDb(), tableName.getTbl(),\nPrivilegeType.EXPORT)) {\nprivilegeDeny.set(true);\n}\n});\nif (privilegeDeny.get()) {\nresultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);\nreturn;\n}\nList info = backupJob.getInfo();\ninfos.add(info);\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), infos);\n}\nprivate void handleShowRestore() {\nShowRestoreStmt showStmt = (ShowRestoreStmt) stmt;\nDatabase filterDb = GlobalStateMgr.getCurrentState().getDb(showStmt.getDbName());\nList> infos = Lists.newArrayList();\nList dbs = Lists.newArrayList();\nif (filterDb == null) {\nfor (Map.Entry entry : GlobalStateMgr.getCurrentState().getIdToDb().entrySet()) {\ndbs.add(entry.getValue());\n}\n} else {\ndbs.add(filterDb);\n}\nfor (Database db : dbs) {\nAbstractJob jobI = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(db.getId());\nif (!(jobI instanceof RestoreJob)) {\nresultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);\ncontinue;\n}\nRestoreJob restoreJob = (RestoreJob) jobI;\nList info = restoreJob.getInfo();\ninfos.add(info);\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), infos);\n}\nprivate String getCatalogNameById(long catalogId) throws MetaNotFoundException {\nif (CatalogMgr.isInternalCatalog(catalogId)) {\nreturn InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME;\n}\nCatalogMgr catalogMgr = GlobalStateMgr.getCurrentState().getCatalogMgr();\nOptional catalogOptional = catalogMgr.getCatalogById(catalogId);\nif (!catalogOptional.isPresent()) {\nthrow new MetaNotFoundException(\"cannot find catalog\");\n}\nreturn catalogOptional.get().getName();\n}\nprivate String getCatalogNameFromPEntry(ObjectType objectType, PrivilegeCollection.PrivilegeEntry privilegeEntry)\nthrows MetaNotFoundException {\nif (objectType.equals(ObjectType.CATALOG)) {\nCatalogPEntryObject catalogPEntryObject =\n(CatalogPEntryObject) privilegeEntry.getObject();\nif (catalogPEntryObject.getId() == PrivilegeBuiltinConstants.ALL_CATALOGS_ID) {\nreturn null;\n} else {\nreturn getCatalogNameById(catalogPEntryObject.getId());\n}\n} else if (objectType.equals(ObjectType.DATABASE)) {\nDbPEntryObject dbPEntryObject = (DbPEntryObject) privilegeEntry.getObject();\nif (dbPEntryObject.getCatalogId() == PrivilegeBuiltinConstants.ALL_CATALOGS_ID) {\nreturn null;\n}\nreturn getCatalogNameById(dbPEntryObject.getCatalogId());\n} else if (objectType.equals(ObjectType.TABLE)) {\nTablePEntryObject tablePEntryObject = 
(TablePEntryObject) privilegeEntry.getObject();\nif (tablePEntryObject.getCatalogId() == PrivilegeBuiltinConstants.ALL_CATALOGS_ID) {\nreturn null;\n}\nreturn getCatalogNameById(tablePEntryObject.getCatalogId());\n} else {\nreturn InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME;\n}\n}\nprivate List> privilegeToRowString(AuthorizationMgr authorizationManager, GrantRevokeClause userOrRoleName,\nMap>\ntypeToPrivilegeEntryList) throws PrivilegeException {\nList> infos = new ArrayList<>();\nfor (Map.Entry> typeToPrivilegeEntry\n: typeToPrivilegeEntryList.entrySet()) {\nfor (PrivilegeCollection.PrivilegeEntry privilegeEntry : typeToPrivilegeEntry.getValue()) {\nObjectType objectType = typeToPrivilegeEntry.getKey();\nString catalogName;\ntry {\ncatalogName = getCatalogNameFromPEntry(objectType, privilegeEntry);\n} catch (MetaNotFoundException e) {\ncontinue;\n}\nList info = new ArrayList<>();\ninfo.add(userOrRoleName.getRoleName() != null ?\nuserOrRoleName.getRoleName() : userOrRoleName.getUserIdentity().toString());\ninfo.add(catalogName);\nGrantPrivilegeStmt grantPrivilegeStmt = new GrantPrivilegeStmt(new ArrayList<>(), objectType.name(),\nuserOrRoleName, null, privilegeEntry.isWithGrantOption());\ngrantPrivilegeStmt.setObjectType(objectType);\nActionSet actionSet = privilegeEntry.getActionSet();\nList privList = authorizationManager.analyzeActionSet(objectType, actionSet);\ngrantPrivilegeStmt.setPrivilegeTypes(privList);\ngrantPrivilegeStmt.setObjectList(Lists.newArrayList(privilegeEntry.getObject()));\ntry {\ninfo.add(AstToSQLBuilder.toSQL(grantPrivilegeStmt));\ninfos.add(info);\n} catch (com.starrocks.sql.common.MetaNotFoundException e) {\n}\n}\n}\nreturn infos;\n}\nprivate void handleShowGrants() {\nShowGrantsStmt showStmt = (ShowGrantsStmt) stmt;\nAuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();\ntry {\nList> infos = new ArrayList<>();\nif (showStmt.getRole() != null) {\nList granteeRole = authorizationManager.getGranteeRoleDetailsForRole(showStmt.getRole());\nif (granteeRole != null) {\ninfos.add(granteeRole);\n}\nMap> typeToPrivilegeEntryList =\nauthorizationManager.getTypeToPrivilegeEntryListByRole(showStmt.getRole());\ninfos.addAll(privilegeToRowString(authorizationManager,\nnew GrantRevokeClause(null, showStmt.getRole()), typeToPrivilegeEntryList));\n} else {\nList granteeRole = authorizationManager.getGranteeRoleDetailsForUser(showStmt.getUserIdent());\nif (granteeRole != null) {\ninfos.add(granteeRole);\n}\nMap> typeToPrivilegeEntryList =\nauthorizationManager.getTypeToPrivilegeEntryListByUser(showStmt.getUserIdent());\ninfos.addAll(privilegeToRowString(authorizationManager,\nnew GrantRevokeClause(showStmt.getUserIdent(), null), typeToPrivilegeEntryList));\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), infos);\n} catch (PrivilegeException e) {\nthrow new SemanticException(e.getMessage());\n}\n}\nprivate void handleShowRoles() {\nShowRolesStmt showStmt = (ShowRolesStmt) stmt;\nList> infos = new ArrayList<>();\nAuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();\nList roles = authorizationManager.getAllRoles();\nroles.forEach(e -> infos.add(Lists.newArrayList(e,\nauthorizationManager.isBuiltinRole(e) ? 
\"true\" : \"false\",\nauthorizationManager.getRoleComment(e))));\nresultSet = new ShowResultSet(showStmt.getMetaData(), infos);\n}\nprivate void handleShowUser() {\nList> rowSet = Lists.newArrayList();\nShowUserStmt showUserStmt = (ShowUserStmt) stmt;\nif (showUserStmt.isAll()) {\nAuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();\nList users = authorizationManager.getAllUsers();\nusers.forEach(u -> rowSet.add(Lists.newArrayList(u)));\n} else {\nList row = Lists.newArrayList();\nrow.add(connectContext.getCurrentUserIdentity().toString());\nrowSet.add(row);\n}\nresultSet = new ShowResultSet(stmt.getMetaData(), rowSet);\n}\nprivate void handleAdminShowTabletStatus() throws AnalysisException {\nAdminShowReplicaStatusStmt showStmt = (AdminShowReplicaStatusStmt) stmt;\nList> results;\ntry {\nresults = MetadataViewer.getTabletStatus(showStmt);\n} catch (DdlException e) {\nthrow new AnalysisException(e.getMessage());\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), results);\n}\nprivate void handleAdminShowTabletDistribution() throws AnalysisException {\nAdminShowReplicaDistributionStmt showStmt = (AdminShowReplicaDistributionStmt) stmt;\nList> results;\ntry {\nresults = MetadataViewer.getTabletDistribution(showStmt);\n} catch (DdlException e) {\nthrow new AnalysisException(e.getMessage());\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), results);\n}\nprivate void handleAdminShowConfig() throws AnalysisException {\nAdminShowConfigStmt showStmt = (AdminShowConfigStmt) stmt;\nList> results;\ntry {\nPatternMatcher matcher = null;\nif (showStmt.getPattern() != null) {\nmatcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),\nCaseSensibility.CONFIG.getCaseSensibility());\n}\nresults = ConfigBase.getConfigInfo(matcher);\nresults.sort(Comparator.comparing(o -> o.get(0)));\n} catch (DdlException e) {\nthrow new AnalysisException(e.getMessage());\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), results);\n}\nprivate void handleShowSmallFiles() throws AnalysisException {\nShowSmallFilesStmt showStmt = (ShowSmallFilesStmt) stmt;\nList> results;\ntry {\nresults = GlobalStateMgr.getCurrentState().getSmallFileMgr().getInfo(showStmt.getDbName());\n} catch (DdlException e) {\nthrow new AnalysisException(e.getMessage());\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), results);\n}\nprivate void handleShowDynamicPartition() {\nShowDynamicPartitionStmt showDynamicPartitionStmt = (ShowDynamicPartitionStmt) stmt;\nList> rows = Lists.newArrayList();\nDatabase db = connectContext.getGlobalStateMgr().getDb(showDynamicPartitionStmt.getDb());\nif (db != null) {\ndb.readLock();\ntry {\nfor (Table tbl : db.getTables()) {\nif (!(tbl instanceof OlapTable)) {\ncontinue;\n}\nDynamicPartitionScheduler dynamicPartitionScheduler =\nGlobalStateMgr.getCurrentState().getDynamicPartitionScheduler();\nOlapTable olapTable = (OlapTable) tbl;\nif (!olapTable.dynamicPartitionExists()) {\ndynamicPartitionScheduler.removeRuntimeInfo(olapTable.getName());\ncontinue;\n}\nif (!PrivilegeActions.checkAnyActionOnTable(ConnectContext.get(),\ndb.getFullName(), olapTable.getName())) {\ncontinue;\n}\nDynamicPartitionProperty dynamicPartitionProperty =\nolapTable.getTableProperty().getDynamicPartitionProperty();\nString tableName = olapTable.getName();\nint replicationNum = dynamicPartitionProperty.getReplicationNum();\nreplicationNum = (replicationNum == DynamicPartitionProperty.NOT_SET_REPLICATION_NUM) ?\nolapTable.getDefaultReplicationNum() : 
RunMode.defaultReplicationNum();\nrows.add(Lists.newArrayList(\ntableName,\nString.valueOf(dynamicPartitionProperty.getEnable()),\ndynamicPartitionProperty.getTimeUnit().toUpperCase(),\nString.valueOf(dynamicPartitionProperty.getStart()),\nString.valueOf(dynamicPartitionProperty.getEnd()),\ndynamicPartitionProperty.getPrefix(),\nString.valueOf(dynamicPartitionProperty.getBuckets()),\nString.valueOf(replicationNum),\ndynamicPartitionProperty.getStartOfInfo(),\ndynamicPartitionScheduler\n.getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_UPDATE_TIME),\ndynamicPartitionScheduler\n.getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_SCHEDULER_TIME),\ndynamicPartitionScheduler\n.getRuntimeInfo(tableName, DynamicPartitionScheduler.DYNAMIC_PARTITION_STATE),\ndynamicPartitionScheduler\n.getRuntimeInfo(tableName, DynamicPartitionScheduler.CREATE_PARTITION_MSG),\ndynamicPartitionScheduler\n.getRuntimeInfo(tableName, DynamicPartitionScheduler.DROP_PARTITION_MSG)));\n}\n} finally {\ndb.readUnlock();\n}\nresultSet = new ShowResultSet(showDynamicPartitionStmt.getMetaData(), rows);\n}\n}\nprivate void handleShowTransaction() throws AnalysisException {\nShowTransactionStmt showStmt = (ShowTransactionStmt) stmt;\nDatabase db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());\nMetaUtils.checkDbNullAndReport(db, showStmt.getDbName());\nlong txnId = showStmt.getTxnId();\nGlobalTransactionMgr transactionMgr = GlobalStateMgr.getCurrentGlobalTransactionMgr();\nresultSet = new ShowResultSet(showStmt.getMetaData(), transactionMgr.getSingleTranInfo(db.getId(), txnId));\n}\nprivate void handleShowPlugins() {\nShowPluginsStmt pluginsStmt = (ShowPluginsStmt) stmt;\nList> rows = GlobalStateMgr.getCurrentPluginMgr().getPluginShowInfos();\nresultSet = new ShowResultSet(pluginsStmt.getMetaData(), rows);\n}\nprivate void handleShowCharset() {\nShowCharsetStmt showCharsetStmt = (ShowCharsetStmt) stmt;\nList> rows = Lists.newArrayList();\nList row = Lists.newArrayList();\nrow.add(\"utf8\");\nrow.add(\"UTF-8 Unicode\");\nrow.add(\"utf8_general_ci\");\nrow.add(\"3\");\nrows.add(row);\nresultSet = new ShowResultSet(showCharsetStmt.getMetaData(), rows);\n}\nprivate void handleShowSqlBlackListStmt() {\nShowSqlBlackListStmt showStmt = (ShowSqlBlackListStmt) stmt;\nList> rows = new ArrayList<>();\nfor (Map.Entry entry : SqlBlackList.getInstance().sqlBlackListMap.entrySet()) {\nList oneSql = new ArrayList<>();\noneSql.add(String.valueOf(entry.getValue().id));\noneSql.add(entry.getKey());\nrows.add(oneSql);\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleShowAnalyzeJob() {\nList jobs = connectContext.getGlobalStateMgr().getAnalyzeMgr().getAllAnalyzeJobList();\nList> rows = Lists.newArrayList();\njobs.sort(Comparator.comparing(AnalyzeJob::getId));\nfor (AnalyzeJob job : jobs) {\ntry {\nList result = ShowAnalyzeJobStmt.showAnalyzeJobs(connectContext, job);\nif (result != null) {\nrows.add(result);\n}\n} catch (MetaNotFoundException e) {\n}\n}\nrows = doPredicate(stmt, stmt.getMetaData(), rows);\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate void handleShowAnalyzeStatus() {\nList statuses = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()\n.getAnalyzeStatusMap().values());\nList> rows = Lists.newArrayList();\nstatuses.sort(Comparator.comparing(AnalyzeStatus::getId));\nfor (AnalyzeStatus status : statuses) {\ntry {\nList result = ShowAnalyzeStatusStmt.showAnalyzeStatus(connectContext, status);\nif (result != null) 
{\nrows.add(result);\n}\n} catch (MetaNotFoundException e) {\n}\n}\nrows = doPredicate(stmt, stmt.getMetaData(), rows);\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate void handleShowBasicStatsMeta() {\nList metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()\n.getBasicStatsMetaMap().values());\nList> rows = Lists.newArrayList();\nfor (BasicStatsMeta meta : metas) {\ntry {\nList result = ShowBasicStatsMetaStmt.showBasicStatsMeta(connectContext, meta);\nif (result != null) {\nrows.add(result);\n}\n} catch (MetaNotFoundException e) {\n}\n}\nrows = doPredicate(stmt, stmt.getMetaData(), rows);\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate void handleShowHistogramStatsMeta() {\nList metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()\n.getHistogramStatsMetaMap().values());\nList> rows = Lists.newArrayList();\nfor (HistogramStatsMeta meta : metas) {\ntry {\nList result = ShowHistogramStatsMetaStmt.showHistogramStatsMeta(connectContext, meta);\nif (result != null) {\nrows.add(result);\n}\n} catch (MetaNotFoundException e) {\n}\n}\nrows = doPredicate(stmt, stmt.getMetaData(), rows);\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate void handleShowResourceGroup() throws AnalysisException {\nShowResourceGroupStmt showResourceGroupStmt = (ShowResourceGroupStmt) stmt;\nList> rows =\nGlobalStateMgr.getCurrentState().getResourceGroupMgr().showResourceGroup(showResourceGroupStmt);\nresultSet = new ShowResultSet(showResourceGroupStmt.getMetaData(), rows);\n}\nprivate void handleShowCatalogs() {\nShowCatalogsStmt showCatalogsStmt = (ShowCatalogsStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nCatalogMgr catalogMgr = globalStateMgr.getCatalogMgr();\nList> rowSet = catalogMgr.getCatalogsInfo().stream()\n.filter(row -> {\nif (!InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME.equals(row.get(0))) {\nreturn PrivilegeActions.checkAnyActionOnOrInCatalog(\nconnectContext.getCurrentUserIdentity(),\nconnectContext.getCurrentRoleIds(), row.get(0));\n}\nreturn true;\n}\n)\n.sorted(Comparator.comparing(o -> o.get(0))).collect(Collectors.toList());\nresultSet = new ShowResultSet(showCatalogsStmt.getMetaData(), rowSet);\n}\nprivate void handleShowWarehouses() {\nShowWarehousesStmt showStmt = (ShowWarehousesStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nWarehouseManager warehouseMgr = globalStateMgr.getWarehouseMgr();\nList> rowSet = warehouseMgr.getWarehousesInfo().stream()\n.sorted(Comparator.comparing(o -> o.get(0))).collect(Collectors.toList());\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate void handleShowClusters() {\nShowClustersStmt showStmt = (ShowClustersStmt) stmt;\nWarehouseManager warehouseMgr = GlobalStateMgr.getCurrentWarehouseMgr();\nWarehouse warehouse = warehouseMgr.getWarehouse(showStmt.getWarehouseName());\nList> rowSet = warehouse.getClusterInfo().stream()\n.sorted(Comparator.comparing(o -> o.get(0))).collect(Collectors.toList());\nresultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);\n}\nprivate List> doPredicate(ShowStmt showStmt,\nShowResultSetMetaData showResultSetMetaData,\nList> rows) {\nPredicate predicate = showStmt.getPredicate();\nif (predicate == null) {\nreturn rows;\n}\nSlotRef slotRef = (SlotRef) predicate.getChild(0);\nStringLiteral stringLiteral = (StringLiteral) predicate.getChild(1);\nList> returnRows = new ArrayList<>();\nBinaryPredicate binaryPredicate = (BinaryPredicate) 
predicate;\nint idx = showResultSetMetaData.getColumnIdx(slotRef.getColumnName());\nif (binaryPredicate.getOp().isEquivalence()) {\nfor (List row : rows) {\nif (row.get(idx).equals(stringLiteral.getStringValue())) {\nreturnRows.add(row);\n}\n}\n}\nreturn returnRows;\n}\nprivate void handleShowCreateExternalCatalog() throws AnalysisException {\nShowCreateExternalCatalogStmt showStmt = (ShowCreateExternalCatalogStmt) stmt;\nString catalogName = showStmt.getCatalogName();\nList> rows = Lists.newArrayList();\nif (InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME.equalsIgnoreCase(catalogName)) {\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\nreturn;\n}\nCatalog catalog = connectContext.getGlobalStateMgr().getCatalogMgr().getCatalogByName(catalogName);\nif (catalog == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_CATALOG_ERROR, catalogName);\n}\nStringBuilder createCatalogSql = new StringBuilder();\ncreateCatalogSql.append(\"CREATE EXTERNAL CATALOG \")\n.append(\"`\").append(catalogName).append(\"`\")\n.append(\"\\n\");\nString comment = catalog.getComment();\nif (comment != null) {\ncreateCatalogSql.append(\"comment \\\"\").append(catalog.getDisplayComment()).append(\"\\\"\\n\");\n}\nMap clonedConfig = new HashMap<>(catalog.getConfig());\nCloudCredentialUtil.maskCloudCredential(clonedConfig);\ncreateCatalogSql.append(\"PROPERTIES (\")\n.append(new PrintableMap<>(clonedConfig, \" = \", true, true))\n.append(\"\\n)\");\nrows.add(Lists.newArrayList(catalogName, createCatalogSql.toString()));\nresultSet = new ShowResultSet(stmt.getMetaData(), rows);\n}\nprivate void handleShowStorageVolumes() throws DdlException {\nShowStorageVolumesStmt showStmt = (ShowStorageVolumesStmt) stmt;\nGlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\nStorageVolumeMgr storageVolumeMgr = globalStateMgr.getStorageVolumeMgr();\nList storageVolumeNames = storageVolumeMgr.listStorageVolumeNames();\nPatternMatcher matcher = null;\nList> rows = Lists.newArrayList();\nif (!showStmt.getPattern().isEmpty()) {\nmatcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n}\nPatternMatcher finalMatcher = matcher;\nstorageVolumeNames = storageVolumeNames.stream()\n.filter(storageVolumeName -> finalMatcher == null || finalMatcher.match(storageVolumeName))\n.filter(storageVolumeName -> PrivilegeActions.checkAnyActionOnStorageVolume(connectContext, storageVolumeName))\n.collect(Collectors.toList());\nfor (String storageVolumeName : storageVolumeNames) {\nrows.add(Lists.newArrayList(storageVolumeName));\n}\nresultSet = new ShowResultSet(showStmt.getMetaData(), rows);\n}\nprivate void handleDescStorageVolume() throws AnalysisException {\nDescStorageVolumeStmt desc = (DescStorageVolumeStmt) stmt;\nresultSet = new ShowResultSet(desc.getMetaData(), desc.getResultRows());\n}\n}" + }, + { + "comment": "We don't need `bType != symTable.nilType` separately with this? 
Let's make this the first check instead.", + "method_body": "public void visit(BLangExpressionStmt exprStmtNode, AnalyzerData data) {\nSymbolEnv currentEnv = data.env;\nSymbolEnv stmtEnv = new SymbolEnv(exprStmtNode, currentEnv.scope);\ncurrentEnv.copyTo(stmtEnv);\nBLangExpression expr = exprStmtNode.expr;\nBType bType = typeChecker.checkExpr(expr, stmtEnv, data.prevEnvs, data.commonAnalyzerData);\nif (bType != symTable.nilType && bType != symTable.semanticError &&\nexpr.getKind() != NodeKind.FAIL &&\n!types.isNeverTypeOrStructureTypeWithARequiredNeverMember(bType)\n&& !types.isAssignable(bType, symTable.nilType)) {\ndlog.error(exprStmtNode.pos, DiagnosticErrorCode.ASSIGNMENT_REQUIRED, bType);\n} else if (expr.getKind() == NodeKind.INVOCATION &&\ntypes.isNeverTypeOrStructureTypeWithARequiredNeverMember(expr.getBType())) {\ndata.notCompletedNormally = true;\n}\nvalidateWorkerAnnAttachments(exprStmtNode.expr, data);\n}", + "target_code": "&& !types.isAssignable(bType, symTable.nilType)) {", + "method_body_after": "public void visit(BLangExpressionStmt exprStmtNode, AnalyzerData data) {\nSymbolEnv currentEnv = data.env;\nSymbolEnv stmtEnv = new SymbolEnv(exprStmtNode, currentEnv.scope);\ncurrentEnv.copyTo(stmtEnv);\nBLangExpression expr = exprStmtNode.expr;\nBType bType = typeChecker.checkExpr(expr, stmtEnv, data.prevEnvs, data.commonAnalyzerData);\nif (!types.isAssignable(bType, symTable.nilType) && bType != symTable.semanticError &&\nexpr.getKind() != NodeKind.FAIL && !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(bType)) {\ndlog.error(exprStmtNode.pos, DiagnosticErrorCode.ASSIGNMENT_REQUIRED, bType);\n} else if (expr.getKind() == NodeKind.INVOCATION &&\ntypes.isNeverTypeOrStructureTypeWithARequiredNeverMember(expr.getBType())) {\ndata.notCompletedNormally = true;\n}\nvalidateWorkerAnnAttachments(exprStmtNode.expr, data);\n}", + "context_before": "class representing a service-decl or object-ctor with service prefix\nAttachPoint.Point attachedPoint;\nSet flagSet = classDefinition.flagSet;\nif (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) {\nattachedPoint = AttachPoint.Point.SERVICE;\n}", + "context_after": "class representing a service-decl or object-ctor with service prefix\nAttachPoint.Point attachedPoint;\nSet flagSet = classDefinition.flagSet;\nif (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) {\nattachedPoint = AttachPoint.Point.SERVICE;\n}" + }, + { + "comment": "+1.. 
That'd actually simplify things a bit", + "method_body": "private boolean isAssignableRecordType(BRecordType recordType, BType type) {\nBType targetType;\nswitch (type.tag) {\ncase TypeTags.MAP:\ntargetType = ((BMapType) type).constraint;\nbreak;\ncase TypeTags.JSON:\ntargetType = type;\nbreak;\ndefault:\nthrow new IllegalArgumentException(\"Incompatible target type: \" + type.toString());\n}\nif (recordType.sealed) {\nreturn recordFieldsAssignableToType(recordType, targetType);\n} else {\nreturn recordFieldsAssignableToType(recordType, targetType) &&\nisAssignable(recordType.restFieldType, targetType);\n}\n}", + "target_code": "isAssignable(recordType.restFieldType, targetType);", + "method_body_after": "private boolean isAssignableRecordType(BRecordType recordType, BType type) {\nBType targetType;\nswitch (type.tag) {\ncase TypeTags.MAP:\ntargetType = ((BMapType) type).constraint;\nbreak;\ncase TypeTags.JSON:\ntargetType = type;\nbreak;\ndefault:\nthrow new IllegalArgumentException(\"Incompatible target type: \" + type.toString());\n}\nreturn recordFieldsAssignableToType(recordType, targetType);\n}", + "context_before": "class Types {\nprivate static final CompilerContext.Key TYPES_KEY =\nnew CompilerContext.Key<>();\nprivate SymbolTable symTable;\nprivate SymbolResolver symResolver;\nprivate BLangDiagnosticLogHelper dlogHelper;\nprivate Names names;\nprivate int finiteTypeCount = 0;\nprivate BUnionType expandedXMLBuiltinSubtypes;\npublic static Types getInstance(CompilerContext context) {\nTypes types = context.get(TYPES_KEY);\nif (types == null) {\ntypes = new Types(context);\n}\nreturn types;\n}\npublic Types(CompilerContext context) {\ncontext.put(TYPES_KEY, this);\nthis.symTable = SymbolTable.getInstance(context);\nthis.symResolver = SymbolResolver.getInstance(context);\nthis.dlogHelper = BLangDiagnosticLogHelper.getInstance(context);\nthis.names = Names.getInstance(context);\nthis.expandedXMLBuiltinSubtypes = BUnionType.create(null,\nsymTable.xmlElementType, symTable.xmlCommentType, symTable.xmlPIType, symTable.xmlTextType);\n}\npublic List checkTypes(BLangExpression node,\nList actualTypes,\nList expTypes) {\nList resTypes = new ArrayList<>();\nfor (int i = 0; i < actualTypes.size(); i++) {\nresTypes.add(checkType(node, actualTypes.get(i), expTypes.size() > i ? 
expTypes.get(i) : symTable.noType));\n}\nreturn resTypes;\n}\npublic BType checkType(BLangExpression node,\nBType actualType,\nBType expType) {\nreturn checkType(node, actualType, expType, DiagnosticCode.INCOMPATIBLE_TYPES);\n}\npublic BType checkType(BLangExpression expr,\nBType actualType,\nBType expType,\nDiagnosticCode diagCode) {\nexpr.type = checkType(expr.pos, actualType, expType, diagCode);\nif (expr.type.tag == TypeTags.SEMANTIC_ERROR) {\nreturn expr.type;\n}\nsetImplicitCastExpr(expr, actualType, expType);\nreturn expr.type;\n}\npublic BType checkType(DiagnosticPos pos,\nBType actualType,\nBType expType,\nDiagnosticCode diagCode) {\nif (expType.tag == TypeTags.SEMANTIC_ERROR) {\nreturn expType;\n} else if (expType.tag == TypeTags.NONE) {\nreturn actualType;\n} else if (actualType.tag == TypeTags.SEMANTIC_ERROR) {\nreturn actualType;\n} else if (isAssignable(actualType, expType)) {\nreturn actualType;\n}\ndlogHelper.error(pos, diagCode, expType, actualType);\nreturn symTable.semanticError;\n}\npublic boolean isJSONContext(BType type) {\nif (type.tag == TypeTags.UNION) {\nreturn ((BUnionType) type).getMemberTypes().stream().anyMatch(memType -> memType.tag == TypeTags.JSON);\n}\nreturn type.tag == TypeTags.JSON;\n}\npublic boolean isLax(BType type) {\nswitch (type.tag) {\ncase TypeTags.JSON:\ncase TypeTags.XML:\ncase TypeTags.XML_ELEMENT:\nreturn true;\ncase TypeTags.MAP:\nreturn isLax(((BMapType) type).constraint);\ncase TypeTags.UNION:\nreturn ((BUnionType) type).getMemberTypes().stream().allMatch(this::isLax);\n}\nreturn false;\n}\npublic boolean isSameType(BType source, BType target) {\nreturn isSameType(source, target, new HashSet<>());\n}\nprivate boolean isSameType(BType source, BType target, Set unresolvedTypes) {\nTypePair pair = new TypePair(source, target);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nBTypeVisitor sameTypeVisitor = new BSameTypeVisitor(unresolvedTypes);\nreturn target.accept(sameTypeVisitor, source);\n}\npublic boolean isValueType(BType type) {\nswitch (type.tag) {\ncase TypeTags.BOOLEAN:\ncase TypeTags.BYTE:\ncase TypeTags.DECIMAL:\ncase TypeTags.FLOAT:\ncase TypeTags.INT:\ncase TypeTags.STRING:\ncase TypeTags.SIGNED32_INT:\ncase TypeTags.SIGNED16_INT:\ncase TypeTags.SIGNED8_INT:\ncase TypeTags.UNSIGNED32_INT:\ncase TypeTags.UNSIGNED16_INT:\ncase TypeTags.UNSIGNED8_INT:\ncase TypeTags.CHAR_STRING:\nreturn true;\ndefault:\nreturn false;\n}\n}\nboolean isBasicNumericType(BType type) {\nreturn type.tag < TypeTags.STRING || TypeTags.isIntegerTypeTag(type.tag);\n}\nboolean finiteTypeContainsNumericTypeValues(BFiniteType finiteType) {\nreturn finiteType.getValueSpace().stream().anyMatch(valueExpr -> isBasicNumericType(valueExpr.type));\n}\npublic boolean containsErrorType(BType type) {\nif (type.tag == TypeTags.UNION) {\nreturn ((BUnionType) type).getMemberTypes().stream()\n.anyMatch(this::containsErrorType);\n}\nreturn type.tag == TypeTags.ERROR;\n}\npublic boolean isSubTypeOfList(BType type) {\nif (type.tag != TypeTags.UNION) {\nreturn isSubTypeOfBaseType(type, TypeTags.ARRAY) || isSubTypeOfBaseType(type, TypeTags.TUPLE);\n}\nreturn ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfList);\n}\npublic boolean isSubTypeOfMapping(BType type) {\nif (type.tag != TypeTags.UNION) {\nreturn isSubTypeOfBaseType(type, TypeTags.MAP) || isSubTypeOfBaseType(type, TypeTags.RECORD);\n}\nreturn ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfMapping);\n}\npublic boolean 
isSubTypeOfBaseType(BType type, int baseTypeTag) {\nif (type.tag != TypeTags.UNION) {\nreturn type.tag == baseTypeTag;\n}\nif (TypeTags.isXMLTypeTag(baseTypeTag)) {\nreturn true;\n}\nreturn ((BUnionType) type).getMemberTypes().stream().allMatch(memType -> memType.tag == baseTypeTag);\n}\n/**\n* Checks whether source type is assignable to the target type.\n*
\n* Source type is assignable to the target type if,\n* 1) the target type is any and the source type is not a value type.\n* 2) there exists an implicit cast symbol from source to target.\n* 3) both types are JSON and the target constraint is no type.\n* 4) both types are array type and both array types are assignable.\n* 5) both types are MAP and the target constraint is any type or constraints are structurally equivalent.\n*\n* @param source type.\n* @param target type.\n* @return true if source type is assignable to the target type.\n*/\npublic boolean isAssignable(BType source, BType target) {\nreturn isAssignable(source, target, new HashSet<>());\n}\nboolean isStampingAllowed(BType source, BType target) {\nreturn (isAssignable(source, target) || isAssignable(target, source) ||\ncheckTypeEquivalencyForStamping(source, target) || checkTypeEquivalencyForStamping(target, source));\n}\nprivate boolean checkTypeEquivalencyForStamping(BType source, BType target) {\nif (target.tag == TypeTags.RECORD) {\nif (source.tag == TypeTags.RECORD) {\nTypePair pair = new TypePair(source, target);\nSet unresolvedTypes = new HashSet<>();\nunresolvedTypes.add(pair);\nreturn checkRecordEquivalencyForStamping((BRecordType) source, (BRecordType) target, unresolvedTypes);\n} else if (source.tag == TypeTags.MAP) {\nint mapConstraintTypeTag = ((BMapType) source).constraint.tag;\nif ((!(mapConstraintTypeTag == TypeTags.ANY || mapConstraintTypeTag == TypeTags.ANYDATA)) &&\n((BRecordType) target).sealed) {\nfor (BField field : ((BStructureType) target).getFields()) {\nif (field.getType().tag != mapConstraintTypeTag) {\nreturn false;\n}\n}\n}\nreturn true;\n}\n} else if (target.tag == TypeTags.JSON) {\nreturn source.tag == TypeTags.JSON || source.tag == TypeTags.RECORD || source.tag == TypeTags.MAP;\n} else if (target.tag == TypeTags.MAP) {\nif (source.tag == TypeTags.MAP) {\nreturn isStampingAllowed(((BMapType) source).getConstraint(), ((BMapType) target).getConstraint());\n} else if (source.tag == TypeTags.UNION) {\nreturn checkUnionEquivalencyForStamping(source, target);\n}\n} else if (target.tag == TypeTags.ARRAY) {\nif (source.tag == TypeTags.JSON) {\nreturn true;\n} else if (source.tag == TypeTags.TUPLE) {\nBType arrayElementType = ((BArrayType) target).eType;\nfor (BType tupleMemberType : ((BTupleType) source).getTupleTypes()) {\nif (!isStampingAllowed(tupleMemberType, arrayElementType)) {\nreturn false;\n}\n}\nreturn true;\n} else if (source.tag == TypeTags.ARRAY) {\nreturn checkTypeEquivalencyForStamping(((BArrayType) source).eType, ((BArrayType) target).eType);\n}\n} else if (target.tag == TypeTags.UNION) {\nreturn checkUnionEquivalencyForStamping(source, target);\n} else if (target.tag == TypeTags.TUPLE && source.tag == TypeTags.TUPLE) {\nreturn checkTupleEquivalencyForStamping(source, target);\n}\nreturn false;\n}\nprivate boolean checkRecordEquivalencyForStamping(BRecordType rhsType, BRecordType lhsType,\nSet unresolvedTypes) {\nif (Symbols.isFlagOn(lhsType.tsymbol.flags ^ rhsType.tsymbol.flags, Flags.PUBLIC)) {\nreturn false;\n}\nif (Symbols.isPrivate(lhsType.tsymbol) && rhsType.tsymbol.pkgID != lhsType.tsymbol.pkgID) {\nreturn false;\n}\nif (lhsType.fields.size() > rhsType.fields.size()) {\nreturn false;\n}\nif (lhsType.sealed && !rhsType.sealed) {\nreturn false;\n}\nreturn checkFieldEquivalencyForStamping(lhsType, rhsType, unresolvedTypes);\n}\nprivate boolean checkFieldEquivalencyForStamping(BStructureType lhsType, BStructureType rhsType,\nSet unresolvedTypes) {\nMap rhsFields = 
rhsType.fields.stream().collect(\nCollectors.toMap(BField::getName, field -> field));\nfor (BField lhsField : lhsType.fields) {\nBField rhsField = rhsFields.get(lhsField.name);\nif (rhsField == null || !isStampingAllowed(rhsField.type, lhsField.type)) {\nreturn false;\n}\n}\nMap lhsFields = lhsType.fields.stream().collect(\nCollectors.toMap(BField::getName, field -> field));\nfor (BField rhsField : rhsType.fields) {\nBField lhsField = lhsFields.get(rhsField.name);\nif (lhsField == null && !isStampingAllowed(rhsField.type, ((BRecordType) lhsType).restFieldType)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean checkUnionEquivalencyForStamping(BType source, BType target) {\nSet sourceTypes = new LinkedHashSet<>();\nSet targetTypes = new LinkedHashSet<>();\nif (source.tag == TypeTags.UNION) {\nBUnionType sourceUnionType = (BUnionType) source;\nsourceTypes.addAll(sourceUnionType.getMemberTypes());\n} else {\nsourceTypes.add(source);\n}\nif (target.tag == TypeTags.UNION) {\nBUnionType targetUnionType = (BUnionType) target;\ntargetTypes.addAll(targetUnionType.getMemberTypes());\n} else {\ntargetTypes.add(target);\n}\nboolean notAssignable = sourceTypes\n.stream()\n.map(s -> targetTypes\n.stream()\n.anyMatch(t -> isStampingAllowed(s, t)))\n.anyMatch(assignable -> !assignable);\nreturn !notAssignable;\n}\nprivate boolean checkTupleEquivalencyForStamping(BType source, BType target) {\nif (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {\nreturn false;\n}\nBTupleType lhsTupleType = (BTupleType) target;\nBTupleType rhsTupleType = (BTupleType) source;\nif (lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) {\nreturn false;\n}\nfor (int i = 0; i < lhsTupleType.tupleTypes.size(); i++) {\nif (!isStampingAllowed(rhsTupleType.tupleTypes.get(i), lhsTupleType.tupleTypes.get(i))) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean isAssignable(BType source, BType target, Set unresolvedTypes) {\nif (isSameType(source, target)) {\nreturn true;\n}\nint sourceTag = source.tag;\nint targetTag = target.tag;\nif (sourceTag == TypeTags.BYTE && targetTag == TypeTags.INT) {\nreturn true;\n}\nif (TypeTags.isXMLTypeTag(sourceTag) && targetTag == TypeTags.XML) {\nreturn true;\n}\nif (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) {\nreturn true;\n}\nif (TypeTags.isXMLTypeTag(sourceTag) && targetTag == TypeTags.XML) {\nreturn true;\n}\nif (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) {\nreturn true;\n}\nif (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ERROR) {\nreturn isErrorTypeAssignable((BErrorType) source, (BErrorType) target, unresolvedTypes);\n} else if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ANY) {\nreturn false;\n}\nif (sourceTag == TypeTags.NIL && (isNullable(target) || targetTag == TypeTags.JSON)) {\nreturn true;\n}\nif (targetTag == TypeTags.ANY && !containsErrorType(source) && !isValueType(source)) {\nreturn true;\n}\nif (targetTag == TypeTags.ANYDATA && !containsErrorType(source) && source.isAnydata()) {\nreturn true;\n}\nif (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) {\nBRecordType recordType = (BRecordType) source;\nreturn isAssignableRecordType(recordType, target);\n}\nif (target.getKind() == TypeKind.SERVICE && source.getKind() == TypeKind.SERVICE) {\nreturn true;\n}\nif (targetTag == TypeTags.TYPEDESC && sourceTag == TypeTags.TYPEDESC) {\nreturn isAssignable(((BTypedescType) source).constraint, (((BTypedescType) target).constraint),\nunresolvedTypes);\n}\nif (targetTag == 
TypeTags.TABLE && sourceTag == TypeTags.TABLE) {\nreturn isAssignable(((BTableType) source).constraint, (((BTableType) target).constraint),\nunresolvedTypes);\n}\nif (targetTag == TypeTags.STREAM && sourceTag == TypeTags.STREAM) {\nreturn isAssignable(((BStreamType) source).constraint, ((BStreamType) target).constraint, unresolvedTypes);\n}\nif (isBuiltInTypeWidenPossible(source, target) == TypeTestResult.TRUE) {\nreturn true;\n}\nif (sourceTag == TypeTags.FINITE) {\nreturn isFiniteTypeAssignable((BFiniteType) source, target, unresolvedTypes);\n}\nif ((targetTag == TypeTags.UNION || sourceTag == TypeTags.UNION) &&\nisAssignableToUnionType(source, target, unresolvedTypes)) {\nreturn true;\n}\nif (targetTag == TypeTags.JSON) {\nif (sourceTag == TypeTags.JSON) {\nreturn true;\n}\nif (sourceTag == TypeTags.ARRAY) {\nreturn isArrayTypesAssignable(source, target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.MAP) {\nreturn isAssignable(((BMapType) source).constraint, target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.RECORD) {\nreturn isAssignableRecordType((BRecordType) source, target);\n}\n}\nif (targetTag == TypeTags.FUTURE && sourceTag == TypeTags.FUTURE) {\nif (((BFutureType) target).constraint.tag == TypeTags.NONE) {\nreturn true;\n}\nreturn isAssignable(((BFutureType) source).constraint, ((BFutureType) target).constraint, unresolvedTypes);\n}\nif (targetTag == TypeTags.MAP && sourceTag == TypeTags.MAP) {\nif (((BMapType) target).constraint.tag == TypeTags.ANY &&\n((BMapType) source).constraint.tag != TypeTags.UNION) {\nreturn true;\n}\nreturn isAssignable(((BMapType) source).constraint, ((BMapType) target).constraint, unresolvedTypes);\n}\nif (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) {\nBType mapConstraint = ((BMapType) target).constraint;\nBRecordType srcRec = (BRecordType) source;\nboolean hasIncompatibleType = srcRec.fields\n.stream().anyMatch(field -> !isAssignable(field.type, mapConstraint));\nreturn !hasIncompatibleType && isAssignable(srcRec.restFieldType, mapConstraint);\n}\nif ((sourceTag == TypeTags.OBJECT || sourceTag == TypeTags.RECORD)\n&& (targetTag == TypeTags.OBJECT || targetTag == TypeTags.RECORD)) {\nreturn checkStructEquivalency(source, target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.TUPLE && targetTag == TypeTags.ARRAY) {\nreturn isTupleTypeAssignableToArrayType((BTupleType) source, (BArrayType) target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.ARRAY && targetTag == TypeTags.TUPLE) {\nreturn isArrayTypeAssignableToTupleType((BArrayType) source, (BTupleType) target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.TUPLE || targetTag == TypeTags.TUPLE) {\nreturn isTupleTypeAssignable(source, target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.INVOKABLE && targetTag == TypeTags.INVOKABLE) {\nreturn isFunctionTypeAssignable((BInvokableType) source, (BInvokableType) target, new HashSet<>());\n}\nreturn sourceTag == TypeTags.ARRAY && targetTag == TypeTags.ARRAY &&\nisArrayTypesAssignable(source, target, unresolvedTypes);\n}\nprivate boolean recordFieldsAssignableToType(BRecordType recordType, BType targetType) {\nfor (BField field : recordType.fields) {\nif (!isAssignable(field.type, targetType)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean isErrorTypeAssignable(BErrorType source, BErrorType target, Set unresolvedTypes) {\nif (target == symTable.errorType) {\nreturn true;\n}\nTypePair pair = new TypePair(source, target);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nreturn 
isAssignable(source.reasonType, target.reasonType, unresolvedTypes) &&\nisAssignable(source.detailType, target.detailType, unresolvedTypes);\n}\nprivate boolean isTupleTypeAssignable(BType source, BType target, Set unresolvedTypes) {\nif (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {\nreturn false;\n}\nBTupleType lhsTupleType = (BTupleType) target;\nBTupleType rhsTupleType = (BTupleType) source;\nif (lhsTupleType.restType == null && rhsTupleType.restType != null) {\nreturn false;\n}\nif (lhsTupleType.restType == null && lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) {\nreturn false;\n}\nif (lhsTupleType.restType != null && rhsTupleType.restType != null) {\nif (!isAssignable(rhsTupleType.restType, lhsTupleType.restType, unresolvedTypes)) {\nreturn false;\n}\n}\nfor (int i = 0; i < rhsTupleType.tupleTypes.size(); i++) {\nBType lhsType = (lhsTupleType.tupleTypes.size() > i)\n? lhsTupleType.tupleTypes.get(i) : lhsTupleType.restType;\nif (!isAssignable(rhsTupleType.tupleTypes.get(i), lhsType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean isTupleTypeAssignableToArrayType(BTupleType source, BArrayType target,\nSet unresolvedTypes) {\nif (target.state != BArrayState.UNSEALED\n&& (source.restType != null || source.tupleTypes.size() != target.size)) {\nreturn false;\n}\nList sourceTypes = new ArrayList<>(source.tupleTypes);\nif (source.restType != null) {\nsourceTypes.add(source.restType);\n}\nreturn sourceTypes.stream()\n.allMatch(tupleElemType -> isAssignable(tupleElemType, target.eType, unresolvedTypes));\n}\nprivate boolean isArrayTypeAssignableToTupleType(BArrayType source, BTupleType target,\nSet unresolvedTypes) {\nif (!target.tupleTypes.isEmpty()) {\nif (source.state == BArrayState.UNSEALED) {\nreturn false;\n}\nif (target.restType != null && target.tupleTypes.size() > source.size) {\nreturn false;\n}\nif (target.restType == null && target.tupleTypes.size() != source.size) {\nreturn false;\n}\n}\nList targetTypes = new ArrayList<>(target.tupleTypes);\nif (target.restType != null) {\ntargetTypes.add(target.restType);\n}\nreturn targetTypes.stream()\n.allMatch(tupleElemType -> isAssignable(source.eType, tupleElemType, unresolvedTypes));\n}\npublic boolean isArrayTypesAssignable(BType source, BType target, Set unresolvedTypes) {\nif (target.tag == TypeTags.ARRAY && source.tag == TypeTags.ARRAY) {\nBArrayType lhsArrayType = (BArrayType) target;\nBArrayType rhsArrayType = (BArrayType) source;\nif (lhsArrayType.state == BArrayState.UNSEALED) {\nreturn isArrayTypesAssignable(rhsArrayType.eType, lhsArrayType.eType, unresolvedTypes);\n}\nreturn checkSealedArraySizeEquality(rhsArrayType, lhsArrayType)\n&& isArrayTypesAssignable(rhsArrayType.eType, lhsArrayType.eType, unresolvedTypes);\n} else if (source.tag == TypeTags.ARRAY) {\nif (target.tag == TypeTags.JSON) {\nreturn isAssignable(((BArrayType) source).getElementType(), target, unresolvedTypes);\n}\nif (target.tag == TypeTags.UNION) {\nreturn isAssignable(source, target);\n}\nreturn target.tag == TypeTags.ANY;\n} else if (target.tag == TypeTags.ARRAY) {\nreturn false;\n}\nif (isAssignable(source, target, unresolvedTypes)) {\nreturn true;\n}\nif (target.tag == TypeTags.UNION) {\nreturn isAssignable(source, target, unresolvedTypes);\n}\nreturn target.tag == TypeTags.ANY && !isValueType(source);\n}\nprivate boolean isFunctionTypeAssignable(BInvokableType source, BInvokableType target,\nSet unresolvedTypes) {\nif (containsTypeParams(target)) {\nif (source.paramTypes.size() != 
target.paramTypes.size()) {\nreturn false;\n}\nfor (int i = 0; i < source.paramTypes.size(); i++) {\nBType sourceParam = source.paramTypes.get(i);\nBType targetParam = target.paramTypes.get(i);\nboolean isTypeParam = TypeParamAnalyzer.isTypeParam(targetParam);\nif (isTypeParam) {\nif (!isAssignable(sourceParam, targetParam)) {\nreturn false;\n}\n} else {\nif (!isAssignable(targetParam, sourceParam)) {\nreturn false;\n}\n}\n}\nif (source.retType == null && target.retType == null) {\nreturn true;\n} else if (source.retType == null || target.retType == null) {\nreturn false;\n}\nreturn isAssignable(source.retType, target.retType, unresolvedTypes);\n}\nreturn checkFunctionTypeEquality(source, target, unresolvedTypes, (s, t, ut) -> isAssignable(t, s, ut));\n}\nprivate boolean containsTypeParams(BInvokableType type) {\nboolean hasParameterizedTypes = type.paramTypes.stream()\n.anyMatch(t -> {\nif (t.tag == TypeTags.FUNCTION_POINTER) {\nreturn containsTypeParams((BInvokableType) t);\n}\nreturn TypeParamAnalyzer.isTypeParam(t);\n});\nif (hasParameterizedTypes) {\nreturn hasParameterizedTypes;\n}\nif (type.retType.tag == TypeTags.FUNCTION_POINTER) {\nreturn containsTypeParams((BInvokableType) type.retType);\n}\nreturn TypeParamAnalyzer.isTypeParam(type.retType);\n}\nprivate boolean isSameFunctionType(BInvokableType source, BInvokableType target, Set unresolvedTypes) {\nreturn checkFunctionTypeEquality(source, target, unresolvedTypes, this::isSameType);\n}\nprivate boolean checkFunctionTypeEquality(BInvokableType source, BInvokableType target,\nSet unresolvedTypes, TypeEqualityPredicate equality) {\nif (source.paramTypes.size() != target.paramTypes.size()) {\nreturn false;\n}\nfor (int i = 0; i < source.paramTypes.size(); i++) {\nif (!equality.test(source.paramTypes.get(i), target.paramTypes.get(i), unresolvedTypes)) {\nreturn false;\n}\n}\nif ((source.restType != null && target.restType == null) ||\ntarget.restType != null && source.restType == null) {\nreturn false;\n} else if (source.restType != null && !equality.test(source.restType, target.restType, unresolvedTypes)) {\nreturn false;\n}\nif (source.retType == null && target.retType == null) {\nreturn true;\n} else if (source.retType == null || target.retType == null) {\nreturn false;\n}\nreturn isAssignable(source.retType, target.retType, unresolvedTypes);\n}\npublic boolean checkArrayEquality(BType source, BType target, Set unresolvedTypes) {\nif (target.tag != TypeTags.ARRAY || source.tag != TypeTags.ARRAY) {\nreturn false;\n}\nBArrayType lhsArrayType = (BArrayType) target;\nBArrayType rhsArrayType = (BArrayType) source;\nif (lhsArrayType.state == BArrayState.UNSEALED) {\nreturn rhsArrayType.state == BArrayState.UNSEALED &&\nisSameType(lhsArrayType.eType, rhsArrayType.eType, unresolvedTypes);\n}\nreturn checkSealedArraySizeEquality(rhsArrayType, lhsArrayType)\n&& isSameType(lhsArrayType.eType, rhsArrayType.eType, unresolvedTypes);\n}\npublic boolean checkSealedArraySizeEquality(BArrayType rhsArrayType, BArrayType lhsArrayType) {\nreturn lhsArrayType.size == rhsArrayType.size;\n}\npublic boolean checkStructEquivalency(BType rhsType, BType lhsType) {\nreturn checkStructEquivalency(rhsType, lhsType, new HashSet<>());\n}\nprivate boolean checkStructEquivalency(BType rhsType, BType lhsType, Set unresolvedTypes) {\nTypePair pair = new TypePair(rhsType, lhsType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nif (rhsType.tag == TypeTags.OBJECT && lhsType.tag == TypeTags.OBJECT) {\nreturn 
checkObjectEquivalency((BObjectType) rhsType, (BObjectType) lhsType, unresolvedTypes);\n}\nif (rhsType.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {\nreturn checkRecordEquivalency((BRecordType) rhsType, (BRecordType) lhsType, unresolvedTypes);\n}\nreturn false;\n}\npublic boolean checkObjectEquivalency(BObjectType rhsType, BObjectType lhsType, Set unresolvedTypes) {\nBObjectTypeSymbol lhsStructSymbol = (BObjectTypeSymbol) lhsType.tsymbol;\nBObjectTypeSymbol rhsStructSymbol = (BObjectTypeSymbol) rhsType.tsymbol;\nList lhsFuncs = lhsStructSymbol.attachedFuncs;\nList rhsFuncs = ((BObjectTypeSymbol) rhsType.tsymbol).attachedFuncs;\nint lhsAttachedFuncCount = getObjectFuncCount(lhsStructSymbol);\nint rhsAttachedFuncCount = getObjectFuncCount(rhsStructSymbol);\nif (lhsType.fields.size() > rhsType.fields.size() || lhsAttachedFuncCount > rhsAttachedFuncCount) {\nreturn false;\n}\nif (lhsType.getFields().stream().anyMatch(field -> Symbols.isPrivate(field.symbol)) ||\nlhsFuncs.stream().anyMatch(func -> Symbols.isPrivate(func.symbol))) {\nreturn false;\n}\nMap rhsFields =\nrhsType.fields.stream().collect(Collectors.toMap(BField::getName, field -> field));\nfor (BField lhsField : lhsType.fields) {\nBField rhsField = rhsFields.get(lhsField.name);\nif (rhsField == null || !isInSameVisibilityRegion(lhsField.symbol, rhsField.symbol)\n|| !isAssignable(rhsField.type, lhsField.type)) {\nreturn false;\n}\n}\nfor (BAttachedFunction lhsFunc : lhsFuncs) {\nif (lhsFunc == lhsStructSymbol.initializerFunc) {\ncontinue;\n}\nBAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, unresolvedTypes);\nif (rhsFunc == null || !isInSameVisibilityRegion(lhsFunc.symbol, rhsFunc.symbol)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate int getObjectFuncCount(BObjectTypeSymbol sym) {\nif (sym.initializerFunc != null && sym.attachedFuncs.contains(sym.initializerFunc)) {\nreturn sym.attachedFuncs.size() - 1;\n}\nreturn sym.attachedFuncs.size();\n}\npublic boolean checkRecordEquivalency(BRecordType rhsType, BRecordType lhsType, Set unresolvedTypes) {\nif (lhsType.sealed && !rhsType.sealed) {\nreturn false;\n}\nif (!rhsType.sealed && !isAssignable(rhsType.restFieldType, lhsType.restFieldType, unresolvedTypes)) {\nreturn false;\n}\nreturn checkFieldEquivalency(lhsType, rhsType, unresolvedTypes);\n}\npublic void setForeachTypedBindingPatternType(BLangForeach foreachNode) {\nBType collectionType = foreachNode.collection.type;\nBType varType;\nswitch (collectionType.tag) {\ncase TypeTags.STRING:\nvarType = symTable.stringType;\nbreak;\ncase TypeTags.ARRAY:\nBArrayType arrayType = (BArrayType) collectionType;\nvarType = arrayType.eType;\nbreak;\ncase TypeTags.TUPLE:\nBTupleType tupleType = (BTupleType) collectionType;\nLinkedHashSet tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);\nif (tupleType.restType != null) {\ntupleTypes.add(tupleType.restType);\n}\nvarType = tupleTypes.size() == 1 ?\ntupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);\nbreak;\ncase TypeTags.MAP:\nBMapType bMapType = (BMapType) collectionType;\nvarType = bMapType.constraint;\nbreak;\ncase TypeTags.RECORD:\nBRecordType recordType = (BRecordType) collectionType;\nvarType = inferRecordFieldType(recordType);\nbreak;\ncase TypeTags.XML:\nvarType = BUnionType.create(null, symTable.xmlType, symTable.stringType);\nbreak;\ncase TypeTags.TABLE:\nBTableType tableType = (BTableType) collectionType;\nif (tableType.constraint.tag == TypeTags.NONE) {\nvarType = symTable.anydataType;\nbreak;\n}\nvarType = 
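/* A constrained table iterates over its row type, e.g. (illustrative) a foreach over table<Employee> binds Employee; unconstrained tables fall back to anydata above. */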
tableType.constraint;\nbreak;\ncase TypeTags.STREAM:\nBStreamType streamType = (BStreamType) collectionType;\nif (streamType.constraint.tag == TypeTags.NONE) {\nvarType = symTable.anydataType;\nbreak;\n}\nvarType = streamType.constraint;\nif (streamType.error != null) {\nBType actualType = BUnionType.create(null, varType, streamType.error);\ndlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_TYPES,\nvarType, actualType);\n}\nbreak;\ncase TypeTags.OBJECT:\nBUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);\nif (nextMethodReturnType != null) {\nforeachNode.resultType = getRecordType(nextMethodReturnType);\nBType valueType = (foreachNode.resultType != null)\n? ((BRecordType) foreachNode.resultType).fields.get(0).type : null;\nBType errorType = getErrorType(nextMethodReturnType);\nif (errorType != null) {\nBType actualType = BUnionType.create(null, valueType, errorType);\ndlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_TYPES,\nvalueType, actualType);\n}\nforeachNode.nillableResultType = nextMethodReturnType;\nforeachNode.varType = valueType;\nreturn;\n}\ndlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);\ncase TypeTags.SEMANTIC_ERROR:\nforeachNode.varType = symTable.semanticError;\nforeachNode.resultType = symTable.semanticError;\nforeachNode.nillableResultType = symTable.semanticError;\nreturn;\ndefault:\nforeachNode.varType = symTable.semanticError;\nforeachNode.resultType = symTable.semanticError;\nforeachNode.nillableResultType = symTable.semanticError;\ndlogHelper.error(foreachNode.collection.pos, DiagnosticCode.ITERABLE_NOT_SUPPORTED_COLLECTION,\ncollectionType);\nreturn;\n}\nBInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,\nnames.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));\nBUnionType nextMethodReturnType =\n(BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);\nforeachNode.varType = varType;\nforeachNode.resultType = getRecordType(nextMethodReturnType);\nforeachNode.nillableResultType = nextMethodReturnType;\n}\npublic void setFromClauseTypedBindingPatternType(BLangFromClause fromClause) {\nif (fromClause.collection == null) {\nreturn;\n}\nBType collectionType = fromClause.collection.type;\nBType varType;\nswitch (collectionType.tag) {\ncase TypeTags.STRING:\nvarType = symTable.stringType;\nbreak;\ncase TypeTags.ARRAY:\nBArrayType arrayType = (BArrayType) collectionType;\nvarType = arrayType.eType;\nbreak;\ncase TypeTags.TUPLE:\nBTupleType tupleType = (BTupleType) collectionType;\nLinkedHashSet tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);\nif (tupleType.restType != null) {\ntupleTypes.add(tupleType.restType);\n}\nvarType = tupleTypes.size() == 1 ?\ntupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);\nbreak;\ncase TypeTags.MAP:\nBMapType bMapType = (BMapType) collectionType;\nvarType = bMapType.constraint;\nbreak;\ncase TypeTags.RECORD:\nBRecordType recordType = (BRecordType) collectionType;\nvarType = inferRecordFieldType(recordType);\nbreak;\ncase TypeTags.XML:\nvarType = BUnionType.create(null, symTable.xmlType, symTable.stringType);\nbreak;\ncase TypeTags.TABLE:\nBTableType tableType = (BTableType) collectionType;\nif (tableType.constraint.tag == TypeTags.NONE) {\nvarType = symTable.anydataType;\nbreak;\n}\nvarType = tableType.constraint;\nbreak;\ncase TypeTags.STREAM:\nBStreamType streamType = (BStreamType) 
collectionType;\nif (streamType.constraint.tag == TypeTags.NONE) {\nvarType = symTable.anydataType;\nbreak;\n}\nvarType = streamType.constraint;\nbreak;\ncase TypeTags.OBJECT:\nBUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);\nif (nextMethodReturnType != null) {\nfromClause.resultType = getRecordType(nextMethodReturnType);\nfromClause.nillableResultType = nextMethodReturnType;\nfromClause.varType = ((BRecordType) fromClause.resultType).fields.get(0).type;\nreturn;\n}\ndlogHelper.error(fromClause.collection.pos, DiagnosticCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);\ncase TypeTags.SEMANTIC_ERROR:\nfromClause.varType = symTable.semanticError;\nfromClause.resultType = symTable.semanticError;\nfromClause.nillableResultType = symTable.semanticError;\nreturn;\ndefault:\nfromClause.varType = symTable.semanticError;\nfromClause.resultType = symTable.semanticError;\nfromClause.nillableResultType = symTable.semanticError;\ndlogHelper.error(fromClause.collection.pos, DiagnosticCode.ITERABLE_NOT_SUPPORTED_COLLECTION,\ncollectionType);\nreturn;\n}\nBInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,\nnames.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));\nBUnionType nextMethodReturnType =\n(BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);\nfromClause.varType = varType;\nfromClause.resultType = getRecordType(nextMethodReturnType);\nfromClause.nillableResultType = nextMethodReturnType;\n}\npublic BUnionType getVarTypeFromIterableObject(BObjectType collectionType) {\nBObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionType.tsymbol;\nfor (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {\nif (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {\nreturn getVarTypeFromIteratorFunc(func);\n}\n}\nreturn null;\n}\nprivate BUnionType getVarTypeFromIteratorFunc(BAttachedFunction candidateIteratorFunc) {\nif (!candidateIteratorFunc.type.paramTypes.isEmpty()) {\nreturn null;\n}\nBType returnType = candidateIteratorFunc.type.retType;\nreturn getVarTypeFromIteratorFuncReturnType(returnType);\n}\npublic BUnionType getVarTypeFromIteratorFuncReturnType(BType returnType) {\nBObjectTypeSymbol objectTypeSymbol;\nif (returnType.tag != TypeTags.OBJECT) {\nreturn null;\n}\nobjectTypeSymbol = (BObjectTypeSymbol) returnType.tsymbol;\nfor (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {\nif (func.funcName.value.equals(BLangCompilerConstants.NEXT_FUNC)) {\nreturn getVarTypeFromNextFunc(func);\n}\n}\nreturn null;\n}\nprivate BUnionType getVarTypeFromNextFunc(BAttachedFunction nextFunc) {\nBType returnType;\nif (!nextFunc.type.paramTypes.isEmpty()) {\nreturn null;\n}\nreturnType = nextFunc.type.retType;\nif (checkNextFuncReturnType(returnType)) {\nreturn (BUnionType) returnType;\n}\nreturn null;\n}\nprivate boolean checkNextFuncReturnType(BType returnType) {\nif (returnType.tag != TypeTags.UNION) {\nreturn false;\n}\nList types = new ArrayList<>(((BUnionType) returnType).getMemberTypes());\nif (!types.removeIf(type -> type.tag == TypeTags.NIL)) {\nreturn false;\n}\ntypes.removeIf(type -> type.tag == TypeTags.ERROR);\nif (types.size() != 1) {\nreturn false;\n}\nif (types.get(0).tag != TypeTags.RECORD) {\nreturn false;\n}\nBRecordType recordType = (BRecordType) types.get(0);\nreturn checkRecordTypeInNextFuncReturnType(recordType);\n}\nprivate boolean checkRecordTypeInNextFuncReturnType(BRecordType recordType) {\nif 
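/* Together with checkNextFuncReturnType above, this enforces the iterator contract: next() must return a union of nil (and optionally error) with a single sealed record of the shape record {| T value; |}, whose T becomes the iteration variable type. */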
(!recordType.sealed) {\nreturn false;\n}\nif (recordType.fields.size() != 1) {\nreturn false;\n}\nfor (BField field : recordType.fields) {\nif (field.name.value.equals(BLangCompilerConstants.VALUE_FIELD)) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate BRecordType getRecordType(BUnionType type) {\nfor (BType member : type.getMemberTypes()) {\nif (member.tag == TypeTags.RECORD) {\nreturn (BRecordType) member;\n}\n}\nreturn null;\n}\npublic BErrorType getErrorType(BUnionType type) {\nfor (BType member : type.getMemberTypes()) {\nif (member.tag == TypeTags.ERROR) {\nreturn (BErrorType) member;\n} else if (member.tag == TypeTags.UNION) {\nBErrorType e = getErrorType((BUnionType) member);\nif (e != null) {\nreturn e;\n}\n}\n}\nreturn null;\n}\nprivate BType getResultTypeOfNextInvocation(BObjectType iteratorType) {\nBAttachedFunction nextFunc = getNextFunc(iteratorType);\nreturn Objects.requireNonNull(nextFunc).type.retType;\n}\nprivate BAttachedFunction getNextFunc(BObjectType iteratorType) {\nBObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;\nfor (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {\nif (bAttachedFunction.funcName.value\n.equals(BLangCompilerConstants.NEXT_FUNC)) {\nreturn bAttachedFunction;\n}\n}\nreturn null;\n}\npublic BType inferRecordFieldType(BRecordType recordType) {\nList fields = recordType.fields;\nBUnionType unionType = BUnionType.create(null);\nif (!recordType.sealed) {\nunionType.add(recordType.restFieldType);\n}\nfor (BField field : fields) {\nif (isAssignable(field.type, unionType)) {\ncontinue;\n}\nif (isAssignable(unionType, field.type)) {\nunionType = BUnionType.create(null);\n}\nunionType.add(field.type);\n}\nif (unionType.getMemberTypes().size() > 1) {\nunionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),\nNames.EMPTY, recordType.tsymbol.pkgID, null, recordType.tsymbol.owner);\nreturn unionType;\n}\nreturn unionType.getMemberTypes().iterator().next();\n}\n/**\n* Enum to represent type test result.\n*\n* @since 1.2.0\n*/\nenum TypeTestResult {\nNOT_FOUND,\nTRUE,\nFALSE\n}\nTypeTestResult isBuiltInTypeWidenPossible(BType actualType, BType targetType) {\nint targetTag = targetType.tag;\nint actualTag = actualType.tag;\nif (actualTag < TypeTags.JSON && targetTag < TypeTags.JSON) {\nswitch (actualTag) {\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\nif (targetTag == TypeTags.BOOLEAN || targetTag == TypeTags.STRING) {\nreturn TypeTestResult.FALSE;\n}\nbreak;\ncase TypeTags.BOOLEAN:\nif (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT\n|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.STRING) {\nreturn TypeTestResult.FALSE;\n}\nbreak;\ncase TypeTags.STRING:\nif (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT\n|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.BOOLEAN) {\nreturn TypeTestResult.FALSE;\n}\nbreak;\n}\n}\nswitch (actualTag) {\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.BOOLEAN:\ncase TypeTags.STRING:\ncase TypeTags.SIGNED32_INT:\ncase TypeTags.SIGNED16_INT:\ncase TypeTags.SIGNED8_INT:\ncase TypeTags.UNSIGNED32_INT:\ncase TypeTags.UNSIGNED16_INT:\ncase TypeTags.UNSIGNED8_INT:\ncase TypeTags.CHAR_STRING:\nif (targetTag == TypeTags.JSON || targetTag == TypeTags.ANYDATA || targetTag == TypeTags.ANY) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.ANYDATA:\ncase 
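/* anydata and typedesc widen only to any; the basic types above additionally widen to json and anydata. */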
TypeTags.TYPEDESC:\nif (targetTag == TypeTags.ANY) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ndefault:\n}\nif (TypeTags.isIntegerTypeTag(targetTag) && actualTag == targetTag) {\nreturn TypeTestResult.FALSE;\n}\nif ((TypeTags.isIntegerTypeTag(actualTag) || actualTag == TypeTags.BYTE)\n&& (TypeTags.isIntegerTypeTag(targetTag) || targetTag == TypeTags.BYTE)) {\nreturn checkBuiltInIntSubtypeWidenPossible(actualType, targetType);\n}\nif (actualTag == TypeTags.CHAR_STRING && TypeTags.STRING == targetTag) {\nreturn TypeTestResult.TRUE;\n}\nreturn TypeTestResult.NOT_FOUND;\n}\nprivate TypeTestResult checkBuiltInIntSubtypeWidenPossible(BType actualType, BType targetType) {\nint actualTag = actualType.tag;\nswitch (targetType.tag) {\ncase TypeTags.INT:\nif (actualTag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(actualTag)) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.SIGNED32_INT:\nif (actualTag == TypeTags.SIGNED16_INT || actualTag == TypeTags.SIGNED8_INT ||\nactualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||\nactualTag == TypeTags.BYTE) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.SIGNED16_INT:\nif (actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED8_INT ||\nactualTag == TypeTags.BYTE) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.UNSIGNED32_INT:\nif (actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||\nactualTag == TypeTags.BYTE) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.UNSIGNED16_INT:\nif (actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.BYTE:\nif (actualTag == TypeTags.UNSIGNED8_INT) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.UNSIGNED8_INT:\nif (actualTag == TypeTags.BYTE) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\n}\nreturn TypeTestResult.NOT_FOUND;\n}\npublic boolean isImplicityCastable(BType actualType, BType targetType) {\n/* The word Builtin refers to compiler-known types. 
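For example (illustrative): int widens to json, anydata and any, and byte widens to int, but there is no implicit conversion between int and string. 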
*/\nBType newTargetType = targetType;\nif ((targetType.tag == TypeTags.UNION || targetType.tag == TypeTags.FINITE) && isValueType(actualType)) {\nnewTargetType = symTable.anyType;\n}\nTypeTestResult result = isBuiltInTypeWidenPossible(actualType, newTargetType);\nif (result != TypeTestResult.NOT_FOUND) {\nreturn result == TypeTestResult.TRUE;\n}\nif (isValueType(targetType) &&\n(actualType.tag == TypeTags.FINITE ||\n(actualType.tag == TypeTags.UNION && ((BUnionType) actualType).getMemberTypes().stream()\n.anyMatch(type -> type.tag == TypeTags.FINITE && isAssignable(type, targetType))))) {\nreturn targetType.tag == TypeTags.INT || targetType.tag == TypeTags.BYTE || targetType.tag == TypeTags.FLOAT\n|| targetType.tag == TypeTags.STRING || targetType.tag == TypeTags.BOOLEAN;\n} else if (targetType.tag == TypeTags.ERROR\n&& (actualType.tag == TypeTags.UNION\n&& isAllErrorMembers((BUnionType) actualType))) {\nreturn true;\n}\nreturn false;\n}\npublic boolean isTypeCastable(BLangExpression expr, BType sourceType, BType targetType) {\nif (sourceType.tag == TypeTags.SEMANTIC_ERROR || targetType.tag == TypeTags.SEMANTIC_ERROR ||\nsourceType == targetType) {\nreturn true;\n}\nif (isAssignable(sourceType, targetType) || isAssignable(targetType, sourceType)) {\nreturn true;\n}\nif (isNumericConversionPossible(expr, sourceType, targetType)) {\nreturn true;\n}\nboolean validTypeCast = false;\nif (sourceType.tag == TypeTags.UNION) {\nif (getTypeForUnionTypeMembersAssignableToType((BUnionType) sourceType, targetType)\n!= symTable.semanticError) {\nvalidTypeCast = true;\n}\n}\nif (targetType.tag == TypeTags.UNION) {\nif (getTypeForUnionTypeMembersAssignableToType((BUnionType) targetType, sourceType)\n!= symTable.semanticError) {\nvalidTypeCast = true;\n}\n}\nif (sourceType.tag == TypeTags.FINITE) {\nif (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) sourceType, targetType)\n!= symTable.semanticError) {\nvalidTypeCast = true;\n}\n}\nif (targetType.tag == TypeTags.FINITE) {\nif (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) targetType, sourceType)\n!= symTable.semanticError) {\nvalidTypeCast = true;\n}\n}\nif (validTypeCast) {\nif (isValueType(sourceType)) {\nsetImplicitCastExpr(expr, sourceType, symTable.anyType);\n}\nreturn true;\n}\nreturn false;\n}\nboolean isNumericConversionPossible(BLangExpression expr, BType sourceType,\nBType targetType) {\nfinal boolean isSourceNumericType = isBasicNumericType(sourceType);\nfinal boolean isTargetNumericType = isBasicNumericType(targetType);\nif (isSourceNumericType && isTargetNumericType) {\nreturn true;\n}\nif (targetType.tag == TypeTags.UNION) {\nHashSet typeTags = new HashSet<>();\nfor (BType bType : ((BUnionType) targetType).getMemberTypes()) {\nif (isBasicNumericType(bType)) {\ntypeTags.add(bType.tag);\nif (typeTags.size() > 1) {\nreturn false;\n}\n}\n}\n}\nif (!isTargetNumericType && targetType.tag != TypeTags.UNION) {\nreturn false;\n}\nif (isSourceNumericType) {\nsetImplicitCastExpr(expr, sourceType, symTable.anyType);\nreturn true;\n}\nswitch (sourceType.tag) {\ncase TypeTags.ANY:\ncase TypeTags.ANYDATA:\ncase TypeTags.JSON:\nreturn true;\ncase TypeTags.UNION:\nfor (BType memType : ((BUnionType) sourceType).getMemberTypes()) {\nif (isBasicNumericType(memType) ||\n(memType.tag == TypeTags.FINITE &&\nfiniteTypeContainsNumericTypeValues((BFiniteType) memType))) {\nreturn true;\n}\n}\nbreak;\ncase TypeTags.FINITE:\nif (finiteTypeContainsNumericTypeValues((BFiniteType) sourceType)) {\nreturn true;\n}\nbreak;\n}\nreturn false;\n}\nprivate 
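/* Helper for the error-union branch of isImplicityCastable above: a union implicitly casts to error only when every member is error-assignable. */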
boolean isAllErrorMembers(BUnionType actualType) {\nreturn actualType.getMemberTypes().stream().allMatch(t -> isAssignable(t, symTable.errorType));\n}\npublic void setImplicitCastExpr(BLangExpression expr, BType actualType, BType expType) {\nif (!isImplicityCastable(actualType, expType)) {\nreturn;\n}\nBLangTypeConversionExpr implicitConversionExpr =\n(BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();\nimplicitConversionExpr.pos = expr.pos;\nimplicitConversionExpr.expr = expr.impConversionExpr == null ? expr : expr.impConversionExpr;\nimplicitConversionExpr.type = expType;\nimplicitConversionExpr.targetType = expType;\nexpr.impConversionExpr = implicitConversionExpr;\n}\npublic BType getElementType(BType type) {\nif (type.tag != TypeTags.ARRAY) {\nreturn type;\n}\nreturn getElementType(((BArrayType) type).getElementType());\n}\npublic boolean checkListenerCompatibility(BType type) {\nif (type.tag != TypeTags.OBJECT) {\nreturn false;\n}\nfinal BSymbol bSymbol = symTable.langObjectModuleSymbol.scope.lookup(Names.LISTENER).symbol;\nif (bSymbol == symTable.notFoundSymbol || bSymbol.type.tag != TypeTags.OBJECT) {\nthrow new AssertionError(\"Listener object not defined.\");\n}\nBObjectType rhsType = (BObjectType) type;\nBObjectType lhsType = (BObjectType) bSymbol.type;\nBStructureTypeSymbol lhsStructSymbol = (BStructureTypeSymbol) lhsType.tsymbol;\nList lhsFuncs = lhsStructSymbol.attachedFuncs;\nList rhsFuncs = ((BStructureTypeSymbol) rhsType.tsymbol).attachedFuncs;\nint lhsAttachedFuncCount = lhsStructSymbol.initializerFunc != null ? lhsFuncs.size() - 1 : lhsFuncs.size();\nif (lhsAttachedFuncCount > rhsFuncs.size()) {\nreturn false;\n}\nfor (BAttachedFunction lhsFunc : lhsFuncs) {\nif (lhsFunc == lhsStructSymbol.initializerFunc) {\ncontinue;\n}\nif (!Symbols.isPublic(lhsFunc.symbol)) {\nreturn false;\n}\nBAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, new HashSet<>());\nif (rhsFunc == null || !Symbols.isPublic(rhsFunc.symbol)) {\nreturn false;\n}\n}\nreturn true;\n}\npublic boolean isValidErrorDetailType(BType detailType) {\nswitch (detailType.tag) {\ncase TypeTags.MAP:\ncase TypeTags.RECORD:\nreturn isAssignable(detailType, symTable.detailType);\n}\nreturn false;\n}\nprivate boolean isNullable(BType fieldType) {\nreturn fieldType.isNullable();\n}\nprivate class BSameTypeVisitor implements BTypeVisitor {\nSet unresolvedTypes;\nBSameTypeVisitor(Set unresolvedTypes) {\nthis.unresolvedTypes = unresolvedTypes;\n}\n@Override\npublic Boolean visit(BType t, BType s) {\nif (t == s) {\nreturn true;\n}\nswitch (t.tag) {\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.STRING:\ncase TypeTags.BOOLEAN:\ncase TypeTags.ANY:\ncase TypeTags.ANYDATA:\nreturn t.tag == s.tag\n&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));\ndefault:\nbreak;\n}\nreturn false;\n}\n@Override\npublic Boolean visit(BBuiltInRefType t, BType s) {\nreturn t == s;\n}\n@Override\npublic Boolean visit(BAnyType t, BType s) {\nreturn t == s;\n}\n@Override\npublic Boolean visit(BAnydataType t, BType s) {\nreturn t == s;\n}\n@Override\npublic Boolean visit(BMapType t, BType s) {\nif (s.tag != TypeTags.MAP) {\nreturn false;\n}\nBMapType sType = ((BMapType) s);\nreturn isSameType(sType.constraint, t.constraint, this.unresolvedTypes);\n}\n@Override\npublic Boolean visit(BFutureType t, BType s) {\nreturn s.tag == TypeTags.FUTURE && t.constraint.tag == ((BFutureType) s).constraint.tag;\n}\n@Override\npublic Boolean visit(BXMLType t, 
BType s) {\nreturn visit((BBuiltInRefType) t, s);\n}\n@Override\npublic Boolean visit(BJSONType t, BType s) {\nreturn s.tag == TypeTags.JSON;\n}\n@Override\npublic Boolean visit(BArrayType t, BType s) {\nreturn s.tag == TypeTags.ARRAY && checkArrayEquality(s, t, new HashSet<>());\n}\n@Override\npublic Boolean visit(BObjectType t, BType s) {\nif (t == s) {\nreturn true;\n}\nif (s.tag != TypeTags.OBJECT) {\nreturn false;\n}\nreturn t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name);\n}\n@Override\npublic Boolean visit(BRecordType t, BType s) {\nif (t == s) {\nreturn true;\n}\nif (s.tag != TypeTags.RECORD) {\nreturn false;\n}\nBRecordType source = (BRecordType) s;\nif (source.fields.size() != t.fields.size()) {\nreturn false;\n}\nboolean notSameType = source.fields\n.stream()\n.map(fs -> t.fields.stream()\n.anyMatch(ft -> fs.name.equals(ft.name)\n&& isSameType(fs.type, ft.type, this.unresolvedTypes)\n&& hasSameOptionalFlag(fs.symbol, ft.symbol)))\n.anyMatch(foundSameType -> !foundSameType);\nif (notSameType) {\nreturn false;\n}\nreturn isSameType(source.restFieldType, t.restFieldType, unresolvedTypes);\n}\nprivate boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) {\nreturn ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL;\n}\n@Override\npublic Boolean visit(BTableType t, BType s) {\nreturn t == s;\n}\npublic Boolean visit(BTupleType t, BType s) {\nif (s.tag != TypeTags.TUPLE) {\nreturn false;\n}\nBTupleType source = (BTupleType) s;\nif (source.tupleTypes.size() != t.tupleTypes.size()) {\nreturn false;\n}\nfor (int i = 0; i < source.tupleTypes.size(); i++) {\nif (t.getTupleTypes().get(i) == symTable.noType) {\ncontinue;\n}\nif (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\n@Override\npublic Boolean visit(BStreamType t, BType s) {\nreturn t == s;\n}\n@Override\npublic Boolean visit(BInvokableType t, BType s) {\nreturn s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, new HashSet<>());\n}\n@Override\npublic Boolean visit(BUnionType tUnionType, BType s) {\nif (s.tag != TypeTags.UNION) {\nreturn false;\n}\nBUnionType sUnionType = (BUnionType) s;\nif (sUnionType.getMemberTypes().size()\n!= tUnionType.getMemberTypes().size()) {\nreturn false;\n}\nSet sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes());\nSet targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes());\nboolean notSameType = sourceTypes\n.stream()\n.map(sT -> targetTypes\n.stream()\n.anyMatch(it -> isSameType(it, sT, this.unresolvedTypes)))\n.anyMatch(foundSameType -> !foundSameType);\nreturn !notSameType;\n}\n@Override\npublic Boolean visit(BErrorType t, BType s) {\nif (s.tag != TypeTags.ERROR) {\nreturn false;\n}\nBErrorType source = (BErrorType) s;\nif (!isSameType(source.reasonType, t.reasonType, this.unresolvedTypes)) {\nreturn false;\n}\nif (source.detailType == t.detailType) {\nreturn true;\n}\nreturn isSameType(source.detailType, t.detailType, this.unresolvedTypes);\n}\n@Override\npublic Boolean visit(BServiceType t, BType s) {\nreturn t == s || t.tag == s.tag;\n}\n@Override\npublic Boolean visit(BTypedescType t, BType s) {\nif (s.tag != TypeTags.TYPEDESC) {\nreturn false;\n}\nBTypedescType sType = ((BTypedescType) s);\nreturn isSameType(sType.constraint, t.constraint, this.unresolvedTypes);\n}\n@Override\npublic Boolean visit(BFiniteType t, BType s) {\nreturn s == t;\n}\n};\nprivate boolean checkFieldEquivalency(BRecordType lhsType, BRecordType 
rhsType, Set unresolvedTypes) {\nMap rhsFields = rhsType.fields.stream().collect(Collectors.toMap(BField::getName, f -> f));\nfor (BField lhsField : lhsType.fields) {\nBField rhsField = rhsFields.get(lhsField.name);\nif (rhsField == null) {\nreturn false;\n}\nif (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(rhsField.symbol)) {\nreturn false;\n}\nif (!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {\nreturn false;\n}\nrhsFields.remove(lhsField.name);\n}\nreturn rhsFields.entrySet().stream().allMatch(\nfieldEntry -> isAssignable(fieldEntry.getValue().type, lhsType.restFieldType, unresolvedTypes));\n}\nprivate BAttachedFunction getMatchingInvokableType(List rhsFuncList, BAttachedFunction lhsFunc,\nSet unresolvedTypes) {\nreturn rhsFuncList.stream()\n.filter(rhsFunc -> lhsFunc.funcName.equals(rhsFunc.funcName))\n.filter(rhsFunc -> isFunctionTypeAssignable(rhsFunc.type, lhsFunc.type, unresolvedTypes))\n.findFirst()\n.orElse(null);\n}\nprivate boolean isInSameVisibilityRegion(BSymbol lhsSym, BSymbol rhsSym) {\nif (Symbols.isPrivate(lhsSym)) {\nreturn Symbols.isPrivate(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID)\n&& lhsSym.owner.name.equals(rhsSym.owner.name);\n} else if (Symbols.isPublic(lhsSym)) {\nreturn Symbols.isPublic(rhsSym);\n}\nreturn !Symbols.isPrivate(rhsSym) && !Symbols.isPublic(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID);\n}\nprivate boolean isAssignableToUnionType(BType source, BType target, Set unresolvedTypes) {\nSet sourceTypes = new LinkedHashSet<>();\nSet targetTypes = new LinkedHashSet<>();\nif (source.tag == TypeTags.UNION) {\nBUnionType sourceUnionType = (BUnionType) source;\nsourceTypes.addAll(sourceUnionType.getMemberTypes());\n} else {\nsourceTypes.add(source);\n}\nif (target.tag == TypeTags.UNION) {\nBUnionType targetUnionType = (BUnionType) target;\ntargetTypes.addAll(targetUnionType.getMemberTypes());\n} else {\ntargetTypes.add(target);\n}\nreturn sourceTypes.stream()\n.allMatch(s -> (targetTypes.stream().anyMatch(t -> isAssignable(s, t, unresolvedTypes)))\n|| (s.tag == TypeTags.FINITE && isAssignable(s, target, unresolvedTypes))\n|| (s.tag == TypeTags.XML\n&& isAssignableToUnionType(expandedXMLBuiltinSubtypes, target, unresolvedTypes)));\n}\nprivate boolean isFiniteTypeAssignable(BFiniteType finiteType, BType targetType, Set unresolvedTypes) {\nif (targetType.tag == TypeTags.FINITE) {\nreturn finiteType.getValueSpace().stream()\n.allMatch(expression -> isAssignableToFiniteType(targetType, (BLangLiteral) expression));\n}\nif (targetType.tag == TypeTags.UNION) {\nList unionMemberTypes = getAllTypes(targetType);\nreturn finiteType.getValueSpace().stream()\n.allMatch(valueExpr -> unionMemberTypes.stream()\n.anyMatch(targetMemType -> targetMemType.tag == TypeTags.FINITE ?\nisAssignableToFiniteType(targetMemType, (BLangLiteral) valueExpr) :\nisAssignable(valueExpr.type, targetType, unresolvedTypes)));\n}\nreturn finiteType.getValueSpace().stream()\n.allMatch(expression -> isAssignable(expression.type, targetType, unresolvedTypes));\n}\nboolean isAssignableToFiniteType(BType type, BLangLiteral literalExpr) {\nif (type.tag != TypeTags.FINITE) {\nreturn false;\n}\nBFiniteType expType = (BFiniteType) type;\nreturn expType.getValueSpace().stream().anyMatch(memberLiteral -> {\nif (((BLangLiteral) memberLiteral).value == null) {\nreturn literalExpr.value == null;\n}\nreturn checkLiteralAssignabilityBasedOnType((BLangLiteral) memberLiteral, literalExpr);\n});\n}\n/**\n* Method to check the literal assignability based on the types of the literals. 
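As an illustration: within a finite type, a base int literal 1 accepts a candidate int literal 1 but rejects 2, and a string candidate is rejected outright because the literal kinds differ. 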
For numeric literals the\n* assignability depends on the equivalency of the literals. The candidate literal can either be a simple\n* literal or a constant. In case of a constant, it is assignable to the base literal if and only if both\n* literals have the same type and equivalent values.\n*\n* @param baseLiteral Literal based on which we check the assignability.\n* @param candidateLiteral Literal to be tested whether it is assignable to the base literal or not.\n* @return true if assignable; false otherwise.\n*/\nboolean checkLiteralAssignabilityBasedOnType(BLangLiteral baseLiteral, BLangLiteral candidateLiteral) {\nif (baseLiteral.getKind() != candidateLiteral.getKind()) {\nreturn false;\n}\nObject baseValue = baseLiteral.value;\nObject candidateValue = candidateLiteral.value;\nint candidateTypeTag = candidateLiteral.type.tag;\nswitch (baseLiteral.type.tag) {\ncase TypeTags.BYTE:\nif (candidateTypeTag == TypeTags.BYTE || (candidateTypeTag == TypeTags.INT &&\n!candidateLiteral.isConstant && isByteLiteralValue((Long) candidateValue))) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.INT:\nif (candidateTypeTag == TypeTags.INT) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.SIGNED32_INT:\nif (candidateTypeTag == TypeTags.INT && isSigned32LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.SIGNED16_INT:\nif (candidateTypeTag == TypeTags.INT && isSigned16LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.SIGNED8_INT:\nif (candidateTypeTag == TypeTags.INT && isSigned8LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.UNSIGNED32_INT:\nif (candidateTypeTag == TypeTags.INT && isUnsigned32LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.UNSIGNED16_INT:\nif (candidateTypeTag == TypeTags.INT && isUnsigned16LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.UNSIGNED8_INT:\nif (candidateTypeTag == TypeTags.INT && isUnsigned8LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.FLOAT:\nString baseValueStr = String.valueOf(baseValue);\nString originalValue = baseLiteral.originalValue != null ? 
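/* The original source text is consulted so that a decimal-discriminated literal such as 1.0d (illustrative) is never treated as assignable to a float literal below. */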
baseLiteral.originalValue : baseValueStr;\nif (NumericLiteralSupport.isDecimalDiscriminated(originalValue)) {\nreturn false;\n}\ndouble baseDoubleVal = Double.parseDouble(baseValueStr);\ndouble candidateDoubleVal;\nif (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {\ncandidateDoubleVal = ((Long) candidateValue).doubleValue();\nreturn baseDoubleVal == candidateDoubleVal;\n} else if (candidateTypeTag == TypeTags.FLOAT) {\ncandidateDoubleVal = Double.parseDouble(String.valueOf(candidateValue));\nreturn baseDoubleVal == candidateDoubleVal;\n}\nbreak;\ncase TypeTags.DECIMAL:\nBigDecimal baseDecimalVal = NumericLiteralSupport.parseBigDecimal(baseValue);\nBigDecimal candidateDecimalVal;\nif (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {\ncandidateDecimalVal = new BigDecimal((long) candidateValue, MathContext.DECIMAL128);\nreturn baseDecimalVal.compareTo(candidateDecimalVal) == 0;\n} else if (candidateTypeTag == TypeTags.FLOAT && !candidateLiteral.isConstant ||\ncandidateTypeTag == TypeTags.DECIMAL) {\nif (NumericLiteralSupport.isFloatDiscriminated(String.valueOf(candidateValue))) {\nreturn false;\n}\ncandidateDecimalVal = NumericLiteralSupport.parseBigDecimal(candidateValue);\nreturn baseDecimalVal.compareTo(candidateDecimalVal) == 0;\n}\nbreak;\ndefault:\nreturn baseValue.equals(candidateValue);\n}\nreturn false;\n}\nboolean isByteLiteralValue(Long longObject) {\nreturn (longObject.intValue() >= BBYTE_MIN_VALUE && longObject.intValue() <= BBYTE_MAX_VALUE);\n}\nboolean isSigned32LiteralValue(Long longObject) {\nreturn (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE);\n}\nboolean isSigned16LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= SIGNED16_MIN_VALUE && longObject.intValue() <= SIGNED16_MAX_VALUE);\n}\nboolean isSigned8LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= SIGNED8_MIN_VALUE && longObject.intValue() <= SIGNED8_MAX_VALUE);\n}\nboolean isUnsigned32LiteralValue(Long longObject) {\nreturn (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE);\n}\nboolean isUnsigned16LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED16_MAX_VALUE);\n}\nboolean isUnsigned8LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED8_MAX_VALUE);\n}\nboolean isCharLiteralValue(String literal) {\nreturn (literal.codePoints().count() == 1);\n}\n/**\n* Method to retrieve a type representing all the values in the value space of a finite type that are assignable to\n* the target type.\n*\n* @param finiteType the finite type\n* @param targetType the target type\n* @return a new finite type if at least one value in the value space of the specified finiteType is\n* assignable to targetType (the same if all are assignable), else semanticError\n*/\nBType getTypeForFiniteTypeValuesAssignableToType(BFiniteType finiteType, BType targetType) {\nif (isAssignable(finiteType, targetType)) {\nreturn finiteType;\n}\nSet matchingValues = finiteType.getValueSpace().stream()\n.filter(\nexpr -> isAssignable(expr.type, targetType) ||\nisAssignableToFiniteType(targetType, (BLangLiteral) expr) ||\n(targetType.tag == TypeTags.UNION &&\n((BUnionType) targetType).getMemberTypes().stream()\n.filter(memType -> memType.tag == TypeTags.FINITE)\n.anyMatch(filteredType -> isAssignableToFiniteType(filteredType,\n(BLangLiteral) expr))))\n.collect(Collectors.toSet());\nif (matchingValues.isEmpty()) {\nreturn symTable.semanticError;\n}\nBTypeSymbol 
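/* A fresh anonymous finite type is synthesized here to hold just the matching subset of the original value space. */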
finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteType.tsymbol.flags,\nnames.fromString(\"$anonType$\" + finiteTypeCount++),\nfiniteType.tsymbol.pkgID, null,\nfiniteType.tsymbol.owner);\nBFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, matchingValues);\nfiniteTypeSymbol.type = intersectingFiniteType;\nreturn intersectingFiniteType;\n}\n/**\n* Method to retrieve a type representing all the member types of a union type that are assignable to\n* the target type.\n*\n* @param unionType the union type\n* @param targetType the target type\n* @return a single type or a new union type if at least one member type of the union type is\n* assignable to targetType, else semanticError\n*/\nBType getTypeForUnionTypeMembersAssignableToType(BUnionType unionType, BType targetType) {\nList intersection = new LinkedList<>();\nunionType.getMemberTypes().forEach(memType -> {\nif (memType.tag == TypeTags.FINITE) {\nBType finiteTypeWithMatches = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) memType,\ntargetType);\nif (finiteTypeWithMatches != symTable.semanticError) {\nintersection.add(finiteTypeWithMatches);\n}\n} else {\nif (isAssignable(memType, targetType)) {\nintersection.add(memType);\n}\n}\n});\nif (intersection.isEmpty()) {\nreturn symTable.semanticError;\n}\nif (intersection.size() == 1) {\nreturn intersection.get(0);\n} else {\nreturn BUnionType.create(null, new LinkedHashSet<>(intersection));\n}\n}\nboolean validEqualityIntersectionExists(BType lhsType, BType rhsType) {\nif (!lhsType.isPureType() || !rhsType.isPureType()) {\nreturn false;\n}\nif (isAssignable(lhsType, rhsType) || isAssignable(rhsType, lhsType)) {\nreturn true;\n}\nSet lhsTypes = expandAndGetMemberTypesRecursive(lhsType);\nSet rhsTypes = expandAndGetMemberTypesRecursive(rhsType);\nreturn equalityIntersectionExists(lhsTypes, rhsTypes);\n}\nprivate boolean equalityIntersectionExists(Set lhsTypes, Set rhsTypes) {\nif ((lhsTypes.contains(symTable.anydataType) &&\nrhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR)) ||\n(rhsTypes.contains(symTable.anydataType) &&\nlhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR))) {\nreturn true;\n}\nboolean matchFound = lhsTypes\n.stream()\n.anyMatch(s -> rhsTypes\n.stream()\n.anyMatch(t -> isSameType(s, t)));\nif (!matchFound) {\nmatchFound = equalityIntersectionExistsForComplexTypes(lhsTypes, rhsTypes);\n}\nreturn matchFound;\n}\n/**\n* Retrieves member types of the specified type, expanding maps/arrays of/constrained by union types to individual\n* maps/arrays.\n*\n* e.g., (string|int)[] would cause three entries as string[], int[], (string|int)[]\n*\n* @param bType the type for which member types need to be identified\n* @return a set containing all the retrieved member types\n*/\npublic Set expandAndGetMemberTypesRecursive(BType bType) {\nSet memberTypes = new LinkedHashSet<>();\nswitch (bType.tag) {\ncase TypeTags.BYTE:\ncase TypeTags.INT:\nmemberTypes.add(symTable.intType);\nmemberTypes.add(symTable.byteType);\nbreak;\ncase TypeTags.FINITE:\nBFiniteType expType = (BFiniteType) bType;\nexpType.getValueSpace().forEach(value -> {\nmemberTypes.add(value.type);\n});\nbreak;\ncase TypeTags.UNION:\nBUnionType unionType = (BUnionType) bType;\nunionType.getMemberTypes().forEach(member -> {\nmemberTypes.addAll(expandAndGetMemberTypesRecursive(member));\n});\nbreak;\ncase TypeTags.ARRAY:\nBType arrayElementType = ((BArrayType) bType).getElementType();\nif (((BArrayType) bType).getSize() != -1) {\nmemberTypes.add(new 
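/* For a sealed array, an unsealed T[] variant is added as well, so that equality intersection can match irrespective of the fixed size. */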
BArrayType(arrayElementType));\n}\nif (arrayElementType.tag == TypeTags.UNION) {\nSet elementUnionTypes = expandAndGetMemberTypesRecursive(arrayElementType);\nelementUnionTypes.forEach(elementUnionType -> {\nmemberTypes.add(new BArrayType(elementUnionType));\n});\n}\nmemberTypes.add(bType);\nbreak;\ncase TypeTags.MAP:\nBType mapConstraintType = ((BMapType) bType).getConstraint();\nif (mapConstraintType.tag == TypeTags.UNION) {\nSet constraintUnionTypes = expandAndGetMemberTypesRecursive(mapConstraintType);\nconstraintUnionTypes.forEach(constraintUnionType -> {\nmemberTypes.add(new BMapType(TypeTags.MAP, constraintUnionType, symTable.mapType.tsymbol));\n});\n}\nmemberTypes.add(bType);\nbreak;\ndefault:\nmemberTypes.add(bType);\n}\nreturn memberTypes;\n}\nprivate boolean tupleIntersectionExists(BTupleType lhsType, BTupleType rhsType) {\nif (lhsType.getTupleTypes().size() != rhsType.getTupleTypes().size()) {\nreturn false;\n}\nList lhsMemberTypes = lhsType.getTupleTypes();\nList rhsMemberTypes = rhsType.getTupleTypes();\nfor (int i = 0; i < lhsType.getTupleTypes().size(); i++) {\nif (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberTypes.get(i)),\nexpandAndGetMemberTypesRecursive(rhsMemberTypes.get(i)))) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean equalityIntersectionExistsForComplexTypes(Set lhsTypes, Set rhsTypes) {\nfor (BType lhsMemberType : lhsTypes) {\nswitch (lhsMemberType.tag) {\ncase TypeTags.INT:\ncase TypeTags.STRING:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.BOOLEAN:\ncase TypeTags.NIL:\nif (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {\nreturn true;\n}\nbreak;\ncase TypeTags.JSON:\nif (jsonEqualityIntersectionExists(rhsTypes)) {\nreturn true;\n}\nbreak;\ncase TypeTags.TUPLE:\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&\ntupleIntersectionExists((BTupleType) lhsMemberType, (BTupleType) rhsMemberType))) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&\narrayTupleEqualityIntersectionExists((BArrayType) rhsMemberType,\n(BTupleType) lhsMemberType))) {\nreturn true;\n}\nbreak;\ncase TypeTags.ARRAY:\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&\nequalityIntersectionExists(\nexpandAndGetMemberTypesRecursive(((BArrayType) lhsMemberType).eType),\nexpandAndGetMemberTypesRecursive(((BArrayType) rhsMemberType).eType)))) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&\narrayTupleEqualityIntersectionExists((BArrayType) lhsMemberType,\n(BTupleType) rhsMemberType))) {\nreturn true;\n}\nbreak;\ncase TypeTags.MAP:\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&\nequalityIntersectionExists(\nexpandAndGetMemberTypesRecursive(((BMapType) lhsMemberType).constraint),\nexpandAndGetMemberTypesRecursive(((BMapType) rhsMemberType).constraint)))) {\nreturn true;\n}\nif (!isAssignable(((BMapType) lhsMemberType).constraint, symTable.errorType) &&\nrhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&\nmapRecordEqualityIntersectionExists((BMapType) lhsMemberType,\n(BRecordType) rhsMemberType))) {\nreturn true;\n}\nbreak;\ncase TypeTags.OBJECT:\ncase TypeTags.RECORD:\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> 
checkStructEquivalency(rhsMemberType, lhsMemberType) ||\ncheckStructEquivalency(lhsMemberType, rhsMemberType))) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&\nrecordEqualityIntersectionExists((BRecordType) lhsMemberType,\n(BRecordType) rhsMemberType))) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON) &&\njsonEqualityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberType))) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&\nmapRecordEqualityIntersectionExists((BMapType) rhsMemberType,\n(BRecordType) lhsMemberType))) {\nreturn true;\n}\nbreak;\n}\n}\nreturn false;\n}\nprivate boolean arrayTupleEqualityIntersectionExists(BArrayType arrayType, BTupleType tupleType) {\nSet elementTypes = expandAndGetMemberTypesRecursive(arrayType.eType);\nreturn tupleType.tupleTypes.stream()\n.allMatch(tupleMemType -> equalityIntersectionExists(elementTypes,\nexpandAndGetMemberTypesRecursive(tupleMemType)));\n}\nprivate boolean recordEqualityIntersectionExists(BRecordType lhsType, BRecordType rhsType) {\nList lhsFields = lhsType.fields;\nList rhsFields = rhsType.fields;\nList matchedFieldNames = new ArrayList<>();\nfor (BField lhsField : lhsFields) {\nOptional match =\nrhsFields.stream().filter(rhsField -> lhsField.name.equals(rhsField.name)).findFirst();\nif (match.isPresent()) {\nif (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),\nexpandAndGetMemberTypesRecursive(match.get().type))) {\nreturn false;\n}\nmatchedFieldNames.add(lhsField.getName());\n} else {\nif (Symbols.isFlagOn(lhsField.symbol.flags, Flags.OPTIONAL)) {\nbreak;\n}\nif (rhsType.sealed) {\nreturn false;\n}\nif (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),\nexpandAndGetMemberTypesRecursive(rhsType.restFieldType))) {\nreturn false;\n}\n}\n}\nfor (BField rhsField : rhsFields) {\nif (matchedFieldNames.contains(rhsField.getName())) {\ncontinue;\n}\nif (!Symbols.isFlagOn(rhsField.symbol.flags, Flags.OPTIONAL)) {\nif (lhsType.sealed) {\nreturn false;\n}\nif (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(rhsField.type),\nexpandAndGetMemberTypesRecursive(lhsType.restFieldType))) {\nreturn false;\n}\n}\n}\nreturn true;\n}\nprivate boolean mapRecordEqualityIntersectionExists(BMapType mapType, BRecordType recordType) {\nSet mapConstrTypes = expandAndGetMemberTypesRecursive(mapType.getConstraint());\nreturn recordType.fields.stream()\n.allMatch(field -> Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) ||\nequalityIntersectionExists(mapConstrTypes, expandAndGetMemberTypesRecursive(field.type)));\n}\nprivate boolean jsonEqualityIntersectionExists(Set typeSet) {\nfor (BType type : typeSet) {\nswitch (type.tag) {\ncase TypeTags.MAP:\nif (!isAssignable(((BMapType) type).constraint, symTable.errorType)) {\nreturn true;\n}\nbreak;\ncase TypeTags.RECORD:\nBRecordType recordType = (BRecordType) type;\nif (recordType.fields.stream()\n.allMatch(field -> Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) ||\n!isAssignable(field.type, symTable.errorType))) {\nreturn true;\n}\nbreak;\ndefault:\nif (isAssignable(type, symTable.jsonType)) {\nreturn true;\n}\n}\n}\nreturn false;\n}\npublic BType getRemainingType(BType originalType, BType typeToRemove) {\nswitch (originalType.tag) {\ncase TypeTags.UNION:\nreturn getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));\ncase 
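/* Used for type-guard narrowing, e.g. (illustrative) removing error from int|error leaves int; only unions and finite types can be narrowed, anything else is returned unchanged. */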
TypeTags.FINITE:\nreturn getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));\ndefault:\nreturn originalType;\n}\n}\nprivate BType getRemainingType(BUnionType originalType, List removeTypes) {\nList remainingTypes = getAllTypes(originalType);\nremoveTypes.forEach(removeType -> remainingTypes.removeIf(type -> isAssignable(type, removeType)));\nList finiteTypesToRemove = new ArrayList<>();\nList finiteTypesToAdd = new ArrayList<>();\nfor (BType remainingType : remainingTypes) {\nif (remainingType.tag == TypeTags.FINITE) {\nBFiniteType finiteType = (BFiniteType) remainingType;\nfiniteTypesToRemove.add(finiteType);\nBType remainingTypeWithMatchesRemoved = getRemainingType(finiteType, removeTypes);\nif (remainingTypeWithMatchesRemoved != symTable.semanticError) {\nfiniteTypesToAdd.add(remainingTypeWithMatchesRemoved);\n}\n}\n}\nremainingTypes.removeAll(finiteTypesToRemove);\nremainingTypes.addAll(finiteTypesToAdd);\nif (remainingTypes.size() == 1) {\nreturn remainingTypes.get(0);\n}\nif (remainingTypes.isEmpty()) {\nreturn symTable.semanticError;\n}\nreturn BUnionType.create(null, new LinkedHashSet<>(remainingTypes));\n}\nprivate BType getRemainingType(BFiniteType originalType, List removeTypes) {\nSet remainingValueSpace = new LinkedHashSet<>();\nfor (BLangExpression valueExpr : originalType.getValueSpace()) {\nboolean matchExists = false;\nfor (BType remType : removeTypes) {\nif (isAssignable(valueExpr.type, remType) ||\nisAssignableToFiniteType(remType, (BLangLiteral) valueExpr)) {\nmatchExists = true;\nbreak;\n}\n}\nif (!matchExists) {\nremainingValueSpace.add(valueExpr);\n}\n}\nif (remainingValueSpace.isEmpty()) {\nreturn symTable.semanticError;\n}\nBTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, originalType.tsymbol.flags,\nnames.fromString(\"$anonType$\" + finiteTypeCount++),\noriginalType.tsymbol.pkgID, null,\noriginalType.tsymbol.owner);\nBFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, remainingValueSpace);\nfiniteTypeSymbol.type = intersectingFiniteType;\nreturn intersectingFiniteType;\n}\npublic BType getSafeType(BType type, boolean liftNil, boolean liftError) {\nswitch (type.tag) {\ncase TypeTags.JSON:\nBJSONType jsonType = (BJSONType) type;\nreturn new BJSONType(jsonType.tag, jsonType.tsymbol, false);\ncase TypeTags.ANY:\nreturn new BAnyType(type.tag, type.tsymbol, false);\ncase TypeTags.ANYDATA:\nreturn new BAnydataType(type.tag, type.tsymbol, false);\n}\nif (type.tag != TypeTags.UNION) {\nreturn type;\n}\nBUnionType unionType = (BUnionType) type;\nLinkedHashSet memTypes = new LinkedHashSet<>(unionType.getMemberTypes());\nBUnionType errorLiftedType = BUnionType.create(null, memTypes);\nif (liftNil) {\nerrorLiftedType.remove(symTable.nilType);\n}\nif (liftError) {\nerrorLiftedType.remove(symTable.errorType);\n}\nif (errorLiftedType.getMemberTypes().size() == 1) {\nreturn errorLiftedType.getMemberTypes().toArray(new BType[0])[0];\n}\nreturn errorLiftedType;\n}\npublic List getAllTypes(BType type) {\nif (type.tag != TypeTags.UNION) {\nreturn Lists.of(type);\n}\nList memberTypes = new ArrayList<>();\n((BUnionType) type).getMemberTypes().forEach(memberType -> memberTypes.addAll(getAllTypes(memberType)));\nreturn memberTypes;\n}\npublic boolean isAllowedConstantType(BType type) {\nswitch (type.tag) {\ncase TypeTags.BOOLEAN:\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.STRING:\ncase TypeTags.NIL:\nreturn true;\ncase TypeTags.MAP:\nreturn 
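/* A map constant is allowed exactly when its constraint is itself an allowed constant type. */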
isAllowedConstantType(((BMapType) type).constraint);\ncase TypeTags.FINITE:\nBLangExpression finiteValue = ((BFiniteType) type).getValueSpace().toArray(new BLangExpression[0])[0];\nreturn isAllowedConstantType(finiteValue.type);\ndefault:\nreturn false;\n}\n}\npublic boolean isValidLiteral(BLangLiteral literal, BType targetType) {\nBType literalType = literal.type;\nif (literalType.tag == targetType.tag) {\nreturn true;\n}\nswitch (targetType.tag) {\ncase TypeTags.BYTE:\nreturn literalType.tag == TypeTags.INT && isByteLiteralValue((Long) literal.value);\ncase TypeTags.DECIMAL:\nreturn literalType.tag == TypeTags.FLOAT || literalType.tag == TypeTags.INT;\ncase TypeTags.FLOAT:\nreturn literalType.tag == TypeTags.INT;\ncase TypeTags.SIGNED32_INT:\nreturn literalType.tag == TypeTags.INT && isSigned32LiteralValue((Long) literal.value);\ncase TypeTags.SIGNED16_INT:\nreturn literalType.tag == TypeTags.INT && isSigned16LiteralValue((Long) literal.value);\ncase TypeTags.SIGNED8_INT:\nreturn literalType.tag == TypeTags.INT && isSigned8LiteralValue((Long) literal.value);\ncase TypeTags.UNSIGNED32_INT:\nreturn literalType.tag == TypeTags.INT && isUnsigned32LiteralValue((Long) literal.value);\ncase TypeTags.UNSIGNED16_INT:\nreturn literalType.tag == TypeTags.INT && isUnsigned16LiteralValue((Long) literal.value);\ncase TypeTags.UNSIGNED8_INT:\nreturn literalType.tag == TypeTags.INT && isUnsigned8LiteralValue((Long) literal.value);\ncase TypeTags.CHAR_STRING:\nreturn literalType.tag == TypeTags.STRING && isCharLiteralValue((String) literal.value);\ndefault:\nreturn false;\n}\n}\n/**\n* Validate if the return type of the given function is a subtype of `error?`, containing `()`.\n*\n* @param function The function of which the return type should be validated\n* @param diagnosticCode The code to log if the return type is invalid\n*/\npublic void validateErrorOrNilReturn(BLangFunction function, DiagnosticCode diagnosticCode) {\nBType returnType = function.returnTypeNode.type;\nif (returnType.tag == TypeTags.NIL) {\nreturn;\n}\nif (returnType.tag == TypeTags.UNION) {\nSet memberTypes = ((BUnionType) returnType).getMemberTypes();\nif (returnType.isNullable() &&\nmemberTypes.stream().allMatch(type -> type.tag == TypeTags.NIL || type.tag == TypeTags.ERROR)) {\nreturn;\n}\n}\ndlogHelper.error(function.returnTypeNode.pos, diagnosticCode, function.returnTypeNode.type.toString());\n}\n/**\n* Type vector of size two, to hold the source and the target types.\n*\n* @since 0.982.0\n*/\nprivate static class TypePair {\nBType sourceType;\nBType targetType;\npublic TypePair(BType sourceType, BType targetType) {\nthis.sourceType = sourceType;\nthis.targetType = targetType;\n}\n@Override\npublic boolean equals(Object obj) {\nif (!(obj instanceof TypePair)) {\nreturn false;\n}\nTypePair other = (TypePair) obj;\nreturn this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);\n}\n@Override\npublic int hashCode() {\nreturn Objects.hash(sourceType, targetType);\n}\n}\n/**\n* A functional interface for parameterizing the type of type checking that needs to be done on the source and\n* target types.\n*\n* @since 0.995.0\n*/\nprivate interface TypeEqualityPredicate {\nboolean test(BType source, BType target, Set unresolvedTypes);\n}\npublic boolean hasFillerValue(BType type) {\nswitch (type.tag) {\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.STRING:\ncase TypeTags.BOOLEAN:\ncase TypeTags.JSON:\ncase TypeTags.XML:\ncase TypeTags.TABLE:\ncase 
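/* All of these types have an implicit filler value, e.g. (illustrative) 0 for int, \"\" for string, false for boolean and () for nil. */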
TypeTags.NIL:\ncase TypeTags.ANYDATA:\ncase TypeTags.MAP:\ncase TypeTags.ANY:\nreturn true;\ncase TypeTags.ARRAY:\nreturn checkFillerValue((BArrayType) type);\ncase TypeTags.FINITE:\nreturn checkFillerValue((BFiniteType) type);\ncase TypeTags.UNION:\nreturn checkFillerValue((BUnionType) type);\ncase TypeTags.OBJECT:\nreturn checkFillerValue((BObjectType) type);\ncase TypeTags.RECORD:\nreturn checkFillerValue((BRecordType) type);\ncase TypeTags.TUPLE:\nBTupleType tupleType = (BTupleType) type;\nreturn tupleType.getTupleTypes().stream().allMatch(eleType -> hasFillerValue(eleType));\ndefault:\nreturn false;\n}\n}\nprivate boolean checkFillerValue(BObjectType type) {\nif ((type.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) {\nreturn false;\n}\nBAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc;\nif (initFunction == null) {\nreturn true;\n}\nif (initFunction.symbol.getReturnType().getKind() != TypeKind.NIL) {\nreturn false;\n}\nfor (BVarSymbol bVarSymbol : initFunction.symbol.getParameters()) {\nif (!bVarSymbol.defaultableParam) {\nreturn false;\n}\n}\nreturn true;\n}\n/**\n* This handles two kinds of types. Singleton: as a singleton has a single value, that value should itself be a\n* valid filler value. Union: 1. if nil is a member, it is the filler value; 2. else, all the values should belong\n* to the same type and the default value for that type should be a member of the union. Precondition: the value\n* space should have at least one element.\n*\n* @param type BFiniteType union or finite\n* @return boolean whether type has a valid filler value or not\n*/\nprivate boolean checkFillerValue(BFiniteType type) {\nif (type.isNullable()) {\nreturn true;\n}\nif (type.getValueSpace().size() == 1) {\nreturn true;\n}\nIterator iterator = type.getValueSpace().iterator();\nBLangExpression firstElement = (BLangExpression) iterator.next();\nboolean defaultFillValuePresent = isImplicitDefaultValue(firstElement);\nwhile (iterator.hasNext()) {\nBLangExpression value = (BLangExpression) iterator.next();\nif (!isSameType(value.type, firstElement.type)) {\nreturn false;\n}\nif (!defaultFillValuePresent && isImplicitDefaultValue(value)) {\ndefaultFillValuePresent = true;\n}\n}\nreturn defaultFillValuePresent;\n}\nprivate boolean checkFillerValue(BUnionType type) {\nif (type.isNullable()) {\nreturn true;\n}\nIterator iterator = type.getMemberTypes().iterator();\nBType firstMember = iterator.next();\nwhile (iterator.hasNext()) {\nif (!isSameType(firstMember, iterator.next())) {\nreturn false;\n}\n}\nreturn isValueType(firstMember) && hasFillerValue(firstMember);\n}\nprivate boolean isImplicitDefaultValue(BLangExpression expression) {\nif ((expression.getKind() == NodeKind.LITERAL) || (expression.getKind() == NodeKind.NUMERIC_LITERAL)) {\nBLangLiteral literalExpression = (BLangLiteral) expression;\nBType literalExprType = literalExpression.type;\nObject value = literalExpression.getValue();\nswitch (literalExprType.getKind()) {\ncase INT:\ncase BYTE:\nreturn value.equals(Long.valueOf(0));\ncase STRING:\nreturn value == null || value.equals(\"\");\ncase DECIMAL:\ncase FLOAT:\nreturn value.equals(String.valueOf(0.0));\ncase BOOLEAN:\nreturn value.equals(Boolean.valueOf(false));\ncase NIL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nreturn false;\n}\nprivate boolean checkFillerValue(BRecordType type) {\nfor (BField field : type.fields) {\nif (Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) {\ncontinue;\n}\nif (Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {\nreturn 
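/* An explicitly required field has no implicit default, so such a record cannot be filled in. */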
false;\n}\n}\nreturn true;\n}\nprivate boolean checkFillerValue(BArrayType type) {\nif (type.size == -1) {\nreturn true;\n}\nreturn hasFillerValue(type.eType);\n}\n}", + "context_after": "class Types {\nprivate static final CompilerContext.Key TYPES_KEY =\nnew CompilerContext.Key<>();\nprivate SymbolTable symTable;\nprivate SymbolResolver symResolver;\nprivate BLangDiagnosticLogHelper dlogHelper;\nprivate Names names;\nprivate int finiteTypeCount = 0;\nprivate BUnionType expandedXMLBuiltinSubtypes;\npublic static Types getInstance(CompilerContext context) {\nTypes types = context.get(TYPES_KEY);\nif (types == null) {\ntypes = new Types(context);\n}\nreturn types;\n}\npublic Types(CompilerContext context) {\ncontext.put(TYPES_KEY, this);\nthis.symTable = SymbolTable.getInstance(context);\nthis.symResolver = SymbolResolver.getInstance(context);\nthis.dlogHelper = BLangDiagnosticLogHelper.getInstance(context);\nthis.names = Names.getInstance(context);\nthis.expandedXMLBuiltinSubtypes = BUnionType.create(null,\nsymTable.xmlElementType, symTable.xmlCommentType, symTable.xmlPIType, symTable.xmlTextType);\n}\npublic List checkTypes(BLangExpression node,\nList actualTypes,\nList expTypes) {\nList resTypes = new ArrayList<>();\nfor (int i = 0; i < actualTypes.size(); i++) {\nresTypes.add(checkType(node, actualTypes.get(i), expTypes.size() > i ? expTypes.get(i) : symTable.noType));\n}\nreturn resTypes;\n}\npublic BType checkType(BLangExpression node,\nBType actualType,\nBType expType) {\nreturn checkType(node, actualType, expType, DiagnosticCode.INCOMPATIBLE_TYPES);\n}\npublic BType checkType(BLangExpression expr,\nBType actualType,\nBType expType,\nDiagnosticCode diagCode) {\nexpr.type = checkType(expr.pos, actualType, expType, diagCode);\nif (expr.type.tag == TypeTags.SEMANTIC_ERROR) {\nreturn expr.type;\n}\nsetImplicitCastExpr(expr, actualType, expType);\nreturn expr.type;\n}\npublic BType checkType(DiagnosticPos pos,\nBType actualType,\nBType expType,\nDiagnosticCode diagCode) {\nif (expType.tag == TypeTags.SEMANTIC_ERROR) {\nreturn expType;\n} else if (expType.tag == TypeTags.NONE) {\nreturn actualType;\n} else if (actualType.tag == TypeTags.SEMANTIC_ERROR) {\nreturn actualType;\n} else if (isAssignable(actualType, expType)) {\nreturn actualType;\n}\ndlogHelper.error(pos, diagCode, expType, actualType);\nreturn symTable.semanticError;\n}\npublic boolean isJSONContext(BType type) {\nif (type.tag == TypeTags.UNION) {\nreturn ((BUnionType) type).getMemberTypes().stream().anyMatch(memType -> memType.tag == TypeTags.JSON);\n}\nreturn type.tag == TypeTags.JSON;\n}\npublic boolean isLax(BType type) {\nswitch (type.tag) {\ncase TypeTags.JSON:\ncase TypeTags.XML:\ncase TypeTags.XML_ELEMENT:\nreturn true;\ncase TypeTags.MAP:\nreturn isLax(((BMapType) type).constraint);\ncase TypeTags.UNION:\nreturn ((BUnionType) type).getMemberTypes().stream().allMatch(this::isLax);\n}\nreturn false;\n}\npublic boolean isSameType(BType source, BType target) {\nreturn isSameType(source, target, new HashSet<>());\n}\nprivate boolean isSameType(BType source, BType target, Set unresolvedTypes) {\nTypePair pair = new TypePair(source, target);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nBTypeVisitor sameTypeVisitor = new BSameTypeVisitor(unresolvedTypes);\nreturn target.accept(sameTypeVisitor, source);\n}\npublic boolean isValueType(BType type) {\nswitch (type.tag) {\ncase TypeTags.BOOLEAN:\ncase TypeTags.BYTE:\ncase TypeTags.DECIMAL:\ncase TypeTags.FLOAT:\ncase TypeTags.INT:\ncase 
TypeTags.STRING:\ncase TypeTags.SIGNED32_INT:\ncase TypeTags.SIGNED16_INT:\ncase TypeTags.SIGNED8_INT:\ncase TypeTags.UNSIGNED32_INT:\ncase TypeTags.UNSIGNED16_INT:\ncase TypeTags.UNSIGNED8_INT:\ncase TypeTags.CHAR_STRING:\nreturn true;\ndefault:\nreturn false;\n}\n}\nboolean isBasicNumericType(BType type) {\nreturn type.tag < TypeTags.STRING || TypeTags.isIntegerTypeTag(type.tag);\n}\nboolean finiteTypeContainsNumericTypeValues(BFiniteType finiteType) {\nreturn finiteType.getValueSpace().stream().anyMatch(valueExpr -> isBasicNumericType(valueExpr.type));\n}\npublic boolean containsErrorType(BType type) {\nif (type.tag == TypeTags.UNION) {\nreturn ((BUnionType) type).getMemberTypes().stream()\n.anyMatch(this::containsErrorType);\n}\nreturn type.tag == TypeTags.ERROR;\n}\npublic boolean isSubTypeOfList(BType type) {\nif (type.tag != TypeTags.UNION) {\nreturn isSubTypeOfBaseType(type, TypeTags.ARRAY) || isSubTypeOfBaseType(type, TypeTags.TUPLE);\n}\nreturn ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfList);\n}\npublic boolean isSubTypeOfMapping(BType type) {\nif (type.tag != TypeTags.UNION) {\nreturn isSubTypeOfBaseType(type, TypeTags.MAP) || isSubTypeOfBaseType(type, TypeTags.RECORD);\n}\nreturn ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfMapping);\n}\npublic boolean isSubTypeOfBaseType(BType type, int baseTypeTag) {\nif (type.tag != TypeTags.UNION) {\nreturn type.tag == baseTypeTag;\n}\nif (TypeTags.isXMLTypeTag(baseTypeTag)) {\nreturn true;\n}\nreturn ((BUnionType) type).getMemberTypes().stream().allMatch(memType -> memType.tag == baseTypeTag);\n}\n/**\n* Checks whether source type is assignable to the target type.\n*
\n* Source type is assignable to the target type if,\n* 1) the target type is any and the source type is not a value type.\n* 2) there exists an implicit cast symbol from source to target.\n* 3) both types are JSON and the target constraint is no type.\n* 4) both types are array type and both array types are assignable.\n* 5) both types are MAP and the target constraint is any type or constraints are structurally equivalent.\n*\n* @param source type.\n* @param target type.\n* @return true if source type is assignable to the target type.\n*/\npublic boolean isAssignable(BType source, BType target) {\nreturn isAssignable(source, target, new HashSet<>());\n}\nboolean isStampingAllowed(BType source, BType target) {\nreturn (isAssignable(source, target) || isAssignable(target, source) ||\ncheckTypeEquivalencyForStamping(source, target) || checkTypeEquivalencyForStamping(target, source));\n}\nprivate boolean checkTypeEquivalencyForStamping(BType source, BType target) {\nif (target.tag == TypeTags.RECORD) {\nif (source.tag == TypeTags.RECORD) {\nTypePair pair = new TypePair(source, target);\nSet unresolvedTypes = new HashSet<>();\nunresolvedTypes.add(pair);\nreturn checkRecordEquivalencyForStamping((BRecordType) source, (BRecordType) target, unresolvedTypes);\n} else if (source.tag == TypeTags.MAP) {\nint mapConstraintTypeTag = ((BMapType) source).constraint.tag;\nif ((!(mapConstraintTypeTag == TypeTags.ANY || mapConstraintTypeTag == TypeTags.ANYDATA)) &&\n((BRecordType) target).sealed) {\nfor (BField field : ((BStructureType) target).getFields()) {\nif (field.getType().tag != mapConstraintTypeTag) {\nreturn false;\n}\n}\n}\nreturn true;\n}\n} else if (target.tag == TypeTags.JSON) {\nreturn source.tag == TypeTags.JSON || source.tag == TypeTags.RECORD || source.tag == TypeTags.MAP;\n} else if (target.tag == TypeTags.MAP) {\nif (source.tag == TypeTags.MAP) {\nreturn isStampingAllowed(((BMapType) source).getConstraint(), ((BMapType) target).getConstraint());\n} else if (source.tag == TypeTags.UNION) {\nreturn checkUnionEquivalencyForStamping(source, target);\n}\n} else if (target.tag == TypeTags.ARRAY) {\nif (source.tag == TypeTags.JSON) {\nreturn true;\n} else if (source.tag == TypeTags.TUPLE) {\nBType arrayElementType = ((BArrayType) target).eType;\nfor (BType tupleMemberType : ((BTupleType) source).getTupleTypes()) {\nif (!isStampingAllowed(tupleMemberType, arrayElementType)) {\nreturn false;\n}\n}\nreturn true;\n} else if (source.tag == TypeTags.ARRAY) {\nreturn checkTypeEquivalencyForStamping(((BArrayType) source).eType, ((BArrayType) target).eType);\n}\n} else if (target.tag == TypeTags.UNION) {\nreturn checkUnionEquivalencyForStamping(source, target);\n} else if (target.tag == TypeTags.TUPLE && source.tag == TypeTags.TUPLE) {\nreturn checkTupleEquivalencyForStamping(source, target);\n}\nreturn false;\n}\nprivate boolean checkRecordEquivalencyForStamping(BRecordType rhsType, BRecordType lhsType,\nSet unresolvedTypes) {\nif (Symbols.isFlagOn(lhsType.tsymbol.flags ^ rhsType.tsymbol.flags, Flags.PUBLIC)) {\nreturn false;\n}\nif (Symbols.isPrivate(lhsType.tsymbol) && rhsType.tsymbol.pkgID != lhsType.tsymbol.pkgID) {\nreturn false;\n}\nif (lhsType.fields.size() > rhsType.fields.size()) {\nreturn false;\n}\nif (lhsType.sealed && !rhsType.sealed) {\nreturn false;\n}\nreturn checkFieldEquivalencyForStamping(lhsType, rhsType, unresolvedTypes);\n}\nprivate boolean checkFieldEquivalencyForStamping(BStructureType lhsType, BStructureType rhsType,\nSet unresolvedTypes) {\nMap rhsFields = 
rhsType.fields.stream().collect(\nCollectors.toMap(BField::getName, field -> field));\nfor (BField lhsField : lhsType.fields) {\nBField rhsField = rhsFields.get(lhsField.name);\nif (rhsField == null || !isStampingAllowed(rhsField.type, lhsField.type)) {\nreturn false;\n}\n}\nMap lhsFields = lhsType.fields.stream().collect(\nCollectors.toMap(BField::getName, field -> field));\nfor (BField rhsField : rhsType.fields) {\nBField lhsField = lhsFields.get(rhsField.name);\nif (lhsField == null && !isStampingAllowed(rhsField.type, ((BRecordType) lhsType).restFieldType)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean checkUnionEquivalencyForStamping(BType source, BType target) {\nSet sourceTypes = new LinkedHashSet<>();\nSet targetTypes = new LinkedHashSet<>();\nif (source.tag == TypeTags.UNION) {\nBUnionType sourceUnionType = (BUnionType) source;\nsourceTypes.addAll(sourceUnionType.getMemberTypes());\n} else {\nsourceTypes.add(source);\n}\nif (target.tag == TypeTags.UNION) {\nBUnionType targetUnionType = (BUnionType) target;\ntargetTypes.addAll(targetUnionType.getMemberTypes());\n} else {\ntargetTypes.add(target);\n}\nboolean notAssignable = sourceTypes\n.stream()\n.map(s -> targetTypes\n.stream()\n.anyMatch(t -> isStampingAllowed(s, t)))\n.anyMatch(assignable -> !assignable);\nreturn !notAssignable;\n}\nprivate boolean checkTupleEquivalencyForStamping(BType source, BType target) {\nif (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {\nreturn false;\n}\nBTupleType lhsTupleType = (BTupleType) target;\nBTupleType rhsTupleType = (BTupleType) source;\nif (lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) {\nreturn false;\n}\nfor (int i = 0; i < lhsTupleType.tupleTypes.size(); i++) {\nif (!isStampingAllowed(rhsTupleType.tupleTypes.get(i), lhsTupleType.tupleTypes.get(i))) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean isAssignable(BType source, BType target, Set unresolvedTypes) {\nif (isSameType(source, target)) {\nreturn true;\n}\nint sourceTag = source.tag;\nint targetTag = target.tag;\nif (sourceTag == TypeTags.BYTE && targetTag == TypeTags.INT) {\nreturn true;\n}\nif (TypeTags.isXMLTypeTag(sourceTag) && targetTag == TypeTags.XML) {\nreturn true;\n}\nif (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) {\nreturn true;\n}\nif (TypeTags.isXMLTypeTag(sourceTag) && targetTag == TypeTags.XML) {\nreturn true;\n}\nif (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) {\nreturn true;\n}\nif (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ERROR) {\nreturn isErrorTypeAssignable((BErrorType) source, (BErrorType) target, unresolvedTypes);\n} else if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ANY) {\nreturn false;\n}\nif (sourceTag == TypeTags.NIL && (isNullable(target) || targetTag == TypeTags.JSON)) {\nreturn true;\n}\nif (targetTag == TypeTags.ANY && !containsErrorType(source) && !isValueType(source)) {\nreturn true;\n}\nif (targetTag == TypeTags.ANYDATA && !containsErrorType(source) && source.isAnydata()) {\nreturn true;\n}\nif (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) {\nBRecordType recordType = (BRecordType) source;\nreturn isAssignableRecordType(recordType, target);\n}\nif (target.getKind() == TypeKind.SERVICE && source.getKind() == TypeKind.SERVICE) {\nreturn true;\n}\nif (targetTag == TypeTags.TYPEDESC && sourceTag == TypeTags.TYPEDESC) {\nreturn isAssignable(((BTypedescType) source).constraint, (((BTypedescType) target).constraint),\nunresolvedTypes);\n}\nif (targetTag == 
TypeTags.TABLE && sourceTag == TypeTags.TABLE) {\nreturn isAssignable(((BTableType) source).constraint, (((BTableType) target).constraint),\nunresolvedTypes);\n}\nif (targetTag == TypeTags.STREAM && sourceTag == TypeTags.STREAM) {\nreturn isAssignable(((BStreamType) source).constraint, ((BStreamType) target).constraint, unresolvedTypes);\n}\nif (isBuiltInTypeWidenPossible(source, target) == TypeTestResult.TRUE) {\nreturn true;\n}\nif (sourceTag == TypeTags.FINITE) {\nreturn isFiniteTypeAssignable((BFiniteType) source, target, unresolvedTypes);\n}\nif ((targetTag == TypeTags.UNION || sourceTag == TypeTags.UNION) &&\nisAssignableToUnionType(source, target, unresolvedTypes)) {\nreturn true;\n}\nif (targetTag == TypeTags.JSON) {\nif (sourceTag == TypeTags.JSON) {\nreturn true;\n}\nif (sourceTag == TypeTags.ARRAY) {\nreturn isArrayTypesAssignable(source, target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.MAP) {\nreturn isAssignable(((BMapType) source).constraint, target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.RECORD) {\nreturn isAssignableRecordType((BRecordType) source, target);\n}\n}\nif (targetTag == TypeTags.FUTURE && sourceTag == TypeTags.FUTURE) {\nif (((BFutureType) target).constraint.tag == TypeTags.NONE) {\nreturn true;\n}\nreturn isAssignable(((BFutureType) source).constraint, ((BFutureType) target).constraint, unresolvedTypes);\n}\nif (targetTag == TypeTags.MAP && sourceTag == TypeTags.MAP) {\nif (((BMapType) target).constraint.tag == TypeTags.ANY &&\n((BMapType) source).constraint.tag != TypeTags.UNION) {\nreturn true;\n}\nreturn isAssignable(((BMapType) source).constraint, ((BMapType) target).constraint, unresolvedTypes);\n}\nif (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) {\nBType mapConstraint = ((BMapType) target).constraint;\nBRecordType srcRec = (BRecordType) source;\nboolean hasIncompatibleType = srcRec.fields\n.stream().anyMatch(field -> !isAssignable(field.type, mapConstraint));\nreturn !hasIncompatibleType && isAssignable(srcRec.restFieldType, mapConstraint);\n}\nif ((sourceTag == TypeTags.OBJECT || sourceTag == TypeTags.RECORD)\n&& (targetTag == TypeTags.OBJECT || targetTag == TypeTags.RECORD)) {\nreturn checkStructEquivalency(source, target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.TUPLE && targetTag == TypeTags.ARRAY) {\nreturn isTupleTypeAssignableToArrayType((BTupleType) source, (BArrayType) target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.ARRAY && targetTag == TypeTags.TUPLE) {\nreturn isArrayTypeAssignableToTupleType((BArrayType) source, (BTupleType) target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.TUPLE || targetTag == TypeTags.TUPLE) {\nreturn isTupleTypeAssignable(source, target, unresolvedTypes);\n}\nif (sourceTag == TypeTags.INVOKABLE && targetTag == TypeTags.INVOKABLE) {\nreturn isFunctionTypeAssignable((BInvokableType) source, (BInvokableType) target, new HashSet<>());\n}\nreturn sourceTag == TypeTags.ARRAY && targetTag == TypeTags.ARRAY &&\nisArrayTypesAssignable(source, target, unresolvedTypes);\n}\nprivate boolean recordFieldsAssignableToType(BRecordType recordType, BType targetType) {\nfor (BField field : recordType.fields) {\nif (!isAssignable(field.type, targetType)) {\nreturn false;\n}\n}\nif (!recordType.sealed) {\nreturn isAssignable(recordType.restFieldType, targetType);\n}\nreturn true;\n}\nprivate boolean isErrorTypeAssignable(BErrorType source, BErrorType target, Set unresolvedTypes) {\nif (target == symTable.errorType) {\nreturn true;\n}\nTypePair pair = new TypePair(source, target);\nif 
(unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nreturn isAssignable(source.reasonType, target.reasonType, unresolvedTypes) &&\nisAssignable(source.detailType, target.detailType, unresolvedTypes);\n}\nprivate boolean isTupleTypeAssignable(BType source, BType target, Set unresolvedTypes) {\nif (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {\nreturn false;\n}\nBTupleType lhsTupleType = (BTupleType) target;\nBTupleType rhsTupleType = (BTupleType) source;\nif (lhsTupleType.restType == null && rhsTupleType.restType != null) {\nreturn false;\n}\nif (lhsTupleType.restType == null && lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) {\nreturn false;\n}\nif (lhsTupleType.restType != null && rhsTupleType.restType != null) {\nif (!isAssignable(rhsTupleType.restType, lhsTupleType.restType, unresolvedTypes)) {\nreturn false;\n}\n}\nfor (int i = 0; i < rhsTupleType.tupleTypes.size(); i++) {\nBType lhsType = (lhsTupleType.tupleTypes.size() > i)\n? lhsTupleType.tupleTypes.get(i) : lhsTupleType.restType;\nif (!isAssignable(rhsTupleType.tupleTypes.get(i), lhsType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean isTupleTypeAssignableToArrayType(BTupleType source, BArrayType target,\nSet unresolvedTypes) {\nif (target.state != BArrayState.UNSEALED\n&& (source.restType != null || source.tupleTypes.size() != target.size)) {\nreturn false;\n}\nList sourceTypes = new ArrayList<>(source.tupleTypes);\nif (source.restType != null) {\nsourceTypes.add(source.restType);\n}\nreturn sourceTypes.stream()\n.allMatch(tupleElemType -> isAssignable(tupleElemType, target.eType, unresolvedTypes));\n}\nprivate boolean isArrayTypeAssignableToTupleType(BArrayType source, BTupleType target,\nSet unresolvedTypes) {\nif (!target.tupleTypes.isEmpty()) {\nif (source.state == BArrayState.UNSEALED) {\nreturn false;\n}\nif (target.restType != null && target.tupleTypes.size() > source.size) {\nreturn false;\n}\nif (target.restType == null && target.tupleTypes.size() != source.size) {\nreturn false;\n}\n}\nList targetTypes = new ArrayList<>(target.tupleTypes);\nif (target.restType != null) {\ntargetTypes.add(target.restType);\n}\nreturn targetTypes.stream()\n.allMatch(tupleElemType -> isAssignable(source.eType, tupleElemType, unresolvedTypes));\n}\npublic boolean isArrayTypesAssignable(BType source, BType target, Set unresolvedTypes) {\nif (target.tag == TypeTags.ARRAY && source.tag == TypeTags.ARRAY) {\nBArrayType lhsArrayType = (BArrayType) target;\nBArrayType rhsArrayType = (BArrayType) source;\nif (lhsArrayType.state == BArrayState.UNSEALED) {\nreturn isArrayTypesAssignable(rhsArrayType.eType, lhsArrayType.eType, unresolvedTypes);\n}\nreturn checkSealedArraySizeEquality(rhsArrayType, lhsArrayType)\n&& isArrayTypesAssignable(rhsArrayType.eType, lhsArrayType.eType, unresolvedTypes);\n} else if (source.tag == TypeTags.ARRAY) {\nif (target.tag == TypeTags.JSON) {\nreturn isAssignable(((BArrayType) source).getElementType(), target, unresolvedTypes);\n}\nif (target.tag == TypeTags.UNION) {\nreturn isAssignable(source, target);\n}\nreturn target.tag == TypeTags.ANY;\n} else if (target.tag == TypeTags.ARRAY) {\nreturn false;\n}\nif (isAssignable(source, target, unresolvedTypes)) {\nreturn true;\n}\nif (target.tag == TypeTags.UNION) {\nreturn isAssignable(source, target, unresolvedTypes);\n}\nreturn target.tag == TypeTags.ANY && !isValueType(source);\n}\nprivate boolean isFunctionTypeAssignable(BInvokableType source, BInvokableType target,\nSet 
unresolvedTypes) {\nif (containsTypeParams(target)) {\nif (source.paramTypes.size() != target.paramTypes.size()) {\nreturn false;\n}\nfor (int i = 0; i < source.paramTypes.size(); i++) {\nBType sourceParam = source.paramTypes.get(i);\nBType targetParam = target.paramTypes.get(i);\nboolean isTypeParam = TypeParamAnalyzer.isTypeParam(targetParam);\nif (isTypeParam) {\nif (!isAssignable(sourceParam, targetParam)) {\nreturn false;\n}\n} else {\nif (!isAssignable(targetParam, sourceParam)) {\nreturn false;\n}\n}\n}\nif (source.retType == null && target.retType == null) {\nreturn true;\n} else if (source.retType == null || target.retType == null) {\nreturn false;\n}\nreturn isAssignable(source.retType, target.retType, unresolvedTypes);\n}\nreturn checkFunctionTypeEquality(source, target, unresolvedTypes, (s, t, ut) -> isAssignable(t, s, ut));\n}\nprivate boolean containsTypeParams(BInvokableType type) {\nboolean hasParameterizedTypes = type.paramTypes.stream()\n.anyMatch(t -> {\nif (t.tag == TypeTags.FUNCTION_POINTER) {\nreturn containsTypeParams((BInvokableType) t);\n}\nreturn TypeParamAnalyzer.isTypeParam(t);\n});\nif (hasParameterizedTypes) {\nreturn hasParameterizedTypes;\n}\nif (type.retType.tag == TypeTags.FUNCTION_POINTER) {\nreturn containsTypeParams((BInvokableType) type.retType);\n}\nreturn TypeParamAnalyzer.isTypeParam(type.retType);\n}\nprivate boolean isSameFunctionType(BInvokableType source, BInvokableType target, Set unresolvedTypes) {\nreturn checkFunctionTypeEquality(source, target, unresolvedTypes, this::isSameType);\n}\nprivate boolean checkFunctionTypeEquality(BInvokableType source, BInvokableType target,\nSet unresolvedTypes, TypeEqualityPredicate equality) {\nif (source.paramTypes.size() != target.paramTypes.size()) {\nreturn false;\n}\nfor (int i = 0; i < source.paramTypes.size(); i++) {\nif (!equality.test(source.paramTypes.get(i), target.paramTypes.get(i), unresolvedTypes)) {\nreturn false;\n}\n}\nif ((source.restType != null && target.restType == null) ||\ntarget.restType != null && source.restType == null) {\nreturn false;\n} else if (source.restType != null && !equality.test(source.restType, target.restType, unresolvedTypes)) {\nreturn false;\n}\nif (source.retType == null && target.retType == null) {\nreturn true;\n} else if (source.retType == null || target.retType == null) {\nreturn false;\n}\nreturn isAssignable(source.retType, target.retType, unresolvedTypes);\n}\npublic boolean checkArrayEquality(BType source, BType target, Set unresolvedTypes) {\nif (target.tag != TypeTags.ARRAY || source.tag != TypeTags.ARRAY) {\nreturn false;\n}\nBArrayType lhsArrayType = (BArrayType) target;\nBArrayType rhsArrayType = (BArrayType) source;\nif (lhsArrayType.state == BArrayState.UNSEALED) {\nreturn rhsArrayType.state == BArrayState.UNSEALED &&\nisSameType(lhsArrayType.eType, rhsArrayType.eType, unresolvedTypes);\n}\nreturn checkSealedArraySizeEquality(rhsArrayType, lhsArrayType)\n&& isSameType(lhsArrayType.eType, rhsArrayType.eType, unresolvedTypes);\n}\npublic boolean checkSealedArraySizeEquality(BArrayType rhsArrayType, BArrayType lhsArrayType) {\nreturn lhsArrayType.size == rhsArrayType.size;\n}\npublic boolean checkStructEquivalency(BType rhsType, BType lhsType) {\nreturn checkStructEquivalency(rhsType, lhsType, new HashSet<>());\n}\nprivate boolean checkStructEquivalency(BType rhsType, BType lhsType, Set unresolvedTypes) {\nTypePair pair = new TypePair(rhsType, lhsType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nif (rhsType.tag 
== TypeTags.OBJECT && lhsType.tag == TypeTags.OBJECT) {\nreturn checkObjectEquivalency((BObjectType) rhsType, (BObjectType) lhsType, unresolvedTypes);\n}\nif (rhsType.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {\nreturn checkRecordEquivalency((BRecordType) rhsType, (BRecordType) lhsType, unresolvedTypes);\n}\nreturn false;\n}\npublic boolean checkObjectEquivalency(BObjectType rhsType, BObjectType lhsType, Set unresolvedTypes) {\nBObjectTypeSymbol lhsStructSymbol = (BObjectTypeSymbol) lhsType.tsymbol;\nBObjectTypeSymbol rhsStructSymbol = (BObjectTypeSymbol) rhsType.tsymbol;\nList lhsFuncs = lhsStructSymbol.attachedFuncs;\nList rhsFuncs = ((BObjectTypeSymbol) rhsType.tsymbol).attachedFuncs;\nint lhsAttachedFuncCount = getObjectFuncCount(lhsStructSymbol);\nint rhsAttachedFuncCount = getObjectFuncCount(rhsStructSymbol);\nif (lhsType.fields.size() > rhsType.fields.size() || lhsAttachedFuncCount > rhsAttachedFuncCount) {\nreturn false;\n}\nif (lhsType.getFields().stream().anyMatch(field -> Symbols.isPrivate(field.symbol)) ||\nlhsFuncs.stream().anyMatch(func -> Symbols.isPrivate(func.symbol))) {\nreturn false;\n}\nMap rhsFields =\nrhsType.fields.stream().collect(Collectors.toMap(BField::getName, field -> field));\nfor (BField lhsField : lhsType.fields) {\nBField rhsField = rhsFields.get(lhsField.name);\nif (rhsField == null || !isInSameVisibilityRegion(lhsField.symbol, rhsField.symbol)\n|| !isAssignable(rhsField.type, lhsField.type)) {\nreturn false;\n}\n}\nfor (BAttachedFunction lhsFunc : lhsFuncs) {\nif (lhsFunc == lhsStructSymbol.initializerFunc) {\ncontinue;\n}\nBAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, unresolvedTypes);\nif (rhsFunc == null || !isInSameVisibilityRegion(lhsFunc.symbol, rhsFunc.symbol)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate int getObjectFuncCount(BObjectTypeSymbol sym) {\nif (sym.initializerFunc != null && sym.attachedFuncs.contains(sym.initializerFunc)) {\nreturn sym.attachedFuncs.size() - 1;\n}\nreturn sym.attachedFuncs.size();\n}\npublic boolean checkRecordEquivalency(BRecordType rhsType, BRecordType lhsType, Set unresolvedTypes) {\nif (lhsType.sealed && !rhsType.sealed) {\nreturn false;\n}\nif (!rhsType.sealed && !isAssignable(rhsType.restFieldType, lhsType.restFieldType, unresolvedTypes)) {\nreturn false;\n}\nreturn checkFieldEquivalency(lhsType, rhsType, unresolvedTypes);\n}\npublic void setForeachTypedBindingPatternType(BLangForeach foreachNode) {\nBType collectionType = foreachNode.collection.type;\nBType varType;\nswitch (collectionType.tag) {\ncase TypeTags.STRING:\nvarType = symTable.stringType;\nbreak;\ncase TypeTags.ARRAY:\nBArrayType arrayType = (BArrayType) collectionType;\nvarType = arrayType.eType;\nbreak;\ncase TypeTags.TUPLE:\nBTupleType tupleType = (BTupleType) collectionType;\nLinkedHashSet tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);\nif (tupleType.restType != null) {\ntupleTypes.add(tupleType.restType);\n}\nvarType = tupleTypes.size() == 1 ?\ntupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);\nbreak;\ncase TypeTags.MAP:\nBMapType bMapType = (BMapType) collectionType;\nvarType = bMapType.constraint;\nbreak;\ncase TypeTags.RECORD:\nBRecordType recordType = (BRecordType) collectionType;\nvarType = inferRecordFieldType(recordType);\nbreak;\ncase TypeTags.XML:\nvarType = BUnionType.create(null, symTable.xmlType, symTable.stringType);\nbreak;\ncase TypeTags.TABLE:\nBTableType tableType = (BTableType) collectionType;\nif (tableType.constraint.tag == TypeTags.NONE) {\nvarType = 
symTable.anydataType;\nbreak;\n}\nvarType = tableType.constraint;\nbreak;\ncase TypeTags.STREAM:\nBStreamType streamType = (BStreamType) collectionType;\nif (streamType.constraint.tag == TypeTags.NONE) {\nvarType = symTable.anydataType;\nbreak;\n}\nvarType = streamType.constraint;\nif (streamType.error != null) {\nBType actualType = BUnionType.create(null, varType, streamType.error);\ndlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_TYPES,\nvarType, actualType);\n}\nbreak;\ncase TypeTags.OBJECT:\nBUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);\nif (nextMethodReturnType != null) {\nforeachNode.resultType = getRecordType(nextMethodReturnType);\nBType valueType = (foreachNode.resultType != null)\n? ((BRecordType) foreachNode.resultType).fields.get(0).type : null;\nBType errorType = getErrorType(nextMethodReturnType);\nif (errorType != null) {\nBType actualType = BUnionType.create(null, valueType, errorType);\ndlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_TYPES,\nvalueType, actualType);\n}\nforeachNode.nillableResultType = nextMethodReturnType;\nforeachNode.varType = valueType;\nreturn;\n}\ndlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);\ncase TypeTags.SEMANTIC_ERROR:\nforeachNode.varType = symTable.semanticError;\nforeachNode.resultType = symTable.semanticError;\nforeachNode.nillableResultType = symTable.semanticError;\nreturn;\ndefault:\nforeachNode.varType = symTable.semanticError;\nforeachNode.resultType = symTable.semanticError;\nforeachNode.nillableResultType = symTable.semanticError;\ndlogHelper.error(foreachNode.collection.pos, DiagnosticCode.ITERABLE_NOT_SUPPORTED_COLLECTION,\ncollectionType);\nreturn;\n}\nBInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,\nnames.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));\nBUnionType nextMethodReturnType =\n(BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);\nforeachNode.varType = varType;\nforeachNode.resultType = getRecordType(nextMethodReturnType);\nforeachNode.nillableResultType = nextMethodReturnType;\n}\npublic void setFromClauseTypedBindingPatternType(BLangFromClause fromClause) {\nif (fromClause.collection == null) {\nreturn;\n}\nBType collectionType = fromClause.collection.type;\nBType varType;\nswitch (collectionType.tag) {\ncase TypeTags.STRING:\nvarType = symTable.stringType;\nbreak;\ncase TypeTags.ARRAY:\nBArrayType arrayType = (BArrayType) collectionType;\nvarType = arrayType.eType;\nbreak;\ncase TypeTags.TUPLE:\nBTupleType tupleType = (BTupleType) collectionType;\nLinkedHashSet tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);\nif (tupleType.restType != null) {\ntupleTypes.add(tupleType.restType);\n}\nvarType = tupleTypes.size() == 1 ?\ntupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);\nbreak;\ncase TypeTags.MAP:\nBMapType bMapType = (BMapType) collectionType;\nvarType = bMapType.constraint;\nbreak;\ncase TypeTags.RECORD:\nBRecordType recordType = (BRecordType) collectionType;\nvarType = inferRecordFieldType(recordType);\nbreak;\ncase TypeTags.XML:\nvarType = BUnionType.create(null, symTable.xmlType, symTable.stringType);\nbreak;\ncase TypeTags.TABLE:\nBTableType tableType = (BTableType) collectionType;\nif (tableType.constraint.tag == TypeTags.NONE) {\nvarType = symTable.anydataType;\nbreak;\n}\nvarType = tableType.constraint;\nbreak;\ncase 
TypeTags.STREAM:\nBStreamType streamType = (BStreamType) collectionType;\nif (streamType.constraint.tag == TypeTags.NONE) {\nvarType = symTable.anydataType;\nbreak;\n}\nvarType = streamType.constraint;\nbreak;\ncase TypeTags.OBJECT:\nBUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);\nif (nextMethodReturnType != null) {\nfromClause.resultType = getRecordType(nextMethodReturnType);\nfromClause.nillableResultType = nextMethodReturnType;\nfromClause.varType = ((BRecordType) fromClause.resultType).fields.get(0).type;\nreturn;\n}\ndlogHelper.error(fromClause.collection.pos, DiagnosticCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);\ncase TypeTags.SEMANTIC_ERROR:\nfromClause.varType = symTable.semanticError;\nfromClause.resultType = symTable.semanticError;\nfromClause.nillableResultType = symTable.semanticError;\nreturn;\ndefault:\nfromClause.varType = symTable.semanticError;\nfromClause.resultType = symTable.semanticError;\nfromClause.nillableResultType = symTable.semanticError;\ndlogHelper.error(fromClause.collection.pos, DiagnosticCode.ITERABLE_NOT_SUPPORTED_COLLECTION,\ncollectionType);\nreturn;\n}\nBInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,\nnames.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));\nBUnionType nextMethodReturnType =\n(BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);\nfromClause.varType = varType;\nfromClause.resultType = getRecordType(nextMethodReturnType);\nfromClause.nillableResultType = nextMethodReturnType;\n}\npublic BUnionType getVarTypeFromIterableObject(BObjectType collectionType) {\nBObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionType.tsymbol;\nfor (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {\nif (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {\nreturn getVarTypeFromIteratorFunc(func);\n}\n}\nreturn null;\n}\nprivate BUnionType getVarTypeFromIteratorFunc(BAttachedFunction candidateIteratorFunc) {\nif (!candidateIteratorFunc.type.paramTypes.isEmpty()) {\nreturn null;\n}\nBType returnType = candidateIteratorFunc.type.retType;\nreturn getVarTypeFromIteratorFuncReturnType(returnType);\n}\npublic BUnionType getVarTypeFromIteratorFuncReturnType(BType returnType) {\nBObjectTypeSymbol objectTypeSymbol;\nif (returnType.tag != TypeTags.OBJECT) {\nreturn null;\n}\nobjectTypeSymbol = (BObjectTypeSymbol) returnType.tsymbol;\nfor (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {\nif (func.funcName.value.equals(BLangCompilerConstants.NEXT_FUNC)) {\nreturn getVarTypeFromNextFunc(func);\n}\n}\nreturn null;\n}\nprivate BUnionType getVarTypeFromNextFunc(BAttachedFunction nextFunc) {\nBType returnType;\nif (!nextFunc.type.paramTypes.isEmpty()) {\nreturn null;\n}\nreturnType = nextFunc.type.retType;\nif (checkNextFuncReturnType(returnType)) {\nreturn (BUnionType) returnType;\n}\nreturn null;\n}\nprivate boolean checkNextFuncReturnType(BType returnType) {\nif (returnType.tag != TypeTags.UNION) {\nreturn false;\n}\nList types = new ArrayList<>(((BUnionType) returnType).getMemberTypes());\nif (!types.removeIf(type -> type.tag == TypeTags.NIL)) {\nreturn false;\n}\ntypes.removeIf(type -> type.tag == TypeTags.ERROR);\nif (types.size() != 1) {\nreturn false;\n}\nif (types.get(0).tag != TypeTags.RECORD) {\nreturn false;\n}\nBRecordType recordType = (BRecordType) types.get(0);\nreturn checkRecordTypeInNextFuncReturnType(recordType);\n}\nprivate boolean 
checkRecordTypeInNextFuncReturnType(BRecordType recordType) {\nif (!recordType.sealed) {\nreturn false;\n}\nif (recordType.fields.size() != 1) {\nreturn false;\n}\nfor (BField field : recordType.fields) {\nif (field.name.value.equals(BLangCompilerConstants.VALUE_FIELD)) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate BRecordType getRecordType(BUnionType type) {\nfor (BType member : type.getMemberTypes()) {\nif (member.tag == TypeTags.RECORD) {\nreturn (BRecordType) member;\n}\n}\nreturn null;\n}\npublic BErrorType getErrorType(BUnionType type) {\nfor (BType member : type.getMemberTypes()) {\nif (member.tag == TypeTags.ERROR) {\nreturn (BErrorType) member;\n} else if (member.tag == TypeTags.UNION) {\nBErrorType e = getErrorType((BUnionType) member);\nif (e != null) {\nreturn e;\n}\n}\n}\nreturn null;\n}\nprivate BType getResultTypeOfNextInvocation(BObjectType iteratorType) {\nBAttachedFunction nextFunc = getNextFunc(iteratorType);\nreturn Objects.requireNonNull(nextFunc).type.retType;\n}\nprivate BAttachedFunction getNextFunc(BObjectType iteratorType) {\nBObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;\nfor (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {\nif (bAttachedFunction.funcName.value\n.equals(BLangCompilerConstants.NEXT_FUNC)) {\nreturn bAttachedFunction;\n}\n}\nreturn null;\n}\npublic BType inferRecordFieldType(BRecordType recordType) {\nList fields = recordType.fields;\nBUnionType unionType = BUnionType.create(null);\nif (!recordType.sealed) {\nunionType.add(recordType.restFieldType);\n}\nfor (BField field : fields) {\nif (isAssignable(field.type, unionType)) {\ncontinue;\n}\nif (isAssignable(unionType, field.type)) {\nunionType = BUnionType.create(null);\n}\nunionType.add(field.type);\n}\nif (unionType.getMemberTypes().size() > 1) {\nunionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),\nNames.EMPTY, recordType.tsymbol.pkgID, null, recordType.tsymbol.owner);\nreturn unionType;\n}\nreturn unionType.getMemberTypes().iterator().next();\n}\n/**\n* Enum to represent type test result.\n*\n* @since 1.2.0\n*/\nenum TypeTestResult {\nNOT_FOUND,\nTRUE,\nFALSE\n}\nTypeTestResult isBuiltInTypeWidenPossible(BType actualType, BType targetType) {\nint targetTag = targetType.tag;\nint actualTag = actualType.tag;\nif (actualTag < TypeTags.JSON && targetTag < TypeTags.JSON) {\nswitch (actualTag) {\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\nif (targetTag == TypeTags.BOOLEAN || targetTag == TypeTags.STRING) {\nreturn TypeTestResult.FALSE;\n}\nbreak;\ncase TypeTags.BOOLEAN:\nif (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT\n|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.STRING) {\nreturn TypeTestResult.FALSE;\n}\nbreak;\ncase TypeTags.STRING:\nif (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT\n|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.BOOLEAN) {\nreturn TypeTestResult.FALSE;\n}\nbreak;\n}\n}\nswitch (actualTag) {\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.BOOLEAN:\ncase TypeTags.STRING:\ncase TypeTags.SIGNED32_INT:\ncase TypeTags.SIGNED16_INT:\ncase TypeTags.SIGNED8_INT:\ncase TypeTags.UNSIGNED32_INT:\ncase TypeTags.UNSIGNED16_INT:\ncase TypeTags.UNSIGNED8_INT:\ncase TypeTags.CHAR_STRING:\nif (targetTag == TypeTags.JSON || targetTag == TypeTags.ANYDATA || targetTag == TypeTags.ANY) {\nreturn 
TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.ANYDATA:\ncase TypeTags.TYPEDESC:\nif (targetTag == TypeTags.ANY) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ndefault:\n}\nif (TypeTags.isIntegerTypeTag(targetTag) && actualTag == targetTag) {\nreturn TypeTestResult.FALSE;\n}\nif ((TypeTags.isIntegerTypeTag(actualTag) || actualTag == TypeTags.BYTE)\n&& (TypeTags.isIntegerTypeTag(targetTag) || targetTag == TypeTags.BYTE)) {\nreturn checkBuiltInIntSubtypeWidenPossible(actualType, targetType);\n}\nif (actualTag == TypeTags.CHAR_STRING && TypeTags.STRING == targetTag) {\nreturn TypeTestResult.TRUE;\n}\nreturn TypeTestResult.NOT_FOUND;\n}\nprivate TypeTestResult checkBuiltInIntSubtypeWidenPossible(BType actualType, BType targetType) {\nint actualTag = actualType.tag;\nswitch (targetType.tag) {\ncase TypeTags.INT:\nif (actualTag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(actualTag)) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.SIGNED32_INT:\nif (actualTag == TypeTags.SIGNED16_INT || actualTag == TypeTags.SIGNED8_INT ||\nactualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||\nactualTag == TypeTags.BYTE) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.SIGNED16_INT:\nif (actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED8_INT ||\nactualTag == TypeTags.BYTE) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.UNSIGNED32_INT:\nif (actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||\nactualTag == TypeTags.BYTE) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.UNSIGNED16_INT:\nif (actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.BYTE:\nif (actualTag == TypeTags.UNSIGNED8_INT) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\ncase TypeTags.UNSIGNED8_INT:\nif (actualTag == TypeTags.BYTE) {\nreturn TypeTestResult.TRUE;\n}\nbreak;\n}\nreturn TypeTestResult.NOT_FOUND;\n}\npublic boolean isImplicityCastable(BType actualType, BType targetType) {\n/* The word Builtin refers to compiler-known types.
*/\nBType newTargetType = targetType;\nif ((targetType.tag == TypeTags.UNION || targetType.tag == TypeTags.FINITE) && isValueType(actualType)) {\nnewTargetType = symTable.anyType;\n}\nTypeTestResult result = isBuiltInTypeWidenPossible(actualType, newTargetType);\nif (result != TypeTestResult.NOT_FOUND) {\nreturn result == TypeTestResult.TRUE;\n}\nif (isValueType(targetType) &&\n(actualType.tag == TypeTags.FINITE ||\n(actualType.tag == TypeTags.UNION && ((BUnionType) actualType).getMemberTypes().stream()\n.anyMatch(type -> type.tag == TypeTags.FINITE && isAssignable(type, targetType))))) {\nreturn targetType.tag == TypeTags.INT || targetType.tag == TypeTags.BYTE || targetType.tag == TypeTags.FLOAT\n|| targetType.tag == TypeTags.STRING || targetType.tag == TypeTags.BOOLEAN;\n} else if (targetType.tag == TypeTags.ERROR\n&& (actualType.tag == TypeTags.UNION\n&& isAllErrorMembers((BUnionType) actualType))) {\nreturn true;\n}\nreturn false;\n}\npublic boolean isTypeCastable(BLangExpression expr, BType sourceType, BType targetType) {\nif (sourceType.tag == TypeTags.SEMANTIC_ERROR || targetType.tag == TypeTags.SEMANTIC_ERROR ||\nsourceType == targetType) {\nreturn true;\n}\nif (isAssignable(sourceType, targetType) || isAssignable(targetType, sourceType)) {\nreturn true;\n}\nif (isNumericConversionPossible(expr, sourceType, targetType)) {\nreturn true;\n}\nboolean validTypeCast = false;\nif (sourceType.tag == TypeTags.UNION) {\nif (getTypeForUnionTypeMembersAssignableToType((BUnionType) sourceType, targetType)\n!= symTable.semanticError) {\nvalidTypeCast = true;\n}\n}\nif (targetType.tag == TypeTags.UNION) {\nif (getTypeForUnionTypeMembersAssignableToType((BUnionType) targetType, sourceType)\n!= symTable.semanticError) {\nvalidTypeCast = true;\n}\n}\nif (sourceType.tag == TypeTags.FINITE) {\nif (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) sourceType, targetType)\n!= symTable.semanticError) {\nvalidTypeCast = true;\n}\n}\nif (targetType.tag == TypeTags.FINITE) {\nif (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) targetType, sourceType)\n!= symTable.semanticError) {\nvalidTypeCast = true;\n}\n}\nif (validTypeCast) {\nif (isValueType(sourceType)) {\nsetImplicitCastExpr(expr, sourceType, symTable.anyType);\n}\nreturn true;\n}\nreturn false;\n}\nboolean isNumericConversionPossible(BLangExpression expr, BType sourceType,\nBType targetType) {\nfinal boolean isSourceNumericType = isBasicNumericType(sourceType);\nfinal boolean isTargetNumericType = isBasicNumericType(targetType);\nif (isSourceNumericType && isTargetNumericType) {\nreturn true;\n}\nif (targetType.tag == TypeTags.UNION) {\nHashSet typeTags = new HashSet<>();\nfor (BType bType : ((BUnionType) targetType).getMemberTypes()) {\nif (isBasicNumericType(bType)) {\ntypeTags.add(bType.tag);\nif (typeTags.size() > 1) {\nreturn false;\n}\n}\n}\n}\nif (!isTargetNumericType && targetType.tag != TypeTags.UNION) {\nreturn false;\n}\nif (isSourceNumericType) {\nsetImplicitCastExpr(expr, sourceType, symTable.anyType);\nreturn true;\n}\nswitch (sourceType.tag) {\ncase TypeTags.ANY:\ncase TypeTags.ANYDATA:\ncase TypeTags.JSON:\nreturn true;\ncase TypeTags.UNION:\nfor (BType memType : ((BUnionType) sourceType).getMemberTypes()) {\nif (isBasicNumericType(memType) ||\n(memType.tag == TypeTags.FINITE &&\nfiniteTypeContainsNumericTypeValues((BFiniteType) memType))) {\nreturn true;\n}\n}\nbreak;\ncase TypeTags.FINITE:\nif (finiteTypeContainsNumericTypeValues((BFiniteType) sourceType)) {\nreturn true;\n}\nbreak;\n}\nreturn false;\n}\nprivate 
boolean isAllErrorMembers(BUnionType actualType) {\nreturn actualType.getMemberTypes().stream().allMatch(t -> isAssignable(t, symTable.errorType));\n}\npublic void setImplicitCastExpr(BLangExpression expr, BType actualType, BType expType) {\nif (!isImplicityCastable(actualType, expType)) {\nreturn;\n}\nBLangTypeConversionExpr implicitConversionExpr =\n(BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();\nimplicitConversionExpr.pos = expr.pos;\nimplicitConversionExpr.expr = expr.impConversionExpr == null ? expr : expr.impConversionExpr;\nimplicitConversionExpr.type = expType;\nimplicitConversionExpr.targetType = expType;\nexpr.impConversionExpr = implicitConversionExpr;\n}\npublic BType getElementType(BType type) {\nif (type.tag != TypeTags.ARRAY) {\nreturn type;\n}\nreturn getElementType(((BArrayType) type).getElementType());\n}\npublic boolean checkListenerCompatibility(BType type) {\nif (type.tag != TypeTags.OBJECT) {\nreturn false;\n}\nfinal BSymbol bSymbol = symTable.langObjectModuleSymbol.scope.lookup(Names.LISTENER).symbol;\nif (bSymbol == symTable.notFoundSymbol || bSymbol.type.tag != TypeTags.OBJECT) {\nthrow new AssertionError(\"Listener object not defined.\");\n}\nBObjectType rhsType = (BObjectType) type;\nBObjectType lhsType = (BObjectType) bSymbol.type;\nBStructureTypeSymbol lhsStructSymbol = (BStructureTypeSymbol) lhsType.tsymbol;\nList lhsFuncs = lhsStructSymbol.attachedFuncs;\nList rhsFuncs = ((BStructureTypeSymbol) rhsType.tsymbol).attachedFuncs;\nint lhsAttachedFuncCount = lhsStructSymbol.initializerFunc != null ? lhsFuncs.size() - 1 : lhsFuncs.size();\nif (lhsAttachedFuncCount > rhsFuncs.size()) {\nreturn false;\n}\nfor (BAttachedFunction lhsFunc : lhsFuncs) {\nif (lhsFunc == lhsStructSymbol.initializerFunc) {\ncontinue;\n}\nif (!Symbols.isPublic(lhsFunc.symbol)) {\nreturn false;\n}\nBAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, new HashSet<>());\nif (rhsFunc == null || !Symbols.isPublic(rhsFunc.symbol)) {\nreturn false;\n}\n}\nreturn true;\n}\npublic boolean isValidErrorDetailType(BType detailType) {\nswitch (detailType.tag) {\ncase TypeTags.MAP:\ncase TypeTags.RECORD:\nreturn isAssignable(detailType, symTable.detailType);\n}\nreturn false;\n}\nprivate boolean isNullable(BType fieldType) {\nreturn fieldType.isNullable();\n}\nprivate class BSameTypeVisitor implements BTypeVisitor {\nSet unresolvedTypes;\nBSameTypeVisitor(Set unresolvedTypes) {\nthis.unresolvedTypes = unresolvedTypes;\n}\n@Override\npublic Boolean visit(BType t, BType s) {\nif (t == s) {\nreturn true;\n}\nswitch (t.tag) {\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.STRING:\ncase TypeTags.BOOLEAN:\ncase TypeTags.ANY:\ncase TypeTags.ANYDATA:\nreturn t.tag == s.tag\n&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));\ndefault:\nbreak;\n}\nreturn false;\n}\n@Override\npublic Boolean visit(BBuiltInRefType t, BType s) {\nreturn t == s;\n}\n@Override\npublic Boolean visit(BAnyType t, BType s) {\nreturn t == s;\n}\n@Override\npublic Boolean visit(BAnydataType t, BType s) {\nreturn t == s;\n}\n@Override\npublic Boolean visit(BMapType t, BType s) {\nif (s.tag != TypeTags.MAP) {\nreturn false;\n}\nBMapType sType = ((BMapType) s);\nreturn isSameType(sType.constraint, t.constraint, this.unresolvedTypes);\n}\n@Override\npublic Boolean visit(BFutureType t, BType s) {\nreturn s.tag == TypeTags.FUTURE && t.constraint.tag == ((BFutureType) s).constraint.tag;\n}\n@Override\npublic Boolean visit(BXMLType t, 
BType s) {\nreturn visit((BBuiltInRefType) t, s);\n}\n@Override\npublic Boolean visit(BJSONType t, BType s) {\nreturn s.tag == TypeTags.JSON;\n}\n@Override\npublic Boolean visit(BArrayType t, BType s) {\nreturn s.tag == TypeTags.ARRAY && checkArrayEquality(s, t, new HashSet<>());\n}\n@Override\npublic Boolean visit(BObjectType t, BType s) {\nif (t == s) {\nreturn true;\n}\nif (s.tag != TypeTags.OBJECT) {\nreturn false;\n}\nreturn t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name);\n}\n@Override\npublic Boolean visit(BRecordType t, BType s) {\nif (t == s) {\nreturn true;\n}\nif (s.tag != TypeTags.RECORD) {\nreturn false;\n}\nBRecordType source = (BRecordType) s;\nif (source.fields.size() != t.fields.size()) {\nreturn false;\n}\nboolean notSameType = source.fields\n.stream()\n.map(fs -> t.fields.stream()\n.anyMatch(ft -> fs.name.equals(ft.name)\n&& isSameType(fs.type, ft.type, this.unresolvedTypes)\n&& hasSameOptionalFlag(fs.symbol, ft.symbol)))\n.anyMatch(foundSameType -> !foundSameType);\nif (notSameType) {\nreturn false;\n}\nreturn isSameType(source.restFieldType, t.restFieldType, unresolvedTypes);\n}\nprivate boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) {\nreturn ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL;\n}\n@Override\npublic Boolean visit(BTableType t, BType s) {\nreturn t == s;\n}\npublic Boolean visit(BTupleType t, BType s) {\nif (s.tag != TypeTags.TUPLE) {\nreturn false;\n}\nBTupleType source = (BTupleType) s;\nif (source.tupleTypes.size() != t.tupleTypes.size()) {\nreturn false;\n}\nfor (int i = 0; i < source.tupleTypes.size(); i++) {\nif (t.getTupleTypes().get(i) == symTable.noType) {\ncontinue;\n}\nif (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\n@Override\npublic Boolean visit(BStreamType t, BType s) {\nreturn t == s;\n}\n@Override\npublic Boolean visit(BInvokableType t, BType s) {\nreturn s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, new HashSet<>());\n}\n@Override\npublic Boolean visit(BUnionType tUnionType, BType s) {\nif (s.tag != TypeTags.UNION) {\nreturn false;\n}\nBUnionType sUnionType = (BUnionType) s;\nif (sUnionType.getMemberTypes().size()\n!= tUnionType.getMemberTypes().size()) {\nreturn false;\n}\nSet sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes());\nSet targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes());\nboolean notSameType = sourceTypes\n.stream()\n.map(sT -> targetTypes\n.stream()\n.anyMatch(it -> isSameType(it, sT, this.unresolvedTypes)))\n.anyMatch(foundSameType -> !foundSameType);\nreturn !notSameType;\n}\n@Override\npublic Boolean visit(BErrorType t, BType s) {\nif (s.tag != TypeTags.ERROR) {\nreturn false;\n}\nBErrorType source = (BErrorType) s;\nif (!isSameType(source.reasonType, t.reasonType, this.unresolvedTypes)) {\nreturn false;\n}\nif (source.detailType == t.detailType) {\nreturn true;\n}\nreturn isSameType(source.detailType, t.detailType, this.unresolvedTypes);\n}\n@Override\npublic Boolean visit(BServiceType t, BType s) {\nreturn t == s || t.tag == s.tag;\n}\n@Override\npublic Boolean visit(BTypedescType t, BType s) {\nif (s.tag != TypeTags.TYPEDESC) {\nreturn false;\n}\nBTypedescType sType = ((BTypedescType) s);\nreturn isSameType(sType.constraint, t.constraint, this.unresolvedTypes);\n}\n@Override\npublic Boolean visit(BFiniteType t, BType s) {\nreturn s == t;\n}\n};\nprivate boolean checkFieldEquivalency(BRecordType lhsType, BRecordType 
rhsType, Set unresolvedTypes) {\nMap rhsFields = rhsType.fields.stream().collect(Collectors.toMap(BField::getName, f -> f));\nfor (BField lhsField : lhsType.fields) {\nBField rhsField = rhsFields.get(lhsField.name);\nif (rhsField == null) {\nreturn false;\n}\nif (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(rhsField.symbol)) {\nreturn false;\n}\nif (!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {\nreturn false;\n}\nrhsFields.remove(lhsField.name);\n}\nreturn rhsFields.entrySet().stream().allMatch(\nfieldEntry -> isAssignable(fieldEntry.getValue().type, lhsType.restFieldType, unresolvedTypes));\n}\nprivate BAttachedFunction getMatchingInvokableType(List rhsFuncList, BAttachedFunction lhsFunc,\nSet unresolvedTypes) {\nreturn rhsFuncList.stream()\n.filter(rhsFunc -> lhsFunc.funcName.equals(rhsFunc.funcName))\n.filter(rhsFunc -> isFunctionTypeAssignable(rhsFunc.type, lhsFunc.type, unresolvedTypes))\n.findFirst()\n.orElse(null);\n}\nprivate boolean isInSameVisibilityRegion(BSymbol lhsSym, BSymbol rhsSym) {\nif (Symbols.isPrivate(lhsSym)) {\nreturn Symbols.isPrivate(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID)\n&& lhsSym.owner.name.equals(rhsSym.owner.name);\n} else if (Symbols.isPublic(lhsSym)) {\nreturn Symbols.isPublic(rhsSym);\n}\nreturn !Symbols.isPrivate(rhsSym) && !Symbols.isPublic(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID);\n}\nprivate boolean isAssignableToUnionType(BType source, BType target, Set unresolvedTypes) {\nSet sourceTypes = new LinkedHashSet<>();\nSet targetTypes = new LinkedHashSet<>();\nif (source.tag == TypeTags.UNION) {\nBUnionType sourceUnionType = (BUnionType) source;\nsourceTypes.addAll(sourceUnionType.getMemberTypes());\n} else {\nsourceTypes.add(source);\n}\nif (target.tag == TypeTags.UNION) {\nBUnionType targetUnionType = (BUnionType) target;\ntargetTypes.addAll(targetUnionType.getMemberTypes());\n} else {\ntargetTypes.add(target);\n}\nreturn sourceTypes.stream()\n.allMatch(s -> (targetTypes.stream().anyMatch(t -> isAssignable(s, t, unresolvedTypes)))\n|| (s.tag == TypeTags.FINITE && isAssignable(s, target, unresolvedTypes))\n|| (s.tag == TypeTags.XML\n&& isAssignableToUnionType(expandedXMLBuiltinSubtypes, target, unresolvedTypes)));\n}\nprivate boolean isFiniteTypeAssignable(BFiniteType finiteType, BType targetType, Set unresolvedTypes) {\nif (targetType.tag == TypeTags.FINITE) {\nreturn finiteType.getValueSpace().stream()\n.allMatch(expression -> isAssignableToFiniteType(targetType, (BLangLiteral) expression));\n}\nif (targetType.tag == TypeTags.UNION) {\nList unionMemberTypes = getAllTypes(targetType);\nreturn finiteType.getValueSpace().stream()\n.allMatch(valueExpr -> unionMemberTypes.stream()\n.anyMatch(targetMemType -> targetMemType.tag == TypeTags.FINITE ?\nisAssignableToFiniteType(targetMemType, (BLangLiteral) valueExpr) :\nisAssignable(valueExpr.type, targetType, unresolvedTypes)));\n}\nreturn finiteType.getValueSpace().stream()\n.allMatch(expression -> isAssignable(expression.type, targetType, unresolvedTypes));\n}\nboolean isAssignableToFiniteType(BType type, BLangLiteral literalExpr) {\nif (type.tag != TypeTags.FINITE) {\nreturn false;\n}\nBFiniteType expType = (BFiniteType) type;\nreturn expType.getValueSpace().stream().anyMatch(memberLiteral -> {\nif (((BLangLiteral) memberLiteral).value == null) {\nreturn literalExpr.value == null;\n}\nreturn checkLiteralAssignabilityBasedOnType((BLangLiteral) memberLiteral, literalExpr);\n});\n}\n/**\n* Method to check the literal assignability based on the types of the literals. 
For numeric literals the\n* assignability depends on the equivalency of the literals. The candidate literal can either be a simple\n* literal or a constant. In the case of a constant, it is assignable to the base literal if and only if both\n* literals have the same type and equivalent values.\n*\n* @param baseLiteral Literal based on which we check the assignability.\n* @param candidateLiteral Literal to be tested whether it is assignable to the base literal or not.\n* @return true if assignable; false otherwise.\n*/\nboolean checkLiteralAssignabilityBasedOnType(BLangLiteral baseLiteral, BLangLiteral candidateLiteral) {\nif (baseLiteral.getKind() != candidateLiteral.getKind()) {\nreturn false;\n}\nObject baseValue = baseLiteral.value;\nObject candidateValue = candidateLiteral.value;\nint candidateTypeTag = candidateLiteral.type.tag;\nswitch (baseLiteral.type.tag) {\ncase TypeTags.BYTE:\nif (candidateTypeTag == TypeTags.BYTE || (candidateTypeTag == TypeTags.INT &&\n!candidateLiteral.isConstant && isByteLiteralValue((Long) candidateValue))) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.INT:\nif (candidateTypeTag == TypeTags.INT) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.SIGNED32_INT:\nif (candidateTypeTag == TypeTags.INT && isSigned32LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.SIGNED16_INT:\nif (candidateTypeTag == TypeTags.INT && isSigned16LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.SIGNED8_INT:\nif (candidateTypeTag == TypeTags.INT && isSigned8LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.UNSIGNED32_INT:\nif (candidateTypeTag == TypeTags.INT && isUnsigned32LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.UNSIGNED16_INT:\nif (candidateTypeTag == TypeTags.INT && isUnsigned16LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.UNSIGNED8_INT:\nif (candidateTypeTag == TypeTags.INT && isUnsigned8LiteralValue((Long) candidateValue)) {\nreturn ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();\n}\nbreak;\ncase TypeTags.FLOAT:\nString baseValueStr = String.valueOf(baseValue);\nString originalValue = baseLiteral.originalValue != null ?
baseLiteral.originalValue : baseValueStr;\nif (NumericLiteralSupport.isDecimalDiscriminated(originalValue)) {\nreturn false;\n}\ndouble baseDoubleVal = Double.parseDouble(baseValueStr);\ndouble candidateDoubleVal;\nif (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {\ncandidateDoubleVal = ((Long) candidateValue).doubleValue();\nreturn baseDoubleVal == candidateDoubleVal;\n} else if (candidateTypeTag == TypeTags.FLOAT) {\ncandidateDoubleVal = Double.parseDouble(String.valueOf(candidateValue));\nreturn baseDoubleVal == candidateDoubleVal;\n}\nbreak;\ncase TypeTags.DECIMAL:\nBigDecimal baseDecimalVal = NumericLiteralSupport.parseBigDecimal(baseValue);\nBigDecimal candidateDecimalVal;\nif (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {\ncandidateDecimalVal = new BigDecimal((long) candidateValue, MathContext.DECIMAL128);\nreturn baseDecimalVal.compareTo(candidateDecimalVal) == 0;\n} else if (candidateTypeTag == TypeTags.FLOAT && !candidateLiteral.isConstant ||\ncandidateTypeTag == TypeTags.DECIMAL) {\nif (NumericLiteralSupport.isFloatDiscriminated(String.valueOf(candidateValue))) {\nreturn false;\n}\ncandidateDecimalVal = NumericLiteralSupport.parseBigDecimal(candidateValue);\nreturn baseDecimalVal.compareTo(candidateDecimalVal) == 0;\n}\nbreak;\ndefault:\nreturn baseValue.equals(candidateValue);\n}\nreturn false;\n}\nboolean isByteLiteralValue(Long longObject) {\nreturn (longObject.intValue() >= BBYTE_MIN_VALUE && longObject.intValue() <= BBYTE_MAX_VALUE);\n}\nboolean isSigned32LiteralValue(Long longObject) {\nreturn (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE);\n}\nboolean isSigned16LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= SIGNED16_MIN_VALUE && longObject.intValue() <= SIGNED16_MAX_VALUE);\n}\nboolean isSigned8LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= SIGNED8_MIN_VALUE && longObject.intValue() <= SIGNED8_MAX_VALUE);\n}\nboolean isUnsigned32LiteralValue(Long longObject) {\nreturn (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE);\n}\nboolean isUnsigned16LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED16_MAX_VALUE);\n}\nboolean isUnsigned8LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED8_MAX_VALUE);\n}\nboolean isCharLiteralValue(String literal) {\nreturn (literal.codePoints().count() == 1);\n}\n/**\n* Method to retrieve a type representing all the values in the value space of a finite type that are assignable to\n* the target type.\n*\n* @param finiteType the finite type\n* @param targetType the target type\n* @return a new finite type if at least one value in the value space of the specified finiteType is\n* assignable to targetType (the same if all are assignable), else semanticError\n*/\nBType getTypeForFiniteTypeValuesAssignableToType(BFiniteType finiteType, BType targetType) {\nif (isAssignable(finiteType, targetType)) {\nreturn finiteType;\n}\nSet matchingValues = finiteType.getValueSpace().stream()\n.filter(\nexpr -> isAssignable(expr.type, targetType) ||\nisAssignableToFiniteType(targetType, (BLangLiteral) expr) ||\n(targetType.tag == TypeTags.UNION &&\n((BUnionType) targetType).getMemberTypes().stream()\n.filter(memType -> memType.tag == TypeTags.FINITE)\n.anyMatch(filteredType -> isAssignableToFiniteType(filteredType,\n(BLangLiteral) expr))))\n.collect(Collectors.toSet());\nif (matchingValues.isEmpty()) {\nreturn symTable.semanticError;\n}\nBTypeSymbol 
finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteType.tsymbol.flags,\nnames.fromString(\"$anonType$\" + finiteTypeCount++),\nfiniteType.tsymbol.pkgID, null,\nfiniteType.tsymbol.owner);\nBFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, matchingValues);\nfiniteTypeSymbol.type = intersectingFiniteType;\nreturn intersectingFiniteType;\n}\n/**\n* Method to retrieve a type representing all the member types of a union type that are assignable to\n* the target type.\n*\n* @param unionType the union type\n* @param targetType the target type\n* @return a single type or a new union type if at least one member type of the union type is\n* assignable to targetType, else semanticError\n*/\nBType getTypeForUnionTypeMembersAssignableToType(BUnionType unionType, BType targetType) {\nList intersection = new LinkedList<>();\nunionType.getMemberTypes().forEach(memType -> {\nif (memType.tag == TypeTags.FINITE) {\nBType finiteTypeWithMatches = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) memType,\ntargetType);\nif (finiteTypeWithMatches != symTable.semanticError) {\nintersection.add(finiteTypeWithMatches);\n}\n} else {\nif (isAssignable(memType, targetType)) {\nintersection.add(memType);\n}\n}\n});\nif (intersection.isEmpty()) {\nreturn symTable.semanticError;\n}\nif (intersection.size() == 1) {\nreturn intersection.get(0);\n} else {\nreturn BUnionType.create(null, new LinkedHashSet<>(intersection));\n}\n}\nboolean validEqualityIntersectionExists(BType lhsType, BType rhsType) {\nif (!lhsType.isPureType() || !rhsType.isPureType()) {\nreturn false;\n}\nif (isAssignable(lhsType, rhsType) || isAssignable(rhsType, lhsType)) {\nreturn true;\n}\nSet lhsTypes = expandAndGetMemberTypesRecursive(lhsType);\nSet rhsTypes = expandAndGetMemberTypesRecursive(rhsType);\nreturn equalityIntersectionExists(lhsTypes, rhsTypes);\n}\nprivate boolean equalityIntersectionExists(Set lhsTypes, Set rhsTypes) {\nif ((lhsTypes.contains(symTable.anydataType) &&\nrhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR)) ||\n(rhsTypes.contains(symTable.anydataType) &&\nlhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR))) {\nreturn true;\n}\nboolean matchFound = lhsTypes\n.stream()\n.anyMatch(s -> rhsTypes\n.stream()\n.anyMatch(t -> isSameType(s, t)));\nif (!matchFound) {\nmatchFound = equalityIntersectionExistsForComplexTypes(lhsTypes, rhsTypes);\n}\nreturn matchFound;\n}\n/**\n* Retrieves member types of the specified type, expanding maps/arrays of/constrained by unions types to individual\n* maps/arrays.\n*\n* e.g., (string|int)[] would cause three entries as string[], int[], (string|int)[]\n*\n* @param bType the type for which member types needs to be identified\n* @return a set containing all the retrieved member types\n*/\npublic Set expandAndGetMemberTypesRecursive(BType bType) {\nSet memberTypes = new LinkedHashSet<>();\nswitch (bType.tag) {\ncase TypeTags.BYTE:\ncase TypeTags.INT:\nmemberTypes.add(symTable.intType);\nmemberTypes.add(symTable.byteType);\nbreak;\ncase TypeTags.FINITE:\nBFiniteType expType = (BFiniteType) bType;\nexpType.getValueSpace().forEach(value -> {\nmemberTypes.add(value.type);\n});\nbreak;\ncase TypeTags.UNION:\nBUnionType unionType = (BUnionType) bType;\nunionType.getMemberTypes().forEach(member -> {\nmemberTypes.addAll(expandAndGetMemberTypesRecursive(member));\n});\nbreak;\ncase TypeTags.ARRAY:\nBType arrayElementType = ((BArrayType) bType).getElementType();\nif (((BArrayType) bType).getSize() != -1) {\nmemberTypes.add(new 
BArrayType(arrayElementType));\n}\nif (arrayElementType.tag == TypeTags.UNION) {\nSet elementUnionTypes = expandAndGetMemberTypesRecursive(arrayElementType);\nelementUnionTypes.forEach(elementUnionType -> {\nmemberTypes.add(new BArrayType(elementUnionType));\n});\n}\nmemberTypes.add(bType);\nbreak;\ncase TypeTags.MAP:\nBType mapConstraintType = ((BMapType) bType).getConstraint();\nif (mapConstraintType.tag == TypeTags.UNION) {\nSet constraintUnionTypes = expandAndGetMemberTypesRecursive(mapConstraintType);\nconstraintUnionTypes.forEach(constraintUnionType -> {\nmemberTypes.add(new BMapType(TypeTags.MAP, constraintUnionType, symTable.mapType.tsymbol));\n});\n}\nmemberTypes.add(bType);\nbreak;\ndefault:\nmemberTypes.add(bType);\n}\nreturn memberTypes;\n}\nprivate boolean tupleIntersectionExists(BTupleType lhsType, BTupleType rhsType) {\nif (lhsType.getTupleTypes().size() != rhsType.getTupleTypes().size()) {\nreturn false;\n}\nList lhsMemberTypes = lhsType.getTupleTypes();\nList rhsMemberTypes = rhsType.getTupleTypes();\nfor (int i = 0; i < lhsType.getTupleTypes().size(); i++) {\nif (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberTypes.get(i)),\nexpandAndGetMemberTypesRecursive(rhsMemberTypes.get(i)))) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean equalityIntersectionExistsForComplexTypes(Set lhsTypes, Set rhsTypes) {\nfor (BType lhsMemberType : lhsTypes) {\nswitch (lhsMemberType.tag) {\ncase TypeTags.INT:\ncase TypeTags.STRING:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.BOOLEAN:\ncase TypeTags.NIL:\nif (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {\nreturn true;\n}\nbreak;\ncase TypeTags.JSON:\nif (jsonEqualityIntersectionExists(rhsTypes)) {\nreturn true;\n}\nbreak;\ncase TypeTags.TUPLE:\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&\ntupleIntersectionExists((BTupleType) lhsMemberType, (BTupleType) rhsMemberType))) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&\narrayTupleEqualityIntersectionExists((BArrayType) rhsMemberType,\n(BTupleType) lhsMemberType))) {\nreturn true;\n}\nbreak;\ncase TypeTags.ARRAY:\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&\nequalityIntersectionExists(\nexpandAndGetMemberTypesRecursive(((BArrayType) lhsMemberType).eType),\nexpandAndGetMemberTypesRecursive(((BArrayType) rhsMemberType).eType)))) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&\narrayTupleEqualityIntersectionExists((BArrayType) lhsMemberType,\n(BTupleType) rhsMemberType))) {\nreturn true;\n}\nbreak;\ncase TypeTags.MAP:\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&\nequalityIntersectionExists(\nexpandAndGetMemberTypesRecursive(((BMapType) lhsMemberType).constraint),\nexpandAndGetMemberTypesRecursive(((BMapType) rhsMemberType).constraint)))) {\nreturn true;\n}\nif (!isAssignable(((BMapType) lhsMemberType).constraint, symTable.errorType) &&\nrhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&\nmapRecordEqualityIntersectionExists((BMapType) lhsMemberType,\n(BRecordType) rhsMemberType))) {\nreturn true;\n}\nbreak;\ncase TypeTags.OBJECT:\ncase TypeTags.RECORD:\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> 
checkStructEquivalency(rhsMemberType, lhsMemberType) ||\ncheckStructEquivalency(lhsMemberType, rhsMemberType))) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&\nrecordEqualityIntersectionExists((BRecordType) lhsMemberType,\n(BRecordType) rhsMemberType))) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON) &&\njsonEqualityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberType))) {\nreturn true;\n}\nif (rhsTypes.stream().anyMatch(\nrhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&\nmapRecordEqualityIntersectionExists((BMapType) rhsMemberType,\n(BRecordType) lhsMemberType))) {\nreturn true;\n}\nbreak;\n}\n}\nreturn false;\n}\nprivate boolean arrayTupleEqualityIntersectionExists(BArrayType arrayType, BTupleType tupleType) {\nSet elementTypes = expandAndGetMemberTypesRecursive(arrayType.eType);\nreturn tupleType.tupleTypes.stream()\n.allMatch(tupleMemType -> equalityIntersectionExists(elementTypes,\nexpandAndGetMemberTypesRecursive(tupleMemType)));\n}\nprivate boolean recordEqualityIntersectionExists(BRecordType lhsType, BRecordType rhsType) {\nList lhsFields = lhsType.fields;\nList rhsFields = rhsType.fields;\nList matchedFieldNames = new ArrayList<>();\nfor (BField lhsField : lhsFields) {\nOptional match =\nrhsFields.stream().filter(rhsField -> lhsField.name.equals(rhsField.name)).findFirst();\nif (match.isPresent()) {\nif (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),\nexpandAndGetMemberTypesRecursive(match.get().type))) {\nreturn false;\n}\nmatchedFieldNames.add(lhsField.getName());\n} else {\nif (Symbols.isFlagOn(lhsField.symbol.flags, Flags.OPTIONAL)) {\nbreak;\n}\nif (rhsType.sealed) {\nreturn false;\n}\nif (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),\nexpandAndGetMemberTypesRecursive(rhsType.restFieldType))) {\nreturn false;\n}\n}\n}\nfor (BField rhsField : rhsFields) {\nif (matchedFieldNames.contains(rhsField.getName())) {\ncontinue;\n}\nif (!Symbols.isFlagOn(rhsField.symbol.flags, Flags.OPTIONAL)) {\nif (lhsType.sealed) {\nreturn false;\n}\nif (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(rhsField.type),\nexpandAndGetMemberTypesRecursive(lhsType.restFieldType))) {\nreturn false;\n}\n}\n}\nreturn true;\n}\nprivate boolean mapRecordEqualityIntersectionExists(BMapType mapType, BRecordType recordType) {\nSet mapConstrTypes = expandAndGetMemberTypesRecursive(mapType.getConstraint());\nreturn recordType.fields.stream()\n.allMatch(field -> Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) ||\nequalityIntersectionExists(mapConstrTypes, expandAndGetMemberTypesRecursive(field.type)));\n}\nprivate boolean jsonEqualityIntersectionExists(Set typeSet) {\nfor (BType type : typeSet) {\nswitch (type.tag) {\ncase TypeTags.MAP:\nif (!isAssignable(((BMapType) type).constraint, symTable.errorType)) {\nreturn true;\n}\nbreak;\ncase TypeTags.RECORD:\nBRecordType recordType = (BRecordType) type;\nif (recordType.fields.stream()\n.allMatch(field -> Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) ||\n!isAssignable(field.type, symTable.errorType))) {\nreturn true;\n}\nbreak;\ndefault:\nif (isAssignable(type, symTable.jsonType)) {\nreturn true;\n}\n}\n}\nreturn false;\n}\npublic BType getRemainingType(BType originalType, BType typeToRemove) {\nswitch (originalType.tag) {\ncase TypeTags.UNION:\nreturn getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));\ncase 
TypeTags.FINITE:\nreturn getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));\ndefault:\nreturn originalType;\n}\n}\nprivate BType getRemainingType(BUnionType originalType, List removeTypes) {\nList remainingTypes = getAllTypes(originalType);\nremoveTypes.forEach(removeType -> remainingTypes.removeIf(type -> isAssignable(type, removeType)));\nList finiteTypesToRemove = new ArrayList<>();\nList finiteTypesToAdd = new ArrayList<>();\nfor (BType remainingType : remainingTypes) {\nif (remainingType.tag == TypeTags.FINITE) {\nBFiniteType finiteType = (BFiniteType) remainingType;\nfiniteTypesToRemove.add(finiteType);\nBType remainingTypeWithMatchesRemoved = getRemainingType(finiteType, removeTypes);\nif (remainingTypeWithMatchesRemoved != symTable.semanticError) {\nfiniteTypesToAdd.add(remainingTypeWithMatchesRemoved);\n}\n}\n}\nremainingTypes.removeAll(finiteTypesToRemove);\nremainingTypes.addAll(finiteTypesToAdd);\nif (remainingTypes.size() == 1) {\nreturn remainingTypes.get(0);\n}\nif (remainingTypes.isEmpty()) {\nreturn symTable.semanticError;\n}\nreturn BUnionType.create(null, new LinkedHashSet<>(remainingTypes));\n}\nprivate BType getRemainingType(BFiniteType originalType, List removeTypes) {\nSet remainingValueSpace = new LinkedHashSet<>();\nfor (BLangExpression valueExpr : originalType.getValueSpace()) {\nboolean matchExists = false;\nfor (BType remType : removeTypes) {\nif (isAssignable(valueExpr.type, remType) ||\nisAssignableToFiniteType(remType, (BLangLiteral) valueExpr)) {\nmatchExists = true;\nbreak;\n}\n}\nif (!matchExists) {\nremainingValueSpace.add(valueExpr);\n}\n}\nif (remainingValueSpace.isEmpty()) {\nreturn symTable.semanticError;\n}\nBTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, originalType.tsymbol.flags,\nnames.fromString(\"$anonType$\" + finiteTypeCount++),\noriginalType.tsymbol.pkgID, null,\noriginalType.tsymbol.owner);\nBFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, remainingValueSpace);\nfiniteTypeSymbol.type = intersectingFiniteType;\nreturn intersectingFiniteType;\n}\npublic BType getSafeType(BType type, boolean liftNil, boolean liftError) {\nswitch (type.tag) {\ncase TypeTags.JSON:\nBJSONType jsonType = (BJSONType) type;\nreturn new BJSONType(jsonType.tag, jsonType.tsymbol, false);\ncase TypeTags.ANY:\nreturn new BAnyType(type.tag, type.tsymbol, false);\ncase TypeTags.ANYDATA:\nreturn new BAnydataType(type.tag, type.tsymbol, false);\n}\nif (type.tag != TypeTags.UNION) {\nreturn type;\n}\nBUnionType unionType = (BUnionType) type;\nLinkedHashSet memTypes = new LinkedHashSet<>(unionType.getMemberTypes());\nBUnionType errorLiftedType = BUnionType.create(null, memTypes);\nif (liftNil) {\nerrorLiftedType.remove(symTable.nilType);\n}\nif (liftError) {\nerrorLiftedType.remove(symTable.errorType);\n}\nif (errorLiftedType.getMemberTypes().size() == 1) {\nreturn errorLiftedType.getMemberTypes().toArray(new BType[0])[0];\n}\nreturn errorLiftedType;\n}\npublic List getAllTypes(BType type) {\nif (type.tag != TypeTags.UNION) {\nreturn Lists.of(type);\n}\nList memberTypes = new ArrayList<>();\n((BUnionType) type).getMemberTypes().forEach(memberType -> memberTypes.addAll(getAllTypes(memberType)));\nreturn memberTypes;\n}\npublic boolean isAllowedConstantType(BType type) {\nswitch (type.tag) {\ncase TypeTags.BOOLEAN:\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.STRING:\ncase TypeTags.NIL:\nreturn true;\ncase TypeTags.MAP:\nreturn 
isAllowedConstantType(((BMapType) type).constraint);\ncase TypeTags.FINITE:\nBLangExpression finiteValue = ((BFiniteType) type).getValueSpace().toArray(new BLangExpression[0])[0];\nreturn isAllowedConstantType(finiteValue.type);\ndefault:\nreturn false;\n}\n}\npublic boolean isValidLiteral(BLangLiteral literal, BType targetType) {\nBType literalType = literal.type;\nif (literalType.tag == targetType.tag) {\nreturn true;\n}\nswitch (targetType.tag) {\ncase TypeTags.BYTE:\nreturn literalType.tag == TypeTags.INT && isByteLiteralValue((Long) literal.value);\ncase TypeTags.DECIMAL:\nreturn literalType.tag == TypeTags.FLOAT || literalType.tag == TypeTags.INT;\ncase TypeTags.FLOAT:\nreturn literalType.tag == TypeTags.INT;\ncase TypeTags.SIGNED32_INT:\nreturn literalType.tag == TypeTags.INT && isSigned32LiteralValue((Long) literal.value);\ncase TypeTags.SIGNED16_INT:\nreturn literalType.tag == TypeTags.INT && isSigned16LiteralValue((Long) literal.value);\ncase TypeTags.SIGNED8_INT:\nreturn literalType.tag == TypeTags.INT && isSigned8LiteralValue((Long) literal.value);\ncase TypeTags.UNSIGNED32_INT:\nreturn literalType.tag == TypeTags.INT && isUnsigned32LiteralValue((Long) literal.value);\ncase TypeTags.UNSIGNED16_INT:\nreturn literalType.tag == TypeTags.INT && isUnsigned16LiteralValue((Long) literal.value);\ncase TypeTags.UNSIGNED8_INT:\nreturn literalType.tag == TypeTags.INT && isUnsigned8LiteralValue((Long) literal.value);\ncase TypeTags.CHAR_STRING:\nreturn literalType.tag == TypeTags.STRING && isCharLiteralValue((String) literal.value);\ndefault:\nreturn false;\n}\n}\n/**\n* Validate if the return type of the given function is a subtype of `error?`, containing `()`.\n*\n* @param function The function of which the return type should be validated\n* @param diagnosticCode The code to log if the return type is invalid\n*/\npublic void validateErrorOrNilReturn(BLangFunction function, DiagnosticCode diagnosticCode) {\nBType returnType = function.returnTypeNode.type;\nif (returnType.tag == TypeTags.NIL) {\nreturn;\n}\nif (returnType.tag == TypeTags.UNION) {\nSet memberTypes = ((BUnionType) returnType).getMemberTypes();\nif (returnType.isNullable() &&\nmemberTypes.stream().allMatch(type -> type.tag == TypeTags.NIL || type.tag == TypeTags.ERROR)) {\nreturn;\n}\n}\ndlogHelper.error(function.returnTypeNode.pos, diagnosticCode, function.returnTypeNode.type.toString());\n}\n/**\n* Type vector of size two, to hold the source and the target types.\n*\n* @since 0.982.0\n*/\nprivate static class TypePair {\nBType sourceType;\nBType targetType;\npublic TypePair(BType sourceType, BType targetType) {\nthis.sourceType = sourceType;\nthis.targetType = targetType;\n}\n@Override\npublic boolean equals(Object obj) {\nif (!(obj instanceof TypePair)) {\nreturn false;\n}\nTypePair other = (TypePair) obj;\nreturn this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);\n}\n@Override\npublic int hashCode() {\nreturn Objects.hash(sourceType, targetType);\n}\n}\n/**\n* A functional interface for parameterizing the type of type checking that needs to be done on the source and\n* target types.\n*\n* @since 0.995.0\n*/\nprivate interface TypeEqualityPredicate {\nboolean test(BType source, BType target, Set unresolvedTypes);\n}\npublic boolean hasFillerValue(BType type) {\nswitch (type.tag) {\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.STRING:\ncase TypeTags.BOOLEAN:\ncase TypeTags.JSON:\ncase TypeTags.XML:\ncase TypeTags.TABLE:\ncase 
TypeTags.NIL:\ncase TypeTags.ANYDATA:\ncase TypeTags.MAP:\ncase TypeTags.ANY:\nreturn true;\ncase TypeTags.ARRAY:\nreturn checkFillerValue((BArrayType) type);\ncase TypeTags.FINITE:\nreturn checkFillerValue((BFiniteType) type);\ncase TypeTags.UNION:\nreturn checkFillerValue((BUnionType) type);\ncase TypeTags.OBJECT:\nreturn checkFillerValue((BObjectType) type);\ncase TypeTags.RECORD:\nreturn checkFillerValue((BRecordType) type);\ncase TypeTags.TUPLE:\nBTupleType tupleType = (BTupleType) type;\nreturn tupleType.getTupleTypes().stream().allMatch(eleType -> hasFillerValue(eleType));\ndefault:\nreturn false;\n}\n}\nprivate boolean checkFillerValue(BObjectType type) {\nif ((type.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) {\nreturn false;\n}\nBAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc;\nif (initFunction == null) {\nreturn true;\n}\nif (initFunction.symbol.getReturnType().getKind() != TypeKind.NIL) {\nreturn false;\n}\nfor (BVarSymbol bVarSymbol : initFunction.symbol.getParameters()) {\nif (!bVarSymbol.defaultableParam) {\nreturn false;\n}\n}\nreturn true;\n}\n/**\n* This will handle two types. Singleton : As singleton can have one value that value should it self be a valid fill\n* value Union : 1. if nil is a member it is the fill values 2. else all the values should belong to same type and\n* the default value for that type should be a member of the union precondition : value space should have at least\n* one element\n*\n* @param type BFiniteType union or finite\n* @return boolean whether type has a valid filler value or not\n*/\nprivate boolean checkFillerValue(BFiniteType type) {\nif (type.isNullable()) {\nreturn true;\n}\nif (type.getValueSpace().size() == 1) {\nreturn true;\n}\nIterator iterator = type.getValueSpace().iterator();\nBLangExpression firstElement = (BLangExpression) iterator.next();\nboolean defaultFillValuePresent = isImplicitDefaultValue(firstElement);\nwhile (iterator.hasNext()) {\nBLangExpression value = (BLangExpression) iterator.next();\nif (!isSameType(value.type, firstElement.type)) {\nreturn false;\n}\nif (!defaultFillValuePresent && isImplicitDefaultValue(value)) {\ndefaultFillValuePresent = true;\n}\n}\nreturn defaultFillValuePresent;\n}\nprivate boolean checkFillerValue(BUnionType type) {\nif (type.isNullable()) {\nreturn true;\n}\nIterator iterator = type.getMemberTypes().iterator();\nBType firstMember = iterator.next();\nwhile (iterator.hasNext()) {\nif (!isSameType(firstMember, iterator.next())) {\nreturn false;\n}\n}\nreturn isValueType(firstMember) && hasFillerValue(firstMember);\n}\nprivate boolean isImplicitDefaultValue(BLangExpression expression) {\nif ((expression.getKind() == NodeKind.LITERAL) || (expression.getKind() == NodeKind.NUMERIC_LITERAL)) {\nBLangLiteral literalExpression = (BLangLiteral) expression;\nBType literalExprType = literalExpression.type;\nObject value = literalExpression.getValue();\nswitch (literalExprType.getKind()) {\ncase INT:\ncase BYTE:\nreturn value.equals(Long.valueOf(0));\ncase STRING:\nreturn value == null || value.equals(\"\");\ncase DECIMAL:\ncase FLOAT:\nreturn value.equals(String.valueOf(0.0));\ncase BOOLEAN:\nreturn value.equals(Boolean.valueOf(false));\ncase NIL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nreturn false;\n}\nprivate boolean checkFillerValue(BRecordType type) {\nfor (BField field : type.fields) {\nif (Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) {\ncontinue;\n}\nif (Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {\nreturn 
false;\n}\n}\nreturn true;\n}\nprivate boolean checkFillerValue(BArrayType type) {\nif (type.size == -1) {\nreturn true;\n}\nreturn hasFillerValue(type.eType);\n}\n}" + }, + { + "comment": "\u6b64\u5904\u53d1\u5fc3\u8df3\u5e76\u4e0d\u80fd\u53d1\u5230session\u91cc\uff0c\u800c\u662f\u53d1\u5230\u4e86\u63a5\u53e3\u8c03\u8bd5\u7684session\u91cc\u4e86 \u5c11\u7237", + "method_body": "public void heartbeatCheck() {\nWebSocketUtils.sendMessageAll(\nnew SocketMsgDTO(MsgType.HEARTBEAT.name(), MsgType.HEARTBEAT.name(), MsgType.HEARTBEAT.name(), \"heartbeat check\")\n);\n}", + "target_code": "WebSocketUtils.sendMessageAll(", + "method_body_after": "public void heartbeatCheck() {\nExportWebSocketHandler.sendMessageSingle(\nnew SocketMsgDTO(MsgType.HEARTBEAT.name(), MsgType.HEARTBEAT.name(), MsgType.HEARTBEAT.name(), \"heartbeat check\")\n);\n}", + "context_before": "class ExportWebSocketHandler {\npublic static final Map ONLINE_EXPORT_EXCEL_SESSIONS = new ConcurrentHashMap<>();\npublic static void sendMessage(Session session, SocketMsgDTO message) {\nif (session == null) {\nreturn;\n}\nsession.setMaxIdleTimeout(86400000L);\nRemoteEndpoint.Async async = session.getAsyncRemote();\nif (async == null) {\nreturn;\n}\nasync.sendText(JSON.toJSONString(message));\n}\npublic static void sendMessageSingle(SocketMsgDTO dto) {\nsendMessage(ONLINE_EXPORT_EXCEL_SESSIONS.get(Optional.ofNullable(dto.getReportId())\n.orElse(StringUtils.EMPTY)), dto);\n}\n/**\n* \u8fde\u63a5\u6210\u529f\u54cd\u5e94\n*/\n@OnOpen\npublic void openSession(@PathParam(\"fileId\") String fileId, Session session) {\nONLINE_EXPORT_EXCEL_SESSIONS.put(fileId, session);\nRemoteEndpoint.Async async = session.getAsyncRemote();\nif (async != null) {\nasync.sendText(JSON.toJSONString(new SocketMsgDTO(fileId, \"\", MsgType.CONNECT.name(), MsgType.CONNECT.name())));\nsession.setMaxIdleTimeout(180000);\n}\nLogUtils.info(\"\u5ba2\u6237\u7aef: [\" + fileId + \"] : \u8fde\u63a5\u6210\u529f\uff01\" + WebSocketUtils.ONLINE_USER_SESSIONS.size(), fileId);\n}\n/**\n* \u6536\u5230\u6d88\u606f\u54cd\u5e94\n*/\n@OnMessage\npublic void onMessage(@PathParam(\"fileId\") String fileId, String message) {\nLogUtils.info(\"\u670d\u52a1\u5668\u6536\u5230\uff1a[\" + fileId + \"] : \" + message);\nSocketMsgDTO dto = JSON.parseObject(message, SocketMsgDTO.class);\nWebSocketUtils.sendMessageSingle(dto);\n}\n/**\n* \u8fde\u63a5\u5173\u95ed\u54cd\u5e94\n*/\n@OnClose\npublic void onClose(@PathParam(\"fileId\") String fileId, Session session) throws IOException {\nWebSocketUtils.ONLINE_USER_SESSIONS.remove(fileId);\nLogUtils.info(\"[\" + fileId + \"] : \u65ad\u5f00\u8fde\u63a5\uff01\" + WebSocketUtils.ONLINE_USER_SESSIONS.size());\nsession.close();\n}\n/**\n* \u8fde\u63a5\u5f02\u5e38\u54cd\u5e94\n*/\n@OnError\npublic void onError(Session session, Throwable throwable) throws IOException {\nLogUtils.error(\"\u8fde\u63a5\u5f02\u5e38\u54cd\u5e94\", throwable);\nsession.close();\n}\n/**\n* \u6bcf\u4e00\u5206\u949f\u7fa4\u53d1\u4e00\u6b21\u5fc3\u8df3\u68c0\u67e5\n*/\n@Scheduled(fixedRate = 60000)\n}", + "context_after": "class ExportWebSocketHandler {\npublic static final Map ONLINE_EXPORT_EXCEL_SESSIONS = new ConcurrentHashMap<>();\npublic static void sendMessage(Session session, SocketMsgDTO message) {\nif (session == null) {\nreturn;\n}\nsession.setMaxIdleTimeout(86400000L);\nRemoteEndpoint.Async async = session.getAsyncRemote();\nif (async == null) {\nreturn;\n}\nasync.sendText(JSON.toJSONString(message));\n}\npublic static void sendMessageSingle(SocketMsgDTO dto) 
{\nsendMessage(ONLINE_EXPORT_EXCEL_SESSIONS.get(Optional.ofNullable(dto.getReportId())\n.orElse(StringUtils.EMPTY)), dto);\n}\n/**\n* \u8fde\u63a5\u6210\u529f\u54cd\u5e94\n*/\n@OnOpen\npublic void openSession(@PathParam(\"fileId\") String fileId, Session session) {\nONLINE_EXPORT_EXCEL_SESSIONS.put(fileId, session);\nRemoteEndpoint.Async async = session.getAsyncRemote();\nif (async != null) {\nasync.sendText(JSON.toJSONString(new SocketMsgDTO(fileId, \"\", MsgType.CONNECT.name(), MsgType.CONNECT.name())));\nsession.setMaxIdleTimeout(180000);\n}\nLogUtils.info(\"\u5ba2\u6237\u7aef: [\" + fileId + \"] : \u8fde\u63a5\u6210\u529f\uff01\" + ExportWebSocketHandler.ONLINE_EXPORT_EXCEL_SESSIONS.size(), fileId);\n}\n/**\n* \u6536\u5230\u6d88\u606f\u54cd\u5e94\n*/\n@OnMessage\npublic void onMessage(@PathParam(\"fileId\") String fileId, String message) {\nLogUtils.info(\"\u670d\u52a1\u5668\u6536\u5230\uff1a[\" + fileId + \"] : \" + message);\nSocketMsgDTO dto = JSON.parseObject(message, SocketMsgDTO.class);\nExportWebSocketHandler.sendMessageSingle(dto);\n}\n/**\n* \u8fde\u63a5\u5173\u95ed\u54cd\u5e94\n*/\n@OnClose\npublic void onClose(@PathParam(\"fileId\") String fileId, Session session) throws IOException {\nExportWebSocketHandler.ONLINE_EXPORT_EXCEL_SESSIONS.remove(fileId);\nLogUtils.info(\"[\" + fileId + \"] : \u65ad\u5f00\u8fde\u63a5\uff01\" + ExportWebSocketHandler.ONLINE_EXPORT_EXCEL_SESSIONS.size());\nsession.close();\n}\n/**\n* \u8fde\u63a5\u5f02\u5e38\u54cd\u5e94\n*/\n@OnError\npublic void onError(Session session, Throwable throwable) throws IOException {\nLogUtils.error(\"\u8fde\u63a5\u5f02\u5e38\u54cd\u5e94\", throwable);\nsession.close();\n}\n/**\n* \u6bcf\u4e00\u5206\u949f\u7fa4\u53d1\u4e00\u6b21\u5fc3\u8df3\u68c0\u67e5\n*/\n@Scheduled(fixedRate = 60000)\n}" + }, + { + "comment": "I think the condition check is probably redundant", + "method_body": "public PCollection expand(PCollection events) {\nfinal Coder coder = events.getCoder();\nreturn events\n.apply(\"Pair with random key\", ParDo.of(new AssignShardFn<>(configuration.numKeyBuckets)))\n.apply(GroupByKey.create())\n.apply(\n\"ExpandIterable\",\nParDo.of(\nnew DoFn>, Event>() {\n@ProcessElement\npublic void processElement(\n@Element KV> element, OutputReceiver r) {\nfor (Event value : element.getValue()) {\nr.output(value);\n}\n}\n}))\n.apply(\nname + \".Serialize\",\nParDo.of(\nnew DoFn() {\nprivate final Counter bytesMetric = Metrics.counter(name, \"serde-bytes\");\nprivate final Random random = new Random();\nprivate double pardoCPUFactor =\n(configuration.pardoCPUFactor >= 0.0 && configuration.pardoCPUFactor <= 1.0)\n? 
configuration.pardoCPUFactor\n: 1.0;\n@ProcessElement\npublic void processElement(ProcessContext c) throws CoderException, IOException {\nEvent event;\nif (random.nextDouble() <= pardoCPUFactor) {\nevent = encodeDecode(coder, c.element(), bytesMetric);\n} else {\nevent = c.element();\n}\nc.output(event);\n}\n}));\n}", + "target_code": "(configuration.pardoCPUFactor >= 0.0 && configuration.pardoCPUFactor <= 1.0)", + "method_body_after": "public PCollection expand(PCollection events) {\nfinal Coder coder = events.getCoder();\nreturn events\n.apply(\"Pair with random key\", ParDo.of(new AssignShardFn<>(configuration.numKeyBuckets)))\n.apply(GroupByKey.create())\n.apply(\n\"ExpandIterable\",\nParDo.of(\nnew DoFn>, Event>() {\n@ProcessElement\npublic void processElement(\n@Element KV> element, OutputReceiver r) {\nfor (Event value : element.getValue()) {\nr.output(value);\n}\n}\n}))\n.apply(\nname + \".Serialize\",\nParDo.of(\nnew DoFn() {\nprivate final Counter bytesMetric = Metrics.counter(name, \"serde-bytes\");\nprivate final Random random = new Random();\nprivate double pardoCPUFactor =\n(configuration.pardoCPUFactor >= 0.0 && configuration.pardoCPUFactor <= 1.0)\n? configuration.pardoCPUFactor\n: 1.0;\n@ProcessElement\npublic void processElement(ProcessContext c) throws CoderException, IOException {\nEvent event;\nif (random.nextDouble() <= pardoCPUFactor) {\nevent = encodeDecode(coder, c.element(), bytesMetric);\n} else {\nevent = c.element();\n}\nc.output(event);\n}\n}));\n}", + "context_before": "class Query13 extends NexmarkQueryTransform {\nprivate final NexmarkConfiguration configuration;\npublic Query13(NexmarkConfiguration configuration) {\nsuper(\"Query13\");\nthis.configuration = configuration;\n}\n@Override\nprivate static Event encodeDecode(Coder coder, Event e, Counter bytesMetric)\nthrows IOException {\nByteArrayOutputStream outStream = new ByteArrayOutputStream();\ncoder.encode(e, outStream, Coder.Context.OUTER);\nbyte[] byteArray = outStream.toByteArray();\nbytesMetric.inc((long) byteArray.length);\nByteArrayInputStream inStream = new ByteArrayInputStream(byteArray);\nreturn coder.decode(inStream, Coder.Context.OUTER);\n}\n}", + "context_after": "class Query13 extends NexmarkQueryTransform {\nprivate final NexmarkConfiguration configuration;\npublic Query13(NexmarkConfiguration configuration) {\nsuper(\"Query13\");\nthis.configuration = configuration;\n}\n@Override\nprivate static Event encodeDecode(Coder coder, Event e, Counter bytesMetric)\nthrows IOException {\nByteArrayOutputStream outStream = new ByteArrayOutputStream();\ncoder.encode(e, outStream, Coder.Context.OUTER);\nbyte[] byteArray = outStream.toByteArray();\nbytesMetric.inc((long) byteArray.length);\nByteArrayInputStream inStream = new ByteArrayInputStream(byteArray);\nreturn coder.decode(inStream, Coder.Context.OUTER);\n}\n}" + }, + { + "comment": "For async APIs, you should use the Flux or Mono context instead of `Context.None`", + "method_body": "private Consumer> cancelOperation(String name) {\nreturn poller -> {\nservice.updateCertificateOperation(endpoint, name, API_VERSION, ACCEPT_LANGUAGE,\nnew CertificateOperationUpdateParameter().cancellationRequested(true), CONTENT_TYPE_HEADER_VALUE, Context.NONE);\n};\n}", + "target_code": "new CertificateOperationUpdateParameter().cancellationRequested(true), CONTENT_TYPE_HEADER_VALUE, Context.NONE);", + "method_body_after": "new CertificateOperationUpdateParameter().cancellationRequested(true);\nreturn service.updateCertificateOperation(endpoint, certificateName, 
API_VERSION, ACCEPT_LANGUAGE, parameter, CONTENT_TYPE_HEADER_VALUE, context)\n.doOnRequest(ignored -> logger.info(\"Cancelling certificate operation - {}", + "context_before": "class CertificateAsyncClient {\nstatic final String API_VERSION = \"7.0\";\nstatic final String ACCEPT_LANGUAGE = \"en-US\";\nstatic final int DEFAULT_MAX_PAGE_RESULTS = 25;\nstatic final String CONTENT_TYPE_HEADER_VALUE = \"application/json\";\nprivate String endpoint;\nprivate final CertificateService service;\nprivate final ClientLogger logger = new ClientLogger(CertificateAsyncClient.class);\n/**\n* Creates a CertificateAsyncClient that uses {@code pipeline} to service requests\n*\n* @param endpoint URL for the Azure KeyVault service.\n* @param pipeline HttpPipeline that the HTTP requests and responses flow through.\n*/\nCertificateAsyncClient(URL endpoint, HttpPipeline pipeline) {\nObjects.requireNonNull(endpoint, KeyVaultErrorCodeStrings.getErrorString(KeyVaultErrorCodeStrings.VAULT_END_POINT_REQUIRED));\nthis.endpoint = endpoint.toString();\nthis.service = RestProxy.create(CertificateService.class, pipeline);\n}\n/**\n* Creates a new certificate. If this is the first version, the certificate resource is created. This operation requires\n* the certificates/create permission.\n*\n*
Code Samples\n* Create certificate is a long running operation. The {@link Poller poller} allows to automatically poll on crate certificate\n* operation status. It is possible to monitor each intermediate poll response during the poll operation.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.async.certificateclient.createCertificate\n*\n* @param name The name of the certificate to be created.\n* @param policy The policy of the certificate to be created.\n* @param tags The application specific metadata to set.\n* @throws ResourceModifiedException when invalid certificate policy configuration is provided.\n* @return A {@link Poller} polling on the create certificate operation status.\n*/\npublic Poller createCertificate(String name, CertificatePolicy policy, Map tags) {\nreturn new Poller(Duration.ofSeconds(1), createPollOperation(name), actvationOperation(name, policy, tags), cancelOperation(name));\n}\nprivate Consumer> cancelOperation(String name) {\nreturn poller -> {\nservice.updateCertificateOperation(endpoint, name, API_VERSION, ACCEPT_LANGUAGE,\n/**\n* Creates a new certificate. If this is the first version, the certificate resource is created. This operation requires\n* the certificates/create permission.\n*\n*
Code Samples\n* Create certificate is a long running operation. The {@link Poller poller} allows to automatically poll on crate certificate\n* operation status. It is possible to monitor each intermediate poll response during the poll operation.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.async.certificateclient.createCertificate}\n*\n* @param name The name of the certificate to be created.\n* @param policy The policy of the certificate to be created.\n* @throws ResourceModifiedException when invalid certificate policy configuration is provided.\n* @return A {@link Poller} polling on the create certificate operation status.\n*/\npublic Poller createCertificate(String name, CertificatePolicy policy) {\nreturn createCertificate(name, policy, null);\n}\n/*\nPolling operation to poll on create certificate operation status.\n*/\nprivate Function, Mono>> createPollOperation(String certificateName) {\nreturn prePollResponse -> {\ntry {\nSystem.out.println(\"Calling Polling Function\");\nreturn service.getCertificateOperation(endpoint, certificateName, API_VERSION, ACCEPT_LANGUAGE, CONTENT_TYPE_HEADER_VALUE)\n.flatMap(this::processCertificateOperationResponse);\n} catch (HttpRequestException e) {\nlogger.logExceptionAsError(e);\nreturn Mono.just(new PollResponse<>(PollResponse.OperationStatus.FAILED, null));\n}\n};\n}\nprivate Mono> processCertificateOperationResponse(Response certificateOperationResponse) {\nPollResponse.OperationStatus status = null;\nswitch (certificateOperationResponse.value().status()) {\ncase \"inProgress\":\nstatus = PollResponse.OperationStatus.IN_PROGRESS;\nbreak;\ncase \"completed\":\nstatus = PollResponse.OperationStatus.SUCCESSFULLY_COMPLETED;\nbreak;\ncase \"failed\":\nstatus = PollResponse.OperationStatus.FAILED;\nbreak;\n}\nreturn Mono.just(new PollResponse<>(status, certificateOperationResponse.value()));\n}\nMono> createCertificateWithResponse(String name, CertificatePolicy certificatePolicy, Map tags) {\nCertificateRequestParameters certificateRequestParameters = new CertificateRequestParameters()\n.certificatePolicy(new CertificatePolicyRequest(certificatePolicy))\n.tags(tags);\nreturn service.createCertificate(endpoint, name, API_VERSION, ACCEPT_LANGUAGE, certificateRequestParameters, CONTENT_TYPE_HEADER_VALUE);\n}\n/**\n* Gets information about the latest version of the specified certificate. This operation requires the certificates/get permission.\n*\n*
Code Samples\n* Gets a specific version of the key in the key vault. Prints out the\n* returned certificate details when a response has been received.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.async.certificateclient.getCertificateWithPolicy}\n*\n* @param name The name of the certificate to retrieve, cannot be null\n* @throws ResourceNotFoundException when a certificate with {@code name} doesn't exist in the key vault.\n* @throws HttpRequestException if {@code name} is empty string.\n* @return A {@link Mono} containing the requested {@link Certificate certificate}.\n*/\n@ServiceMethod(returns = ReturnType.SINGLE)\npublic Mono getCertificateWithPolicy(String name) {\nreturn withContext(context -> getCertificateWithResponse(name, \"\", context)).flatMap(FluxUtil::toMono);\n}\nMono> getCertificateWithResponse(String name, String version, Context context) {\nreturn service.getCertificate(endpoint, name, version, API_VERSION, ACCEPT_LANGUAGE, CONTENT_TYPE_HEADER_VALUE, context)\n.doOnRequest(ignored -> logger.info(\"Retrieving certificate - {}\", name))\n.doOnSuccess(response -> logger.info(\"Retrieved the certificate - {}\", response.value().name()))\n.doOnError(error -> logger.warning(\"Failed to Retrieve the certificate - {}\", name, error));\n}\n/**\n* Gets information about the latest version of the specified certificate. This operation requires the certificates/get permission.\n*\n*
Code Samples\n* Gets a specific version of the key in the key vault. Prints out the\n* returned certificate details when a response has been received.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.async.certificateclient.getCertificateWithResponse\n*\n* @param name The name of the certificate to retrieve, cannot be null\n* @param version The version of the certificate to retrieve. If this is an empty String or null, this call is equivalent to calling {@link CertificateAsyncClient\n* @throws ResourceNotFoundException when a certificate with {@code name} doesn't exist in the key vault.\n* @throws HttpRequestException if {@code name} is empty string.\n* @return A {@link Mono} containing a {@link Response} whose {@link Response\n*/\n@ServiceMethod(returns = ReturnType.SINGLE)\npublic Mono> getCertificateWithResponse(String name, String version) {\nreturn withContext(context -> getCertificateWithResponse(name, version == null ? \"\" : version, context));\n}\n/**\n* Gets information about the specified version of the specified certificate. This operation requires the certificates/get permission.\n*\n*
Code Samples\n* Gets a specific version of the key in the key vault. Prints out the\n* returned certificate details when a response has been received.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.async.certificateclient.getCertificate}\n*\n* @param name The name of the certificate to retrieve, cannot be null\n* @param version The version of the certificate to retrieve. If this is an empty String or null, this call is equivalent to calling {@link CertificateAsyncClient\n* @throws ResourceNotFoundException when a certificate with {@code name} doesn't exist in the key vault.\n* @throws HttpRequestException if {@code name} is empty string.\n* @return A {@link Mono} containing the requested {@link Certificate certificate}.\n*/\n@ServiceMethod(returns = ReturnType.SINGLE)\npublic Mono getCertificate(String name, String version) {\nreturn withContext(context -> getCertificateWithResponse(name, version == null ? \"\" : version, context)).flatMap(FluxUtil::toMono);\n}\n/**\n* Updates the specified attributes associated with the specified certificate. The update operation changes specified attributes of an existing\n* stored certificate and attributes that are not specified in the request are left unchanged. This operation requires the certificates/update permission.\n*\n*
Code Samples\n* Gets latest version of the certificate, changes its tags and enabled status and then updates it in the Azure Key Vault. Prints out the\n* returned certificate details when a response has been received.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.async.certificateclient.updateCertificate\n*\n* @param certificate The {@link CertificateBase base certificate} object with updated properties.\n* @throws NullPointerException if {@code certificate} is {@code null}.\n* @throws ResourceNotFoundException when a certificate with {@link CertificateBase\n* @throws HttpRequestException if {@link CertificateBase\n* @return A {@link Mono} containing the {@link CertificateBase updated certificate}.\n*/\n@ServiceMethod(returns = ReturnType.SINGLE)\npublic Mono updateCertificate(CertificateBase certificate) {\nreturn withContext(context -> updateCertificateWithResponse(certificate, context)).flatMap(FluxUtil::toMono);\n}\n/**\n* Updates the specified attributes associated with the specified certificate. The update operation changes specified attributes of an existing\n* stored certificate and attributes that are not specified in the request are left unchanged.. This operation requires the certificates/update permission.\n*\n*
Code Samples\n* Gets latest version of the certificate, changes its enabled status and then updates it in the Azure Key Vault. Prints out the\n* returned certificate details when a response has been received.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.async.certificateclient.updateCertificateWithResponse\n*\n* @param certificate The {@link CertificateBase base certificate} object with updated properties.\n* @throws NullPointerException if {@code certificate} is {@code null}.\n* @throws ResourceNotFoundException when a certificate with {@link CertificateBase\n* @throws HttpRequestException if {@link CertificateBase\n* @return A {@link Mono} containing a {@link Response} whose {@link Response\n*/\n@ServiceMethod(returns = ReturnType.SINGLE)\npublic Mono> updateCertificateWithResponse(CertificateBase certificate) {\nreturn withContext(context -> updateCertificateWithResponse(certificate, context));\n}\nMono> updateCertificateWithResponse(CertificateBase certificateBase, Context context) {\nObjects.requireNonNull(certificateBase, \"The certificate input parameter cannot be null\");\nCertificateUpdateParameters parameters = new CertificateUpdateParameters()\n.tags(certificateBase.tags())\n.certificateAttributes(new CertificateRequestAttributes(certificateBase));\nreturn service.updateCertificate(endpoint, certificateBase.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE, context)\n.doOnRequest(ignored -> logger.info(\"Updating certificate - {}\", certificateBase.name()))\n.doOnSuccess(response -> logger.info(\"Updated the certificate - {}\", certificateBase.name()))\n.doOnError(error -> logger.warning(\"Failed to update the certificate - {}\", certificateBase.name(), error));\n}\n/**\n* Gets information about the certificate which represents {@link CertificateBase certificateBase} from the key vault. This\n* operation requires the certificates/get permission.\n*\n*
The list operations {@link CertificateAsyncClient\n* the {@link Flux} containing {@link CertificateBase base certificate} as output excluding the properties like secretId and keyId of the certificate.\n* This operation can then be used to get the full certificate with its properties excluding the policy from {@code certificateBase}.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.async.certificateclient.getCertificate\n*\n* @param certificateBase The {@link CertificateBase base certificate} holding attributes of the certificate being requested.\n* @throws ResourceNotFoundException when a certificate with {@link CertificateBase\n* @throws HttpRequestException if {@link CertificateBase", + "context_after": "class CertificateAsyncClient {\nstatic final String API_VERSION = \"7.0\";\nstatic final String ACCEPT_LANGUAGE = \"en-US\";\nstatic final int DEFAULT_MAX_PAGE_RESULTS = 25;\nstatic final String CONTENT_TYPE_HEADER_VALUE = \"application/json\";\nprivate final String endpoint;\nprivate final CertificateService service;\nprivate final ClientLogger logger = new ClientLogger(CertificateAsyncClient.class);\n/**\n* Creates a CertificateAsyncClient that uses {@code pipeline} to service requests\n*\n* @param endpoint URL for the Azure KeyVault service.\n* @param pipeline HttpPipeline that the HTTP requests and responses flow through.\n*/\nCertificateAsyncClient(URL endpoint, HttpPipeline pipeline) {\nObjects.requireNonNull(endpoint, KeyVaultErrorCodeStrings.getErrorString(KeyVaultErrorCodeStrings.VAULT_END_POINT_REQUIRED));\nthis.endpoint = endpoint.toString();\nthis.service = RestProxy.create(CertificateService.class, pipeline);\n}\n/**\n* Creates a new certificate. If this is the first version, the certificate resource is created. This operation requires\n* the certificates/create permission.\n*\n*
Code Samples\n* Create certificate is a long running operation. The {@link Poller poller} allows users to automatically poll on the create certificate\n* operation status. It is possible to monitor each intermediate poll response during the poll operation.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.CertificateAsyncClient.createCertificate\n*\n* @param name The name of the certificate to be created.\n* @param policy The policy of the certificate to be created.\n* @param tags The application specific metadata to set.\n* @throws ResourceModifiedException when invalid certificate policy configuration is provided.\n* @return A {@link Poller} polling on the create certificate operation status.\n*/\npublic Poller createCertificate(String name, CertificatePolicy policy, Map tags) {\nreturn new Poller(Duration.ofSeconds(1), createPollOperation(name), activationOperation(name, policy, tags), cancelOperation(name));\n}\nprivate Consumer> cancelOperation(String name) {\nreturn poller -> withContext(context -> cancelCertificateOperationWithResponse(name, context));\n}\nprivate Supplier> activationOperation(String name, CertificatePolicy policy, Map tags) {\nreturn () -> withContext(context -> createCertificateWithResponse(name, policy, tags, context)\n.flatMap(certificateOperationResponse -> Mono.just(certificateOperationResponse.value())));\n}\n/**\n* Creates a new certificate. If this is the first version, the certificate resource is created. This operation requires\n* the certificates/create permission.\n*\n*
Code Samples\n* Create certificate is a long running operation. The {@link Poller poller} allows users to automatically poll on the create certificate\n* operation status. It is possible to monitor each intermediate poll response during the poll operation.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.CertificateAsyncClient.createCertificate\n*\n* @param name The name of the certificate to be created.\n* @param policy The policy of the certificate to be created.\n* @throws ResourceModifiedException when invalid certificate policy configuration is provided.\n* @return A {@link Poller} polling on the create certificate operation status.\n*/\npublic Poller createCertificate(String name, CertificatePolicy policy) {\nreturn createCertificate(name, policy, null);\n}\n/*\nPolling operation to poll on create certificate operation status.\n*/\nprivate Function, Mono>> createPollOperation(String certificateName) {\nreturn prePollResponse -> {\ntry {\nreturn withContext(context -> service.getCertificateOperation(endpoint, certificateName, API_VERSION, ACCEPT_LANGUAGE, CONTENT_TYPE_HEADER_VALUE, context)\n.flatMap(this::processCertificateOperationResponse));\n} catch (HttpRequestException e) {\nlogger.logExceptionAsError(e);\nreturn Mono.just(new PollResponse<>(PollResponse.OperationStatus.FAILED, null));\n}\n};\n}\nprivate Mono> processCertificateOperationResponse(Response certificateOperationResponse) {\nPollResponse.OperationStatus status = null;\nswitch (certificateOperationResponse.value().status()) {\ncase \"inProgress\":\nstatus = PollResponse.OperationStatus.IN_PROGRESS;\nbreak;\ncase \"completed\":\nstatus = PollResponse.OperationStatus.SUCCESSFULLY_COMPLETED;\nbreak;\ncase \"failed\":\nstatus = PollResponse.OperationStatus.FAILED;\nbreak;\ndefault:\nbreak;\n}\nreturn Mono.just(new PollResponse<>(status, certificateOperationResponse.value()));\n}\nMono> createCertificateWithResponse(String name, CertificatePolicy certificatePolicy, Map tags, Context context) {\nCertificateRequestParameters certificateRequestParameters = new CertificateRequestParameters()\n.certificatePolicy(new CertificatePolicyRequest(certificatePolicy))\n.tags(tags);\nreturn service.createCertificate(endpoint, name, API_VERSION, ACCEPT_LANGUAGE, certificateRequestParameters, CONTENT_TYPE_HEADER_VALUE, context);\n}\n/**\n* Gets information about the latest version of the specified certificate. This operation requires the certificates/get permission.\n*\n*
Code Samples\n* Gets a specific version of the key in the key vault. Prints out the\n* returned certificate details when a response has been received.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.CertificateAsyncClient.getCertificateWithPolicy\n*\n* @param name The name of the certificate to retrieve, cannot be null\n* @throws ResourceNotFoundException when a certificate with {@code name} doesn't exist in the key vault.\n* @throws HttpRequestException if {@code name} is empty string.\n* @return A {@link Mono} containing the requested {@link Certificate certificate}.\n*/\n@ServiceMethod(returns = ReturnType.SINGLE)\npublic Mono getCertificateWithPolicy(String name) {\nreturn withContext(context -> getCertificateWithResponse(name, \"\", context)).flatMap(FluxUtil::toMono);\n}\nMono> getCertificateWithResponse(String name, String version, Context context) {\nreturn service.getCertificate(endpoint, name, version, API_VERSION, ACCEPT_LANGUAGE, CONTENT_TYPE_HEADER_VALUE, context)\n.doOnRequest(ignored -> logger.info(\"Retrieving certificate - {}\", name))\n.doOnSuccess(response -> logger.info(\"Retrieved the certificate - {}\", response.value().name()))\n.doOnError(error -> logger.warning(\"Failed to Retrieve the certificate - {}\", name, error));\n}\n/**\n* Gets information about the latest version of the specified certificate. This operation requires the certificates/get permission.\n*\n*
Code Samples\n* Gets a specific version of the key in the key vault. Prints out the\n* returned certificate details when a response has been received.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.CertificateAsyncClient.getCertificateWithResponse\n*\n* @param name The name of the certificate to retrieve, cannot be null\n* @param version The version of the certificate to retrieve. If this is an empty String or null, this call is equivalent to calling {@link CertificateAsyncClient\n* @throws ResourceNotFoundException when a certificate with {@code name} doesn't exist in the key vault.\n* @throws HttpRequestException if {@code name} is empty string.\n* @return A {@link Mono} containing a {@link Response} whose {@link Response\n*/\n@ServiceMethod(returns = ReturnType.SINGLE)\npublic Mono> getCertificateWithResponse(String name, String version) {\nreturn withContext(context -> getCertificateWithResponse(name, version == null ? \"\" : version, context));\n}\n/**\n* Gets information about the specified version of the specified certificate. This operation requires the certificates/get permission.\n*\n*
Code Samples\n* Gets a specific version of the key in the key vault. Prints out the\n* returned certificate details when a response has been received.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.CertificateAsyncClient.getCertificate\n*\n* @param name The name of the certificate to retrieve, cannot be null\n* @param version The version of the certificate to retrieve. If this is an empty String or null, this call is equivalent to calling {@link CertificateAsyncClient\n* @throws ResourceNotFoundException when a certificate with {@code name} doesn't exist in the key vault.\n* @throws HttpRequestException if {@code name} is empty string.\n* @return A {@link Mono} containing the requested {@link Certificate certificate}.\n*/\n@ServiceMethod(returns = ReturnType.SINGLE)\npublic Mono getCertificate(String name, String version) {\nreturn withContext(context -> getCertificateWithResponse(name, version == null ? \"\" : version, context)).flatMap(FluxUtil::toMono);\n}\n/**\n* Updates the specified attributes associated with the specified certificate. The update operation changes specified attributes of an existing\n* stored certificate and attributes that are not specified in the request are left unchanged. This operation requires the certificates/update permission.\n*\n*
Code Samples\n* Gets latest version of the certificate, changes its tags and enabled status and then updates it in the Azure Key Vault. Prints out the\n* returned certificate details when a response has been received.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.CertificateAsyncClient.updateCertificate\n*\n* @param certificate The {@link CertificateBase} object with updated properties.\n* @throws NullPointerException if {@code certificate} is {@code null}.\n* @throws ResourceNotFoundException when a certificate with {@link CertificateBase\n* @throws HttpRequestException if {@link CertificateBase\n* @return A {@link Mono} containing the {@link CertificateBase updated certificate}.\n*/\n@ServiceMethod(returns = ReturnType.SINGLE)\npublic Mono updateCertificate(CertificateBase certificate) {\nreturn withContext(context -> updateCertificateWithResponse(certificate, context)).flatMap(FluxUtil::toMono);\n}\n/**\n* Updates the specified attributes associated with the specified certificate. The update operation changes specified attributes of an existing\n* stored certificate and attributes that are not specified in the request are left unchanged. This operation requires the certificates/update permission.\n*\n*
Code Samples\n* Gets latest version of the certificate, changes its enabled status and then updates it in the Azure Key Vault. Prints out the\n* returned certificate details when a response has been received.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.CertificateAsyncClient.updateCertificateWithResponse\n*\n* @param certificate The {@link CertificateBase} object with updated properties.\n* @throws NullPointerException if {@code certificate} is {@code null}.\n* @throws ResourceNotFoundException when a certificate with {@link CertificateBase\n* @throws HttpRequestException if {@link CertificateBase\n* @return A {@link Mono} containing a {@link Response} whose {@link Response\n*/\n@ServiceMethod(returns = ReturnType.SINGLE)\npublic Mono> updateCertificateWithResponse(CertificateBase certificate) {\nreturn withContext(context -> updateCertificateWithResponse(certificate, context));\n}\nMono> updateCertificateWithResponse(CertificateBase certificateBase, Context context) {\nObjects.requireNonNull(certificateBase, \"The certificate input parameter cannot be null\");\nCertificateUpdateParameters parameters = new CertificateUpdateParameters()\n.tags(certificateBase.tags())\n.certificateAttributes(new CertificateRequestAttributes(certificateBase));\nreturn service.updateCertificate(endpoint, certificateBase.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE, context)\n.doOnRequest(ignored -> logger.info(\"Updating certificate - {}\", certificateBase.name()))\n.doOnSuccess(response -> logger.info(\"Updated the certificate - {}\", certificateBase.name()))\n.doOnError(error -> logger.warning(\"Failed to update the certificate - {}\", certificateBase.name(), error));\n}\n/**\n* Gets information about the certificate which represents the {@link CertificateBase} from the key vault. This\n* operation requires the certificates/get permission.\n*\n*
The list operations {@link CertificateAsyncClient\n* the {@link Flux} containing {@link CertificateBase} as output excluding the properties like secretId and keyId of the certificate.\n* This operation can then be used to get the full certificate with its properties excluding the policy from {@code certificateBase}.
\n*\n* {@codesnippet com.azure.security.keyvault.certificates.CertificateAsyncClient.getCertificate\n*\n* @param certificateBase The {@link CertificateBase} holding attributes of the certificate being requested.\n* @throws ResourceNotFoundException when a certificate with {@link CertificateBase\n* @throws HttpRequestException if {@link CertificateBase" + }, + { + "comment": "Changed the name as upsert imples that we always merge but we really just want to do this for EOI now.", + "method_body": "void addSummary(CommittableSummary summary) {\nlong checkpointId = summary.getCheckpointIdOrEOI();\nSubtaskCommittableManager manager =\nnew SubtaskCommittableManager<>(\nsummary.getNumberOfCommittables(), subtaskId, checkpointId, metricGroup);\nif (checkpointId == CommittableMessage.EOI) {\nSubtaskCommittableManager merged =\nsubtasksCommittableManagers.merge(\nsummary.getSubtaskId(), manager, SubtaskCommittableManager::merge);\n} else {\nSubtaskCommittableManager existing =\nsubtasksCommittableManagers.putIfAbsent(summary.getSubtaskId(), manager);\nif (existing != null) {\nthrow new UnsupportedOperationException(\nString.format(\n\"Received duplicate committable summary for checkpoint %s + subtask %s (new=%s, old=%s). Please check the status of FLINK-25920\",\ncheckpointId, summary.getSubtaskId(), manager, existing));\n}\n}\n}", + "target_code": "SubtaskCommittableManager merged =", + "method_body_after": "void addSummary(CommittableSummary summary) {\nlong checkpointId = summary.getCheckpointIdOrEOI();\nSubtaskCommittableManager manager =\nnew SubtaskCommittableManager<>(\nsummary.getNumberOfCommittables(), subtaskId, checkpointId, metricGroup);\nif (checkpointId == CommittableMessage.EOI) {\nSubtaskCommittableManager merged =\nsubtasksCommittableManagers.merge(\nsummary.getSubtaskId(), manager, SubtaskCommittableManager::merge);\nLOG.debug(\"Adding EOI summary (new={}}, merged={}}", + "context_before": "class CheckpointCommittableManagerImpl implements CheckpointCommittableManager {\n/** Mapping of subtask id to {@link SubtaskCommittableManager}. 
*/\nprivate final Map> subtasksCommittableManagers;\nprivate final long checkpointId;\nprivate final int subtaskId;\nprivate final int numberOfSubtasks;\nprivate final SinkCommitterMetricGroup metricGroup;\nCheckpointCommittableManagerImpl(\nint subtaskId,\nint numberOfSubtasks,\nlong checkpointId,\nSinkCommitterMetricGroup metricGroup) {\nthis(new HashMap<>(), subtaskId, numberOfSubtasks, checkpointId, metricGroup);\n}\nCheckpointCommittableManagerImpl(\nMap> subtasksCommittableManagers,\nint subtaskId,\nint numberOfSubtasks,\nlong checkpointId,\nSinkCommitterMetricGroup metricGroup) {\nthis.subtasksCommittableManagers = checkNotNull(subtasksCommittableManagers);\nthis.subtaskId = subtaskId;\nthis.numberOfSubtasks = numberOfSubtasks;\nthis.checkpointId = checkpointId;\nthis.metricGroup = metricGroup;\n}\n@Override\npublic long getCheckpointId() {\nreturn checkpointId;\n}\nCollection> getSubtaskCommittableManagers() {\nreturn subtasksCommittableManagers.values();\n}\nvoid addCommittable(CommittableWithLineage committable) {\ngetSubtaskCommittableManager(committable.getSubtaskId()).add(committable);\n}\nSubtaskCommittableManager getSubtaskCommittableManager(int subtaskId) {\nSubtaskCommittableManager committables =\nthis.subtasksCommittableManagers.get(subtaskId);\nreturn checkNotNull(committables, \"Unknown subtask for %s\", subtaskId);\n}\n@Override\npublic CommittableSummary getSummary() {\nreturn new CommittableSummary<>(\nsubtaskId,\nnumberOfSubtasks,\ncheckpointId,\nsubtasksCommittableManagers.values().stream()\n.mapToInt(SubtaskCommittableManager::getNumCommittables)\n.sum(),\nsubtasksCommittableManagers.values().stream()\n.mapToInt(SubtaskCommittableManager::getNumPending)\n.sum(),\nsubtasksCommittableManagers.values().stream()\n.mapToInt(SubtaskCommittableManager::getNumFailed)\n.sum());\n}\nboolean isFinished() {\nreturn subtasksCommittableManagers.values().stream()\n.allMatch(SubtaskCommittableManager::isFinished);\n}\n@Override\npublic Collection> commit(Committer committer)\nthrows IOException, InterruptedException {\nCollection> requests = getPendingRequests(true);\nrequests.forEach(CommitRequestImpl::setSelected);\ncommitter.commit(new ArrayList<>(requests));\nrequests.forEach(CommitRequestImpl::setCommittedIfNoError);\nCollection> committed = drainFinished();\nmetricGroup.setCurrentPendingCommittablesGauge(() -> getPendingRequests(false).size());\nreturn committed;\n}\nCollection> getPendingRequests(boolean onlyIfFullyReceived) {\nreturn subtasksCommittableManagers.values().stream()\n.filter(subtask -> !onlyIfFullyReceived || subtask.hasReceivedAll())\n.flatMap(SubtaskCommittableManager::getPendingRequests)\n.collect(Collectors.toList());\n}\nCollection> drainFinished() {\nreturn subtasksCommittableManagers.values().stream()\n.flatMap(subtask -> subtask.drainCommitted().stream())\n.collect(Collectors.toList());\n}\nCheckpointCommittableManagerImpl merge(CheckpointCommittableManagerImpl other) {\ncheckArgument(other.checkpointId == checkpointId);\nCheckpointCommittableManagerImpl merged = copy();\nfor (Map.Entry> subtaskEntry :\nother.subtasksCommittableManagers.entrySet()) {\nmerged.subtasksCommittableManagers.merge(\nsubtaskEntry.getKey(),\nsubtaskEntry.getValue(),\nSubtaskCommittableManager::merge);\n}\nreturn merged;\n}\nCheckpointCommittableManagerImpl copy() {\nreturn new CheckpointCommittableManagerImpl<>(\nsubtasksCommittableManagers.entrySet().stream()\n.collect(Collectors.toMap(Map.Entry::getKey, (e) -> 
e.getValue().copy())),\nsubtaskId,\nnumberOfSubtasks,\ncheckpointId,\nmetricGroup);\n}\n}", + "context_after": "class CheckpointCommittableManagerImpl implements CheckpointCommittableManager {\n/** Mapping of subtask id to {@link SubtaskCommittableManager}. */\nprivate final Map> subtasksCommittableManagers;\nprivate final long checkpointId;\nprivate final int subtaskId;\nprivate final int numberOfSubtasks;\nprivate final SinkCommitterMetricGroup metricGroup;\nprivate static final Logger LOG =\nLoggerFactory.getLogger(CheckpointCommittableManagerImpl.class);\nCheckpointCommittableManagerImpl(\nint subtaskId,\nint numberOfSubtasks,\nlong checkpointId,\nSinkCommitterMetricGroup metricGroup) {\nthis(new HashMap<>(), subtaskId, numberOfSubtasks, checkpointId, metricGroup);\n}\nCheckpointCommittableManagerImpl(\nMap> subtasksCommittableManagers,\nint subtaskId,\nint numberOfSubtasks,\nlong checkpointId,\nSinkCommitterMetricGroup metricGroup) {\nthis.subtasksCommittableManagers = checkNotNull(subtasksCommittableManagers);\nthis.subtaskId = subtaskId;\nthis.numberOfSubtasks = numberOfSubtasks;\nthis.checkpointId = checkpointId;\nthis.metricGroup = metricGroup;\n}\n@Override\npublic long getCheckpointId() {\nreturn checkpointId;\n}\nCollection> getSubtaskCommittableManagers() {\nreturn subtasksCommittableManagers.values();\n}\n).\", manager, merged);\n}" + }, + { + "comment": "will adjust message to make it work", + "method_body": "static private void checkTls(boolean expect, int ... values) {\nbyte[] data = new byte[values.length];\nfor (int i = 0; i < data.length; i++) {\ndata[i] = (byte) values[i];\n}\nboolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);\nif(actual != expect) {\nthrow new AssertionError(message(data, actual));\n}\n}", + "target_code": "if(actual != expect) {", + "method_body_after": "static private void checkTls(boolean expect, int ... 
values) {\nbyte[] data = new byte[values.length];\nfor (int i = 0; i < data.length; i++) {\ndata[i] = (byte) values[i];\n}\nboolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);\nif(actual != expect) {\nthrow new AssertionError(message(data, actual));\n}\n}", + "context_before": "class TlsDetectionTest {\nstatic private String message(byte[] data, boolean actual) {\nString msg = \"[\";\nString delimiter = \"\";\nfor (byte b: data) {\nmsg += delimiter + (b & 0xff);\ndelimiter = \", \";\n}\nif (actual) {\nmsg += \"] wrongfully detected as tls\";\n} else {\nmsg += \"] wrongfully rejected as not tls\";\n}\nreturn msg;\n}\n@org.junit.Test public void testValidHandshake() {\ncheckTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);\n}\n@org.junit.Test public void testDataOfWrongSize() {\ncheckTls(false, 22, 3, 1, 10, 255, 1, 0, 10);\ncheckTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);\n}\n@org.junit.Test public void testDataNotTaggedAsHandshake() {\ncheckTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);\n}\n@org.junit.Test public void testDataWithBadMajorVersion() {\ncheckTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);\n}\n@org.junit.Test public void testDataWithBadMinorVersion() {\ncheckTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);\n}\n@org.junit.Test public void testDataNotTaggedAsClientHello() {\ncheckTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);\ncheckTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);\n}\n@org.junit.Test public void testFrameSizeLimits() {\ncheckTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);\ncheckTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);\ncheckTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);\ncheckTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);\ncheckTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);\ncheckTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);\n}\n@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {\ncheckTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);\ncheckTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);\ncheckTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);\ncheckTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);\ncheckTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);\ncheckTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);\ncheckTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);\ncheckTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);\ncheckTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);\ncheckTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);\ncheckTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);\ncheckTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);\ncheckTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);\ncheckTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);\n}\n}", + "context_after": "class TlsDetectionTest {\nstatic private String message(byte[] data, boolean actual) {\nString msg = \"[\";\nString delimiter = \"\";\nfor (byte b: data) {\nmsg += delimiter + (b & 0xff);\ndelimiter = \", \";\n}\nif (actual) {\nmsg += \"] wrongfully detected as tls\";\n} else {\nmsg += \"] wrongfully rejected as not tls\";\n}\nreturn msg;\n}\n@org.junit.Test public void testValidHandshake() {\ncheckTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);\n}\n@org.junit.Test public void 
testDataOfWrongSize() {\ncheckTls(false, 22, 3, 1, 10, 255, 1, 0, 10);\ncheckTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);\n}\n@org.junit.Test public void testDataNotTaggedAsHandshake() {\ncheckTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);\n}\n@org.junit.Test public void testDataWithBadMajorVersion() {\ncheckTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);\n}\n@org.junit.Test public void testDataWithBadMinorVersion() {\ncheckTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);\n}\n@org.junit.Test public void testDataNotTaggedAsClientHello() {\ncheckTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);\ncheckTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);\n}\n@org.junit.Test public void testFrameSizeLimits() {\ncheckTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);\ncheckTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);\ncheckTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);\ncheckTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);\ncheckTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);\ncheckTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);\n}\n@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {\ncheckTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);\ncheckTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);\ncheckTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);\ncheckTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);\ncheckTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);\ncheckTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);\ncheckTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);\ncheckTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);\ncheckTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);\ncheckTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);\ncheckTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);\ncheckTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);\ncheckTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);\ncheckTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);\ncheckTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);\n}\n}" + }, + { + "comment": "I agree this is a good default, but maybe what we do should be configurable by the user?", + "method_body": "public void handle(AsyncResult ar) {\nif (ar.failed()) {\nlog.error(\"Unable to reload the TLS certificate, keeping the current one.\", ar.cause());\n} else {\nif (ar.result()) {\nlog.debug(\"TLS certificates updated\");\n}\n}\n}", + "target_code": "log.error(\"Unable to reload the TLS certificate, keeping the current one.\", ar.cause());", + "method_body_after": "public void handle(AsyncResult ar) {\nif (ar.failed()) {\nlog.error(\"Unable to reload the TLS certificate, keeping the current one.\", ar.cause());\n} else {\nif (ar.result()) {\nlog.debug(\"TLS certificates updated\");\n}\n}\n}", + "context_before": "class TlsCertificateReloadUtils {\npublic static long handleCertificateReloading(Vertx vertx, HttpServer server,\nHttpServerOptions options, HttpConfiguration configuration) {\nif (configuration.ssl.certificate.reloadPeriod.isEmpty()) {\nreturn -1;\n}\nif (configuration.ssl.certificate.reloadPeriod.get().toMillis() < 1000) {\nthrow new IllegalArgumentException(\n\"Unable to configure TLS reloading - The reload period cannot be less than 1 second\");\n}\nif (options == null) {\nthrow new IllegalArgumentException(\"Unable to configure TLS reloading - The HTTP server options were not provided\");\n}\nSSLOptions ssl = 
options.getSslOptions();\nif (ssl == null) {\nthrow new IllegalArgumentException(\"Unable to configure TLS reloading - TLS/SSL is not enabled on the server\");\n}\nLogger log = Logger.getLogger(TlsCertificateReloadUtils.class);\nreturn vertx.setPeriodic(configuration.ssl.certificate.reloadPeriod.get().toMillis(), id -> {\nvertx.executeBlocking(new Callable() {\n@Override\npublic SSLOptions call() throws Exception {\nvar c = reloadFileContent(ssl, configuration);\nif (c.equals(ssl)) {\nreturn null;\n}\nreturn c;\n}\n}, true)\n.flatMap(new Function>() {\n@Override\npublic Future apply(SSLOptions res) {\nif (res != null) {\nreturn server.updateSSLOptions(res);\n} else {\nreturn Future.succeededFuture(false);\n}\n}\n})\n.onComplete(new Handler>() {\n@Override\n});\n});\n}\nprivate static SSLOptions reloadFileContent(SSLOptions ssl, HttpConfiguration configuration) throws IOException {\nvar copy = new SSLOptions(ssl);\nfinal List keys = new ArrayList<>();\nfinal List certificates = new ArrayList<>();\nif (configuration.ssl.certificate.keyFiles.isPresent()) {\nkeys.addAll(configuration.ssl.certificate.keyFiles.get());\n}\nif (configuration.ssl.certificate.files.isPresent()) {\ncertificates.addAll(configuration.ssl.certificate.files.get());\n}\nif (!certificates.isEmpty() && !keys.isEmpty()) {\nList certBuffer = new ArrayList<>();\nList keysBuffer = new ArrayList<>();\nfor (Path p : certificates) {\nbyte[] cert = getFileContent(p);\ncertBuffer.add(Buffer.buffer(cert));\n}\nfor (Path p : keys) {\nbyte[] key = getFileContent(p);\nkeysBuffer.add(Buffer.buffer(key));\n}\nPemKeyCertOptions opts = new PemKeyCertOptions()\n.setCertValues(certBuffer)\n.setKeyValues(keysBuffer);\ncopy.setKeyCertOptions(opts);\n} else if (configuration.ssl.certificate.keyStoreFile.isPresent()) {\nvar opts = ((KeyStoreOptions) copy.getKeyCertOptions());\nopts.setValue(Buffer.buffer(getFileContent(configuration.ssl.certificate.keyStoreFile.get())));\ncopy.setKeyCertOptions(opts);\n}\nif (configuration.ssl.certificate.trustStoreFile.isPresent()) {\nvar opts = ((KeyStoreOptions) copy.getKeyCertOptions());\nopts.setValue(Buffer.buffer(getFileContent(configuration.ssl.certificate.trustStoreFile.get())));\ncopy.setTrustOptions(opts);\n}\nreturn copy;\n}\n}", + "context_after": "class TlsCertificateReloadUtils {\npublic static long handleCertificateReloading(Vertx vertx, HttpServer server,\nHttpServerOptions options, HttpConfiguration configuration) {\nif (configuration.ssl.certificate.reloadPeriod.isEmpty()) {\nreturn -1;\n}\nif (configuration.ssl.certificate.reloadPeriod.get().toMillis() < 30_000) {\nthrow new IllegalArgumentException(\n\"Unable to configure TLS reloading - The reload period cannot be less than 30 seconds\");\n}\nif (options == null) {\nthrow new IllegalArgumentException(\"Unable to configure TLS reloading - The HTTP server options were not provided\");\n}\nSSLOptions ssl = options.getSslOptions();\nif (ssl == null) {\nthrow new IllegalArgumentException(\"Unable to configure TLS reloading - TLS/SSL is not enabled on the server\");\n}\nLogger log = Logger.getLogger(TlsCertificateReloadUtils.class);\nreturn vertx.setPeriodic(configuration.ssl.certificate.reloadPeriod.get().toMillis(), new Handler() {\n@Override\npublic void handle(Long id) {\nvertx.executeBlocking(new Callable() {\n@Override\npublic SSLOptions call() throws Exception {\nvar c = reloadFileContent(ssl, configuration);\nif (c.equals(ssl)) {\nreturn null;\n}\nreturn c;\n}\n}, true)\n.flatMap(new Function>() {\n@Override\npublic Future apply(SSLOptions 
res) {\nif (res != null) {\nreturn server.updateSSLOptions(res);\n} else {\nreturn Future.succeededFuture(false);\n}\n}\n})\n.onComplete(new Handler>() {\n@Override\n});\n}\n});\n}\nprivate static SSLOptions reloadFileContent(SSLOptions ssl, HttpConfiguration configuration) throws IOException {\nvar copy = new SSLOptions(ssl);\nfinal List keys = new ArrayList<>();\nfinal List certificates = new ArrayList<>();\nif (configuration.ssl.certificate.keyFiles.isPresent()) {\nkeys.addAll(configuration.ssl.certificate.keyFiles.get());\n}\nif (configuration.ssl.certificate.files.isPresent()) {\ncertificates.addAll(configuration.ssl.certificate.files.get());\n}\nif (!certificates.isEmpty() && !keys.isEmpty()) {\nList certBuffer = new ArrayList<>();\nList keysBuffer = new ArrayList<>();\nfor (Path p : certificates) {\nbyte[] cert = getFileContent(p);\ncertBuffer.add(Buffer.buffer(cert));\n}\nfor (Path p : keys) {\nbyte[] key = getFileContent(p);\nkeysBuffer.add(Buffer.buffer(key));\n}\nPemKeyCertOptions opts = new PemKeyCertOptions()\n.setCertValues(certBuffer)\n.setKeyValues(keysBuffer);\ncopy.setKeyCertOptions(opts);\n} else if (configuration.ssl.certificate.keyStoreFile.isPresent()) {\nvar opts = ((KeyStoreOptions) copy.getKeyCertOptions());\nopts.setValue(Buffer.buffer(getFileContent(configuration.ssl.certificate.keyStoreFile.get())));\ncopy.setKeyCertOptions(opts);\n}\nif (configuration.ssl.certificate.trustStoreFile.isPresent()) {\nvar opts = ((KeyStoreOptions) copy.getKeyCertOptions());\nopts.setValue(Buffer.buffer(getFileContent(configuration.ssl.certificate.trustStoreFile.get())));\ncopy.setTrustOptions(opts);\n}\nreturn copy;\n}\n}" + }, + { + "comment": "I misread this, I thought it was happening at runtime. It's not ideal that it connects to each tenant sequentially but it is probably fine for now, sorry for the noise.", + "method_body": "private TenantConfigContext createTenantContext(Vertx vertx, OidcTenantConfig oidcConfig) {\nOAuth2ClientOptions options = new OAuth2ClientOptions();\nif (!oidcConfig.getAuthServerUrl().isPresent()) {\nreturn null;\n}\noptions.setSite(oidcConfig.getAuthServerUrl().get());\nif (oidcConfig.getIntrospectionPath().isPresent()) {\noptions.setIntrospectionPath(oidcConfig.getIntrospectionPath().get());\n}\nif (oidcConfig.getJwksPath().isPresent()) {\noptions.setJwkPath(oidcConfig.getJwksPath().get());\n}\nif (oidcConfig.getClientId().isPresent()) {\noptions.setClientID(oidcConfig.getClientId().get());\n}\nif (oidcConfig.getCredentials().secret.isPresent()) {\noptions.setClientSecret(oidcConfig.getCredentials().secret.get());\n}\nif (oidcConfig.getPublicKey().isPresent()) {\noptions.addPubSecKey(new PubSecKeyOptions()\n.setAlgorithm(\"RS256\")\n.setPublicKey(oidcConfig.getPublicKey().get()));\n}\nif (oidcConfig.getToken().issuer.isPresent()) {\noptions.setValidateIssuer(false);\n}\nif (oidcConfig.getToken().getExpirationGrace().isPresent()) {\nJWTOptions jwtOptions = new JWTOptions();\njwtOptions.setLeeway(oidcConfig.getToken().getExpirationGrace().get());\noptions.setJWTOptions(jwtOptions);\n}\nfinal long connectionDelayInSecs = oidcConfig.getConnectionDelay().isPresent()\n? oidcConfig.getConnectionDelay().get().toMillis() / 1000\n: 0;\nfinal long connectionRetryCount = connectionDelayInSecs > 1 ? 
connectionDelayInSecs / 2 : 1;\nif (connectionRetryCount > 1) {\nLOG.infof(\"Connecting to IDP for up to %d times every 2 seconds\", connectionRetryCount);\n}\nOAuth2Auth auth = null;\nfor (long i = 0; i < connectionRetryCount; i++) {\ntry {\nCompletableFuture cf = new CompletableFuture<>();\nKeycloakAuth.discover(vertx, options, new Handler>() {\n@Override\npublic void handle(AsyncResult event) {\nif (event.failed()) {\ncf.completeExceptionally(toOidcException(event.cause()));\n} else {\ncf.complete(event.result());\n}\n}\n});\nauth = cf.join();\nbreak;\n} catch (Throwable throwable) {\nwhile (throwable instanceof CompletionException && throwable.getCause() != null) {\nthrowable = throwable.getCause();\n}\nif (throwable instanceof OIDCException) {\nif (i + 1 < connectionRetryCount) {\nCompletableFuture.runAsync(\nnew Runnable() {\n@Override\npublic void run() {\ntry {\nThread.sleep(2000);\n} catch (InterruptedException iex) {\n}\n}\n})\n.join();\n} else {\nthrow (OIDCException) throwable;\n}\n} else {\nthrow new OIDCException(throwable);\n}\n}\n}\nreturn new TenantConfigContext(auth, oidcConfig);\n}", + "target_code": ".join();", + "method_body_after": "private TenantConfigContext createTenantContext(Vertx vertx, OidcTenantConfig oidcConfig) {\nOAuth2ClientOptions options = new OAuth2ClientOptions();\nif (!oidcConfig.getAuthServerUrl().isPresent()) {\nreturn null;\n}\noptions.setSite(oidcConfig.getAuthServerUrl().get());\nif (oidcConfig.getIntrospectionPath().isPresent()) {\noptions.setIntrospectionPath(oidcConfig.getIntrospectionPath().get());\n}\nif (oidcConfig.getJwksPath().isPresent()) {\noptions.setJwkPath(oidcConfig.getJwksPath().get());\n}\nif (oidcConfig.getClientId().isPresent()) {\noptions.setClientID(oidcConfig.getClientId().get());\n}\nif (oidcConfig.getCredentials().secret.isPresent()) {\noptions.setClientSecret(oidcConfig.getCredentials().secret.get());\n}\nif (oidcConfig.getPublicKey().isPresent()) {\noptions.addPubSecKey(new PubSecKeyOptions()\n.setAlgorithm(\"RS256\")\n.setPublicKey(oidcConfig.getPublicKey().get()));\n}\nif (oidcConfig.getToken().issuer.isPresent()) {\noptions.setValidateIssuer(false);\n}\nif (oidcConfig.getToken().getExpirationGrace().isPresent()) {\nJWTOptions jwtOptions = new JWTOptions();\njwtOptions.setLeeway(oidcConfig.getToken().getExpirationGrace().get());\noptions.setJWTOptions(jwtOptions);\n}\nfinal long connectionDelayInSecs = oidcConfig.getConnectionDelay().isPresent()\n? oidcConfig.getConnectionDelay().get().toMillis() / 1000\n: 0;\nfinal long connectionRetryCount = connectionDelayInSecs > 1 ? 
connectionDelayInSecs / 2 : 1;\nif (connectionRetryCount > 1) {\nLOG.infof(\"Connecting to IDP for up to %d times every 2 seconds\", connectionRetryCount);\n}\nOAuth2Auth auth = null;\nfor (long i = 0; i < connectionRetryCount; i++) {\ntry {\nCompletableFuture cf = new CompletableFuture<>();\nKeycloakAuth.discover(vertx, options, new Handler>() {\n@Override\npublic void handle(AsyncResult event) {\nif (event.failed()) {\ncf.completeExceptionally(toOidcException(event.cause()));\n} else {\ncf.complete(event.result());\n}\n}\n});\nauth = cf.join();\nbreak;\n} catch (Throwable throwable) {\nwhile (throwable instanceof CompletionException && throwable.getCause() != null) {\nthrowable = throwable.getCause();\n}\nif (throwable instanceof OIDCException) {\nif (i + 1 < connectionRetryCount) {\ntry {\nThread.sleep(2000);\n} catch (InterruptedException iex) {\n}\n} else {\nthrow (OIDCException) throwable;\n}\n} else {\nthrow new OIDCException(throwable);\n}\n}\n}\nreturn new TenantConfigContext(auth, oidcConfig);\n}", + "context_before": "class OidcRecorder {\nprivate static final Logger LOG = Logger.getLogger(OidcRecorder.class);\npublic void setup(OidcConfig config, RuntimeValue vertx, BeanContainer beanContainer) {\nfinal Vertx vertxValue = vertx.getValue();\nMap tenantsConfig = new HashMap<>();\nfor (Map.Entry tenant : config.namedTenants.entrySet()) {\nif (config.defaultTenant.getTenantId().isPresent()\n&& tenant.getKey().equals(config.defaultTenant.getTenantId().get())) {\nthrow new OIDCException(\"tenant-id '\" + tenant.getKey() + \"' duplicates the default tenant-id\");\n}\nif (tenant.getValue().getTenantId().isPresent() && !tenant.getKey().equals(tenant.getValue().getTenantId().get())) {\nthrow new OIDCException(\"Configuration has 2 different tenant-id values: '\"\n+ tenant.getKey() + \"' and '\" + tenant.getValue().getTenantId().get() + \"'\");\n}\ntenantsConfig.put(tenant.getKey(), createTenantContext(vertxValue, tenant.getValue()));\n}\nDefaultTenantConfigResolver resolver = beanContainer.instance(DefaultTenantConfigResolver.class);\nresolver.setDefaultTenant(createTenantContext(vertxValue, config.defaultTenant));\nresolver.setTenantsConfig(tenantsConfig);\nresolver.setTenantConfigContextFactory(new Function() {\n@Override\npublic TenantConfigContext apply(OidcTenantConfig config) {\nreturn createTenantContext(vertxValue, config);\n}\n});\n}\nprotected static OIDCException toOidcException(Throwable cause) {\nfinal String message = \"OIDC server is not available at the 'quarkus.oidc.auth-server-url' URL. \"\n+ \"Please make sure it is correct. 
Note it has to end with a realm value if you work with Keycloak, for example:\"\n+ \" 'https:\nreturn new OIDCException(message, cause);\n}\n}", + "context_after": "class OidcRecorder {\nprivate static final Logger LOG = Logger.getLogger(OidcRecorder.class);\npublic void setup(OidcConfig config, RuntimeValue vertx, BeanContainer beanContainer) {\nfinal Vertx vertxValue = vertx.getValue();\nMap tenantsConfig = new HashMap<>();\nfor (Map.Entry tenant : config.namedTenants.entrySet()) {\nif (config.defaultTenant.getTenantId().isPresent()\n&& tenant.getKey().equals(config.defaultTenant.getTenantId().get())) {\nthrow new OIDCException(\"tenant-id '\" + tenant.getKey() + \"' duplicates the default tenant-id\");\n}\nif (tenant.getValue().getTenantId().isPresent() && !tenant.getKey().equals(tenant.getValue().getTenantId().get())) {\nthrow new OIDCException(\"Configuration has 2 different tenant-id values: '\"\n+ tenant.getKey() + \"' and '\" + tenant.getValue().getTenantId().get() + \"'\");\n}\ntenantsConfig.put(tenant.getKey(), createTenantContext(vertxValue, tenant.getValue()));\n}\nDefaultTenantConfigResolver resolver = beanContainer.instance(DefaultTenantConfigResolver.class);\nresolver.setDefaultTenant(createTenantContext(vertxValue, config.defaultTenant));\nresolver.setTenantsConfig(tenantsConfig);\nresolver.setTenantConfigContextFactory(new Function() {\n@Override\npublic TenantConfigContext apply(OidcTenantConfig config) {\nreturn createTenantContext(vertxValue, config);\n}\n});\n}\nprotected static OIDCException toOidcException(Throwable cause) {\nfinal String message = \"OIDC server is not available at the 'quarkus.oidc.auth-server-url' URL. \"\n+ \"Please make sure it is correct. Note it has to end with a realm value if you work with Keycloak, for example:\"\n+ \" 'https:\nreturn new OIDCException(message, cause);\n}\n}" + }, + { + "comment": "that's a good point :+1: ", + "method_body": "void testExceptionForwarding() throws Throwable {\nfinal CreateBuilder mockCreateBuilder =\nmock(CreateBuilder.class, Mockito.RETURNS_DEEP_STUBS);\nfinal Exception testException = new Exception(\"Test exception\");\ntry (final CuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper =\nZooKeeperUtils.startCuratorFramework(\nconfiguration, NoOpFatalErrorHandler.INSTANCE)) {\nCuratorFramework client = spy(curatorFrameworkWrapper.asCuratorFramework());\ndoAnswer(invocation -> mockCreateBuilder).when(client).create();\nwhen(mockCreateBuilder\n.creatingParentsIfNeeded()\n.withMode(ArgumentMatchers.any(CreateMode.class))\n.forPath(anyString(), any(byte[].class)))\n.thenThrow(testException);\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\ntry (ZooKeeperLeaderElectionDriver leaderElectionDriver =\ncreateAndInitLeaderElectionDriver(client, electionEventHandler)) {\nelectionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class);\nleaderElectionDriver.publishLeaderInformation(\nCONTENDER_ID, LeaderInformation.known(UUID.randomUUID(), \"some-address\"));\nfinal LeaderElectionEvent.ErrorEvent errorEvent =\nelectionEventHandler.await(LeaderElectionEvent.ErrorEvent.class);\nassertThat(errorEvent.getError()).isEqualTo(testException);\n} finally {\nelectionEventHandler.throwIfErrorEventHappened();\n}\n}\n}", + "target_code": "electionEventHandler.throwIfErrorEventHappened();", + "method_body_after": "void testExceptionForwarding() throws Exception {\nfinal CreateBuilder mockCreateBuilder =\nmock(CreateBuilder.class, Mockito.RETURNS_DEEP_STUBS);\nfinal 
Exception testException = new Exception(\"Test exception\");\ntry (final CuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper =\nZooKeeperUtils.startCuratorFramework(\nconfiguration, NoOpFatalErrorHandler.INSTANCE)) {\nCuratorFramework client = spy(curatorFrameworkWrapper.asCuratorFramework());\ndoAnswer(invocation -> mockCreateBuilder).when(client).create();\nwhen(mockCreateBuilder\n.creatingParentsIfNeeded()\n.withMode(ArgumentMatchers.any(CreateMode.class))\n.forPath(anyString(), any(byte[].class)))\n.thenThrow(testException);\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\ntry (ZooKeeperLeaderElectionDriver leaderElectionDriver =\ncreateAndInitLeaderElectionDriver(client, electionEventHandler)) {\nelectionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class);\nleaderElectionDriver.publishLeaderInformation(\nCONTENDER_ID, LeaderInformation.known(UUID.randomUUID(), \"some-address\"));\nfinal LeaderElectionEvent.ErrorEvent errorEvent =\nelectionEventHandler.await(LeaderElectionEvent.ErrorEvent.class);\nassertThat(errorEvent.getError()).isEqualTo(testException);\n} finally {\nelectionEventHandler.failIfErrorEventHappened();\n}\n}\n}", + "context_before": "class ZooKeeperLeaderElectionTest {\nprivate final ZooKeeperExtension zooKeeperExtension = new ZooKeeperExtension();\n@RegisterExtension\nfinal EachCallbackWrapper zooKeeperResource =\nnew EachCallbackWrapper<>(zooKeeperExtension);\n@RegisterExtension\nfinal TestingFatalErrorHandlerExtension testingFatalErrorHandlerResource =\nnew TestingFatalErrorHandlerExtension();\nprivate Configuration configuration;\nprivate static final String CONTENDER_ID = \"contender-id\";\nprivate static final String LEADER_ADDRESS = \"akka\nprivate static final long timeout = 200L * 1000L;\nprivate static final Logger LOG = LoggerFactory.getLogger(ZooKeeperLeaderElectionTest.class);\n@BeforeEach\nvoid before() {\nconfiguration = new Configuration();\nconfiguration.setString(\nHighAvailabilityOptions.HA_ZOOKEEPER_QUORUM,\nzooKeeperResource.getCustomExtension().getConnectString());\nconfiguration.setString(HighAvailabilityOptions.HA_MODE, \"zookeeper\");\n}\n/** Tests that the ZooKeeperLeaderElection/RetrievalService return both the correct URL. */\n@Test\nvoid testZooKeeperLeaderElectionRetrieval() throws Throwable {\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\nfinal TestingLeaderRetrievalEventHandler retrievalEventHandler =\nnew TestingLeaderRetrievalEventHandler();\ntry (LeaderElectionDriver leaderElectionDriver =\ncreateAndInitLeaderElectionDriver(\ncreateZooKeeperClient(), electionEventHandler);\nLeaderRetrievalDriver leaderRetrievalDriver =\nZooKeeperUtils.createLeaderRetrievalDriverFactory(\ncreateZooKeeperClient(), CONTENDER_ID)\n.createLeaderRetrievalDriver(\nretrievalEventHandler,\nretrievalEventHandler::handleError)) {\nelectionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class);\nfinal UUID leaderSessionID = UUID.randomUUID();\nleaderElectionDriver.publishLeaderInformation(\nCONTENDER_ID, LeaderInformation.known(leaderSessionID, LEADER_ADDRESS));\nretrievalEventHandler.waitForNewLeader();\nassertThat(retrievalEventHandler.getLeaderSessionID()).isEqualTo(leaderSessionID);\nassertThat(retrievalEventHandler.getAddress()).isEqualTo(LEADER_ADDRESS);\n} finally {\nelectionEventHandler.throwIfErrorEventHappened();\n}\n}\n/**\n* Tests repeatedly the reelection of still available LeaderContender. 
After a contender has\n* been elected as the leader, it is removed. This forces the DefaultLeaderElectionService to\n* elect a new leader.\n*/\n@Test\nvoid testZooKeeperReelection() throws Exception {\nDeadline deadline = Deadline.fromNow(Duration.ofMinutes(5L));\nint num = 10;\nDefaultLeaderElectionService[] leaderElectionService =\nnew DefaultLeaderElectionService[num];\nLeaderElection[] leaderElections = new LeaderElection[num];\nTestingContender[] contenders = new TestingContender[num];\nDefaultLeaderRetrievalService leaderRetrievalService = null;\nTestingListener listener = new TestingListener();\ntry {\nleaderRetrievalService =\nZooKeeperUtils.createLeaderRetrievalService(\ncreateZooKeeperClient(), CONTENDER_ID, new Configuration());\nLOG.debug(\"Start leader retrieval service for the TestingListener.\");\nleaderRetrievalService.start(listener);\nfor (int i = 0; i < num; i++) {\nfinal LeaderElectionDriverFactory driverFactory =\nnew ZooKeeperLeaderElectionDriverFactory(createZooKeeperClient());\nleaderElectionService[i] =\nnew DefaultLeaderElectionService(\ndriverFactory,\ntestingFatalErrorHandlerResource.getTestingFatalErrorHandler());\nleaderElectionService[i].startLeaderElectionBackend();\nleaderElections[i] = leaderElectionService[i].createLeaderElection(CONTENDER_ID);\ncontenders[i] = new TestingContender(createAddress(i), leaderElections[i]);\nLOG.debug(\"Start leader election service for contender\ncontenders[i].startLeaderElection();\n}\nString pattern = LEADER_ADDRESS + \"_\" + \"(\\\\d+)\";\nPattern regex = Pattern.compile(pattern);\nint numberSeenLeaders = 0;\nwhile (deadline.hasTimeLeft() && numberSeenLeaders < num) {\nLOG.debug(\"Wait for new leader\nString address = listener.waitForNewLeader();\nMatcher m = regex.matcher(address);\nif (m.find()) {\nint index = Integer.parseInt(m.group(1));\nTestingContender contender = contenders[index];\nif (address.equals(createAddress(index))\n&& listener.getLeaderSessionID()\n.equals(contender.getLeaderSessionID())) {\nLOG.debug(\n\"Stop leader election service of contender\nnumberSeenLeaders);\nleaderElections[index].close();\nleaderElections[index] = null;\nleaderElectionService[index].close();\nleaderElectionService[index] = null;\nnumberSeenLeaders++;\n}\n} else {\nfail(\"Did not find the leader's index.\");\n}\n}\nassertThat(deadline.isOverdue())\n.as(\"Did not complete the leader reelection in time.\")\n.isFalse();\nassertThat(num).isEqualTo(numberSeenLeaders);\n} finally {\nif (leaderRetrievalService != null) {\nleaderRetrievalService.stop();\n}\nfor (LeaderElection leaderElection : leaderElections) {\nif (leaderElection != null) {\nleaderElection.close();\n}\n}\nfor (DefaultLeaderElectionService electionService : leaderElectionService) {\nif (electionService != null) {\nelectionService.close();\n}\n}\n}\n}\nprivate String createAddress(int i) {\nreturn LEADER_ADDRESS + \"_\" + i;\n}\n/**\n* Tests the repeated reelection of {@link LeaderContender} once the current leader dies.\n* Furthermore, it tests that new LeaderElectionServices can be started later on and that they\n* successfully register at ZooKeeper and take part in the leader election.\n*/\n@Test\nvoid testZooKeeperReelectionWithReplacement() throws Exception {\nint num = 3;\nint numTries = 30;\nDefaultLeaderElectionService[] leaderElectionService =\nnew DefaultLeaderElectionService[num];\nLeaderElection[] leaderElections = new LeaderElection[num];\nTestingContender[] contenders = new TestingContender[num];\nDefaultLeaderRetrievalService leaderRetrievalService = 
null;\nTestingListener listener = new TestingListener();\ntry {\nleaderRetrievalService =\nZooKeeperUtils.createLeaderRetrievalService(\ncreateZooKeeperClient(), CONTENDER_ID, new Configuration());\nleaderRetrievalService.start(listener);\nfor (int i = 0; i < num; i++) {\nfinal LeaderElectionDriverFactory driverFactory =\nnew ZooKeeperLeaderElectionDriverFactory(createZooKeeperClient());\nleaderElectionService[i] =\nnew DefaultLeaderElectionService(\ndriverFactory,\ntestingFatalErrorHandlerResource.getTestingFatalErrorHandler());\nleaderElectionService[i].startLeaderElectionBackend();\nleaderElections[i] = leaderElectionService[i].createLeaderElection(CONTENDER_ID);\ncontenders[i] =\nnew TestingContender(LEADER_ADDRESS + \"_\" + i + \"_0\", leaderElections[i]);\ncontenders[i].startLeaderElection();\n}\nString pattern = LEADER_ADDRESS + \"_\" + \"(\\\\d+)\" + \"_\" + \"(\\\\d+)\";\nPattern regex = Pattern.compile(pattern);\nfor (int i = 0; i < numTries; i++) {\nlistener.waitForNewLeader();\nString address = listener.getAddress();\nMatcher m = regex.matcher(address);\nif (m.find()) {\nint index = Integer.parseInt(m.group(1));\nint lastTry = Integer.parseInt(m.group(2));\nassertThat(listener.getLeaderSessionID())\n.isEqualTo(contenders[index].getLeaderSessionID());\nleaderElections[index].close();\nleaderElections[index] = null;\nleaderElectionService[index].close();\nleaderElections[index] = null;\nfinal LeaderElectionDriverFactory driverFactory =\nnew ZooKeeperLeaderElectionDriverFactory(createZooKeeperClient());\nleaderElectionService[index] =\nnew DefaultLeaderElectionService(\ndriverFactory,\ntestingFatalErrorHandlerResource.getTestingFatalErrorHandler());\nleaderElectionService[index].startLeaderElectionBackend();\nleaderElections[index] =\nleaderElectionService[index].createLeaderElection(CONTENDER_ID);\ncontenders[index] =\nnew TestingContender(\nLEADER_ADDRESS + \"_\" + index + \"_\" + (lastTry + 1),\nleaderElections[index]);\ncontenders[index].startLeaderElection();\n} else {\nthrow new Exception(\"Did not find the leader's index.\");\n}\n}\n} finally {\nif (leaderRetrievalService != null) {\nleaderRetrievalService.stop();\n}\nfor (LeaderElection leaderElection : leaderElections) {\nif (leaderElection != null) {\nleaderElection.close();\n}\n}\nfor (DefaultLeaderElectionService electionService : leaderElectionService) {\nif (electionService != null) {\nelectionService.close();\n}\n}\n}\n}\n/** Tests that the leader update information will not be notified repeatedly. 
*/\n@Test\nvoid testLeaderChangeWriteLeaderInformationOnlyOnce() throws Throwable {\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\ntry (ZooKeeperLeaderElectionDriver leaderElectionDriver =\ncreateAndInitLeaderElectionDriver(createZooKeeperClient(), electionEventHandler)) {\nelectionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class);\nleaderElectionDriver.publishLeaderInformation(\nCONTENDER_ID, LeaderInformation.known(UUID.randomUUID(), LEADER_ADDRESS));\nelectionEventHandler.await(LeaderElectionEvent.LeaderInformationChangeEvent.class);\nassertThat(\nelectionEventHandler.await(\nLeaderElectionEvent.LeaderInformationChangeEvent.class,\nDuration.ofMillis(5)))\n.as(\"Another leader information update is not expected.\")\n.isEmpty();\n} finally {\nelectionEventHandler.throwIfErrorEventHappened();\n}\n}\n/**\n* Test that errors in the {@link LeaderElectionDriver} are correctly forwarded to the {@link\n* LeaderContender}.\n*/\n@Test\n/**\n* Tests that there is no information left in the ZooKeeper cluster after the ZooKeeper client\n* has terminated. In other words, checks that the ZooKeeperLeaderElection service uses\n* ephemeral nodes.\n*/\n@Test\nvoid testEphemeralZooKeeperNodes() throws Throwable {\nZooKeeperLeaderElectionDriver leaderElectionDriver;\nLeaderRetrievalDriver leaderRetrievalDriver = null;\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\nfinal TestingLeaderRetrievalEventHandler retrievalEventHandler =\nnew TestingLeaderRetrievalEventHandler();\nCuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper;\nCuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper2 = null;\nCuratorCache cache = null;\ntry {\ncuratorFrameworkWrapper =\nZooKeeperUtils.startCuratorFramework(\nconfiguration,\ntestingFatalErrorHandlerResource.getTestingFatalErrorHandler());\ncuratorFrameworkWrapper2 =\nZooKeeperUtils.startCuratorFramework(\nconfiguration,\ntestingFatalErrorHandlerResource.getTestingFatalErrorHandler());\nleaderElectionDriver =\ncreateAndInitLeaderElectionDriver(\ncuratorFrameworkWrapper.asCuratorFramework(), electionEventHandler);\nleaderRetrievalDriver =\nZooKeeperUtils.createLeaderRetrievalDriverFactory(\ncuratorFrameworkWrapper2.asCuratorFramework(), CONTENDER_ID)\n.createLeaderRetrievalDriver(\nretrievalEventHandler, retrievalEventHandler::handleError);\ncache =\nCuratorCache.build(\ncuratorFrameworkWrapper2.asCuratorFramework(),\nZooKeeperUtils.generateConnectionInformationPath(CONTENDER_ID));\nfinal ExistsCacheListener existsListener =\nExistsCacheListener.createWithNodeIsMissingValidation(\ncache, ZooKeeperUtils.generateConnectionInformationPath(CONTENDER_ID));\ncache.listenable().addListener(existsListener);\ncache.start();\nelectionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class);\nleaderElectionDriver.publishLeaderInformation(\nCONTENDER_ID, LeaderInformation.known(UUID.randomUUID(), LEADER_ADDRESS));\nretrievalEventHandler.waitForNewLeader();\nFuture existsFuture = existsListener.nodeExists();\nexistsFuture.get(timeout, TimeUnit.MILLISECONDS);\nfinal DeletedCacheListener deletedCacheListener =\nDeletedCacheListener.createWithNodeExistValidation(\ncache, ZooKeeperUtils.generateConnectionInformationPath(CONTENDER_ID));\ncache.listenable().addListener(deletedCacheListener);\nleaderElectionDriver.close();\ncuratorFrameworkWrapper.close();\nFuture deletedFuture = deletedCacheListener.nodeDeleted();\ndeletedFuture.get(timeout, 
TimeUnit.MILLISECONDS);\nretrievalEventHandler.waitForEmptyLeaderInformation();\n} finally {\nif (leaderRetrievalDriver != null) {\nleaderRetrievalDriver.close();\n}\nif (cache != null) {\ncache.close();\n}\nif (curatorFrameworkWrapper2 != null) {\ncuratorFrameworkWrapper2.close();\n}\nelectionEventHandler.throwIfErrorEventHappened();\n}\n}\n@Test\nvoid testNotLeaderShouldNotCleanUpTheLeaderInformation() throws Throwable {\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\nfinal TestingLeaderRetrievalEventHandler retrievalEventHandler =\nnew TestingLeaderRetrievalEventHandler();\ntry (ZooKeeperLeaderElectionDriver leaderElectionDriver =\ncreateAndInitLeaderElectionDriver(createZooKeeperClient(), electionEventHandler)) {\nelectionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class);\nfinal UUID leaderSessionID = UUID.randomUUID();\nleaderElectionDriver.publishLeaderInformation(\nCONTENDER_ID, LeaderInformation.known(leaderSessionID, LEADER_ADDRESS));\nleaderElectionDriver.notLeader();\nelectionEventHandler.await(LeaderElectionEvent.NotLeaderEvent.class);\ntry (ZooKeeperLeaderRetrievalDriver leaderRetrievalDriver =\nZooKeeperUtils.createLeaderRetrievalDriverFactory(\ncreateZooKeeperClient(), CONTENDER_ID)\n.createLeaderRetrievalDriver(\nretrievalEventHandler, retrievalEventHandler::handleError)) {\nretrievalEventHandler.waitForNewLeader();\nassertThat(retrievalEventHandler.getLeaderSessionID()).isEqualTo(leaderSessionID);\nassertThat(retrievalEventHandler.getAddress()).isEqualTo(LEADER_ADDRESS);\n}\n} finally {\nelectionEventHandler.throwIfErrorEventHappened();\n}\n}\n/**\n* Test that background errors in the {@link LeaderElectionDriver} are correctly forwarded to\n* the {@link FatalErrorHandler}.\n*/\n@Test\npublic void testUnExpectedErrorForwarding() throws Throwable {\nLeaderElectionDriver leaderElectionDriver = null;\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\nfinal TestingFatalErrorHandler fatalErrorHandler = new TestingFatalErrorHandler();\nfinal FlinkRuntimeException testException =\nnew FlinkRuntimeException(\"testUnExpectedErrorForwarding\");\nfinal CuratorFrameworkFactory.Builder curatorFrameworkBuilder =\nCuratorFrameworkFactory.builder()\n.connectString(zooKeeperResource.getCustomExtension().getConnectString())\n.retryPolicy(new ExponentialBackoffRetry(1, 0))\n.aclProvider(\nnew ACLProvider() {\n@Override\npublic List getDefaultAcl() {\nthrow testException;\n}\n@Override\npublic List getAclForPath(String s) {\nthrow testException;\n}\n})\n.namespace(\"flink\");\ntry (CuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper =\nZooKeeperUtils.startCuratorFramework(curatorFrameworkBuilder, fatalErrorHandler)) {\nCuratorFramework clientWithErrorHandler = curatorFrameworkWrapper.asCuratorFramework();\nassertThat(fatalErrorHandler.getErrorFuture()).isNotDone();\nleaderElectionDriver =\ncreateAndInitLeaderElectionDriver(clientWithErrorHandler, electionEventHandler);\nassertThat(fatalErrorHandler.getErrorFuture().get()).isEqualTo(testException);\n} finally {\nif (leaderElectionDriver != null) {\nleaderElectionDriver.close();\n}\nelectionEventHandler.throwIfErrorEventHappened();\n}\n}\nprivate CuratorFramework createZooKeeperClient() {\nreturn zooKeeperResource\n.getCustomExtension()\n.getZooKeeperClient(testingFatalErrorHandlerResource.getTestingFatalErrorHandler());\n}\nprivate static class ExistsCacheListener implements CuratorCacheListener {\nfinal CompletableFuture 
existsPromise = new CompletableFuture<>();\nfinal CuratorCache cache;\n/**\n* Factory method that's used to ensure consistency in the implementation. The method\n* validates that the given node doesn't exist, yet.\n*\n* @throws IllegalStateException If the passed path is already present in the passed cache.\n*/\npublic static ExistsCacheListener createWithNodeIsMissingValidation(\nCuratorCache cache, String path) {\nPreconditions.checkState(\n!cache.get(path).isPresent(),\n\"The given path %s should not lead to an already existing node. This listener will then check that the node was created.\",\npath);\nreturn new ExistsCacheListener(cache);\n}\nprivate ExistsCacheListener(final CuratorCache cache) {\nthis.cache = cache;\n}\npublic Future nodeExists() {\nreturn existsPromise;\n}\n@Override\npublic void event(Type type, ChildData oldData, ChildData data) {\nif (type == Type.NODE_CREATED && data != null && !existsPromise.isDone()) {\nexistsPromise.complete(true);\ncache.listenable().removeListener(this);\n}\n}\n}\nprivate static class DeletedCacheListener implements CuratorCacheListener {\nfinal CompletableFuture deletedPromise = new CompletableFuture<>();\nfinal CuratorCache cache;\npublic static DeletedCacheListener createWithNodeExistValidation(\nCuratorCache cache, String path) {\nPreconditions.checkState(\ncache.get(path).isPresent(),\n\"The given path %s should lead to an already existing node. This listener will then check that the node was properly deleted.\",\npath);\nreturn new DeletedCacheListener(cache);\n}\nprivate DeletedCacheListener(final CuratorCache cache) {\nthis.cache = cache;\n}\npublic Future nodeDeleted() {\nreturn deletedPromise;\n}\n@Override\npublic void event(Type type, ChildData oldData, ChildData data) {\nif ((type == Type.NODE_DELETED || data == null) && !deletedPromise.isDone()) {\ndeletedPromise.complete(true);\ncache.listenable().removeListener(this);\n}\n}\n}\nprivate ZooKeeperLeaderElectionDriver createAndInitLeaderElectionDriver(\nCuratorFramework client, TestingLeaderElectionListener electionEventHandler)\nthrows Exception {\nreturn new ZooKeeperLeaderElectionDriverFactory(client).create(electionEventHandler);\n}\n}", + "context_after": "class ZooKeeperLeaderElectionTest {\nprivate final ZooKeeperExtension zooKeeperExtension = new ZooKeeperExtension();\n@RegisterExtension\nfinal EachCallbackWrapper zooKeeperResource =\nnew EachCallbackWrapper<>(zooKeeperExtension);\n@RegisterExtension\nfinal TestingFatalErrorHandlerExtension testingFatalErrorHandlerResource =\nnew TestingFatalErrorHandlerExtension();\nprivate Configuration configuration;\nprivate static final String CONTENDER_ID = \"contender-id\";\nprivate static final String LEADER_ADDRESS = \"akka\nprivate static final long timeout = 200L * 1000L;\nprivate static final Logger LOG = LoggerFactory.getLogger(ZooKeeperLeaderElectionTest.class);\n@BeforeEach\nvoid before() {\nconfiguration = new Configuration();\nconfiguration.setString(\nHighAvailabilityOptions.HA_ZOOKEEPER_QUORUM,\nzooKeeperResource.getCustomExtension().getConnectString());\nconfiguration.setString(HighAvailabilityOptions.HA_MODE, \"zookeeper\");\n}\n/** Tests that the ZooKeeperLeaderElection/RetrievalService return both the correct URL. 
*/\n@Test\nvoid testZooKeeperLeaderElectionRetrieval() throws Exception {\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\nfinal TestingLeaderRetrievalEventHandler retrievalEventHandler =\nnew TestingLeaderRetrievalEventHandler();\ntry (LeaderElectionDriver leaderElectionDriver =\ncreateAndInitLeaderElectionDriver(\ncreateZooKeeperClient(), electionEventHandler);\nLeaderRetrievalDriver leaderRetrievalDriver =\nZooKeeperUtils.createLeaderRetrievalDriverFactory(\ncreateZooKeeperClient(), CONTENDER_ID)\n.createLeaderRetrievalDriver(\nretrievalEventHandler,\nretrievalEventHandler::handleError)) {\nelectionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class);\nfinal UUID leaderSessionID = UUID.randomUUID();\nleaderElectionDriver.publishLeaderInformation(\nCONTENDER_ID, LeaderInformation.known(leaderSessionID, LEADER_ADDRESS));\nretrievalEventHandler.waitForNewLeader();\nassertThat(retrievalEventHandler.getLeaderSessionID()).isEqualTo(leaderSessionID);\nassertThat(retrievalEventHandler.getAddress()).isEqualTo(LEADER_ADDRESS);\n} finally {\nelectionEventHandler.failIfErrorEventHappened();\n}\n}\n/**\n* Tests repeatedly the reelection of still available LeaderContender. After a contender has\n* been elected as the leader, it is removed. This forces the DefaultLeaderElectionService to\n* elect a new leader.\n*/\n@Test\nvoid testZooKeeperReelection() throws Exception {\nDeadline deadline = Deadline.fromNow(Duration.ofMinutes(5L));\nint num = 10;\nDefaultLeaderElectionService[] leaderElectionService =\nnew DefaultLeaderElectionService[num];\nLeaderElection[] leaderElections = new LeaderElection[num];\nTestingContender[] contenders = new TestingContender[num];\nDefaultLeaderRetrievalService leaderRetrievalService = null;\nTestingListener listener = new TestingListener();\ntry {\nleaderRetrievalService =\nZooKeeperUtils.createLeaderRetrievalService(\ncreateZooKeeperClient(), CONTENDER_ID, new Configuration());\nLOG.debug(\"Start leader retrieval service for the TestingListener.\");\nleaderRetrievalService.start(listener);\nfor (int i = 0; i < num; i++) {\nfinal LeaderElectionDriverFactory driverFactory =\nnew ZooKeeperLeaderElectionDriverFactory(createZooKeeperClient());\nleaderElectionService[i] =\nnew DefaultLeaderElectionService(\ndriverFactory,\ntestingFatalErrorHandlerResource.getTestingFatalErrorHandler());\nleaderElectionService[i].startLeaderElectionBackend();\nleaderElections[i] = leaderElectionService[i].createLeaderElection(CONTENDER_ID);\ncontenders[i] = new TestingContender(createAddress(i), leaderElections[i]);\nLOG.debug(\"Start leader election service for contender\ncontenders[i].startLeaderElection();\n}\nString pattern = LEADER_ADDRESS + \"_\" + \"(\\\\d+)\";\nPattern regex = Pattern.compile(pattern);\nint numberSeenLeaders = 0;\nwhile (deadline.hasTimeLeft() && numberSeenLeaders < num) {\nLOG.debug(\"Wait for new leader\nString address = listener.waitForNewLeader();\nMatcher m = regex.matcher(address);\nif (m.find()) {\nint index = Integer.parseInt(m.group(1));\nTestingContender contender = contenders[index];\nif (address.equals(createAddress(index))\n&& listener.getLeaderSessionID()\n.equals(contender.getLeaderSessionID())) {\nLOG.debug(\n\"Stop leader election service of contender\nnumberSeenLeaders);\nleaderElections[index].close();\nleaderElections[index] = null;\nleaderElectionService[index].close();\nleaderElectionService[index] = null;\nnumberSeenLeaders++;\n}\n} else {\nfail(\"Did not find the leader's 
index.\");\n}\n}\nassertThat(deadline.isOverdue())\n.as(\"Did not complete the leader reelection in time.\")\n.isFalse();\nassertThat(num).isEqualTo(numberSeenLeaders);\n} finally {\nif (leaderRetrievalService != null) {\nleaderRetrievalService.stop();\n}\nfor (LeaderElection leaderElection : leaderElections) {\nif (leaderElection != null) {\nleaderElection.close();\n}\n}\nfor (DefaultLeaderElectionService electionService : leaderElectionService) {\nif (electionService != null) {\nelectionService.close();\n}\n}\n}\n}\nprivate String createAddress(int i) {\nreturn LEADER_ADDRESS + \"_\" + i;\n}\n/**\n* Tests the repeated reelection of {@link LeaderContender} once the current leader dies.\n* Furthermore, it tests that new LeaderElectionServices can be started later on and that they\n* successfully register at ZooKeeper and take part in the leader election.\n*/\n@Test\nvoid testZooKeeperReelectionWithReplacement() throws Exception {\nint num = 3;\nint numTries = 30;\nDefaultLeaderElectionService[] leaderElectionService =\nnew DefaultLeaderElectionService[num];\nLeaderElection[] leaderElections = new LeaderElection[num];\nTestingContender[] contenders = new TestingContender[num];\nDefaultLeaderRetrievalService leaderRetrievalService = null;\nTestingListener listener = new TestingListener();\ntry {\nleaderRetrievalService =\nZooKeeperUtils.createLeaderRetrievalService(\ncreateZooKeeperClient(), CONTENDER_ID, new Configuration());\nleaderRetrievalService.start(listener);\nfor (int i = 0; i < num; i++) {\nfinal LeaderElectionDriverFactory driverFactory =\nnew ZooKeeperLeaderElectionDriverFactory(createZooKeeperClient());\nleaderElectionService[i] =\nnew DefaultLeaderElectionService(\ndriverFactory,\ntestingFatalErrorHandlerResource.getTestingFatalErrorHandler());\nleaderElectionService[i].startLeaderElectionBackend();\nleaderElections[i] = leaderElectionService[i].createLeaderElection(CONTENDER_ID);\ncontenders[i] =\nnew TestingContender(LEADER_ADDRESS + \"_\" + i + \"_0\", leaderElections[i]);\ncontenders[i].startLeaderElection();\n}\nString pattern = LEADER_ADDRESS + \"_\" + \"(\\\\d+)\" + \"_\" + \"(\\\\d+)\";\nPattern regex = Pattern.compile(pattern);\nfor (int i = 0; i < numTries; i++) {\nlistener.waitForNewLeader();\nString address = listener.getAddress();\nMatcher m = regex.matcher(address);\nif (m.find()) {\nint index = Integer.parseInt(m.group(1));\nint lastTry = Integer.parseInt(m.group(2));\nassertThat(listener.getLeaderSessionID())\n.isEqualTo(contenders[index].getLeaderSessionID());\nleaderElections[index].close();\nleaderElections[index] = null;\nleaderElectionService[index].close();\nleaderElections[index] = null;\nfinal LeaderElectionDriverFactory driverFactory =\nnew ZooKeeperLeaderElectionDriverFactory(createZooKeeperClient());\nleaderElectionService[index] =\nnew DefaultLeaderElectionService(\ndriverFactory,\ntestingFatalErrorHandlerResource.getTestingFatalErrorHandler());\nleaderElectionService[index].startLeaderElectionBackend();\nleaderElections[index] =\nleaderElectionService[index].createLeaderElection(CONTENDER_ID);\ncontenders[index] =\nnew TestingContender(\nLEADER_ADDRESS + \"_\" + index + \"_\" + (lastTry + 1),\nleaderElections[index]);\ncontenders[index].startLeaderElection();\n} else {\nthrow new Exception(\"Did not find the leader's index.\");\n}\n}\n} finally {\nif (leaderRetrievalService != null) {\nleaderRetrievalService.stop();\n}\nfor (LeaderElection leaderElection : leaderElections) {\nif (leaderElection != null) {\nleaderElection.close();\n}\n}\nfor 
(DefaultLeaderElectionService electionService : leaderElectionService) {\nif (electionService != null) {\nelectionService.close();\n}\n}\n}\n}\n/** Tests that the leader update information will not be notified repeatedly. */\n@Test\nvoid testLeaderChangeWriteLeaderInformationOnlyOnce() throws Exception {\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\ntry (ZooKeeperLeaderElectionDriver leaderElectionDriver =\ncreateAndInitLeaderElectionDriver(createZooKeeperClient(), electionEventHandler)) {\nelectionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class);\nleaderElectionDriver.publishLeaderInformation(\nCONTENDER_ID, LeaderInformation.known(UUID.randomUUID(), LEADER_ADDRESS));\nelectionEventHandler.await(LeaderElectionEvent.LeaderInformationChangeEvent.class);\nassertThat(\nelectionEventHandler.await(\nLeaderElectionEvent.LeaderInformationChangeEvent.class,\nDuration.ofMillis(5)))\n.as(\"Another leader information update is not expected.\")\n.isEmpty();\n} finally {\nelectionEventHandler.failIfErrorEventHappened();\n}\n}\n/**\n* Test that errors in the {@link LeaderElectionDriver} are correctly forwarded to the {@link\n* LeaderContender}.\n*/\n@Test\n/**\n* Tests that there is no information left in the ZooKeeper cluster after the ZooKeeper client\n* has terminated. In other words, checks that the ZooKeeperLeaderElection service uses\n* ephemeral nodes.\n*/\n@Test\nvoid testEphemeralZooKeeperNodes() throws Exception {\nZooKeeperLeaderElectionDriver leaderElectionDriver;\nLeaderRetrievalDriver leaderRetrievalDriver = null;\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\nfinal TestingLeaderRetrievalEventHandler retrievalEventHandler =\nnew TestingLeaderRetrievalEventHandler();\nCuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper;\nCuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper2 = null;\nCuratorCache cache = null;\ntry {\ncuratorFrameworkWrapper =\nZooKeeperUtils.startCuratorFramework(\nconfiguration,\ntestingFatalErrorHandlerResource.getTestingFatalErrorHandler());\ncuratorFrameworkWrapper2 =\nZooKeeperUtils.startCuratorFramework(\nconfiguration,\ntestingFatalErrorHandlerResource.getTestingFatalErrorHandler());\nleaderElectionDriver =\ncreateAndInitLeaderElectionDriver(\ncuratorFrameworkWrapper.asCuratorFramework(), electionEventHandler);\nleaderRetrievalDriver =\nZooKeeperUtils.createLeaderRetrievalDriverFactory(\ncuratorFrameworkWrapper2.asCuratorFramework(), CONTENDER_ID)\n.createLeaderRetrievalDriver(\nretrievalEventHandler, retrievalEventHandler::handleError);\ncache =\nCuratorCache.build(\ncuratorFrameworkWrapper2.asCuratorFramework(),\nZooKeeperUtils.generateConnectionInformationPath(CONTENDER_ID));\nfinal ExistsCacheListener existsListener =\nExistsCacheListener.createWithNodeIsMissingValidation(\ncache, ZooKeeperUtils.generateConnectionInformationPath(CONTENDER_ID));\ncache.listenable().addListener(existsListener);\ncache.start();\nelectionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class);\nleaderElectionDriver.publishLeaderInformation(\nCONTENDER_ID, LeaderInformation.known(UUID.randomUUID(), LEADER_ADDRESS));\nretrievalEventHandler.waitForNewLeader();\nFuture existsFuture = existsListener.nodeExists();\nexistsFuture.get(timeout, TimeUnit.MILLISECONDS);\nfinal DeletedCacheListener deletedCacheListener =\nDeletedCacheListener.createWithNodeExistValidation(\ncache, 
ZooKeeperUtils.generateConnectionInformationPath(CONTENDER_ID));\ncache.listenable().addListener(deletedCacheListener);\nleaderElectionDriver.close();\ncuratorFrameworkWrapper.close();\nFuture deletedFuture = deletedCacheListener.nodeDeleted();\ndeletedFuture.get(timeout, TimeUnit.MILLISECONDS);\nretrievalEventHandler.waitForEmptyLeaderInformation();\n} finally {\nif (leaderRetrievalDriver != null) {\nleaderRetrievalDriver.close();\n}\nif (cache != null) {\ncache.close();\n}\nif (curatorFrameworkWrapper2 != null) {\ncuratorFrameworkWrapper2.close();\n}\nelectionEventHandler.failIfErrorEventHappened();\n}\n}\n@Test\nvoid testNotLeaderShouldNotCleanUpTheLeaderInformation() throws Exception {\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\nfinal TestingLeaderRetrievalEventHandler retrievalEventHandler =\nnew TestingLeaderRetrievalEventHandler();\ntry (ZooKeeperLeaderElectionDriver leaderElectionDriver =\ncreateAndInitLeaderElectionDriver(createZooKeeperClient(), electionEventHandler)) {\nelectionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class);\nfinal UUID leaderSessionID = UUID.randomUUID();\nleaderElectionDriver.publishLeaderInformation(\nCONTENDER_ID, LeaderInformation.known(leaderSessionID, LEADER_ADDRESS));\nleaderElectionDriver.notLeader();\nelectionEventHandler.await(LeaderElectionEvent.NotLeaderEvent.class);\ntry (ZooKeeperLeaderRetrievalDriver leaderRetrievalDriver =\nZooKeeperUtils.createLeaderRetrievalDriverFactory(\ncreateZooKeeperClient(), CONTENDER_ID)\n.createLeaderRetrievalDriver(\nretrievalEventHandler, retrievalEventHandler::handleError)) {\nretrievalEventHandler.waitForNewLeader();\nassertThat(retrievalEventHandler.getLeaderSessionID()).isEqualTo(leaderSessionID);\nassertThat(retrievalEventHandler.getAddress()).isEqualTo(LEADER_ADDRESS);\n}\n} finally {\nelectionEventHandler.failIfErrorEventHappened();\n}\n}\n/**\n* Test that background errors in the {@link LeaderElectionDriver} are correctly forwarded to\n* the {@link FatalErrorHandler}.\n*/\n@Test\npublic void testUnExpectedErrorForwarding() throws Exception {\nLeaderElectionDriver leaderElectionDriver = null;\nfinal TestingLeaderElectionListener electionEventHandler =\nnew TestingLeaderElectionListener();\nfinal TestingFatalErrorHandler fatalErrorHandler = new TestingFatalErrorHandler();\nfinal FlinkRuntimeException testException =\nnew FlinkRuntimeException(\"testUnExpectedErrorForwarding\");\nfinal CuratorFrameworkFactory.Builder curatorFrameworkBuilder =\nCuratorFrameworkFactory.builder()\n.connectString(zooKeeperResource.getCustomExtension().getConnectString())\n.retryPolicy(new ExponentialBackoffRetry(1, 0))\n.aclProvider(\nnew ACLProvider() {\n@Override\npublic List getDefaultAcl() {\nthrow testException;\n}\n@Override\npublic List getAclForPath(String s) {\nthrow testException;\n}\n})\n.namespace(\"flink\");\ntry (CuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper =\nZooKeeperUtils.startCuratorFramework(curatorFrameworkBuilder, fatalErrorHandler)) {\nCuratorFramework clientWithErrorHandler = curatorFrameworkWrapper.asCuratorFramework();\nassertThat(fatalErrorHandler.getErrorFuture()).isNotDone();\nleaderElectionDriver =\ncreateAndInitLeaderElectionDriver(clientWithErrorHandler, electionEventHandler);\nassertThat(fatalErrorHandler.getErrorFuture().get()).isEqualTo(testException);\n} finally {\nif (leaderElectionDriver != null) {\nleaderElectionDriver.close();\n}\nelectionEventHandler.failIfErrorEventHappened();\n}\n}\nprivate 
CuratorFramework createZooKeeperClient() {\nreturn zooKeeperResource\n.getCustomExtension()\n.getZooKeeperClient(testingFatalErrorHandlerResource.getTestingFatalErrorHandler());\n}\nprivate static class ExistsCacheListener implements CuratorCacheListener {\nfinal CompletableFuture existsPromise = new CompletableFuture<>();\nfinal CuratorCache cache;\n/**\n* Factory method that's used to ensure consistency in the implementation. The method\n* validates that the given node doesn't exist, yet.\n*\n* @throws IllegalStateException If the passed path is already present in the passed cache.\n*/\npublic static ExistsCacheListener createWithNodeIsMissingValidation(\nCuratorCache cache, String path) {\nPreconditions.checkState(\n!cache.get(path).isPresent(),\n\"The given path %s should not lead to an already existing node. This listener will then check that the node was created.\",\npath);\nreturn new ExistsCacheListener(cache);\n}\nprivate ExistsCacheListener(final CuratorCache cache) {\nthis.cache = cache;\n}\npublic Future nodeExists() {\nreturn existsPromise;\n}\n@Override\npublic void event(Type type, ChildData oldData, ChildData data) {\nif (type == Type.NODE_CREATED && data != null && !existsPromise.isDone()) {\nexistsPromise.complete(true);\ncache.listenable().removeListener(this);\n}\n}\n}\nprivate static class DeletedCacheListener implements CuratorCacheListener {\nfinal CompletableFuture deletedPromise = new CompletableFuture<>();\nfinal CuratorCache cache;\npublic static DeletedCacheListener createWithNodeExistValidation(\nCuratorCache cache, String path) {\nPreconditions.checkState(\ncache.get(path).isPresent(),\n\"The given path %s should lead to an already existing node. This listener will then check that the node was properly deleted.\",\npath);\nreturn new DeletedCacheListener(cache);\n}\nprivate DeletedCacheListener(final CuratorCache cache) {\nthis.cache = cache;\n}\npublic Future nodeDeleted() {\nreturn deletedPromise;\n}\n@Override\npublic void event(Type type, ChildData oldData, ChildData data) {\nif ((type == Type.NODE_DELETED || data == null) && !deletedPromise.isDone()) {\ndeletedPromise.complete(true);\ncache.listenable().removeListener(this);\n}\n}\n}\nprivate ZooKeeperLeaderElectionDriver createAndInitLeaderElectionDriver(\nCuratorFramework client, TestingLeaderElectionListener electionEventHandler)\nthrows Exception {\nreturn new ZooKeeperLeaderElectionDriverFactory(client).create(electionEventHandler);\n}\n}" + }, + { + "comment": "Why not always add backticks? This condition seems a little redundant.", + "method_body": "public void execute() throws Exception {\nMap params = new HashMap<>();\nparams.put(\"internalDB\", FeConstants.INTERNAL_DB_NAME);\nparams.put(\"histogramStatTbl\", StatisticConstants.HISTOGRAM_TBL_NAME);\nparams.put(\"catalogId\", String.valueOf(catalog.getId()));\nparams.put(\"dbId\", String.valueOf(db.getId()));\nparams.put(\"tblId\", String.valueOf(tbl.getId()));\nparams.put(\"idxId\", \"-1\");\nparams.put(\"colId\", String.valueOf(info.colName));\nparams.put(\"dbName\", info.dbName);\nparams.put(\"tblName\", String.valueOf(info.tblName));\nparams.put(\"colName\", String.valueOf(info.colName));\nparams.put(\"sampleRate\", String.valueOf(info.sampleRate));\nparams.put(\"maxBucketNum\", String.valueOf(info.maxBucketNum));\nparams.put(\"percentValue\", String.valueOf((int) (info.sampleRate * 100)));\nString histogramSql;\nSet partitionNames = info.partitionNames;\nif (partitionNames.isEmpty()) {\nStringSubstitutor stringSubstitutor = new 
StringSubstitutor(params);\nhistogramSql = stringSubstitutor.replace(ANALYZE_HISTOGRAM_SQL_TEMPLATE_TABLE);\n} else {\ntry {\ntbl.readLock();\nString partNames = partitionNames.stream()\n.filter(x -> tbl.getPartition(x) != null)\n.map(partName -> SqlScanner\n.isKeyword(partName) ? \"`\" + partName + \"`\" : partName)\n.collect(Collectors.joining(\",\"));\nparams.put(\"partName\", partNames);\nStringSubstitutor stringSubstitutor = new StringSubstitutor(params);\nhistogramSql = stringSubstitutor.replace(ANALYZE_HISTOGRAM_SQL_TEMPLATE_PART);\n} finally {\ntbl.readUnlock();\n}\n}\nLOG.info(\"SQL to collect the histogram:\\n {}\", histogramSql);\ntry (AutoCloseConnectContext r = StatisticsUtil.buildConnectContext()) {\nthis.stmtExecutor = new StmtExecutor(r.connectContext, histogramSql);\nthis.stmtExecutor.execute();\n}\nEnv.getCurrentEnv().getStatisticsCache().refreshSync(tbl.getId(), -1, col.getName());\n}", + "target_code": ".isKeyword(partName) ? \"`\" + partName + \"`\" : partName)", + "method_body_after": "public void execute() throws Exception {\nMap params = new HashMap<>();\nparams.put(\"internalDB\", FeConstants.INTERNAL_DB_NAME);\nparams.put(\"histogramStatTbl\", StatisticConstants.HISTOGRAM_TBL_NAME);\nparams.put(\"catalogId\", String.valueOf(catalog.getId()));\nparams.put(\"dbId\", String.valueOf(db.getId()));\nparams.put(\"tblId\", String.valueOf(tbl.getId()));\nparams.put(\"idxId\", \"-1\");\nparams.put(\"colId\", String.valueOf(info.colName));\nparams.put(\"dbName\", info.dbName);\nparams.put(\"tblName\", String.valueOf(info.tblName));\nparams.put(\"colName\", String.valueOf(info.colName));\nparams.put(\"sampleRate\", String.valueOf(info.sampleRate));\nparams.put(\"maxBucketNum\", String.valueOf(info.maxBucketNum));\nparams.put(\"percentValue\", String.valueOf((int) (info.sampleRate * 100)));\nString histogramSql;\nSet partitionNames = info.partitionNames;\nif (partitionNames.isEmpty()) {\nStringSubstitutor stringSubstitutor = new StringSubstitutor(params);\nhistogramSql = stringSubstitutor.replace(ANALYZE_HISTOGRAM_SQL_TEMPLATE_TABLE);\n} else {\ntry {\ntbl.readLock();\nString partNames = partitionNames.stream()\n.filter(x -> tbl.getPartition(x) != null)\n.map(partName -> \"`\" + partName + \"`\")\n.collect(Collectors.joining(\",\"));\nparams.put(\"partName\", partNames);\nStringSubstitutor stringSubstitutor = new StringSubstitutor(params);\nhistogramSql = stringSubstitutor.replace(ANALYZE_HISTOGRAM_SQL_TEMPLATE_PART);\n} finally {\ntbl.readUnlock();\n}\n}\nLOG.info(\"SQL to collect the histogram:\\n {}\", histogramSql);\ntry (AutoCloseConnectContext r = StatisticsUtil.buildConnectContext()) {\nthis.stmtExecutor = new StmtExecutor(r.connectContext, histogramSql);\nthis.stmtExecutor.execute();\n}\nEnv.getCurrentEnv().getStatisticsCache().refreshSync(tbl.getId(), -1, col.getName());\n}", + "context_before": "class HistogramTask extends BaseAnalysisTask {\nprivate static final String ANALYZE_HISTOGRAM_SQL_TEMPLATE_TABLE = \"INSERT INTO \"\n+ \"${internalDB}.${histogramStatTbl} \"\n+ \"SELECT \"\n+ \" CONCAT(${tblId}, '-', ${idxId}, '-', '${colId}') AS id, \"\n+ \" ${catalogId} AS catalog_id, \"\n+ \" ${dbId} AS db_id, \"\n+ \" ${tblId} AS tbl_id, \"\n+ \" ${idxId} AS idx_id, \"\n+ \" '${colId}' AS col_id, \"\n+ \" ${sampleRate} AS sample_rate, \"\n+ \" HISTOGRAM(`${colName}`, 1, ${maxBucketNum}) AS buckets, \"\n+ \" NOW() AS create_time \"\n+ \"FROM \"\n+ \" `${dbName}`.`${tblName}`\";\nprivate static final String ANALYZE_HISTOGRAM_SQL_TEMPLATE_PART = 
ANALYZE_HISTOGRAM_SQL_TEMPLATE_TABLE\n+ \" PARTITION (${partName})\";\n@VisibleForTesting\npublic HistogramTask() {\nsuper();\n}\npublic HistogramTask(AnalysisTaskScheduler analysisTaskScheduler, AnalysisTaskInfo info) {\nsuper(analysisTaskScheduler, info);\n}\n@Override\n}", + "context_after": "class HistogramTask extends BaseAnalysisTask {\nprivate static final String ANALYZE_HISTOGRAM_SQL_TEMPLATE_TABLE = \"INSERT INTO \"\n+ \"${internalDB}.${histogramStatTbl} \"\n+ \"SELECT \"\n+ \" CONCAT(${tblId}, '-', ${idxId}, '-', '${colId}') AS id, \"\n+ \" ${catalogId} AS catalog_id, \"\n+ \" ${dbId} AS db_id, \"\n+ \" ${tblId} AS tbl_id, \"\n+ \" ${idxId} AS idx_id, \"\n+ \" '${colId}' AS col_id, \"\n+ \" ${sampleRate} AS sample_rate, \"\n+ \" HISTOGRAM(`${colName}`, ${maxBucketNum}) AS buckets, \"\n+ \" NOW() AS create_time \"\n+ \"FROM \"\n+ \" `${dbName}`.`${tblName}`\";\nprivate static final String ANALYZE_HISTOGRAM_SQL_TEMPLATE_PART = ANALYZE_HISTOGRAM_SQL_TEMPLATE_TABLE\n+ \" PARTITION (${partName})\";\n@VisibleForTesting\npublic HistogramTask() {\nsuper();\n}\npublic HistogramTask(AnalysisTaskScheduler analysisTaskScheduler, AnalysisTaskInfo info) {\nsuper(analysisTaskScheduler, info);\n}\n@Override\n}" + }, + { + "comment": "I think the exception should be `new CheckpointException(CheckpointFailureReason#CHECKPOINT_DECLINED_SUBSUMED)`?", + "method_body": "public void start(long checkpointId, CheckpointOptions checkpointOptions) {\nLOG.debug(\"{} starting checkpoint {} ({})\", taskName, checkpointId, checkpointOptions);\nsynchronized (lock) {\nPreconditions.checkState(checkpointId > ongoingCheckpointId);\nif (isCheckpointSubsumedOrAborted(checkpointId)) {\nLOG.debug(\n\"The checkpoint {} of task {} has been aborted, so don't start.\",\ncheckpointId,\ntaskName);\nreturn;\n}\nif (result != null) {\nresult.fail(new CancellationException(\"Cancel old checkpoint.\"));\n}\nthis.result = new ChannelStateWriteResult();\nongoingCheckpointId = checkpointId;\nenqueue(\nnew CheckpointStartRequest(\ncheckpointId, result, checkpointOptions.getTargetLocation()),\nfalse);\n}\n}", + "target_code": "result.fail(new CancellationException(\"Cancel old checkpoint.\"));", + "method_body_after": "public void start(long checkpointId, CheckpointOptions checkpointOptions) {\nLOG.debug(\"{} starting checkpoint {} ({})\", taskName, checkpointId, checkpointOptions);\nChannelStateWriteResult result = new ChannelStateWriteResult();\nChannelStateWriteResult put =\nresults.computeIfAbsent(\ncheckpointId,\nid -> {\nPreconditions.checkState(\nresults.size() < maxCheckpoints,\nString.format(\n\"%s can't start %d, results.size() > maxCheckpoints: %d > %d\",\ntaskName,\ncheckpointId,\nresults.size(),\nmaxCheckpoints));\nenqueue(\nnew CheckpointStartRequest(\ncheckpointId,\nresult,\ncheckpointOptions.getTargetLocation()),\nfalse);\nreturn result;\n});\nPreconditions.checkArgument(\nput == result,\ntaskName + \" result future already present for checkpoint \" + checkpointId);\n}", + "context_before": "class ChannelStateWriterImpl implements ChannelStateWriter {\nprivate static final Logger LOG = LoggerFactory.getLogger(ChannelStateWriterImpl.class);\nprivate final String taskName;\nprivate final ChannelStateWriteRequestExecutor executor;\nprivate final Object lock = new Object();\n@GuardedBy(\"lock\")\nprivate long ongoingCheckpointId;\n@GuardedBy(\"lock\")\nprivate long maxAbortedCheckpointId;\n@GuardedBy(\"lock\")\nprivate ChannelStateWriteResult result;\n/**\n* Creates a {@link ChannelStateWriterImpl} with {@link 
ChannelStateSerializerImpl default}\n* {@link ChannelStateSerializer}, and a {@link ChannelStateWriteRequestExecutorImpl}.\n*\n* @param taskName\n* @param streamFactoryResolver a factory to obtain output stream factory for a given checkpoint\n*/\npublic ChannelStateWriterImpl(\nString taskName, int subtaskIndex, CheckpointStorageWorkerView streamFactoryResolver) {\nthis(\ntaskName,\nnew ChannelStateWriteRequestExecutorImpl(\ntaskName,\nnew ChannelStateWriteRequestDispatcherImpl(\ntaskName,\nsubtaskIndex,\nstreamFactoryResolver,\nnew ChannelStateSerializerImpl())));\n}\nChannelStateWriterImpl(String taskName, ChannelStateWriteRequestExecutor executor) {\nthis.taskName = taskName;\nthis.executor = executor;\nthis.ongoingCheckpointId = 0;\nthis.maxAbortedCheckpointId = 0;\n}\n@Override\n@Override\npublic void addInputData(\nlong checkpointId,\nInputChannelInfo info,\nint startSeqNum,\nCloseableIterator iterator) {\nLOG.trace(\n\"{} adding input data, checkpoint {}, channel: {}, startSeqNum: {}\",\ntaskName,\ncheckpointId,\ninfo,\nstartSeqNum);\nif (isCheckpointSubsumedOrAborted(checkpointId)) {\nLOG.debug(\n\"The checkpoint {} of task {} has been aborted, so don't addInputData.\",\ncheckpointId,\ntaskName);\ncloseBuffers(iterator);\nreturn;\n}\nenqueue(write(checkpointId, info, iterator), false);\n}\n@Override\npublic void addOutputData(\nlong checkpointId, ResultSubpartitionInfo info, int startSeqNum, Buffer... data) {\nLOG.trace(\n\"{} adding output data, checkpoint {}, channel: {}, startSeqNum: {}, num buffers: {}\",\ntaskName,\ncheckpointId,\ninfo,\nstartSeqNum,\ndata == null ? 0 : data.length);\nif (isCheckpointSubsumedOrAborted(checkpointId)) {\nLOG.debug(\n\"The checkpoint {} of task {} has been aborted, so don't addOutputData.\",\ncheckpointId,\ntaskName);\ncloseBuffers(data);\nreturn;\n}\nenqueue(write(checkpointId, info, data), false);\n}\n@Override\npublic void addOutputDataFuture(\nlong checkpointId,\nResultSubpartitionInfo info,\nint startSeqNum,\nCompletableFuture> dataFuture)\nthrows IllegalArgumentException {\nLOG.trace(\n\"{} adding output data future, checkpoint {}, channel: {}, startSeqNum: {}\",\ntaskName,\ncheckpointId,\ninfo,\nstartSeqNum);\nif (isCheckpointSubsumedOrAborted(checkpointId)) {\nLOG.debug(\n\"The checkpoint {} of task {} has been aborted, so don't addOutputDataFuture.\",\ncheckpointId,\ntaskName);\ncloseBuffers(dataFuture);\nreturn;\n}\nenqueue(write(checkpointId, info, dataFuture), false);\n}\n@Override\npublic void finishInput(long checkpointId) {\nLOG.debug(\"{} finishing input data, checkpoint {}\", taskName, checkpointId);\nif (isCheckpointSubsumedOrAborted(checkpointId)) {\nLOG.debug(\n\"The checkpoint {} of task {} has been aborted, so don't finishInput.\",\ncheckpointId,\ntaskName);\nreturn;\n}\nenqueue(completeInput(checkpointId), false);\n}\n@Override\npublic void finishOutput(long checkpointId) {\nLOG.debug(\"{} finishing output data, checkpoint {}\", taskName, checkpointId);\nif (isCheckpointSubsumedOrAborted(checkpointId)) {\nLOG.debug(\n\"The checkpoint {} of task {} has been aborted, so don't finishOutput.\",\ncheckpointId,\ntaskName);\nreturn;\n}\nenqueue(completeOutput(checkpointId), false);\n}\n@Override\npublic void abort(long checkpointId, Throwable cause, boolean cleanup) {\nLOG.debug(\"{} aborting, checkpoint {}\", taskName, checkpointId);\nenqueue(ChannelStateWriteRequest.abort(checkpointId, cause), true);\nenqueue(\nChannelStateWriteRequest.abort(checkpointId, cause),\nfalse);\nsynchronized (lock) {\nif (checkpointId > 
maxAbortedCheckpointId) {\nmaxAbortedCheckpointId = checkpointId;\n}\nif (cleanup && checkpointId == ongoingCheckpointId) {\nresult = null;\n}\n}\n}\n@Override\npublic ChannelStateWriteResult getAndRemoveWriteResult(long checkpointId) {\nLOG.debug(\"{} requested write result, checkpoint {}\", taskName, checkpointId);\nsynchronized (lock) {\nPreconditions.checkArgument(checkpointId == ongoingCheckpointId && result != null);\nreturn result;\n}\n}\n@VisibleForTesting\nChannelStateWriteResult getWriteResult(long checkpointId) {\nsynchronized (lock) {\nif (checkpointId == ongoingCheckpointId) {\nreturn result;\n}\nreturn null;\n}\n}\nprivate boolean isCheckpointSubsumedOrAborted(long checkpointId) {\nsynchronized (lock) {\nreturn checkpointId < ongoingCheckpointId || checkpointId <= maxAbortedCheckpointId;\n}\n}\npublic void open() {\nexecutor.start();\n}\n@Override\npublic void close() throws IOException {\nsynchronized (lock) {\nLOG.debug(\"close, dropping checkpoint {}\", result);\nresult = null;\n}\nexecutor.close();\n}\nprivate void closeBuffers(Buffer... buffers) {\ncloseBuffers(ofElements(Buffer::recycleBuffer, buffers));\n}\nprivate void closeBuffers(CloseableIterator iterator) {\ntry {\niterator.close();\n} catch (Throwable e) {\nLOG.error(\"Failed to recycle the output buffer of channel state.\", e);\n}\n}\nprivate void closeBuffers(CompletableFuture> dataFuture) {\ncloseDataFuture(dataFuture);\n}\nprivate void enqueue(ChannelStateWriteRequest request, boolean atTheFront) {\ntry {\nif (atTheFront) {\nexecutor.submitPriority(request);\n} else {\nexecutor.submit(request);\n}\n} catch (Exception e) {\nRuntimeException wrapped = new RuntimeException(\"unable to send request to worker\", e);\ntry {\nrequest.cancel(e);\n} catch (Exception cancelException) {\nwrapped.addSuppressed(cancelException);\n}\nthrow wrapped;\n}\n}\n}", + "context_after": "class ChannelStateWriterImpl implements ChannelStateWriter {\nprivate static final Logger LOG = LoggerFactory.getLogger(ChannelStateWriterImpl.class);\nprivate static final int DEFAULT_MAX_CHECKPOINTS =\n1000;\nprivate final String taskName;\nprivate final ChannelStateWriteRequestExecutor executor;\nprivate final ConcurrentMap results;\nprivate final int maxCheckpoints;\n/**\n* Creates a {@link ChannelStateWriterImpl} with {@link\n*\n*/\npublic ChannelStateWriterImpl(\nString taskName, int subtaskIndex, CheckpointStorageWorkerView streamFactoryResolver) {\nthis(taskName, subtaskIndex, streamFactoryResolver, DEFAULT_MAX_CHECKPOINTS);\n}\n/**\n* Creates a {@link ChannelStateWriterImpl} with {@link ChannelStateSerializerImpl default}\n* {@link ChannelStateSerializer}, and a {@link ChannelStateWriteRequestExecutorImpl}.\n*\n* @param taskName\n* @param streamFactoryResolver a factory to obtain output stream factory for a given checkpoint\n* @param maxCheckpoints maximum number of checkpoints to be written currently or finished but\n* not taken yet.\n*/\nChannelStateWriterImpl(\nString taskName,\nint subtaskIndex,\nCheckpointStorageWorkerView streamFactoryResolver,\nint maxCheckpoints) {\nthis(\ntaskName,\nnew ConcurrentHashMap<>(maxCheckpoints),\nnew ChannelStateWriteRequestExecutorImpl(\ntaskName,\nnew ChannelStateWriteRequestDispatcherImpl(\ntaskName,\nsubtaskIndex,\nstreamFactoryResolver,\nnew ChannelStateSerializerImpl())),\nmaxCheckpoints);\n}\nChannelStateWriterImpl(\nString taskName,\nConcurrentMap results,\nChannelStateWriteRequestExecutor executor,\nint maxCheckpoints) {\nthis.taskName = taskName;\nthis.results = results;\nthis.maxCheckpoints = 
maxCheckpoints;\nthis.executor = executor;\n}\n@Override\n@Override\npublic void addInputData(\nlong checkpointId,\nInputChannelInfo info,\nint startSeqNum,\nCloseableIterator iterator) {\nLOG.trace(\n\"{} adding input data, checkpoint {}, channel: {}, startSeqNum: {}\",\ntaskName,\ncheckpointId,\ninfo,\nstartSeqNum);\nenqueue(write(checkpointId, info, iterator), false);\n}\n@Override\npublic void addOutputData(\nlong checkpointId, ResultSubpartitionInfo info, int startSeqNum, Buffer... data) {\nLOG.trace(\n\"{} adding output data, checkpoint {}, channel: {}, startSeqNum: {}, num buffers: {}\",\ntaskName,\ncheckpointId,\ninfo,\nstartSeqNum,\ndata == null ? 0 : data.length);\nenqueue(write(checkpointId, info, data), false);\n}\n@Override\npublic void addOutputDataFuture(\nlong checkpointId,\nResultSubpartitionInfo info,\nint startSeqNum,\nCompletableFuture> dataFuture)\nthrows IllegalArgumentException {\nLOG.trace(\n\"{} adding output data future, checkpoint {}, channel: {}, startSeqNum: {}\",\ntaskName,\ncheckpointId,\ninfo,\nstartSeqNum);\nenqueue(write(checkpointId, info, dataFuture), false);\n}\n@Override\npublic void finishInput(long checkpointId) {\nLOG.debug(\"{} finishing input data, checkpoint {}\", taskName, checkpointId);\nenqueue(completeInput(checkpointId), false);\n}\n@Override\npublic void finishOutput(long checkpointId) {\nLOG.debug(\"{} finishing output data, checkpoint {}\", taskName, checkpointId);\nenqueue(completeOutput(checkpointId), false);\n}\n@Override\npublic void abort(long checkpointId, Throwable cause, boolean cleanup) {\nLOG.debug(\"{} aborting, checkpoint {}\", taskName, checkpointId);\nenqueue(ChannelStateWriteRequest.abort(checkpointId, cause), true);\nenqueue(\nChannelStateWriteRequest.abort(checkpointId, cause),\nfalse);\nif (cleanup) {\nresults.remove(checkpointId);\n}\n}\n@Override\npublic ChannelStateWriteResult getAndRemoveWriteResult(long checkpointId) {\nLOG.debug(\"{} requested write result, checkpoint {}\", taskName, checkpointId);\nChannelStateWriteResult result = results.remove(checkpointId);\nPreconditions.checkArgument(\nresult != null,\ntaskName + \" channel state write result not found for checkpoint \" + checkpointId);\nreturn result;\n}\n@VisibleForTesting\npublic ChannelStateWriteResult getWriteResult(long checkpointId) {\nreturn results.get(checkpointId);\n}\npublic void open() {\nexecutor.start();\n}\n@Override\npublic void close() throws IOException {\nLOG.debug(\"close, dropping checkpoints {}\", results.keySet());\nresults.clear();\nexecutor.close();\n}\nprivate void enqueue(ChannelStateWriteRequest request, boolean atTheFront) {\ntry {\nif (atTheFront) {\nexecutor.submitPriority(request);\n} else {\nexecutor.submit(request);\n}\n} catch (Exception e) {\nRuntimeException wrapped = new RuntimeException(\"unable to send request to worker\", e);\ntry {\nrequest.cancel(e);\n} catch (Exception cancelException) {\nwrapped.addSuppressed(cancelException);\n}\nthrow wrapped;\n}\n}\n}" + }, + { + "comment": "@geoand that should answer your question, this is what is used to get the list of Java versions, the minimum is set in the Platform/Stream. 
So no, 11 won't be in the list if the minimum Java version of the stream is 17.", + "method_body": "void shouldProperlyUseMinJavaVersion() {\nassertThat(getCompatibleLTSVersions(new JavaVersion(\"11\"))).isEqualTo(JAVA_VERSIONS_LTS);\nassertThat(getCompatibleLTSVersions(new JavaVersion(\"17\"))).containsExactly(17, 21);\nassertThat(getCompatibleLTSVersions(new JavaVersion(\"21\"))).containsExactly(21);\nassertThat(getCompatibleLTSVersions(new JavaVersion(\"100\"))).isEmpty();\nassertThat(getCompatibleLTSVersions(JavaVersion.NA)).isEqualTo(JAVA_VERSIONS_LTS);\n}", + "target_code": "assertThat(getCompatibleLTSVersions(new JavaVersion(\"17\"))).containsExactly(17, 21);", + "method_body_after": "void shouldProperlyUseMinJavaVersion() {\nassertThat(getCompatibleLTSVersions(new JavaVersion(\"11\"))).isEqualTo(JAVA_VERSIONS_LTS);\nassertThat(getCompatibleLTSVersions(new JavaVersion(\"17\"))).containsExactly(17, 21);\nassertThat(getCompatibleLTSVersions(new JavaVersion(\"21\"))).containsExactly(21);\nassertThat(getCompatibleLTSVersions(new JavaVersion(\"100\"))).isEmpty();\nassertThat(getCompatibleLTSVersions(JavaVersion.NA)).isEqualTo(JAVA_VERSIONS_LTS);\n}", + "context_before": "class JavaVersionTest {\n@Test\npublic void givenJavaVersion17ShouldReturn17() {\nassertEquals(\"17\", computeJavaVersion(JAVA, \"17\"));\n}\n@Test\npublic void givenJavaVersion22ShouldReturn21() {\nassertEquals(\"21\", computeJavaVersion(JAVA, \"22.0.1\"));\n}\n@Test\npublic void givenJavaVersion21ShouldReturn21() {\nassertEquals(\"21\", computeJavaVersion(JAVA, \"21\"));\n}\n@Test\n@Test\npublic void givenAutoDetectShouldReturnAppropriateVersion() {\nfinal String bestJavaLtsVersion = String.valueOf(determineBestJavaLtsVersion(Runtime.version().feature()));\nassertEquals(bestJavaLtsVersion, computeJavaVersion(JAVA, DETECT_JAVA_RUNTIME_VERSION));\n}\n@Test\npublic void testDetermineBestLtsVersion() {\nassertEquals(17, determineBestJavaLtsVersion(8));\nassertEquals(11, determineBestJavaLtsVersion(11));\nassertEquals(17, determineBestJavaLtsVersion(17));\nassertEquals(17, determineBestJavaLtsVersion(18));\nassertEquals(21, determineBestJavaLtsVersion(21));\nassertEquals(21, determineBestJavaLtsVersion(22));\n}\n@Test\npublic void givenKotlinProjectWithVersion18ShouldReturn17() {\nassertEquals(\"17\", computeJavaVersion(KOTLIN, \"18\"));\n}\n}", + "context_after": "class JavaVersionTest {\n@Test\npublic void givenJavaVersion17ShouldReturn17() {\nassertEquals(\"17\", computeJavaVersion(JAVA, \"17\"));\n}\n@Test\npublic void givenJavaVersion22ShouldReturn21() {\nassertEquals(\"21\", computeJavaVersion(JAVA, \"22.0.1\"));\n}\n@Test\npublic void givenJavaVersion21ShouldReturn21() {\nassertEquals(\"21\", computeJavaVersion(JAVA, \"21\"));\n}\n@Test\n@Test\npublic void givenAutoDetectShouldReturnAppropriateVersion() {\nfinal String bestJavaLtsVersion = String.valueOf(determineBestJavaLtsVersion(Runtime.version().feature()));\nassertEquals(bestJavaLtsVersion, computeJavaVersion(JAVA, DETECT_JAVA_RUNTIME_VERSION));\n}\n@Test\npublic void testDetermineBestLtsVersion() {\nassertEquals(17, determineBestJavaLtsVersion(8));\nassertEquals(11, determineBestJavaLtsVersion(11));\nassertEquals(17, determineBestJavaLtsVersion(17));\nassertEquals(17, determineBestJavaLtsVersion(18));\nassertEquals(21, determineBestJavaLtsVersion(21));\nassertEquals(21, determineBestJavaLtsVersion(22));\n}\n@Test\npublic void givenKotlinProjectWithVersion18ShouldReturn17() {\nassertEquals(\"17\", computeJavaVersion(KOTLIN, \"18\"));\n}\n}" + }, + { + "comment": 
"Thank you for the info!", + "method_body": "private Path getDescriptorSetOutputFile(CodeGenContext context) throws IOException {\nvar dscOutputDir = context.config().getOptionalValue(DESCRIPTOR_SET_OUTPUT_DIR, String.class)\n.map(context.projectDir()::resolve)\n.orElseGet(context::outDir);\nif (Files.notExists(dscOutputDir)) {\nFiles.createDirectories(dscOutputDir);\n}\nvar dscFilename = context.config().getOptionalValue(DESCRIPTOR_SET_FILENAME, String.class)\n.orElse(\"descriptor_set.dsc\");\nreturn dscOutputDir.resolve(dscFilename).normalize();\n}", + "target_code": ".map(context.projectDir()::resolve)", + "method_body_after": "private Path getDescriptorSetOutputFile(CodeGenContext context) throws IOException {\nvar dscOutputDir = context.config().getOptionalValue(DESCRIPTOR_SET_OUTPUT_DIR, String.class)\n.map(context.workDir()::resolve)\n.orElseGet(context::outDir);\nif (Files.notExists(dscOutputDir)) {\nFiles.createDirectories(dscOutputDir);\n}\nvar dscFilename = context.config().getOptionalValue(DESCRIPTOR_SET_FILENAME, String.class)\n.orElse(\"descriptor_set.dsc\");\nreturn dscOutputDir.resolve(dscFilename).normalize();\n}", + "context_before": "class GrpcCodeGen implements CodeGenProvider {\nprivate static final Logger log = Logger.getLogger(GrpcCodeGen.class);\nprivate static final String quarkusProtocPluginMain = \"io.quarkus.grpc.protoc.plugin.MutinyGrpcGenerator\";\nprivate static final String EXE = \"exe\";\nprivate static final String PROTO = \".proto\";\nprivate static final String PROTOC = \"protoc\";\nprivate static final String PROTOC_GROUPID = \"com.google.protobuf\";\nprivate static final String SCAN_DEPENDENCIES_FOR_PROTO = \"quarkus.generate-code.grpc.scan-for-proto\";\nprivate static final String SCAN_DEPENDENCIES_FOR_PROTO_INCLUDE_PATTERN = \"quarkus.generate-code.grpc.scan-for-proto-include.\\\"%s\\\"\";\nprivate static final String SCAN_DEPENDENCIES_FOR_PROTO_EXCLUDE_PATTERN = \"quarkus.generate-code.grpc.scan-for-proto-exclude.\\\"%s\\\"\";\nprivate static final String SCAN_FOR_IMPORTS = \"quarkus.generate-code.grpc.scan-for-imports\";\nprivate static final String POST_PROCESS_SKIP = \"quarkus.generate.code.grpc-post-processing.skip\";\nprivate static final String GENERATE_DESCRIPTOR_SET = \"quarkus.generate-code.grpc.descriptor-set.generate\";\nprivate static final String DESCRIPTOR_SET_OUTPUT_DIR = \"quarkus.generate-code.grpc.descriptor-set.output-dir\";\nprivate static final String DESCRIPTOR_SET_FILENAME = \"quarkus.generate-code.grpc.descriptor-set.name\";\nprivate Executables executables;\nprivate String input;\n@Override\npublic String providerId() {\nreturn \"grpc\";\n}\n@Override\npublic String inputExtension() {\nreturn \"proto\";\n}\n@Override\npublic String inputDirectory() {\nreturn \"proto\";\n}\n@Override\npublic Path getInputDirectory() {\nif (input != null) {\nreturn Path.of(input);\n}\nreturn null;\n}\n@Override\npublic void init(ApplicationModel model, Map properties) {\nthis.input = properties.get(\"quarkus.grpc.codegen.proto-directory\");\n}\n@Override\npublic boolean trigger(CodeGenContext context) throws CodeGenException {\nif (TRUE.toString().equalsIgnoreCase(System.getProperties().getProperty(\"grpc.codegen.skip\", \"false\"))\n|| context.config().getOptionalValue(\"quarkus.grpc.codegen.skip\", Boolean.class).orElse(false)) {\nlog.info(\"Skipping gRPC code generation on user's request\");\nreturn false;\n}\nPath outDir = context.outDir();\nPath workDir = context.workDir();\nSet protoDirs = new LinkedHashSet<>();\ntry {\nList protoFiles = 
new ArrayList<>();\nif (Files.isDirectory(context.inputDir())) {\ntry (Stream protoFilesPaths = Files.walk(context.inputDir())) {\nprotoFilesPaths\n.filter(Files::isRegularFile)\n.filter(s -> s.toString().endsWith(PROTO))\n.map(Path::normalize)\n.map(Path::toAbsolutePath)\n.map(Path::toString)\n.forEach(protoFiles::add);\nprotoDirs.add(context.inputDir().normalize().toAbsolutePath().toString());\n}\n}\nPath dirWithProtosFromDependencies = workDir.resolve(\"protoc-protos-from-dependencies\");\nCollection protoFilesFromDependencies = gatherProtosFromDependencies(dirWithProtosFromDependencies, protoDirs,\ncontext);\nif (!protoFilesFromDependencies.isEmpty()) {\nfor (Path files : protoFilesFromDependencies) {\nvar pathToProtoFile = files.normalize().toAbsolutePath();\nvar pathToParentDir = files.getParent();\nprotoFiles.add(pathToProtoFile.toString());\nprotoDirs.add(pathToParentDir.toString());\n}\n}\nif (!protoFiles.isEmpty()) {\ninitExecutables(workDir, context.applicationModel());\nCollection protosToImport = gatherDirectoriesWithImports(workDir.resolve(\"protoc-dependencies\"),\ncontext);\nList command = new ArrayList<>();\ncommand.add(executables.protoc.toString());\nfor (String protoDir : protoDirs) {\ncommand.add(String.format(\"-I=%s\", escapeWhitespace(protoDir)));\n}\nfor (String protoImportDir : protosToImport) {\ncommand.add(String.format(\"-I=%s\", escapeWhitespace(protoImportDir)));\n}\ncommand.addAll(asList(\"--plugin=protoc-gen-grpc=\" + executables.grpc,\n\"--plugin=protoc-gen-q-grpc=\" + executables.quarkusGrpc,\n\"--q-grpc_out=\" + outDir,\n\"--grpc_out=\" + outDir,\n\"--java_out=\" + outDir));\nif (shouldGenerateDescriptorSet(context.config())) {\ncommand.add(String.format(\"--descriptor_set_out=%s\", getDescriptorSetOutputFile(context)));\n}\ncommand.addAll(protoFiles);\nProcessBuilder processBuilder = new ProcessBuilder(command);\nfinal Process process = ProcessUtil.launchProcess(processBuilder, context.shouldRedirectIO());\nint resultCode = process.waitFor();\nif (resultCode != 0) {\nthrow new CodeGenException(\"Failed to generate Java classes from proto files: \" + protoFiles +\n\" to \" + outDir.toAbsolutePath() + \" with command \" + String.join(\" \", command));\n}\npostprocessing(context, outDir);\nlog.info(\"Successfully finished generating and post-processing sources from proto files\");\nreturn true;\n}\n} catch (IOException | InterruptedException e) {\nthrow new CodeGenException(\n\"Failed to generate java files from proto file in \" + context.inputDir().toAbsolutePath(), e);\n}\nreturn false;\n}\nprivate static void copySanitizedProtoFile(ResolvedDependency artifact, Path protoPath, Path outProtoPath)\nthrows IOException {\nboolean genericServicesFound = false;\ntry (var reader = Files.newBufferedReader(protoPath);\nvar writer = Files.newBufferedWriter(outProtoPath)) {\nString line = reader.readLine();\nwhile (line != null) {\nif (!line.contains(\"java_generic_services\")) {\nwriter.write(line);\nwriter.newLine();\n} else {\ngenericServicesFound = true;\n}\nline = reader.readLine();\n}\n}\nif (genericServicesFound) {\nlog.infof(\"Ignoring option java_generic_services in %s:%s%s.\", artifact.getGroupId(), artifact.getArtifactId(),\nprotoPath);\n}\n}\nprivate void postprocessing(CodeGenContext context, Path outDir) {\nif (TRUE.toString().equalsIgnoreCase(System.getProperties().getProperty(POST_PROCESS_SKIP, \"false\"))\n|| context.config().getOptionalValue(POST_PROCESS_SKIP, Boolean.class).orElse(false)) {\nlog.info(\"Skipping gRPC Post-Processing on user's 
request\");\nreturn;\n}\nnew GrpcPostProcessing(context, outDir).postprocess();\n}\nprivate Collection gatherProtosFromDependencies(Path workDir, Set protoDirectories,\nCodeGenContext context) throws CodeGenException {\nif (context.test()) {\nreturn Collections.emptyList();\n}\nConfig properties = context.config();\nString scanDependencies = properties.getOptionalValue(SCAN_DEPENDENCIES_FOR_PROTO, String.class)\n.orElse(\"none\");\nif (\"none\".equalsIgnoreCase(scanDependencies)) {\nreturn Collections.emptyList();\n}\nboolean scanAll = \"all\".equalsIgnoreCase(scanDependencies);\nList dependenciesToScan = Arrays.stream(scanDependencies.split(\",\")).map(String::trim)\n.collect(Collectors.toList());\nApplicationModel appModel = context.applicationModel();\nList protoFilesFromDependencies = new ArrayList<>();\nfor (ResolvedDependency artifact : appModel.getRuntimeDependencies()) {\nString packageId = String.format(\"%s:%s\", artifact.getGroupId(), artifact.getArtifactId());\nCollection includes = properties\n.getOptionalValue(String.format(SCAN_DEPENDENCIES_FOR_PROTO_INCLUDE_PATTERN, packageId), String.class)\n.map(s -> Arrays.stream(s.split(\",\")).map(String::trim).collect(Collectors.toList()))\n.orElse(List.of());\nCollection excludes = properties\n.getOptionalValue(String.format(SCAN_DEPENDENCIES_FOR_PROTO_EXCLUDE_PATTERN, packageId), String.class)\n.map(s -> Arrays.stream(s.split(\",\")).map(String::trim).collect(Collectors.toList()))\n.orElse(List.of());\nif (scanAll\n|| dependenciesToScan.contains(packageId)) {\nextractProtosFromArtifact(workDir, protoFilesFromDependencies, protoDirectories, artifact, includes, excludes,\ntrue);\n}\n}\nreturn protoFilesFromDependencies;\n}\n@Override\npublic boolean shouldRun(Path sourceDir, Config config) {\nreturn CodeGenProvider.super.shouldRun(sourceDir, config)\n|| isGeneratingFromAppDependenciesEnabled(config);\n}\nprivate boolean isGeneratingFromAppDependenciesEnabled(Config config) {\nreturn config.getOptionalValue(SCAN_DEPENDENCIES_FOR_PROTO, String.class)\n.filter(value -> !\"none\".equals(value)).isPresent();\n}\nprivate boolean shouldGenerateDescriptorSet(Config config) {\nreturn config.getOptionalValue(GENERATE_DESCRIPTOR_SET, Boolean.class).orElse(FALSE);\n}\nprivate Collection gatherDirectoriesWithImports(Path workDir, CodeGenContext context) throws CodeGenException {\nConfig properties = context.config();\nString scanForImports = properties.getOptionalValue(SCAN_FOR_IMPORTS, String.class)\n.orElse(\"com.google.protobuf:protobuf-java\");\nif (\"none\".equals(scanForImports.toLowerCase(Locale.getDefault()))) {\nreturn Collections.emptyList();\n}\nboolean scanAll = \"all\".equals(scanForImports.toLowerCase(Locale.getDefault()));\nList dependenciesToScan = Arrays.stream(scanForImports.split(\",\")).map(String::trim)\n.collect(Collectors.toList());\nSet importDirectories = new HashSet<>();\nApplicationModel appModel = context.applicationModel();\nfor (ResolvedDependency artifact : appModel.getRuntimeDependencies()) {\nif (scanAll\n|| dependenciesToScan.contains(\nString.format(\"%s:%s\", artifact.getGroupId(), artifact.getArtifactId()))) {\nextractProtosFromArtifact(workDir, new ArrayList<>(), importDirectories, artifact, List.of(),\nList.of(), false);\n}\n}\nreturn importDirectories;\n}\nprivate void extractProtosFromArtifact(Path workDir, Collection protoFiles,\nSet protoDirectories, ResolvedDependency artifact, Collection filesToInclude,\nCollection filesToExclude, boolean isDependency) throws CodeGenException {\ntry 
{\nartifact.getContentTree(new PathFilter(filesToInclude, filesToExclude)).walk(\npathVisit -> {\nPath path = pathVisit.getPath();\nif (Files.isRegularFile(path) && path.getFileName().toString().endsWith(PROTO)) {\nPath root = pathVisit.getRoot();\nif (Files.isDirectory(root)) {\nprotoFiles.add(path);\nprotoDirectories.add(path.getParent().normalize().toAbsolutePath().toString());\n} else {\nPath relativePath = path.getRoot().relativize(path);\nString uniqueName = artifact.getGroupId() + \":\" + artifact.getArtifactId();\nif (artifact.getVersion() != null) {\nuniqueName += \":\" + artifact.getVersion();\n}\nif (artifact.getClassifier() != null) {\nuniqueName += \"-\" + artifact.getClassifier();\n}\nPath protoUnzipDir = workDir\n.resolve(HashUtil.sha1(uniqueName))\n.normalize().toAbsolutePath();\ntry {\nFiles.createDirectories(protoUnzipDir);\nif (filesToInclude.isEmpty()) {\nprotoDirectories.add(protoUnzipDir.toString());\n}\n} catch (IOException e) {\nthrow new GrpcCodeGenException(\"Failed to create directory: \" + protoUnzipDir, e);\n}\nPath outPath = protoUnzipDir;\nfor (Path part : relativePath) {\noutPath = outPath.resolve(part.toString());\n}\ntry {\nFiles.createDirectories(outPath.getParent());\nif (isDependency) {\ncopySanitizedProtoFile(artifact, path, outPath);\n} else {\nFiles.copy(path, outPath, StandardCopyOption.REPLACE_EXISTING);\n}\nprotoFiles.add(outPath);\n} catch (IOException e) {\nthrow new GrpcCodeGenException(\"Failed to extract proto file\" + path + \" to target: \"\n+ outPath, e);\n}\n}\n}\n});\n} catch (GrpcCodeGenException e) {\nthrow new CodeGenException(e.getMessage(), e);\n}\n}\nprivate String escapeWhitespace(String path) {\nif (OS.determineOS() == OS.LINUX) {\nreturn path.replace(\" \", \"\\\\ \");\n} else {\nreturn path;\n}\n}\nprivate void initExecutables(Path workDir, ApplicationModel model) throws CodeGenException {\nif (executables == null) {\nPath protocPath;\nString protocPathProperty = System.getProperty(\"quarkus.grpc.protoc-path\");\nString classifier = System.getProperty(\"quarkus.grpc.protoc-os-classifier\", osClassifier());\nif (protocPathProperty == null) {\nprotocPath = findArtifactPath(model, PROTOC_GROUPID, PROTOC, classifier, EXE);\n} else {\nprotocPath = Paths.get(protocPathProperty);\n}\nPath protocExe = makeExecutableFromPath(workDir, PROTOC_GROUPID, PROTOC, classifier, \"exe\", protocPath);\nPath protocGrpcPluginExe = prepareExecutable(workDir, model,\n\"io.grpc\", \"protoc-gen-grpc-java\", classifier, \"exe\");\nPath quarkusGrpcPluginExe = prepareQuarkusGrpcExecutable(model, workDir);\nexecutables = new Executables(protocExe, protocGrpcPluginExe, quarkusGrpcPluginExe);\n}\n}\nprivate Path prepareExecutable(Path buildDir, ApplicationModel model,\nString groupId, String artifactId, String classifier, String packaging) throws CodeGenException {\nPath artifactPath = findArtifactPath(model, groupId, artifactId, classifier, packaging);\nreturn makeExecutableFromPath(buildDir, groupId, artifactId, classifier, packaging, artifactPath);\n}\nprivate Path makeExecutableFromPath(Path buildDir, String groupId, String artifactId, String classifier, String packaging,\nPath artifactPath) throws CodeGenException {\nPath exe = buildDir.resolve(String.format(\"%s-%s-%s-%s\", groupId, artifactId, classifier, packaging));\nif (Files.exists(exe)) {\nreturn exe;\n}\nif (artifactPath == null) {\nString location = String.format(\"%s:%s:%s:%s\", groupId, artifactId, classifier, packaging);\nthrow new CodeGenException(\"Failed to find \" + location + \" among 
dependencies\");\n}\ntry {\nFiles.copy(artifactPath, exe, StandardCopyOption.REPLACE_EXISTING);\n} catch (IOException e) {\nthrow new CodeGenException(\"Failed to copy file: \" + artifactPath + \" to \" + exe, e);\n}\nif (!exe.toFile().setExecutable(true)) {\nthrow new CodeGenException(\"Failed to make the file executable: \" + exe);\n}\nreturn exe;\n}\nprivate static Path findArtifactPath(ApplicationModel model, String groupId, String artifactId, String classifier,\nString packaging) {\nPath artifactPath = null;\nfor (ResolvedDependency artifact : model.getDependencies()) {\nif (groupId.equals(artifact.getGroupId())\n&& artifactId.equals(artifact.getArtifactId())\n&& classifier.equals(artifact.getClassifier())\n&& packaging.equals(artifact.getType())) {\nartifactPath = artifact.getResolvedPaths().getSinglePath();\n}\n}\nreturn artifactPath;\n}\nprivate String osClassifier() throws CodeGenException {\nString architecture = OS.getArchitecture();\nswitch (OS.determineOS()) {\ncase LINUX:\nreturn \"linux-\" + architecture;\ncase WINDOWS:\nreturn \"windows-\" + architecture;\ncase MAC:\nreturn \"osx-\" + architecture;\ndefault:\nthrow new CodeGenException(\n\"Unsupported OS, please use maven plugin instead to generate Java classes from proto files\");\n}\n}\nprivate static Path prepareQuarkusGrpcExecutable(ApplicationModel appModel, Path buildDir) throws CodeGenException {\nPath pluginPath = findArtifactPath(appModel, \"io.quarkus\", \"quarkus-grpc-protoc-plugin\", \"shaded\", \"jar\");\nif (pluginPath == null) {\nthrow new CodeGenException(\"Failed to find Quarkus gRPC protoc plugin among dependencies\");\n}\nif (OS.determineOS() != OS.WINDOWS) {\nreturn writeScript(buildDir, pluginPath, \"\n} else {\nreturn writeScript(buildDir, pluginPath, \"@echo off\\r\\n\", \".cmd\");\n}\n}\nprivate static Path writeScript(Path buildDir, Path pluginPath, String shebang, String suffix) throws CodeGenException {\nPath script;\ntry {\nscript = Files.createTempFile(buildDir, \"quarkus-grpc\", suffix);\ntry (BufferedWriter writer = Files.newBufferedWriter(script)) {\nwriter.write(shebang);\nwritePluginExeCmd(pluginPath, writer);\n}\n} catch (IOException e) {\nthrow new CodeGenException(\"Failed to create a wrapper script for quarkus-grpc plugin\", e);\n}\nif (!script.toFile().setExecutable(true)) {\nthrow new CodeGenFailureException(\"failed to set file: \" + script + \" executable. 
Protoc invocation may fail\");\n}\nreturn script;\n}\nprivate static void writePluginExeCmd(Path pluginPath, BufferedWriter writer) throws IOException {\nwriter.write(\"\\\"\" + JavaBinFinder.findBin() + \"\\\" -cp \\\"\" +\npluginPath.toAbsolutePath() + \"\\\" \" + quarkusProtocPluginMain);\nwriter.newLine();\n}\nprivate static class Executables {\nfinal Path protoc;\nfinal Path grpc;\nfinal Path quarkusGrpc;\nExecutables(Path protoc, Path grpc, Path quarkusGrpc) {\nthis.protoc = protoc;\nthis.grpc = grpc;\nthis.quarkusGrpc = quarkusGrpc;\n}\n}\nprivate static class GrpcCodeGenException extends RuntimeException {\nprivate GrpcCodeGenException(String message, Exception cause) {\nsuper(message, cause);\n}\n}\n}", + "context_after": "class GrpcCodeGen implements CodeGenProvider {\nprivate static final Logger log = Logger.getLogger(GrpcCodeGen.class);\nprivate static final String quarkusProtocPluginMain = \"io.quarkus.grpc.protoc.plugin.MutinyGrpcGenerator\";\nprivate static final String EXE = \"exe\";\nprivate static final String PROTO = \".proto\";\nprivate static final String PROTOC = \"protoc\";\nprivate static final String PROTOC_GROUPID = \"com.google.protobuf\";\nprivate static final String SCAN_DEPENDENCIES_FOR_PROTO = \"quarkus.generate-code.grpc.scan-for-proto\";\nprivate static final String SCAN_DEPENDENCIES_FOR_PROTO_INCLUDE_PATTERN = \"quarkus.generate-code.grpc.scan-for-proto-include.\\\"%s\\\"\";\nprivate static final String SCAN_DEPENDENCIES_FOR_PROTO_EXCLUDE_PATTERN = \"quarkus.generate-code.grpc.scan-for-proto-exclude.\\\"%s\\\"\";\nprivate static final String SCAN_FOR_IMPORTS = \"quarkus.generate-code.grpc.scan-for-imports\";\nprivate static final String POST_PROCESS_SKIP = \"quarkus.generate.code.grpc-post-processing.skip\";\nprivate static final String GENERATE_DESCRIPTOR_SET = \"quarkus.generate-code.grpc.descriptor-set.generate\";\nprivate static final String DESCRIPTOR_SET_OUTPUT_DIR = \"quarkus.generate-code.grpc.descriptor-set.output-dir\";\nprivate static final String DESCRIPTOR_SET_FILENAME = \"quarkus.generate-code.grpc.descriptor-set.name\";\nprivate Executables executables;\nprivate String input;\n@Override\npublic String providerId() {\nreturn \"grpc\";\n}\n@Override\npublic String inputExtension() {\nreturn \"proto\";\n}\n@Override\npublic String inputDirectory() {\nreturn \"proto\";\n}\n@Override\npublic Path getInputDirectory() {\nif (input != null) {\nreturn Path.of(input);\n}\nreturn null;\n}\n@Override\npublic void init(ApplicationModel model, Map properties) {\nthis.input = properties.get(\"quarkus.grpc.codegen.proto-directory\");\n}\n@Override\npublic boolean trigger(CodeGenContext context) throws CodeGenException {\nif (TRUE.toString().equalsIgnoreCase(System.getProperties().getProperty(\"grpc.codegen.skip\", \"false\"))\n|| context.config().getOptionalValue(\"quarkus.grpc.codegen.skip\", Boolean.class).orElse(false)) {\nlog.info(\"Skipping gRPC code generation on user's request\");\nreturn false;\n}\nPath outDir = context.outDir();\nPath workDir = context.workDir();\nSet protoDirs = new LinkedHashSet<>();\ntry {\nList protoFiles = new ArrayList<>();\nif (Files.isDirectory(context.inputDir())) {\ntry (Stream protoFilesPaths = Files.walk(context.inputDir())) {\nprotoFilesPaths\n.filter(Files::isRegularFile)\n.filter(s -> s.toString().endsWith(PROTO))\n.map(Path::normalize)\n.map(Path::toAbsolutePath)\n.map(Path::toString)\n.forEach(protoFiles::add);\nprotoDirs.add(context.inputDir().normalize().toAbsolutePath().toString());\n}\n}\nPath 
dirWithProtosFromDependencies = workDir.resolve(\"protoc-protos-from-dependencies\");\nCollection protoFilesFromDependencies = gatherProtosFromDependencies(dirWithProtosFromDependencies, protoDirs,\ncontext);\nif (!protoFilesFromDependencies.isEmpty()) {\nfor (Path files : protoFilesFromDependencies) {\nvar pathToProtoFile = files.normalize().toAbsolutePath();\nvar pathToParentDir = files.getParent();\nprotoFiles.add(pathToProtoFile.toString());\nprotoDirs.add(pathToParentDir.toString());\n}\n}\nif (!protoFiles.isEmpty()) {\ninitExecutables(workDir, context.applicationModel());\nCollection protosToImport = gatherDirectoriesWithImports(workDir.resolve(\"protoc-dependencies\"),\ncontext);\nList command = new ArrayList<>();\ncommand.add(executables.protoc.toString());\nfor (String protoDir : protoDirs) {\ncommand.add(String.format(\"-I=%s\", escapeWhitespace(protoDir)));\n}\nfor (String protoImportDir : protosToImport) {\ncommand.add(String.format(\"-I=%s\", escapeWhitespace(protoImportDir)));\n}\ncommand.addAll(asList(\"--plugin=protoc-gen-grpc=\" + executables.grpc,\n\"--plugin=protoc-gen-q-grpc=\" + executables.quarkusGrpc,\n\"--q-grpc_out=\" + outDir,\n\"--grpc_out=\" + outDir,\n\"--java_out=\" + outDir));\nif (shouldGenerateDescriptorSet(context.config())) {\ncommand.add(String.format(\"--descriptor_set_out=%s\", getDescriptorSetOutputFile(context)));\n}\ncommand.addAll(protoFiles);\nProcessBuilder processBuilder = new ProcessBuilder(command);\nfinal Process process = ProcessUtil.launchProcess(processBuilder, context.shouldRedirectIO());\nint resultCode = process.waitFor();\nif (resultCode != 0) {\nthrow new CodeGenException(\"Failed to generate Java classes from proto files: \" + protoFiles +\n\" to \" + outDir.toAbsolutePath() + \" with command \" + String.join(\" \", command));\n}\npostprocessing(context, outDir);\nlog.info(\"Successfully finished generating and post-processing sources from proto files\");\nreturn true;\n}\n} catch (IOException | InterruptedException e) {\nthrow new CodeGenException(\n\"Failed to generate java files from proto file in \" + context.inputDir().toAbsolutePath(), e);\n}\nreturn false;\n}\nprivate static void copySanitizedProtoFile(ResolvedDependency artifact, Path protoPath, Path outProtoPath)\nthrows IOException {\nboolean genericServicesFound = false;\ntry (var reader = Files.newBufferedReader(protoPath);\nvar writer = Files.newBufferedWriter(outProtoPath)) {\nString line = reader.readLine();\nwhile (line != null) {\nif (!line.contains(\"java_generic_services\")) {\nwriter.write(line);\nwriter.newLine();\n} else {\ngenericServicesFound = true;\n}\nline = reader.readLine();\n}\n}\nif (genericServicesFound) {\nlog.infof(\"Ignoring option java_generic_services in %s:%s%s.\", artifact.getGroupId(), artifact.getArtifactId(),\nprotoPath);\n}\n}\nprivate void postprocessing(CodeGenContext context, Path outDir) {\nif (TRUE.toString().equalsIgnoreCase(System.getProperties().getProperty(POST_PROCESS_SKIP, \"false\"))\n|| context.config().getOptionalValue(POST_PROCESS_SKIP, Boolean.class).orElse(false)) {\nlog.info(\"Skipping gRPC Post-Processing on user's request\");\nreturn;\n}\nnew GrpcPostProcessing(context, outDir).postprocess();\n}\nprivate Collection gatherProtosFromDependencies(Path workDir, Set protoDirectories,\nCodeGenContext context) throws CodeGenException {\nif (context.test()) {\nreturn Collections.emptyList();\n}\nConfig properties = context.config();\nString scanDependencies = properties.getOptionalValue(SCAN_DEPENDENCIES_FOR_PROTO, 
String.class)\n.orElse(\"none\");\nif (\"none\".equalsIgnoreCase(scanDependencies)) {\nreturn Collections.emptyList();\n}\nboolean scanAll = \"all\".equalsIgnoreCase(scanDependencies);\nList dependenciesToScan = Arrays.stream(scanDependencies.split(\",\")).map(String::trim)\n.collect(Collectors.toList());\nApplicationModel appModel = context.applicationModel();\nList protoFilesFromDependencies = new ArrayList<>();\nfor (ResolvedDependency artifact : appModel.getRuntimeDependencies()) {\nString packageId = String.format(\"%s:%s\", artifact.getGroupId(), artifact.getArtifactId());\nCollection includes = properties\n.getOptionalValue(String.format(SCAN_DEPENDENCIES_FOR_PROTO_INCLUDE_PATTERN, packageId), String.class)\n.map(s -> Arrays.stream(s.split(\",\")).map(String::trim).collect(Collectors.toList()))\n.orElse(List.of());\nCollection excludes = properties\n.getOptionalValue(String.format(SCAN_DEPENDENCIES_FOR_PROTO_EXCLUDE_PATTERN, packageId), String.class)\n.map(s -> Arrays.stream(s.split(\",\")).map(String::trim).collect(Collectors.toList()))\n.orElse(List.of());\nif (scanAll\n|| dependenciesToScan.contains(packageId)) {\nextractProtosFromArtifact(workDir, protoFilesFromDependencies, protoDirectories, artifact, includes, excludes,\ntrue);\n}\n}\nreturn protoFilesFromDependencies;\n}\n@Override\npublic boolean shouldRun(Path sourceDir, Config config) {\nreturn CodeGenProvider.super.shouldRun(sourceDir, config)\n|| isGeneratingFromAppDependenciesEnabled(config);\n}\nprivate boolean isGeneratingFromAppDependenciesEnabled(Config config) {\nreturn config.getOptionalValue(SCAN_DEPENDENCIES_FOR_PROTO, String.class)\n.filter(value -> !\"none\".equals(value)).isPresent();\n}\nprivate boolean shouldGenerateDescriptorSet(Config config) {\nreturn config.getOptionalValue(GENERATE_DESCRIPTOR_SET, Boolean.class).orElse(FALSE);\n}\nprivate Collection gatherDirectoriesWithImports(Path workDir, CodeGenContext context) throws CodeGenException {\nConfig properties = context.config();\nString scanForImports = properties.getOptionalValue(SCAN_FOR_IMPORTS, String.class)\n.orElse(\"com.google.protobuf:protobuf-java\");\nif (\"none\".equals(scanForImports.toLowerCase(Locale.getDefault()))) {\nreturn Collections.emptyList();\n}\nboolean scanAll = \"all\".equals(scanForImports.toLowerCase(Locale.getDefault()));\nList dependenciesToScan = Arrays.stream(scanForImports.split(\",\")).map(String::trim)\n.collect(Collectors.toList());\nSet importDirectories = new HashSet<>();\nApplicationModel appModel = context.applicationModel();\nfor (ResolvedDependency artifact : appModel.getRuntimeDependencies()) {\nif (scanAll\n|| dependenciesToScan.contains(\nString.format(\"%s:%s\", artifact.getGroupId(), artifact.getArtifactId()))) {\nextractProtosFromArtifact(workDir, new ArrayList<>(), importDirectories, artifact, List.of(),\nList.of(), false);\n}\n}\nreturn importDirectories;\n}\nprivate void extractProtosFromArtifact(Path workDir, Collection protoFiles,\nSet protoDirectories, ResolvedDependency artifact, Collection filesToInclude,\nCollection filesToExclude, boolean isDependency) throws CodeGenException {\ntry {\nartifact.getContentTree(new PathFilter(filesToInclude, filesToExclude)).walk(\npathVisit -> {\nPath path = pathVisit.getPath();\nif (Files.isRegularFile(path) && path.getFileName().toString().endsWith(PROTO)) {\nPath root = pathVisit.getRoot();\nif (Files.isDirectory(root)) {\nprotoFiles.add(path);\nprotoDirectories.add(path.getParent().normalize().toAbsolutePath().toString());\n} else {\nPath relativePath = 
path.getRoot().relativize(path);\nString uniqueName = artifact.getGroupId() + \":\" + artifact.getArtifactId();\nif (artifact.getVersion() != null) {\nuniqueName += \":\" + artifact.getVersion();\n}\nif (artifact.getClassifier() != null) {\nuniqueName += \"-\" + artifact.getClassifier();\n}\nPath protoUnzipDir = workDir\n.resolve(HashUtil.sha1(uniqueName))\n.normalize().toAbsolutePath();\ntry {\nFiles.createDirectories(protoUnzipDir);\nif (filesToInclude.isEmpty()) {\nprotoDirectories.add(protoUnzipDir.toString());\n}\n} catch (IOException e) {\nthrow new GrpcCodeGenException(\"Failed to create directory: \" + protoUnzipDir, e);\n}\nPath outPath = protoUnzipDir;\nfor (Path part : relativePath) {\noutPath = outPath.resolve(part.toString());\n}\ntry {\nFiles.createDirectories(outPath.getParent());\nif (isDependency) {\ncopySanitizedProtoFile(artifact, path, outPath);\n} else {\nFiles.copy(path, outPath, StandardCopyOption.REPLACE_EXISTING);\n}\nprotoFiles.add(outPath);\n} catch (IOException e) {\nthrow new GrpcCodeGenException(\"Failed to extract proto file\" + path + \" to target: \"\n+ outPath, e);\n}\n}\n}\n});\n} catch (GrpcCodeGenException e) {\nthrow new CodeGenException(e.getMessage(), e);\n}\n}\nprivate String escapeWhitespace(String path) {\nif (OS.determineOS() == OS.LINUX) {\nreturn path.replace(\" \", \"\\\\ \");\n} else {\nreturn path;\n}\n}\nprivate void initExecutables(Path workDir, ApplicationModel model) throws CodeGenException {\nif (executables == null) {\nPath protocPath;\nString protocPathProperty = System.getProperty(\"quarkus.grpc.protoc-path\");\nString classifier = System.getProperty(\"quarkus.grpc.protoc-os-classifier\", osClassifier());\nif (protocPathProperty == null) {\nprotocPath = findArtifactPath(model, PROTOC_GROUPID, PROTOC, classifier, EXE);\n} else {\nprotocPath = Paths.get(protocPathProperty);\n}\nPath protocExe = makeExecutableFromPath(workDir, PROTOC_GROUPID, PROTOC, classifier, \"exe\", protocPath);\nPath protocGrpcPluginExe = prepareExecutable(workDir, model,\n\"io.grpc\", \"protoc-gen-grpc-java\", classifier, \"exe\");\nPath quarkusGrpcPluginExe = prepareQuarkusGrpcExecutable(model, workDir);\nexecutables = new Executables(protocExe, protocGrpcPluginExe, quarkusGrpcPluginExe);\n}\n}\nprivate Path prepareExecutable(Path buildDir, ApplicationModel model,\nString groupId, String artifactId, String classifier, String packaging) throws CodeGenException {\nPath artifactPath = findArtifactPath(model, groupId, artifactId, classifier, packaging);\nreturn makeExecutableFromPath(buildDir, groupId, artifactId, classifier, packaging, artifactPath);\n}\nprivate Path makeExecutableFromPath(Path buildDir, String groupId, String artifactId, String classifier, String packaging,\nPath artifactPath) throws CodeGenException {\nPath exe = buildDir.resolve(String.format(\"%s-%s-%s-%s\", groupId, artifactId, classifier, packaging));\nif (Files.exists(exe)) {\nreturn exe;\n}\nif (artifactPath == null) {\nString location = String.format(\"%s:%s:%s:%s\", groupId, artifactId, classifier, packaging);\nthrow new CodeGenException(\"Failed to find \" + location + \" among dependencies\");\n}\ntry {\nFiles.copy(artifactPath, exe, StandardCopyOption.REPLACE_EXISTING);\n} catch (IOException e) {\nthrow new CodeGenException(\"Failed to copy file: \" + artifactPath + \" to \" + exe, e);\n}\nif (!exe.toFile().setExecutable(true)) {\nthrow new CodeGenException(\"Failed to make the file executable: \" + exe);\n}\nreturn exe;\n}\nprivate static Path findArtifactPath(ApplicationModel model, 
String groupId, String artifactId, String classifier,\nString packaging) {\nPath artifactPath = null;\nfor (ResolvedDependency artifact : model.getDependencies()) {\nif (groupId.equals(artifact.getGroupId())\n&& artifactId.equals(artifact.getArtifactId())\n&& classifier.equals(artifact.getClassifier())\n&& packaging.equals(artifact.getType())) {\nartifactPath = artifact.getResolvedPaths().getSinglePath();\n}\n}\nreturn artifactPath;\n}\nprivate String osClassifier() throws CodeGenException {\nString architecture = OS.getArchitecture();\nswitch (OS.determineOS()) {\ncase LINUX:\nreturn \"linux-\" + architecture;\ncase WINDOWS:\nreturn \"windows-\" + architecture;\ncase MAC:\nreturn \"osx-\" + architecture;\ndefault:\nthrow new CodeGenException(\n\"Unsupported OS, please use maven plugin instead to generate Java classes from proto files\");\n}\n}\nprivate static Path prepareQuarkusGrpcExecutable(ApplicationModel appModel, Path buildDir) throws CodeGenException {\nPath pluginPath = findArtifactPath(appModel, \"io.quarkus\", \"quarkus-grpc-protoc-plugin\", \"shaded\", \"jar\");\nif (pluginPath == null) {\nthrow new CodeGenException(\"Failed to find Quarkus gRPC protoc plugin among dependencies\");\n}\nif (OS.determineOS() != OS.WINDOWS) {\nreturn writeScript(buildDir, pluginPath, \"#!/bin/sh\\n\", \".sh\");\n} else {\nreturn writeScript(buildDir, pluginPath, \"@echo off\\r\\n\", \".cmd\");\n}\n}\nprivate static Path writeScript(Path buildDir, Path pluginPath, String shebang, String suffix) throws CodeGenException {\nPath script;\ntry {\nscript = Files.createTempFile(buildDir, \"quarkus-grpc\", suffix);\ntry (BufferedWriter writer = Files.newBufferedWriter(script)) {\nwriter.write(shebang);\nwritePluginExeCmd(pluginPath, writer);\n}\n} catch (IOException e) {\nthrow new CodeGenException(\"Failed to create a wrapper script for quarkus-grpc plugin\", e);\n}\nif (!script.toFile().setExecutable(true)) {\nthrow new CodeGenFailureException(\"failed to set file: \" + script + \" executable. Protoc invocation may fail\");\n}\nreturn script;\n}\nprivate static void writePluginExeCmd(Path pluginPath, BufferedWriter writer) throws IOException {\nwriter.write(\"\\\"\" + JavaBinFinder.findBin() + \"\\\" -cp \\\"\" +\npluginPath.toAbsolutePath() + \"\\\" \" + quarkusProtocPluginMain);\nwriter.newLine();\n}\nprivate static class Executables {\nfinal Path protoc;\nfinal Path grpc;\nfinal Path quarkusGrpc;\nExecutables(Path protoc, Path grpc, Path quarkusGrpc) {\nthis.protoc = protoc;\nthis.grpc = grpc;\nthis.quarkusGrpc = quarkusGrpc;\n}\n}\nprivate static class GrpcCodeGenException extends RuntimeException {\nprivate GrpcCodeGenException(String message, Exception cause) {\nsuper(message, cause);\n}\n}\n}" }, { "comment": "Can't this happen outside of the `MetricRegistryImpl` when we create the `MetricReporter` or is there a contract that certain operations can only be when we create the `MetricRegistryImpl`? If the `MetricReporter` depend on the `MetricRegistryImpl` then I understand why we need a factory. 
But if not, then I think it would be easier to directly start the `MetricReporters`.", + "method_body": "public MetricRegistryImpl(MetricRegistryConfiguration config) {\nthis.maximumFramesize = config.getQueryServiceMessageSizeLimit();\nthis.scopeFormats = config.getScopeFormats();\nthis.globalDelimiter = config.getDelimiter();\nthis.delimiters = new ArrayList<>(10);\nthis.terminationFuture = new CompletableFuture<>();\nthis.isShutdown = false;\nthis.reporters = new ArrayList<>(4);\nList reporterConfigurations = config.getReporterSetups();\nthis.executor = Executors.newSingleThreadScheduledExecutor(new ExecutorThreadFactory(\"Flink-MetricRegistry\"));\nthis.queryService = null;\nthis.metricQueryServicePath = null;\nif (reporterConfigurations.isEmpty()) {\nLOG.info(\"No metrics reporter configured, no metrics will be exposed/reported.\");\n} else {\nfor (MetricRegistryConfiguration.ReporterSetup reporterSetup : reporterConfigurations) {\nfinal String namedReporter = reporterSetup.getName();\nfinal MetricConfig metricConfig = reporterSetup.getConfiguration();\ntry {\nString configuredPeriod = metricConfig.getString(ConfigConstants.METRICS_REPORTER_INTERVAL_SUFFIX, null);\nTimeUnit timeunit = TimeUnit.SECONDS;\nlong period = 10;\nif (configuredPeriod != null) {\ntry {\nString[] interval = configuredPeriod.split(\" \");\nperiod = Long.parseLong(interval[0]);\ntimeunit = TimeUnit.valueOf(interval[1]);\n}\ncatch (Exception e) {\nLOG.error(\"Cannot parse report interval from config: \" + configuredPeriod +\n\" - please use values like '10 SECONDS' or '500 MILLISECONDS'. \" +\n\"Using default reporting interval.\");\n}\n}\nfinal MetricReporter reporterInstance = reporterSetup.getSupplier().get();\nfinal String className = reporterInstance.getClass().getName();\nLOG.info(\"Configuring {} with {}.\", namedReporter, metricConfig);\nreporterInstance.open(metricConfig);\nif (reporterInstance instanceof Scheduled) {\nLOG.info(\"Periodically reporting metrics in intervals of {} {} for reporter {} of type {}.\", period, timeunit.name(), namedReporter, className);\nexecutor.scheduleWithFixedDelay(\nnew MetricRegistryImpl.ReporterTask((Scheduled) reporterInstance), period, period, timeunit);\n} else {\nLOG.info(\"Reporting metrics for reporter {} of type {}.\", namedReporter, className);\n}\nreporters.add(reporterInstance);\nString delimiterForReporter = metricConfig.getString(ConfigConstants.METRICS_REPORTER_SCOPE_DELIMITER, String.valueOf(globalDelimiter));\nif (delimiterForReporter.length() != 1) {\nLOG.warn(\"Failed to parse delimiter '{}' for reporter '{}', using global delimiter '{}'.\", delimiterForReporter, namedReporter, globalDelimiter);\ndelimiterForReporter = String.valueOf(globalDelimiter);\n}\nthis.delimiters.add(delimiterForReporter.charAt(0));\n}\ncatch (Throwable t) {\nLOG.error(\"Could not instantiate metrics reporter {}. 
Metrics might not be exposed/reported.\", namedReporter, t);\n}\n}\n}\n}", + "target_code": "final MetricReporter reporterInstance = reporterSetup.getSupplier().get();", + "method_body_after": "public MetricRegistryImpl(MetricRegistryConfiguration config) {\nthis(config, Collections.emptyList());\n}", + "context_before": "class MetricRegistryImpl implements MetricRegistry {\nstatic final Logger LOG = LoggerFactory.getLogger(MetricRegistryImpl.class);\nprivate final Object lock = new Object();\nprivate final List reporters;\nprivate final ScheduledExecutorService executor;\nprivate final ScopeFormats scopeFormats;\nprivate final char globalDelimiter;\nprivate final List delimiters;\nprivate final CompletableFuture terminationFuture;\nprivate final long maximumFramesize;\n@Nullable\nprivate ActorRef queryService;\n@Nullable\nprivate String metricQueryServicePath;\nprivate ViewUpdater viewUpdater;\nprivate boolean isShutdown;\n/**\n* Creates a new MetricRegistry and starts the configured reporter.\n*/\n/**\n* Initializes the MetricQueryService.\n*\n* @param actorSystem ActorSystem to create the MetricQueryService on\n* @param resourceID resource ID used to disambiguate the actor name\n*/\npublic void startQueryService(ActorSystem actorSystem, ResourceID resourceID) {\nsynchronized (lock) {\nPreconditions.checkState(!isShutdown(), \"The metric registry has already been shut down.\");\ntry {\nqueryService = MetricQueryService.startMetricQueryService(actorSystem, resourceID, maximumFramesize);\nmetricQueryServicePath = AkkaUtils.getAkkaURL(actorSystem, queryService);\n} catch (Exception e) {\nLOG.warn(\"Could not start MetricDumpActor. No metrics will be submitted to the WebInterface.\", e);\n}\n}\n}\n/**\n* Returns the address under which the {@link MetricQueryService} is reachable.\n*\n* @return address of the metric query service\n*/\n@Override\n@Nullable\npublic String getMetricQueryServicePath() {\nreturn metricQueryServicePath;\n}\n@Override\npublic char getDelimiter() {\nreturn this.globalDelimiter;\n}\n@Override\npublic char getDelimiter(int reporterIndex) {\ntry {\nreturn delimiters.get(reporterIndex);\n} catch (IndexOutOfBoundsException e) {\nLOG.warn(\"Delimiter for reporter index {} not found, returning global delimiter.\", reporterIndex);\nreturn this.globalDelimiter;\n}\n}\n@Override\npublic int getNumberReporters() {\nreturn reporters.size();\n}\n@VisibleForTesting\npublic List getReporters() {\nreturn reporters;\n}\n/**\n* Returns whether this registry has been shutdown.\n*\n* @return true, if this registry was shutdown, otherwise false\n*/\npublic boolean isShutdown() {\nsynchronized (lock) {\nreturn isShutdown;\n}\n}\n/**\n* Shuts down this registry and the associated {@link MetricReporter}.\n*\n*
NOTE: This operation is asynchronous and returns a future which is completed\n* once the shutdown operation has been completed.\n*\n* @return Future which is completed once the {@link MetricRegistryImpl}\n* is shut down.\n*/\npublic CompletableFuture shutdown() {\nsynchronized (lock) {\nif (isShutdown) {\nreturn terminationFuture;\n} else {\nisShutdown = true;\nfinal Collection> terminationFutures = new ArrayList<>(3);\nfinal Time gracePeriod = Time.seconds(1L);\nif (queryService != null) {\nfinal CompletableFuture queryServiceTerminationFuture = ActorUtils.nonBlockingShutDown(\ngracePeriod.toMilliseconds(),\nTimeUnit.MILLISECONDS,\nqueryService);\nterminationFutures.add(queryServiceTerminationFuture);\n}\nThrowable throwable = null;\nfor (MetricReporter reporter : reporters) {\ntry {\nreporter.close();\n} catch (Throwable t) {\nthrowable = ExceptionUtils.firstOrSuppressed(t, throwable);\n}\n}\nreporters.clear();\nif (throwable != null) {\nterminationFutures.add(\nFutureUtils.completedExceptionally(\nnew FlinkException(\"Could not shut down the metric reporters properly.\", throwable)));\n}\nfinal CompletableFuture executorShutdownFuture = ExecutorUtils.nonBlockingShutdown(\ngracePeriod.toMilliseconds(),\nTimeUnit.MILLISECONDS,\nexecutor);\nterminationFutures.add(executorShutdownFuture);\nFutureUtils\n.completeAll(terminationFutures)\n.whenComplete(\n(Void ignored, Throwable error) -> {\nif (error != null) {\nterminationFuture.completeExceptionally(error);\n} else {\nterminationFuture.complete(null);\n}\n});\nreturn terminationFuture;\n}\n}\n}\n@Override\npublic ScopeFormats getScopeFormats() {\nreturn scopeFormats;\n}\n@Override\npublic void register(Metric metric, String metricName, AbstractMetricGroup group) {\nsynchronized (lock) {\nif (isShutdown()) {\nLOG.warn(\"Cannot register metric, because the MetricRegistry has already been shut down.\");\n} else {\nif (reporters != null) {\nfor (int i = 0; i < reporters.size(); i++) {\nMetricReporter reporter = reporters.get(i);\ntry {\nif (reporter != null) {\nFrontMetricGroup front = new FrontMetricGroup>(i, group);\nreporter.notifyOfAddedMetric(metric, metricName, front);\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\n}\n}\ntry {\nif (queryService != null) {\nMetricQueryService.notifyOfAddedMetric(queryService, metric, metricName, group);\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\ntry {\nif (metric instanceof View) {\nif (viewUpdater == null) {\nviewUpdater = new ViewUpdater(executor);\n}\nviewUpdater.notifyOfAddedView((View) metric);\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\n}\n}\n}\n@Override\npublic void unregister(Metric metric, String metricName, AbstractMetricGroup group) {\nsynchronized (lock) {\nif (isShutdown()) {\nLOG.warn(\"Cannot unregister metric, because the MetricRegistry has already been shut down.\");\n} else {\nif (reporters != null) {\nfor (int i = 0; i < reporters.size(); i++) {\ntry {\nMetricReporter reporter = reporters.get(i);\nif (reporter != null) {\nFrontMetricGroup front = new FrontMetricGroup>(i, group);\nreporter.notifyOfRemovedMetric(metric, metricName, front);\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\n}\n}\ntry {\nif (queryService != null) {\nMetricQueryService.notifyOfRemovedMetric(queryService, metric);\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\ntry {\nif (metric instanceof View) {\nif (viewUpdater != null) 
{\nviewUpdater.notifyOfRemovedView((View) metric);\n}\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\n}\n}\n}\n@VisibleForTesting\n@Nullable\npublic ActorRef getQueryService() {\nreturn queryService;\n}\n/**\n* This task is explicitly a static class, so that it does not hold any references to the enclosing\n* MetricsRegistry instance.\n*\n*
This is a subtle difference, but very important: With this static class, the enclosing class instance\n* may become garbage-collectible, whereas with an anonymous inner class, the timer thread\n* (which is a GC root) will hold a reference via the timer task and its enclosing instance pointer.\n* Making the MetricsRegistry garbage collectible makes the java.util.Timer garbage collectible,\n* which acts as a fail-safe to stop the timer thread and prevents resource leaks.\n*/\nprivate static final class ReporterTask extends TimerTask {\nprivate final Scheduled reporter;\nprivate ReporterTask(Scheduled reporter) {\nthis.reporter = reporter;\n}\n@Override\npublic void run() {\ntry {\nreporter.report();\n} catch (Throwable t) {\nLOG.warn(\"Error while reporting metrics\", t);\n}\n}\n}\n}", + "context_after": "class MetricRegistryImpl implements MetricRegistry {\nstatic final Logger LOG = LoggerFactory.getLogger(MetricRegistryImpl.class);\nprivate final Object lock = new Object();\nprivate final List reporters;\nprivate final ScheduledExecutorService executor;\nprivate final ScopeFormats scopeFormats;\nprivate final char globalDelimiter;\nprivate final List delimiters;\nprivate final CompletableFuture terminationFuture;\nprivate final long maximumFramesize;\n@Nullable\nprivate MetricQueryService queryService;\n@Nullable\nprivate RpcService metricQueryServiceRpcService;\nprivate ViewUpdater viewUpdater;\nprivate boolean isShutdown;\n/**\n* Creates a new MetricRegistry and starts the configured reporter.\n*/\npublic MetricRegistryImpl(MetricRegistryConfiguration config, Collection reporterConfigurations) {\nthis.maximumFramesize = config.getQueryServiceMessageSizeLimit();\nthis.scopeFormats = config.getScopeFormats();\nthis.globalDelimiter = config.getDelimiter();\nthis.delimiters = new ArrayList<>(10);\nthis.terminationFuture = new CompletableFuture<>();\nthis.isShutdown = false;\nthis.reporters = new ArrayList<>(4);\nthis.executor = Executors.newSingleThreadScheduledExecutor(new ExecutorThreadFactory(\"Flink-MetricRegistry\"));\nthis.queryService = null;\nthis.metricQueryServiceRpcService = null;\nif (reporterConfigurations.isEmpty()) {\nLOG.info(\"No metrics reporter configured, no metrics will be exposed/reported.\");\n} else {\nfor (ReporterSetup reporterSetup : reporterConfigurations) {\nfinal String namedReporter = reporterSetup.getName();\ntry {\nOptional configuredPeriod = reporterSetup.getIntervalSettings();\nTimeUnit timeunit = TimeUnit.SECONDS;\nlong period = 10;\nif (configuredPeriod.isPresent()) {\ntry {\nString[] interval = configuredPeriod.get().split(\" \");\nperiod = Long.parseLong(interval[0]);\ntimeunit = TimeUnit.valueOf(interval[1]);\n}\ncatch (Exception e) {\nLOG.error(\"Cannot parse report interval from config: \" + configuredPeriod +\n\" - please use values like '10 SECONDS' or '500 MILLISECONDS'. 
\" +\n\"Using default reporting interval.\");\n}\n}\nfinal MetricReporter reporterInstance = reporterSetup.getReporter();\nfinal String className = reporterInstance.getClass().getName();\nif (reporterInstance instanceof Scheduled) {\nLOG.info(\"Periodically reporting metrics in intervals of {} {} for reporter {} of type {}.\", period, timeunit.name(), namedReporter, className);\nexecutor.scheduleWithFixedDelay(\nnew MetricRegistryImpl.ReporterTask((Scheduled) reporterInstance), period, period, timeunit);\n} else {\nLOG.info(\"Reporting metrics for reporter {} of type {}.\", namedReporter, className);\n}\nreporters.add(reporterInstance);\nString delimiterForReporter = reporterSetup.getDelimiter().orElse(String.valueOf(globalDelimiter));\nif (delimiterForReporter.length() != 1) {\nLOG.warn(\"Failed to parse delimiter '{}' for reporter '{}', using global delimiter '{}'.\", delimiterForReporter, namedReporter, globalDelimiter);\ndelimiterForReporter = String.valueOf(globalDelimiter);\n}\nthis.delimiters.add(delimiterForReporter.charAt(0));\n}\ncatch (Throwable t) {\nLOG.error(\"Could not instantiate metrics reporter {}. Metrics might not be exposed/reported.\", namedReporter, t);\n}\n}\n}\n}\n/**\n* Initializes the MetricQueryService.\n*\n* @param rpcService RpcService to create the MetricQueryService on\n* @param resourceID resource ID used to disambiguate the actor name\n*/\npublic void startQueryService(RpcService rpcService, ResourceID resourceID) {\nsynchronized (lock) {\nPreconditions.checkState(!isShutdown(), \"The metric registry has already been shut down.\");\ntry {\nmetricQueryServiceRpcService = rpcService;\nqueryService = MetricQueryService.createMetricQueryService(rpcService, resourceID, maximumFramesize);\nqueryService.start();\n} catch (Exception e) {\nLOG.warn(\"Could not start MetricDumpActor. No metrics will be submitted to the WebInterface.\", e);\n}\n}\n}\n/**\n* Returns the rpc service that the {@link MetricQueryService} runs in.\n*\n* @return rpc service of hte MetricQueryService\n*/\n@Nullable\npublic RpcService getMetricQueryServiceRpcService() {\nreturn metricQueryServiceRpcService;\n}\n/**\n* Returns the address under which the {@link MetricQueryService} is reachable.\n*\n* @return address of the metric query service\n*/\n@Override\n@Nullable\npublic String getMetricQueryServiceGatewayRpcAddress() {\nif (queryService != null) {\nreturn queryService.getSelfGateway(MetricQueryServiceGateway.class).getAddress();\n} else {\nreturn null;\n}\n}\n@VisibleForTesting\n@Nullable\nMetricQueryServiceGateway getMetricQueryServiceGateway() {\nif (queryService != null) {\nreturn queryService.getSelfGateway(MetricQueryServiceGateway.class);\n} else {\nreturn null;\n}\n}\n@Override\npublic char getDelimiter() {\nreturn this.globalDelimiter;\n}\n@Override\npublic char getDelimiter(int reporterIndex) {\ntry {\nreturn delimiters.get(reporterIndex);\n} catch (IndexOutOfBoundsException e) {\nLOG.warn(\"Delimiter for reporter index {} not found, returning global delimiter.\", reporterIndex);\nreturn this.globalDelimiter;\n}\n}\n@Override\npublic int getNumberReporters() {\nreturn reporters.size();\n}\n@VisibleForTesting\npublic List getReporters() {\nreturn reporters;\n}\n/**\n* Returns whether this registry has been shutdown.\n*\n* @return true, if this registry was shutdown, otherwise false\n*/\npublic boolean isShutdown() {\nsynchronized (lock) {\nreturn isShutdown;\n}\n}\n/**\n* Shuts down this registry and the associated {@link MetricReporter}.\n*\n*
NOTE: This operation is asynchronous and returns a future which is completed\n* once the shutdown operation has been completed.\n*\n* @return Future which is completed once the {@link MetricRegistryImpl}\n* is shut down.\n*/\npublic CompletableFuture shutdown() {\nsynchronized (lock) {\nif (isShutdown) {\nreturn terminationFuture;\n} else {\nisShutdown = true;\nfinal Collection> terminationFutures = new ArrayList<>(3);\nfinal Time gracePeriod = Time.seconds(1L);\nif (metricQueryServiceRpcService != null) {\nfinal CompletableFuture metricQueryServiceRpcServiceTerminationFuture = metricQueryServiceRpcService.stopService();\nterminationFutures.add(metricQueryServiceRpcServiceTerminationFuture);\n}\nThrowable throwable = null;\nfor (MetricReporter reporter : reporters) {\ntry {\nreporter.close();\n} catch (Throwable t) {\nthrowable = ExceptionUtils.firstOrSuppressed(t, throwable);\n}\n}\nreporters.clear();\nif (throwable != null) {\nterminationFutures.add(\nFutureUtils.completedExceptionally(\nnew FlinkException(\"Could not shut down the metric reporters properly.\", throwable)));\n}\nfinal CompletableFuture executorShutdownFuture = ExecutorUtils.nonBlockingShutdown(\ngracePeriod.toMilliseconds(),\nTimeUnit.MILLISECONDS,\nexecutor);\nterminationFutures.add(executorShutdownFuture);\nFutureUtils\n.completeAll(terminationFutures)\n.whenComplete(\n(Void ignored, Throwable error) -> {\nif (error != null) {\nterminationFuture.completeExceptionally(error);\n} else {\nterminationFuture.complete(null);\n}\n});\nreturn terminationFuture;\n}\n}\n}\n@Override\npublic ScopeFormats getScopeFormats() {\nreturn scopeFormats;\n}\n@Override\npublic void register(Metric metric, String metricName, AbstractMetricGroup group) {\nsynchronized (lock) {\nif (isShutdown()) {\nLOG.warn(\"Cannot register metric, because the MetricRegistry has already been shut down.\");\n} else {\nif (reporters != null) {\nfor (int i = 0; i < reporters.size(); i++) {\nMetricReporter reporter = reporters.get(i);\ntry {\nif (reporter != null) {\nFrontMetricGroup front = new FrontMetricGroup>(i, group);\nreporter.notifyOfAddedMetric(metric, metricName, front);\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\n}\n}\ntry {\nif (queryService != null) {\nqueryService.addMetric(metricName, metric, group);\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\ntry {\nif (metric instanceof View) {\nif (viewUpdater == null) {\nviewUpdater = new ViewUpdater(executor);\n}\nviewUpdater.notifyOfAddedView((View) metric);\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\n}\n}\n}\n@Override\npublic void unregister(Metric metric, String metricName, AbstractMetricGroup group) {\nsynchronized (lock) {\nif (isShutdown()) {\nLOG.warn(\"Cannot unregister metric, because the MetricRegistry has already been shut down.\");\n} else {\nif (reporters != null) {\nfor (int i = 0; i < reporters.size(); i++) {\ntry {\nMetricReporter reporter = reporters.get(i);\nif (reporter != null) {\nFrontMetricGroup front = new FrontMetricGroup>(i, group);\nreporter.notifyOfRemovedMetric(metric, metricName, front);\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\n}\n}\ntry {\nif (queryService != null) {\nqueryService.removeMetric(metric);\n}\n} catch (Exception e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\ntry {\nif (metric instanceof View) {\nif (viewUpdater != null) {\nviewUpdater.notifyOfRemovedView((View) metric);\n}\n}\n} catch (Exception 
e) {\nLOG.warn(\"Error while registering metric.\", e);\n}\n}\n}\n}\n@VisibleForTesting\n@Nullable\nMetricQueryService getQueryService() {\nreturn queryService;\n}\n/**\n* This task is explicitly a static class, so that it does not hold any references to the enclosing\n* MetricsRegistry instance.\n*\n*
This is a subtle difference, but very important: With this static class, the enclosing class instance\n* may become garbage-collectible, whereas with an anonymous inner class, the timer thread\n* (which is a GC root) will hold a reference via the timer task and its enclosing instance pointer.\n* Making the MetricsRegistry garbage collectible makes the java.util.Timer garbage collectible,\n* which acts as a fail-safe to stop the timer thread and prevents resource leaks.\n*/\nprivate static final class ReporterTask extends TimerTask {\nprivate final Scheduled reporter;\nprivate ReporterTask(Scheduled reporter) {\nthis.reporter = reporter;\n}\n@Override\npublic void run() {\ntry {\nreporter.report();\n} catch (Throwable t) {\nLOG.warn(\"Error while reporting metrics\", t);\n}\n}\n}\n}" + }, + { + "comment": "```suggestion new FatalExitExceptionHandler.INSTANCE.uncaughtException(t, e); ```", + "method_body": "public void uncaughtException(Thread t, Throwable e) {\nif (handleMode == ClusterOptions.UncaughtExceptionHandleMode.LOG) {\nLOG.error(\"WARNING: Thread '{}' produced an uncaught exception.\", t.getName(), e);\n} else {\nnew FatalExitExceptionHandler().uncaughtException(t, e);\n}\n}", + "target_code": "new FatalExitExceptionHandler().uncaughtException(t, e);", + "method_body_after": "public void uncaughtException(Thread t, Throwable e) {\nif (handleMode == ClusterOptions.UncaughtExceptionHandleMode.LOG) {\nLOG.error(\n\"WARNING: Thread '{}' produced an uncaught exception. If you want to fail on uncaught exceptions, then configure {} accordingly\",\nt.getName(),\nClusterOptions.UNCAUGHT_EXCEPTION_HANDLING.key());\n} else {\nFatalExitExceptionHandler.INSTANCE.uncaughtException(t, e);\n}\n}", + "context_before": "class ClusterUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {\nprivate static final Logger LOG =\nLoggerFactory.getLogger(ClusterUncaughtExceptionHandler.class);\nClusterOptions.UncaughtExceptionHandleMode handleMode;\npublic ClusterUncaughtExceptionHandler(ClusterOptions.UncaughtExceptionHandleMode handleMode) {\nthis.handleMode = handleMode;\n}\n@Override\n}", + "context_after": "class ClusterUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {\nprivate static final Logger LOG =\nLoggerFactory.getLogger(ClusterUncaughtExceptionHandler.class);\nprivate final ClusterOptions.UncaughtExceptionHandleMode handleMode;\npublic ClusterUncaughtExceptionHandler(ClusterOptions.UncaughtExceptionHandleMode handleMode) {\nthis.handleMode = handleMode;\n}\n@Override\n}" + }, + { + "comment": "!valueColumn.getType().isBigint() is better", + "method_body": "public Void visitDictQueryExpr(DictQueryExpr node, Scope context) {\nList params = node.getParams().exprs();\nif (!(params.get(0) instanceof StringLiteral)) {\nthrow new SemanticException(\"dict_mapping function first param table_name should be string literal\");\n}\nString[] dictTableFullName = ((StringLiteral) params.get(0)).getStringValue().split(\"\\\\.\");\nTableName tableName;\nif (dictTableFullName.length == 1) {\ntableName = new TableName(null, dictTableFullName[0]);\ntableName.normalization(session);\n} else if (dictTableFullName.length == 2) {\ntableName = new TableName(dictTableFullName[0], dictTableFullName[1]);\n} else {\nthrow new SemanticException(\"dict_mapping function first param table_name should be 'db.tbl' or 'tbl' format\");\n}\nDatabase db = GlobalStateMgr.getCurrentState().getDb(tableName.getDb());\nif (db == null) {\nthrow new SemanticException(\"Database %s is not found\", 
tableName.getDb());\n}\nTable table = db.getTable(tableName.getTbl());\nif (table == null) {\nthrow new SemanticException(\"dict table %s is not found\", tableName.getTbl());\n}\nif (!(table instanceof OlapTable)) {\nthrow new SemanticException(\"dict table type is not OlapTable, type=\" + table.getClass());\n}\nif (table instanceof MaterializedView) {\nthrow new SemanticException(\"dict table can't be materialized view\");\n}\nOlapTable dictTable = (OlapTable) table;\nif (dictTable.getKeysType() != KeysType.PRIMARY_KEYS) {\nthrow new SemanticException(\"dict table \" + tableName + \" should be primary key table\");\n}\nList keyColumns = new ArrayList<>();\nColumn valueColumn = null;\nfor (Column column : dictTable.getBaseSchema()) {\nif (column.isKey()) {\nkeyColumns.add(column);\n}\nif (column.isAutoIncrement()) {\nvalueColumn = column;\n}\n}\nboolean hasValueColumn;\nif (params.size() == keyColumns.size() + 1) {\nhasValueColumn = false;\n} else if (params.size() == keyColumns.size() + 2) {\nhasValueColumn = true;\n} else {\nthrow new SemanticException(String.format(\"dict_mapping function param size should be %d or %d\",\nkeyColumns.size() + 1, keyColumns.size() + 2));\n}\nString valueField;\nif (hasValueColumn) {\nExpr valueFieldExpr = params.get(params.size() - 1);\nif (!(valueFieldExpr instanceof StringLiteral)) {\nthrow new SemanticException(\"dict_mapping function last param should be STRING if specific value column.\");\n}\nvalueField = ((StringLiteral) valueFieldExpr).getStringValue();\nvalueColumn = dictTable.getBaseColumn(valueField);\nif (valueColumn == null) {\nthrow new SemanticException(\"dict_mapping function can't find value column \" + valueField + \" in dict table\");\n}\n} else {\nif (valueColumn == null) {\nthrow new SemanticException(\"dict_mapping function can't find auto increment column in dict table\");\n}\nvalueField = valueColumn.getName();\n}\nif (valueColumn.getType() != Type.BIGINT) {\nthrow new SemanticException(\"dict_mapping function value field '\" + valueField + \"' should be bigint\");\n}\nList expectTypes = new ArrayList<>();\nexpectTypes.add(Type.VARCHAR);\nfor (Column keyColumn : keyColumns) {\nexpectTypes.add(ScalarType.createType(keyColumn.getType().getPrimitiveType()));\n}\nif (hasValueColumn) {\nexpectTypes.add(Type.VARCHAR);\n}\nList actualTypes = params.stream()\n.map(expr -> ScalarType.createType(expr.getType().getPrimitiveType())).collect(Collectors.toList());\nif (!Objects.equals(expectTypes, actualTypes)) {\nList expectTypeNames = new ArrayList<>();\nfor (int i = 0; i < expectTypes.size(); i++) {\nif (i == 0) {\nexpectTypeNames.add(\"VARCHAR dict_table\");\n} else if (hasValueColumn && i == expectTypes.size() - 1) {\nexpectTypeNames.add(\"VARCHAR value_field_name\");\n} else {\nexpectTypeNames.add(expectTypes.get(i).canonicalName() + \" \" + keyColumns.get(i - 1).getName());\n}\n}\nList actualTypeNames = actualTypes.stream().map(Type::canonicalName).collect(Collectors.toList());\nthrow new SemanticException(\nString.format(\"dict_mapping function params not match expected,\\nExpect: %s\\nActual: %s\",\nString.join(\", \", expectTypeNames), String.join(\", \", actualTypeNames)));\n}\nfinal TDictQueryExpr dictQueryExpr = new TDictQueryExpr();\ndictQueryExpr.setDb_name(tableName.getDb());\ndictQueryExpr.setTbl_name(tableName.getTbl());\nMap partitionVersion = new HashMap<>();\ndictTable.getPartitions().forEach(p -> partitionVersion.put(p.getId(), p.getVisibleVersion()));\ndictQueryExpr.setPartition_version(partitionVersion);\nList keyFields = 
keyColumns.stream().map(Column::getName).collect(Collectors.toList());\ndictQueryExpr.setKey_fields(keyFields);\ndictQueryExpr.setValue_field(valueField);\nnode.setType(Type.BIGINT);\nFunction fn = new Function(FunctionName.createFnName(FunctionSet.DICT_GET), actualTypes, Type.BIGINT, false);\nfn.setBinaryType(TFunctionBinaryType.BUILTIN);\nnode.setFn(fn);\nnode.setDictQueryExpr(dictQueryExpr);\nreturn null;\n}", + "target_code": "if (valueColumn.getType() != Type.BIGINT) {", + "method_body_after": "public Void visitDictQueryExpr(DictQueryExpr node, Scope context) {\nList params = node.getParams().exprs();\nif (!(params.get(0) instanceof StringLiteral)) {\nthrow new SemanticException(\"dict_mapping function first param table_name should be string literal\");\n}\nString[] dictTableFullName = ((StringLiteral) params.get(0)).getStringValue().split(\"\\\\.\");\nTableName tableName;\nif (dictTableFullName.length == 1) {\ntableName = new TableName(null, dictTableFullName[0]);\ntableName.normalization(session);\n} else if (dictTableFullName.length == 2) {\ntableName = new TableName(dictTableFullName[0], dictTableFullName[1]);\n} else {\nthrow new SemanticException(\"dict_mapping function first param table_name should be 'db.tbl' or 'tbl' format\");\n}\nDatabase db = GlobalStateMgr.getCurrentState().getDb(tableName.getDb());\nif (db == null) {\nthrow new SemanticException(\"Database %s is not found\", tableName.getDb());\n}\nTable table = db.getTable(tableName.getTbl());\nif (table == null) {\nthrow new SemanticException(\"dict table %s is not found\", tableName.getTbl());\n}\nif (!(table instanceof OlapTable)) {\nthrow new SemanticException(\"dict table type is not OlapTable, type=\" + table.getClass());\n}\nif (table instanceof MaterializedView) {\nthrow new SemanticException(\"dict table can't be materialized view\");\n}\nOlapTable dictTable = (OlapTable) table;\nif (dictTable.getKeysType() != KeysType.PRIMARY_KEYS) {\nthrow new SemanticException(\"dict table \" + tableName + \" should be primary key table\");\n}\nList keyColumns = new ArrayList<>();\nColumn valueColumn = null;\nfor (Column column : dictTable.getBaseSchema()) {\nif (column.isKey()) {\nkeyColumns.add(column);\n}\nif (column.isAutoIncrement()) {\nvalueColumn = column;\n}\n}\nint valueColumnIdx;\nint strictModeIdx;\nif (params.size() == keyColumns.size() + 1) {\nvalueColumnIdx = -1;\nstrictModeIdx = -1;\n} else if (params.size() == keyColumns.size() + 2) {\nif (params.get(params.size() - 1).getType().getPrimitiveType().isStringType()) {\nvalueColumnIdx = params.size() - 1;\nstrictModeIdx = -1;\n} else {\nstrictModeIdx = params.size() - 1;\nvalueColumnIdx = -1;\n}\n} else if (params.size() == keyColumns.size() + 3) {\nvalueColumnIdx = params.size() - 2;\nstrictModeIdx = params.size() - 1;\n} else {\nthrow new SemanticException(String.format(\"dict_mapping function param size should be %d - %d\",\nkeyColumns.size() + 1, keyColumns.size() + 3));\n}\nString valueField;\nif (valueColumnIdx >= 0) {\nExpr valueFieldExpr = params.get(valueColumnIdx);\nif (!(valueFieldExpr instanceof StringLiteral)) {\nthrow new SemanticException(\"dict_mapping function value_column param should be STRING constant\");\n}\nvalueField = ((StringLiteral) valueFieldExpr).getStringValue();\nvalueColumn = dictTable.getBaseColumn(valueField);\nif (valueColumn == null) {\nthrow new SemanticException(\"dict_mapping function can't find value column \" + valueField + \" in dict table\");\n}\n} else {\nif (valueColumn == null) {\nthrow new 
SemanticException(\"dict_mapping function can't find auto increment column in dict table\");\n}\nvalueField = valueColumn.getName();\n}\nboolean strictMode = false;\nif (strictModeIdx >= 0) {\nExpr strictModeExpr = params.get(strictModeIdx);\nif (!(strictModeExpr instanceof BoolLiteral)) {\nthrow new SemanticException(\"dict_mapping function strict_mode param should be bool constant\");\n}\nstrictMode = ((BoolLiteral) strictModeExpr).getValue();\n}\nList expectTypes = new ArrayList<>();\nexpectTypes.add(Type.VARCHAR);\nfor (Column keyColumn : keyColumns) {\nexpectTypes.add(ScalarType.createType(keyColumn.getType().getPrimitiveType()));\n}\nif (valueColumnIdx >= 0) {\nexpectTypes.add(Type.VARCHAR);\n}\nif (strictModeIdx >= 0) {\nexpectTypes.add(Type.BOOLEAN);\n}\nList actualTypes = params.stream()\n.map(expr -> ScalarType.createType(expr.getType().getPrimitiveType())).collect(Collectors.toList());\nif (!Objects.equals(expectTypes, actualTypes)) {\nList expectTypeNames = new ArrayList<>();\nexpectTypeNames.add(\"VARCHAR dict_table\");\nfor (int i = 0; i < keyColumns.size(); i++) {\nexpectTypeNames.add(expectTypes.get(i + 1).canonicalName() + \" \" + keyColumns.get(i).getName());\n}\nif (valueColumnIdx >= 0) {\nexpectTypeNames.add(\"VARCHAR value_field_name\");\n}\nif (strictModeIdx >= 0) {\nexpectTypeNames.add(\"BOOLEAN strict_mode\");\n}\nList actualTypeNames = actualTypes.stream().map(Type::canonicalName).collect(Collectors.toList());\nthrow new SemanticException(\nString.format(\"dict_mapping function params not match expected,\\nExpect: %s\\nActual: %s\",\nString.join(\", \", expectTypeNames), String.join(\", \", actualTypeNames)));\n}\nType valueType = ScalarType.createType(valueColumn.getType().getPrimitiveType());\nfinal TDictQueryExpr dictQueryExpr = new TDictQueryExpr();\ndictQueryExpr.setDb_name(tableName.getDb());\ndictQueryExpr.setTbl_name(tableName.getTbl());\nMap partitionVersion = new HashMap<>();\ndictTable.getPartitions().forEach(p -> partitionVersion.put(p.getId(), p.getVisibleVersion()));\ndictQueryExpr.setPartition_version(partitionVersion);\nList keyFields = keyColumns.stream().map(Column::getName).collect(Collectors.toList());\ndictQueryExpr.setKey_fields(keyFields);\ndictQueryExpr.setValue_field(valueField);\ndictQueryExpr.setStrict_mode(strictMode);\nnode.setType(valueType);\nFunction fn = new Function(FunctionName.createFnName(FunctionSet.DICT_MAPPING), actualTypes, valueType, false);\nfn.setBinaryType(TFunctionBinaryType.BUILTIN);\nnode.setFn(fn);\nnode.setDictQueryExpr(dictQueryExpr);\nreturn null;\n}", + "context_before": "class Visitor extends AstVisitor {\nprivate static final List ADD_DATE_FUNCTIONS = Lists.newArrayList(FunctionSet.DATE_ADD,\nFunctionSet.ADDDATE, FunctionSet.DAYS_ADD, FunctionSet.TIMESTAMPADD);\nprivate static final List SUB_DATE_FUNCTIONS =\nLists.newArrayList(FunctionSet.DATE_SUB, FunctionSet.SUBDATE,\nFunctionSet.DAYS_SUB);\nprivate final AnalyzeState analyzeState;\nprivate final ConnectContext session;\npublic Visitor(AnalyzeState analyzeState, ConnectContext session) {\nthis.analyzeState = analyzeState;\nthis.session = session;\n}\n@Override\npublic Void visitExpression(Expr node, Scope scope) {\nthrow new SemanticException(\"not yet implemented: expression analyzer for \" + node.getClass().getName(),\nnode.getPos());\n}\nprivate void handleResolvedField(SlotRef slot, ResolvedField resolvedField) {\nanalyzeState.addColumnReference(slot, FieldId.from(resolvedField));\n}\n@Override\npublic Void visitSubfieldExpr(SubfieldExpr node, Scope 
scope) {\nExpr child = node.getChild(0);\nif (!child.getType().isStructType()) {\nthrow new SemanticException(child.toSql() + \" must be a struct type, check if you are using `'`\",\nchild.getPos());\n}\nList fieldNames = node.getFieldNames();\nType tmpType = child.getType();\nfor (String fieldName : fieldNames) {\nStructType structType = (StructType) tmpType;\nStructField structField = structType.getField(fieldName);\nif (structField == null) {\nthrow new SemanticException(String.format(\"Struct subfield '%s' cannot be resolved\", fieldName),\nnode.getPos());\n}\ntmpType = structField.getType();\n}\nnode.setType(tmpType);\nreturn null;\n}\n@Override\npublic Void visitSlot(SlotRef node, Scope scope) {\nResolvedField resolvedField = scope.resolveField(node);\nnode.setType(resolvedField.getField().getType());\nnode.setTblName(resolvedField.getField().getRelationAlias());\nnode.setNullable(resolvedField.getField().isNullable());\nif (node.getType().isStructType()) {\nnode.setCol(resolvedField.getField().getName());\nnode.setLabel(resolvedField.getField().getName());\nif (resolvedField.getField().getTmpUsedStructFieldPos().size() > 0) {\nnode.setUsedStructFieldPos(resolvedField.getField().getTmpUsedStructFieldPos());\nnode.resetStructInfo();\n}\n}\nhandleResolvedField(node, resolvedField);\nreturn null;\n}\n@Override\npublic Void visitFieldReference(FieldReference node, Scope scope) {\nField field = scope.getRelationFields().getFieldByIndex(node.getFieldIndex());\nnode.setType(field.getType());\nreturn null;\n}\n@Override\npublic Void visitArrayExpr(ArrayExpr node, Scope scope) {\nif (!node.getChildren().isEmpty()) {\ntry {\nType targetItemType;\nif (node.getType() != null) {\ntargetItemType = ((ArrayType) node.getType()).getItemType();\n} else {\ntargetItemType = TypeManager.getCommonSuperType(\nnode.getChildren().stream().map(Expr::getType).collect(Collectors.toList()));\n}\nfor (int i = 0; i < node.getChildren().size(); i++) {\nif (!node.getChildren().get(i).getType().matchesType(targetItemType)) {\nnode.castChild(targetItemType, i);\n}\n}\nnode.setType(new ArrayType(targetItemType));\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n} else {\nnode.setType(Type.ARRAY_NULL);\n}\nreturn null;\n}\n@Override\npublic Void visitMapExpr(MapExpr node, Scope scope) {\nif (!node.getChildren().isEmpty()) {\nType originalType = node.getType();\nif (originalType == Type.ANY_MAP) {\nType keyType = node.getKeyCommonType();\nType valueType = node.getValueCommonType();\nnode.setType(new MapType(keyType, valueType));\n}\n} else {\nnode.setType(new MapType(Type.NULL, Type.NULL));\n}\nreturn null;\n}\n@Override\npublic Void visitCollectionElementExpr(CollectionElementExpr node, Scope scope) {\nExpr expr = node.getChild(0);\nExpr subscript = node.getChild(1);\nif (expr.getType().isArrayType()) {\nif (!subscript.getType().isNumericType()) {\nthrow new SemanticException(\"array subscript must have type integer\", subscript.getPos());\n}\ntry {\nif (subscript.getType().getPrimitiveType() != PrimitiveType.INT) {\nnode.castChild(Type.INT, 1);\n}\nnode.setType(((ArrayType) expr.getType()).getItemType());\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n} else if (expr.getType().isMapType()) {\ntry {\nif (subscript.getType().getPrimitiveType() !=\n((MapType) expr.getType()).getKeyType().getPrimitiveType()) {\nnode.castChild(((MapType) expr.getType()).getKeyType(), 1);\n}\nnode.setType(((MapType) expr.getType()).getValueType());\n} catch 
(AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n} else if (expr.getType().isStructType()) {\nif (!(subscript instanceof IntLiteral)) {\nthrow new SemanticException(\"struct subscript must have integer pos\", subscript.getPos());\n}\nlong index = ((IntLiteral) subscript).getValue();\nlong fieldSize = ((StructType) expr.getType()).getFields().size();\nif (fieldSize < Math.abs(index)) {\nthrow new SemanticException(\"the pos is out of struct subfields\", subscript.getPos());\n} else if (index == 0) {\nthrow new SemanticException(\"the pos can't set to zero\", subscript.getPos());\n}\nindex = index > 0 ? index - 1 : fieldSize + index;\nStructField structField = ((StructType) expr.getType()).getFields().get((int) index);\nnode.setType(structField.getType());\n} else {\nthrow new SemanticException(\"cannot subscript type \" + expr.getType()\n+ \" because it is not an array or a map or a struct\", expr.getPos());\n}\nreturn null;\n}\n@Override\npublic Void visitArraySliceExpr(ArraySliceExpr node, Scope scope) {\nif (!node.getChild(0).getType().isArrayType()) {\nthrow new SemanticException(\"cannot subscript type\" +\nnode.getChild(0).getType() + \" because it is not an array\", node.getChild(0).getPos());\n}\nnode.setType(node.getChild(0).getType());\nreturn null;\n}\n@Override\npublic Void visitArrowExpr(ArrowExpr node, Scope scope) {\nExpr item = node.getChild(0);\nExpr key = node.getChild(1);\nif (!key.isLiteral() || !key.getType().isStringType()) {\nthrow new SemanticException(\"right operand of -> should be string literal, but got \" + key,\nkey.getPos());\n}\nif (!item.getType().isJsonType()) {\nthrow new SemanticException(\n\"-> operator could only be used for json column, but got \" + item.getType(), item.getPos());\n}\nnode.setType(Type.JSON);\nreturn null;\n}\n@Override\npublic Void visitLambdaFunctionExpr(LambdaFunctionExpr node, Scope scope) {\nif (scope.getLambdaInputs().size() == 0) {\nthrow new SemanticException(\n\"Lambda Functions can only be used in high-order functions with arrays/maps\",\nnode.getPos());\n}\nif (scope.getLambdaInputs().size() != node.getChildren().size() - 1) {\nthrow new SemanticException(\"Lambda arguments should equal to lambda input arrays\", node.getPos());\n}\nSet set = new HashSet<>();\nList args = Lists.newArrayList();\nfor (int i = 1; i < node.getChildren().size(); ++i) {\nargs.add((LambdaArgument) node.getChild(i));\nString name = ((LambdaArgument) node.getChild(i)).getName();\nif (set.contains(name)) {\nthrow new SemanticException(\"Lambda argument: \" + name + \" is duplicated\",\nnode.getChild(i).getPos());\n}\nset.add(name);\n((LambdaArgument) node.getChild(i)).setNullable(scope.getLambdaInputs().get(i - 1).isNullable());\nnode.getChild(i).setType(scope.getLambdaInputs().get(i - 1).getType());\n}\nScope lambdaScope = new Scope(args, scope);\nExpressionAnalyzer.analyzeExpression(node.getChild(0), this.analyzeState, lambdaScope, this.session);\nnode.setType(Type.FUNCTION);\nscope.clearLambdaInputs();\nreturn null;\n}\n@Override\npublic Void visitCompoundPredicate(CompoundPredicate node, Scope scope) {\nnode.setType(Type.BOOLEAN);\nfor (int i = 0; i < node.getChildren().size(); i++) {\nExpr child = node.getChild(i);\nif (child.getType().isBoolean() || child.getType().isNull()) {\n} else if (!session.getSessionVariable().isEnableStrictType() &&\nType.canCastTo(child.getType(), Type.BOOLEAN)) {\nnode.getChildren().set(i, new CastExpr(Type.BOOLEAN, child));\n} else {\nthrow new SemanticException(child.toSql() + \" can not be 
converted to boolean type.\");\n}\n}\nreturn null;\n}\n@Override\npublic Void visitBetweenPredicate(BetweenPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList());\nType compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list);\nfor (Type type : list) {\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\n\"between predicate type \" + type.toSql() + \" with type \" + compatibleType.toSql()\n+ \" is invalid\", node.getPos());\n}\n}\nreturn null;\n}\n@Override\npublic Void visitBinaryPredicate(BinaryPredicate node, Scope scope) {\nType type1 = node.getChild(0).getType();\nType type2 = node.getChild(1).getType();\nType compatibleType = TypeManager.getCompatibleTypeForBinary(node.getOp(), type1, type2);\nfinal String ERROR_MSG = \"Column type %s does not support binary predicate operation\";\nif (!Type.canCastTo(type1, compatibleType)) {\nthrow new SemanticException(String.format(ERROR_MSG, type1.toSql()), node.getPos());\n}\nif (!Type.canCastTo(type2, compatibleType)) {\nthrow new SemanticException(String.format(ERROR_MSG, type1.toSql()), node.getPos());\n}\nnode.setType(Type.BOOLEAN);\nreturn null;\n}\n@Override\npublic Void visitArithmeticExpr(ArithmeticExpr node, Scope scope) {\nif (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.BINARY_INFIX) {\nArithmeticExpr.Operator op = node.getOp();\nType t1 = node.getChild(0).getType().getNumResultType();\nType t2 = node.getChild(1).getType().getNumResultType();\nif (t1.isDecimalV3() || t2.isDecimalV3()) {\nArithmeticExpr.TypeTriple typeTriple = null;\ntry {\ntypeTriple = node.rewriteDecimalOperation();\n} catch (AnalysisException ex) {\nthrow new SemanticException(ex.getMessage());\n}\nPreconditions.checkArgument(typeTriple != null);\nType[] args = {typeTriple.lhsTargetType, typeTriple.rhsTargetType};\nFunction fn = Expr.getBuiltinFunction(op.getName(), args, Function.CompareMode.IS_IDENTICAL);\nFunction newFn = new ScalarFunction(fn.getFunctionName(), args, typeTriple.returnType, fn.hasVarArgs());\nnode.setType(typeTriple.returnType);\nnode.setFn(newFn);\nreturn null;\n}\nType lhsType;\nType rhsType;\nswitch (op) {\ncase MULTIPLY:\ncase ADD:\ncase SUBTRACT:\nlhsType = ArithmeticExpr.getBiggerType(ArithmeticExpr.getCommonType(t1, t2));\nrhsType = lhsType;\nbreak;\ncase MOD:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nrhsType = lhsType;\nbreak;\ncase DIVIDE:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nif (lhsType.isFixedPointType()) {\nlhsType = Type.DOUBLE;\n}\nrhsType = lhsType;\nbreak;\ncase INT_DIVIDE:\ncase BITAND:\ncase BITOR:\ncase BITXOR:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nif (!lhsType.isFixedPointType()) {\nlhsType = Type.BIGINT;\n}\nrhsType = lhsType;\nbreak;\ncase BIT_SHIFT_LEFT:\ncase BIT_SHIFT_RIGHT:\ncase BIT_SHIFT_RIGHT_LOGICAL:\nlhsType = t1;\nrhsType = Type.BIGINT;\nbreak;\ndefault:\nthrow new SemanticException(\"Unknown arithmetic operation \" + op + \" in: \" + node,\nnode.getPos());\n}\nif (node.getChild(0).getType().equals(Type.NULL) && node.getChild(1).getType().equals(Type.NULL)) {\nlhsType = Type.NULL;\nrhsType = Type.NULL;\n}\nif (lhsType.isInvalid() || rhsType.isInvalid()) {\nthrow new SemanticException(\"Any function type can not cast to \" + Type.INVALID.toSql());\n}\nif (!Type.NULL.equals(node.getChild(0).getType()) && !Type.canCastTo(t1, lhsType)) {\nthrow new SemanticException(\n\"cast type \" + node.getChild(0).getType().toSql() + \" with type \" + 
lhsType.toSql()\n+ \" is invalid\", node.getPos());\n}\nif (!Type.NULL.equals(node.getChild(1).getType()) && !Type.canCastTo(t2, rhsType)) {\nthrow new SemanticException(\n\"cast type \" + node.getChild(1).getType().toSql() + \" with type \" + rhsType.toSql()\n+ \" is invalid\", node.getPos());\n}\nFunction fn = Expr.getBuiltinFunction(op.getName(), new Type[] {lhsType, rhsType},\nFunction.CompareMode.IS_SUPERTYPE_OF);\nif (fn == null) {\nthrow new SemanticException(String.format(\n\"No matching function '%s' with operand types %s and %s\", node.getOp().getName(), t1, t2));\n}\n/*\n* commonType is the common type of the parameters of the function,\n* and fn.getReturnType() is the return type of the function after execution\n* So we use fn.getReturnType() as node type\n*/\nnode.setType(fn.getReturnType());\nnode.setFn(fn);\n} else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_PREFIX) {\nFunction fn = Expr.getBuiltinFunction(\nnode.getOp().getName(), new Type[] {Type.BIGINT}, Function.CompareMode.IS_SUPERTYPE_OF);\nnode.setType(Type.BIGINT);\nnode.setFn(fn);\n} else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_POSTFIX) {\nthrow new SemanticException(\"not yet implemented: expression analyzer for \" + node.getClass().getName(),\nnode.getPos());\n} else {\nthrow new SemanticException(\"not yet implemented: expression analyzer for \" + node.getClass().getName(),\nnode.getPos());\n}\nreturn null;\n}\n@Override\npublic Void visitTimestampArithmeticExpr(TimestampArithmeticExpr node, Scope scope) {\nnode.setChild(0, TypeManager.addCastExpr(node.getChild(0), Type.DATETIME));\nString funcOpName;\nif (node.getFuncName() != null) {\nif (ADD_DATE_FUNCTIONS.contains(node.getFuncName())) {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"add\");\n} else if (SUB_DATE_FUNCTIONS.contains(node.getFuncName())) {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"sub\");\n} else {\nnode.setChild(1, TypeManager.addCastExpr(node.getChild(1), Type.DATETIME));\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"diff\");\n}\n} else {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(),\n(node.getOp() == ArithmeticExpr.Operator.ADD) ? 
\"add\" : \"sub\");\n}\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType)\n.toArray(Type[]::new);\nFunction fn = Expr.getBuiltinFunction(funcOpName.toLowerCase(), argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn == null) {\nString msg = String.format(\"No matching function with signature: %s(%s)\", funcOpName, Joiner.on(\", \")\n.join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList())));\nthrow new SemanticException(msg, node.getPos());\n}\nnode.setType(fn.getReturnType());\nnode.setFn(fn);\nreturn null;\n}\n@Override\npublic Void visitExistsPredicate(ExistsPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nreturn null;\n}\n@Override\npublic Void visitInPredicate(InPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList queryExpressions = Lists.newArrayList();\nnode.collect(arg -> arg instanceof Subquery, queryExpressions);\nif (queryExpressions.size() > 0 && node.getChildren().size() > 2) {\nthrow new SemanticException(\"In Predicate only support literal expression list\", node.getPos());\n}\nList list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList());\nType compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list);\nfor (Expr child : node.getChildren()) {\nType type = child.getType();\nif (type.isJsonType()) {\nthrow new SemanticException(\"InPredicate of JSON is not supported\", child.getPos());\n}\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\n\"in predicate type \" + type.toSql() + \" with type \" + compatibleType.toSql()\n+ \" is invalid\", child.getPos());\n}\n}\nreturn null;\n}\n@Override\npublic Void visitMultiInPredicate(MultiInPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList leftTypes =\nnode.getChildren().stream().limit(node.getNumberOfColumns()).map(Expr::getType)\n.collect(Collectors.toList());\nSubquery inSubquery = (Subquery) node.getChild(node.getNumberOfColumns());\nList rightTypes =\ninSubquery.getQueryStatement().getQueryRelation().getOutputExpression().stream().map(Expr::getType).\ncollect(Collectors.toList());\nif (leftTypes.size() != rightTypes.size()) {\nthrow new SemanticException(\n\"subquery must return the same number of columns as provided by the IN predicate\",\nnode.getPos());\n}\nfor (int i = 0; i < rightTypes.size(); ++i) {\nif (leftTypes.get(i).isJsonType() || rightTypes.get(i).isJsonType() || leftTypes.get(i).isMapType() ||\nrightTypes.get(i).isMapType() || leftTypes.get(i).isStructType() ||\nrightTypes.get(i).isStructType()) {\nthrow new SemanticException(\"InPredicate of JSON, Map, Struct types is not supported\");\n}\nif (!Type.canCastTo(leftTypes.get(i), rightTypes.get(i))) {\nthrow new SemanticException(\n\"in predicate type \" + leftTypes.get(i).toSql() + \" with type \" + rightTypes.get(i).toSql()\n+ \" is invalid\");\n}\n}\nreturn null;\n}\n@Override\npublic Void visitLiteral(LiteralExpr node, Scope scope) {\nif (node instanceof LargeIntLiteral) {\nBigInteger value = ((LargeIntLiteral) node).getValue();\nif (value.compareTo(LargeIntLiteral.LARGE_INT_MIN) < 0 ||\nvalue.compareTo(LargeIntLiteral.LARGE_INT_MAX) > 0) {\nthrow new SemanticException(PARSER_ERROR_MSG.numOverflow(value.toString()), node.getPos());\n}\n}\nreturn null;\n}\n@Override\npublic Void visitIsNullPredicate(IsNullPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nreturn null;\n}\n@Override\npublic Void visitLikePredicate(LikePredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nType type1 = 
node.getChild(0).getType();\nType type2 = node.getChild(1).getType();\nif (!type1.isStringType() && !type1.isNull()) {\nthrow new SemanticException(\n\"left operand of \" + node.getOp().toString() + \" must be of type STRING: \" +\nAstToStringBuilder.toString(node), node.getPos());\n}\nif (!type2.isStringType() && !type2.isNull()) {\nthrow new SemanticException(\n\"right operand of \" + node.getOp().toString() + \" must be of type STRING: \" +\nAstToStringBuilder.toString(node), node.getPos());\n}\nif (LikePredicate.Operator.REGEXP.equals(node.getOp()) && !type2.isNull() && node.getChild(1).isLiteral()) {\ntry {\nPattern.compile(((StringLiteral) node.getChild(1)).getValue());\n} catch (PatternSyntaxException e) {\nthrow new SemanticException(\n\"Invalid regular expression in '\" + AstToStringBuilder.toString(node) + \"'\", node.getPos());\n}\n}\nreturn null;\n}\nprivate void predicateBaseAndCheck(Predicate node) {\nnode.setType(Type.BOOLEAN);\nfor (Expr expr : node.getChildren()) {\nif (expr.getType().isOnlyMetricType() ||\n(expr.getType().isComplexType() && !(node instanceof IsNullPredicate))) {\nthrow new SemanticException(\n\"HLL, BITMAP, PERCENTILE and ARRAY, MAP, STRUCT type couldn't as Predicate\", node.getPos());\n}\n}\n}\n@Override\npublic Void visitCastExpr(CastExpr cast, Scope context) {\nType castType;\nif (cast.isImplicit()) {\ncastType = cast.getType();\n} else {\ncastType = cast.getTargetTypeDef().getType();\n}\nif (!Type.canCastTo(cast.getChild(0).getType(), castType)) {\nthrow new SemanticException(\"Invalid type cast from \" + cast.getChild(0).getType().toSql() + \" to \"\n+ castType.toSql() + \" in sql `\" +\nAstToStringBuilder.toString(cast.getChild(0)).replace(\"%\", \"%%\") + \"`\",\ncast.getPos());\n}\ncast.setType(castType);\nreturn null;\n}\n@Override\npublic Void visitFunctionCall(FunctionCallExpr node, Scope scope) {\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\nif (node.isNondeterministicBuiltinFnName()) {\nExprId exprId = analyzeState.getNextNondeterministicId();\nnode.setNondeterministicId(exprId);\n}\nFunction fn;\nString fnName = node.getFnName().getFunction();\ncheckFunction(fnName, node, argumentTypes);\nif (fnName.equals(FunctionSet.COUNT) && node.getParams().isDistinct()) {\nfn = Expr.getBuiltinFunction(FunctionSet.COUNT, new Type[] {argumentTypes[0]},\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n} else if (fnName.equals(FunctionSet.EXCHANGE_BYTES) || fnName.equals(FunctionSet.EXCHANGE_SPEED)) {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nfn = fn.copy();\nfn.setArgsType(argumentTypes);\nfn.setIsNullable(false);\n} else if (fnName.equals(FunctionSet.ARRAY_AGG)) {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nfn = fn.copy();\nList orderByElements = node.getParams().getOrderByElements();\nList isAscOrder = new ArrayList<>();\nList nullsFirst = new ArrayList<>();\nif (orderByElements != null) {\nfor (OrderByElement elem : orderByElements) {\nisAscOrder.add(elem.getIsAsc());\nnullsFirst.add(elem.getNullsFirstParam());\n}\n}\nType[] argsTypes = new Type[argumentTypes.length];\nfor (int i = 0; i < argumentTypes.length; ++i) {\nargsTypes[i] = argumentTypes[i] == Type.NULL ? 
Type.BOOLEAN : argumentTypes[i];\n}\nfn.setArgsType(argsTypes);\nArrayList structTypes = new ArrayList<>(argsTypes.length);\nfor (Type t : argsTypes) {\nstructTypes.add(new ArrayType(t));\n}\n((AggregateFunction) fn).setIntermediateType(new StructType(structTypes));\n((AggregateFunction) fn).setIsAscOrder(isAscOrder);\n((AggregateFunction) fn).setNullsFirst(nullsFirst);\nfn.setRetType(new ArrayType(argsTypes[0]));\n} else if (FunctionSet.PERCENTILE_DISC.equals(fnName)) {\nargumentTypes[1] = Type.DOUBLE;\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_IDENTICAL);\nif (fn.getArgs()[0].isDecimalV3()) {\nList argTypes = Arrays.asList(argumentTypes[0], fn.getArgs()[1]);\nAggregateFunction newFn = new AggregateFunction(fn.getFunctionName(), argTypes, argumentTypes[0],\n((AggregateFunction) fn).getIntermediateType(), fn.hasVarArgs());\nnewFn.setFunctionId(fn.getFunctionId());\nnewFn.setChecksum(fn.getChecksum());\nnewFn.setBinaryType(fn.getBinaryType());\nnewFn.setHasVarArgs(fn.hasVarArgs());\nnewFn.setId(fn.getId());\nnewFn.setUserVisible(fn.isUserVisible());\nnewFn.setisAnalyticFn(((AggregateFunction) fn).isAnalyticFn());\nfn = newFn;\n}\n} else if (FunctionSet.CONCAT.equals(fnName) && node.getChildren().stream().anyMatch(child ->\nchild.getType().isArrayType())) {\nList arrayTypes = Arrays.stream(argumentTypes).map(argumentType -> {\nif (argumentType.isArrayType()) {\nreturn argumentType;\n} else {\nreturn new ArrayType(argumentType);\n}\n}).collect(Collectors.toList());\nTypeManager.getCommonSuperType(arrayTypes);\nfor (int i = 0; i < argumentTypes.length; ++i) {\nif (!argumentTypes[i].isArrayType()) {\nnode.setChild(i, new ArrayExpr(new ArrayType(argumentTypes[i]),\nLists.newArrayList(node.getChild(i))));\n}\n}\nargumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\nnode.resetFnName(null, FunctionSet.ARRAY_CONCAT);\nif (DecimalV3FunctionAnalyzer.argumentTypeContainDecimalV3(FunctionSet.ARRAY_CONCAT, argumentTypes)) {\nfn = DecimalV3FunctionAnalyzer.getDecimalV3Function(session, node, argumentTypes);\n} else {\nfn = Expr.getBuiltinFunction(FunctionSet.ARRAY_CONCAT, argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n}\n} else if (FunctionSet.NAMED_STRUCT.equals(fnName)) {\nfn = Expr.getBuiltinFunction(FunctionSet.NAMED_STRUCT, argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nfn = fn.copy();\nArrayList sf = Lists.newArrayList();\nfor (int i = 0; i < node.getChildren().size(); i = i + 2) {\nStringLiteral literal = (StringLiteral) node.getChild(i);\nsf.add(new StructField(literal.getStringValue(), node.getChild(i + 1).getType()));\n}\nfn.setRetType(new StructType(sf));\n} else if (DecimalV3FunctionAnalyzer.argumentTypeContainDecimalV3(fnName, argumentTypes)) {\nfn = DecimalV3FunctionAnalyzer.getDecimalV3Function(session, node, argumentTypes);\n} else if (Arrays.stream(argumentTypes).anyMatch(arg -> arg.matchesType(Type.TIME))) {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn instanceof AggregateFunction) {\nthrow new SemanticException(\"Time Type can not used in\" + fnName + \" function\", node.getPos());\n}\n} else if (FunctionSet.STR_TO_DATE.equals(fnName)) {\nfn = getStrToDateFunction(node, argumentTypes);\n} else if (FunctionSet.ARRAY_GENERATE.equals(fnName)) {\nfn = getArrayGenerateFunction(node);\nargumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\n} else if 
(DecimalV3FunctionAnalyzer.argumentTypeContainDecimalV2(fnName, argumentTypes)) {\nfn = DecimalV3FunctionAnalyzer.getDecimalV2Function(node, argumentTypes);\n} else {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n}\nif (fn == null) {\nfn = AnalyzerUtils.getUdfFunction(session, node.getFnName(), argumentTypes);\n}\nif (fn == null) {\nString msg = String.format(\"No matching function with signature: %s(%s)\",\nfnName,\nnode.getParams().isStar() ? \"*\" : Joiner.on(\", \")\n.join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList())));\nthrow new SemanticException(msg, node.getPos());\n}\nif (fn instanceof TableFunction) {\nthrow new SemanticException(\"Table function cannot be used in expression\", node.getPos());\n}\nfor (int i = 0; i < fn.getNumArgs(); i++) {\nif (!argumentTypes[i].matchesType(fn.getArgs()[i]) &&\n!Type.canCastTo(argumentTypes[i], fn.getArgs()[i])) {\nString msg = String.format(\"No matching function with signature: %s(%s)\", fnName,\nnode.getParams().isStar() ? \"*\" :\nArrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.joining(\", \")));\nthrow new SemanticException(msg, node.getPos());\n}\n}\nif (fn.hasVarArgs()) {\nType varType = fn.getArgs()[fn.getNumArgs() - 1];\nfor (int i = fn.getNumArgs(); i < argumentTypes.length; i++) {\nif (!argumentTypes[i].matchesType(varType) &&\n!Type.canCastTo(argumentTypes[i], varType)) {\nString msg = String.format(\"Variadic function %s(%s) can't support type: %s\", fnName,\nArrays.stream(fn.getArgs()).map(Type::toSql).collect(Collectors.joining(\", \")),\nargumentTypes[i]);\nthrow new SemanticException(msg, node.getPos());\n}\n}\n}\nnode.setFn(fn);\nnode.setType(fn.getReturnType());\nFunctionAnalyzer.analyze(node);\nreturn null;\n}\nprivate void checkFunction(String fnName, FunctionCallExpr node, Type[] argumentTypes) {\nswitch (fnName) {\ncase FunctionSet.TIME_SLICE:\ncase FunctionSet.DATE_SLICE:\nif (!(node.getChild(1) instanceof IntLiteral)) {\nthrow new SemanticException(\nfnName + \" requires second parameter must be a constant interval\", node.getPos());\n}\nif (((IntLiteral) node.getChild(1)).getValue() <= 0) {\nthrow new SemanticException(\nfnName + \" requires second parameter must be greater than 0\", node.getPos());\n}\nbreak;\ncase FunctionSet.ARRAY_FILTER:\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 array inputs or lambda functions\",\nnode.getPos());\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nif (!Type.canCastTo(node.getChild(1).getType(), Type.ARRAY_BOOLEAN)) {\nthrow new SemanticException(\"The second input of array_filter \" +\nnode.getChild(1).getType().toString() + \" can't cast to ARRAY\", node.getPos());\n}\nbreak;\ncase FunctionSet.ALL_MATCH:\ncase FunctionSet.ANY_MATCH:\nif (node.getChildren().size() != 1) {\nthrow new SemanticException(fnName + \" should have a input array\", node.getPos());\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName + \" should be an 
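Editorial aside: the resolution order just shown is builtin lookup first, then a UDF fallback, then a hard "No matching function" failure, followed by a per-argument cast-compatibility loop. A compressed sketch under invented stand-ins (`TypeKind`, the toy `canCastTo` rule, and the signature table are all hypothetical, not the analyzer's real catalog):

```java
import java.util.Arrays;
import java.util.Map;

public class FunctionResolutionSketch {
    enum TypeKind { INT, BIGINT, DOUBLE, VARCHAR }

    // Toy signature table standing in for the builtin catalog.
    static final Map<String, TypeKind[]> BUILTINS = Map.of(
            "abs", new TypeKind[] {TypeKind.BIGINT},
            "concat", new TypeKind[] {TypeKind.VARCHAR, TypeKind.VARCHAR});

    // Toy widening rule standing in for Type.canCastTo.
    static boolean canCastTo(TypeKind from, TypeKind to) {
        return from == to
                || (from == TypeKind.INT && (to == TypeKind.BIGINT || to == TypeKind.DOUBLE))
                || (from == TypeKind.BIGINT && to == TypeKind.DOUBLE);
    }

    static TypeKind[] resolve(String name, TypeKind[] args) {
        TypeKind[] fn = BUILTINS.get(name);          // 1. builtin lookup
        // 2. a real analyzer would fall back to UDFs here before failing
        if (fn == null) {                            // 3. hard failure
            throw new IllegalStateException(String.format(
                    "No matching function with signature: %s(%s)", name, Arrays.toString(args)));
        }
        for (int i = 0; i < fn.length; i++) {        // 4. per-argument cast check
            if (args[i] != fn[i] && !canCastTo(args[i], fn[i])) {
                throw new IllegalStateException(String.format(
                        "No matching function with signature: %s(%s)", name, Arrays.toString(args)));
            }
        }
        return fn;
    }

    public static void main(String[] args) {
        resolve("abs", new TypeKind[] {TypeKind.INT});      // INT widens to BIGINT: ok
        resolve("abs", new TypeKind[] {TypeKind.VARCHAR});  // no cast path: throws
    }
}
```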
array\",\nnode.getPos());\n}\nif (!Type.canCastTo(node.getChild(0).getType(), Type.ARRAY_BOOLEAN)) {\nthrow new SemanticException(\"The second input of \" + fnName +\nnode.getChild(0).getType().toString() + \" can't cast to ARRAY\", node.getPos());\n}\nbreak;\ncase FunctionSet.ARRAY_SORTBY:\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 array inputs or lambda functions\",\nnode.getPos());\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nbreak;\ncase FunctionSet.ARRAY_GENERATE:\nif (node.getChildren().size() < 1 || node.getChildren().size() > 3) {\nthrow new SemanticException(fnName + \" has wrong input numbers\");\n}\nfor (Expr expr : node.getChildren()) {\nif ((expr instanceof SlotRef) && node.getChildren().size() != 3) {\nthrow new SemanticException(fnName + \" with IntColumn doesn't support default parameters\");\n}\nif (!(expr instanceof IntLiteral) && !(expr instanceof LargeIntLiteral) &&\n!(expr instanceof SlotRef) && !(expr instanceof NullLiteral)) {\nthrow new SemanticException(fnName + \"'s parameter only support Integer\");\n}\n}\nbreak;\ncase FunctionSet.MAP_FILTER:\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 inputs, \" +\n\"but there are just \" + node.getChildren().size() + \" inputs.\");\n}\nif (!node.getChild(0).getType().isMapType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be a map or a lambda function.\");\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be a array or a lambda function.\");\n}\nif (!Type.canCastTo(node.getChild(1).getType(), Type.ARRAY_BOOLEAN)) {\nthrow new SemanticException(\"The second input of map_filter \" +\nnode.getChild(1).getType().toString() + \" can't cast to ARRAY\");\n}\nbreak;\ncase FunctionSet.ARRAY_AGG: {\nfor (int i = 1; i < argumentTypes.length; ++i) {\nif (argumentTypes[i].isComplexType()) {\nthrow new SemanticException(\"array_agg can't support order by nested types, \" +\n\"but \" + i + \"-th input is \" + argumentTypes[i].toSql());\n}\n}\nbreak;\n}\ncase FunctionSet.NAMED_STRUCT: {\nif (node.getChildren().size() < 2) {\nthrow new SemanticException(fnName + \" should have at least two inputs\", node.getPos());\n}\nif (node.getChildren().size() % 2 != 0) {\nthrow new SemanticException(fnName + \" arguments must be in name/value pairs\", node.getPos());\n}\nSet check = Sets.newHashSet();\nfor (int i = 0; i < node.getChildren().size(); i = i + 2) {\nif (!(node.getChild(i) instanceof StringLiteral)) {\nthrow new SemanticException(\n\"The \" + (i + 1) + \"-th input of named_struct must be string literal\",\nnode.getPos());\n}\nString name = ((StringLiteral) node.getChild(i)).getValue();\nif (check.contains(name)) {\nthrow new SemanticException(\"named_struct contains duplicate subfield name: \" +\nname + \" at \" + (i + 1) + \"-th input\", node.getPos());\n}\ncheck.add(name);\n}\nbreak;\n}\ncase FunctionSet.ROW: {\nif (node.getChildren().size() < 1) {\nthrow new 
SemanticException(fnName + \" should have at least one input.\", node.getPos());\n}\nbreak;\n}\n}\n}\nprivate Function getStrToDateFunction(FunctionCallExpr node, Type[] argumentTypes) {\n/*\n* @TODO: Determine the return type of this function\n* If is format is constant and don't contains time part, return date type, to compatible with mysql.\n* In fact we don't want to support str_to_date return date like mysql, reason:\n* 1. The return type of FE/BE str_to_date function signature is datetime, return date\n* let type different, it's will throw unpredictable error\n* 2. Support return date and datetime at same time in one function is complicated.\n* 3. The meaning of the function is confusing. In mysql, will return date if format is a constant\n* string and it's not contains \"%H/%M/%S\" pattern, but it's a trick logic, if format is a variable\n* expression, like: str_to_date(col1, col2), and the col2 is '%Y%m%d', the result always be\n* datetime.\n*/\nFunction fn = Expr.getBuiltinFunction(node.getFnName().getFunction(),\nargumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn == null) {\nreturn null;\n}\nif (!node.getChild(1).isConstant()) {\nreturn fn;\n}\nExpressionMapping expressionMapping =\nnew ExpressionMapping(new Scope(RelationId.anonymous(), new RelationFields()),\nLists.newArrayList());\nScalarOperator format = SqlToScalarOperatorTranslator.translate(node.getChild(1), expressionMapping,\nnew ColumnRefFactory());\nif (format.isConstantRef() && !HAS_TIME_PART.matcher(format.toString()).matches()) {\nreturn Expr.getBuiltinFunction(\"str2date\", argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n}\nreturn fn;\n}\nprivate Function getArrayGenerateFunction(FunctionCallExpr node) {\nif (node.getChildren().size() == 1) {\nLiteralExpr secondParam = (LiteralExpr) node.getChild(0);\nnode.clearChildren();\nnode.addChild(new IntLiteral(1));\nnode.addChild(secondParam);\n}\nif (node.getChildren().size() == 2) {\nint idx = 0;\nBigInteger[] childValues = new BigInteger[2];\nBoolean hasNUll = false;\nfor (Expr expr : node.getChildren()) {\nif (expr instanceof NullLiteral) {\nhasNUll = true;\n} else if (expr instanceof IntLiteral) {\nchildValues[idx++] = BigInteger.valueOf(((IntLiteral) expr).getValue());\n} else {\nchildValues[idx++] = ((LargeIntLiteral) expr).getValue();\n}\n}\nif (hasNUll || childValues[0].compareTo(childValues[1]) < 0) {\nnode.addChild(new IntLiteral(1));\n} else {\nnode.addChild(new IntLiteral(-1));\n}\n}\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\nreturn Expr.getBuiltinFunction(FunctionSet.ARRAY_GENERATE, argumentTypes,\nFunction.CompareMode.IS_SUPERTYPE_OF);\n}\n@Override\npublic Void visitGroupingFunctionCall(GroupingFunctionCallExpr node, Scope scope) {\nif (node.getChildren().size() < 1) {\nthrow new SemanticException(\"GROUPING functions required at least one parameters\", node.getPos());\n}\nif (node.getChildren().stream().anyMatch(e -> !(e instanceof SlotRef))) {\nthrow new SemanticException(\"grouping functions only support column\", node.getPos());\n}\nType[] childTypes = new Type[1];\nchildTypes[0] = Type.BIGINT;\nFunction fn = Expr.getBuiltinFunction(node.getFnName().getFunction(),\nchildTypes, Function.CompareMode.IS_IDENTICAL);\nnode.setFn(fn);\nnode.setType(fn.getReturnType());\nreturn null;\n}\n@Override\npublic Void visitCaseWhenExpr(CaseExpr node, Scope context) {\nint start = 0;\nint end = node.getChildren().size();\nExpr caseExpr = null;\nExpr elseExpr = null;\nif 
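Editorial aside: `getStrToDateFunction` above swaps in `str2date` (returning DATE) only when the format argument is a constant with no time part, otherwise it keeps the DATETIME-returning builtin. A standalone sketch of that decision; note the `HAS_TIME_PART` regex here is a hypothetical stand-in, since the real pattern is not shown in this excerpt:

```java
import java.util.regex.Pattern;

public class StrToDateReturnType {
    // Hypothetical stand-in for the analyzer's HAS_TIME_PART pattern.
    static final Pattern HAS_TIME_PART = Pattern.compile(".*%[HhiSsTf].*");

    /** DATE when a constant format has no time specifiers, else DATETIME. */
    static String returnTypeFor(String constantFormat) {
        return HAS_TIME_PART.matcher(constantFormat).matches() ? "DATETIME" : "DATE";
    }

    public static void main(String[] args) {
        System.out.println(returnTypeFor("%Y-%m-%d"));          // DATE
        System.out.println(returnTypeFor("%Y-%m-%d %H:%i:%s")); // DATETIME
    }
}
```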
(node.hasCaseExpr()) {\ncaseExpr = node.getChild(0);\nstart++;\n}\nif (node.hasElseExpr()) {\nelseExpr = node.getChild(end - 1);\nend--;\n}\nList whenTypes = Lists.newArrayList();\nif (null != caseExpr) {\nwhenTypes.add(caseExpr.getType());\n}\nfor (int i = start; i < end; i = i + 2) {\nwhenTypes.add(node.getChild(i).getType());\n}\nType compatibleType = Type.BOOLEAN;\nif (null != caseExpr) {\ncompatibleType = TypeManager.getCompatibleTypeForCaseWhen(whenTypes);\n}\nfor (Type type : whenTypes) {\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\"Invalid when type cast \" + type.toSql()\n+ \" to \" + compatibleType.toSql(), node.getPos());\n}\n}\nList thenTypes = Lists.newArrayList();\nfor (int i = start + 1; i < end; i = i + 2) {\nthenTypes.add(node.getChild(i).getType());\n}\nif (null != elseExpr) {\nthenTypes.add(elseExpr.getType());\n}\nType returnType = thenTypes.stream().allMatch(Type.NULL::equals) ? Type.BOOLEAN :\nTypeManager.getCompatibleTypeForCaseWhen(thenTypes);\nfor (Type type : thenTypes) {\nif (!Type.canCastTo(type, returnType)) {\nthrow new SemanticException(\"Invalid then type cast \" + type.toSql()\n+ \" to \" + returnType.toSql(), node.getPos());\n}\n}\nnode.setType(returnType);\nreturn null;\n}\n@Override\npublic Void visitSubquery(Subquery node, Scope context) {\nQueryAnalyzer queryAnalyzer = new QueryAnalyzer(session);\nqueryAnalyzer.analyze(node.getQueryStatement(), context);\nnode.setType(node.getQueryStatement().getQueryRelation().getRelationFields().getFieldByIndex(0).getType());\nreturn null;\n}\n@Override\npublic Void visitAnalyticExpr(AnalyticExpr node, Scope context) {\nvisit(node.getFnCall(), context);\nnode.setType(node.getFnCall().getType());\nif (node.getWindow() != null) {\nif (node.getWindow().getLeftBoundary() != null &&\nnode.getWindow().getLeftBoundary().getExpr() != null) {\nvisit(node.getWindow().getLeftBoundary().getExpr(), context);\n}\nif (node.getWindow().getRightBoundary() != null &&\nnode.getWindow().getRightBoundary().getExpr() != null) {\nvisit(node.getWindow().getRightBoundary().getExpr(), context);\n}\n}\nnode.getPartitionExprs().forEach(e -> visit(e, context));\nnode.getOrderByElements().stream().map(OrderByElement::getExpr).forEach(e -> visit(e, context));\nverifyAnalyticExpression(node);\nreturn null;\n}\n@Override\npublic Void visitInformationFunction(InformationFunction node, Scope context) {\nString funcType = node.getFuncType();\nif (funcType.equalsIgnoreCase(\"DATABASE\") || funcType.equalsIgnoreCase(\"SCHEMA\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(ClusterNamespace.getNameFromFullName(session.getDatabase()));\n} else if (funcType.equalsIgnoreCase(\"USER\")) {\nnode.setType(Type.VARCHAR);\nString user = session.getQualifiedUser();\nString remoteIP = session.getRemoteIP();\nnode.setStrValue(new UserIdentity(user, remoteIP).toString());\n} else if (funcType.equalsIgnoreCase(\"CURRENT_USER\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(session.getCurrentUserIdentity().toString());\n} else if (funcType.equalsIgnoreCase(\"CURRENT_ROLE\")) {\nnode.setType(Type.VARCHAR);\nAuthorizationMgr manager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();\nList roleName = new ArrayList<>();\ntry {\nfor (Long roleId : session.getCurrentRoleIds()) {\nRolePrivilegeCollection rolePrivilegeCollection =\nmanager.getRolePrivilegeCollectionUnlocked(roleId, false);\nif (rolePrivilegeCollection != null) {\nroleName.add(rolePrivilegeCollection.getName());\n}\n}\n} catch (PrivilegeException e) {\nthrow new 
SemanticException(e.getMessage());\n}\nif (roleName.isEmpty()) {\nnode.setStrValue(\"NONE\");\n} else {\nnode.setStrValue(Joiner.on(\", \").join(roleName));\n}\n} else if (funcType.equalsIgnoreCase(\"CONNECTION_ID\")) {\nnode.setType(Type.BIGINT);\nnode.setIntValue(session.getConnectionId());\nnode.setStrValue(\"\");\n} else if (funcType.equalsIgnoreCase(\"CATALOG\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(session.getCurrentCatalog());\n}\nreturn null;\n}\n@Override\npublic Void visitVariableExpr(VariableExpr node, Scope context) {\ntry {\nif (node.getSetType().equals(SetType.USER)) {\nUserVariable userVariable = session.getUserVariables(node.getName());\nif (userVariable == null) {\nnode.setType(Type.STRING);\nnode.setIsNull();\nreturn null;\n}\nType variableType = userVariable.getEvaluatedExpression().getType();\nnode.setType(variableType);\nif (userVariable.getEvaluatedExpression() instanceof NullLiteral) {\nnode.setIsNull();\n} else {\nnode.setValue(userVariable.getEvaluatedExpression().getRealObjectValue());\n}\n} else {\nVariableMgr.fillValue(session.getSessionVariable(), node);\nif (!Strings.isNullOrEmpty(node.getName()) &&\nnode.getName().equalsIgnoreCase(SessionVariable.SQL_MODE)) {\nnode.setType(Type.VARCHAR);\nnode.setValue(SqlModeHelper.decode((long) node.getValue()));\n}\n}\n} catch (AnalysisException | DdlException e) {\nthrow new SemanticException(e.getMessage());\n}\nreturn null;\n}\n@Override\npublic Void visitDefaultValueExpr(DefaultValueExpr node, Scope context) {\nnode.setType(Type.VARCHAR);\nreturn null;\n}\n@Override\npublic Void visitCloneExpr(CloneExpr node, Scope context) {\nreturn null;\n}\n@Override\n}", + "context_after": "class Visitor extends AstVisitor {\nprivate static final List ADD_DATE_FUNCTIONS = Lists.newArrayList(FunctionSet.DATE_ADD,\nFunctionSet.ADDDATE, FunctionSet.DAYS_ADD, FunctionSet.TIMESTAMPADD);\nprivate static final List SUB_DATE_FUNCTIONS =\nLists.newArrayList(FunctionSet.DATE_SUB, FunctionSet.SUBDATE,\nFunctionSet.DAYS_SUB);\nprivate final AnalyzeState analyzeState;\nprivate final ConnectContext session;\npublic Visitor(AnalyzeState analyzeState, ConnectContext session) {\nthis.analyzeState = analyzeState;\nthis.session = session;\n}\n@Override\npublic Void visitExpression(Expr node, Scope scope) {\nthrow new SemanticException(\"not yet implemented: expression analyzer for \" + node.getClass().getName(),\nnode.getPos());\n}\nprivate void handleResolvedField(SlotRef slot, ResolvedField resolvedField) {\nanalyzeState.addColumnReference(slot, FieldId.from(resolvedField));\n}\n@Override\npublic Void visitSubfieldExpr(SubfieldExpr node, Scope scope) {\nExpr child = node.getChild(0);\nif (!child.getType().isStructType()) {\nthrow new SemanticException(child.toSql() + \" must be a struct type, check if you are using `'`\",\nchild.getPos());\n}\nList fieldNames = node.getFieldNames();\nList rightNames = Lists.newArrayList();\nType tmpType = child.getType();\nfor (String fieldName : fieldNames) {\nStructType structType = (StructType) tmpType;\nStructField structField = structType.getField(fieldName);\nif (structField == null) {\nthrow new SemanticException(String.format(\"Struct subfield '%s' cannot be resolved\", fieldName),\nnode.getPos());\n}\nrightNames.add(structField.getName());\ntmpType = structField.getType();\n}\nnode.setFieldNames(rightNames);\nnode.setType(tmpType);\nreturn null;\n}\n@Override\npublic Void visitSlot(SlotRef node, Scope scope) {\nResolvedField resolvedField = 
scope.resolveField(node);\nnode.setType(resolvedField.getField().getType());\nnode.setTblName(resolvedField.getField().getRelationAlias());\nnode.setNullable(resolvedField.getField().isNullable());\nif (node.getType().isStructType()) {\nnode.setCol(resolvedField.getField().getName());\nnode.setLabel(resolvedField.getField().getName());\nif (resolvedField.getField().getTmpUsedStructFieldPos().size() > 0) {\nnode.setUsedStructFieldPos(resolvedField.getField().getTmpUsedStructFieldPos());\nnode.resetStructInfo();\n}\n}\nhandleResolvedField(node, resolvedField);\nreturn null;\n}\n@Override\npublic Void visitFieldReference(FieldReference node, Scope scope) {\nField field = scope.getRelationFields().getFieldByIndex(node.getFieldIndex());\nnode.setType(field.getType());\nreturn null;\n}\n@Override\npublic Void visitArrayExpr(ArrayExpr node, Scope scope) {\nif (!node.getChildren().isEmpty()) {\ntry {\nType targetItemType;\nif (node.getType() != null) {\ntargetItemType = ((ArrayType) node.getType()).getItemType();\n} else {\ntargetItemType = TypeManager.getCommonSuperType(\nnode.getChildren().stream().map(Expr::getType).collect(Collectors.toList()));\n}\nfor (int i = 0; i < node.getChildren().size(); i++) {\nif (!node.getChildren().get(i).getType().matchesType(targetItemType)) {\nnode.castChild(targetItemType, i);\n}\n}\nnode.setType(new ArrayType(targetItemType));\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n} else {\nnode.setType(Type.ARRAY_NULL);\n}\nreturn null;\n}\n@Override\npublic Void visitMapExpr(MapExpr node, Scope scope) {\nif (!node.getChildren().isEmpty()) {\nType originalType = node.getType();\nif (originalType == Type.ANY_MAP) {\nType keyType = node.getKeyCommonType();\nType valueType = node.getValueCommonType();\nnode.setType(new MapType(keyType, valueType));\n}\n} else {\nnode.setType(new MapType(Type.NULL, Type.NULL));\n}\nreturn null;\n}\n@Override\npublic Void visitCollectionElementExpr(CollectionElementExpr node, Scope scope) {\nExpr expr = node.getChild(0);\nExpr subscript = node.getChild(1);\nif (expr.getType().isArrayType()) {\nif (!subscript.getType().isNumericType()) {\nthrow new SemanticException(\"array subscript must have type integer\", subscript.getPos());\n}\ntry {\nif (subscript.getType().getPrimitiveType() != PrimitiveType.INT) {\nnode.castChild(Type.INT, 1);\n}\nnode.setType(((ArrayType) expr.getType()).getItemType());\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n} else if (expr.getType().isMapType()) {\ntry {\nif (subscript.getType().getPrimitiveType() !=\n((MapType) expr.getType()).getKeyType().getPrimitiveType()) {\nnode.castChild(((MapType) expr.getType()).getKeyType(), 1);\n}\nnode.setType(((MapType) expr.getType()).getValueType());\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n} else if (expr.getType().isStructType()) {\nif (!(subscript instanceof IntLiteral)) {\nthrow new SemanticException(\"struct subscript must have integer pos\", subscript.getPos());\n}\nlong index = ((IntLiteral) subscript).getValue();\nlong fieldSize = ((StructType) expr.getType()).getFields().size();\nif (fieldSize < Math.abs(index)) {\nthrow new SemanticException(\"the pos is out of struct subfields\", subscript.getPos());\n} else if (index == 0) {\nthrow new SemanticException(\"the pos can't set to zero\", subscript.getPos());\n}\nindex = index > 0 ? 
index - 1 : fieldSize + index;\nStructField structField = ((StructType) expr.getType()).getFields().get((int) index);\nnode.setType(structField.getType());\n} else {\nthrow new SemanticException(\"cannot subscript type \" + expr.getType()\n+ \" because it is not an array or a map or a struct\", expr.getPos());\n}\nreturn null;\n}\n@Override\npublic Void visitArraySliceExpr(ArraySliceExpr node, Scope scope) {\nif (!node.getChild(0).getType().isArrayType()) {\nthrow new SemanticException(\"cannot subscript type\" +\nnode.getChild(0).getType() + \" because it is not an array\", node.getChild(0).getPos());\n}\nnode.setType(node.getChild(0).getType());\nreturn null;\n}\n@Override\npublic Void visitArrowExpr(ArrowExpr node, Scope scope) {\nExpr item = node.getChild(0);\nExpr key = node.getChild(1);\nif (!key.isLiteral() || !key.getType().isStringType()) {\nthrow new SemanticException(\"right operand of -> should be string literal, but got \" + key,\nkey.getPos());\n}\nif (!item.getType().isJsonType()) {\nthrow new SemanticException(\n\"-> operator could only be used for json column, but got \" + item.getType(), item.getPos());\n}\nnode.setType(Type.JSON);\nreturn null;\n}\n@Override\npublic Void visitLambdaFunctionExpr(LambdaFunctionExpr node, Scope scope) {\nif (scope.getLambdaInputs().size() == 0) {\nthrow new SemanticException(\n\"Lambda Functions can only be used in high-order functions with arrays/maps\",\nnode.getPos());\n}\nif (scope.getLambdaInputs().size() != node.getChildren().size() - 1) {\nthrow new SemanticException(\"Lambda arguments should equal to lambda input arrays\", node.getPos());\n}\nSet set = new HashSet<>();\nList args = Lists.newArrayList();\nfor (int i = 1; i < node.getChildren().size(); ++i) {\nargs.add((LambdaArgument) node.getChild(i));\nString name = ((LambdaArgument) node.getChild(i)).getName();\nif (set.contains(name)) {\nthrow new SemanticException(\"Lambda argument: \" + name + \" is duplicated\",\nnode.getChild(i).getPos());\n}\nset.add(name);\n((LambdaArgument) node.getChild(i)).setNullable(scope.getLambdaInputs().get(i - 1).isNullable());\nnode.getChild(i).setType(scope.getLambdaInputs().get(i - 1).getType());\n}\nScope lambdaScope = new Scope(args, scope);\nExpressionAnalyzer.analyzeExpression(node.getChild(0), this.analyzeState, lambdaScope, this.session);\nnode.setType(Type.FUNCTION);\nscope.clearLambdaInputs();\nreturn null;\n}\n@Override\npublic Void visitCompoundPredicate(CompoundPredicate node, Scope scope) {\nnode.setType(Type.BOOLEAN);\nfor (int i = 0; i < node.getChildren().size(); i++) {\nExpr child = node.getChild(i);\nif (child.getType().isBoolean() || child.getType().isNull()) {\n} else if (!session.getSessionVariable().isEnableStrictType() &&\nType.canCastTo(child.getType(), Type.BOOLEAN)) {\nnode.getChildren().set(i, new CastExpr(Type.BOOLEAN, child));\n} else {\nthrow new SemanticException(child.toSql() + \" can not be converted to boolean type.\");\n}\n}\nreturn null;\n}\n@Override\npublic Void visitBetweenPredicate(BetweenPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList());\nType compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list);\nfor (Type type : list) {\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\n\"between predicate type \" + type.toSql() + \" with type \" + compatibleType.toSql()\n+ \" is invalid\", node.getPos());\n}\n}\nreturn null;\n}\n@Override\npublic Void 
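Editorial aside: the struct-subscript handling completed at the top of this stretch treats the literal index as 1-based, rejects zero, lets negatives count from the end, and only then converts to a 0-based field position. A minimal self-contained sketch of that normalization (plain Java, mirroring the bounds checks in the snapshot):

```java
public class StructSubscriptSketch {
    /**
     * Normalize a 1-based struct subscript (negative counts from the end)
     * to a 0-based field index.
     */
    static int normalize(long index, long fieldCount) {
        if (index == 0) {
            throw new IllegalArgumentException("the pos can't set to zero");
        }
        if (Math.abs(index) > fieldCount) {
            throw new IllegalArgumentException("the pos is out of struct subfields");
        }
        return (int) (index > 0 ? index - 1 : fieldCount + index);
    }

    public static void main(String[] args) {
        System.out.println(normalize(1, 3));  // 0 (first field)
        System.out.println(normalize(-1, 3)); // 2 (last field)
    }
}
```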
visitBinaryPredicate(BinaryPredicate node, Scope scope) {\nType type1 = node.getChild(0).getType();\nType type2 = node.getChild(1).getType();\nType compatibleType = TypeManager.getCompatibleTypeForBinary(node.getOp(), type1, type2);\nfinal String ERROR_MSG = \"Column type %s does not support binary predicate operation with type %s\";\nif (!Type.canCastTo(type1, compatibleType)) {\nthrow new SemanticException(String.format(ERROR_MSG, type1.toSql(), type2.toSql()), node.getPos());\n}\nif (!Type.canCastTo(type2, compatibleType)) {\nthrow new SemanticException(String.format(ERROR_MSG, type1.toSql(), type2.toSql()), node.getPos());\n}\nnode.setType(Type.BOOLEAN);\nreturn null;\n}\n@Override\npublic Void visitArithmeticExpr(ArithmeticExpr node, Scope scope) {\nif (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.BINARY_INFIX) {\nArithmeticExpr.Operator op = node.getOp();\nType t1 = node.getChild(0).getType().getNumResultType();\nType t2 = node.getChild(1).getType().getNumResultType();\nif (t1.isDecimalV3() || t2.isDecimalV3()) {\nArithmeticExpr.TypeTriple typeTriple = null;\ntry {\ntypeTriple = node.rewriteDecimalOperation();\n} catch (AnalysisException ex) {\nthrow new SemanticException(ex.getMessage());\n}\nPreconditions.checkArgument(typeTriple != null);\nType[] args = {typeTriple.lhsTargetType, typeTriple.rhsTargetType};\nFunction fn = Expr.getBuiltinFunction(op.getName(), args, Function.CompareMode.IS_IDENTICAL);\nFunction newFn = new ScalarFunction(fn.getFunctionName(), args, typeTriple.returnType, fn.hasVarArgs());\nnode.setType(typeTriple.returnType);\nnode.setFn(newFn);\nreturn null;\n}\nType lhsType;\nType rhsType;\nswitch (op) {\ncase MULTIPLY:\ncase ADD:\ncase SUBTRACT:\nlhsType = ArithmeticExpr.getBiggerType(ArithmeticExpr.getCommonType(t1, t2));\nrhsType = lhsType;\nbreak;\ncase MOD:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nrhsType = lhsType;\nbreak;\ncase DIVIDE:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nif (lhsType.isFixedPointType()) {\nlhsType = Type.DOUBLE;\n}\nrhsType = lhsType;\nbreak;\ncase INT_DIVIDE:\ncase BITAND:\ncase BITOR:\ncase BITXOR:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nif (!lhsType.isFixedPointType()) {\nlhsType = Type.BIGINT;\n}\nrhsType = lhsType;\nbreak;\ncase BIT_SHIFT_LEFT:\ncase BIT_SHIFT_RIGHT:\ncase BIT_SHIFT_RIGHT_LOGICAL:\nlhsType = t1;\nrhsType = Type.BIGINT;\nbreak;\ndefault:\nthrow new SemanticException(\"Unknown arithmetic operation \" + op + \" in: \" + node,\nnode.getPos());\n}\nif (node.getChild(0).getType().equals(Type.NULL) && node.getChild(1).getType().equals(Type.NULL)) {\nlhsType = Type.NULL;\nrhsType = Type.NULL;\n}\nif (lhsType.isInvalid() || rhsType.isInvalid()) {\nthrow new SemanticException(\"Any function type can not cast to \" + Type.INVALID.toSql());\n}\nif (!Type.NULL.equals(node.getChild(0).getType()) && !Type.canCastTo(t1, lhsType)) {\nthrow new SemanticException(\n\"cast type \" + node.getChild(0).getType().toSql() + \" with type \" + lhsType.toSql()\n+ \" is invalid\", node.getPos());\n}\nif (!Type.NULL.equals(node.getChild(1).getType()) && !Type.canCastTo(t2, rhsType)) {\nthrow new SemanticException(\n\"cast type \" + node.getChild(1).getType().toSql() + \" with type \" + rhsType.toSql()\n+ \" is invalid\", node.getPos());\n}\nFunction fn = Expr.getBuiltinFunction(op.getName(), new Type[] {lhsType, rhsType},\nFunction.CompareMode.IS_SUPERTYPE_OF);\nif (fn == null) {\nthrow new SemanticException(String.format(\n\"No matching function '%s' with operand types %s and %s\", 
node.getOp().getName(), t1, t2));\n}\n/*\n* commonType is the common type of the parameters of the function,\n* and fn.getReturnType() is the return type of the function after execution\n* So we use fn.getReturnType() as node type\n*/\nnode.setType(fn.getReturnType());\nnode.setFn(fn);\n} else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_PREFIX) {\nFunction fn = Expr.getBuiltinFunction(\nnode.getOp().getName(), new Type[] {Type.BIGINT}, Function.CompareMode.IS_SUPERTYPE_OF);\nnode.setType(Type.BIGINT);\nnode.setFn(fn);\n} else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_POSTFIX) {\nthrow new SemanticException(\"not yet implemented: expression analyzer for \" + node.getClass().getName(),\nnode.getPos());\n} else {\nthrow new SemanticException(\"not yet implemented: expression analyzer for \" + node.getClass().getName(),\nnode.getPos());\n}\nreturn null;\n}\n@Override\npublic Void visitTimestampArithmeticExpr(TimestampArithmeticExpr node, Scope scope) {\nnode.setChild(0, TypeManager.addCastExpr(node.getChild(0), Type.DATETIME));\nString funcOpName;\nif (node.getFuncName() != null) {\nif (ADD_DATE_FUNCTIONS.contains(node.getFuncName())) {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"add\");\n} else if (SUB_DATE_FUNCTIONS.contains(node.getFuncName())) {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"sub\");\n} else {\nnode.setChild(1, TypeManager.addCastExpr(node.getChild(1), Type.DATETIME));\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"diff\");\n}\n} else {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(),\n(node.getOp() == ArithmeticExpr.Operator.ADD) ? \"add\" : \"sub\");\n}\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType)\n.toArray(Type[]::new);\nFunction fn = Expr.getBuiltinFunction(funcOpName.toLowerCase(), argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn == null) {\nString msg = String.format(\"No matching function with signature: %s(%s)\", funcOpName, Joiner.on(\", \")\n.join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList())));\nthrow new SemanticException(msg, node.getPos());\n}\nnode.setType(fn.getReturnType());\nnode.setFn(fn);\nreturn null;\n}\n@Override\npublic Void visitExistsPredicate(ExistsPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nreturn null;\n}\n@Override\npublic Void visitInPredicate(InPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList queryExpressions = Lists.newArrayList();\nnode.collect(arg -> arg instanceof Subquery, queryExpressions);\nif (queryExpressions.size() > 0 && node.getChildren().size() > 2) {\nthrow new SemanticException(\"In Predicate only support literal expression list\", node.getPos());\n}\nList list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList());\nType compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list);\nif (compatibleType == Type.INVALID) {\nthrow new SemanticException(\"The input types (\" + list.stream().map(Type::toSql).collect(\nCollectors.joining(\",\")) + \") of in predict are not compatible\", node.getPos());\n}\nfor (Expr child : node.getChildren()) {\nType type = child.getType();\nif (type.isJsonType() && queryExpressions.size() > 0) {\nthrow new SemanticException(\"In predicate of JSON does not support subquery\", child.getPos());\n}\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\n\"in predicate type \" + type.toSql() + \" with type \" + 
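Editorial aside: `visitTimestampArithmeticExpr` above composes the builtin name from the time unit plus an add/sub/diff suffix and lowercases it before lookup. A tiny runnable sketch of that name composition (the unit and op strings are illustrative inputs):

```java
public class TimestampFnNameSketch {
    // Mirrors the "<UNIT>S_<op>" naming shown above, lowercased before lookup.
    static String funcOpName(String timeUnit, String op) {
        return String.format("%sS_%s", timeUnit, op).toLowerCase();
    }

    public static void main(String[] args) {
        System.out.println(funcOpName("DAY", "add"));   // days_add
        System.out.println(funcOpName("MONTH", "sub")); // months_sub
        System.out.println(funcOpName("HOUR", "diff")); // hours_diff
    }
}
```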
compatibleType.toSql()\n+ \" is invalid\", child.getPos());\n}\n}\nreturn null;\n}\n@Override\npublic Void visitMultiInPredicate(MultiInPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList leftTypes =\nnode.getChildren().stream().limit(node.getNumberOfColumns()).map(Expr::getType)\n.collect(Collectors.toList());\nSubquery inSubquery = (Subquery) node.getChild(node.getNumberOfColumns());\nList rightTypes =\ninSubquery.getQueryStatement().getQueryRelation().getOutputExpression().stream().map(Expr::getType).\ncollect(Collectors.toList());\nif (leftTypes.size() != rightTypes.size()) {\nthrow new SemanticException(\n\"subquery must return the same number of columns as provided by the IN predicate\",\nnode.getPos());\n}\nfor (int i = 0; i < rightTypes.size(); ++i) {\nif (leftTypes.get(i).isJsonType() || rightTypes.get(i).isJsonType() || leftTypes.get(i).isMapType() ||\nrightTypes.get(i).isMapType() || leftTypes.get(i).isStructType() ||\nrightTypes.get(i).isStructType()) {\nthrow new SemanticException(\"InPredicate of JSON, Map, Struct types is not supported\");\n}\nif (!Type.canCastTo(leftTypes.get(i), rightTypes.get(i))) {\nthrow new SemanticException(\n\"in predicate type \" + leftTypes.get(i).toSql() + \" with type \" + rightTypes.get(i).toSql()\n+ \" is invalid\");\n}\n}\nreturn null;\n}\n@Override\npublic Void visitLiteral(LiteralExpr node, Scope scope) {\nif (node instanceof LargeIntLiteral) {\nBigInteger value = ((LargeIntLiteral) node).getValue();\nif (value.compareTo(LargeIntLiteral.LARGE_INT_MIN) < 0 ||\nvalue.compareTo(LargeIntLiteral.LARGE_INT_MAX) > 0) {\nthrow new SemanticException(PARSER_ERROR_MSG.numOverflow(value.toString()), node.getPos());\n}\n}\nreturn null;\n}\n@Override\npublic Void visitIsNullPredicate(IsNullPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nreturn null;\n}\n@Override\npublic Void visitLikePredicate(LikePredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nType type1 = node.getChild(0).getType();\nType type2 = node.getChild(1).getType();\nif (!type1.isStringType() && !type1.isNull()) {\nthrow new SemanticException(\n\"left operand of \" + node.getOp().toString() + \" must be of type STRING: \" +\nAstToStringBuilder.toString(node), node.getPos());\n}\nif (!type2.isStringType() && !type2.isNull()) {\nthrow new SemanticException(\n\"right operand of \" + node.getOp().toString() + \" must be of type STRING: \" +\nAstToStringBuilder.toString(node), node.getPos());\n}\nif (LikePredicate.Operator.REGEXP.equals(node.getOp()) && !type2.isNull() && node.getChild(1).isLiteral()) {\ntry {\nPattern.compile(((StringLiteral) node.getChild(1)).getValue());\n} catch (PatternSyntaxException e) {\nthrow new SemanticException(\n\"Invalid regular expression in '\" + AstToStringBuilder.toString(node) + \"'\", node.getPos());\n}\n}\nreturn null;\n}\nprivate void predicateBaseAndCheck(Predicate node) {\nnode.setType(Type.BOOLEAN);\nfor (Expr expr : node.getChildren()) {\nif (expr.getType().isOnlyMetricType() ||\n(expr.getType().isComplexType() && !(node instanceof IsNullPredicate) &&\n!(node instanceof InPredicate))) {\nthrow new SemanticException(\n\"HLL, BITMAP, PERCENTILE and ARRAY, MAP, STRUCT type couldn't as Predicate\", node.getPos());\n}\n}\n}\n@Override\npublic Void visitCastExpr(CastExpr cast, Scope context) {\nType castType;\nif (cast.isImplicit()) {\ncastType = cast.getType();\n} else {\ncastType = cast.getTargetTypeDef().getType();\n}\nif (!Type.canCastTo(cast.getChild(0).getType(), castType)) {\nthrow new SemanticException(\"Invalid 
type cast from \" + cast.getChild(0).getType().toSql() + \" to \"\n+ castType.toSql() + \" in sql `\" +\nAstToStringBuilder.toString(cast.getChild(0)).replace(\"%\", \"%%\") + \"`\",\ncast.getPos());\n}\ncast.setType(castType);\nreturn null;\n}\n@Override\npublic Void visitFunctionCall(FunctionCallExpr node, Scope scope) {\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\nif (node.isNondeterministicBuiltinFnName()) {\nExprId exprId = analyzeState.getNextNondeterministicId();\nnode.setNondeterministicId(exprId);\n}\nFunction fn;\nString fnName = node.getFnName().getFunction();\ncheckFunction(fnName, node, argumentTypes);\nif (fnName.equals(FunctionSet.COUNT) && node.getParams().isDistinct()) {\nfn = Expr.getBuiltinFunction(FunctionSet.COUNT, new Type[] {argumentTypes[0]},\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n} else if (fnName.equals(FunctionSet.EXCHANGE_BYTES) || fnName.equals(FunctionSet.EXCHANGE_SPEED)) {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nfn = fn.copy();\nfn.setArgsType(argumentTypes);\nfn.setIsNullable(false);\n} else if (fnName.equals(FunctionSet.ARRAY_AGG) || fnName.equals(FunctionSet.GROUP_CONCAT)) {\nfn = Expr.getBuiltinFunction(fnName, new Type[] {argumentTypes[0]},\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nfn = fn.copy();\nList orderByElements = node.getParams().getOrderByElements();\nList isAscOrder = new ArrayList<>();\nList nullsFirst = new ArrayList<>();\nif (orderByElements != null) {\nfor (OrderByElement elem : orderByElements) {\nisAscOrder.add(elem.getIsAsc());\nnullsFirst.add(elem.getNullsFirstParam());\n}\n}\nType[] argsTypes = new Type[argumentTypes.length];\nfor (int i = 0; i < argumentTypes.length; ++i) {\nargsTypes[i] = argumentTypes[i] == Type.NULL ? 
Type.BOOLEAN : argumentTypes[i];\nif (fnName.equals(FunctionSet.GROUP_CONCAT) && i < node.getChildren().size() - isAscOrder.size()) {\nargsTypes[i] = Type.VARCHAR;\n}\n}\nfn.setArgsType(argsTypes);\nArrayList structTypes = new ArrayList<>(argsTypes.length);\nfor (Type t : argsTypes) {\nstructTypes.add(new ArrayType(t));\n}\n((AggregateFunction) fn).setIntermediateType(new StructType(structTypes));\n((AggregateFunction) fn).setIsAscOrder(isAscOrder);\n((AggregateFunction) fn).setNullsFirst(nullsFirst);\nif (fnName.equals(FunctionSet.ARRAY_AGG)) {\nfn.setRetType(new ArrayType(argsTypes[0]));\n} else {\nboolean outputConst = true;\nfor (int i = 0; i < node.getChildren().size() - isAscOrder.size() - 1; i++) {\nif (!node.getChild(i).isConstant()) {\noutputConst = false;\nbreak;\n}\n}\n((AggregateFunction) fn).setIsDistinct(node.getParams().isDistinct() &&\n(!isAscOrder.isEmpty() || outputConst));\nfn.setRetType(Type.VARCHAR);\n}\n} else if (FunctionSet.PERCENTILE_DISC.equals(fnName)) {\nargumentTypes[1] = Type.DOUBLE;\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_IDENTICAL);\nif (fn.getArgs()[0].isDecimalV3()) {\nList argTypes = Arrays.asList(argumentTypes[0], fn.getArgs()[1]);\nAggregateFunction newFn = new AggregateFunction(fn.getFunctionName(), argTypes, argumentTypes[0],\n((AggregateFunction) fn).getIntermediateType(), fn.hasVarArgs());\nnewFn.setFunctionId(fn.getFunctionId());\nnewFn.setChecksum(fn.getChecksum());\nnewFn.setBinaryType(fn.getBinaryType());\nnewFn.setHasVarArgs(fn.hasVarArgs());\nnewFn.setId(fn.getId());\nnewFn.setUserVisible(fn.isUserVisible());\nnewFn.setisAnalyticFn(((AggregateFunction) fn).isAnalyticFn());\nfn = newFn;\n}\n} else if (FunctionSet.CONCAT.equals(fnName) && node.getChildren().stream().anyMatch(child ->\nchild.getType().isArrayType())) {\nList arrayTypes = Arrays.stream(argumentTypes).map(argumentType -> {\nif (argumentType.isArrayType()) {\nreturn argumentType;\n} else {\nreturn new ArrayType(argumentType);\n}\n}).collect(Collectors.toList());\nTypeManager.getCommonSuperType(arrayTypes);\nfor (int i = 0; i < argumentTypes.length; ++i) {\nif (!argumentTypes[i].isArrayType()) {\nnode.setChild(i, new ArrayExpr(new ArrayType(argumentTypes[i]),\nLists.newArrayList(node.getChild(i))));\n}\n}\nargumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\nnode.resetFnName(null, FunctionSet.ARRAY_CONCAT);\nif (DecimalV3FunctionAnalyzer.argumentTypeContainDecimalV3(FunctionSet.ARRAY_CONCAT, argumentTypes)) {\nfn = DecimalV3FunctionAnalyzer.getDecimalV3Function(session, node, argumentTypes);\n} else {\nfn = Expr.getBuiltinFunction(FunctionSet.ARRAY_CONCAT, argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n}\n} else if (FunctionSet.NAMED_STRUCT.equals(fnName)) {\nfn = Expr.getBuiltinFunction(FunctionSet.NAMED_STRUCT, argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nfn = fn.copy();\nArrayList sf = Lists.newArrayList();\nfor (int i = 0; i < node.getChildren().size(); i = i + 2) {\nStringLiteral literal = (StringLiteral) node.getChild(i);\nsf.add(new StructField(literal.getStringValue(), node.getChild(i + 1).getType()));\n}\nfn.setRetType(new StructType(sf));\n} else if (DecimalV3FunctionAnalyzer.argumentTypeContainDecimalV3(fnName, argumentTypes)) {\nfn = DecimalV3FunctionAnalyzer.getDecimalV3Function(session, node, argumentTypes);\n} else if (Arrays.stream(argumentTypes).anyMatch(arg -> arg.matchesType(Type.TIME))) {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, 
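Editorial aside: the `concat`-with-arrays branch above rewrites the call to `array_concat` by wrapping every scalar argument in a one-element array first. A standalone sketch of that wrapping step, with `Object`/`List` as invented stand-ins for `Expr`/`ArrayExpr`:

```java
import java.util.ArrayList;
import java.util.List;

public class ConcatRewriteSketch {
    /**
     * When any argument is array-typed, wrap each scalar argument into a
     * one-element array so the whole call can be retargeted to array_concat.
     */
    @SuppressWarnings("unchecked")
    static List<List<Object>> wrapScalars(List<Object> args) {
        List<List<Object>> wrapped = new ArrayList<>();
        for (Object arg : args) {
            if (arg instanceof List) {
                wrapped.add((List<Object>) arg); // already an array: keep as-is
            } else {
                wrapped.add(List.of(arg));       // scalar: wrap in a 1-element array
            }
        }
        return wrapped;
    }

    public static void main(String[] args) {
        List<Object> callArgs = new ArrayList<>();
        callArgs.add(List.of(1, 2)); // array-typed argument
        callArgs.add(3);             // scalar argument
        System.out.println(wrapScalars(callArgs)); // [[1, 2], [3]]
    }
}
```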
Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn instanceof AggregateFunction) {\nthrow new SemanticException(\"Time Type can not used in\" + fnName + \" function\", node.getPos());\n}\n} else if (FunctionSet.STR_TO_DATE.equals(fnName)) {\nfn = getStrToDateFunction(node, argumentTypes);\n} else if (FunctionSet.ARRAY_GENERATE.equals(fnName)) {\nfn = getArrayGenerateFunction(node);\nargumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\n} else if (DecimalV3FunctionAnalyzer.argumentTypeContainDecimalV2(fnName, argumentTypes)) {\nfn = DecimalV3FunctionAnalyzer.getDecimalV2Function(node, argumentTypes);\n} else {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n}\nif (fn == null) {\nfn = AnalyzerUtils.getUdfFunction(session, node.getFnName(), argumentTypes);\n}\nif (fn == null) {\nfn = ScalarOperatorEvaluator.INSTANCE.getMetaFunction(node.getFnName(), argumentTypes);\n}\nif (fn == null) {\nString msg = String.format(\"No matching function with signature: %s(%s)\",\nfnName,\nnode.getParams().isStar() ? \"*\" : Joiner.on(\", \")\n.join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList())));\nthrow new SemanticException(msg, node.getPos());\n}\nif (fn instanceof TableFunction) {\nthrow new SemanticException(\"Table function cannot be used in expression\", node.getPos());\n}\nfor (int i = 0; i < fn.getNumArgs(); i++) {\nif (!argumentTypes[i].matchesType(fn.getArgs()[i]) &&\n!Type.canCastTo(argumentTypes[i], fn.getArgs()[i])) {\nString msg = String.format(\"No matching function with signature: %s(%s)\", fnName,\nnode.getParams().isStar() ? \"*\" :\nArrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.joining(\", \")));\nthrow new SemanticException(msg, node.getPos());\n}\n}\nif (fn.hasVarArgs()) {\nType varType = fn.getArgs()[fn.getNumArgs() - 1];\nfor (int i = fn.getNumArgs(); i < argumentTypes.length; i++) {\nif (!argumentTypes[i].matchesType(varType) &&\n!Type.canCastTo(argumentTypes[i], varType)) {\nString msg = String.format(\"Variadic function %s(%s) can't support type: %s\", fnName,\nArrays.stream(fn.getArgs()).map(Type::toSql).collect(Collectors.joining(\", \")),\nargumentTypes[i]);\nthrow new SemanticException(msg, node.getPos());\n}\n}\n}\nnode.setFn(fn);\nnode.setType(fn.getReturnType());\nFunctionAnalyzer.analyze(node);\nreturn null;\n}\nprivate void checkFunction(String fnName, FunctionCallExpr node, Type[] argumentTypes) {\nswitch (fnName) {\ncase FunctionSet.TIME_SLICE:\ncase FunctionSet.DATE_SLICE:\nif (!(node.getChild(1) instanceof IntLiteral)) {\nthrow new SemanticException(\nfnName + \" requires second parameter must be a constant interval\", node.getPos());\n}\nif (((IntLiteral) node.getChild(1)).getValue() <= 0) {\nthrow new SemanticException(\nfnName + \" requires second parameter must be greater than 0\", node.getPos());\n}\nbreak;\ncase FunctionSet.ARRAY_FILTER:\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 array inputs or lambda functions\",\nnode.getPos());\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nif 
(!Type.canCastTo(node.getChild(1).getType(), Type.ARRAY_BOOLEAN)) {\nthrow new SemanticException(\"The second input of array_filter \" +\nnode.getChild(1).getType().toString() + \" can't cast to ARRAY\", node.getPos());\n}\nbreak;\ncase FunctionSet.ALL_MATCH:\ncase FunctionSet.ANY_MATCH:\nif (node.getChildren().size() != 1) {\nthrow new SemanticException(fnName + \" should have a input array\", node.getPos());\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName + \" should be an array\",\nnode.getPos());\n}\nif (!Type.canCastTo(node.getChild(0).getType(), Type.ARRAY_BOOLEAN)) {\nthrow new SemanticException(\"The second input of \" + fnName +\nnode.getChild(0).getType().toString() + \" can't cast to ARRAY\", node.getPos());\n}\nbreak;\ncase FunctionSet.ARRAY_SORTBY:\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 array inputs or lambda functions\",\nnode.getPos());\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nbreak;\ncase FunctionSet.ARRAY_GENERATE:\nif (node.getChildren().size() < 1 || node.getChildren().size() > 3) {\nthrow new SemanticException(fnName + \" has wrong input numbers\");\n}\nfor (Expr expr : node.getChildren()) {\nif ((expr instanceof SlotRef) && node.getChildren().size() != 3) {\nthrow new SemanticException(fnName + \" with IntColumn doesn't support default parameters\");\n}\nif (!(expr instanceof IntLiteral) && !(expr instanceof LargeIntLiteral) &&\n!(expr instanceof SlotRef) && !(expr instanceof NullLiteral)) {\nthrow new SemanticException(fnName + \"'s parameter only support Integer\");\n}\n}\nbreak;\ncase FunctionSet.MAP_FILTER:\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 inputs, \" +\n\"but there are just \" + node.getChildren().size() + \" inputs.\");\n}\nif (!node.getChild(0).getType().isMapType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be a map or a lambda function.\");\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be a array or a lambda function.\");\n}\nif (!Type.canCastTo(node.getChild(1).getType(), Type.ARRAY_BOOLEAN)) {\nthrow new SemanticException(\"The second input of map_filter \" +\nnode.getChild(1).getType().toString() + \" can't cast to ARRAY\");\n}\nbreak;\ncase FunctionSet.GROUP_CONCAT:\ncase FunctionSet.ARRAY_AGG: {\nif (node.getChildren().size() == 0) {\nthrow new SemanticException(fnName + \" should have at least one input\", node.getPos());\n}\nint start = 1;\nif (fnName.equals(FunctionSet.GROUP_CONCAT)) {\nstart = argumentTypes.length - node.getParams().getOrderByElemNum();\n}\nfor (int i = start; i < argumentTypes.length; ++i) {\nif (argumentTypes[i].isComplexType() || argumentTypes[i].isJsonType()) {\nthrow new SemanticException(fnName + \" can't support order by nested types, \" +\n\"but \" + i + \"-th input is \" + 
argumentTypes[i].toSql());\n}\n}\nbreak;\n}\ncase FunctionSet.NAMED_STRUCT: {\nif (node.getChildren().size() < 2) {\nthrow new SemanticException(fnName + \" should have at least two inputs\", node.getPos());\n}\nif (node.getChildren().size() % 2 != 0) {\nthrow new SemanticException(fnName + \" arguments must be in name/value pairs\", node.getPos());\n}\nSet check = Sets.newHashSet();\nfor (int i = 0; i < node.getChildren().size(); i = i + 2) {\nif (!(node.getChild(i) instanceof StringLiteral)) {\nthrow new SemanticException(\n\"The \" + (i + 1) + \"-th input of named_struct must be string literal\",\nnode.getPos());\n}\nString name = ((StringLiteral) node.getChild(i)).getValue();\nif (check.contains(name.toLowerCase())) {\nthrow new SemanticException(\"named_struct contains duplicate subfield name: \" +\nname + \" at \" + (i + 1) + \"-th input\", node.getPos());\n}\ncheck.add(name.toLowerCase());\n}\nbreak;\n}\ncase FunctionSet.ROW: {\nif (node.getChildren().size() < 1) {\nthrow new SemanticException(fnName + \" should have at least one input.\", node.getPos());\n}\nbreak;\n}\ncase FunctionSet.ARRAY_AVG:\ncase FunctionSet.ARRAY_MAX:\ncase FunctionSet.ARRAY_MIN:\ncase FunctionSet.ARRAY_SORT:\ncase FunctionSet.ARRAY_SUM:\ncase FunctionSet.ARRAY_CUM_SUM:\ncase FunctionSet.ARRAY_DIFFERENCE:\ncase FunctionSet.ARRAY_DISTINCT:\ncase FunctionSet.ARRAY_LENGTH:\ncase FunctionSet.ARRAY_TO_BITMAP: {\nif (node.getChildren().size() != 1) {\nthrow new SemanticException(fnName + \" should have only one input\", node.getPos());\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The only one input of \" + fnName +\n\" should be an array, rather than \" + node.getChild(0).getType().toSql(),\nnode.getPos());\n}\nbreak;\n}\ncase FunctionSet.ARRAY_CONTAINS_ALL:\ncase FunctionSet.ARRAYS_OVERLAP: {\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have only two inputs\", node.getPos());\n}\nfor (int i = 0; i < node.getChildren().size(); ++i) {\nif (!node.getChild(i).getType().isArrayType() && !node.getChild(i).getType().isNull()) {\nthrow new SemanticException((i + 1) + \"-th input of \" + fnName +\n\" should be an array, rather than \" + node.getChild(i).getType().toSql(),\nnode.getPos());\n}\n}\nbreak;\n}\ncase FunctionSet.ARRAY_INTERSECT:\ncase FunctionSet.ARRAY_CONCAT: {\nif (node.getChildren().isEmpty()) {\nthrow new SemanticException(fnName + \" should have at least one input.\", node.getPos());\n}\nfor (int i = 0; i < node.getChildren().size(); ++i) {\nif (!node.getChild(i).getType().isArrayType() && !node.getChild(i).getType().isNull()) {\nthrow new SemanticException((i + 1) + \"-th input of \" + fnName +\n\" should be an array, rather than \" + node.getChild(i).getType().toSql(),\nnode.getPos());\n}\n}\nbreak;\n}\ncase FunctionSet.ARRAY_CONTAINS:\ncase FunctionSet.ARRAY_APPEND:\ncase FunctionSet.ARRAY_JOIN:\ncase FunctionSet.ARRAY_POSITION:\ncase FunctionSet.ARRAY_REMOVE:\ncase FunctionSet.ARRAY_SLICE: {\nif (node.getChildren().isEmpty()) {\nthrow new SemanticException(fnName + \" should have at least one input.\", node.getPos());\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be an array\", node.getPos());\n}\nbreak;\n}\n}\n}\nprivate Function getStrToDateFunction(FunctionCallExpr node, Type[] argumentTypes) {\n/*\n* @TODO: Determine the return type of this function\n* If is 
format is constant and don't contains time part, return date type, to compatible with mysql.\n* In fact we don't want to support str_to_date return date like mysql, reason:\n* 1. The return type of FE/BE str_to_date function signature is datetime, return date\n* let type different, it's will throw unpredictable error\n* 2. Support return date and datetime at same time in one function is complicated.\n* 3. The meaning of the function is confusing. In mysql, will return date if format is a constant\n* string and it's not contains \"%H/%M/%S\" pattern, but it's a trick logic, if format is a variable\n* expression, like: str_to_date(col1, col2), and the col2 is '%Y%m%d', the result always be\n* datetime.\n*/\nFunction fn = Expr.getBuiltinFunction(node.getFnName().getFunction(),\nargumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn == null) {\nreturn null;\n}\nif (!node.getChild(1).isConstant()) {\nreturn fn;\n}\nExpressionMapping expressionMapping =\nnew ExpressionMapping(new Scope(RelationId.anonymous(), new RelationFields()),\nLists.newArrayList());\nScalarOperator format = SqlToScalarOperatorTranslator.translate(node.getChild(1), expressionMapping,\nnew ColumnRefFactory());\nif (format.isConstantRef() && !HAS_TIME_PART.matcher(format.toString()).matches()) {\nreturn Expr.getBuiltinFunction(\"str2date\", argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n}\nreturn fn;\n}\nprivate Function getArrayGenerateFunction(FunctionCallExpr node) {\nif (node.getChildren().size() == 1) {\nLiteralExpr secondParam = (LiteralExpr) node.getChild(0);\nnode.clearChildren();\nnode.addChild(new IntLiteral(1));\nnode.addChild(secondParam);\n}\nif (node.getChildren().size() == 2) {\nint idx = 0;\nBigInteger[] childValues = new BigInteger[2];\nBoolean hasNUll = false;\nfor (Expr expr : node.getChildren()) {\nif (expr instanceof NullLiteral) {\nhasNUll = true;\n} else if (expr instanceof IntLiteral) {\nchildValues[idx++] = BigInteger.valueOf(((IntLiteral) expr).getValue());\n} else {\nchildValues[idx++] = ((LargeIntLiteral) expr).getValue();\n}\n}\nif (hasNUll || childValues[0].compareTo(childValues[1]) < 0) {\nnode.addChild(new IntLiteral(1));\n} else {\nnode.addChild(new IntLiteral(-1));\n}\n}\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\nreturn Expr.getBuiltinFunction(FunctionSet.ARRAY_GENERATE, argumentTypes,\nFunction.CompareMode.IS_SUPERTYPE_OF);\n}\n@Override\npublic Void visitGroupingFunctionCall(GroupingFunctionCallExpr node, Scope scope) {\nif (node.getChildren().size() < 1) {\nthrow new SemanticException(\"GROUPING functions required at least one parameters\", node.getPos());\n}\nif (node.getChildren().stream().anyMatch(e -> !(e instanceof SlotRef))) {\nthrow new SemanticException(\"grouping functions only support column\", node.getPos());\n}\nType[] childTypes = new Type[1];\nchildTypes[0] = Type.BIGINT;\nFunction fn = Expr.getBuiltinFunction(node.getFnName().getFunction(),\nchildTypes, Function.CompareMode.IS_IDENTICAL);\nnode.setFn(fn);\nnode.setType(fn.getReturnType());\nreturn null;\n}\n@Override\npublic Void visitCaseWhenExpr(CaseExpr node, Scope context) {\nint start = 0;\nint end = node.getChildren().size();\nExpr caseExpr = null;\nExpr elseExpr = null;\nif (node.hasCaseExpr()) {\ncaseExpr = node.getChild(0);\nstart++;\n}\nif (node.hasElseExpr()) {\nelseExpr = node.getChild(end - 1);\nend--;\n}\nList whenTypes = Lists.newArrayList();\nif (null != caseExpr) {\nwhenTypes.add(caseExpr.getType());\n}\nfor (int i = 
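Editorial aside: `getArrayGenerateFunction` above fills in defaults before signature lookup — a single argument becomes the stop bound with an implicit start of 1, and with two arguments the step defaults to +1 when start < stop (or either bound is NULL), otherwise -1. A self-contained sketch, using `BigInteger` values (and `null` for `NullLiteral`) as stand-ins for the literal children:

```java
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;

public class ArrayGenerateDefaults {
    /** Fill the implicit start and step arguments of array_generate. */
    static List<BigInteger> normalize(List<BigInteger> args) {
        List<BigInteger> out = new ArrayList<>(args);
        if (out.size() == 1) {
            out.add(0, BigInteger.ONE);            // implicit start = 1
        }
        if (out.size() == 2) {
            BigInteger start = out.get(0);
            BigInteger stop = out.get(1);
            boolean hasNull = start == null || stop == null;
            boolean ascending = hasNull || start.compareTo(stop) < 0;
            out.add(ascending ? BigInteger.ONE : BigInteger.valueOf(-1)); // implicit step
        }
        return out;
    }

    public static void main(String[] args) {
        List<BigInteger> a = new ArrayList<>();
        a.add(BigInteger.TEN);
        System.out.println(normalize(a)); // [1, 10, 1]
    }
}
```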
start; i < end; i = i + 2) {\nwhenTypes.add(node.getChild(i).getType());\n}\nType compatibleType = Type.BOOLEAN;\nif (null != caseExpr) {\ncompatibleType = TypeManager.getCompatibleTypeForCaseWhen(whenTypes);\n}\nfor (Type type : whenTypes) {\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\"Invalid when type cast \" + type.toSql()\n+ \" to \" + compatibleType.toSql(), node.getPos());\n}\n}\nList thenTypes = Lists.newArrayList();\nfor (int i = start + 1; i < end; i = i + 2) {\nthenTypes.add(node.getChild(i).getType());\n}\nif (null != elseExpr) {\nthenTypes.add(elseExpr.getType());\n}\nType returnType = thenTypes.stream().allMatch(Type.NULL::equals) ? Type.BOOLEAN :\nTypeManager.getCompatibleTypeForCaseWhen(thenTypes);\nfor (Type type : thenTypes) {\nif (!Type.canCastTo(type, returnType)) {\nthrow new SemanticException(\"Invalid then type cast \" + type.toSql()\n+ \" to \" + returnType.toSql(), node.getPos());\n}\n}\nnode.setType(returnType);\nreturn null;\n}\n@Override\npublic Void visitSubquery(Subquery node, Scope context) {\nQueryAnalyzer queryAnalyzer = new QueryAnalyzer(session);\nqueryAnalyzer.analyze(node.getQueryStatement(), context);\nnode.setType(node.getQueryStatement().getQueryRelation().getRelationFields().getFieldByIndex(0).getType());\nreturn null;\n}\n@Override\npublic Void visitAnalyticExpr(AnalyticExpr node, Scope context) {\nvisit(node.getFnCall(), context);\nnode.setType(node.getFnCall().getType());\nif (node.getWindow() != null) {\nif (node.getWindow().getLeftBoundary() != null &&\nnode.getWindow().getLeftBoundary().getExpr() != null) {\nvisit(node.getWindow().getLeftBoundary().getExpr(), context);\n}\nif (node.getWindow().getRightBoundary() != null &&\nnode.getWindow().getRightBoundary().getExpr() != null) {\nvisit(node.getWindow().getRightBoundary().getExpr(), context);\n}\n}\nnode.getPartitionExprs().forEach(e -> visit(e, context));\nnode.getOrderByElements().stream().map(OrderByElement::getExpr).forEach(e -> visit(e, context));\nverifyAnalyticExpression(node);\nreturn null;\n}\n@Override\npublic Void visitInformationFunction(InformationFunction node, Scope context) {\nString funcType = node.getFuncType();\nif (funcType.equalsIgnoreCase(\"DATABASE\") || funcType.equalsIgnoreCase(\"SCHEMA\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(ClusterNamespace.getNameFromFullName(session.getDatabase()));\n} else if (funcType.equalsIgnoreCase(\"USER\")) {\nnode.setType(Type.VARCHAR);\nString user = session.getQualifiedUser();\nString remoteIP = session.getRemoteIP();\nnode.setStrValue(new UserIdentity(user, remoteIP).toString());\n} else if (funcType.equalsIgnoreCase(\"CURRENT_USER\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(session.getCurrentUserIdentity().toString());\n} else if (funcType.equalsIgnoreCase(\"CURRENT_ROLE\")) {\nnode.setType(Type.VARCHAR);\nAuthorizationMgr manager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();\nList roleName = new ArrayList<>();\ntry {\nfor (Long roleId : session.getCurrentRoleIds()) {\nRolePrivilegeCollectionV2 rolePrivilegeCollection =\nmanager.getRolePrivilegeCollectionUnlocked(roleId, false);\nif (rolePrivilegeCollection != null) {\nroleName.add(rolePrivilegeCollection.getName());\n}\n}\n} catch (PrivilegeException e) {\nthrow new SemanticException(e.getMessage());\n}\nif (roleName.isEmpty()) {\nnode.setStrValue(\"NONE\");\n} else {\nnode.setStrValue(Joiner.on(\", \").join(roleName));\n}\n} else if (funcType.equalsIgnoreCase(\"CONNECTION_ID\")) 
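Editorial aside: `visitCaseWhenExpr` above relies on the children being laid out as `[caseExpr?] (when, then)* [elseExpr?]`, so WHEN operands sit at indices `start, start+2, ...` and THEN results at `start+1, start+3, ...`. A runnable sketch of that bucketing, with plain strings standing in for child expression types:

```java
import java.util.ArrayList;
import java.util.List;

public class CaseWhenTypeBuckets {
    /** Split CASE children into WHEN-operand and THEN-result type lists. */
    static void bucket(List<String> children, boolean hasCase, boolean hasElse,
                       List<String> whenTypes, List<String> thenTypes) {
        int start = hasCase ? 1 : 0;
        int end = hasElse ? children.size() - 1 : children.size();
        if (hasCase) {
            whenTypes.add(children.get(0));   // WHEN operands compare against it
        }
        for (int i = start; i < end; i += 2) {
            whenTypes.add(children.get(i));
        }
        for (int i = start + 1; i < end; i += 2) {
            thenTypes.add(children.get(i));
        }
        if (hasElse) {
            thenTypes.add(children.get(children.size() - 1));
        }
    }

    public static void main(String[] args) {
        List<String> when = new ArrayList<>(), then = new ArrayList<>();
        // CASE x WHEN a THEN r1 WHEN b THEN r2 ELSE r3 END
        bucket(List.of("x", "a", "r1", "b", "r2", "r3"), true, true, when, then);
        System.out.println(when); // [x, a, b]
        System.out.println(then); // [r1, r2, r3]
    }
}
```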
{\nnode.setType(Type.BIGINT);\nnode.setIntValue(session.getConnectionId());\nnode.setStrValue(\"\");\n} else if (funcType.equalsIgnoreCase(\"CATALOG\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(session.getCurrentCatalog());\n}\nreturn null;\n}\n@Override\npublic Void visitVariableExpr(VariableExpr node, Scope context) {\ntry {\nif (node.getSetType().equals(SetType.USER)) {\nUserVariable userVariable = session.getUserVariables(node.getName());\nif (userVariable == null) {\nnode.setType(Type.STRING);\nnode.setIsNull();\nreturn null;\n}\nType variableType = userVariable.getEvaluatedExpression().getType();\nnode.setType(variableType);\nif (userVariable.getEvaluatedExpression() instanceof NullLiteral) {\nnode.setIsNull();\n} else {\nnode.setValue(userVariable.getEvaluatedExpression().getRealObjectValue());\n}\n} else {\nVariableMgr.fillValue(session.getSessionVariable(), node);\nif (!Strings.isNullOrEmpty(node.getName()) &&\nnode.getName().equalsIgnoreCase(SessionVariable.SQL_MODE)) {\nnode.setType(Type.VARCHAR);\nnode.setValue(SqlModeHelper.decode((long) node.getValue()));\n}\n}\n} catch (DdlException e) {\nthrow new SemanticException(e.getMessage());\n}\nreturn null;\n}\n@Override\npublic Void visitDefaultValueExpr(DefaultValueExpr node, Scope context) {\nnode.setType(Type.VARCHAR);\nreturn null;\n}\n@Override\npublic Void visitCloneExpr(CloneExpr node, Scope context) {\nreturn null;\n}\n@Override\n}" + }, + { + "comment": "Is it correct that this error is getting logged? Only `Context` is an unknown type here, right? Even though there are errors when attempting to resolve `FunctionEntry`'s members, it is not unknown?", + "method_body": "public void testFunctionPointerAsVariable() {\nCompileResult result =\nBCompileUtil.compile(\"test-src/expressions/lambda/negative/fp-type-mismatch1-negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 3);\nBAssertUtil.validateError(result, 0, \"incompatible types: expected 'function (string,int) returns \" +\n\"(boolean)', found 'function (string,float) returns (boolean)'\", 2, 53);\nBAssertUtil.validateError(result, 1, \"unknown type 'Context'\", 10, 29);\nBAssertUtil.validateError(result, 2, \"unknown type 'FunctionEntry'\", 12, 5);\n}", + "target_code": "BAssertUtil.validateError(result, 2, \"unknown type 'FunctionEntry'\", 12, 5);", + "method_body_after": "public void testFunctionPointerAsVariable() {\nCompileResult result =\nBCompileUtil.compile(\"test-src/expressions/lambda/negative/fp-type-mismatch1-negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 3);\nBAssertUtil.validateError(result, 0, \"incompatible types: expected 'function (string,int) returns \" +\n\"(boolean)', found 'function (string,float) returns (boolean)'\", 2, 53);\nBAssertUtil.validateError(result, 1, \"unknown type 'Context'\", 10, 29);\nBAssertUtil.validateError(result, 2, \"unknown type 'FunctionEntry'\", 12, 5);\n}", + "context_before": "class FunctionPointersNegativeTest {\n@Test()\n@Test()\npublic void testLambdaAsVariable() {\nCompileResult result =\nBCompileUtil.compile(\"test-src/expressions/lambda/negative/fp-type-mismatch2-negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 1);\nBAssertUtil.validateError(result, 0, \"incompatible types: expected 'function (string,int) returns \" +\n\"(boolean)', found 'function (string,boolean) returns (boolean)'\", 2, 53);\n}\n@Test()\npublic void testFPInStruct() {\nCompileResult result = 
BCompileUtil.compile(\"test-src/expressions/lambda/negative/fp-struct-negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 1);\nBAssertUtil.validateError(result, 0, \"undefined field 'getFullName' in record 'Person'\", 17, 20);\n}\n@Test()\npublic void testFPInStructIncorrectArg() {\nCompileResult result =\nBCompileUtil.compile(\"test-src/expressions/lambda/negative/fp-struct-incorrect-arg-negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 1);\nBAssertUtil.validateError(result, 0, \"incompatible types: expected 'string', found 'Person'\", 32, 39);\n}\n@Test(groups = { \"disableOnOldParser\" })\npublic void testFPWithNoImport() {\nCompileResult result =\nBCompileUtil.compile(\"test-src/expressions/lambda/negative/fp-with-import-negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 6);\nint i = -1;\nBAssertUtil.validateError(result, ++i, \"undefined module 'streams'\", 19, 5);\nBAssertUtil.validateError(result, ++i, \"unknown type 'Select'\", 19, 5);\nBAssertUtil.validateError(result, ++i, \"undefined function 'createSelect'\", 19, 32);\nBAssertUtil.validateError(result, ++i, \"undefined module 'streams'\", 19, 32);\nBAssertUtil.validateError(result, ++i, \"undefined symbol 'outputProcess'\", 19, 53);\n}\n@Test()\npublic void testFPInvalidInvocation() {\nCompileResult result = BCompileUtil.compile(\"test-src/expressions/lambda/negative\" +\n\"/fp_invalid_invocation_negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 6);\nint i = 0;\nBAssertUtil.validateError(result, i++, \"undefined field 'getFullName' in record 'Person'\", 35, 20);\nBAssertUtil.validateError(result, i++, \"undefined field 'getFname' in object 'Employee'\", 45, 15);\nBAssertUtil.validateError(result, i++, \"undefined function 'f3'\", 46, 9);\nBAssertUtil.validateError(result, i++, \"undefined field 'getFname' in object 'Employee'\", 77, 15);\nBAssertUtil.validateError(result, i++, \"undefined function 'f3'\", 78, 9);\nBAssertUtil.validateError(result, i, \"undefined method 'getLname' in object 'Employee'\", 83, 11);\n}\n@Test\npublic void testFPWithMissingArgs() {\nCompileResult result = BCompileUtil.compile(\"test-src/expressions/lambda/negative\" +\n\"/fp_invocation_with_missing_args.bal\");\nAssert.assertEquals(result.getErrorCount(), 4);\nint i = 0;\nBAssertUtil.validateError(result, i++, \"missing required parameter 'i' in call to 'fn'()\", 9, 16);\nBAssertUtil.validateError(result, i++, \"missing required parameter 'i' in call to 'fn'()\", 20, 16);\nBAssertUtil.validateError(result, i++, \"too many arguments in call to 'fn()'\", 31, 16);\nBAssertUtil.validateError(result, i, \"too many arguments in call to 'fn()'\", 42, 16);\n}\n}", + "context_after": "class FunctionPointersNegativeTest {\n@Test()\n@Test()\npublic void testLambdaAsVariable() {\nCompileResult result =\nBCompileUtil.compile(\"test-src/expressions/lambda/negative/fp-type-mismatch2-negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 1);\nBAssertUtil.validateError(result, 0, \"incompatible types: expected 'function (string,int) returns \" +\n\"(boolean)', found 'function (string,boolean) returns (boolean)'\", 2, 53);\n}\n@Test()\npublic void testFPInStruct() {\nCompileResult result = BCompileUtil.compile(\"test-src/expressions/lambda/negative/fp-struct-negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 1);\nBAssertUtil.validateError(result, 0, \"undefined field 'getFullName' in record 'Person'\", 17, 20);\n}\n@Test()\npublic void testFPInStructIncorrectArg() {\nCompileResult result 
=\nBCompileUtil.compile(\"test-src/expressions/lambda/negative/fp-struct-incorrect-arg-negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 1);\nBAssertUtil.validateError(result, 0, \"incompatible types: expected 'string', found 'Person'\", 32, 39);\n}\n@Test(groups = { \"disableOnOldParser\" })\npublic void testFPWithNoImport() {\nCompileResult result =\nBCompileUtil.compile(\"test-src/expressions/lambda/negative/fp-with-import-negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 6);\nint i = -1;\nBAssertUtil.validateError(result, ++i, \"undefined module 'streams'\", 19, 5);\nBAssertUtil.validateError(result, ++i, \"unknown type 'Select'\", 19, 5);\nBAssertUtil.validateError(result, ++i, \"undefined function 'createSelect'\", 19, 32);\nBAssertUtil.validateError(result, ++i, \"undefined module 'streams'\", 19, 32);\nBAssertUtil.validateError(result, ++i, \"undefined symbol 'outputProcess'\", 19, 53);\n}\n@Test()\npublic void testFPInvalidInvocation() {\nCompileResult result = BCompileUtil.compile(\"test-src/expressions/lambda/negative\" +\n\"/fp_invalid_invocation_negative.bal\");\nAssert.assertEquals(result.getErrorCount(), 6);\nint i = 0;\nBAssertUtil.validateError(result, i++, \"undefined field 'getFullName' in record 'Person'\", 35, 20);\nBAssertUtil.validateError(result, i++, \"undefined field 'getFname' in object 'Employee'\", 45, 15);\nBAssertUtil.validateError(result, i++, \"undefined function 'f3'\", 46, 9);\nBAssertUtil.validateError(result, i++, \"undefined field 'getFname' in object 'Employee'\", 77, 15);\nBAssertUtil.validateError(result, i++, \"undefined function 'f3'\", 78, 9);\nBAssertUtil.validateError(result, i, \"undefined method 'getLname' in object 'Employee'\", 83, 11);\n}\n@Test\npublic void testFPWithMissingArgs() {\nCompileResult result = BCompileUtil.compile(\"test-src/expressions/lambda/negative\" +\n\"/fp_invocation_with_missing_args.bal\");\nAssert.assertEquals(result.getErrorCount(), 4);\nint i = 0;\nBAssertUtil.validateError(result, i++, \"missing required parameter 'i' in call to 'fn'()\", 9, 16);\nBAssertUtil.validateError(result, i++, \"missing required parameter 'i' in call to 'fn'()\", 20, 16);\nBAssertUtil.validateError(result, i++, \"too many arguments in call to 'fn()'\", 31, 16);\nBAssertUtil.validateError(result, i, \"too many arguments in call to 'fn()'\", 42, 16);\n}\n}" + }, + { + "comment": "nice!", + "method_body": "public void execute() throws AnalysisException {\nGroupExpression logicalExpression = group.getLogicalExpression();\nif (!childrenOptimized) {\npushJob(new RewriteBottomUpJob(group, rules, context, true));\nList children = logicalExpression.children();\nfor (int i = children.size() - 1; i >= 0; i--) {\npushJob(new RewriteBottomUpJob(children.get(i), rules, context, false));\n}\nreturn;\n}\ntrace(logicalExpression);\nList validRules = getValidRules(logicalExpression, rules);\nfor (Rule rule : validRules) {\nGroupExpressionMatching groupExpressionMatching\n= new GroupExpressionMatching(rule.getPattern(), logicalExpression);\nfor (Plan before : groupExpressionMatching) {\nOptional copyInResult = invokeRewriteRuleWithTrace(rule, before, group,\nREWRITE_BOTTOM_UP_JOB_TRACER);\nif (!copyInResult.isPresent()) {\ncontinue;\n}\nGroup correspondingGroup = copyInResult.get().correspondingExpression.getOwnerGroup();\nif (copyInResult.get().generateNewExpression\n|| correspondingGroup != group\n|| logicalExpression.getOwnerGroup() == null) {\npushJob(new RewriteBottomUpJob(correspondingGroup, rules, context, 
false));\nreturn;\n}\n}\n}\n}", + "target_code": "REWRITE_BOTTOM_UP_JOB_TRACER);", + "method_body_after": "public void execute() throws AnalysisException {\nGroupExpression logicalExpression = group.getLogicalExpression();\nif (!childrenOptimized) {\npushJob(new RewriteBottomUpJob(group, rules, context, true));\nList children = logicalExpression.children();\nfor (int i = children.size() - 1; i >= 0; i--) {\npushJob(new RewriteBottomUpJob(children.get(i), rules, context, false));\n}\nreturn;\n}\ncountJobExecutionTimesOfGroupExpressions(logicalExpression);\nList validRules = getValidRules(logicalExpression, rules);\nfor (Rule rule : validRules) {\nGroupExpressionMatching groupExpressionMatching\n= new GroupExpressionMatching(rule.getPattern(), logicalExpression);\nfor (Plan before : groupExpressionMatching) {\nOptional copyInResult = invokeRewriteRuleWithTrace(rule, before, group);\nif (!copyInResult.isPresent()) {\ncontinue;\n}\nGroup correspondingGroup = copyInResult.get().correspondingExpression.getOwnerGroup();\nif (copyInResult.get().generateNewExpression\n|| correspondingGroup != group\n|| logicalExpression.getOwnerGroup() == null) {\npushJob(new RewriteBottomUpJob(correspondingGroup, rules, context, false));\nreturn;\n}\n}\n}\n}", + "context_before": "class RewriteBottomUpJob extends Job {\nprivate static final EventProducer REWRITE_BOTTOM_UP_JOB_TRACER = new EventProducer(\nTransformEvent.class,\nEventChannel.getDefaultChannel().addConsumers(new LogConsumer(TransformEvent.class, EventChannel.LOG)));\nprivate final Group group;\nprivate final List rules;\nprivate final boolean childrenOptimized;\npublic RewriteBottomUpJob(Group group, JobContext context, List factories) {\nthis(group, factories.stream()\n.flatMap(factory -> factory.buildRules().stream())\n.collect(Collectors.toList()), context, false);\n}\npublic RewriteBottomUpJob(Group group, List rules, JobContext context) {\nthis(group, rules, context, false);\n}\nprivate RewriteBottomUpJob(Group group, List rules,\nJobContext context, boolean childrenOptimized) {\nsuper(JobType.BOTTOM_UP_REWRITE, context);\nthis.group = Objects.requireNonNull(group, \"group cannot be null\");\nthis.rules = Objects.requireNonNull(rules, \"rules cannot be null\");\nthis.childrenOptimized = childrenOptimized;\n}\n@Override\n}", + "context_after": "class RewriteBottomUpJob extends Job {\nprivate static final EventProducer RULE_TRANSFORM_TRACER = new EventProducer(\nTransformEvent.class,\nEventChannel.getDefaultChannel().addConsumers(new LogConsumer(TransformEvent.class, EventChannel.LOG)));\nprivate final Group group;\nprivate final List rules;\nprivate final boolean childrenOptimized;\npublic RewriteBottomUpJob(Group group, JobContext context, List factories) {\nthis(group, factories.stream()\n.flatMap(factory -> factory.buildRules().stream())\n.collect(Collectors.toList()), context, false);\n}\npublic RewriteBottomUpJob(Group group, List rules, JobContext context) {\nthis(group, rules, context, false);\n}\nprivate RewriteBottomUpJob(Group group, List rules,\nJobContext context, boolean childrenOptimized) {\nsuper(JobType.BOTTOM_UP_REWRITE, context);\nthis.group = Objects.requireNonNull(group, \"group cannot be null\");\nthis.rules = Objects.requireNonNull(rules, \"rules cannot be null\");\nthis.childrenOptimized = childrenOptimized;\n}\n@Override\npublic EventProducer getEventTracer() {\nreturn RULE_TRANSFORM_TRACER;\n}\n@Override\n}" + }, + { + "comment": "Reproducible in the following code. 
```ballerina anydata arr = []; (map & readonly)[2] arr4 = checkpanic arr.cloneWithType(); ``` Fixed in the latest commit.", + "method_body": "public boolean hasFillerValue(BType type) {\nswitch (type.tag) {\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.STRING:\ncase TypeTags.BOOLEAN:\ncase TypeTags.JSON:\ncase TypeTags.XML:\ncase TypeTags.XML_TEXT:\ncase TypeTags.NIL:\ncase TypeTags.TABLE:\ncase TypeTags.ANYDATA:\ncase TypeTags.MAP:\ncase TypeTags.ANY:\nreturn true;\ncase TypeTags.ARRAY:\nreturn checkFillerValue((BArrayType) type);\ncase TypeTags.FINITE:\nreturn checkFillerValue((BFiniteType) type);\ncase TypeTags.UNION:\nreturn checkFillerValue((BUnionType) type);\ncase TypeTags.OBJECT:\nreturn checkFillerValue((BObjectType) type);\ncase TypeTags.RECORD:\nreturn checkFillerValue((BRecordType) type);\ncase TypeTags.TUPLE:\nBTupleType tupleType = (BTupleType) type;\nif (tupleType.isCyclic) {\nreturn false;\n}\nreturn tupleType.getTupleTypes().stream().allMatch(eleType -> hasFillerValue(eleType));\ncase TypeTags.TYPEREFDESC:\nreturn hasFillerValue(getReferredType(type));\ncase TypeTags.INTERSECTION:\nreturn hasFillerValue(((BIntersectionType) type).effectiveType);\ndefault:\nreturn TypeTags.isIntegerTypeTag(type.tag);\n}\n}", + "target_code": "return hasFillerValue(((BIntersectionType) type).effectiveType);", + "method_body_after": "public boolean hasFillerValue(BType type) {\nswitch (type.tag) {\ncase TypeTags.INT:\ncase TypeTags.BYTE:\ncase TypeTags.FLOAT:\ncase TypeTags.DECIMAL:\ncase TypeTags.STRING:\ncase TypeTags.BOOLEAN:\ncase TypeTags.JSON:\ncase TypeTags.XML:\ncase TypeTags.XML_TEXT:\ncase TypeTags.NIL:\ncase TypeTags.TABLE:\ncase TypeTags.ANYDATA:\ncase TypeTags.MAP:\ncase TypeTags.ANY:\nreturn true;\ncase TypeTags.ARRAY:\nreturn checkFillerValue((BArrayType) type);\ncase TypeTags.FINITE:\nreturn checkFillerValue((BFiniteType) type);\ncase TypeTags.UNION:\nreturn checkFillerValue((BUnionType) type);\ncase TypeTags.OBJECT:\nreturn checkFillerValue((BObjectType) type);\ncase TypeTags.RECORD:\nreturn checkFillerValue((BRecordType) type);\ncase TypeTags.TUPLE:\nBTupleType tupleType = (BTupleType) type;\nif (tupleType.isCyclic) {\nreturn false;\n}\nreturn tupleType.getTupleTypes().stream().allMatch(eleType -> hasFillerValue(eleType));\ncase TypeTags.TYPEREFDESC:\nreturn hasFillerValue(getReferredType(type));\ncase TypeTags.INTERSECTION:\nreturn hasFillerValue(((BIntersectionType) type).effectiveType);\ndefault:\nreturn TypeTags.isIntegerTypeTag(type.tag);\n}\n}", + "context_before": "class TypePair {\nBType sourceType;\nBType targetType;\npublic TypePair(BType sourceType, BType targetType) {\nthis.sourceType = sourceType;\nthis.targetType = targetType;\n}\n@Override\npublic boolean equals(Object obj) {\nif (!(obj instanceof TypePair)) {\nreturn false;\n}\nTypePair other = (TypePair) obj;\nreturn this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);\n}\n@Override\npublic int hashCode() {\nreturn Objects.hash(sourceType, targetType);\n}\n}", + "context_after": "class TypePair {\nBType sourceType;\nBType targetType;\npublic TypePair(BType sourceType, BType targetType) {\nthis.sourceType = sourceType;\nthis.targetType = targetType;\n}\n@Override\npublic boolean equals(Object obj) {\nif (!(obj instanceof TypePair)) {\nreturn false;\n}\nTypePair other = (TypePair) obj;\nreturn this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);\n}\n@Override\npublic int hashCode() 
{\nreturn Objects.hash(sourceType, targetType);\n}\n}" + }, + { + "comment": "But can easily copy the code in our own utils if necessary... I will update the PR tomorrow.", + "method_body": "static String adaptMethodName(String name) {\nname = Introspector.decapitalize(name);\nif (name.contains(\"_\")) {\nStringBuilder res = new StringBuilder();\nfor (String str : name.split(\"_\")) {\nif (res.length() == 0) {\nres.append(str);\n} else {\nres.append(Character.toUpperCase(str.charAt(0)));\nif (str.length() > 1) {\nres.append(str.substring(1));\n}\n}\n}\nname = res.toString();\n} else if (SourceVersion.isKeyword(name)) {\nname += \"_\";\n}\nreturn name;\n}", + "target_code": "name = Introspector.decapitalize(name);", + "method_body_after": "static String adaptMethodName(String name) {\nif (name == null || name.isEmpty()) {\nreturn name;\n}\nStringBuilder ret = new StringBuilder();\nret.append(Character.toLowerCase(name.charAt(0)));\nif (name.length() > 1) {\nif (name.contains(\"_\")) {\nfor (String str : name.substring(1).split(\"_\")) {\nif (ret.length() == 1) {\nret.append(str);\n} else {\nret.append(Character.toUpperCase(str.charAt(0)));\nif (str.length() > 1) {\nret.append(str.substring(1));\n}\n}\n}\n} else {\nret.append(name.substring(1));\n}\nif (isJavaKeyword(ret.toString())) {\nret.append(\"_\");\n}\n}\nreturn ret.toString();\n}", + "context_before": "class MutinyGrpcGenerator extends Generator {\nprivate static final Logger log = Logger.getLogger(MutinyGrpcGenerator.class.getName());\nprivate static final int SERVICE_NUMBER_OF_PATHS = 2;\nprivate static final int METHOD_NUMBER_OF_PATHS = 4;\npublic static final String CLASS_PREFIX = \"Mutiny\";\nprivate String getServiceJavaDocPrefix() {\nreturn \" \";\n}\nprivate String getMethodJavaDocPrefix() {\nreturn \" \";\n}\n@Override\nprotected List supportedFeatures() {\nreturn Collections.singletonList(PluginProtos.CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL);\n}\n@Override\npublic List generateFiles(PluginProtos.CodeGeneratorRequest request)\nthrows GeneratorException {\nProtoTypeMap typeMap = ProtoTypeMap.of(request.getProtoFileList());\nList protosToGenerate = request.getProtoFileList().stream()\n.filter(protoFile -> request.getFileToGenerateList().contains(protoFile.getName()))\n.collect(Collectors.toList());\nList services = findServices(protosToGenerate, typeMap);\nvalidateServices(services);\nreturn generateFiles(services);\n}\nprivate void validateServices(List services) {\nboolean failed = false;\nfor (ServiceContext service : services) {\nif (service.packageName == null || service.packageName.isBlank()) {\nlog.log(Level.SEVERE, \"Using the default java package is not supported for \"\n+ \"Quarkus gRPC code generation. Please specify `option java_package = \\\"your.package\\\"` in \"\n+ service.protoName);\nfailed = true;\n}\n}\nif (failed) {\nthrow new IllegalArgumentException(\"Code generation failed. 
Please check the log above for details.\");\n}\n}\nprivate List findServices(List protos, ProtoTypeMap typeMap) {\nList contexts = new ArrayList<>();\nprotos.forEach(fileProto -> {\nfor (int serviceNumber = 0; serviceNumber < fileProto.getServiceCount(); serviceNumber++) {\nServiceContext serviceContext = buildServiceContext(\nfileProto.getService(serviceNumber),\ntypeMap,\nfileProto.getSourceCodeInfo().getLocationList(),\nserviceNumber);\nserviceContext.protoName = fileProto.getName();\nserviceContext.packageName = extractPackageName(fileProto);\ncontexts.add(serviceContext);\n}\n});\nreturn contexts;\n}\nprivate String extractPackageName(DescriptorProtos.FileDescriptorProto proto) {\nDescriptorProtos.FileOptions options = proto.getOptions();\nif (options != null) {\nString javaPackage = options.getJavaPackage();\nif (!Strings.isNullOrEmpty(javaPackage)) {\nreturn javaPackage;\n}\n}\nreturn Strings.nullToEmpty(proto.getPackage());\n}\nprivate ServiceContext buildServiceContext(DescriptorProtos.ServiceDescriptorProto serviceProto, ProtoTypeMap typeMap,\nList locations, int serviceNumber) {\nServiceContext serviceContext = new ServiceContext();\nserviceContext.classPrefix = CLASS_PREFIX;\nserviceContext.fileName = CLASS_PREFIX + serviceProto.getName() + \"Grpc.java\";\nserviceContext.className = CLASS_PREFIX + serviceProto.getName() + \"Grpc\";\nserviceContext.serviceName = serviceProto.getName();\nserviceContext.deprecated = serviceProto.getOptions() != null && serviceProto.getOptions().getDeprecated();\nList allLocationsForService = locations.stream()\n.filter(location -> location.getPathCount() >= 2 &&\nlocation.getPath(0) == DescriptorProtos.FileDescriptorProto.SERVICE_FIELD_NUMBER &&\nlocation.getPath(1) == serviceNumber)\n.collect(Collectors.toList());\nDescriptorProtos.SourceCodeInfo.Location serviceLocation = allLocationsForService.stream()\n.filter(location -> location.getPathCount() == SERVICE_NUMBER_OF_PATHS)\n.findFirst()\n.orElseGet(DescriptorProtos.SourceCodeInfo.Location::getDefaultInstance);\nserviceContext.javaDoc = getJavaDoc(getComments(serviceLocation), getServiceJavaDocPrefix());\nfor (int methodNumber = 0; methodNumber < serviceProto.getMethodCount(); methodNumber++) {\nMethodContext methodContext = buildMethodContext(\nserviceProto.getMethod(methodNumber),\ntypeMap,\nlocations,\nmethodNumber);\nserviceContext.methods.add(methodContext);\n}\nreturn serviceContext;\n}\nprivate MethodContext buildMethodContext(DescriptorProtos.MethodDescriptorProto methodProto, ProtoTypeMap typeMap,\nList locations, int methodNumber) {\nMethodContext methodContext = new MethodContext();\nmethodContext.methodName = adaptMethodName(methodProto.getName());\nmethodContext.inputType = typeMap.toJavaTypeName(methodProto.getInputType());\nmethodContext.outputType = typeMap.toJavaTypeName(methodProto.getOutputType());\nmethodContext.deprecated = methodProto.getOptions() != null && methodProto.getOptions().getDeprecated();\nmethodContext.isManyInput = methodProto.getClientStreaming();\nmethodContext.isManyOutput = methodProto.getServerStreaming();\nmethodContext.methodNumber = methodNumber;\nDescriptorProtos.SourceCodeInfo.Location methodLocation = locations.stream()\n.filter(location -> location.getPathCount() == METHOD_NUMBER_OF_PATHS &&\nlocation.getPath(METHOD_NUMBER_OF_PATHS - 1) == methodNumber)\n.findFirst()\n.orElseGet(DescriptorProtos.SourceCodeInfo.Location::getDefaultInstance);\nmethodContext.javaDoc = getJavaDoc(getComments(methodLocation), getMethodJavaDocPrefix());\nif 
(!methodProto.getClientStreaming() && !methodProto.getServerStreaming()) {\nmethodContext.mutinyCallsMethodName = \"oneToOne\";\nmethodContext.grpcCallsMethodName = \"asyncUnaryCall\";\n}\nif (!methodProto.getClientStreaming() && methodProto.getServerStreaming()) {\nmethodContext.mutinyCallsMethodName = \"oneToMany\";\nmethodContext.grpcCallsMethodName = \"asyncServerStreamingCall\";\n}\nif (methodProto.getClientStreaming() && !methodProto.getServerStreaming()) {\nmethodContext.mutinyCallsMethodName = \"manyToOne\";\nmethodContext.grpcCallsMethodName = \"asyncClientStreamingCall\";\n}\nif (methodProto.getClientStreaming() && methodProto.getServerStreaming()) {\nmethodContext.mutinyCallsMethodName = \"manyToMany\";\nmethodContext.grpcCallsMethodName = \"asyncBidiStreamingCall\";\n}\nreturn methodContext;\n}\nprivate List generateFiles(List services) {\nList files = new ArrayList<>();\nfor (ServiceContext service : services) {\nfiles.add(buildFile(service, \"MutinyStub.mustache\", absoluteFileName(service, null)));\nfiles.add(buildFile(service, \"MutinyInterface.mustache\",\nabsoluteFileName(service, service.serviceName + \".java\")));\nfiles.add(buildFile(service, \"MutinyBean.mustache\",\nabsoluteFileName(service, service.serviceName + \"Bean.java\")));\nfiles.add(buildFile(service, \"MutinyClient.mustache\",\nabsoluteFileName(service, service.serviceName + \"Client.java\")));\n}\nreturn files;\n}\nprivate PluginProtos.CodeGeneratorResponse.File buildFile(ServiceContext context, String templateName, String fileName) {\nString content = applyTemplate(templateName, context);\nreturn PluginProtos.CodeGeneratorResponse.File\n.newBuilder()\n.setName(fileName)\n.setContent(content)\n.build();\n}\nprivate String absoluteFileName(ServiceContext ctx, String fileName) {\nif (fileName == null) {\nfileName = ctx.fileName;\n}\nString dir = ctx.packageName.replace('.', '/');\nif (Strings.isNullOrEmpty(dir)) {\nreturn fileName;\n} else {\nreturn dir + \"/\" + fileName;\n}\n}\nprivate String getComments(DescriptorProtos.SourceCodeInfo.Location location) {\nreturn location.getLeadingComments().isEmpty() ? location.getTrailingComments() : location.getLeadingComments();\n}\nprivate String getJavaDoc(String comments, String prefix) {\nif (!comments.isEmpty()) {\nStringBuilder builder = new StringBuilder(\"/**\\n\")\n.append(prefix).append(\" *
<pre>\\n\");\nArrays.stream(HtmlEscapers.htmlEscaper().escape(comments).split(\"\\n\"))\n.map(line -> line.replace(\"*/\", \"&#42;&#47;\"))\n.forEach(line -> builder.append(prefix).append(\" * \").append(line).append(\"\\n\"));\nbuilder\n.append(prefix).append(\" * </pre>
\\n\")\n.append(prefix).append(\" */\");\nreturn builder.toString();\n}\nreturn null;\n}\n/**\n* Template class for proto Service objects.\n*/\nprivate static class ServiceContext {\npublic String fileName;\npublic String protoName;\npublic String packageName;\npublic String className;\npublic String classPrefix;\npublic String serviceName;\npublic boolean deprecated;\npublic String javaDoc;\npublic List methods = new ArrayList<>();\npublic List unaryUnaryMethods() {\nreturn methods.stream().filter(m -> !m.isManyInput && !m.isManyOutput).collect(Collectors.toList());\n}\npublic List unaryManyMethods() {\nreturn methods.stream().filter(m -> !m.isManyInput && m.isManyOutput).collect(Collectors.toList());\n}\npublic List manyUnaryMethods() {\nreturn methods.stream().filter(m -> m.isManyInput && !m.isManyOutput).collect(Collectors.toList());\n}\npublic List manyManyMethods() {\nreturn methods.stream().filter(m -> m.isManyInput && m.isManyOutput).collect(Collectors.toList());\n}\n}\n/**\n* Template class for proto RPC objects.\n*/\nprivate static class MethodContext {\npublic String methodName;\npublic String inputType;\npublic String outputType;\npublic boolean deprecated;\npublic boolean isManyInput;\npublic boolean isManyOutput;\npublic String mutinyCallsMethodName;\npublic String grpcCallsMethodName;\npublic int methodNumber;\npublic String javaDoc;\npublic String methodNameUpperUnderscore() {\nStringBuilder s = new StringBuilder();\nfor (int i = 0; i < methodName.length(); i++) {\nchar c = methodName.charAt(i);\ns.append(Character.toUpperCase(c));\nif ((i < methodName.length() - 1) && Character.isLowerCase(c)\n&& Character.isUpperCase(methodName.charAt(i + 1))) {\ns.append('_');\n}\n}\nreturn s.toString();\n}\npublic String methodNamePascalCase() {\nString mn = methodName.replace(\"_\", \"\");\nreturn String.valueOf(Character.toUpperCase(mn.charAt(0))) + mn.substring(1);\n}\npublic String methodNameCamelCase() {\nString mn = methodName.replace(\"_\", \"\");\nreturn String.valueOf(Character.toLowerCase(mn.charAt(0))) + mn.substring(1);\n}\npublic String methodHeader() {\nString mh = \"\";\nif (!Strings.isNullOrEmpty(javaDoc)) {\nmh = javaDoc;\n}\nif (deprecated) {\nmh += \"\\n @Deprecated\";\n}\nreturn mh;\n}\n}\npublic static void main(String[] args) {\nif (args.length == 0) {\nProtocPlugin.generate(new MutinyGrpcGenerator());\n} else {\nProtocPlugin.debug(new MutinyGrpcGenerator(), args[0]);\n}\n}\n}", + "context_after": "class MutinyGrpcGenerator extends Generator {\nprivate static final Logger log = Logger.getLogger(MutinyGrpcGenerator.class.getName());\nprivate static final int SERVICE_NUMBER_OF_PATHS = 2;\nprivate static final int METHOD_NUMBER_OF_PATHS = 4;\npublic static final String CLASS_PREFIX = \"Mutiny\";\nprivate String getServiceJavaDocPrefix() {\nreturn \" \";\n}\nprivate String getMethodJavaDocPrefix() {\nreturn \" \";\n}\n@Override\nprotected List supportedFeatures() {\nreturn Collections.singletonList(PluginProtos.CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL);\n}\n@Override\npublic List generateFiles(PluginProtos.CodeGeneratorRequest request)\nthrows GeneratorException {\nProtoTypeMap typeMap = ProtoTypeMap.of(request.getProtoFileList());\nList protosToGenerate = request.getProtoFileList().stream()\n.filter(protoFile -> request.getFileToGenerateList().contains(protoFile.getName()))\n.collect(Collectors.toList());\nList services = findServices(protosToGenerate, typeMap);\nvalidateServices(services);\nreturn generateFiles(services);\n}\nprivate void 
validateServices(List services) {\nboolean failed = false;\nfor (ServiceContext service : services) {\nif (service.packageName == null || service.packageName.isBlank()) {\nlog.log(Level.SEVERE, \"Using the default java package is not supported for \"\n+ \"Quarkus gRPC code generation. Please specify `option java_package = \\\"your.package\\\"` in \"\n+ service.protoName);\nfailed = true;\n}\n}\nif (failed) {\nthrow new IllegalArgumentException(\"Code generation failed. Please check the log above for details.\");\n}\n}\nprivate List findServices(List protos, ProtoTypeMap typeMap) {\nList contexts = new ArrayList<>();\nprotos.forEach(fileProto -> {\nfor (int serviceNumber = 0; serviceNumber < fileProto.getServiceCount(); serviceNumber++) {\nServiceContext serviceContext = buildServiceContext(\nfileProto.getService(serviceNumber),\ntypeMap,\nfileProto.getSourceCodeInfo().getLocationList(),\nserviceNumber);\nserviceContext.protoName = fileProto.getName();\nserviceContext.packageName = extractPackageName(fileProto);\ncontexts.add(serviceContext);\n}\n});\nreturn contexts;\n}\nprivate String extractPackageName(DescriptorProtos.FileDescriptorProto proto) {\nDescriptorProtos.FileOptions options = proto.getOptions();\nif (options != null) {\nString javaPackage = options.getJavaPackage();\nif (!Strings.isNullOrEmpty(javaPackage)) {\nreturn javaPackage;\n}\n}\nreturn Strings.nullToEmpty(proto.getPackage());\n}\nprivate ServiceContext buildServiceContext(DescriptorProtos.ServiceDescriptorProto serviceProto, ProtoTypeMap typeMap,\nList locations, int serviceNumber) {\nServiceContext serviceContext = new ServiceContext();\nserviceContext.classPrefix = CLASS_PREFIX;\nserviceContext.fileName = CLASS_PREFIX + serviceProto.getName() + \"Grpc.java\";\nserviceContext.className = CLASS_PREFIX + serviceProto.getName() + \"Grpc\";\nserviceContext.serviceName = serviceProto.getName();\nserviceContext.deprecated = serviceProto.getOptions() != null && serviceProto.getOptions().getDeprecated();\nList allLocationsForService = locations.stream()\n.filter(location -> location.getPathCount() >= 2 &&\nlocation.getPath(0) == DescriptorProtos.FileDescriptorProto.SERVICE_FIELD_NUMBER &&\nlocation.getPath(1) == serviceNumber)\n.collect(Collectors.toList());\nDescriptorProtos.SourceCodeInfo.Location serviceLocation = allLocationsForService.stream()\n.filter(location -> location.getPathCount() == SERVICE_NUMBER_OF_PATHS)\n.findFirst()\n.orElseGet(DescriptorProtos.SourceCodeInfo.Location::getDefaultInstance);\nserviceContext.javaDoc = getJavaDoc(getComments(serviceLocation), getServiceJavaDocPrefix());\nfor (int methodNumber = 0; methodNumber < serviceProto.getMethodCount(); methodNumber++) {\nMethodContext methodContext = buildMethodContext(\nserviceProto.getMethod(methodNumber),\ntypeMap,\nlocations,\nmethodNumber);\nserviceContext.methods.add(methodContext);\n}\nreturn serviceContext;\n}\nprivate MethodContext buildMethodContext(DescriptorProtos.MethodDescriptorProto methodProto, ProtoTypeMap typeMap,\nList locations, int methodNumber) {\nMethodContext methodContext = new MethodContext();\nmethodContext.methodName = adaptMethodName(methodProto.getName());\nmethodContext.inputType = typeMap.toJavaTypeName(methodProto.getInputType());\nmethodContext.outputType = typeMap.toJavaTypeName(methodProto.getOutputType());\nmethodContext.deprecated = methodProto.getOptions() != null && methodProto.getOptions().getDeprecated();\nmethodContext.isManyInput = methodProto.getClientStreaming();\nmethodContext.isManyOutput = 
methodProto.getServerStreaming();\nmethodContext.methodNumber = methodNumber;\nDescriptorProtos.SourceCodeInfo.Location methodLocation = locations.stream()\n.filter(location -> location.getPathCount() == METHOD_NUMBER_OF_PATHS &&\nlocation.getPath(METHOD_NUMBER_OF_PATHS - 1) == methodNumber)\n.findFirst()\n.orElseGet(DescriptorProtos.SourceCodeInfo.Location::getDefaultInstance);\nmethodContext.javaDoc = getJavaDoc(getComments(methodLocation), getMethodJavaDocPrefix());\nif (!methodProto.getClientStreaming() && !methodProto.getServerStreaming()) {\nmethodContext.mutinyCallsMethodName = \"oneToOne\";\nmethodContext.grpcCallsMethodName = \"asyncUnaryCall\";\n}\nif (!methodProto.getClientStreaming() && methodProto.getServerStreaming()) {\nmethodContext.mutinyCallsMethodName = \"oneToMany\";\nmethodContext.grpcCallsMethodName = \"asyncServerStreamingCall\";\n}\nif (methodProto.getClientStreaming() && !methodProto.getServerStreaming()) {\nmethodContext.mutinyCallsMethodName = \"manyToOne\";\nmethodContext.grpcCallsMethodName = \"asyncClientStreamingCall\";\n}\nif (methodProto.getClientStreaming() && methodProto.getServerStreaming()) {\nmethodContext.mutinyCallsMethodName = \"manyToMany\";\nmethodContext.grpcCallsMethodName = \"asyncBidiStreamingCall\";\n}\nreturn methodContext;\n}\nprivate static boolean isJavaKeyword(String value) {\nswitch (value) {\ncase \"public\":\ncase \"protected\":\ncase \"private\":\ncase \"abstract\":\ncase \"static\":\ncase \"final\":\ncase \"transient\":\ncase \"volatile\":\ncase \"synchronized\":\ncase \"native\":\ncase \"class\":\ncase \"interface\":\ncase \"extends\":\ncase \"package\":\ncase \"throws\":\ncase \"implements\":\ncase \"boolean\":\ncase \"byte\":\ncase \"char\":\ncase \"short\":\ncase \"int\":\ncase \"long\":\ncase \"float\":\ncase \"double\":\ncase \"void\":\ncase \"if\":\ncase \"else\":\ncase \"try\":\ncase \"catch\":\ncase \"finally\":\ncase \"do\":\ncase \"while\":\ncase \"for\":\ncase \"continue\":\ncase \"switch\":\ncase \"case\":\ncase \"default\":\ncase \"break\":\ncase \"throw\":\ncase \"return\":\ncase \"this\":\ncase \"new\":\ncase \"super\":\ncase \"import\":\ncase \"instanceof\":\ncase \"goto\":\ncase \"const\":\ncase \"null\":\ncase \"true\":\ncase \"false\":\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate List generateFiles(List services) {\nList files = new ArrayList<>();\nfor (ServiceContext service : services) {\nfiles.add(buildFile(service, \"MutinyStub.mustache\", absoluteFileName(service, null)));\nfiles.add(buildFile(service, \"MutinyInterface.mustache\",\nabsoluteFileName(service, service.serviceName + \".java\")));\nfiles.add(buildFile(service, \"MutinyBean.mustache\",\nabsoluteFileName(service, service.serviceName + \"Bean.java\")));\nfiles.add(buildFile(service, \"MutinyClient.mustache\",\nabsoluteFileName(service, service.serviceName + \"Client.java\")));\n}\nreturn files;\n}\nprivate PluginProtos.CodeGeneratorResponse.File buildFile(ServiceContext context, String templateName, String fileName) {\nString content = applyTemplate(templateName, context);\nreturn PluginProtos.CodeGeneratorResponse.File\n.newBuilder()\n.setName(fileName)\n.setContent(content)\n.build();\n}\nprivate String absoluteFileName(ServiceContext ctx, String fileName) {\nif (fileName == null) {\nfileName = ctx.fileName;\n}\nString dir = ctx.packageName.replace('.', '/');\nif (Strings.isNullOrEmpty(dir)) {\nreturn fileName;\n} else {\nreturn dir + \"/\" + fileName;\n}\n}\nprivate String getComments(DescriptorProtos.SourceCodeInfo.Location location) 
{\nreturn location.getLeadingComments().isEmpty() ? location.getTrailingComments() : location.getLeadingComments();\n}\nprivate String getJavaDoc(String comments, String prefix) {\nif (!comments.isEmpty()) {\nStringBuilder builder = new StringBuilder(\"/**\\n\")\n.append(prefix).append(\" *
<pre>\\n\");\nArrays.stream(HtmlEscapers.htmlEscaper().escape(comments).split(\"\\n\"))\n.map(line -> line.replace(\"*/\", \"&#42;&#47;\"))\n.forEach(line -> builder.append(prefix).append(\" * \").append(line).append(\"\\n\"));\nbuilder\n.append(prefix).append(\" * </pre>
\\n\")\n.append(prefix).append(\" */\");\nreturn builder.toString();\n}\nreturn null;\n}\n/**\n* Template class for proto Service objects.\n*/\nprivate static class ServiceContext {\npublic String fileName;\npublic String protoName;\npublic String packageName;\npublic String className;\npublic String classPrefix;\npublic String serviceName;\npublic boolean deprecated;\npublic String javaDoc;\npublic List methods = new ArrayList<>();\npublic List unaryUnaryMethods() {\nreturn methods.stream().filter(m -> !m.isManyInput && !m.isManyOutput).collect(Collectors.toList());\n}\npublic List unaryManyMethods() {\nreturn methods.stream().filter(m -> !m.isManyInput && m.isManyOutput).collect(Collectors.toList());\n}\npublic List manyUnaryMethods() {\nreturn methods.stream().filter(m -> m.isManyInput && !m.isManyOutput).collect(Collectors.toList());\n}\npublic List manyManyMethods() {\nreturn methods.stream().filter(m -> m.isManyInput && m.isManyOutput).collect(Collectors.toList());\n}\n}\n/**\n* Template class for proto RPC objects.\n*/\nprivate static class MethodContext {\npublic String methodName;\npublic String inputType;\npublic String outputType;\npublic boolean deprecated;\npublic boolean isManyInput;\npublic boolean isManyOutput;\npublic String mutinyCallsMethodName;\npublic String grpcCallsMethodName;\npublic int methodNumber;\npublic String javaDoc;\npublic String methodNameUpperUnderscore() {\nStringBuilder s = new StringBuilder();\nfor (int i = 0; i < methodName.length(); i++) {\nchar c = methodName.charAt(i);\ns.append(Character.toUpperCase(c));\nif ((i < methodName.length() - 1) && Character.isLowerCase(c)\n&& Character.isUpperCase(methodName.charAt(i + 1))) {\ns.append('_');\n}\n}\nreturn s.toString();\n}\npublic String methodNamePascalCase() {\nString mn = methodName.replace(\"_\", \"\");\nreturn String.valueOf(Character.toUpperCase(mn.charAt(0))) + mn.substring(1);\n}\npublic String methodNameCamelCase() {\nString mn = methodName.replace(\"_\", \"\");\nreturn String.valueOf(Character.toLowerCase(mn.charAt(0))) + mn.substring(1);\n}\npublic String methodHeader() {\nString mh = \"\";\nif (!Strings.isNullOrEmpty(javaDoc)) {\nmh = javaDoc;\n}\nif (deprecated) {\nmh += \"\\n @Deprecated\";\n}\nreturn mh;\n}\n}\npublic static void main(String[] args) {\nif (args.length == 0) {\nProtocPlugin.generate(new MutinyGrpcGenerator());\n} else {\nProtocPlugin.debug(new MutinyGrpcGenerator(), args[0]);\n}\n}\n}" + }, + { + "comment": "why not capture it in `default` branch?", + "method_body": "public static Type convertColumnType(String hiveType) throws DdlException {\nString typeUpperCase = Utils.getTypeKeyword(hiveType).toUpperCase();\nPrimitiveType primitiveType;\nswitch (typeUpperCase) {\ncase \"TINYINT\":\nprimitiveType = PrimitiveType.TINYINT;\nbreak;\ncase \"SMALLINT\":\nprimitiveType = PrimitiveType.SMALLINT;\nbreak;\ncase \"INT\":\ncase \"INTEGER\":\nprimitiveType = PrimitiveType.INT;\nbreak;\ncase \"BIGINT\":\nprimitiveType = PrimitiveType.BIGINT;\nbreak;\ncase \"FLOAT\":\nprimitiveType = PrimitiveType.FLOAT;\nbreak;\ncase \"DOUBLE\":\ncase \"DOUBLE PRECISION\":\nprimitiveType = PrimitiveType.DOUBLE;\nbreak;\ncase \"DECIMAL\":\ncase \"NUMERIC\":\nprimitiveType = PrimitiveType.DECIMAL32;\nbreak;\ncase \"TIMESTAMP\":\nprimitiveType = PrimitiveType.DATETIME;\nbreak;\ncase \"DATE\":\nprimitiveType = PrimitiveType.DATE;\nbreak;\ncase \"STRING\":\nreturn ScalarType.createDefaultString();\ncase \"VARCHAR\":\nreturn ScalarType.createVarcharType(Utils.getVarcharLength(hiveType));\ncase 
\"CHAR\":\nreturn ScalarType.createCharType(Utils.getCharLength(hiveType));\ncase \"BOOLEAN\":\nprimitiveType = PrimitiveType.BOOLEAN;\nbreak;\ncase \"BINARY\":\nprimitiveType = PrimitiveType.CONVERT_FAILED;\nbreak;\ncase \"ARRAY\":\nType type = Utils.convertToArrayType(hiveType);\nif (type.isArrayType()) {\nreturn type;\n}\ndefault:\nthrow new DdlException(\"hive table column type [\" + typeUpperCase + \"] transform failed.\");\n}\nif (primitiveType != PrimitiveType.DECIMAL32) {\nreturn ScalarType.createType(primitiveType);\n} else {\nint[] parts = Utils.getPrecisionAndScale(hiveType);\nreturn ScalarType.createUnifiedDecimalType(parts[0], parts[1]);\n}\n}", + "target_code": "case \"BINARY\":", + "method_body_after": "public static Type convertColumnType(String hiveType) throws DdlException {\nString typeUpperCase = Utils.getTypeKeyword(hiveType).toUpperCase();\nPrimitiveType primitiveType;\nswitch (typeUpperCase) {\ncase \"TINYINT\":\nprimitiveType = PrimitiveType.TINYINT;\nbreak;\ncase \"SMALLINT\":\nprimitiveType = PrimitiveType.SMALLINT;\nbreak;\ncase \"INT\":\ncase \"INTEGER\":\nprimitiveType = PrimitiveType.INT;\nbreak;\ncase \"BIGINT\":\nprimitiveType = PrimitiveType.BIGINT;\nbreak;\ncase \"FLOAT\":\nprimitiveType = PrimitiveType.FLOAT;\nbreak;\ncase \"DOUBLE\":\ncase \"DOUBLE PRECISION\":\nprimitiveType = PrimitiveType.DOUBLE;\nbreak;\ncase \"DECIMAL\":\ncase \"NUMERIC\":\nprimitiveType = PrimitiveType.DECIMAL32;\nbreak;\ncase \"TIMESTAMP\":\nprimitiveType = PrimitiveType.DATETIME;\nbreak;\ncase \"DATE\":\nprimitiveType = PrimitiveType.DATE;\nbreak;\ncase \"STRING\":\nreturn ScalarType.createDefaultString();\ncase \"VARCHAR\":\nreturn ScalarType.createVarcharType(Utils.getVarcharLength(hiveType));\ncase \"CHAR\":\nreturn ScalarType.createCharType(Utils.getCharLength(hiveType));\ncase \"BOOLEAN\":\nprimitiveType = PrimitiveType.BOOLEAN;\nbreak;\ncase \"ARRAY\":\nType type = Utils.convertToArrayType(hiveType);\nif (type.isArrayType()) {\nreturn type;\n}\ndefault:\nprimitiveType = PrimitiveType.UNKNOWN_TYPE;\nbreak;\n}\nif (primitiveType != PrimitiveType.DECIMAL32) {\nreturn ScalarType.createType(primitiveType);\n} else {\nint[] parts = Utils.getPrecisionAndScale(hiveType);\nreturn ScalarType.createUnifiedDecimalType(parts[0], parts[1]);\n}\n}", + "context_before": "class HiveMetaStoreTableUtils {\nprivate static final Logger LOG = LogManager.getLogger(HiveMetaStoreTableUtils.class);\npublic static final IdGenerator connectorTableIdIdGenerator = ConnectorTableId.createGenerator();\npublic static final IdGenerator connectorDbIdIdGenerator = ConnectorDatabaseId.createGenerator();\npublic static Map getTableLevelColumnStats(HiveMetaStoreTableInfo hmsTable,\nList columnNames) throws DdlException {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.tableColumnStats\")) {\nMap allColumnStats = GlobalStateMgr.getCurrentState().getHiveRepository()\n.getTableLevelColumnStats(hmsTable);\nMap result = Maps.newHashMapWithExpectedSize(columnNames.size());\nfor (String columnName : columnNames) {\nresult.put(columnName, allColumnStats.get(columnName));\n}\nreturn result;\n}\n}\npublic static List getPartitionColumns(HiveMetaStoreTableInfo hmsTable) {\nList partColumns = Lists.newArrayList();\nfor (String columnName : hmsTable.getPartColumnNames()) {\npartColumns.add(hmsTable.getNameToColumn().get(columnName));\n}\nreturn partColumns;\n}\npublic static List getAllColumnNames(HiveMetaStoreTableInfo hmsTable) {\nreturn new 
ArrayList<>(hmsTable.getNameToColumn().keySet());\n}\npublic static List getPartitionsStats(HiveMetaStoreTableInfo hmsTable,\nList partitionKeys) throws DdlException {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.partitionStats\")) {\nreturn GlobalStateMgr.getCurrentState().getHiveRepository().getPartitionsStats(hmsTable, partitionKeys);\n}\n}\npublic static HiveTableStats getTableStats(HiveMetaStoreTableInfo hmsTable) throws DdlException {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.tableStats\")) {\nreturn GlobalStateMgr.getCurrentState().getHiveRepository().getTableStats(hmsTable.getResourceName(),\nhmsTable.getDb(), hmsTable.getTable());\n}\n}\npublic static List getPartitions(HiveMetaStoreTableInfo hmsTable,\nList partitionKeys) throws DdlException {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.partitions\")) {\nreturn GlobalStateMgr.getCurrentState().getHiveRepository()\n.getPartitions(hmsTable, partitionKeys);\n}\n}\npublic static Map getPartitionKeys(HiveMetaStoreTableInfo hmsTable) throws DdlException {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.partitionKeys\")) {\nreturn GlobalStateMgr.getCurrentState().getHiveRepository().getPartitionKeys(hmsTable);\n}\n}\npublic static long getPartitionStatsRowCount(HiveMetaStoreTableInfo hmsTable,\nList partitions) {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.partitionRowCount\")) {\nreturn doGetPartitionStatsRowCount(hmsTable, partitions);\n}\n}\npublic static Map getAllHiveColumns(Table table) {\nList unPartHiveColumns = table.getSd().getCols();\nList partHiveColumns = table.getPartitionKeys();\nMap allHiveColumns = unPartHiveColumns.stream()\n.collect(Collectors.toMap(FieldSchema::getName, fieldSchema -> fieldSchema));\nfor (FieldSchema hiveColumn : partHiveColumns) {\nallHiveColumns.put(hiveColumn.getName(), hiveColumn);\n}\nreturn allHiveColumns;\n}\npublic static List getAllColumns(Table table) {\nList allColumns = table.getSd().getCols();\nList partHiveColumns = table.getPartitionKeys();\nallColumns.addAll(partHiveColumns);\nreturn allColumns;\n}\npublic static boolean validateColumnType(String hiveType, Type type) {\nif (hiveType == null) {\nreturn false;\n}\nString typeUpperCase = Utils.getTypeKeyword(hiveType).toUpperCase();\nPrimitiveType primitiveType = type.getPrimitiveType();\nswitch (typeUpperCase) {\ncase \"TINYINT\":\nreturn primitiveType == PrimitiveType.TINYINT;\ncase \"SMALLINT\":\nreturn primitiveType == PrimitiveType.SMALLINT;\ncase \"INT\":\ncase \"INTEGER\":\nreturn primitiveType == PrimitiveType.INT;\ncase \"BIGINT\":\nreturn primitiveType == PrimitiveType.BIGINT;\ncase \"FLOAT\":\nreturn primitiveType == PrimitiveType.FLOAT;\ncase \"DOUBLE\":\ncase \"DOUBLE PRECISION\":\nreturn primitiveType == PrimitiveType.DOUBLE;\ncase \"DECIMAL\":\ncase \"NUMERIC\":\nreturn primitiveType == PrimitiveType.DECIMALV2 || primitiveType == PrimitiveType.DECIMAL32 ||\nprimitiveType == PrimitiveType.DECIMAL64 || primitiveType == PrimitiveType.DECIMAL128;\ncase \"TIMESTAMP\":\nreturn primitiveType == PrimitiveType.DATETIME;\ncase \"DATE\":\nreturn primitiveType == PrimitiveType.DATE;\ncase \"STRING\":\ncase \"VARCHAR\":\nreturn primitiveType == PrimitiveType.VARCHAR;\ncase \"CHAR\":\nreturn primitiveType == PrimitiveType.CHAR ||\nprimitiveType == PrimitiveType.VARCHAR;\ncase \"BOOLEAN\":\nreturn primitiveType == PrimitiveType.BOOLEAN;\ncase \"BINARY\":\nreturn primitiveType == 
PrimitiveType.CONVERT_FAILED;\ncase \"ARRAY\":\nif (!type.isArrayType()) {\nreturn false;\n}\nreturn validateColumnType(hiveType.substring(hiveType.indexOf('<') + 1, hiveType.length() - 1),\n((ArrayType) type).getItemType());\ndefault:\nreturn false;\n}\n}\npublic static HiveTable convertToSRTable(Table hiveTable, String resoureName) throws DdlException {\nif (hiveTable.getTableType().equals(\"VIRTUAL_VIEW\")) {\nthrow new DdlException(\"Hive view table is not supported.\");\n}\nList allHiveColumns = getAllColumns(hiveTable);\nList fullSchema = Lists.newArrayList();\nfor (FieldSchema fieldSchema : allHiveColumns) {\nType srType = convertColumnType(fieldSchema.getType());\nColumn column = new Column(fieldSchema.getName(), srType, true);\nfullSchema.add(column);\n}\nMap properties = Maps.newHashMap();\nproperties.put(HiveTable.HIVE_DB, hiveTable.getDbName());\nproperties.put(HiveTable.HIVE_TABLE, hiveTable.getTableName());\nproperties.put(HiveTable.HIVE_METASTORE_URIS, resoureName);\nproperties.put(HiveTable.HIVE_RESOURCE, resoureName);\nreturn new HiveTable(connectorTableIdIdGenerator.getNextId().asInt(), hiveTable.getTableName(),\nfullSchema, properties, hiveTable);\n}\npublic static Database convertToSRDatabase(String dbName) {\nreturn new Database(connectorDbIdIdGenerator.getNextId().asInt(), dbName);\n}\npublic static long doGetPartitionStatsRowCount(HiveMetaStoreTableInfo hmsTable,\nList partitions) {\nif (partitions == null) {\ntry {\npartitions = Lists.newArrayList(getPartitionKeys(hmsTable).keySet());\n} catch (DdlException e) {\nLOG.warn(\"Failed to get table {} partitions.\", hmsTable.getTable(), e);\nreturn -1;\n}\n}\nif (partitions.isEmpty()) {\nreturn 0;\n}\nlong numRows = -1;\nList partitionsStats = Lists.newArrayList();\ntry {\npartitionsStats = getPartitionsStats(hmsTable, partitions);\n} catch (DdlException e) {\nLOG.warn(\"Failed to get table {} partitions stats.\", hmsTable.getTable(), e);\n}\nfor (int i = 0; i < partitionsStats.size(); i++) {\nlong partNumRows = partitionsStats.get(i).getNumRows();\nlong partTotalFileBytes = partitionsStats.get(i).getTotalFileBytes();\nif (partNumRows > -1) {\nif (numRows == -1) {\nnumRows = 0;\n}\nnumRows += partNumRows;\n} else {\nLOG.debug(\"Table {} partition {} stats is invalid. 
num rows: {}, total file bytes: {}\",\nhmsTable.getTable(), partitions.get(i), partNumRows, partTotalFileBytes);\n}\n}\nreturn numRows;\n}\npublic static boolean isInternalCatalog(String resourceName) {\nreturn !resourceName.startsWith(\"thrift://\");\n}\n}",
"context_after": "class HiveMetaStoreTableUtils {\nprivate static final Logger LOG = LogManager.getLogger(HiveMetaStoreTableUtils.class);\npublic static final IdGenerator connectorTableIdIdGenerator = ConnectorTableId.createGenerator();\npublic static final IdGenerator connectorDbIdIdGenerator = ConnectorDatabaseId.createGenerator();\npublic static Map getTableLevelColumnStats(HiveMetaStoreTableInfo hmsTable,\nList columnNames) throws DdlException {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.tableColumnStats\")) {\nMap allColumnStats = GlobalStateMgr.getCurrentState().getHiveRepository()\n.getTableLevelColumnStats(hmsTable);\nMap result = Maps.newHashMapWithExpectedSize(columnNames.size());\nfor (String columnName : columnNames) {\nresult.put(columnName, allColumnStats.get(columnName));\n}\nreturn result;\n}\n}\npublic static List getPartitionColumns(HiveMetaStoreTableInfo hmsTable) {\nList partColumns = Lists.newArrayList();\nfor (String columnName : hmsTable.getPartColumnNames()) {\npartColumns.add(hmsTable.getNameToColumn().get(columnName));\n}\nreturn partColumns;\n}\npublic static List getAllColumnNames(HiveMetaStoreTableInfo hmsTable) {\nreturn new ArrayList<>(hmsTable.getNameToColumn().keySet());\n}\npublic static List getPartitionsStats(HiveMetaStoreTableInfo hmsTable,\nList partitionKeys) throws DdlException {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.partitionStats\")) {\nreturn GlobalStateMgr.getCurrentState().getHiveRepository().getPartitionsStats(hmsTable, partitionKeys);\n}\n}\npublic static HiveTableStats getTableStats(HiveMetaStoreTableInfo hmsTable) throws DdlException {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.tableStats\")) {\nreturn GlobalStateMgr.getCurrentState().getHiveRepository().getTableStats(hmsTable.getResourceName(),\nhmsTable.getDb(), hmsTable.getTable());\n}\n}\npublic static List getPartitions(HiveMetaStoreTableInfo hmsTable,\nList partitionKeys) throws DdlException {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.partitions\")) {\nreturn GlobalStateMgr.getCurrentState().getHiveRepository()\n.getPartitions(hmsTable, partitionKeys);\n}\n}\npublic static Map getPartitionKeys(HiveMetaStoreTableInfo hmsTable) throws DdlException {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.partitionKeys\")) {\nreturn GlobalStateMgr.getCurrentState().getHiveRepository().getPartitionKeys(hmsTable);\n}\n}\npublic static long getPartitionStatsRowCount(HiveMetaStoreTableInfo hmsTable,\nList partitions) {\ntry (PlannerProfile.ScopedTimer _ = PlannerProfile.getScopedTimer(\"HMS.partitionRowCount\")) {\nreturn doGetPartitionStatsRowCount(hmsTable, partitions);\n}\n}\npublic static List getAllHiveColumns(Table table) {\nImmutableList.Builder allColumns = ImmutableList.builder();\nList unHivePartColumns = table.getSd().getCols();\nList partHiveColumns = table.getPartitionKeys();\nreturn allColumns.addAll(unHivePartColumns).addAll(partHiveColumns).build();\n}\npublic static boolean validateColumnType(String hiveType, Type type) {\nif (hiveType == null) {\nreturn false;\n}\nString typeUpperCase = Utils.getTypeKeyword(hiveType).toUpperCase();\nPrimitiveType primitiveType = 
type.getPrimitiveType();\nswitch (typeUpperCase) {\ncase \"TINYINT\":\nreturn primitiveType == PrimitiveType.TINYINT;\ncase \"SMALLINT\":\nreturn primitiveType == PrimitiveType.SMALLINT;\ncase \"INT\":\ncase \"INTEGER\":\nreturn primitiveType == PrimitiveType.INT;\ncase \"BIGINT\":\nreturn primitiveType == PrimitiveType.BIGINT;\ncase \"FLOAT\":\nreturn primitiveType == PrimitiveType.FLOAT;\ncase \"DOUBLE\":\ncase \"DOUBLE PRECISION\":\nreturn primitiveType == PrimitiveType.DOUBLE;\ncase \"DECIMAL\":\ncase \"NUMERIC\":\nreturn primitiveType == PrimitiveType.DECIMALV2 || primitiveType == PrimitiveType.DECIMAL32 ||\nprimitiveType == PrimitiveType.DECIMAL64 || primitiveType == PrimitiveType.DECIMAL128;\ncase \"TIMESTAMP\":\nreturn primitiveType == PrimitiveType.DATETIME;\ncase \"DATE\":\nreturn primitiveType == PrimitiveType.DATE;\ncase \"STRING\":\ncase \"VARCHAR\":\nreturn primitiveType == PrimitiveType.VARCHAR;\ncase \"CHAR\":\nreturn primitiveType == PrimitiveType.CHAR ||\nprimitiveType == PrimitiveType.VARCHAR;\ncase \"BOOLEAN\":\nreturn primitiveType == PrimitiveType.BOOLEAN;\ncase \"ARRAY\":\nif (!type.isArrayType()) {\nreturn false;\n}\nreturn validateColumnType(hiveType.substring(hiveType.indexOf('<') + 1, hiveType.length() - 1),\n((ArrayType) type).getItemType());\ndefault:\nreturn primitiveType == PrimitiveType.UNKNOWN_TYPE;\n}\n}\npublic static HiveTable convertToSRTable(Table hiveTable, String resoureName) throws DdlException {\nif (hiveTable.getTableType().equals(\"VIRTUAL_VIEW\")) {\nthrow new DdlException(\"Hive view table is not supported.\");\n}\nList allHiveColumns = getAllHiveColumns(hiveTable);\nList fullSchema = Lists.newArrayList();\nfor (FieldSchema fieldSchema : allHiveColumns) {\nType srType = convertColumnType(fieldSchema.getType());\nColumn column = new Column(fieldSchema.getName(), srType, true);\nfullSchema.add(column);\n}\nMap properties = Maps.newHashMap();\nproperties.put(HiveTable.HIVE_DB, hiveTable.getDbName());\nproperties.put(HiveTable.HIVE_TABLE, hiveTable.getTableName());\nproperties.put(HiveTable.HIVE_METASTORE_URIS, resoureName);\nproperties.put(HiveTable.HIVE_RESOURCE, resoureName);\nreturn new HiveTable(connectorTableIdIdGenerator.getNextId().asInt(), hiveTable.getTableName(),\nfullSchema, properties, hiveTable);\n}\npublic static Database convertToSRDatabase(String dbName) {\nreturn new Database(connectorDbIdIdGenerator.getNextId().asInt(), dbName);\n}\npublic static long doGetPartitionStatsRowCount(HiveMetaStoreTableInfo hmsTable,\nList partitions) {\nif (partitions == null) {\ntry {\npartitions = Lists.newArrayList(getPartitionKeys(hmsTable).keySet());\n} catch (DdlException e) {\nLOG.warn(\"Failed to get table {} partitions.\", hmsTable.getTable(), e);\nreturn -1;\n}\n}\nif (partitions.isEmpty()) {\nreturn 0;\n}\nlong numRows = -1;\nList partitionsStats = Lists.newArrayList();\ntry {\npartitionsStats = getPartitionsStats(hmsTable, partitions);\n} catch (DdlException e) {\nLOG.warn(\"Failed to get table {} partitions stats.\", hmsTable.getTable(), e);\n}\nfor (int i = 0; i < partitionsStats.size(); i++) {\nlong partNumRows = partitionsStats.get(i).getNumRows();\nlong partTotalFileBytes = partitionsStats.get(i).getTotalFileBytes();\nif (partNumRows > -1) {\nif (numRows == -1) {\nnumRows = 0;\n}\nnumRows += partNumRows;\n} else {\nLOG.debug(\"Table {} partition {} stats is invalid. 
num rows: {}, total file bytes: {}\",\nhmsTable.getTable(), partitions.get(i), partNumRows, partTotalFileBytes);\n}\n}\nreturn numRows;\n}\npublic static boolean isInternalCatalog(String resourceName) {\nreturn !resourceName.startsWith(\"thrift:\n}\n}" + }, + { + "comment": "It's unlikely that `tracerInstance.isResolvable()` would be false if the filter is present. I'm probably being over cautious about a situation where `Tracer` has been replaced by an application developer and it has an issue.", + "method_body": "public void handle(RoutingContext routingContext) {\nQuarkusContextStorage.INSTANCE.setRoutingContext(routingContext);\nif (!initialized) {\nInstance tracerInstance = CDI.current().select(Tracer.class);\nif (tracerInstance.isResolvable()) {\ntracer = tracerInstance.get();\n}\ninitialized = true;\n}\nif (tracer != null) {\nContext parentContext = propagatedContext(routingContext);\nSpanBuilder builder = tracer.spanBuilder(routingContext.request().uri().substring(1))\n.setParent(parentContext)\n.setSpanKind(SpanKind.SERVER);\nbuilder.setAttribute(SemanticAttributes.HTTP_FLAVOR, convertHttpVersion(routingContext.request().version()));\nbuilder.setAttribute(SemanticAttributes.HTTP_METHOD, routingContext.request().method().name());\nbuilder.setAttribute(SemanticAttributes.HTTP_TARGET, routingContext.request().path());\nbuilder.setAttribute(SemanticAttributes.HTTP_SCHEME, routingContext.request().scheme());\nbuilder.setAttribute(SemanticAttributes.HTTP_HOST, routingContext.request().host());\nbuilder.setAttribute(SemanticAttributes.HTTP_CLIENT_IP, routingContext.request().remoteAddress().host());\nbuilder.setAttribute(SemanticAttributes.HTTP_USER_AGENT, routingContext.request().getHeader(\"User-Agent\"));\nString contentLength = routingContext.request().getHeader(\"Content-Length\");\nif (contentLength != null && contentLength.length() > 0 && Long.parseLong(contentLength) > 0) {\nbuilder.setAttribute(SemanticAttributes.HTTP_REQUEST_CONTENT_LENGTH, Long.valueOf(contentLength));\n} else {\nbuilder.setAttribute(SemanticAttributes.HTTP_REQUEST_CONTENT_LENGTH, routingContext.request().bytesRead());\n}\nSpan currentSpan = builder.startSpan();\nScope spanScope = currentSpan.makeCurrent();\nHttpServerResponse response = routingContext.response();\nroutingContext.addHeadersEndHandler(new Handler() {\n@Override\npublic void handle(Void event) {\ncurrentSpan.setAttribute(SemanticAttributes.HTTP_STATUS_CODE, response.getStatusCode());\nif (routingContext.failed()) {\ncurrentSpan.setStatus(StatusCode.ERROR);\ncurrentSpan.recordException(routingContext.failure());\n}\ncurrentSpan.end();\nspanScope.close();\nQuarkusContextStorage.INSTANCE.clearRoutingContext(routingContext);\n}\n});\n}\nroutingContext.next();\n}", + "target_code": "tracer = tracerInstance.get();", + "method_body_after": "public void handle(RoutingContext routingContext) {\nQuarkusContextStorage.INSTANCE.setRoutingContext(routingContext);\nif (!initialized) {\nInstance tracerInstance = CDI.current().select(Tracer.class);\nif (tracerInstance.isResolvable()) {\ntracer = tracerInstance.get();\n}\ninitialized = true;\n}\nif (tracer != null) {\nContext parentContext = propagatedContext(routingContext);\nSpanBuilder builder = tracer.spanBuilder(routingContext.request().uri().substring(1))\n.setParent(parentContext)\n.setSpanKind(SpanKind.SERVER);\nbuilder.setAttribute(SemanticAttributes.HTTP_FLAVOR, convertHttpVersion(routingContext.request().version()));\nbuilder.setAttribute(SemanticAttributes.HTTP_METHOD, 
routingContext.request().method().name());\nbuilder.setAttribute(SemanticAttributes.HTTP_TARGET, routingContext.request().path());\nbuilder.setAttribute(SemanticAttributes.HTTP_SCHEME, routingContext.request().scheme());\nbuilder.setAttribute(SemanticAttributes.HTTP_HOST, routingContext.request().host());\nbuilder.setAttribute(SemanticAttributes.HTTP_CLIENT_IP, routingContext.request().remoteAddress().host());\nbuilder.setAttribute(SemanticAttributes.HTTP_USER_AGENT, routingContext.request().getHeader(\"User-Agent\"));\nString contentLength = routingContext.request().getHeader(\"Content-Length\");\nif (contentLength != null && contentLength.length() > 0 && Long.parseLong(contentLength) > 0) {\nbuilder.setAttribute(SemanticAttributes.HTTP_REQUEST_CONTENT_LENGTH, Long.valueOf(contentLength));\n} else {\nbuilder.setAttribute(SemanticAttributes.HTTP_REQUEST_CONTENT_LENGTH, routingContext.request().bytesRead());\n}\nSpan currentSpan = builder.startSpan();\nScope spanScope = currentSpan.makeCurrent();\nHttpServerResponse response = routingContext.response();\nroutingContext.addHeadersEndHandler(new Handler() {\n@Override\npublic void handle(Void event) {\ncurrentSpan.setAttribute(SemanticAttributes.HTTP_STATUS_CODE, response.getStatusCode());\nif (routingContext.failed()) {\ncurrentSpan.setStatus(StatusCode.ERROR);\ncurrentSpan.recordException(routingContext.failure());\n}\ncurrentSpan.end();\nspanScope.close();\nQuarkusContextStorage.INSTANCE.clearRoutingContext(routingContext);\n}\n});\n}\nroutingContext.next();\n}", + "context_before": "class VertxTracerFilter implements Handler {\nprivate static final TextMapPropagator TEXT_MAP_PROPAGATOR = GlobalOpenTelemetry.getPropagators().getTextMapPropagator();\nTracer tracer;\nboolean initialized = false;\n@Override\nprivate String convertHttpVersion(HttpVersion version) {\nswitch (version) {\ncase HTTP_1_0:\nreturn \"1.0\";\ncase HTTP_1_1:\nreturn \"1.1\";\ncase HTTP_2:\nreturn \"2.0\";\ndefault:\nreturn \"\";\n}\n}\nprivate Context propagatedContext(RoutingContext routingContext) {\nreturn TEXT_MAP_PROPAGATOR.extract(Context.current(), routingContext.request(), GETTER);\n}\nprivate static final TextMapGetter GETTER = new TextMapGetter() {\n@Override\npublic Iterable keys(HttpServerRequest carrier) {\nreturn new Iterable() {\n@Override\npublic Iterator iterator() {\nreturn carrier.headers().names().iterator();\n}\n};\n}\n@Override\npublic String get(HttpServerRequest carrier, String key) {\nif (carrier != null) {\nreturn carrier.getHeader(key);\n}\nreturn null;\n}\n};\n}", + "context_after": "class VertxTracerFilter implements Handler {\nprivate static final TextMapPropagator TEXT_MAP_PROPAGATOR = GlobalOpenTelemetry.getPropagators().getTextMapPropagator();\nTracer tracer;\nboolean initialized = false;\n@Override\nprivate String convertHttpVersion(HttpVersion version) {\nswitch (version) {\ncase HTTP_1_0:\nreturn \"1.0\";\ncase HTTP_1_1:\nreturn \"1.1\";\ncase HTTP_2:\nreturn \"2.0\";\ndefault:\nreturn \"\";\n}\n}\nprivate Context propagatedContext(RoutingContext routingContext) {\nreturn TEXT_MAP_PROPAGATOR.extract(Context.current(), routingContext.request(), GETTER);\n}\nprivate static final TextMapGetter GETTER = new TextMapGetter() {\n@Override\npublic Iterable keys(HttpServerRequest carrier) {\nreturn new Iterable() {\n@Override\npublic Iterator iterator() {\nreturn carrier.headers().names().iterator();\n}\n};\n}\n@Override\npublic String get(HttpServerRequest carrier, String key) {\nif (carrier != null) {\nreturn 
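The review note above concerns the `isResolvable()` guard around the lazy CDI lookup of `Tracer`. The guarded one-shot lookup pattern itself can be sketched in isolation; `LazyTracerHolder` is an illustrative name, and the `jakarta.*` imports are an assumption (older Quarkus versions use `javax.enterprise` instead):

```java
import io.opentelemetry.api.trace.Tracer;
import jakarta.enterprise.inject.Instance;
import jakarta.enterprise.inject.spi.CDI;

// Illustrative holder showing the guarded, one-shot CDI lookup from the filter
// above: the lookup runs once, and a non-resolvable (e.g. replaced and broken)
// Tracer bean disables tracing instead of failing every request.
public class LazyTracerHolder {

    private volatile Tracer tracer;
    private volatile boolean initialized;

    public Tracer get() {
        if (!initialized) {
            synchronized (this) {
                if (!initialized) {
                    Instance<Tracer> candidate = CDI.current().select(Tracer.class);
                    if (candidate.isResolvable()) { // defensive: rarely false when the filter is installed
                        tracer = candidate.get();
                    }
                    initialized = true;
                }
            }
        }
        return tracer; // may be null; callers skip span creation in that case
    }
}
```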
carrier.getHeader(key);\n}\nreturn null;\n}\n};\n}" + }, + { + "comment": "Shouldn't we ideally pass the errors related to WebSocket connection in the onError resource? IMO all other internal errors should be logged in the internal log but should not appear in onError resource since they are not related to WebSocket connection. WDYT?", + "method_body": "public static void dispatchError(WebSocketOpenConnectionInfo connectionInfo, Throwable throwable) {\nWebSocketService webSocketService = connectionInfo.getService();\nResource onErrorResource = webSocketService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_ERROR);\nif (isUnexpectedError(throwable)) {\nlog.error(\"Unexpected error\", throwable);\nreturn;\n}\nif (onErrorResource == null) {\nErrorHandlerUtils.printError(throwable);\nreturn;\n}\nBValue[] bValues = new BValue[onErrorResource.getParamDetails().size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = getError(webSocketService, throwable);\nCallableUnitCallback onErrorCallback = new CallableUnitCallback() {\n@Override\npublic void notifySuccess() {\n}\n@Override\npublic void notifyFailure(BStruct error) {\nErrorHandlerUtils.printError(\"error: \" + BLangVMErrors.getPrintableStackTrace(error));\n}\n};\nExecutor.submit(onErrorResource, onErrorCallback, null, null, bValues);\n}", + "target_code": "if (isUnexpectedError(throwable)) {", + "method_body_after": "public static void dispatchError(WebSocketOpenConnectionInfo connectionInfo, Throwable throwable) {\nWebSocketService webSocketService = connectionInfo.getService();\nResource onErrorResource = webSocketService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_ERROR);\nif (isUnexpectedError(throwable)) {\nlog.error(\"Unexpected error\", throwable);\n}\nif (onErrorResource == null) {\nErrorHandlerUtils.printError(throwable);\nreturn;\n}\nBValue[] bValues = new BValue[onErrorResource.getParamDetails().size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = getError(webSocketService, throwable);\nCallableUnitCallback onErrorCallback = new CallableUnitCallback() {\n@Override\npublic void notifySuccess() {\n}\n@Override\npublic void notifyFailure(BStruct error) {\nErrorHandlerUtils.printError(\"error: \" + BLangVMErrors.getPrintableStackTrace(error));\n}\n};\nExecutor.submit(onErrorResource, onErrorCallback, null, null, bValues);\n}", + "context_before": "class WebSocketDispatcher {\nprivate static final Logger log = LoggerFactory.getLogger(WebSocketDispatcher.class);\n/**\n* This will find the best matching service for given web socket request.\n*\n* @param webSocketMessage incoming message.\n* @return matching service.\n*/\npublic static WebSocketService findService(WebSocketServicesRegistry servicesRegistry,\nMap pathParams, WebSocketInitMessage webSocketMessage,\nHTTPCarbonMessage msg) {\ntry {\nString serviceUri = webSocketMessage.getTarget();\nserviceUri = WebSocketUtil.refactorUri(serviceUri);\nURI requestUri;\ntry {\nrequestUri = URI.create(serviceUri);\n} catch (IllegalArgumentException e) {\nthrow new BallerinaConnectorException(e.getMessage());\n}\nWebSocketService service = servicesRegistry.getUriTemplate().matches(requestUri.getPath(), pathParams,\nwebSocketMessage);\nif (service == null) {\nthrow new BallerinaConnectorException(\"no Service found to handle the service request: \" + serviceUri);\n}\nmsg.setProperty(HttpConstants.QUERY_STR, requestUri.getRawQuery());\nreturn service;\n} catch (Throwable throwable) {\nString message = \"No Service found to handle the service 
request\";\nwebSocketMessage.cancelHandshake(404, message);\nthrow new BallerinaConnectorException(message, throwable);\n}\n}\npublic static void dispatchTextMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketTextMessage textMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onTextMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_TEXT);\nif (onTextMessageResource == null) {\nwebSocketConnection.readNextFrame();\nreturn;\n}\nList paramDetails = onTextMessageResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = new BString(textMessage.getText());\nif (paramDetails.size() == 3) {\nbValues[2] = new BBoolean(textMessage.isFinalFragment());\n}\nExecutor.submit(onTextMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null,\nnull, bValues);\n}\npublic static void dispatchBinaryMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketBinaryMessage binaryMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onBinaryMessageResource = wsService.getResourceByName(\nWebSocketConstants.RESOURCE_NAME_ON_BINARY);\nif (onBinaryMessageResource == null) {\nwebSocketConnection.readNextFrame();\nreturn;\n}\nList paramDetails = onBinaryMessageResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = new BBlob(binaryMessage.getByteArray());\nif (paramDetails.size() == 3) {\nbValues[2] = new BBoolean(binaryMessage.isFinalFragment());\n}\nExecutor.submit(onBinaryMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null,\nnull, bValues);\n}\npublic static void dispatchControlMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketControlMessage controlMessage) {\nif (controlMessage.getControlSignal() == WebSocketControlSignal.PING) {\nWebSocketDispatcher.dispatchPingMessage(connectionInfo, controlMessage);\n} else if (controlMessage.getControlSignal() == WebSocketControlSignal.PONG) {\nWebSocketDispatcher.dispatchPongMessage(connectionInfo, controlMessage);\n} else {\nthrow new BallerinaConnectorException(\"Received unknown control signal\");\n}\n}\nprivate static void dispatchPingMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketControlMessage controlMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onPingMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PING);\nif (onPingMessageResource == null) {\npingAutomatically(controlMessage);\nreturn;\n}\nList paramDetails = onPingMessageResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = new BBlob(controlMessage.getByteArray());\nExecutor.submit(onPingMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null,\nnull, bValues);\n}\nprivate static void dispatchPongMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketControlMessage controlMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = 
connectionInfo.getService();\nResource onPongMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PONG);\nif (onPongMessageResource == null) {\nwebSocketConnection.readNextFrame();\nreturn;\n}\nList paramDetails = onPongMessageResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = new BBlob(controlMessage.getByteArray());\nExecutor.submit(onPongMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null,\nnull, bValues);\n}\npublic static void dispatchCloseMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketCloseMessage closeMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onCloseResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_CLOSE);\nint closeCode = closeMessage.getCloseCode();\nString closeReason = closeMessage.getCloseReason();\nif (onCloseResource == null) {\nif (webSocketConnection.getSession().isOpen()) {\nwebSocketConnection.finishConnectionClosure(closeCode, null);\n}\nreturn;\n}\nList paramDetails = onCloseResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = new BInteger(closeCode);\nbValues[2] = new BString(closeReason);\nCallableUnitCallback onCloseCallback = new CallableUnitCallback() {\n@Override\npublic void notifySuccess() {\nif (closeMessage.getCloseCode() != WebSocketConstants.STATUS_CODE_ABNORMAL_CLOSURE\n&& webSocketConnection.getSession().isOpen()) {\nwebSocketConnection.finishConnectionClosure(closeCode, null).addListener(\ncloseFuture -> connectionInfo.getWebSocketEndpoint().setBooleanField(0, 0));\n}\n}\n@Override\npublic void notifyFailure(BStruct error) {\nErrorHandlerUtils.printError(\"error: \" + BLangVMErrors.getPrintableStackTrace(error));\n}\n};\nExecutor.submit(onCloseResource, onCloseCallback, null, null, bValues);\n}\nprivate static boolean isUnexpectedError(Throwable throwable) {\nreturn !(throwable instanceof CorruptedFrameException);\n}\nprivate static BStruct getError(WebSocketService webSocketService, Throwable throwable) {\nProgramFile programFile = webSocketService.getServiceInfo().getPackageInfo().getProgramFile();\nPackageInfo errorPackageInfo = programFile.getPackageInfo(BLangVMErrors.PACKAGE_BUILTIN);\nStructureTypeInfo errorStructInfo = errorPackageInfo.getStructInfo(BLangVMErrors.STRUCT_GENERIC_ERROR);\nreturn BLangVMStructs.createBStruct(errorStructInfo, throwable.getMessage());\n}\npublic static void dispatchIdleTimeout(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketControlMessage controlMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onIdleTimeoutResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_IDLE_TIMEOUT);\nif (onIdleTimeoutResource == null) {\nwebSocketConnection.readNextFrame();\nreturn;\n}\nList paramDetails = onIdleTimeoutResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nCallableUnitCallback onIdleTimeoutCallback = new CallableUnitCallback() {\n@Override\npublic void notifySuccess() {\n}\n@Override\npublic void notifyFailure(BStruct error) {\nErrorHandlerUtils.printError(\"error: \" + 
BLangVMErrors.getPrintableStackTrace(error));\n}\n};\nExecutor.submit(onIdleTimeoutResource, onIdleTimeoutCallback, null,\nnull, bValues);\n}\nprivate static void pingAutomatically(WebSocketControlMessage controlMessage) {\nWebSocketConnection webSocketConnection = controlMessage.getWebSocketConnection();\nwebSocketConnection.pong(controlMessage.getPayload()).addListener(future -> {\nThrowable cause = future.cause();\nif (!future.isSuccess() && cause != null) {\nErrorHandlerUtils.printError(cause);\n}\nwebSocketConnection.readNextFrame();\n});\n}\npublic static void setPathParams(BValue[] bValues, List paramDetails, Map pathParams,\nint defaultArgSize) {\nint parameterDetailsSize = paramDetails.size();\nif (parameterDetailsSize > defaultArgSize) {\nfor (int i = defaultArgSize; i < parameterDetailsSize; i++) {\nbValues[i] = new BString(pathParams.get(paramDetails.get(i).getVarName()));\n}\n}\n}\n}", + "context_after": "class WebSocketDispatcher {\nprivate static final Logger log = LoggerFactory.getLogger(WebSocketDispatcher.class);\n/**\n* This will find the best matching service for given web socket request.\n*\n* @param webSocketMessage incoming message.\n* @return matching service.\n*/\npublic static WebSocketService findService(WebSocketServicesRegistry servicesRegistry,\nMap pathParams, WebSocketInitMessage webSocketMessage,\nHTTPCarbonMessage msg) {\ntry {\nString serviceUri = webSocketMessage.getTarget();\nserviceUri = WebSocketUtil.refactorUri(serviceUri);\nURI requestUri;\ntry {\nrequestUri = URI.create(serviceUri);\n} catch (IllegalArgumentException e) {\nthrow new BallerinaConnectorException(e.getMessage());\n}\nWebSocketService service = servicesRegistry.getUriTemplate().matches(requestUri.getPath(), pathParams,\nwebSocketMessage);\nif (service == null) {\nthrow new BallerinaConnectorException(\"no Service found to handle the service request: \" + serviceUri);\n}\nmsg.setProperty(HttpConstants.QUERY_STR, requestUri.getRawQuery());\nreturn service;\n} catch (Throwable throwable) {\nString message = \"No Service found to handle the service request\";\nwebSocketMessage.cancelHandshake(404, message);\nthrow new BallerinaConnectorException(message, throwable);\n}\n}\npublic static void dispatchTextMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketTextMessage textMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onTextMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_TEXT);\nif (onTextMessageResource == null) {\nwebSocketConnection.readNextFrame();\nreturn;\n}\nList paramDetails = onTextMessageResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = new BString(textMessage.getText());\nif (paramDetails.size() == 3) {\nbValues[2] = new BBoolean(textMessage.isFinalFragment());\n}\nExecutor.submit(onTextMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null,\nnull, bValues);\n}\npublic static void dispatchBinaryMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketBinaryMessage binaryMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onBinaryMessageResource = wsService.getResourceByName(\nWebSocketConstants.RESOURCE_NAME_ON_BINARY);\nif (onBinaryMessageResource == null) 
{\nwebSocketConnection.readNextFrame();\nreturn;\n}\nList paramDetails = onBinaryMessageResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = new BBlob(binaryMessage.getByteArray());\nif (paramDetails.size() == 3) {\nbValues[2] = new BBoolean(binaryMessage.isFinalFragment());\n}\nExecutor.submit(onBinaryMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null,\nnull, bValues);\n}\npublic static void dispatchControlMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketControlMessage controlMessage) {\nif (controlMessage.getControlSignal() == WebSocketControlSignal.PING) {\nWebSocketDispatcher.dispatchPingMessage(connectionInfo, controlMessage);\n} else if (controlMessage.getControlSignal() == WebSocketControlSignal.PONG) {\nWebSocketDispatcher.dispatchPongMessage(connectionInfo, controlMessage);\n} else {\nthrow new BallerinaConnectorException(\"Received unknown control signal\");\n}\n}\nprivate static void dispatchPingMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketControlMessage controlMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onPingMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PING);\nif (onPingMessageResource == null) {\npingAutomatically(controlMessage);\nreturn;\n}\nList paramDetails = onPingMessageResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = new BBlob(controlMessage.getByteArray());\nExecutor.submit(onPingMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null,\nnull, bValues);\n}\nprivate static void dispatchPongMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketControlMessage controlMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onPongMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PONG);\nif (onPongMessageResource == null) {\nwebSocketConnection.readNextFrame();\nreturn;\n}\nList paramDetails = onPongMessageResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = new BBlob(controlMessage.getByteArray());\nExecutor.submit(onPongMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null,\nnull, bValues);\n}\npublic static void dispatchCloseMessage(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketCloseMessage closeMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onCloseResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_CLOSE);\nint closeCode = closeMessage.getCloseCode();\nString closeReason = closeMessage.getCloseReason();\nif (onCloseResource == null) {\nif (webSocketConnection.getSession().isOpen()) {\nwebSocketConnection.finishConnectionClosure(closeCode, null);\n}\nreturn;\n}\nList paramDetails = onCloseResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nbValues[1] = new BInteger(closeCode);\nbValues[2] = new BString(closeReason);\nCallableUnitCallback 
onCloseCallback = new CallableUnitCallback() {\n@Override\npublic void notifySuccess() {\nif (closeMessage.getCloseCode() != WebSocketConstants.STATUS_CODE_ABNORMAL_CLOSURE\n&& webSocketConnection.getSession().isOpen()) {\nwebSocketConnection.finishConnectionClosure(closeCode, null).addListener(\ncloseFuture -> connectionInfo.getWebSocketEndpoint().setBooleanField(0, 0));\n}\n}\n@Override\npublic void notifyFailure(BStruct error) {\nErrorHandlerUtils.printError(\"error: \" + BLangVMErrors.getPrintableStackTrace(error));\n}\n};\nExecutor.submit(onCloseResource, onCloseCallback, null, null, bValues);\n}\nprivate static BStruct getError(WebSocketService webSocketService, Throwable throwable) {\nProgramFile programFile = webSocketService.getServiceInfo().getPackageInfo().getProgramFile();\nPackageInfo errorPackageInfo = programFile.getPackageInfo(BLangVMErrors.PACKAGE_BUILTIN);\nStructureTypeInfo errorStructInfo = errorPackageInfo.getStructInfo(BLangVMErrors.STRUCT_GENERIC_ERROR);\nString errMsg;\nif (isUnexpectedError(throwable)) {\nerrMsg = \"Unexpected internal error. Please check internal-log for more details!\";\n} else {\nerrMsg = throwable.getMessage();\n}\nreturn BLangVMStructs.createBStruct(errorStructInfo, errMsg);\n}\nprivate static boolean isUnexpectedError(Throwable throwable) {\nreturn !(throwable instanceof CorruptedFrameException);\n}\npublic static void dispatchIdleTimeout(WebSocketOpenConnectionInfo connectionInfo,\nWebSocketControlMessage controlMessage) {\nWebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();\nWebSocketService wsService = connectionInfo.getService();\nResource onIdleTimeoutResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_IDLE_TIMEOUT);\nif (onIdleTimeoutResource == null) {\nwebSocketConnection.readNextFrame();\nreturn;\n}\nList paramDetails = onIdleTimeoutResource.getParamDetails();\nBValue[] bValues = new BValue[paramDetails.size()];\nbValues[0] = connectionInfo.getWebSocketEndpoint();\nCallableUnitCallback onIdleTimeoutCallback = new CallableUnitCallback() {\n@Override\npublic void notifySuccess() {\n}\n@Override\npublic void notifyFailure(BStruct error) {\nErrorHandlerUtils.printError(\"error: \" + BLangVMErrors.getPrintableStackTrace(error));\n}\n};\nExecutor.submit(onIdleTimeoutResource, onIdleTimeoutCallback, null,\nnull, bValues);\n}\nprivate static void pingAutomatically(WebSocketControlMessage controlMessage) {\nWebSocketConnection webSocketConnection = controlMessage.getWebSocketConnection();\nwebSocketConnection.pong(controlMessage.getPayload()).addListener(future -> {\nThrowable cause = future.cause();\nif (!future.isSuccess() && cause != null) {\nErrorHandlerUtils.printError(cause);\n}\nwebSocketConnection.readNextFrame();\n});\n}\npublic static void setPathParams(BValue[] bValues, List paramDetails, Map pathParams,\nint defaultArgSize) {\nint parameterDetailsSize = paramDetails.size();\nif (parameterDetailsSize > defaultArgSize) {\nfor (int i = defaultArgSize; i < parameterDetailsSize; i++) {\nbValues[i] = new BString(pathParams.get(paramDetails.get(i).getVarName()));\n}\n}\n}\n}" + }, + { + "comment": "check this before setting outputTimestamp above (and only if outputTimestamp != null)", + "method_body": "private void setAndVerifyOutputTimestamp() {\nif (outputTimestamp == null && TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) {\noutputTimestamp = target;\n}\nif (outputTimestamp == null) {\noutputTimestamp = 
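The revised `getError` above separates connection-level failures such as `CorruptedFrameException` from unexpected internal errors, which are logged internally and surfaced to the `onError` resource only as a generic message. A condensed sketch of that classification rule (the class name is illustrative):

```java
import io.netty.handler.codec.CorruptedFrameException;

// Minimal sketch of the error-classification rule used by dispatchError/getError:
// connection-related failures keep their message for the onError resource, while
// anything unexpected is logged internally and replaced with a generic message.
final class WebSocketErrorClassifier {

    private WebSocketErrorClassifier() {
    }

    static boolean isUnexpectedError(Throwable throwable) {
        // Only frame corruption is treated as an expected, connection-level error
        // here; a real implementation may whitelist more exception types.
        return !(throwable instanceof CorruptedFrameException);
    }

    static String messageForOnErrorResource(Throwable throwable) {
        return isUnexpectedError(throwable)
                ? "Unexpected internal error. Please check internal-log for more details!"
                : throwable.getMessage();
    }
}
```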
elementInputTimestamp;\n}\ncheckArgument(\n!outputTimestamp.isBefore(elementInputTimestamp),\n\"output timestamp %s should be after input message timestamp or output timestamp of firing timers %s\",\noutputTimestamp,\nelementInputTimestamp);\ncheckArgument(\n!outputTimestamp.isAfter(target),\n\"output timestamp %s should be before %s delivery timestamp %s\",\noutputTimestamp,\ntarget);\nInstant windowExpiry = window.maxTimestamp().plus(allowedLateness);\ncheckArgument(\n!target.isAfter(windowExpiry),\n\"Attempted to set event time timer that outputs for %s but that is\"\n+ \" after the expiration of window %s\",\ntarget,\nwindowExpiry);\n}", + "target_code": "elementInputTimestamp);", + "method_body_after": "private void setAndVerifyOutputTimestamp() {\nif (outputTimestamp != null) {\ncheckArgument(\n!outputTimestamp.isBefore(elementInputTimestamp),\n\"output timestamp %s should be after input message timestamp or output timestamp of firing timers %s\",\noutputTimestamp,\nelementInputTimestamp);\n}\nif (outputTimestamp == null && TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) {\noutputTimestamp = target;\n}\nif (outputTimestamp == null) {\noutputTimestamp = elementInputTimestamp;\n}\nInstant windowExpiry = window.maxTimestamp().plus(allowedLateness);\ncheckArgument(\n!target.isAfter(windowExpiry),\n\"Attempted to set event time timer that outputs for %s but that is\"\n+ \" after the expiration of window %s\",\ntarget,\nwindowExpiry);\n}", + "context_before": "class TimerInternalsTimer implements Timer {\nprivate final TimerInternals timerInternals;\nprivate final BoundedWindow window;\nprivate final StateNamespace namespace;\nprivate final String timerId;\nprivate final String timerFamilyId;\nprivate final TimerSpec spec;\nprivate Instant target;\nprivate Instant outputTimestamp;\nprivate Instant elementInputTimestamp;\nprivate Duration period = Duration.ZERO;\nprivate Duration offset = Duration.ZERO;\npublic TimerInternalsTimer(\nBoundedWindow window,\nStateNamespace namespace,\nString timerId,\nTimerSpec spec,\nInstant elementInputTimestamp,\nTimerInternals timerInternals) {\nthis.window = window;\nthis.namespace = namespace;\nthis.timerId = timerId;\nthis.timerFamilyId = \"\";\nthis.spec = spec;\nthis.elementInputTimestamp = elementInputTimestamp;\nthis.timerInternals = timerInternals;\n}\npublic TimerInternalsTimer(\nBoundedWindow window,\nStateNamespace namespace,\nString timerId,\nString timerFamilyId,\nTimerSpec spec,\nInstant elementInputTimestamp,\nTimerInternals timerInternals) {\nthis.window = window;\nthis.namespace = namespace;\nthis.timerId = timerId;\nthis.timerFamilyId = timerFamilyId;\nthis.spec = spec;\nthis.elementInputTimestamp = elementInputTimestamp;\nthis.timerInternals = timerInternals;\n}\n@Override\npublic void set(Instant target) {\nthis.target = target;\nverifyAbsoluteTimeDomain();\nsetAndVerifyOutputTimestamp();\nsetUnderlyingTimer();\n}\n@Override\npublic void setRelative() {\nInstant now = getCurrentTime();\nif (period.equals(Duration.ZERO)) {\ntarget = now.plus(offset);\n} else {\nlong millisSinceStart = now.plus(offset).getMillis() % period.getMillis();\ntarget = millisSinceStart == 0 ? 
now : now.plus(period).minus(millisSinceStart);\n}\ntarget = minTargetAndGcTime(target);\nsetAndVerifyOutputTimestamp();\nsetUnderlyingTimer();\n}\n@Override\npublic Timer offset(Duration offset) {\nthis.offset = offset;\nreturn this;\n}\n@Override\npublic Timer align(Duration period) {\nthis.period = period;\nreturn this;\n}\n/**\n* For event time timers the target time should be prior to window GC time. So it returns\n* min(time to set, GC Time of window).\n*/\nprivate Instant minTargetAndGcTime(Instant target) {\nif (TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) {\nInstant windowExpiry = LateDataUtils.garbageCollectionTime(window, allowedLateness);\nif (target.isAfter(windowExpiry)) {\nreturn windowExpiry;\n}\n}\nreturn target;\n}\n@Override\npublic Timer withOutputTimestamp(Instant outputTimestamp) {\nthis.outputTimestamp = outputTimestamp;\nreturn this;\n}\n/** Verifies that the time domain of this timer is acceptable for absolute timers. */\nprivate void verifyAbsoluteTimeDomain() {\nif (!TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) {\nthrow new IllegalStateException(\n\"Can only set relative timers in processing time domain.\" + \" Use #setRelative()\");\n}\n}\n/**\n*\n*\n* <ul>\n*   Ensures that:\n*   <li>Users can't set {@code outputTimestamp} for processing time timers.\n*   <li>Event time timers' {@code outputTimestamp} is set before window expiration.\n* </ul>
\n*/\n/**\n* Sets the timer for the target time without checking anything about whether it is a reasonable\n* thing to do. For example, absolute processing time timers are not really sensible since the\n* user has no way to compute a good choice of time.\n*/\nprivate void setUnderlyingTimer() {\ntimerInternals.setTimer(\nnamespace, timerId, timerFamilyId, target, outputTimestamp, spec.getTimeDomain());\n}\nprivate Instant getCurrentTime() {\nswitch (spec.getTimeDomain()) {\ncase EVENT_TIME:\nreturn timerInternals.currentInputWatermarkTime();\ncase PROCESSING_TIME:\nreturn timerInternals.currentProcessingTime();\ncase SYNCHRONIZED_PROCESSING_TIME:\nreturn timerInternals.currentSynchronizedProcessingTime();\ndefault:\nthrow new IllegalStateException(\nString.format(\"Timer created for unknown time domain %s\", spec.getTimeDomain()));\n}\n}\n}", + "context_after": "class TimerInternalsTimer implements Timer {\nprivate final TimerInternals timerInternals;\nprivate final BoundedWindow window;\nprivate final StateNamespace namespace;\nprivate final String timerId;\nprivate final String timerFamilyId;\nprivate final TimerSpec spec;\nprivate Instant target;\nprivate Instant outputTimestamp;\nprivate final Instant elementInputTimestamp;\nprivate Duration period = Duration.ZERO;\nprivate Duration offset = Duration.ZERO;\npublic TimerInternalsTimer(\nBoundedWindow window,\nStateNamespace namespace,\nString timerId,\nTimerSpec spec,\nInstant elementInputTimestamp,\nTimerInternals timerInternals) {\nthis.window = window;\nthis.namespace = namespace;\nthis.timerId = timerId;\nthis.timerFamilyId = \"\";\nthis.spec = spec;\nthis.elementInputTimestamp = elementInputTimestamp;\nthis.timerInternals = timerInternals;\n}\npublic TimerInternalsTimer(\nBoundedWindow window,\nStateNamespace namespace,\nString timerId,\nString timerFamilyId,\nTimerSpec spec,\nInstant elementInputTimestamp,\nTimerInternals timerInternals) {\nthis.window = window;\nthis.namespace = namespace;\nthis.timerId = timerId;\nthis.timerFamilyId = timerFamilyId;\nthis.spec = spec;\nthis.elementInputTimestamp = elementInputTimestamp;\nthis.timerInternals = timerInternals;\n}\n@Override\npublic void set(Instant target) {\nthis.target = target;\nverifyAbsoluteTimeDomain();\nsetAndVerifyOutputTimestamp();\nsetUnderlyingTimer();\n}\n@Override\npublic void setRelative() {\nInstant now = getCurrentTime();\nif (period.equals(Duration.ZERO)) {\ntarget = now.plus(offset);\n} else {\nlong millisSinceStart = now.plus(offset).getMillis() % period.getMillis();\ntarget = millisSinceStart == 0 ? now : now.plus(period).minus(millisSinceStart);\n}\ntarget = minTargetAndGcTime(target);\nsetAndVerifyOutputTimestamp();\nsetUnderlyingTimer();\n}\n@Override\npublic Timer offset(Duration offset) {\nthis.offset = offset;\nreturn this;\n}\n@Override\npublic Timer align(Duration period) {\nthis.period = period;\nreturn this;\n}\n/**\n* For event time timers the target time should be prior to window GC time. So it return\n* min(time to set, GC Time of window).\n*/\nprivate Instant minTargetAndGcTime(Instant target) {\nif (TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) {\nInstant windowExpiry = LateDataUtils.garbageCollectionTime(window, allowedLateness);\nif (target.isAfter(windowExpiry)) {\nreturn windowExpiry;\n}\n}\nreturn target;\n}\n@Override\npublic Timer withOutputTimestamp(Instant outputTimestamp) {\nthis.outputTimestamp = outputTimestamp;\nreturn this;\n}\n/** Verifies that the time domain of this timer is acceptable for absolute timers. 
*/\nprivate void verifyAbsoluteTimeDomain() {\nif (!TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) {\nthrow new IllegalStateException(\n\"Can only set relative timers in processing time domain.\" + \" Use #setRelative()\");\n}\n}\n/**\n*\n*\n* <ul>\n*   Ensures that:\n*   <li>Users can't set {@code outputTimestamp} for processing time timers.\n*   <li>Event time timers' {@code outputTimestamp} is set before window expiration.\n* </ul>
\n*/\n/**\n* Sets the timer for the target time without checking anything about whether it is a reasonable\n* thing to do. For example, absolute processing time timers are not really sensible since the\n* user has no way to compute a good choice of time.\n*/\nprivate void setUnderlyingTimer() {\ntimerInternals.setTimer(\nnamespace, timerId, timerFamilyId, target, outputTimestamp, spec.getTimeDomain());\n}\nprivate Instant getCurrentTime() {\nswitch (spec.getTimeDomain()) {\ncase EVENT_TIME:\nreturn timerInternals.currentInputWatermarkTime();\ncase PROCESSING_TIME:\nreturn timerInternals.currentProcessingTime();\ncase SYNCHRONIZED_PROCESSING_TIME:\nreturn timerInternals.currentSynchronizedProcessingTime();\ndefault:\nthrow new IllegalStateException(\nString.format(\"Timer created for unknown time domain %s\", spec.getTimeDomain()));\n}\n}\n}" + }, + { + "comment": "This should either be removed or logged using the `log`.", + "method_body": "private void sendData(Map.Entry entry, String session) throws IOException {\nHttpURLConnection connection;\nSystem.out.println(\"Sending \" + entry.getKey());\nconnection = (HttpURLConnection) new URL(url + \"/\" + entry.getKey()).openConnection();\nconnection.setRequestMethod(\"PUT\");\nconnection.setDoOutput(true);\nconnection.addRequestProperty(HttpHeaders.CONTENT_TYPE.toString(), RemoteSyncHandler.APPLICATION_QUARKUS);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_PASSWORD, password);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_SESSION, session);\nconnection.getOutputStream().write(entry.getValue());\nconnection.getOutputStream().close();\nIoUtil.readBytes(connection.getInputStream());\n}", + "target_code": "System.out.println(\"Sending \" + entry.getKey());", + "method_body_after": "private void sendData(Map.Entry entry, String session) throws IOException {\nHttpURLConnection connection;\nlog.info(\"Sending \" + entry.getKey());\nconnection = (HttpURLConnection) new URL(url + \"/\" + entry.getKey()).openConnection();\nconnection.setRequestMethod(\"PUT\");\nconnection.setDoOutput(true);\nconnection.addRequestProperty(HttpHeaders.CONTENT_TYPE.toString(), RemoteSyncHandler.APPLICATION_QUARKUS);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_PASSWORD, password);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_SESSION, session);\nconnection.getOutputStream().write(entry.getValue());\nconnection.getOutputStream().close();\nIoUtil.readBytes(connection.getInputStream());\n}", + "context_before": "class HttpRemoteDevClient implements RemoteDevClient {\nprivate final Logger log = Logger.getLogger(HttpRemoteDevClient.class);\nprivate final String url;\nprivate final String password;\npublic HttpRemoteDevClient(String url, String password) {\nthis.url = url.endsWith(\"/\") ? 
url.substring(0, url.length() - 1) : url;\nthis.password = password;\n}\n@Override\npublic Closeable sendConnectRequest(RemoteDevState initialState,\nFunction, Map> initialConnectFunction, Supplier changeRequestFunction) {\ntry {\nreturn new Session(initialState, initialConnectFunction, changeRequestFunction);\n} catch (IOException e) {\nthrow new RuntimeException(e);\n}\n}\nprivate String doConnect(RemoteDevState initialState, Function, Map> initialConnectFunction)\nthrows IOException {\nHttpURLConnection connection = (HttpURLConnection) new URL(url + RemoteSyncHandler.CONNECT).openConnection();\nconnection.addRequestProperty(HttpHeaders.CONTENT_TYPE.toString(), RemoteSyncHandler.APPLICATION_QUARKUS);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_PASSWORD, password);\nconnection.setDoOutput(true);\nObjectOutputStream out = new ObjectOutputStream(connection.getOutputStream());\nout.writeObject(initialState);\nout.close();\nString session = connection.getHeaderField(RemoteSyncHandler.QUARKUS_SESSION);\nif (session == null) {\nthrow new IOException(\"Server did not start a remote dev session\");\n}\nString result = new String(IoUtil.readBytes(connection.getInputStream()), StandardCharsets.UTF_8);\nSet changed = new HashSet<>();\nchanged.addAll(Arrays.asList(result.split(\";\")));\nMap data = new LinkedHashMap<>(initialConnectFunction.apply(changed));\nbyte[] lastFile = data.remove(QuarkusEntryPoint.QUARKUS_APPLICATION_DAT);\nif (lastFile != null) {\ndata.put(QuarkusEntryPoint.QUARKUS_APPLICATION_DAT, lastFile);\n}\nfor (Map.Entry entry : data.entrySet()) {\nsendData(entry, session);\n}\nif (lastFile != null) {\nsession = waitForRestart(initialState, initialConnectFunction);\n} else {\nlog.info(\"Connected to remote server\");\n}\nreturn session;\n}\nprivate String waitForRestart(RemoteDevState initialState,\nFunction, Map> initialConnectFunction) {\nlong timeout = System.currentTimeMillis() + 30000;\ntry {\nThread.sleep(1000);\n} catch (InterruptedException e) {\n}\nwhile (System.currentTimeMillis() < timeout) {\ntry {\nHttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();\nIoUtil.readBytes(connection.getInputStream());\nreturn doConnect(initialState, initialConnectFunction);\n} catch (IOException e) {\n}\n}\nthrow new RuntimeException(\"Could not connect to remote side after restart\");\n}\nprivate class Session implements Closeable, Runnable {\nprivate String sessionId = null;\nprivate final RemoteDevState initialState;\nprivate final Function, Map> initialConnectFunction;\nprivate final Supplier changeRequestFunction;\nprivate volatile boolean closed;\nprivate final Thread httpThread;\nprivate final URL url;\nint errorCount;\nprivate Session(RemoteDevState initialState,\nFunction, Map> initialConnectFunction, Supplier changeRequestFunction)\nthrows MalformedURLException {\nthis.initialState = initialState;\nthis.initialConnectFunction = initialConnectFunction;\nthis.changeRequestFunction = changeRequestFunction;\nurl = new URL(HttpRemoteDevClient.this.url + RemoteSyncHandler.DEV);\nhttpThread = new Thread(this, \"Remote dev client thread\");\nhttpThread.start();\n}\n@Override\npublic void close() throws IOException {\nclosed = true;\nhttpThread.interrupt();\n}\n@Override\npublic void run() {\nThrowable problem = null;\nwhile (!closed) {\nHttpURLConnection connection = null;\ntry {\nif (sessionId == null) {\nsessionId = doConnect(initialState, initialConnectFunction);\n}\nconnection = (HttpURLConnection) 
url.openConnection();\nconnection.setRequestMethod(\"POST\");\nconnection.addRequestProperty(HttpHeaders.CONTENT_TYPE.toString(), RemoteSyncHandler.APPLICATION_QUARKUS);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_PASSWORD, password);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_SESSION, sessionId);\nconnection.setDoOutput(true);\ntry (ObjectOutputStream out = new ObjectOutputStream(connection.getOutputStream())) {\nout.writeObject(problem);\n}\nIoUtil.readBytes(connection.getInputStream());\nint status = connection.getResponseCode();\nif (status == 200) {\nSyncResult sync = changeRequestFunction.get();\nproblem = sync.getProblem();\nfor (Map.Entry entry : sync.getChangedFiles().entrySet()) {\nsendData(entry, sessionId);\n}\nfor (String file : sync.getRemovedFiles()) {\nSystem.out.println(\"deleting \" + file);\nconnection = (HttpURLConnection) new URL(url + \"/\" + file).openConnection();\nconnection.setRequestMethod(\"DELETE\");\nconnection.addRequestProperty(HttpHeaders.CONTENT_TYPE.toString(),\nRemoteSyncHandler.APPLICATION_QUARKUS);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_PASSWORD, password);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_SESSION, sessionId);\nconnection.getOutputStream().close();\nIoUtil.readBytes(connection.getInputStream());\n}\n} else if (status == 203) {\nsessionId = doConnect(initialState, initialConnectFunction);\n}\nerrorCount = 0;\n} catch (Throwable e) {\nerrorCount++;\nlog.error(\"Remote dev request failed\", e);\nif (errorCount == 10) {\nlog.error(\"Connection failed after 10 retries, exiting\");\nreturn;\n}\ntry {\nThread.sleep(2000);\n} catch (InterruptedException ex) {\n}\n}\n}\n}\n}\n}", + "context_after": "class HttpRemoteDevClient implements RemoteDevClient {\nprivate final Logger log = Logger.getLogger(HttpRemoteDevClient.class);\nprivate final String url;\nprivate final String password;\npublic HttpRemoteDevClient(String url, String password) {\nthis.url = url.endsWith(\"/\") ? 
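The fix in this entry is simply to route diagnostics through the class's JBoss `Logger` rather than `System.out`, so remote-dev sync messages respect the application's log configuration. A minimal usage sketch (the class name is illustrative):

```java
import org.jboss.logging.Logger;

// Illustrative: prefer the class logger over System.out so remote-dev sync
// messages pick up the application's log levels, handlers, and formatting.
class SyncReporter {

    private static final Logger log = Logger.getLogger(SyncReporter.class);

    void report(String fileName) {
        // instead of: System.out.println("Sending " + fileName);
        log.infof("Sending %s", fileName);
    }
}
```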
url.substring(0, url.length() - 1) : url;\nthis.password = password;\n}\n@Override\npublic Closeable sendConnectRequest(RemoteDevState initialState,\nFunction, Map> initialConnectFunction, Supplier changeRequestFunction) {\ntry {\nreturn new Session(initialState, initialConnectFunction, changeRequestFunction);\n} catch (IOException e) {\nthrow new RuntimeException(e);\n}\n}\nprivate String doConnect(RemoteDevState initialState, Function, Map> initialConnectFunction)\nthrows IOException {\nHttpURLConnection connection = (HttpURLConnection) new URL(url + RemoteSyncHandler.CONNECT).openConnection();\nconnection.addRequestProperty(HttpHeaders.CONTENT_TYPE.toString(), RemoteSyncHandler.APPLICATION_QUARKUS);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_PASSWORD, password);\nconnection.setDoOutput(true);\nObjectOutputStream out = new ObjectOutputStream(connection.getOutputStream());\nout.writeObject(initialState);\nout.close();\nString session = connection.getHeaderField(RemoteSyncHandler.QUARKUS_SESSION);\nif (session == null) {\nthrow new IOException(\"Server did not start a remote dev session\");\n}\nString result = new String(IoUtil.readBytes(connection.getInputStream()), StandardCharsets.UTF_8);\nSet changed = new HashSet<>();\nchanged.addAll(Arrays.asList(result.split(\";\")));\nMap data = new LinkedHashMap<>(initialConnectFunction.apply(changed));\nbyte[] lastFile = data.remove(QuarkusEntryPoint.QUARKUS_APPLICATION_DAT);\nif (lastFile != null) {\ndata.put(QuarkusEntryPoint.QUARKUS_APPLICATION_DAT, lastFile);\n}\nfor (Map.Entry entry : data.entrySet()) {\nsendData(entry, session);\n}\nif (lastFile != null) {\nsession = waitForRestart(initialState, initialConnectFunction);\n} else {\nlog.info(\"Connected to remote server\");\n}\nreturn session;\n}\nprivate String waitForRestart(RemoteDevState initialState,\nFunction, Map> initialConnectFunction) {\nlong timeout = System.currentTimeMillis() + 30000;\ntry {\nThread.sleep(1000);\n} catch (InterruptedException e) {\n}\nwhile (System.currentTimeMillis() < timeout) {\ntry {\nHttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();\nIoUtil.readBytes(connection.getInputStream());\nreturn doConnect(initialState, initialConnectFunction);\n} catch (IOException e) {\n}\n}\nthrow new RuntimeException(\"Could not connect to remote side after restart\");\n}\nprivate class Session implements Closeable, Runnable {\nprivate String sessionId = null;\nprivate final RemoteDevState initialState;\nprivate final Function, Map> initialConnectFunction;\nprivate final Supplier changeRequestFunction;\nprivate volatile boolean closed;\nprivate final Thread httpThread;\nprivate final URL url;\nint errorCount;\nprivate Session(RemoteDevState initialState,\nFunction, Map> initialConnectFunction, Supplier changeRequestFunction)\nthrows MalformedURLException {\nthis.initialState = initialState;\nthis.initialConnectFunction = initialConnectFunction;\nthis.changeRequestFunction = changeRequestFunction;\nurl = new URL(HttpRemoteDevClient.this.url + RemoteSyncHandler.DEV);\nhttpThread = new Thread(this, \"Remote dev client thread\");\nhttpThread.start();\n}\n@Override\npublic void close() throws IOException {\nclosed = true;\nhttpThread.interrupt();\n}\n@Override\npublic void run() {\nThrowable problem = null;\nwhile (!closed) {\nHttpURLConnection connection = null;\ntry {\nif (sessionId == null) {\nsessionId = doConnect(initialState, initialConnectFunction);\n}\nconnection = (HttpURLConnection) 
url.openConnection();\nconnection.setRequestMethod(\"POST\");\nconnection.addRequestProperty(HttpHeaders.CONTENT_TYPE.toString(), RemoteSyncHandler.APPLICATION_QUARKUS);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_PASSWORD, password);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_SESSION, sessionId);\nconnection.setDoOutput(true);\ntry (ObjectOutputStream out = new ObjectOutputStream(connection.getOutputStream())) {\nout.writeObject(problem);\n}\nIoUtil.readBytes(connection.getInputStream());\nint status = connection.getResponseCode();\nif (status == 200) {\nSyncResult sync = changeRequestFunction.get();\nproblem = sync.getProblem();\nfor (Map.Entry entry : sync.getChangedFiles().entrySet()) {\nsendData(entry, sessionId);\n}\nfor (String file : sync.getRemovedFiles()) {\nlog.info(\"deleting \" + file);\nconnection = (HttpURLConnection) new URL(url + \"/\" + file).openConnection();\nconnection.setRequestMethod(\"DELETE\");\nconnection.addRequestProperty(HttpHeaders.CONTENT_TYPE.toString(),\nRemoteSyncHandler.APPLICATION_QUARKUS);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_PASSWORD, password);\nconnection.addRequestProperty(RemoteSyncHandler.QUARKUS_SESSION, sessionId);\nconnection.getOutputStream().close();\nIoUtil.readBytes(connection.getInputStream());\n}\n} else if (status == 203) {\nsessionId = doConnect(initialState, initialConnectFunction);\n}\nerrorCount = 0;\n} catch (Throwable e) {\nerrorCount++;\nlog.error(\"Remote dev request failed\", e);\nif (errorCount == 10) {\nlog.error(\"Connection failed after 10 retries, exiting\");\nreturn;\n}\ntry {\nThread.sleep(2000);\n} catch (InterruptedException ex) {\n}\n}\n}\n}\n}\n}" + }, + { + "comment": "I tried to put test removals into their own commits to be able to give the reasoning in the commit message. 
For this specific test it is: ``` [test] Removes testStopWhileHavingLeadership: This test case is covered by - testDelayedGrantCallAfterContenderRegistration which checks delayed grant event processing (the first part of the test granting leadership) - testOnRevokeLeadershipIsTriggeredAfterLeaderElectionBeingStop which checks that the leadership is revoked in the contender after the close is called (the second part of the test revoking the leadership in the contender) ```", + "method_body": "void testDriverShutdownFailsWithContenderStillBeingRegistered() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() ->\nassertThatThrownBy(leaderElectionService::close)\n.as(\n\"The LeaderContender needs to be deregistered before closing the driver.\")\n.isInstanceOf(IllegalStateException.class));\n}\n};\n}", + "target_code": "};", + "method_body_after": "void testDriverShutdownFailsWithContenderStillBeingRegistered() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() ->\nassertThatThrownBy(leaderElectionService::close)\n.as(\n\"The LeaderContender needs to be deregistered before closing the driver.\")\n.isInstanceOf(IllegalStateException.class));\n}\n};\n}", + "context_before": "class DefaultLeaderElectionServiceTest {\nprivate static final String TEST_URL = \"akka\n@Test\nvoid testOnGrantAndRevokeLeadership() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>(new LeaderInformationRegister());\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal UUID leaderSessionID = UUID.randomUUID();\ntestingLeaderElectionDriver.grantLeadership(leaderSessionID);\ntestingContender.waitForLeader();\nassertThat(testingContender.getDescription()).isEqualTo(TEST_URL);\nassertThat(testingContender.getLeaderSessionID())\n.isEqualTo(\nleaderElectionService.getLeaderSessionID(contenderID))\n.isEqualTo(leaderSessionID);\nfinal LeaderInformation expectedLeaderInformationInHaBackend =\nLeaderInformation.known(\nleaderElectionService.getLeaderSessionID(contenderID),\nTEST_URL);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\n\"The HA backend should have its leader information updated.\")\n.hasValue(expectedLeaderInformationInHaBackend);\ntestingLeaderElectionDriver.revokeLeadership();\ntestingContender.waitForRevokeLeader();\nassertThat(testingContender.getLeaderSessionID()).isNull();\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.isNull();\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\n\"External storage is not touched by the leader session because the leadership is already lost.\")\n.hasValue(expectedLeaderInformationInHaBackend);\n});\n}\n};\n}\n/**\n* Tests that we can shut down the DefaultLeaderElectionService if the used LeaderElectionDriver\n* holds an internal lock. 
See FLINK-20008 for more details.\n*/\n@Test\nvoid testCloseGrantDeadlock() throws Exception {\nfinal OneShotLatch closeReachedLatch = new OneShotLatch();\nfinal OneShotLatch closeContinueLatch = new OneShotLatch();\nfinal OneShotLatch grantReachedLatch = new OneShotLatch();\nfinal OneShotLatch grantContinueLatch = new OneShotLatch();\nfinal CompletableFuture driverCloseTriggered = new CompletableFuture<>();\nfinal TestingLeaderElectionDriver driver =\nTestingLeaderElectionDriver.newBuilder()\n.setGrantConsumer(\n(lock, listener, leaderSessionID) -> {\ntry {\nlock.lock();\ngrantReachedLatch.trigger();\ngrantContinueLatch.awaitQuietly();\nlistener.ifPresent(l -> l.isLeader(leaderSessionID));\n} finally {\nlock.unlock();\n}\n})\n.setCloseConsumer(\nlock -> {\ncloseReachedLatch.trigger();\ncloseContinueLatch.await();\ntry {\nlock.lock();\ndriverCloseTriggered.complete(null);\n} finally {\nlock.unlock();\n}\n})\n.build();\nfinal ManuallyTriggeredScheduledExecutorService executorService =\nnew ManuallyTriggeredScheduledExecutorService();\nfinal DefaultLeaderElectionService testInstance =\nnew DefaultLeaderElectionService(\nnew TestingLeaderElectionDriver.Factory(driver), executorService);\ntestInstance.startLeaderElectionBackend();\nfinal Thread closeThread =\nnew Thread(\n() -> {\ntry {\ntestInstance.close();\n} catch (Exception e) {\nthrow new RuntimeException(e);\n}\n},\n\"CloseThread\");\ncloseThread.start();\ncloseReachedLatch.await();\nfinal Thread grantThread = new Thread(driver::grantLeadership, \"GrantThread\");\ngrantThread.start();\ngrantReachedLatch.await();\ngrantContinueLatch.trigger();\ncloseContinueLatch.trigger();\ncloseThread.join();\ngrantThread.join();\nFlinkAssertions.assertThatFuture(driverCloseTriggered).eventuallySucceeds();\n}\n@Test\nvoid testGrantCallWhileInstantiatingDriver() throws Exception {\nfinal UUID expectedLeaderSessionID = UUID.randomUUID();\ntry (final DefaultLeaderElectionService testInstance =\nnew DefaultLeaderElectionService(\n(eventHandler, errorHandler) -> {\neventHandler.isLeader(expectedLeaderSessionID);\nreturn TestingLeaderElectionDriver.newNoOpBuilder().build();\n},\nExecutors.newDirectExecutorService())) {\ntestInstance.startLeaderElectionBackend();\nfinal LeaderElection leaderElection =\ntestInstance.createLeaderElection(createRandomContenderID());\nfinal TestingContender testingContender =\nnew TestingContender(\"unused-address\", leaderElection);\ntestingContender.startLeaderElection();\nassertThat(testingContender.getLeaderSessionID()).isEqualTo(expectedLeaderSessionID);\nleaderElection.close();\n}\n}\n@Test\nvoid testDelayedGrantCallAfterContenderRegistration() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\nleaderElection.close();\nfinal UUID expectedSessionID = UUID.randomUUID();\ntestingLeaderElectionDriver.grantLeadership(expectedSessionID);\ntry (LeaderElection anotherLeaderElection =\nleaderElectionService.createLeaderElection(contenderID)) {\nfinal TestingContender testingContender =\nnew TestingContender(TEST_URL, anotherLeaderElection);\ntestingContender.startLeaderElection();\nassertThat(testingContender.getLeaderSessionID())\n.as(\n\"Leadership grant was not forwarded to the contender, yet.\")\n.isNull();\nexecutorService.trigger();\nassertThat(testingContender.getLeaderSessionID())\n.as(\n\"Leadership grant is actually forwarded to the service.\")\n.isEqualTo(expectedSessionID);\ntestingContender.waitForLeader();\n}\n});\n}\n};\n}\n@Test\nvoid 
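`testCloseGrantDeadlock` above steers two threads into a precise interleaving with reached/continue latch pairs. The handshake is a generic pattern; here is a stripped-down sketch using plain JDK `CountDownLatch`es instead of Flink's `OneShotLatch`:

```java
import java.util.concurrent.CountDownLatch;

// Stripped-down version of the reached/continue latch handshake used to force
// two threads into a specific interleaving: each thread signals that it reached
// the critical point, then waits for the test to let it continue.
public class LatchHandshakeDemo {

    public static void main(String[] args) throws InterruptedException {
        CountDownLatch closeReached = new CountDownLatch(1);
        CountDownLatch closeContinue = new CountDownLatch(1);

        Thread closeThread = new Thread(() -> {
            closeReached.countDown();      // "I am about to take the lock"
            try {
                closeContinue.await();     // the test decides when close() may proceed
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            System.out.println("close() proceeding");
        }, "CloseThread");

        closeThread.start();
        closeReached.await();              // deterministic: close() has reached the gate
        closeContinue.countDown();         // now let it continue
        closeThread.join();
    }
}
```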
testDelayedGrantCallAfterContenderBeingDeregisteredAgain() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\nleaderElection.close();\ntestingLeaderElectionDriver.grantLeadership();\nexecutorService.trigger();\nleaderElection =\nleaderElectionService.createLeaderElection(contenderID);\nfinal TestingContender contender =\nnew TestingContender(\"unused-address\", leaderElection);\ncontender.startLeaderElection();\nleaderElection.close();\nexecutorService.trigger();\n});\n}\n};\n}\n@Test\nvoid testContenderRegistrationWithoutDriverBeingInstantiatedFails() throws Exception {\ntry (final DefaultLeaderElectionService leaderElectionService =\nnew DefaultLeaderElectionService(\nTestingLeaderElectionDriver.Factory.createFactoryWithNoOpDriver())) {\nfinal LeaderElection leaderElection =\nleaderElectionService.createLeaderElection(createRandomContenderID());\nassertThatThrownBy(\n() ->\nnew TestingContender(\"unused-address\", leaderElection)\n.startLeaderElection())\n.isInstanceOf(IllegalStateException.class);\nleaderElectionService.startLeaderElectionBackend();\n}\n}\n@Test\n@Test\nvoid testProperCleanupOnLeaderElectionCloseWhenHoldingTheLeadership() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>();\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal UUID leaderSessionID = UUID.randomUUID();\ntestingLeaderElectionDriver.grantLeadership(leaderSessionID);\nassertThat(testingContender.getLeaderSessionID()).isNotNull();\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.isEqualTo(leaderSessionID);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.hasValue(LeaderInformation.known(leaderSessionID, TEST_URL));\nleaderElection.close();\nassertThat(testingContender.getLeaderSessionID())\n.as(\n\"The LeaderContender should have been informed about the leadership loss.\")\n.isNull();\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.as(\n\"The LeaderElectionService should have its internal state cleaned.\")\n.isNull();\nassertThat(storedLeaderInformation.get().getRegisteredContenderIDs())\n.as(\"The HA backend's data should have been cleaned.\")\n.isEmpty();\n});\n}\n};\n}\n@Test\nvoid testLeaderInformationChangedAndShouldBeCorrected() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>();\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal UUID leaderSessionID = UUID.randomUUID();\ntestingLeaderElectionDriver.grantLeadership(leaderSessionID);\nfinal LeaderInformation expectedLeaderInformation =\nLeaderInformation.known(leaderSessionID, TEST_URL);\nstoredLeaderInformation.set(new LeaderInformationRegister());\ntestingLeaderElectionDriver.triggerLeaderInformationChangeEvent(\ncontenderID, LeaderInformation.empty());\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\"Removed leader information should have been reset.\")\n.hasValue(expectedLeaderInformation);\nfinal LeaderInformation faultyLeaderInformation =\nLeaderInformation.known(UUID.randomUUID(), \"faulty-address\");\nstoredLeaderInformation.set(\nLeaderInformationRegister.of(\ncontenderID, faultyLeaderInformation));\ntestingLeaderElectionDriver.triggerLeaderInformationChangeEvent(\ncontenderID, faultyLeaderInformation);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\"Overwritten leader information should have been 
reset.\")\n.hasValue(expectedLeaderInformation);\n});\n}\n};\n}\n@Test\nvoid testHasLeadershipWithLeadershipButNoGrantEventProcessed() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\nfinal UUID expectedSessionID = UUID.randomUUID();\ntestingLeaderElectionDriver.grantLeadership(expectedSessionID);\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, expectedSessionID))\n.isFalse();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, UUID.randomUUID()))\n.isFalse();\n});\n}\n};\n}\n@Test\nvoid testHasLeadershipWithLeadershipAndGrantEventProcessed() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\nfinal UUID expectedSessionID = UUID.randomUUID();\ntestingLeaderElectionDriver.grantLeadership(expectedSessionID);\nassertThat(testingContender.getLeaderSessionID()).isNull();\nexecutorService.trigger();\nassertThat(testingContender.getLeaderSessionID())\n.isEqualTo(expectedSessionID);\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, expectedSessionID))\n.isTrue();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, UUID.randomUUID()))\n.isFalse();\n});\n}\n};\n}\n@Test\nvoid testHasLeadershipWithLeadershipLostButNoRevokeEventProcessed() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\ntestingLeaderElectionDriver.grantLeadership();\nexecutorService.trigger();\nfinal UUID expectedSessionID = testingContender.getLeaderSessionID();\ntestingLeaderElectionDriver.revokeLeadership();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, expectedSessionID))\n.as(\n\"No operation should be handled anymore after the HA backend \"\n+ \"indicated leadership loss even if the onRevokeLeadership wasn't \"\n+ \"processed, yet, because some other process could have picked up \"\n+ \"the leadership in the meantime already based on the HA \"\n+ \"backend's decision.\")\n.isFalse();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, UUID.randomUUID()))\n.isFalse();\n});\n}\n};\n}\n@Test\nvoid testHasLeadershipWithLeadershipLostAndRevokeEventProcessed() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\ntestingLeaderElectionDriver.grantLeadership();\nexecutorService.trigger();\nfinal UUID expectedSessionID = testingContender.getLeaderSessionID();\ntestingLeaderElectionDriver.revokeLeadership();\nexecutorService.trigger();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, expectedSessionID))\n.isFalse();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, UUID.randomUUID()))\n.isFalse();\n});\n}\n};\n}\n@Test\nvoid testHasLeadershipAfterLeaderElectionClose() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\ntestingLeaderElectionDriver.grantLeadership();\nexecutorService.trigger();\nfinal UUID expectedSessionID = testingContender.getLeaderSessionID();\nleaderElection.close();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, expectedSessionID))\n.isFalse();\n});\n}\n};\n}\n@Test\nvoid testLeaderInformationChangedIfNotBeingLeader() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>();\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal LeaderInformation differentLeaderInformation =\nLeaderInformation.known(UUID.randomUUID(), 
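The `hasLeadership` tests in this entry all hinge on one rule: leadership only counts once the grant event has actually been processed, and only while the backend still agrees. A toy model of that contract, with invented names and the whole service reduced to a single field — not Flink code:

```java
import java.util.UUID;

// Toy model of the hasLeadership contract exercised above: a session id only
// grants leadership after the grant event was processed and before a revoke
// was observed from the HA backend.
public class LeadershipStateSketch {
    private UUID confirmedSession; // null until the grant event is processed

    void onGrantProcessed(UUID sessionId) { confirmedSession = sessionId; }
    void onRevokeObserved() { confirmedSession = null; }

    boolean hasLeadership(UUID sessionId) {
        return sessionId != null && sessionId.equals(confirmedSession);
    }

    public static void main(String[] args) {
        LeadershipStateSketch state = new LeadershipStateSketch();
        UUID granted = UUID.randomUUID();

        // granted by the backend, but the event is still queued -> not leader
        check(!state.hasLeadership(granted));
        state.onGrantProcessed(granted);          // executorService.trigger()
        check(state.hasLeadership(granted));
        check(!state.hasLeadership(UUID.randomUUID())); // wrong session id
        state.onRevokeObserved();                 // backend revoked leadership
        check(!state.hasLeadership(granted));
    }

    private static void check(boolean condition) {
        if (!condition) throw new AssertionError();
    }
}
```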
\"different-address\");\nstoredLeaderInformation.set(\nLeaderInformationRegister.of(\ncontenderID, differentLeaderInformation));\ntestingLeaderElectionDriver.triggerLeaderInformationChangeEvent(\ncontenderID, differentLeaderInformation);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\"The external storage shouldn't have been changed.\")\n.hasValue(differentLeaderInformation);\n});\n}\n};\n}\n@Test\nvoid testOnGrantLeadershipIsIgnoredAfterLeaderElectionBeingStop() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nleaderElection.close();\ntestingLeaderElectionDriver.grantLeadership();\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.as(\n\"The grant event shouldn't have been processed by the LeaderElectionService.\")\n.isNull();\nassertThat(testingContender.getLeaderSessionID())\n.as(\n\"The grant event shouldn't have been forwarded to the contender.\")\n.isNull();\n});\n}\n};\n}\n@Test\nvoid testOnLeaderInformationChangeIsIgnoredAfterLeaderElectionBeingStop() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>();\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\ntestingLeaderElectionDriver.grantLeadership();\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.isPresent();\nleaderElection.close();\nstoredLeaderInformation.set(new LeaderInformationRegister());\ntestingLeaderElectionDriver.triggerLeaderInformationChangeEvent(\ncontenderID, LeaderInformation.empty());\nassertThat(storedLeaderInformation.get().getRegisteredContenderIDs())\n.as(\"The external storage shouldn't have been corrected.\")\n.isEmpty();\n});\n}\n};\n}\n@Test\nvoid testOnRevokeLeadershipIsTriggeredAfterLeaderElectionBeingStop() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\ntestingLeaderElectionDriver.grantLeadership();\nfinal UUID oldSessionId =\nleaderElectionService.getLeaderSessionID(contenderID);\nassertThat(testingContender.getLeaderSessionID())\n.isEqualTo(oldSessionId);\nleaderElection.close();\nassertThat(testingContender.getLeaderSessionID())\n.as(\n\"LeaderContender should have been revoked as part of the stop call.\")\n.isNull();\n});\n}\n};\n}\n@Test\nvoid testOldConfirmLeaderInformationWhileHavingNewLeadership() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>();\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\ntestingLeaderElectionDriver.grantLeadership();\nfinal UUID currentLeaderSessionId =\nleaderElectionService.getLeaderSessionID(contenderID);\nassertThat(currentLeaderSessionId).isNotNull();\nfinal LeaderInformation expectedLeaderInformation =\nLeaderInformation.known(currentLeaderSessionId, TEST_URL);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.hasValue(expectedLeaderInformation);\nleaderElection.confirmLeadership(UUID.randomUUID(), TEST_URL);\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.isEqualTo(currentLeaderSessionId);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\n\"The leader information in the external storage shouldn't have been updated.\")\n.hasValue(expectedLeaderInformation);\n});\n}\n};\n}\n@Test\nvoid testOldConfirmationWhileHavingLeadershipLost() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\ntestingLeaderElectionDriver.grantLeadership();\nfinal UUID 
currentLeaderSessionId =\nleaderElectionService.getLeaderSessionID(contenderID);\nassertThat(currentLeaderSessionId).isNotNull();\ntestingLeaderElectionDriver.revokeLeadership();\nleaderElection.confirmLeadership(currentLeaderSessionId, TEST_URL);\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.isNull();\n});\n}\n};\n}\n@Test\nvoid testErrorForwarding() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal Exception testException = new Exception(\"test leader exception\");\ntestingLeaderElectionDriver.triggerErrorHandling(testException);\nassertThat(testingContender.getError())\n.isNotNull()\n.hasCause(testException);\ntestingContender.clearError();\n});\n}\n};\n}\n@Test\nvoid testErrorIsIgnoredAfterLeaderElectionBeingClosed() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal Exception testException = new Exception(\"test leader exception\");\nleaderElection.close();\ntestingLeaderElectionDriver.triggerErrorHandling(testException);\nassertThat(testingContender.getError()).isNull();\n});\n}\n};\n}\n@Test\nvoid testOnLeadershipChangeDoesNotBlock() throws Exception {\nfinal CompletableFuture initialLeaderInformation =\nnew CompletableFuture<>();\nfinal OneShotLatch latch = new OneShotLatch();\nfinal TestingLeaderElectionDriver driver =\nTestingLeaderElectionDriver.newBuilder()\n.setPublishLeaderInformationConsumer(\n(lock, contenderID, leaderInformation) -> {\ntry {\nlock.lock();\nif (!initialLeaderInformation.isDone()) {\ninitialLeaderInformation.complete(leaderInformation);\n} else {\nlatch.awaitQuietly();\n}\n} finally {\nlock.unlock();\n}\n})\n.build();\nfinal DefaultLeaderElectionService testInstance =\nnew DefaultLeaderElectionService(new TestingLeaderElectionDriver.Factory(driver));\ntestInstance.startLeaderElectionBackend();\nfinal String contenderID = \"contender-id\";\nfinal String address = \"leader-address\";\nfinal LeaderElection leaderElection = testInstance.createLeaderElection(contenderID);\nleaderElection.startLeaderElection(\nTestingGenericLeaderContender.newBuilder()\n.setGrantLeadershipConsumer(\nsessionID ->\ntestInstance.confirmLeadership(\ncontenderID, sessionID, address))\n.build());\nfinal UUID sessionID = UUID.randomUUID();\ndriver.grantLeadership(sessionID);\nFlinkAssertions.assertThatFuture(initialLeaderInformation)\n.eventuallySucceeds()\n.as(\"The LeaderInformation should have been forwarded to the driver.\")\n.isEqualTo(LeaderInformation.known(sessionID, address));\ntestInstance.onLeaderInformationChange(LeaderInformation.empty());\nlatch.trigger();\nleaderElection.close();\ntestInstance.close();\n}\n@Test\nvoid testOnGrantLeadershipAsyncDoesNotBlock() throws Exception {\ntestNonBlockingCall(\nlatch ->\nTestingGenericLeaderContender.newBuilder()\n.setGrantLeadershipConsumer(\nignoredSessionID -> latch.awaitQuietly())\n.build(),\nTestingLeaderElectionDriver::grantLeadership);\n}\n@Test\nvoid testOnRevokeLeadershipDoesNotBlock() throws Exception {\ntestNonBlockingCall(\nlatch ->\nTestingGenericLeaderContender.newBuilder()\n.setRevokeLeadershipRunnable(latch::awaitQuietly)\n.build(),\ndriver -> {\ndriver.grantLeadership();\ndriver.revokeLeadership();\n});\n}\nprivate static void testNonBlockingCall(\nFunction contenderCreator,\nConsumer driverAction)\nthrows Exception {\nfinal OneShotLatch latch = new OneShotLatch();\nfinal TestingGenericLeaderContender contender = contenderCreator.apply(latch);\nfinal TestingLeaderElectionDriver driver = 
TestingLeaderElectionDriver.newBuilder().build();\nfinal DefaultLeaderElectionService testInstance =\nnew DefaultLeaderElectionService(new TestingLeaderElectionDriver.Factory(driver));\ntestInstance.startLeaderElectionBackend();\nfinal LeaderElection leaderElection =\ntestInstance.createLeaderElection(createRandomContenderID());\nleaderElection.startLeaderElection(contender);\ndriverAction.accept(driver);\nlatch.trigger();\nleaderElection.close();\ntestInstance.close();\n}\nprivate static String createRandomContenderID() {\nreturn String.format(\"contender-id-%s\", UUID.randomUUID());\n}\nprivate static class Context {\nfinal String contenderID = createRandomContenderID();\nDefaultLeaderElectionService leaderElectionService;\nTestingContender testingContender;\nTestingLeaderElectionDriver testingLeaderElectionDriver;\nLeaderElection leaderElection;\nprivate Context() {\nthis(new AtomicReference<>());\n}\nprivate Context(AtomicReference storedLeaderInformation) {\nthis(\nTestingLeaderElectionDriver.newBuilder(\nnew AtomicBoolean(),\nstoredLeaderInformation,\nnew AtomicBoolean())\n.build());\n}\nprivate Context(TestingLeaderElectionDriver driver) {\nthis.testingLeaderElectionDriver = driver;\n}\nvoid runTestWithSynchronousEventHandling(RunnableWithException testMethod)\nthrows Exception {\nrunTest(testMethod, Executors.newDirectExecutorService());\n}\nvoid runTestWithManuallyTriggeredEvents(\nThrowingConsumer testMethod)\nthrows Exception {\nfinal ManuallyTriggeredScheduledExecutorService executorService =\nnew ManuallyTriggeredScheduledExecutorService();\nrunTest(() -> testMethod.accept(executorService), executorService);\n}\nvoid runTest(RunnableWithException testMethod, ExecutorService leaderEventOperationExecutor)\nthrows Exception {\ntry {\nleaderElectionService =\nnew DefaultLeaderElectionService(\nnew TestingLeaderElectionDriver.Factory(\ntestingLeaderElectionDriver),\nleaderEventOperationExecutor);\nleaderElectionService.startLeaderElectionBackend();\nleaderElection = leaderElectionService.createLeaderElection(contenderID);\ntestingContender = new TestingContender(TEST_URL, leaderElection);\ntestingContender.startLeaderElection();\ntestMethod.run();\n} finally {\nif (leaderElection != null) {\nleaderElection.close();\n}\nif (leaderElectionService != null) {\nleaderElectionService.close();\n}\nif (testingContender != null) {\ntestingContender.throwErrorIfPresent();\n}\nif (testingLeaderElectionDriver != null) {\ntestingLeaderElectionDriver.close();\n}\n}\n}\n}\n}", + "context_after": "class DefaultLeaderElectionServiceTest {\n@RegisterExtension\npublic final TestingFatalErrorHandlerExtension fatalErrorHandlerExtension =\nnew TestingFatalErrorHandlerExtension();\nprivate static final String TEST_URL = \"akka\n@Test\nvoid testOnGrantAndRevokeLeadership() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>(LeaderInformationRegister.empty());\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal UUID leaderSessionID = UUID.randomUUID();\ngrantLeadership(leaderSessionID);\ntestingContender.waitForLeader();\nassertThat(testingContender.getDescription()).isEqualTo(TEST_URL);\nassertThat(testingContender.getLeaderSessionID())\n.isEqualTo(\nleaderElectionService.getLeaderSessionID(contenderID))\n.isEqualTo(leaderSessionID);\nfinal LeaderInformation expectedLeaderInformationInHaBackend 
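The `testNonBlockingCall` helper above proves that a contender callback blocked on a latch cannot stall the caller, because events are dispatched on a separate executor. A self-contained sketch of that assertion, with an arbitrary one-second bound standing in for "returns promptly":

```java
import java.util.concurrent.*;

// Sketch of the non-blocking dispatch check: the listener blocks until the
// latch is released, yet handing the event to the executor returns at once.
public class NonBlockingDispatchSketch {
    public static void main(String[] args) throws Exception {
        CountDownLatch release = new CountDownLatch(1);
        ExecutorService eventExecutor = Executors.newSingleThreadExecutor();

        Runnable blockingListener = () -> {
            try {
                release.await();           // contender blocks inside callback
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        };

        long start = System.nanoTime();
        eventExecutor.execute(blockingListener); // "grantLeadership" hand-off
        long elapsedMs = (System.nanoTime() - start) / 1_000_000;
        if (elapsedMs > 1_000) {
            throw new AssertionError("caller was blocked by the listener");
        }

        release.countDown();               // latch.trigger() equivalent
        eventExecutor.shutdown();
        eventExecutor.awaitTermination(10, TimeUnit.SECONDS);
        System.out.println("dispatch returned in " + elapsedMs + " ms");
    }
}
```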
=\nLeaderInformation.known(\nleaderElectionService.getLeaderSessionID(contenderID),\nTEST_URL);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\n\"The HA backend should have its leader information updated.\")\n.hasValue(expectedLeaderInformationInHaBackend);\nrevokeLeadership();\ntestingContender.waitForRevokeLeader();\nassertThat(testingContender.getLeaderSessionID()).isNull();\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.isNull();\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\n\"External storage is not touched by the leader session because the leadership is already lost.\")\n.hasValue(expectedLeaderInformationInHaBackend);\n});\n}\n};\n}\n/**\n* Tests that we can shut down the DefaultLeaderElectionService if the used LeaderElectionDriver\n* holds an internal lock. See FLINK-20008 for more details.\n*/\n@Test\nvoid testCloseGrantDeadlock() throws Exception {\nfinal OneShotLatch closeReachedLatch = new OneShotLatch();\nfinal OneShotLatch closeContinueLatch = new OneShotLatch();\nfinal OneShotLatch grantReachedLatch = new OneShotLatch();\nfinal OneShotLatch grantContinueLatch = new OneShotLatch();\nfinal CompletableFuture driverCloseTriggered = new CompletableFuture<>();\nfinal AtomicBoolean leadershipGranted = new AtomicBoolean();\nfinal TestingLeaderElectionDriver.Builder driverBuilder =\nTestingLeaderElectionDriver.newBuilder(leadershipGranted)\n.setCloseConsumer(\nlock -> {\ncloseReachedLatch.trigger();\ncloseContinueLatch.await();\ntry {\nlock.lock();\ndriverCloseTriggered.complete(null);\n} finally {\nlock.unlock();\n}\n});\nfinal TestingLeaderElectionDriver.Factory driverFactory =\nnew TestingLeaderElectionDriver.Factory(driverBuilder);\nfinal DefaultLeaderElectionService testInstance =\nnew DefaultLeaderElectionService(\ndriverFactory, fatalErrorHandlerExtension.getTestingFatalErrorHandler());\ntestInstance.startLeaderElectionBackend();\nfinal TestingLeaderElectionDriver driver = driverFactory.assertAndGetOnlyCreatedDriver();\nfinal Thread closeThread =\nnew Thread(\n() -> {\ntry {\ntestInstance.close();\n} catch (Exception e) {\nthrow new RuntimeException(e);\n}\n},\n\"CloseThread\");\ncloseThread.start();\ncloseReachedLatch.await();\nfinal Thread grantThread =\nnew Thread(\n() -> {\ntry {\ndriver.getLock().lock();\ngrantReachedLatch.trigger();\ngrantContinueLatch.awaitQuietly();\nleadershipGranted.set(true);\ntestInstance.isLeader(UUID.randomUUID());\n} finally {\ndriver.getLock().unlock();\n}\n},\n\"GrantThread\");\ngrantThread.start();\ngrantReachedLatch.await();\ngrantContinueLatch.trigger();\ncloseContinueLatch.trigger();\ncloseThread.join();\ngrantThread.join();\nFlinkAssertions.assertThatFuture(driverCloseTriggered).eventuallySucceeds();\n}\n@Test\nvoid testGrantCallWhileInstantiatingDriver() throws Exception {\nfinal UUID expectedLeaderSessionID = UUID.randomUUID();\ntry (final DefaultLeaderElectionService testInstance =\nnew DefaultLeaderElectionService(\n(listener, errorHandler) -> {\nlistener.isLeader(expectedLeaderSessionID);\nreturn TestingLeaderElectionDriver.newNoOpBuilder()\n.build(listener, errorHandler);\n},\nfatalErrorHandlerExtension.getTestingFatalErrorHandler(),\nExecutors.newDirectExecutorService())) {\ntestInstance.startLeaderElectionBackend();\nfinal LeaderElection leaderElection =\ntestInstance.createLeaderElection(createRandomContenderID());\nfinal TestingContender testingContender =\nnew TestingContender(\"unused-address\", 
leaderElection);\ntestingContender.startLeaderElection();\nassertThat(testingContender.getLeaderSessionID()).isEqualTo(expectedLeaderSessionID);\nleaderElection.close();\n}\n}\n@Test\nvoid testDelayedGrantCallAfterContenderRegistration() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\nleaderElection.close();\nfinal UUID expectedSessionID = UUID.randomUUID();\ngrantLeadership(expectedSessionID);\ntry (LeaderElection anotherLeaderElection =\nleaderElectionService.createLeaderElection(contenderID)) {\nfinal TestingContender testingContender =\nnew TestingContender(TEST_URL, anotherLeaderElection);\ntestingContender.startLeaderElection();\nassertThat(testingContender.getLeaderSessionID())\n.as(\n\"Leadership grant was not forwarded to the contender, yet.\")\n.isNull();\nexecutorService.trigger();\nassertThat(testingContender.getLeaderSessionID())\n.as(\n\"Leadership grant is actually forwarded to the service.\")\n.isEqualTo(expectedSessionID);\ntestingContender.waitForLeader();\n}\n});\n}\n};\n}\n@Test\nvoid testDelayedGrantCallAfterContenderBeingDeregisteredAgain() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\nleaderElection.close();\ngrantLeadership();\nexecutorService.trigger();\nleaderElection =\nleaderElectionService.createLeaderElection(contenderID);\nfinal TestingContender contender =\nnew TestingContender(\"unused-address\", leaderElection);\ncontender.startLeaderElection();\nleaderElection.close();\nexecutorService.trigger();\n});\n}\n};\n}\n/**\n* Test to cover the issue described in FLINK-31814. This test could be removed after\n* FLINK-31814 is resolved.\n*/\n@Test\nvoid testOnRevokeCallWhileClosingService() throws Exception {\nfinal AtomicBoolean leadershipGranted = new AtomicBoolean();\nfinal TestingLeaderElectionDriver.Builder driverBuilder =\nTestingLeaderElectionDriver.newBuilder(leadershipGranted);\nfinal TestingLeaderElectionDriver.Factory driverFactory =\nnew TestingLeaderElectionDriver.Factory(driverBuilder);\ntry (final DefaultLeaderElectionService testInstance =\nnew DefaultLeaderElectionService(\ndriverFactory, fatalErrorHandlerExtension.getTestingFatalErrorHandler())) {\ndriverBuilder.setCloseConsumer(lock -> testInstance.onRevokeLeadership());\ntestInstance.startLeaderElectionBackend();\nleadershipGranted.set(true);\ntestInstance.isLeader(UUID.randomUUID());\nfinal LeaderElection leaderElection =\ntestInstance.createLeaderElection(createRandomContenderID());\nfinal TestingContender contender =\nnew TestingContender(\"unused-address\", leaderElection);\ncontender.startLeaderElection();\ncontender.waitForLeader();\nleaderElection.close();\ncontender.throwErrorIfPresent();\n}\n}\n@Test\nvoid testContenderRegistrationWithoutDriverBeingInstantiatedFails() throws Exception {\ntry (final DefaultLeaderElectionService leaderElectionService =\nnew DefaultLeaderElectionService(\nTestingLeaderElectionDriver.Factory.createFactoryWithNoOpDriver(),\nfatalErrorHandlerExtension.getTestingFatalErrorHandler())) {\nfinal LeaderElection leaderElection =\nleaderElectionService.createLeaderElection(createRandomContenderID());\nassertThatThrownBy(\n() ->\nnew TestingContender(\"unused-address\", leaderElection)\n.startLeaderElection())\n.isInstanceOf(IllegalStateException.class);\nleaderElectionService.startLeaderElectionBackend();\n}\n}\n@Test\n@Test\nvoid testProperCleanupOnLeaderElectionCloseWhenHoldingTheLeadership() throws Exception {\nfinal AtomicReference storedLeaderInformation 
=\nnew AtomicReference<>();\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal UUID leaderSessionID = UUID.randomUUID();\ngrantLeadership(leaderSessionID);\nassertThat(testingContender.getLeaderSessionID()).isNotNull();\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.isEqualTo(leaderSessionID);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.hasValue(LeaderInformation.known(leaderSessionID, TEST_URL));\nleaderElection.close();\nassertThat(testingContender.getLeaderSessionID())\n.as(\n\"The LeaderContender should have been informed about the leadership loss.\")\n.isNull();\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.as(\n\"The LeaderElectionService should have its internal state cleaned.\")\n.isNull();\nassertThat(storedLeaderInformation.get().getRegisteredContenderIDs())\n.as(\"The HA backend's data should have been cleaned.\")\n.isEmpty();\n});\n}\n};\n}\n@Test\nvoid testLeaderInformationChangedAndShouldBeCorrected() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>();\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal UUID leaderSessionID = UUID.randomUUID();\ngrantLeadership(leaderSessionID);\nfinal LeaderInformation expectedLeaderInformation =\nLeaderInformation.known(leaderSessionID, TEST_URL);\nstoredLeaderInformation.set(LeaderInformationRegister.empty());\nleaderElectionService.notifyLeaderInformationChange(\ncontenderID, LeaderInformation.empty());\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\"Removed leader information should have been reset.\")\n.hasValue(expectedLeaderInformation);\nfinal LeaderInformation faultyLeaderInformation =\nLeaderInformation.known(UUID.randomUUID(), \"faulty-address\");\nstoredLeaderInformation.set(\nLeaderInformationRegister.of(\ncontenderID, faultyLeaderInformation));\nleaderElectionService.notifyLeaderInformationChange(\ncontenderID, faultyLeaderInformation);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\"Overwritten leader information should have been reset.\")\n.hasValue(expectedLeaderInformation);\n});\n}\n};\n}\n@Test\nvoid testHasLeadershipWithLeadershipButNoGrantEventProcessed() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\nfinal UUID expectedSessionID = UUID.randomUUID();\ngrantLeadership(expectedSessionID);\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, expectedSessionID))\n.isFalse();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, UUID.randomUUID()))\n.isFalse();\n});\n}\n};\n}\n@Test\nvoid testHasLeadershipWithLeadershipAndGrantEventProcessed() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\nfinal UUID expectedSessionID = UUID.randomUUID();\ngrantLeadership(expectedSessionID);\nassertThat(testingContender.getLeaderSessionID()).isNull();\nexecutorService.trigger();\nassertThat(testingContender.getLeaderSessionID())\n.isEqualTo(expectedSessionID);\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, expectedSessionID))\n.isTrue();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, UUID.randomUUID()))\n.isFalse();\n});\n}\n};\n}\n@Test\nvoid testHasLeadershipWithLeadershipLostButNoRevokeEventProcessed() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> 
{\ngrantLeadership();\nexecutorService.trigger();\nfinal UUID expectedSessionID = testingContender.getLeaderSessionID();\nrevokeLeadership();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, expectedSessionID))\n.as(\n\"No operation should be handled anymore after the HA backend \"\n+ \"indicated leadership loss even if the onRevokeLeadership wasn't \"\n+ \"processed, yet, because some other process could have picked up \"\n+ \"the leadership in the meantime already based on the HA \"\n+ \"backend's decision.\")\n.isFalse();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, UUID.randomUUID()))\n.isFalse();\n});\n}\n};\n}\n@Test\nvoid testHasLeadershipWithLeadershipLostAndRevokeEventProcessed() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\ngrantLeadership();\nexecutorService.trigger();\nfinal UUID expectedSessionID = testingContender.getLeaderSessionID();\nrevokeLeadership();\nexecutorService.trigger();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, expectedSessionID))\n.isFalse();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, UUID.randomUUID()))\n.isFalse();\n});\n}\n};\n}\n@Test\nvoid testHasLeadershipAfterLeaderElectionClose() throws Exception {\nnew Context() {\n{\nrunTestWithManuallyTriggeredEvents(\nexecutorService -> {\ngrantLeadership();\nexecutorService.trigger();\nfinal UUID expectedSessionID = testingContender.getLeaderSessionID();\nleaderElection.close();\nassertThat(\nleaderElectionService.hasLeadership(\ncontenderID, expectedSessionID))\n.isFalse();\n});\n}\n};\n}\n@Test\nvoid testLeaderInformationChangedIfNotBeingLeader() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>();\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal LeaderInformation differentLeaderInformation =\nLeaderInformation.known(UUID.randomUUID(), \"different-address\");\nstoredLeaderInformation.set(\nLeaderInformationRegister.of(\ncontenderID, differentLeaderInformation));\nleaderElectionService.notifyLeaderInformationChange(\ncontenderID, differentLeaderInformation);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\"The external storage shouldn't have been changed.\")\n.hasValue(differentLeaderInformation);\n});\n}\n};\n}\n@Test\nvoid testOnGrantLeadershipIsIgnoredAfterLeaderElectionBeingStop() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nleaderElection.close();\ngrantLeadership();\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.as(\n\"The grant event shouldn't have been processed by the LeaderElectionService.\")\n.isNull();\nassertThat(testingContender.getLeaderSessionID())\n.as(\n\"The grant event shouldn't have been forwarded to the contender.\")\n.isNull();\n});\n}\n};\n}\n@Test\nvoid testOnLeaderInformationChangeIsIgnoredAfterLeaderElectionBeingStop() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>();\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\ngrantLeadership();\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.isPresent();\nleaderElection.close();\nstoredLeaderInformation.set(LeaderInformationRegister.empty());\nleaderElectionService.notifyLeaderInformationChange(\ncontenderID, LeaderInformation.empty());\nassertThat(storedLeaderInformation.get().getRegisteredContenderIDs())\n.as(\"The external 
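The `executorService.trigger()` calls throughout these tests come from Flink's `ManuallyTriggeredScheduledExecutorService`. The essence is an executor that queues events until the test explicitly releases them, which is what makes the "granted but not yet processed" window observable. A stripped-down sketch:

```java
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.Executor;

// Minimal stand-in for ManuallyTriggeredScheduledExecutorService: events
// queue up until trigger() runs the next one.
public class ManualExecutorSketch implements Executor {
    private final Queue<Runnable> queued = new ArrayDeque<>();

    @Override
    public void execute(Runnable command) {
        queued.add(command);               // defer instead of running inline
    }

    public void trigger() {                // run exactly one queued event
        Runnable next = queued.poll();
        if (next != null) {
            next.run();
        }
    }

    public static void main(String[] args) {
        ManualExecutorSketch executor = new ManualExecutorSketch();
        StringBuilder log = new StringBuilder();
        executor.execute(() -> log.append("grant"));
        if (log.length() != 0) throw new AssertionError("ran before trigger()");
        executor.trigger();
        if (!log.toString().equals("grant")) throw new AssertionError();
        System.out.println("event processed only on trigger()");
    }
}
```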
storage shouldn't have been corrected.\")\n.isEmpty();\n});\n}\n};\n}\n@Test\nvoid testOnRevokeLeadershipIsTriggeredAfterLeaderElectionBeingStop() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\ngrantLeadership();\nfinal UUID oldSessionId =\nleaderElectionService.getLeaderSessionID(contenderID);\nassertThat(testingContender.getLeaderSessionID())\n.isEqualTo(oldSessionId);\nleaderElection.close();\nassertThat(testingContender.getLeaderSessionID())\n.as(\n\"LeaderContender should have been revoked as part of the stop call.\")\n.isNull();\n});\n}\n};\n}\n@Test\nvoid testOldConfirmLeaderInformationWhileHavingNewLeadership() throws Exception {\nfinal AtomicReference storedLeaderInformation =\nnew AtomicReference<>();\nnew Context(storedLeaderInformation) {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\ngrantLeadership();\nfinal UUID currentLeaderSessionId =\nleaderElectionService.getLeaderSessionID(contenderID);\nassertThat(currentLeaderSessionId).isNotNull();\nfinal LeaderInformation expectedLeaderInformation =\nLeaderInformation.known(currentLeaderSessionId, TEST_URL);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.hasValue(expectedLeaderInformation);\nleaderElection.confirmLeadership(UUID.randomUUID(), TEST_URL);\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.isEqualTo(currentLeaderSessionId);\nassertThat(storedLeaderInformation.get().forContenderID(contenderID))\n.as(\n\"The leader information in the external storage shouldn't have been updated.\")\n.hasValue(expectedLeaderInformation);\n});\n}\n};\n}\n@Test\nvoid testOldConfirmationWhileHavingLeadershipLost() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\ngrantLeadership();\nfinal UUID currentLeaderSessionId =\nleaderElectionService.getLeaderSessionID(contenderID);\nassertThat(currentLeaderSessionId).isNotNull();\nrevokeLeadership();\nleaderElection.confirmLeadership(currentLeaderSessionId, TEST_URL);\nassertThat(leaderElectionService.getLeaderSessionID(contenderID))\n.isNull();\n});\n}\n};\n}\n@Test\nvoid testErrorForwarding() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal Exception testException = new Exception(\"test leader exception\");\ntestingLeaderElectionDriver.triggerFatalError(testException);\nassertThat(testingContender.getError())\n.isNotNull()\n.hasCause(testException);\ntestingContender.clearError();\n});\n}\n};\n}\n@Test\nvoid testErrorIsIgnoredAfterLeaderElectionBeingClosed() throws Exception {\nnew Context() {\n{\nrunTestWithSynchronousEventHandling(\n() -> {\nfinal Exception testException = new Exception(\"test leader exception\");\nleaderElection.close();\ntestingLeaderElectionDriver.triggerFatalError(testException);\nassertThat(testingContender.getError()).isNull();\nassertThat(\nfatalErrorHandlerExtension\n.getTestingFatalErrorHandler()\n.getException())\n.as(\n\"The fallback error handler should have caught the error in this case.\")\n.isEqualTo(testException);\nfatalErrorHandlerExtension.getTestingFatalErrorHandler().clearError();\n});\n}\n};\n}\n@Test\nvoid testOnLeadershipChangeDoesNotBlock() throws Exception {\nfinal CompletableFuture initialLeaderInformation =\nnew CompletableFuture<>();\nfinal OneShotLatch latch = new OneShotLatch();\nfinal AtomicBoolean leadershipGranted = new AtomicBoolean(false);\nfinal TestingLeaderElectionDriver.Builder driverBuilder =\nTestingLeaderElectionDriver.newBuilder(\nleadershipGranted, new 
AtomicReference<>(), new AtomicBoolean())\n.setPublishLeaderInformationConsumer(\n(lock, contenderID, leaderInformation) -> {\ntry {\nlock.lock();\nif (!initialLeaderInformation.isDone()) {\ninitialLeaderInformation.complete(leaderInformation);\n} else {\nlatch.awaitQuietly();\n}\n} finally {\nlock.unlock();\n}\n})\n.setHasLeadershipFunction(lock -> leadershipGranted.get());\nfinal TestingLeaderElectionDriver.Factory driverFactory =\nnew TestingLeaderElectionDriver.Factory(driverBuilder);\nfinal DefaultLeaderElectionService testInstance =\nnew DefaultLeaderElectionService(\ndriverFactory, fatalErrorHandlerExtension.getTestingFatalErrorHandler());\ntestInstance.startLeaderElectionBackend();\nfinal String contenderID = \"contender-id\";\nfinal String address = \"leader-address\";\nfinal LeaderElection leaderElection = testInstance.createLeaderElection(contenderID);\nleaderElection.startLeaderElection(\nTestingGenericLeaderContender.newBuilder()\n.setGrantLeadershipConsumer(\nsessionID ->\ntestInstance.confirmLeadership(\ncontenderID, sessionID, address))\n.build());\nfinal UUID sessionID = UUID.randomUUID();\nleadershipGranted.set(true);\ntestInstance.isLeader(sessionID);\nFlinkAssertions.assertThatFuture(initialLeaderInformation)\n.eventuallySucceeds()\n.as(\"The LeaderInformation should have been forwarded to the driver.\")\n.isEqualTo(LeaderInformation.known(sessionID, address));\ntestInstance.onLeaderInformationChange(LeaderInformation.empty());\nlatch.trigger();\nleaderElection.close();\ntestInstance.close();\n}\n@Test\nvoid testOnGrantLeadershipAsyncDoesNotBlock() throws Exception {\ntestNonBlockingCall(\nlatch ->\nTestingGenericLeaderContender.newBuilder()\n.setGrantLeadershipConsumer(\nignoredSessionID -> latch.awaitQuietly())\n.build(),\n(leadershipGranted, listener) -> {\nleadershipGranted.set(true);\nlistener.isLeader(UUID.randomUUID());\n});\n}\n@Test\nvoid testOnRevokeLeadershipDoesNotBlock() throws Exception {\ntestNonBlockingCall(\nlatch ->\nTestingGenericLeaderContender.newBuilder()\n.setRevokeLeadershipRunnable(latch::awaitQuietly)\n.build(),\n(leadershipGranted, listener) -> {\nleadershipGranted.set(true);\nlistener.isLeader(UUID.randomUUID());\nleadershipGranted.set(false);\nlistener.notLeader();\n});\n}\nprivate void testNonBlockingCall(\nFunction contenderCreator,\nBiConsumer\nlistenerAction)\nthrows Exception {\nfinal OneShotLatch latch = new OneShotLatch();\nfinal TestingGenericLeaderContender contender = contenderCreator.apply(latch);\nfinal AtomicBoolean leadershipGranted = new AtomicBoolean(false);\nfinal TestingLeaderElectionDriver.Factory driverFactory =\nnew TestingLeaderElectionDriver.Factory(\nTestingLeaderElectionDriver.newBuilder(\nleadershipGranted, new AtomicReference<>(), new AtomicBoolean()));\nfinal DefaultLeaderElectionService testInstance =\nnew DefaultLeaderElectionService(\ndriverFactory, fatalErrorHandlerExtension.getTestingFatalErrorHandler());\ntestInstance.startLeaderElectionBackend();\nfinal LeaderElection leaderElection =\ntestInstance.createLeaderElection(createRandomContenderID());\nleaderElection.startLeaderElection(contender);\nlistenerAction.accept(leadershipGranted, testInstance);\nlatch.trigger();\nleaderElection.close();\ntestInstance.close();\n}\nprivate static String createRandomContenderID() {\nreturn String.format(\"contender-id-%s\", UUID.randomUUID());\n}\nprivate class Context {\nprivate final TestingLeaderElectionDriver.Factory driverFactory;\nprivate final AtomicBoolean leadershipGranted;\nfinal String contenderID = 
createRandomContenderID();\nDefaultLeaderElectionService leaderElectionService;\nTestingContender testingContender;\nTestingLeaderElectionDriver testingLeaderElectionDriver;\nLeaderElection leaderElection;\nprivate Context() {\nthis(new AtomicBoolean(false), new AtomicReference<>());\n}\nprivate Context(AtomicReference storedLeaderInformation) {\nthis(new AtomicBoolean(false), storedLeaderInformation);\n}\nprivate Context(\nAtomicBoolean leadershipGranted,\nAtomicReference storedLeaderInformation) {\nthis(\nleadershipGranted,\nTestingLeaderElectionDriver.newBuilder(\nleadershipGranted, storedLeaderInformation, new AtomicBoolean()));\n}\nprivate Context(\nAtomicBoolean leadershipGranted,\nTestingLeaderElectionDriver.Builder driverBuilder) {\nthis.leadershipGranted = leadershipGranted;\nthis.driverFactory = new TestingLeaderElectionDriver.Factory(driverBuilder);\n}\nvoid grantLeadership() {\ngrantLeadership(UUID.randomUUID());\n}\nvoid grantLeadership(UUID leaderSessionID) {\nleadershipGranted.set(true);\nleaderElectionService.isLeader(leaderSessionID);\n}\nvoid revokeLeadership() {\nleadershipGranted.set(false);\nleaderElectionService.notLeader();\n}\nvoid runTestWithSynchronousEventHandling(RunnableWithException testMethod)\nthrows Exception {\nrunTest(testMethod, Executors.newDirectExecutorService());\n}\nvoid runTestWithManuallyTriggeredEvents(\nThrowingConsumer testMethod)\nthrows Exception {\nfinal ManuallyTriggeredScheduledExecutorService executorService =\nnew ManuallyTriggeredScheduledExecutorService();\nrunTest(() -> testMethod.accept(executorService), executorService);\n}\nvoid runTest(RunnableWithException testMethod, ExecutorService leaderEventOperationExecutor)\nthrows Exception {\ntry {\nleaderElectionService =\nnew DefaultLeaderElectionService(\ndriverFactory,\nDefaultLeaderElectionServiceTest.this.fatalErrorHandlerExtension\n.getTestingFatalErrorHandler(),\nleaderEventOperationExecutor);\nleaderElectionService.startLeaderElectionBackend();\ntestingLeaderElectionDriver = driverFactory.assertAndGetOnlyCreatedDriver();\nleaderElection = leaderElectionService.createLeaderElection(contenderID);\ntestingContender = new TestingContender(TEST_URL, leaderElection);\ntestingContender.startLeaderElection();\ntestMethod.run();\n} finally {\nif (leaderElection != null) {\nleaderElection.close();\n}\nif (leaderElectionService != null) {\nleaderElectionService.close();\n}\nif (testingContender != null) {\ntestingContender.throwErrorIfPresent();\n}\nif (testingLeaderElectionDriver != null) {\ntestingLeaderElectionDriver.close();\n}\n}\n}\n}\n}" + }, + { + "comment": "Yes, that looks way better. 
I have dropped the `clear` action on empty collections.", + "method_body": "public List snapshotState(long checkpointId) {\nLOG.debug(\"Trigger the new transaction for downstream readers.\");\nList splits =\n((PulsarUnorderedFetcherManager) splitFetcherManager)\n.snapshotState(checkpointId);\nif (splits.isEmpty() && transactionsOfFinishedSplits.isEmpty()) {\ntransactionsToCommit.put(checkpointId, Collections.emptyList());\n} else {\nList txnIDs =\ntransactionsToCommit.computeIfAbsent(checkpointId, id -> new ArrayList<>());\nfor (PulsarPartitionSplit split : splits) {\nTxnID uncommittedTransactionId = split.getUncommittedTransactionId();\nif (uncommittedTransactionId != null) {\ntxnIDs.add(uncommittedTransactionId);\n}\n}\n}\nreturn splits;\n}", + "target_code": ".snapshotState(checkpointId);", + "method_body_after": "public List snapshotState(long checkpointId) {\nLOG.debug(\"Trigger the new transaction for downstream readers.\");\nList splits =\n((PulsarUnorderedFetcherManager) splitFetcherManager)\n.snapshotState(checkpointId);\nif (coordinatorClient != null) {\nList txnIDs =\ntransactionsToCommit.computeIfAbsent(checkpointId, id -> new ArrayList<>());\nfor (PulsarPartitionSplit split : splits) {\nTxnID uncommittedTransactionId = split.getUncommittedTransactionId();\nif (uncommittedTransactionId != null) {\ntxnIDs.add(uncommittedTransactionId);\n}\n}\n}\nreturn splits;\n}", + "context_before": "class PulsarUnorderedSourceReader extends PulsarSourceReaderBase {\nprivate static final Logger LOG = LoggerFactory.getLogger(PulsarUnorderedSourceReader.class);\nprivate final TransactionCoordinatorClient coordinatorClient;\nprivate final SortedMap> transactionsToCommit;\nprivate final List transactionsOfFinishedSplits;\npublic PulsarUnorderedSourceReader(\nFutureCompletingBlockingQueue>> elementsQueue,\nSupplier> splitReaderSupplier,\nConfiguration configuration,\nSourceReaderContext context,\nSourceConfiguration sourceConfiguration,\nPulsarClient pulsarClient,\nPulsarAdmin pulsarAdmin,\nTransactionCoordinatorClient coordinatorClient) {\nsuper(\nelementsQueue,\nnew PulsarUnorderedFetcherManager<>(elementsQueue, splitReaderSupplier::get),\nconfiguration,\ncontext,\nsourceConfiguration,\npulsarClient,\npulsarAdmin);\nthis.coordinatorClient = coordinatorClient;\nthis.transactionsToCommit = Collections.synchronizedSortedMap(new TreeMap<>());\nthis.transactionsOfFinishedSplits = Collections.synchronizedList(new ArrayList<>());\n}\n@Override\nprotected void onSplitFinished(Map finishedSplitIds) {\nif (LOG.isDebugEnabled()) {\nLOG.debug(\"onSplitFinished event: {}\", finishedSplitIds);\n}\nfor (Map.Entry entry : finishedSplitIds.entrySet()) {\nPulsarPartitionSplitState state = entry.getValue();\nTxnID uncommittedTransactionId = state.getUncommittedTransactionId();\nif (uncommittedTransactionId != null) {\ntransactionsOfFinishedSplits.add(uncommittedTransactionId);\n}\n}\n}\n@Override\n@Override\npublic void notifyCheckpointComplete(long checkpointId) throws Exception {\nLOG.debug(\"Committing transactions for checkpoint {}\", checkpointId);\nif (coordinatorClient != null) {\nfor (Map.Entry> entry : transactionsToCommit.entrySet()) {\nLong currentCheckpointId = entry.getKey();\nif (currentCheckpointId > checkpointId) {\ncontinue;\n}\nList transactions = entry.getValue();\nfor (TxnID transaction : transactions) {\ncoordinatorClient.commit(transaction);\ntransactionsOfFinishedSplits.removeIf(txnID -> txnID.equals(transaction));\n}\ntransactionsToCommit.remove(currentCheckpointId);\n}\n} else 
{\ntransactionsToCommit.entrySet().removeIf(e -> e.getKey() < checkpointId);\ntransactionsOfFinishedSplits.clear();\n}\n}\n}", + "context_after": "class PulsarUnorderedSourceReader extends PulsarSourceReaderBase {\nprivate static final Logger LOG = LoggerFactory.getLogger(PulsarUnorderedSourceReader.class);\n@Nullable private final TransactionCoordinatorClient coordinatorClient;\nprivate final SortedMap> transactionsToCommit;\nprivate final List transactionsOfFinishedSplits;\npublic PulsarUnorderedSourceReader(\nFutureCompletingBlockingQueue>> elementsQueue,\nSupplier> splitReaderSupplier,\nConfiguration configuration,\nSourceReaderContext context,\nSourceConfiguration sourceConfiguration,\nPulsarClient pulsarClient,\nPulsarAdmin pulsarAdmin,\n@Nullable TransactionCoordinatorClient coordinatorClient) {\nsuper(\nelementsQueue,\nnew PulsarUnorderedFetcherManager<>(elementsQueue, splitReaderSupplier::get),\nconfiguration,\ncontext,\nsourceConfiguration,\npulsarClient,\npulsarAdmin);\nthis.coordinatorClient = coordinatorClient;\nthis.transactionsToCommit = Collections.synchronizedSortedMap(new TreeMap<>());\nthis.transactionsOfFinishedSplits = Collections.synchronizedList(new ArrayList<>());\n}\n@Override\nprotected void onSplitFinished(Map finishedSplitIds) {\nif (LOG.isDebugEnabled()) {\nLOG.debug(\"onSplitFinished event: {}\", finishedSplitIds);\n}\nif (coordinatorClient != null) {\nfor (Map.Entry entry : finishedSplitIds.entrySet()) {\nPulsarPartitionSplitState state = entry.getValue();\nTxnID uncommittedTransactionId = state.getUncommittedTransactionId();\nif (uncommittedTransactionId != null) {\ntransactionsOfFinishedSplits.add(uncommittedTransactionId);\n}\n}\n}\n}\n@Override\n@Override\npublic void notifyCheckpointComplete(long checkpointId) throws Exception {\nLOG.debug(\"Committing transactions for checkpoint {}\", checkpointId);\nif (coordinatorClient != null) {\nfor (Map.Entry> entry : transactionsToCommit.entrySet()) {\nLong currentCheckpointId = entry.getKey();\nif (currentCheckpointId > checkpointId) {\ncontinue;\n}\nList transactions = entry.getValue();\nfor (TxnID transaction : transactions) {\ncoordinatorClient.commit(transaction);\ntransactionsOfFinishedSplits.remove(transaction);\n}\ntransactionsToCommit.remove(currentCheckpointId);\n}\n}\n}\n}" + }, + { + "comment": "we should set the request what the DISTINCT_LOCAL agg want to. 
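The Pulsar entry above — where the reviewer agreed to drop the `clear` on empty collections — boils down to one bookkeeping shape: `snapshotState` records transaction ids under the checkpoint id, and `notifyCheckpointComplete` commits every entry at or below the completed checkpoint. A standalone sketch of that shape, with `String` ids standing in for `TxnID` and `System.out` standing in for the coordinator client:

```java
import java.util.*;

// Sketch of the checkpoint-to-transaction bookkeeping; a sorted map lets the
// completion hook take all checkpoints <= checkpointId via headMap.
public class CheckpointTxnBookkeepingSketch {
    private final SortedMap<Long, List<String>> transactionsToCommit =
            Collections.synchronizedSortedMap(new TreeMap<>());

    void snapshotState(long checkpointId, List<String> txnIds) {
        transactionsToCommit
                .computeIfAbsent(checkpointId, id -> new ArrayList<>())
                .addAll(txnIds);
    }

    void notifyCheckpointComplete(long checkpointId) {
        // headMap(checkpointId + 1) = all entries with key <= checkpointId
        Map<Long, List<String>> ready = transactionsToCommit.headMap(checkpointId + 1);
        for (List<String> txns : ready.values()) {
            txns.forEach(txn -> System.out.println("commit " + txn));
        }
        ready.clear(); // drop committed checkpoints, keep later ones
    }

    public static void main(String[] args) {
        CheckpointTxnBookkeepingSketch sketch = new CheckpointTxnBookkeepingSketch();
        sketch.snapshotState(1, List.of("txn-a"));
        sketch.snapshotState(2, List.of("txn-b"));
        sketch.notifyCheckpointComplete(1); // commits txn-a only
        System.out.println("pending: " + sketch.transactionsToCommit.keySet()); // [2]
    }
}
```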
in later stage if the missing enforce helper found that the GLOBAL agg's distribute satisfy the DISTINCT_LOCAL agg, no exchange node will be added", + "method_body": "public Void visitPhysicalAggregate(PhysicalAggregate agg, PlanContext context) {\nif (agg.getAggPhase().isLocal()) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY);\nreturn null;\n}\nif (agg.getAggPhase() == AggPhase.DISTINCT_LOCAL) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY);\nreturn null;\n}\nList partitionExpressions = agg.getPartitionExpressions();\nif (partitionExpressions.isEmpty()) {\naddToRequestPropertyToChildren(PhysicalProperties.GATHER);\nreturn null;\n}\nif (partitionExpressions.stream().allMatch(SlotReference.class::isInstance)) {\nList partitionedSlots = partitionExpressions.stream()\n.map(SlotReference.class::cast)\n.map(SlotReference::getExprId)\n.collect(Collectors.toList());\naddToRequestPropertyToChildren(\nPhysicalProperties.createHash(new DistributionSpecHash(partitionedSlots, ShuffleType.AGGREGATE)));\nreturn null;\n}\nthrow new RuntimeException(\"Need to add a rule to split aggregate to aggregate(project),\"\n+ \" see more in AggregateDisassemble\");\n}", + "target_code": "addToRequestPropertyToChildren(PhysicalProperties.ANY);", + "method_body_after": "public Void visitPhysicalAggregate(PhysicalAggregate agg, PlanContext context) {\nif (agg.getAggPhase().isLocal()) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY);\nreturn null;\n}\nif (agg.getAggPhase() == AggPhase.GLOBAL && !agg.isFinalPhase()) {\naddToRequestPropertyToChildren(requestPropertyFromParent);\nreturn null;\n}\nList partitionExpressions = agg.getPartitionExpressions();\nif (partitionExpressions.isEmpty()) {\naddToRequestPropertyToChildren(PhysicalProperties.GATHER);\nreturn null;\n}\nif (partitionExpressions.stream().allMatch(SlotReference.class::isInstance)) {\nList partitionedSlots = partitionExpressions.stream()\n.map(SlotReference.class::cast)\n.map(SlotReference::getExprId)\n.collect(Collectors.toList());\naddToRequestPropertyToChildren(\nPhysicalProperties.createHash(new DistributionSpecHash(partitionedSlots, ShuffleType.AGGREGATE)));\nreturn null;\n}\nthrow new RuntimeException(\"Need to add a rule to split aggregate to aggregate(project),\"\n+ \" see more in AggregateDisassemble\");\n}", + "context_before": "class RequestPropertyDeriver extends PlanVisitor {\n/*\n* requestPropertyFromParent\n* \u2502\n* \u25bc\n* curNode (current plan node in current CostAndEnforcerJob)\n* \u2502\n* \u25bc\n* requestPropertyToChildren\n*/\nprivate final PhysicalProperties requestPropertyFromParent;\nprivate List> requestPropertyToChildren;\npublic RequestPropertyDeriver(JobContext context) {\nthis.requestPropertyFromParent = context.getRequiredProperties();\n}\npublic List> getRequestChildrenPropertyList(GroupExpression groupExpression) {\nrequestPropertyToChildren = Lists.newArrayList();\ngroupExpression.getPlan().accept(this, new PlanContext(groupExpression));\nreturn requestPropertyToChildren;\n}\n@Override\npublic Void visit(Plan plan, PlanContext context) {\nList requiredPropertyList = Lists.newArrayList();\nfor (int i = 0; i < context.getGroupExpression().arity(); i++) {\nrequiredPropertyList.add(PhysicalProperties.ANY);\n}\nrequestPropertyToChildren.add(requiredPropertyList);\nreturn null;\n}\n@Override\n@Override\npublic Void visitPhysicalQuickSort(PhysicalQuickSort sort, PlanContext context) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY);\nreturn null;\n}\n@Override\npublic Void 
visitPhysicalLocalQuickSort(PhysicalLocalQuickSort sort, PlanContext context) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY);\nreturn null;\n}\n@Override\npublic Void visitPhysicalHashJoin(PhysicalHashJoin hashJoin, PlanContext context) {\nif (JoinUtils.couldShuffle(hashJoin)) {\nPair, List> onClauseUsedSlots = JoinUtils.getOnClauseUsedSlots(hashJoin);\naddToRequestPropertyToChildren(\nPhysicalProperties.createHash(\nnew DistributionSpecHash(onClauseUsedSlots.first, ShuffleType.JOIN)),\nPhysicalProperties.createHash(\nnew DistributionSpecHash(onClauseUsedSlots.second, ShuffleType.JOIN)));\n}\nif (JoinUtils.couldBroadcast(hashJoin)) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY, PhysicalProperties.REPLICATED);\n}\nreturn null;\n}\n@Override\npublic Void visitPhysicalNestedLoopJoin(\nPhysicalNestedLoopJoin nestedLoopJoin, PlanContext context) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY, PhysicalProperties.REPLICATED);\nreturn null;\n}\n/**\n* helper function to assemble request children physical properties\n* @param physicalProperties one set request properties for children\n*/\nprivate void addToRequestPropertyToChildren(PhysicalProperties... physicalProperties) {\nrequestPropertyToChildren.add(Lists.newArrayList(physicalProperties));\n}\n}", + "context_after": "class RequestPropertyDeriver extends PlanVisitor {\n/*\n* requestPropertyFromParent\n* \u2502\n* \u25bc\n* curNode (current plan node in current CostAndEnforcerJob)\n* \u2502\n* \u25bc\n* requestPropertyToChildren\n*/\nprivate final PhysicalProperties requestPropertyFromParent;\nprivate List> requestPropertyToChildren;\npublic RequestPropertyDeriver(JobContext context) {\nthis.requestPropertyFromParent = context.getRequiredProperties();\n}\npublic List> getRequestChildrenPropertyList(GroupExpression groupExpression) {\nrequestPropertyToChildren = Lists.newArrayList();\ngroupExpression.getPlan().accept(this, new PlanContext(groupExpression));\nreturn requestPropertyToChildren;\n}\n@Override\npublic Void visit(Plan plan, PlanContext context) {\nList requiredPropertyList = Lists.newArrayList();\nfor (int i = 0; i < context.getGroupExpression().arity(); i++) {\nrequiredPropertyList.add(PhysicalProperties.ANY);\n}\nrequestPropertyToChildren.add(requiredPropertyList);\nreturn null;\n}\n@Override\n@Override\npublic Void visitPhysicalQuickSort(PhysicalQuickSort sort, PlanContext context) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY);\nreturn null;\n}\n@Override\npublic Void visitPhysicalLocalQuickSort(PhysicalLocalQuickSort sort, PlanContext context) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY);\nreturn null;\n}\n@Override\npublic Void visitPhysicalHashJoin(PhysicalHashJoin hashJoin, PlanContext context) {\nif (JoinUtils.couldShuffle(hashJoin)) {\nPair, List> onClauseUsedSlots = JoinUtils.getOnClauseUsedSlots(hashJoin);\naddToRequestPropertyToChildren(\nPhysicalProperties.createHash(\nnew DistributionSpecHash(onClauseUsedSlots.first, ShuffleType.JOIN)),\nPhysicalProperties.createHash(\nnew DistributionSpecHash(onClauseUsedSlots.second, ShuffleType.JOIN)));\n}\nif (JoinUtils.couldBroadcast(hashJoin)) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY, PhysicalProperties.REPLICATED);\n}\nreturn null;\n}\n@Override\npublic Void visitPhysicalNestedLoopJoin(\nPhysicalNestedLoopJoin nestedLoopJoin, PlanContext context) {\naddToRequestPropertyToChildren(PhysicalProperties.ANY, PhysicalProperties.REPLICATED);\nreturn null;\n}\n/**\n* helper function to assemble request children 
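The review comment on `visitPhysicalAggregate` argues that the non-final GLOBAL aggregate should forward its parent's requested property to its child, so that the later enforcement step adds an exchange node only when the delivered distribution does not already satisfy the request. A deliberately abstract sketch of that satisfy-or-enforce decision — the real Nereids property types are reduced to an enum here, so this illustrates the idea, not the actual API:

```java
// Illustrative only: the enforcer inserts an exchange iff the child's
// delivered property fails to satisfy the requested one.
public class PropertyEnforceSketch {
    enum Property { ANY, GATHER, HASH }

    static boolean satisfies(Property delivered, Property requested) {
        return requested == Property.ANY || delivered == requested;
    }

    static String plan(Property delivered, Property requested) {
        return satisfies(delivered, requested)
                ? "child -> agg"                                  // no exchange added
                : "child -> exchange(" + requested + ") -> agg";  // shuffle enforced
    }

    public static void main(String[] args) {
        // GLOBAL agg already hash-distributed -> DISTINCT_LOCAL needs no shuffle
        System.out.println(plan(Property.HASH, Property.HASH));
        // delivered GATHER but HASH requested -> exchange inserted
        System.out.println(plan(Property.GATHER, Property.HASH));
    }
}
```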
physical properties\n* @param physicalProperties one set request properties for children\n*/\nprivate void addToRequestPropertyToChildren(PhysicalProperties... physicalProperties) {\nrequestPropertyToChildren.add(Lists.newArrayList(physicalProperties));\n}\n}" + }, + { + "comment": "`\"/modules/\"` can be replaced with `ProjectConstants.MODULES_ROOT`", + "method_body": "private String normalizeFileName(String fileName) {\nString orgName = IdentifierUtils.encodeNonFunctionIdentifier(\nthis.module.packageInstance().packageOrg().toString());\nfor (Module module : this.module.packageInstance().modules()) {\nString sourceRoot;\nString packageName;\nif (module.isDefaultModule()) {\npackageName = IdentifierUtils.encodeNonFunctionIdentifier(\nmodule.packageInstance().packageName().toString());\nsourceRoot = module.project().sourceRoot().getFileName().toString();\n} else {\npackageName = IdentifierUtils.encodeNonFunctionIdentifier(module.descriptor().name().toString());\nsourceRoot = module.project().sourceRoot().getFileName().toString() + \"/modules/\" +\nmodule.moduleName().moduleNamePart();\n}\nif (fileName.contains(orgName + \"/\" + packageName + \"/\")) {\norgName = Pattern.quote(orgName);\npackageName = Pattern.quote(packageName);\nString normalizedFileName = fileName.replaceAll(\"^\" + orgName + \"/\" +\npackageName + \"/.*/\", sourceRoot + \"/\");\nnormalizedFileName = normalizedFileName.replaceAll(\"^\" + orgName + \"/\" +\npackageName + \"/.*\", sourceRoot);\nreturn normalizedFileName;\n}\n}\nreturn fileName;\n}", + "target_code": "sourceRoot = module.project().sourceRoot().getFileName().toString() + \"/modules/\" +", + "method_body_after": "private String normalizeFileName(String fileName) {\nString orgName = IdentifierUtils.encodeNonFunctionIdentifier(\nthis.module.packageInstance().packageOrg().toString());\nfor (Module module : this.module.packageInstance().modules()) {\nString packageName = IdentifierUtils.encodeNonFunctionIdentifier(\nmodule.moduleName().toString());\nString sourceRoot = module.project().sourceRoot().getFileName().toString();\nif (!module.isDefaultModule()) {\nsourceRoot = sourceRoot + \"/\" + ProjectConstants.MODULES_ROOT + \"/\" +\nmodule.moduleName().moduleNamePart();\n}\nif (fileName.contains(orgName + \"/\" + packageName + \"/\")) {\norgName = Pattern.quote(orgName);\npackageName = Pattern.quote(packageName);\nString normalizedFileName = fileName.replaceAll(\"^\" + orgName + \"/\" +\npackageName + \"/.*/\", sourceRoot + \"/\");\nnormalizedFileName = normalizedFileName.replaceAll(\"^\" + orgName + \"/\" +\npackageName + \"/.*\", sourceRoot);\nreturn normalizedFileName;\n}\n}\nreturn fileName;\n}", + "context_before": "class name for classes generated for bal files\nIClassCoverage modifiedClass = new NormalizedCoverageClass(classCoverage,\nnormalizeFileName(classCoverage.getPackageName()),\nnormalizeFileName(classCoverage.getName()));\nmodifiedClasses.add(modifiedClass);\n} else {\nmodifiedClasses.add(classCoverage);\n}", + "context_after": "class name for classes generated for bal files\nIClassCoverage modifiedClassCoverage = new NormalizedCoverageClass(classCoverage,\nnormalizeFileName(classCoverage.getPackageName()),\nnormalizeFileName(classCoverage.getName()));\nmodifiedClasses.add(modifiedClassCoverage);\n} else {\nmodifiedClasses.add(classCoverage);\n}" + }, + { + "comment": "why remove copyOf?", + "method_body": "protected AbstractTreeNode(List children) {\nthis.children = children;\n}", + "target_code": "this.children = children;", + "method_body_after": 
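The `normalizeFileName` entry above rewrites `org/package/...` class-file prefixes into source-root-relative paths, and `Pattern.quote` keeps regex metacharacters in user-chosen org or package names from corrupting the match. A runnable sketch of the same two-step replacement with invented sample names:

```java
import java.util.regex.Pattern;

// Sketch of the coverage-path normalization: strip "org/pkg/<anything>/" down
// to "<sourceRoot>/", and a bare "org/pkg/<anything>" down to "<sourceRoot>".
public class NormalizeFileNameSketch {
    static String normalize(String fileName, String orgName, String packageName, String sourceRoot) {
        if (!fileName.contains(orgName + "/" + packageName + "/")) {
            return fileName; // not one of our generated class paths
        }
        String org = Pattern.quote(orgName);  // escape regex metacharacters
        String pkg = Pattern.quote(packageName);
        return fileName
                .replaceAll("^" + org + "/" + pkg + "/.*/", sourceRoot + "/")
                .replaceAll("^" + org + "/" + pkg + "/.*", sourceRoot);
    }

    public static void main(String[] args) {
        // prints "my-project/foo.bal"
        System.out.println(normalize("myorg/mypkg/0.1.0/foo.bal", "myorg", "mypkg", "my-project"));
        // prints "my-project" (no nested directory to preserve)
        System.out.println(normalize("myorg/mypkg/SomeClass", "myorg", "mypkg", "my-project"));
    }
}
```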
"protected AbstractTreeNode(List children) {\nthis.children = ImmutableList.copyOf(children);\n}", + "context_before": "class AbstractTreeNode>\nimplements TreeNode {\nprotected final ObjectId id = StatementScopeIdGenerator.newObjectId();\nprotected final List children;\nprotected AbstractTreeNode(NODE_TYPE... children) {\nthis(ImmutableList.copyOf(children));\n}\n@Override\npublic NODE_TYPE child(int index) {\nreturn children.get(index);\n}\n@Override\npublic List children() {\nreturn children;\n}\npublic int arity() {\nreturn children.size();\n}\n}", + "context_after": "class AbstractTreeNode>\nimplements TreeNode {\nprotected final ObjectId id = StatementScopeIdGenerator.newObjectId();\nprotected final List children;\nprotected AbstractTreeNode(NODE_TYPE... children) {\nthis.children = ImmutableList.copyOf(children);\n}\n@Override\npublic NODE_TYPE child(int index) {\nreturn children.get(index);\n}\n@Override\npublic List children() {\nreturn children;\n}\npublic int arity() {\nreturn children.size();\n}\n}" + }, + { + "comment": "The same question about test coverage here?", + "method_body": "public void initializeState(FunctionInitializationContext context) throws Exception {\nstate = context.getOperatorStateStore().getListState(stateDescriptor);\nboolean recoveredUserContext = false;\nif (context.isRestored()) {\nLOG.info(\"{} - restoring state\", name());\nfor (State operatorState : state.get()) {\nuserContext = operatorState.getContext();\nList> recoveredTransactions =\noperatorState.getPendingCommitTransactions();\nList handledTransactions = new ArrayList<>(recoveredTransactions.size() + 1);\nfor (TransactionHolder recoveredTransaction : recoveredTransactions) {\nrecoverAndCommitInternal(recoveredTransaction);\nhandledTransactions.add(recoveredTransaction.handle);\nLOG.info(\"{} committed recovered transaction {}\", name(), recoveredTransaction);\n}\n{\nTXN transaction = operatorState.getPendingTransaction().handle;\nif (transaction != null) {\nrecoverAndAbort(transaction);\nhandledTransactions.add(transaction);\nLOG.info(\n\"{} aborted recovered transaction {}\",\nname(),\noperatorState.getPendingTransaction());\n}\n}\nif (userContext.isPresent()) {\nfinishRecoveringContext(handledTransactions);\nrecoveredUserContext = true;\n}\n}\n}\nif (!recoveredUserContext) {\nLOG.info(\"{} - no state to restore\", name());\nuserContext = initializeUserContext();\n}\nthis.pendingCommitTransactions.clear();\ncurrentTransactionHolder = beginTransactionInternal();\nLOG.debug(\"{} - started new transaction '{}'\", name(), currentTransactionHolder);\n}", + "target_code": "if (transaction != null) {", + "method_body_after": "public void initializeState(FunctionInitializationContext context) throws Exception {\nstate = context.getOperatorStateStore().getListState(stateDescriptor);\nboolean recoveredUserContext = false;\nif (context.isRestored()) {\nLOG.info(\"{} - restoring state\", name());\nfor (State operatorState : state.get()) {\nuserContext = operatorState.getContext();\nList> recoveredTransactions =\noperatorState.getPendingCommitTransactions();\nList handledTransactions = new ArrayList<>(recoveredTransactions.size() + 1);\nfor (TransactionHolder recoveredTransaction : recoveredTransactions) {\nrecoverAndCommitInternal(recoveredTransaction);\nhandledTransactions.add(recoveredTransaction.handle);\nLOG.info(\"{} committed recovered transaction {}\", name(), recoveredTransaction);\n}\n{\nTXN transaction = operatorState.getPendingTransaction().handle;\ncheckNotNull(transaction, \"Pending 
transaction is not expected to be null\");\nrecoverAndAbort(transaction);\nhandledTransactions.add(transaction);\nLOG.info(\n\"{} aborted recovered transaction {}\",\nname(),\noperatorState.getPendingTransaction());\n}\nif (userContext.isPresent()) {\nfinishRecoveringContext(handledTransactions);\nrecoveredUserContext = true;\n}\n}\n}\nif (!recoveredUserContext) {\nLOG.info(\"{} - no state to restore\", name());\nuserContext = initializeUserContext();\n}\nthis.pendingCommitTransactions.clear();\ncurrentTransactionHolder = beginTransactionInternal();\nLOG.debug(\"{} - started new transaction '{}'\", name(), currentTransactionHolder);\n}", + "context_before": "class to support two phase commit\n/** Write value within a transaction. */\nprotected abstract void invoke(TXN transaction, IN value, Context context) throws Exception;\n/**\n* Method that starts a new transaction.\n*\n* @return newly created transaction.\n*/\nprotected abstract TXN beginTransaction() throws Exception;\n/**\n* Pre commit previously created transaction. Pre commit must make all of the necessary steps to\n* prepare the transaction for a commit that might happen in the future. After this point the\n* transaction might still be aborted, but underlying implementation must ensure that commit\n* calls on already pre committed transactions will always succeed.\n*\n*
Usually implementation involves flushing the data.\n*/\nprotected abstract void preCommit(TXN transaction) throws Exception;\n/**\n* Commit a pre-committed transaction. If this method fails, Flink application will be restarted\n* and {@link TwoPhaseCommitSinkFunction", + "context_after": "class to support two phase commit\n/** Write value within a transaction. */\nprotected abstract void invoke(TXN transaction, IN value, Context context) throws Exception;\n/**\n* Method that starts a new transaction.\n*\n* @return newly created transaction.\n*/\nprotected abstract TXN beginTransaction() throws Exception;\n/**\n* Pre commit previously created transaction. Pre commit must make all of the necessary steps to\n* prepare the transaction for a commit that might happen in the future. After this point the\n* transaction might still be aborted, but underlying implementation must ensure that commit\n* calls on already pre committed transactions will always succeed.\n*\n*
Usually implementation involves flushing the data.\n*/\nprotected abstract void preCommit(TXN transaction) throws Exception;\n/**\n* Commit a pre-committed transaction. If this method fails, Flink application will be restarted\n* and {@link TwoPhaseCommitSinkFunction" + }, + { + "comment": "I think we can skip this call as part of `toJsonNode()` and only keep it for the getter. ", + "method_body": "public JsonNode toJsonNode() {\nif (this.changeFeedProcessorItemAsJsonNode == null) {\nthis.changeFeedProcessorItemAsJsonNode = constructChangeFeedProcessorItemAsJsonNode();\n}\nif (this.changeFeedMetaDataInternal == null) {\nthis.changeFeedMetaDataInternal = Utils.getSimpleObjectMapper().convertValue(this.changeFeedMetaData, ChangeFeedMetaData.class);\n}\nreturn this.changeFeedProcessorItemAsJsonNode;\n}", + "target_code": "if (this.changeFeedMetaDataInternal == null) {", + "method_body_after": "public JsonNode toJsonNode() {\nif (this.changeFeedProcessorItemAsJsonNode == null) {\nthis.changeFeedProcessorItemAsJsonNode = constructChangeFeedProcessorItemAsJsonNode();\n}\nreturn this.changeFeedProcessorItemAsJsonNode;\n}", + "context_before": "class ChangeFeedProcessorItem {\n@JsonProperty(\"current\")\n@JsonInclude(JsonInclude.Include.NON_NULL)\nprivate JsonNode current;\n@JsonProperty(\"previous\")\n@JsonInclude(JsonInclude.Include.NON_NULL)\nprivate JsonNode previous;\n@JsonProperty(\"metadata\")\n@JsonInclude(JsonInclude.Include.NON_NULL)\nprivate JsonNode changeFeedMetaData;\nprivate ChangeFeedMetaData changeFeedMetaDataInternal;\nprivate JsonNode changeFeedProcessorItemAsJsonNode;\n/**\n* Gets the change feed current item.\n*\n* @return change feed current item.\n*/\n@Beta(value = Beta.SinceVersion.V4_37_0, warningText = Beta.PREVIEW_SUBJECT_TO_CHANGE_WARNING)\npublic JsonNode getCurrent() {\nreturn current;\n}\n/**\n* Gets the change feed previous item.\n* For delete operations, previous image is always going to be provided.\n* The previous image on replace operations is not going to be exposed by default and requires account-level or container-level opt-in.\n*\n* @return change feed previous item.\n*/\n@Beta(value = Beta.SinceVersion.V4_37_0, warningText = Beta.PREVIEW_SUBJECT_TO_CHANGE_WARNING)\npublic JsonNode getPrevious() {\nreturn previous;\n}\n/**\n* Gets the change feed metadata.\n*\n* @return change feed metadata.\n*/\n@Beta(value = Beta.SinceVersion.V4_37_0, warningText = Beta.PREVIEW_SUBJECT_TO_CHANGE_WARNING)\n@JsonIgnore\npublic ChangeFeedMetaData getChangeFeedMetaData() {\nif (this.changeFeedProcessorItemAsJsonNode == null) {\nthis.changeFeedProcessorItemAsJsonNode = constructChangeFeedProcessorItemAsJsonNode();\n}\nif (this.changeFeedMetaDataInternal == null) {\nthis.changeFeedMetaDataInternal = Utils.getSimpleObjectMapper().convertValue(this.changeFeedMetaData, ChangeFeedMetaData.class);\n}\nreturn this.changeFeedMetaDataInternal;\n}\n/**\n* Helper API to convert this changeFeedProcessorItem instance to raw JsonNode format.\n*\n* @return jsonNode format of this changeFeedProcessorItem instance.\n*\n* @throws IllegalArgumentException If conversion fails due to incompatible type;\n* if so, root cause will contain underlying checked exception data binding functionality threw\n*/\n@Beta(value = Beta.SinceVersion.V4_37_0, warningText = Beta.PREVIEW_SUBJECT_TO_CHANGE_WARNING)\n@Override\npublic String toString() {\ntry {\nif (this.changeFeedProcessorItemAsJsonNode == null) {\nthis.changeFeedProcessorItemAsJsonNode = constructChangeFeedProcessorItemAsJsonNode();\n}\nreturn
Utils.getSimpleObjectMapper().writeValueAsString(this.changeFeedProcessorItemAsJsonNode);\n} catch (JsonProcessingException e) {\nthrow new IllegalStateException(\"Unable to convert object to string\", e);\n}\n}\nprivate JsonNode constructChangeFeedProcessorItemAsJsonNode() {\nObjectNode objectNode = Utils.getSimpleObjectMapper().createObjectNode();\nif (this.previous != null) {\nobjectNode.set(\"previous\", this.previous);\n}\nif (this.current != null) {\nobjectNode.set(\"current\", this.current);\n}\nif (this.changeFeedMetaData != null) {\nobjectNode.set(\"metadata\", this.changeFeedMetaData);\n}\nreturn objectNode;\n}\n}", + "context_after": "class ChangeFeedProcessorItem {\n@JsonProperty(\"current\")\n@JsonInclude(JsonInclude.Include.NON_NULL)\nprivate JsonNode current;\n@JsonProperty(\"previous\")\n@JsonInclude(JsonInclude.Include.NON_NULL)\nprivate JsonNode previous;\n@JsonProperty(\"metadata\")\n@JsonInclude(JsonInclude.Include.NON_NULL)\nprivate JsonNode changeFeedMetaData;\nprivate ChangeFeedMetaData changeFeedMetaDataInternal;\nprivate JsonNode changeFeedProcessorItemAsJsonNode;\n/**\n* Gets the change feed current item.\n*\n* @return change feed current item.\n*/\n@Beta(value = Beta.SinceVersion.V4_37_0, warningText = Beta.PREVIEW_SUBJECT_TO_CHANGE_WARNING)\npublic JsonNode getCurrent() {\nreturn current;\n}\n/**\n* Gets the change feed previous item.\n* For delete operations, previous image is always going to be provided.\n* The previous image on replace operations is not going to be exposed by default and requires account-level or container-level opt-in.\n*\n* @return change feed previous item.\n*/\n@Beta(value = Beta.SinceVersion.V4_37_0, warningText = Beta.PREVIEW_SUBJECT_TO_CHANGE_WARNING)\npublic JsonNode getPrevious() {\nreturn previous;\n}\n/**\n* Gets the change feed metadata.\n*\n* @return change feed metadata.\n*/\n@Beta(value = Beta.SinceVersion.V4_37_0, warningText = Beta.PREVIEW_SUBJECT_TO_CHANGE_WARNING)\n@JsonIgnore\npublic ChangeFeedMetaData getChangeFeedMetaData() {\nif (this.changeFeedMetaDataInternal == null) {\nthis.changeFeedMetaDataInternal = Utils.getSimpleObjectMapper().convertValue(this.changeFeedMetaData, ChangeFeedMetaData.class);\n}\nreturn this.changeFeedMetaDataInternal;\n}\n/**\n* Helper API to convert this changeFeedProcessorItem instance to raw JsonNode format.\n*\n* @return jsonNode format of this changeFeedProcessorItem instance.\n*\n* @throws IllegalArgumentException If conversion fails due to incompatible type;\n* if so, root cause will contain underlying checked exception data binding functionality threw\n*/\n@Beta(value = Beta.SinceVersion.V4_37_0, warningText = Beta.PREVIEW_SUBJECT_TO_CHANGE_WARNING)\n@Override\npublic String toString() {\ntry {\nif (this.changeFeedProcessorItemAsJsonNode == null) {\nthis.changeFeedProcessorItemAsJsonNode = constructChangeFeedProcessorItemAsJsonNode();\n}\nreturn Utils.getSimpleObjectMapper().writeValueAsString(this.changeFeedProcessorItemAsJsonNode);\n} catch (JsonProcessingException e) {\nthrow new IllegalStateException(\"Unable to convert object to string\", e);\n}\n}\nprivate JsonNode constructChangeFeedProcessorItemAsJsonNode() {\nObjectNode objectNode = Utils.getSimpleObjectMapper().createObjectNode();\nif (this.previous != null) {\nobjectNode.set(\"previous\", this.previous);\n}\nif (this.current != null) {\nobjectNode.set(\"current\", this.current);\n}\nif (this.changeFeedMetaData != null) {\nobjectNode.set(\"metadata\", this.changeFeedMetaData);\n}\nreturn objectNode;\n}\n}" + }, + { + "comment": 
"Will `createIfNonExistent` always have a set `Boolean` at this point? Otherwise, this will now print `null` instead of `false` when not set explicitly. Could use `getCreateIfNonExistent()` instead.", + "method_body": "public String toString() {\nStringBuilder string = new StringBuilder();\nstring.append(\"update of document '\");\nstring.append(docId);\nstring.append(\"': \");\nstring.append(\"create-if-non-existent=\");\nstring.append(createIfNonExistent);\nstring.append(\": \");\nstring.append(\"[\");\nfor (FieldUpdate fieldUpdate : fieldUpdates) {\nstring.append(fieldUpdate).append(\" \");\n}\nstring.append(\"]\");\nif (fieldPathUpdates.size() > 0) {\nstring.append(\" [ \");\nfor (FieldPathUpdate up : fieldPathUpdates) {\nstring.append(up.toString()).append(\" \");\n}\nstring.append(\" ]\");\n}\nreturn string.toString();\n}", + "target_code": "string.append(createIfNonExistent);", + "method_body_after": "public String toString() {\nStringBuilder string = new StringBuilder();\nstring.append(\"update of document '\");\nstring.append(docId);\nstring.append(\"': \");\nstring.append(\"create-if-non-existent=\");\nstring.append(createIfNonExistent);\nstring.append(\": \");\nstring.append(\"[\");\nfor (FieldUpdate fieldUpdate : fieldUpdates) {\nstring.append(fieldUpdate).append(\" \");\n}\nstring.append(\"]\");\nif (fieldPathUpdates.size() > 0) {\nstring.append(\" [ \");\nfor (FieldPathUpdate up : fieldPathUpdates) {\nstring.append(up.toString()).append(\" \");\n}\nstring.append(\" ]\");\n}\nreturn string.toString();\n}", + "context_before": "class DocumentUpdate extends DocumentOperation implements Iterable {\npublic static final int CLASSID = 0x1000 + 6;\nprivate DocumentId docId;\nprivate final List fieldUpdates;\nprivate final List fieldPathUpdates;\nprivate DocumentType documentType;\nprivate Boolean createIfNonExistent;\n/**\n* Creates a DocumentUpdate.\n*\n* @param docId the ID of the update\n* @param docType the document type that this update is valid for\n*/\npublic DocumentUpdate(DocumentType docType, DocumentId docId) {\nthis.docId = docId;\nthis.documentType = docType;\nthis.fieldUpdates = new ArrayList<>();\nthis.fieldPathUpdates = new ArrayList<>();\n}\n/**\n* Creates a new document update using a reader\n*/\npublic DocumentUpdate(DocumentUpdateReader reader) {\ndocId = null;\ndocumentType = null;\nfieldUpdates = new ArrayList<>();\nfieldPathUpdates = new ArrayList<>();\nreader.read(this);\n}\n/**\n* Creates a DocumentUpdate.\n*\n* @param docId the ID of the update\n* @param docType the document type that this update is valid for\n*/\npublic DocumentUpdate(DocumentType docType, String docId) {\nthis(docType, new DocumentId(docId));\n}\npublic DocumentId getId() {\nreturn docId;\n}\n/**\n* Sets the document id of the document to update.\n* Use only while deserializing - changing the document id after creation has undefined behaviour.\n*/\npublic void setId(DocumentId id) {\ndocId = id;\n}\nprivate void verifyType(Document doc) {\nif (!documentType.equals(doc.getDataType())) {\nthrow new IllegalArgumentException(\n\"Document \" + doc.getId() + \" with type \" + doc.getDataType() + \" must have same type as update, which is type \" + documentType);\n}\n}\n/**\n* Applies this document update.\n*\n* @param doc the document to apply the update to\n* @return a reference to itself\n* @throws IllegalArgumentException if the document does not have the same document type as this update\n*/\npublic DocumentUpdate applyTo(Document doc) {\nverifyType(doc);\nfor (FieldUpdate fieldUpdate : 
fieldUpdates) {\nfieldUpdate.applyTo(doc);\n}\nfor (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {\nfieldPathUpdate.applyTo(doc);\n}\nreturn this;\n}\n/**\n* Prune away any field update that will not modify any field in the document.\n* @param doc document to check against\n* @return a reference to itself\n* @throws IllegalArgumentException if the document does not have the same document type as this update\n*/\npublic DocumentUpdate prune(Document doc) {\nverifyType(doc);\nfor (Iterator iter = fieldUpdates.iterator(); iter.hasNext();) {\nFieldUpdate update = iter.next();\nif (!update.isEmpty()) {\nValueUpdate last = update.getValueUpdate(update.size() - 1);\nif (last instanceof AssignValueUpdate) {\nFieldValue currentValue = doc.getFieldValue(update.getField());\nif ((currentValue != null) && currentValue.equals(last.getValue())) {\niter.remove();\n}\n} else if (last instanceof ClearValueUpdate) {\nFieldValue currentValue = doc.getFieldValue(update.getField());\nif (currentValue == null) {\niter.remove();\n} else {\nFieldValue copy = currentValue.clone();\ncopy.clear();\nif (currentValue.equals(copy)) {\niter.remove();\n}\n}\n}\n}\n}\nreturn this;\n}\n/**\n* Get an unmodifiable list of all field updates that this document update specifies.\n*\n* @return a list of all FieldUpdates in this DocumentUpdate\n* @deprecated Use fieldUpdates() instead.\n*/\n@Deprecated\npublic List getFieldUpdates() {\nreturn Collections.unmodifiableList(fieldUpdates);\n}\n/**\n* Get an unmodifiable collection of all field updates that this document update specifies.\n*\n* @return a collection of all FieldUpdates in this DocumentUpdate\n*/\npublic Collection fieldUpdates() {\nreturn Collections.unmodifiableCollection(fieldUpdates);\n}\n/**\n* Get an unmodifiable list of all field path updates this document update specifies.\n*\n* @return Returns a list of all field path updates in this document update.\n* @deprecated Use fieldPathUpdates() instead.\n*/\n@Deprecated\npublic List getFieldPathUpdates() {\nreturn Collections.unmodifiableList(fieldPathUpdates);\n}\n/**\n* Get an unmodifiable collection of all field path updates that this document update specifies.\n*\n* @return a collection of all FieldPathUpdates in this DocumentUpdate\n*/\npublic Collection fieldPathUpdates() {\nreturn Collections.unmodifiableCollection(fieldPathUpdates);\n}\n/** Returns the type of the document this updates\n*\n* @return The documentype of the document\n*/\npublic DocumentType getDocumentType() {\nreturn documentType;\n}\n/**\n* Sets the document type. 
Use only while deserializing - changing the document type after creation\n* has undefined behaviour.\n*/\npublic void setDocumentType(DocumentType type) {\ndocumentType = type;\n}\n/**\n* Get the field update at the specified index in the list of field updates.\n*\n* @param index the index of the FieldUpdate to return\n* @return the FieldUpdate at the specified index\n* @throws IndexOutOfBoundsException if index is out of range\n* @deprecated use getFieldUpdate(Field field) instead.\n*/\n@Deprecated\npublic FieldUpdate getFieldUpdate(int index) {\nreturn fieldUpdates.get(index);\n}\n/**\n* Replaces the field update at the specified index in the list of field updates.\n*\n* @param index index of the FieldUpdate to replace\n* @param upd the FieldUpdate to be stored at the specified position\n* @return the FieldUpdate previously at the specified position\n* @throws IndexOutOfBoundsException if index is out of range\n* @deprecated Use removeFieldUpdate/addFieldUpdate instead\n*/\n@Deprecated\npublic FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {\nFieldUpdate old = fieldUpdates.get(index);\nfieldUpdates.set(index, upd);\nreturn old;\n}\n/**\n* Returns the update for a field\n*\n* @param field the field to return the update of\n* @return the update for the field, or null if that field has no update in this\n*/\npublic FieldUpdate getFieldUpdate(Field field) {\nreturn getFieldUpdateById(field.getId());\n}\n/** Removes all field updates from the list for field updates. */\npublic void clearFieldUpdates() {\nfieldUpdates.clear();\n}\n/**\n* Returns the update for a field name\n*\n* @param fieldName the field name to return the update of\n* @return the update for the field, or null if that field has no update in this\n*/\npublic FieldUpdate getFieldUpdate(String fieldName) {\nField field = documentType.getField(fieldName);\nreturn field != null ? getFieldUpdate(field) : null;\n}\nprivate FieldUpdate getFieldUpdateById(Integer fieldId) {\nfor (FieldUpdate fieldUpdate : fieldUpdates) {\nif (fieldUpdate.getField().getId() == fieldId) {\nreturn fieldUpdate;\n}\n}\nreturn null;\n}\n/**\n* Assigns the field updates of this document update.\n* This document update receives ownership of the list - it can not be subsequently used\n* by the caller. Also note that there no assumptions can be made on the order of items\n* after this call. They might have been joined if for the same field or reordered.\n*\n* @param fieldUpdates the new list of updates of this\n* @throws NullPointerException if the argument passed is null\n*/\npublic void setFieldUpdates(Collection fieldUpdates) {\nif (fieldUpdates == null) {\nthrow new NullPointerException(\"The field updates of a document update can not be null\");\n}\nclearFieldUpdates();\naddFieldUpdates(fieldUpdates);\n}\npublic void addFieldUpdates(Collection fieldUpdates) {\nfor (FieldUpdate fieldUpdate : fieldUpdates) {\naddFieldUpdate(fieldUpdate);\n}\n}\n/**\n* Get the number of field updates in this document update.\n*\n* @return the size of the List of FieldUpdates\n*/\npublic int size() {\nreturn fieldUpdates.size();\n}\n/**\n* Adds the given {@link FieldUpdate} to this DocumentUpdate. 
If this DocumentUpdate already contains a FieldUpdate\n* for the named field, the content of the given FieldUpdate is added to the existing one.\n*\n* @param update The FieldUpdate to add to this DocumentUpdate.\n* @return This, to allow chaining.\n* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding\n* field.\n*/\npublic DocumentUpdate addFieldUpdate(FieldUpdate update) {\nint fieldId = update.getField().getId();\nif (documentType.getField(fieldId) == null) {\nthrow new IllegalArgumentException(\"Document type '\" + documentType.getName() + \"' does not have field '\" + update.getField().getName() + \"'.\");\n}\nFieldUpdate prevUpdate = getFieldUpdateById(fieldId);\nif (prevUpdate != update) {\nif (prevUpdate != null) {\nprevUpdate.addAll(update);\n} else {\nfieldUpdates.add(update);\n}\n}\nreturn this;\n}\n/**\n* Adds a field path update to perform on the document.\n*\n* @return a reference to itself.\n*/\npublic DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {\nfieldPathUpdates.add(fieldPathUpdate);\nreturn this;\n}\n/**\n* Adds all the field- and field path updates of the given document update to this. If the given update refers to a\n* different document or document type than this, this method throws an exception.\n*\n* @param update The update whose content to add to this.\n* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate\n* does not match the content of this.\n*/\npublic void addAll(DocumentUpdate update) {\nif (update == null) {\nreturn;\n}\nif (!docId.equals(update.docId)) {\nthrow new IllegalArgumentException(\"Expected \" + docId + \", got \" + update.docId + \".\");\n}\nif (!documentType.equals(update.documentType)) {\nthrow new IllegalArgumentException(\"Expected \" + documentType + \", got \" + update.documentType + \".\");\n}\naddFieldUpdates(update.fieldUpdates());\nfor (FieldPathUpdate pathUpd : update.fieldPathUpdates) {\naddFieldPathUpdate(pathUpd);\n}\n}\n/**\n* Removes the field update at the specified position in the list of field updates.\n*\n* @param index the index of the FieldUpdate to remove\n* @return the FieldUpdate previously at the specified position\n* @throws IndexOutOfBoundsException if index is out of range\n* @deprecated use removeFieldUpdate(Field field) instead.\n*/\n@Deprecated\npublic FieldUpdate removeFieldUpdate(int index) {\nFieldUpdate prev = getFieldUpdate(index);\nfieldUpdates.remove(index);\nreturn removeFieldUpdate(prev.getField());\n}\npublic FieldUpdate removeFieldUpdate(Field field) {\nfor (Iterator it = fieldUpdates.iterator(); it.hasNext();) {\nFieldUpdate fieldUpdate = it.next();\nif (fieldUpdate.getField().equals(field)) {\nit.remove();\nreturn fieldUpdate;\n}\n}\nreturn null; }\npublic FieldUpdate removeFieldUpdate(String fieldName) {\nField field = documentType.getField(fieldName);\nreturn field != null ? removeFieldUpdate(field) : null;\n}\n/**\n* Returns the document type of this document update.\n*\n* @return the document type of this document update\n*/\npublic DocumentType getType() {\nreturn documentType;\n}\npublic final void serialize(GrowableByteBuffer buf) {\nserialize(DocumentSerializerFactory.create42(buf));\n}\npublic void serialize(DocumentUpdateWriter data) {\ndata.write(this);\n}\n@Override\npublic boolean equals(Object o) {\nif (this == o) return true;\nif (!(o instanceof DocumentUpdate)) return false;\nDocumentUpdate that = (DocumentUpdate) o;\nif (docId != null ? 
!docId.equals(that.docId) : that.docId != null) return false;\nif (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;\nif (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)\nreturn false;\nif (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;\nif (this.getCreateIfNonExistent() != ((DocumentUpdate) o).getCreateIfNonExistent()) return false;\nreturn true;\n}\n@Override\npublic int hashCode() {\nint result = docId != null ? docId.hashCode() : 0;\nresult = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);\nresult = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);\nresult = 31 * result + (documentType != null ? documentType.hashCode() : 0);\nreturn result;\n}\n@Override\n@Override\npublic Iterator iterator() {\nreturn fieldPathUpdates.iterator();\n}\n/**\n* Returns whether or not this field update contains any field- or field path updates.\n*\n* @return True if this update is empty.\n*/\npublic boolean isEmpty() {\nreturn fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();\n}\n/**\n* Sets whether this update should create the document it updates if that document does not exist.\n* In this case an empty document is created before the update is applied.\n*\n* @since 5.17\n* @param value Whether the document it updates should be created.\n*/\npublic void setCreateIfNonExistent(boolean value) {\ncreateIfNonExistent = value;\n}\n/**\n* Gets whether this update should create the document it updates if that document does not exist.\n*\n* @since 5.17\n* @return Whether the document it updates should be created.\n*/\npublic boolean getCreateIfNonExistent() {\nreturn createIfNonExistent != null && createIfNonExistent;\n}\npublic Optional getOptionalCreateIfNonExistent() {\nreturn Optional.ofNullable(createIfNonExistent);\n}\n}", + "context_after": "class DocumentUpdate extends DocumentOperation implements Iterable {\npublic static final int CLASSID = 0x1000 + 6;\nprivate DocumentId docId;\nprivate final List fieldUpdates;\nprivate final List fieldPathUpdates;\nprivate DocumentType documentType;\nprivate Boolean createIfNonExistent;\n/**\n* Creates a DocumentUpdate.\n*\n* @param docId the ID of the update\n* @param docType the document type that this update is valid for\n*/\npublic DocumentUpdate(DocumentType docType, DocumentId docId) {\nthis.docId = docId;\nthis.documentType = docType;\nthis.fieldUpdates = new ArrayList<>();\nthis.fieldPathUpdates = new ArrayList<>();\n}\n/**\n* Creates a new document update using a reader\n*/\npublic DocumentUpdate(DocumentUpdateReader reader) {\ndocId = null;\ndocumentType = null;\nfieldUpdates = new ArrayList<>();\nfieldPathUpdates = new ArrayList<>();\nreader.read(this);\n}\n/**\n* Creates a DocumentUpdate.\n*\n* @param docId the ID of the update\n* @param docType the document type that this update is valid for\n*/\npublic DocumentUpdate(DocumentType docType, String docId) {\nthis(docType, new DocumentId(docId));\n}\npublic DocumentId getId() {\nreturn docId;\n}\n/**\n* Sets the document id of the document to update.\n* Use only while deserializing - changing the document id after creation has undefined behaviour.\n*/\npublic void setId(DocumentId id) {\ndocId = id;\n}\nprivate void verifyType(Document doc) {\nif (!documentType.equals(doc.getDataType())) {\nthrow new IllegalArgumentException(\n\"Document \" + doc.getId() + \" with type \" + 
doc.getDataType() + \" must have same type as update, which is type \" + documentType);\n}\n}\n/**\n* Applies this document update.\n*\n* @param doc the document to apply the update to\n* @return a reference to itself\n* @throws IllegalArgumentException if the document does not have the same document type as this update\n*/\npublic DocumentUpdate applyTo(Document doc) {\nverifyType(doc);\nfor (FieldUpdate fieldUpdate : fieldUpdates) {\nfieldUpdate.applyTo(doc);\n}\nfor (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {\nfieldPathUpdate.applyTo(doc);\n}\nreturn this;\n}\n/**\n* Prune away any field update that will not modify any field in the document.\n* @param doc document to check against\n* @return a reference to itself\n* @throws IllegalArgumentException if the document does not have the same document type as this update\n*/\npublic DocumentUpdate prune(Document doc) {\nverifyType(doc);\nfor (Iterator iter = fieldUpdates.iterator(); iter.hasNext();) {\nFieldUpdate update = iter.next();\nif (!update.isEmpty()) {\nValueUpdate last = update.getValueUpdate(update.size() - 1);\nif (last instanceof AssignValueUpdate) {\nFieldValue currentValue = doc.getFieldValue(update.getField());\nif ((currentValue != null) && currentValue.equals(last.getValue())) {\niter.remove();\n}\n} else if (last instanceof ClearValueUpdate) {\nFieldValue currentValue = doc.getFieldValue(update.getField());\nif (currentValue == null) {\niter.remove();\n} else {\nFieldValue copy = currentValue.clone();\ncopy.clear();\nif (currentValue.equals(copy)) {\niter.remove();\n}\n}\n}\n}\n}\nreturn this;\n}\n/**\n* Get an unmodifiable list of all field updates that this document update specifies.\n*\n* @return a list of all FieldUpdates in this DocumentUpdate\n* @deprecated Use fieldUpdates() instead.\n*/\n@Deprecated\npublic List getFieldUpdates() {\nreturn Collections.unmodifiableList(fieldUpdates);\n}\n/**\n* Get an unmodifiable collection of all field updates that this document update specifies.\n*\n* @return a collection of all FieldUpdates in this DocumentUpdate\n*/\npublic Collection fieldUpdates() {\nreturn Collections.unmodifiableCollection(fieldUpdates);\n}\n/**\n* Get an unmodifiable list of all field path updates this document update specifies.\n*\n* @return Returns a list of all field path updates in this document update.\n* @deprecated Use fieldPathUpdates() instead.\n*/\n@Deprecated\npublic List getFieldPathUpdates() {\nreturn Collections.unmodifiableList(fieldPathUpdates);\n}\n/**\n* Get an unmodifiable collection of all field path updates that this document update specifies.\n*\n* @return a collection of all FieldPathUpdates in this DocumentUpdate\n*/\npublic Collection fieldPathUpdates() {\nreturn Collections.unmodifiableCollection(fieldPathUpdates);\n}\n/** Returns the type of the document this updates\n*\n* @return The documentype of the document\n*/\npublic DocumentType getDocumentType() {\nreturn documentType;\n}\n/**\n* Sets the document type. 
Use only while deserializing - changing the document type after creation\n* has undefined behaviour.\n*/\npublic void setDocumentType(DocumentType type) {\ndocumentType = type;\n}\n/**\n* Get the field update at the specified index in the list of field updates.\n*\n* @param index the index of the FieldUpdate to return\n* @return the FieldUpdate at the specified index\n* @throws IndexOutOfBoundsException if index is out of range\n* @deprecated use getFieldUpdate(Field field) instead.\n*/\n@Deprecated\npublic FieldUpdate getFieldUpdate(int index) {\nreturn fieldUpdates.get(index);\n}\n/**\n* Replaces the field update at the specified index in the list of field updates.\n*\n* @param index index of the FieldUpdate to replace\n* @param upd the FieldUpdate to be stored at the specified position\n* @return the FieldUpdate previously at the specified position\n* @throws IndexOutOfBoundsException if index is out of range\n* @deprecated Use removeFieldUpdate/addFieldUpdate instead\n*/\n@Deprecated\npublic FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {\nFieldUpdate old = fieldUpdates.get(index);\nfieldUpdates.set(index, upd);\nreturn old;\n}\n/**\n* Returns the update for a field\n*\n* @param field the field to return the update of\n* @return the update for the field, or null if that field has no update in this\n*/\npublic FieldUpdate getFieldUpdate(Field field) {\nreturn getFieldUpdateById(field.getId());\n}\n/** Removes all field updates from the list for field updates. */\npublic void clearFieldUpdates() {\nfieldUpdates.clear();\n}\n/**\n* Returns the update for a field name\n*\n* @param fieldName the field name to return the update of\n* @return the update for the field, or null if that field has no update in this\n*/\npublic FieldUpdate getFieldUpdate(String fieldName) {\nField field = documentType.getField(fieldName);\nreturn field != null ? getFieldUpdate(field) : null;\n}\nprivate FieldUpdate getFieldUpdateById(Integer fieldId) {\nfor (FieldUpdate fieldUpdate : fieldUpdates) {\nif (fieldUpdate.getField().getId() == fieldId) {\nreturn fieldUpdate;\n}\n}\nreturn null;\n}\n/**\n* Assigns the field updates of this document update.\n* This document update receives ownership of the list - it can not be subsequently used\n* by the caller. Also note that there no assumptions can be made on the order of items\n* after this call. They might have been joined if for the same field or reordered.\n*\n* @param fieldUpdates the new list of updates of this\n* @throws NullPointerException if the argument passed is null\n*/\npublic void setFieldUpdates(Collection fieldUpdates) {\nif (fieldUpdates == null) {\nthrow new NullPointerException(\"The field updates of a document update can not be null\");\n}\nclearFieldUpdates();\naddFieldUpdates(fieldUpdates);\n}\npublic void addFieldUpdates(Collection fieldUpdates) {\nfor (FieldUpdate fieldUpdate : fieldUpdates) {\naddFieldUpdate(fieldUpdate);\n}\n}\n/**\n* Get the number of field updates in this document update.\n*\n* @return the size of the List of FieldUpdates\n*/\npublic int size() {\nreturn fieldUpdates.size();\n}\n/**\n* Adds the given {@link FieldUpdate} to this DocumentUpdate. 
If this DocumentUpdate already contains a FieldUpdate\n* for the named field, the content of the given FieldUpdate is added to the existing one.\n*\n* @param update The FieldUpdate to add to this DocumentUpdate.\n* @return This, to allow chaining.\n* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding\n* field.\n*/\npublic DocumentUpdate addFieldUpdate(FieldUpdate update) {\nint fieldId = update.getField().getId();\nif (documentType.getField(fieldId) == null) {\nthrow new IllegalArgumentException(\"Document type '\" + documentType.getName() + \"' does not have field '\" + update.getField().getName() + \"'.\");\n}\nFieldUpdate prevUpdate = getFieldUpdateById(fieldId);\nif (prevUpdate != update) {\nif (prevUpdate != null) {\nprevUpdate.addAll(update);\n} else {\nfieldUpdates.add(update);\n}\n}\nreturn this;\n}\n/**\n* Adds a field path update to perform on the document.\n*\n* @return a reference to itself.\n*/\npublic DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {\nfieldPathUpdates.add(fieldPathUpdate);\nreturn this;\n}\n/**\n* Adds all the field- and field path updates of the given document update to this. If the given update refers to a\n* different document or document type than this, this method throws an exception.\n*\n* @param update The update whose content to add to this.\n* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate\n* does not match the content of this.\n*/\npublic void addAll(DocumentUpdate update) {\nif (update == null) {\nreturn;\n}\nif (!docId.equals(update.docId)) {\nthrow new IllegalArgumentException(\"Expected \" + docId + \", got \" + update.docId + \".\");\n}\nif (!documentType.equals(update.documentType)) {\nthrow new IllegalArgumentException(\"Expected \" + documentType + \", got \" + update.documentType + \".\");\n}\naddFieldUpdates(update.fieldUpdates());\nfor (FieldPathUpdate pathUpd : update.fieldPathUpdates) {\naddFieldPathUpdate(pathUpd);\n}\n}\n/**\n* Removes the field update at the specified position in the list of field updates.\n*\n* @param index the index of the FieldUpdate to remove\n* @return the FieldUpdate previously at the specified position\n* @throws IndexOutOfBoundsException if index is out of range\n* @deprecated use removeFieldUpdate(Field field) instead.\n*/\n@Deprecated\npublic FieldUpdate removeFieldUpdate(int index) {\nFieldUpdate prev = getFieldUpdate(index);\nfieldUpdates.remove(index);\nreturn removeFieldUpdate(prev.getField());\n}\npublic FieldUpdate removeFieldUpdate(Field field) {\nfor (Iterator it = fieldUpdates.iterator(); it.hasNext();) {\nFieldUpdate fieldUpdate = it.next();\nif (fieldUpdate.getField().equals(field)) {\nit.remove();\nreturn fieldUpdate;\n}\n}\nreturn null; }\npublic FieldUpdate removeFieldUpdate(String fieldName) {\nField field = documentType.getField(fieldName);\nreturn field != null ? removeFieldUpdate(field) : null;\n}\n/**\n* Returns the document type of this document update.\n*\n* @return the document type of this document update\n*/\npublic DocumentType getType() {\nreturn documentType;\n}\npublic final void serialize(GrowableByteBuffer buf) {\nserialize(DocumentSerializerFactory.create42(buf));\n}\npublic void serialize(DocumentUpdateWriter data) {\ndata.write(this);\n}\n@Override\npublic boolean equals(Object o) {\nif (this == o) return true;\nif (!(o instanceof DocumentUpdate)) return false;\nDocumentUpdate that = (DocumentUpdate) o;\nif (docId != null ? 
!docId.equals(that.docId) : that.docId != null) return false;\nif (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;\nif (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)\nreturn false;\nif (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;\nif (this.getCreateIfNonExistent() != ((DocumentUpdate) o).getCreateIfNonExistent()) return false;\nreturn true;\n}\n@Override\npublic int hashCode() {\nint result = docId != null ? docId.hashCode() : 0;\nresult = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);\nresult = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);\nresult = 31 * result + (documentType != null ? documentType.hashCode() : 0);\nreturn result;\n}\n@Override\n@Override\npublic Iterator iterator() {\nreturn fieldPathUpdates.iterator();\n}\n/**\n* Returns whether or not this field update contains any field- or field path updates.\n*\n* @return True if this update is empty.\n*/\npublic boolean isEmpty() {\nreturn fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();\n}\n/**\n* Sets whether this update should create the document it updates if that document does not exist.\n* In this case an empty document is created before the update is applied.\n*\n* @since 5.17\n* @param value Whether the document it updates should be created.\n*/\npublic void setCreateIfNonExistent(boolean value) {\ncreateIfNonExistent = value;\n}\n/**\n* Gets whether this update should create the document it updates if that document does not exist.\n*\n* @since 5.17\n* @return Whether the document it updates should be created.\n*/\npublic boolean getCreateIfNonExistent() {\nreturn createIfNonExistent != null && createIfNonExistent;\n}\npublic Optional getOptionalCreateIfNonExistent() {\nreturn Optional.ofNullable(createIfNonExistent);\n}\n}" + }, + { + "comment": "```suggestion this.trustStorePassword = Optional.ofNullable(trustStorePassword); ```", + "method_body": "public void setTrustStorePassword(String trustStorePassword) {\nthis.trustStorePassword = Optional.of(trustStorePassword);\n}", + "target_code": "this.trustStorePassword = Optional.of(trustStorePassword);", + "method_body_after": "public void setTrustStorePassword(String trustStorePassword) {\nthis.trustStorePassword = Optional.ofNullable(trustStorePassword);\n}", + "context_before": "class CertificateChain {\n/**\n* Common name of the leaf certificate. It must be set if the {@link\n* this certificate imported.\n*\n*/\n@ConfigItem\npublic Optional leafCertificateName = Optional.empty();\n/**\n* Truststore file which keeps thumbprints of the trusted certificates.\n*/\n@ConfigItem\npublic Optional trustStoreFile = Optional.empty();\n/**\n* A parameter to specify the password of the truststore file if it is configured with {@link\n*/\n@ConfigItem\npublic Optional trustStorePassword = Optional.empty();\n/**\n* A parameter to specify the alias of the truststore certificate.\n*/\n@ConfigItem\npublic Optional trustStoreCertAlias = Optional.empty();\n/**\n* An optional parameter to specify type of the truststore file. 
If not given, the type is automatically\n* detected\n* based on the file name.\n*/\n@ConfigItem\npublic Optional trustStoreFileType = Optional.empty();\npublic Optional getTrustStoreFile() {\nreturn trustStoreFile;\n}\npublic void setTrustStoreFile(Path trustStoreFile) {\nthis.trustStoreFile = Optional.of(trustStoreFile);\n}\npublic Optional getTrustStoreCertAlias() {\nreturn trustStoreCertAlias;\n}\npublic void setTrustStoreCertAlias(String trustStoreCertAlias) {\nthis.trustStoreCertAlias = Optional.of(trustStoreCertAlias);\n}\npublic Optional getTrustStoreFileType() {\nreturn trustStoreFileType;\n}\npublic void setTrustStoreFileType(Optional trustStoreFileType) {\nthis.trustStoreFileType = trustStoreFileType;\n}\npublic Optional getLeafCertificateName() {\nreturn leafCertificateName;\n}\npublic void setLeafCertificateName(String leafCertificateName) {\nthis.leafCertificateName = Optional.of(leafCertificateName);\n}\npublic Optional getTrustStorePassword() {\nreturn trustStorePassword;\n}\n}", + "context_after": "class CertificateChain {\n/**\n* Common name of the leaf certificate. It must be set if the {@link\n* this certificate imported.\n*\n*/\n@ConfigItem\npublic Optional leafCertificateName = Optional.empty();\n/**\n* Truststore file which keeps thumbprints of the trusted certificates.\n*/\n@ConfigItem\npublic Optional trustStoreFile = Optional.empty();\n/**\n* A parameter to specify the password of the truststore file if it is configured with {@link\n*/\n@ConfigItem\npublic Optional trustStorePassword = Optional.empty();\n/**\n* A parameter to specify the alias of the truststore certificate.\n*/\n@ConfigItem\npublic Optional trustStoreCertAlias = Optional.empty();\n/**\n* An optional parameter to specify type of the truststore file. If not given, the type is automatically\n* detected\n* based on the file name.\n*/\n@ConfigItem\npublic Optional trustStoreFileType = Optional.empty();\npublic Optional getTrustStoreFile() {\nreturn trustStoreFile;\n}\npublic void setTrustStoreFile(Path trustStoreFile) {\nthis.trustStoreFile = Optional.of(trustStoreFile);\n}\npublic Optional getTrustStoreCertAlias() {\nreturn trustStoreCertAlias;\n}\npublic void setTrustStoreCertAlias(String trustStoreCertAlias) {\nthis.trustStoreCertAlias = Optional.of(trustStoreCertAlias);\n}\npublic Optional getTrustStoreFileType() {\nreturn trustStoreFileType;\n}\npublic void setTrustStoreFileType(Optional trustStoreFileType) {\nthis.trustStoreFileType = trustStoreFileType;\n}\npublic Optional getLeafCertificateName() {\nreturn leafCertificateName;\n}\npublic void setLeafCertificateName(String leafCertificateName) {\nthis.leafCertificateName = Optional.of(leafCertificateName);\n}\npublic Optional getTrustStorePassword() {\nreturn trustStorePassword;\n}\n}" + }, + { + "comment": "Disabled ones do not need to be generated.", + "method_body": "public void toHashTree(HashTree tree, MsLoopController element, ParameterConfig config) {\nfinal HashTree groupTree = controller(tree, element);\nif (groupTree == null) {\nreturn;\n}\nConstantTimer constantTimer = getConstantTimer(element);\nif (constantTimer != null) {\ngroupTree.add(constantTimer);\n}\nparseChild(groupTree, element, config);\n}", + "target_code": "", + "method_body_after": "public void toHashTree(HashTree tree, MsLoopController element, ParameterConfig config) {\nif (BooleanUtils.isFalse(element.getEnable())) {\nLogUtils.info(\"MsLoopController is disabled\");\nreturn;\n}\nfinal HashTree groupTree = controller(tree, element);\nif
(groupTree == null) {\nreturn;\n}\nConstantTimer constantTimer = getConstantTimer(element);\nif (constantTimer != null) {\ngroupTree.add(constantTimer);\n}\nparseChild(groupTree, element, config);\n}", + "context_before": "class MsLoopControllerConverter extends AbstractJmeterElementConverter {\n@Override\nprivate HashTree controller(HashTree tree, MsLoopController element) {\nif (StringUtils.equals(element.getLoopType(), LoopType.WHILE.name()) && element.getWhileController() != null) {\nRunTime runTime = new RunTime();\nrunTime.setEnabled(true);\nrunTime.setProperty(TestElement.TEST_CLASS, RunTime.class.getName());\nrunTime.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"RunTimeGui\"));\nlong timeout = element.getWhileController().getTimeout() / 1000;\nif (timeout < 1) {\ntimeout = 1;\n}\nrunTime.setRuntime(timeout);\nString condition;\nif (StringUtils.equals(WhileConditionType.CONDITION.name(), element.getWhileController().getConditionType())) {\ncondition = element.getWhileController().getMsWhileVariable().getConditionValue();\n} else {\ncondition = element.getWhileController().getMsWhileScript().getScriptValue();\n}\nString ifCondition = \"${__jexl3(\" + condition + \")}\";\nString whileCondition = \"${__jexl3(\" + condition + \" && \\\"${\" + element.getCurrentTime() + \"}\\\" !=\\\"stop\\\")}\";\nHashTree ifHashTree = tree.add(ifController(ifCondition, element.getEnable()));\naddPreProc(ifHashTree, element);\nHashTree hashTree = ifHashTree.add(initWhileController(element, whileCondition));\nJSR223PreProcessor jsr223PreProcessor = new JSR223PreProcessor();\njsr223PreProcessor.setName(\"While \u5faa\u73af\u63a7\u5236\u5668\u8d85\u65f6\u5904\u7406\u811a\u672c\");\njsr223PreProcessor.setProperty(TestElement.TEST_CLASS, JSR223Sampler.class.getName());\njsr223PreProcessor.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"TestBeanGUI\"));\njsr223PreProcessor.setProperty(\"scriptLanguage\", ScriptLanguageType.BEANSHELL.name().toLowerCase());\nScriptFilter.verify(ScriptLanguageType.BEANSHELL.name().toLowerCase(), element.getName(), script(element));\njsr223PreProcessor.setProperty(\"script\", script(element));\nhashTree.add(jsr223PreProcessor);\nreturn hashTree;\n}\nif (StringUtils.equals(element.getLoopType(), LoopType.FOREACH.name()) && element.getForEachController() != null) {\nreturn tree.add(initForeachController(element));\n}\nif (StringUtils.equals(element.getLoopType(), LoopType.LOOP_COUNT.name()) && element.getMsCountController() != null) {\nString ifCondition = StringUtils.join(\"${__jexl3(\", element.getMsCountController().getLoops(), \" > 0 \", \")}\");\nHashTree ifHashTree = tree.add(ifController(ifCondition, element.getEnable()));\nreturn ifHashTree.add(initLoopController(element));\n}\nreturn null;\n}\nprivate String script(MsLoopController element) {\nreturn StringUtils.LF + \"import java.util.*;\\n\" + \"import java.text.SimpleDateFormat;\\n\" + \"import org.apache.jmeter.threads.JMeterContextService;\\n\" + StringUtils.LF + \"\n}\nprivate void addPreProc(HashTree hashTree, MsLoopController element) {\nJSR223Sampler sampler = new JSR223Sampler();\nsampler.setName(\"MS_CLEAR_LOOPS_VAR_\" + element.getCurrentTime());\nsampler.setProperty(TestElement.TEST_CLASS, JSR223Sampler.class.getName());\nsampler.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"TestBeanGUI\"));\nsampler.setProperty(\"scriptLanguage\", ScriptLanguageType.BEANSHELL.name().toLowerCase());\nScriptFilter.verify(ScriptLanguageType.BEANSHELL.name().toLowerCase(), 
element.getName(), script(element));\nsampler.setProperty(\"script\", \"vars.put(\\\"\" + element.getCurrentTime() + \"\\\", null);\");\nhashTree.add(sampler);\n}\nprivate LoopController initLoopController(MsLoopController element) {\nLoopController loopController = new LoopController();\nloopController.setEnabled(element.getEnable());\nloopController.setName(StringUtils.isNotBlank(element.getName()) ? element.getName() : \"Loop Controller\");\nloopController.setProperty(TestElement.TEST_CLASS, LoopController.class.getName());\nloopController.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"LoopControlPanel\"));\nloopController.setLoops(element.getMsCountController().getLoops());\nif (StringUtils.isNotEmpty(element.getMsCountController().getLoops())) {\nloopController.setContinueForever(true);\n}\nreturn loopController;\n}\nprivate IfController ifController(String condition, boolean enable) {\nIfController ifController = new IfController();\nifController.setEnabled(enable);\nifController.setName(\"while ifController\");\nifController.setProperty(TestElement.TEST_CLASS, IfController.class.getName());\nifController.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"IfControllerPanel\"));\nifController.setCondition(condition);\nifController.setEvaluateAll(false);\nifController.setUseExpression(true);\nreturn ifController;\n}\nprivate WhileController initWhileController(MsLoopController element, String condition) {\nWhileController controller = new WhileController();\ncontroller.setEnabled(element.getEnable());\ncontroller.setName(StringUtils.isNotBlank(element.getName()) ? element.getName() : \"While Controller\");\ncontroller.setProperty(TestElement.TEST_CLASS, WhileController.class.getName());\ncontroller.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"WhileControllerGui\"));\ncontroller.setCondition(condition);\nreturn controller;\n}\nprivate ForeachController initForeachController(MsLoopController element) {\nForeachController controller = new ForeachController();\ncontroller.setEnabled(element.getEnable());\ncontroller.setName(StringUtils.isNotBlank(element.getName()) ? 
element.getName() : \"Foreach Controller\");\ncontroller.setProperty(TestElement.TEST_CLASS, ForeachController.class.getName());\ncontroller.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"ForeachControlPanel\"));\ncontroller.setInputVal(element.getForEachController().getVariable());\ncontroller.setReturnVal(element.getForEachController().getValue());\ncontroller.setUseSeparator(true);\nreturn controller;\n}\nprivate ConstantTimer getConstantTimer(MsLoopController element) {\nConstantTimer constantTimer = new ConstantTimer();\nconstantTimer.setEnabled(element.getEnable());\nconstantTimer.setProperty(TestElement.TEST_CLASS, ConstantTimer.class.getName());\nconstantTimer.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"ConstantTimerGui\"));\nif (StringUtils.equals(element.getLoopType(), LoopType.WHILE.name()) && element.getWhileController() != null) {\nreturn null;\n}\nif (StringUtils.equals(element.getLoopType(), LoopType.FOREACH.name()) && element.getForEachController() != null &&\nelement.getForEachController().getLoopTime() > 0) {\nconstantTimer.setProperty(\"ConstantTimer.delay\", element.getForEachController().getLoopTime());\nconstantTimer.setDelay(element.getForEachController().getLoopTime() + StringUtils.EMPTY);\nconstantTimer.setName(element.getForEachController().getLoopTime() + \" ms\");\nreturn constantTimer;\n}\nif (StringUtils.equals(element.getLoopType(), LoopType.LOOP_COUNT.name()) && element.getMsCountController() != null &&\nelement.getMsCountController().getLoopTime() > 0) {\nconstantTimer.setProperty(\"ConstantTimer.delay\", element.getMsCountController().getLoopTime() + \"\");\nconstantTimer.setDelay(element.getMsCountController().getLoopTime() + StringUtils.EMPTY);\nconstantTimer.setName(element.getMsCountController().getLoopTime() + \" ms\");\nreturn constantTimer;\n}\nreturn null;\n}\n}", + "context_after": "class MsLoopControllerConverter extends AbstractJmeterElementConverter {\n@Override\nprivate HashTree controller(HashTree tree, MsLoopController element) {\nif (StringUtils.equals(element.getLoopType(), LoopType.WHILE.name()) && element.getWhileController() != null) {\nRunTime runTime = new RunTime();\nrunTime.setEnabled(true);\nrunTime.setProperty(TestElement.TEST_CLASS, RunTime.class.getName());\nrunTime.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"RunTimeGui\"));\nlong timeout = element.getWhileController().getTimeout() / 1000;\nif (timeout < 1) {\ntimeout = 1;\n}\nrunTime.setRuntime(timeout);\nString condition;\nif (StringUtils.equals(WhileConditionType.CONDITION.name(), element.getWhileController().getConditionType())) {\ncondition = element.getWhileController().getMsWhileVariable().getConditionValue();\n} else {\ncondition = element.getWhileController().getMsWhileScript().getScriptValue();\n}\nString ifCondition = \"${__jexl3(\" + condition + \")}\";\nString whileCondition = \"${__jexl3(\" + condition + \" && \\\"${\" + element.getCurrentTime() + \"}\\\" !=\\\"stop\\\")}\";\nHashTree ifHashTree = tree.add(ifController(ifCondition, element.getEnable()));\naddPreProc(ifHashTree, element);\nHashTree hashTree = ifHashTree.add(initWhileController(element, whileCondition));\nJSR223PreProcessor jsr223PreProcessor = new JSR223PreProcessor();\njsr223PreProcessor.setName(\"While \u5faa\u73af\u63a7\u5236\u5668\u8d85\u65f6\u5904\u7406\u811a\u672c\");\njsr223PreProcessor.setProperty(TestElement.TEST_CLASS, JSR223Sampler.class.getName());\njsr223PreProcessor.setProperty(TestElement.GUI_CLASS, 
SaveService.aliasToClass(\"TestBeanGUI\"));\njsr223PreProcessor.setProperty(\"scriptLanguage\", ScriptLanguageType.BEANSHELL.name().toLowerCase());\nScriptFilter.verify(ScriptLanguageType.BEANSHELL.name().toLowerCase(), element.getName(), script(element));\njsr223PreProcessor.setProperty(\"script\", script(element));\nhashTree.add(jsr223PreProcessor);\nreturn hashTree;\n}\nif (StringUtils.equals(element.getLoopType(), LoopType.FOREACH.name()) && element.getForEachController() != null) {\nreturn tree.add(initForeachController(element));\n}\nif (StringUtils.equals(element.getLoopType(), LoopType.LOOP_COUNT.name()) && element.getMsCountController() != null) {\nString ifCondition = StringUtils.join(\"${__jexl3(\", element.getMsCountController().getLoops(), \" > 0 \", \")}\");\nHashTree ifHashTree = tree.add(ifController(ifCondition, element.getEnable()));\nreturn ifHashTree.add(initLoopController(element));\n}\nreturn null;\n}\nprivate static String script(MsLoopController element) {\nString script = \"\"\"\nimport java.util.*;\nimport java.text.SimpleDateFormat;\nimport org.apache.jmeter.threads.JMeterContextService;\ntry{\nString ms_current_timer = vars.get(\"%s\");\nlong _nowTime = System.currentTimeMillis();\nif(ms_current_timer == null ){\nvars.put(\"%s\",_nowTime.toString());\n}\nlong time = Long.parseLong(vars.get(\"%s\"));\nif((_nowTime - time) > %s ){\nvars.put(\"%s\", \"stop\");\nlog.info( \"\u7ed3\u675f\u5faa\u73af\");\n}\n}catch (Exception e){\nlog.info( e.getMessage());\nvars.put(\"%s\", \"stop\");\n}\n\"\"\";\nreturn String.format(script, element.getCurrentTime(), element.getCurrentTime(), element.getCurrentTime(), element.getWhileController().getTimeout(), element.getCurrentTime(), element.getCurrentTime());\n}\nprivate void addPreProc(HashTree hashTree, MsLoopController element) {\nJSR223Sampler sampler = new JSR223Sampler();\nsampler.setName(\"MS_CLEAR_LOOPS_VAR_\" + element.getCurrentTime());\nsampler.setProperty(TestElement.TEST_CLASS, JSR223Sampler.class.getName());\nsampler.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"TestBeanGUI\"));\nsampler.setProperty(\"scriptLanguage\", ScriptLanguageType.BEANSHELL.name().toLowerCase());\nScriptFilter.verify(ScriptLanguageType.BEANSHELL.name().toLowerCase(), element.getName(), script(element));\nsampler.setProperty(\"script\", \"vars.put(\\\"\" + element.getCurrentTime() + \"\\\", null);\");\nhashTree.add(sampler);\n}\nprivate LoopController initLoopController(MsLoopController element) {\nLoopController loopController = new LoopController();\nloopController.setEnabled(element.getEnable());\nloopController.setName(StringUtils.isNotBlank(element.getName()) ? 
element.getName() : \"Loop Controller\");\nloopController.setProperty(TestElement.TEST_CLASS, LoopController.class.getName());\nloopController.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"LoopControlPanel\"));\nloopController.setLoops(element.getMsCountController().getLoops());\nif (StringUtils.isNotEmpty(element.getMsCountController().getLoops())) {\nloopController.setContinueForever(true);\n}\nreturn loopController;\n}\nprivate IfController ifController(String condition, boolean enable) {\nIfController ifController = new IfController();\nifController.setEnabled(enable);\nifController.setName(\"while ifController\");\nifController.setProperty(TestElement.TEST_CLASS, IfController.class.getName());\nifController.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"IfControllerPanel\"));\nifController.setCondition(condition);\nifController.setEvaluateAll(false);\nifController.setUseExpression(true);\nreturn ifController;\n}\nprivate WhileController initWhileController(MsLoopController element, String condition) {\nWhileController controller = new WhileController();\ncontroller.setEnabled(element.getEnable());\ncontroller.setName(StringUtils.isNotBlank(element.getName()) ? element.getName() : \"While Controller\");\ncontroller.setProperty(TestElement.TEST_CLASS, WhileController.class.getName());\ncontroller.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"WhileControllerGui\"));\ncontroller.setCondition(condition);\nreturn controller;\n}\nprivate ForeachController initForeachController(MsLoopController element) {\nForeachController controller = new ForeachController();\ncontroller.setEnabled(element.getEnable());\ncontroller.setName(StringUtils.isNotBlank(element.getName()) ? element.getName() : \"Foreach Controller\");\ncontroller.setProperty(TestElement.TEST_CLASS, ForeachController.class.getName());\ncontroller.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"ForeachControlPanel\"));\ncontroller.setInputVal(element.getForEachController().getVariable());\ncontroller.setReturnVal(element.getForEachController().getValue());\ncontroller.setUseSeparator(true);\nreturn controller;\n}\nprivate ConstantTimer getConstantTimer(MsLoopController element) {\nConstantTimer constantTimer = new ConstantTimer();\nconstantTimer.setEnabled(element.getEnable());\nconstantTimer.setProperty(TestElement.TEST_CLASS, ConstantTimer.class.getName());\nconstantTimer.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass(\"ConstantTimerGui\"));\nif (StringUtils.equals(element.getLoopType(), LoopType.WHILE.name()) && element.getWhileController() != null) {\nreturn null;\n}\nif (StringUtils.equals(element.getLoopType(), LoopType.FOREACH.name()) && element.getForEachController() != null &&\nelement.getForEachController().getLoopTime() > 0) {\nconstantTimer.setProperty(\"ConstantTimer.delay\", element.getForEachController().getLoopTime());\nconstantTimer.setDelay(element.getForEachController().getLoopTime() + StringUtils.EMPTY);\nconstantTimer.setName(element.getForEachController().getLoopTime() + \" ms\");\nreturn constantTimer;\n}\nif (StringUtils.equals(element.getLoopType(), LoopType.LOOP_COUNT.name()) && element.getMsCountController() != null &&\nelement.getMsCountController().getLoopTime() > 0) {\nconstantTimer.setProperty(\"ConstantTimer.delay\", element.getMsCountController().getLoopTime() + \"\");\nconstantTimer.setDelay(element.getMsCountController().getLoopTime() + StringUtils.EMPTY);\nconstantTimer.setName(element.getMsCountController().getLoopTime() + 
\" ms\");\nreturn constantTimer;\n}\nreturn null;\n}\n}" + }, + { + "comment": "you can't guarantee this in the `finally` block. (for example if submitJobDetached failed but the job is actually running)", + "method_body": "public void testDuplicateRegistrationFailsJob() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal int numKeys = 256;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestKeyRangeSource(numKeys));\nReducingStateDescriptor> reducingState = new ReducingStateDescriptor<>(\n\"any-name\",\nnew SumReduce(),\nsource.getType());\nfinal String queryName = \"duplicate-me\";\nfinal QueryableStateStream> queryableState =\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = -4126824763829132959L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).asQueryableState(queryName, reducingState);\nfinal QueryableStateStream> duplicate =\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = -6265024000462809436L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).asQueryableState(queryName);\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\nfinal CompletableFuture failedFuture =\nnotifyWhenJobStatusIs(jobId, JobStatus.FAILED, deadline);\ncluster.submitJobDetached(jobGraph);\nTestingJobManagerMessages.JobStatusIs jobStatus =\nfailedFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\nassertEquals(JobStatus.FAILED, jobStatus.state());\nJobManagerMessages.JobFound jobFound = FutureUtils.toJava(\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.RequestJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(JobManagerMessages.JobFound.class)))\n.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\nString failureCause = jobFound.executionGraph().getFailureCause().getExceptionAsString();\nassertTrue(\"Not instance of SuppressRestartsException\", failureCause.startsWith(\"org.apache.flink.runtime.execution.SuppressRestartsException\"));\nint causedByIndex = failureCause.indexOf(\"Caused by: \");\nString subFailureCause = failureCause.substring(causedByIndex + \"Caused by: \".length());\nassertTrue(\"Not caused by IllegalStateException\", subFailureCause.startsWith(\"java.lang.IllegalStateException\"));\nassertTrue(\"Exception does not contain registration name\", subFailureCause.contains(queryName));\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\n}\n}\n}", + "target_code": "cluster.getLeaderGateway(deadline.timeLeft())", + "method_body_after": "public void testDuplicateRegistrationFailsJob() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal int numKeys = 256;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new 
TestKeyRangeSource(numKeys));\nReducingStateDescriptor> reducingState = new ReducingStateDescriptor<>(\n\"any-name\",\nnew SumReduce(),\nsource.getType());\nfinal String queryName = \"duplicate-me\";\nfinal QueryableStateStream> queryableState =\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = -4126824763829132959L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).asQueryableState(queryName, reducingState);\nfinal QueryableStateStream> duplicate =\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = -6265024000462809436L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).asQueryableState(queryName);\nfinal JobGraph jobGraph = env.getStreamGraph().getJobGraph();\nfinal JobID jobId = jobGraph.getJobID();\nfinal CompletableFuture failedFuture =\nnotifyWhenJobStatusIs(jobId, JobStatus.FAILED, deadline);\nfinal CompletableFuture cancellationFuture =\nnotifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\ncluster.submitJobDetached(jobGraph);\ntry {\nfinal TestingJobManagerMessages.JobStatusIs jobStatus =\nfailedFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\nassertEquals(JobStatus.FAILED, jobStatus.state());\n} catch (Exception e) {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\nthrow e;\n}\nJobManagerMessages.JobFound jobFound = FutureUtils.toJava(\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.RequestJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(JobManagerMessages.JobFound.class)))\n.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\nString failureCause = jobFound.executionGraph().getFailureCause().getExceptionAsString();\nassertEquals(JobStatus.FAILED, jobFound.executionGraph().getState());\nassertTrue(\"Not instance of SuppressRestartsException\", failureCause.startsWith(\"org.apache.flink.runtime.execution.SuppressRestartsException\"));\nint causedByIndex = failureCause.indexOf(\"Caused by: \");\nString subFailureCause = failureCause.substring(causedByIndex + \"Caused by: \".length());\nassertTrue(\"Not caused by IllegalStateException\", subFailureCause.startsWith(\"java.lang.IllegalStateException\"));\nassertTrue(\"Exception does not contain registration name\", subFailureCause.contains(queryName));\n}", + "context_before": "class AbstractQueryableStateTestBase extends TestLogger {\nprivate static final FiniteDuration TEST_TIMEOUT = new FiniteDuration(10000L, TimeUnit.SECONDS);\nprivate final ScheduledExecutorService executorService = Executors.newScheduledThreadPool(4);\nprivate final ScheduledExecutor executor = new ScheduledExecutorServiceAdapter(executorService);\n/**\n* State backend to use.\n*/\nprotected AbstractStateBackend stateBackend;\n/**\n* Shared between all the test. Make sure to have at least NUM_SLOTS\n* available after your test finishes, e.g. 
cancel the job you submitted.\n*/\nprotected static FlinkMiniCluster cluster;\n/**\n* Client shared between all the test.\n*/\nprotected static QueryableStateClient client;\nprotected static int maxParallelism;\n@Before\npublic void setUp() throws Exception {\nthis.stateBackend = createStateBackend();\nAssert.assertNotNull(cluster);\nmaxParallelism = cluster.configuration().getInteger(ConfigConstants.LOCAL_NUMBER_TASK_MANAGER, 1) *\ncluster.configuration().getInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, 1);\n}\n/**\n* Creates a state backend instance which is used in the {@link\n* test case.\n*\n* @return a state backend instance for each unit test\n*/\nprotected abstract AbstractStateBackend createStateBackend() throws Exception;\n/**\n* Runs a simple topology producing random (key, 1) pairs at the sources (where\n* number of keys is in fixed in range 0...numKeys). The records are keyed and\n* a reducing queryable state instance is created, which sums up the records.\n*\n*
After submitting the job in detached mode, the QueryableStateCLient is used\n* to query the counts of each key in rounds until all keys have non-zero counts.\n*/\n@Test\n@SuppressWarnings(\"unchecked\")\npublic void testQueryableState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal int numKeys = 256;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestKeyRangeSource(numKeys));\nReducingStateDescriptor> reducingState = new ReducingStateDescriptor<>(\n\"any-name\",\nnew SumReduce(),\nsource.getType());\nfinal String queryName = \"hakuna-matata\";\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 7143749578983540352L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).asQueryableState(queryName, reducingState);\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\ncluster.submitJobDetached(jobGraph);\nfinal AtomicLongArray counts = new AtomicLongArray(numKeys);\nboolean allNonZero = false;\nwhile (!allNonZero && deadline.hasTimeLeft()) {\nallNonZero = true;\nfinal List>>> futures = new ArrayList<>(numKeys);\nfor (int i = 0; i < numKeys; i++) {\nfinal int key = i;\nif (counts.get(key) > 0L) {\ncontinue;\n} else {\nallNonZero = false;\n}\nCompletableFuture>> result = getKvState(\ndeadline,\nclient,\njobId,\nqueryName,\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nreducingState,\nfalse,\nexecutor);\nresult.thenAccept(response -> {\ntry {\nTuple2 res = response.get();\ncounts.set(key, res.f1);\nassertEquals(\"Key mismatch\", key, res.f0.intValue());\n} catch (Exception e) {\nAssert.fail(e.getMessage());\n}\n});\nfutures.add(result);\n}\nCompletableFuture\n.allOf(futures.toArray(new CompletableFuture[futures.size()]))\n.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\nassertTrue(\"Not all keys are non-zero\", allNonZero);\nfor (int i = 0; i < numKeys; i++) {\nlong count = counts.get(i);\nassertTrue(\"Count at position \" + i + \" is \" + count, count > 0);\n}\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n/**\n* Tests that duplicate query registrations fail the job at the JobManager.\n*/\n@Test\n/**\n* Tests simple value state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. 
The tests succeeds after each subtask index is queried with\n* value numElements (the latest element updated the state).\n*/\n@Test\npublic void testValueState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestAscendingValueSource(numElements));\nValueStateDescriptor> valueState = new ValueStateDescriptor<>(\n\"any\",\nsource.getType());\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 7662520075515707428L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).asQueryableState(\"hakuna\", valueState);\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\ncluster.submitJobDetached(jobGraph);\nexecuteValueQuery(deadline, client, jobId, \"hakuna\", valueState, numElements);\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n/**\n* Tests that the correct exception is thrown if the query\n* contains a wrong jobId or wrong queryable state name.\n*/\n@Test\n@Ignore\npublic void testWrongJobIdAndWrongQueryableStateName() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestAscendingValueSource(numElements));\nValueStateDescriptor> valueState =\nnew ValueStateDescriptor<>(\"any\", source.getType());\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 7662520075515707428L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).asQueryableState(\"hakuna\", valueState);\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\nCompletableFuture runningFuture =\nnotifyWhenJobStatusIs(jobId, JobStatus.RUNNING, deadline);\ncluster.submitJobDetached(jobGraph);\nTestingJobManagerMessages.JobStatusIs jobStatus =\nrunningFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\nassertEquals(JobStatus.RUNNING, jobStatus.state());\nfinal JobID wrongJobId = new JobID();\nCompletableFuture>> unknownJobFuture = client.getKvState(\nwrongJobId,\n\"hakuna\",\n0,\nBasicTypeInfo.INT_TYPE_INFO,\nvalueState);\ntry {\nunknownJobFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\nfail();\n} catch (ExecutionException e) {\nAssert.assertTrue(\"GOT: \" + e.getCause().getMessage(), e.getCause() instanceof RuntimeException);\nAssert.assertTrue(\"GOT: \" + 
e.getCause().getMessage(), e.getCause().getMessage().contains(\n\"FlinkJobNotFoundException: Could not find Flink job (\" + wrongJobId + \")\"));\n} catch (Exception f) {\nfail(\"Unexpected type of exception: \" + f.getMessage());\n}\nCompletableFuture>> unknownQSName = client.getKvState(\njobId,\n\"wrong-hakuna\",\n0,\nBasicTypeInfo.INT_TYPE_INFO,\nvalueState);\ntry {\nunknownQSName.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\nfail();\n} catch (ExecutionException e) {\nAssert.assertTrue(\"GOT: \" + e.getCause().getMessage(), e.getCause() instanceof RuntimeException);\nAssert.assertTrue(\"GOT: \" + e.getCause().getMessage(), e.getCause().getMessage().contains(\n\"UnknownKvStateLocation: No KvStateLocation found for KvState instance with name 'wrong-hakuna'.\"));\n} catch (Exception f) {\nfail(\"Unexpected type of exception: \" + f.getMessage());\n}\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n/**\n* Similar tests as {@link\n* job, we already issue one request which fails.\n*/\n@Test\npublic void testQueryNonStartedJobState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestAscendingValueSource(numElements));\nValueStateDescriptor> valueState = new ValueStateDescriptor<>(\n\"any\",\nsource.getType(),\nnull);\nQueryableStateStream> queryableState =\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 7480503339992214681L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).asQueryableState(\"hakuna\", valueState);\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\nlong expected = numElements;\nclient.getKvState(\njobId,\nqueryableState.getQueryableStateName(),\n0,\nBasicTypeInfo.INT_TYPE_INFO,\nvalueState);\ncluster.submitJobDetached(jobGraph);\nexecuteValueQuery(deadline, client, jobId, \"hakuna\", valueState, expected);\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n/**\n* Tests simple value state queryable state instance with a default value\n* set. 
Each source emits (subtaskIndex, 0)..(subtaskIndex, numElements)\n* tuples, the key is mapped to 1 but key 0 is queried which should throw\n* a {@link UnknownKeyOrNamespaceException} exception.\n*\n* @throws UnknownKeyOrNamespaceException thrown due querying a non-existent key\n*/\n@Test(expected = UnknownKeyOrNamespaceException.class)\npublic void testValueStateDefault() throws Throwable {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env =\nStreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies\n.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestAscendingValueSource(numElements));\nValueStateDescriptor> valueState =\nnew ValueStateDescriptor<>(\n\"any\",\nsource.getType(),\nTuple2.of(0, 1337L));\nQueryableStateStream>\nqueryableState =\nsource.keyBy(\nnew KeySelector, Integer>() {\nprivate static final long serialVersionUID = 4509274556892655887L;\n@Override\npublic Integer getKey(\nTuple2 value) throws\nException {\nreturn 1;\n}\n}).asQueryableState(\"hakuna\", valueState);\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\ncluster.submitJobDetached(jobGraph);\nint key = 0;\nCompletableFuture>> future = getKvState(\ndeadline,\nclient,\njobId,\nqueryableState.getQueryableStateName(),\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nvalueState,\ntrue,\nexecutor);\ntry {\nfuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n} catch (ExecutionException | CompletionException e) {\nthrow e.getCause();\n}\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n/**\n* Tests simple value state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. The tests succeeds after each subtask index is queried with\n* value numElements (the latest element updated the state).\n*\n*
This is the same as the simple value state test, but uses the API shortcut.\n*/\n@Test\npublic void testValueStateShortcut() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestAscendingValueSource(numElements));\nQueryableStateStream> queryableState =\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 9168901838808830068L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).asQueryableState(\"matata\");\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\ncluster.submitJobDetached(jobGraph);\nfinal ValueStateDescriptor> stateDesc =\n(ValueStateDescriptor>) queryableState.getStateDescriptor();\nexecuteValueQuery(deadline, client, jobId, \"matata\", stateDesc, numElements);\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n/**\n* Tests simple folding state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. The folding state sums these up and maps them to Strings. 
The\n* test succeeds after each subtask index is queried with result n*(n+1)/2\n* (as a String).\n*/\n@Test\npublic void testFoldingState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal int numElements = 1024;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestAscendingValueSource(numElements));\nFoldingStateDescriptor, String> foldingState =\nnew FoldingStateDescriptor<>(\n\"any\",\n\"0\",\nnew SumFold(),\nStringSerializer.INSTANCE);\nQueryableStateStream queryableState =\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = -842809958106747539L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).asQueryableState(\"pumba\", foldingState);\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\ncluster.submitJobDetached(jobGraph);\nString expected = Integer.toString(numElements * (numElements + 1) / 2);\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nCompletableFuture, String>> future = getKvState(\ndeadline,\nclient,\njobId,\n\"pumba\",\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nfoldingState,\nfalse,\nexecutor);\nString value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();\nif (expected.equals(value)) {\nsuccess = true;\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n/**\n* Tests simple reducing state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. The reducing state instance sums these up. 
The test succeeds\n* after each subtask index is queried with result n*(n+1)/2.\n*/\n@Test\npublic void testReducingState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestAscendingValueSource(numElements));\nReducingStateDescriptor> reducingState =\nnew ReducingStateDescriptor<>(\n\"any\",\nnew SumReduce(),\nsource.getType());\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 8470749712274833552L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).asQueryableState(\"jungle\", reducingState);\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\ncluster.submitJobDetached(jobGraph);\nlong expected = numElements * (numElements + 1L) / 2L;\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nCompletableFuture>> future = getKvState(\ndeadline,\nclient,\njobId,\n\"jungle\",\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nreducingState,\nfalse,\nexecutor);\nTuple2 value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();\nassertEquals(\"Key mismatch\", key, value.f0.intValue());\nif (expected == value.f1) {\nsuccess = true;\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n/**\n* Tests simple map state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. The map state instance sums the values up. 
The test succeeds\n* after each subtask index is queried with result n*(n+1)/2.\n*/\n@Test\npublic void testMapState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestAscendingValueSource(numElements));\nfinal MapStateDescriptor> mapStateDescriptor = new MapStateDescriptor<>(\n\"timon\",\nBasicTypeInfo.INT_TYPE_INFO,\nsource.getType());\nmapStateDescriptor.setQueryable(\"timon-queryable\");\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 8470749712274833552L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).process(new ProcessFunction, Object>() {\nprivate static final long serialVersionUID = -805125545438296619L;\nprivate transient MapState> mapState;\n@Override\npublic void open(Configuration parameters) throws Exception {\nsuper.open(parameters);\nmapState = getRuntimeContext().getMapState(mapStateDescriptor);\n}\n@Override\npublic void processElement(Tuple2 value, Context ctx, Collector out) throws Exception {\nTuple2 v = mapState.get(value.f0);\nif (v == null) {\nv = new Tuple2<>(value.f0, 0L);\n}\nmapState.put(value.f0, new Tuple2<>(v.f0, v.f1 + value.f1));\n}\n});\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\ncluster.submitJobDetached(jobGraph);\nlong expected = numElements * (numElements + 1L) / 2L;\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nCompletableFuture>> future = getKvState(\ndeadline,\nclient,\njobId,\n\"timon-queryable\",\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nmapStateDescriptor,\nfalse,\nexecutor);\nTuple2 value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get(key);\nassertEquals(\"Key mismatch\", key, value.f0.intValue());\nif (expected == value.f1) {\nsuccess = true;\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n/**\n* Tests simple list state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. The list state instance add the values to the list. 
The test\n* succeeds after each subtask index is queried and the list contains\n* the correct number of distinct elements.\n*/\n@Test\npublic void testListState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestAscendingValueSource(numElements));\nfinal ListStateDescriptor listStateDescriptor = new ListStateDescriptor(\n\"list\",\nBasicTypeInfo.LONG_TYPE_INFO);\nlistStateDescriptor.setQueryable(\"list-queryable\");\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 8470749712274833552L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).process(new ProcessFunction, Object>() {\nprivate static final long serialVersionUID = -805125545438296619L;\nprivate transient ListState listState;\n@Override\npublic void open(Configuration parameters) throws Exception {\nsuper.open(parameters);\nlistState = getRuntimeContext().getListState(listStateDescriptor);\n}\n@Override\npublic void processElement(Tuple2 value, Context ctx, Collector out) throws Exception {\nlistState.add(value.f1);\n}\n});\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\ncluster.submitJobDetached(jobGraph);\nMap> results = new HashMap<>();\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nfinal CompletableFuture> future = getKvState(\ndeadline,\nclient,\njobId,\n\"list-queryable\",\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nlistStateDescriptor,\nfalse,\nexecutor);\nIterable value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();\nSet res = new HashSet<>();\nfor (Long v: value) {\nres.add(v);\n}\nif (res.size() == numElements + 1L) {\nsuccess = true;\nresults.put(key, res);\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\nfor (int key = 0; key < maxParallelism; key++) {\nSet values = results.get(key);\nfor (long i = 0L; i <= numElements; i++) {\nassertTrue(values.contains(i));\n}\n}\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n@Test\npublic void testAggregatingState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nCompletableFuture cancellationFuture = null;\nJobID jobId = null;\ntry {\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env\n.addSource(new TestAscendingValueSource(numElements));\nfinal AggregatingStateDescriptor, String, String> aggrStateDescriptor =\nnew AggregatingStateDescriptor<>(\n\"aggregates\",\nnew 
SumAggr(),\nString.class);\naggrStateDescriptor.setQueryable(\"aggr-queryable\");\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 8470749712274833552L;\n@Override\npublic Integer getKey(Tuple2 value) throws Exception {\nreturn value.f0;\n}\n}).transform(\n\"TestAggregatingOperator\",\nBasicTypeInfo.STRING_TYPE_INFO,\nnew AggregatingTestOperator(aggrStateDescriptor)\n);\nJobGraph jobGraph = env.getStreamGraph().getJobGraph();\njobId = jobGraph.getJobID();\ncancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\ncluster.submitJobDetached(jobGraph);\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nCompletableFuture, String>> future = getKvState(\ndeadline,\nclient,\njobId,\n\"aggr-queryable\",\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\naggrStateDescriptor,\nfalse,\nexecutor);\nString value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();\nif (Long.parseLong(value) == numElements * (numElements + 1L) / 2L) {\nsuccess = true;\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\n} finally {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\n/**\n* Test source producing (key, 0)..(key, maxValue) with key being the sub\n* task index.\n*\n*
After all tuples have been emitted, the source waits to be cancelled\n* and does not immediately finish.\n*/\nprivate static class TestAscendingValueSource extends RichParallelSourceFunction> {\nprivate static final long serialVersionUID = 1459935229498173245L;\nprivate final long maxValue;\nprivate volatile boolean isRunning = true;\nTestAscendingValueSource(long maxValue) {\nPreconditions.checkArgument(maxValue >= 0);\nthis.maxValue = maxValue;\n}\n@Override\npublic void open(Configuration parameters) throws Exception {\nsuper.open(parameters);\n}\n@Override\npublic void run(SourceContext> ctx) throws Exception {\nint key = getRuntimeContext().getIndexOfThisSubtask();\nTuple2 record = new Tuple2<>(key, 0L);\nlong currentValue = 0;\nwhile (isRunning && currentValue <= maxValue) {\nsynchronized (ctx.getCheckpointLock()) {\nrecord.f1 = currentValue;\nctx.collect(record);\n}\ncurrentValue++;\n}\nwhile (isRunning) {\nsynchronized (this) {\nwait();\n}\n}\n}\n@Override\npublic void cancel() {\nisRunning = false;\nsynchronized (this) {\nnotifyAll();\n}\n}\n}\n/**\n* Test source producing (key, 1) tuples with random key in key range (numKeys).\n*/\nprivate static class TestKeyRangeSource extends RichParallelSourceFunction> implements CheckpointListener {\nprivate static final long serialVersionUID = -5744725196953582710L;\nprivate static final AtomicLong LATEST_CHECKPOINT_ID = new AtomicLong();\nprivate final int numKeys;\nprivate final ThreadLocalRandom random = ThreadLocalRandom.current();\nprivate volatile boolean isRunning = true;\nTestKeyRangeSource(int numKeys) {\nthis.numKeys = numKeys;\n}\n@Override\npublic void open(Configuration parameters) throws Exception {\nsuper.open(parameters);\nif (getRuntimeContext().getIndexOfThisSubtask() == 0) {\nLATEST_CHECKPOINT_ID.set(0L);\n}\n}\n@Override\npublic void run(SourceContext> ctx) throws Exception {\nTuple2 record = new Tuple2<>(0, 1L);\nwhile (isRunning) {\nsynchronized (ctx.getCheckpointLock()) {\nrecord.f0 = random.nextInt(numKeys);\nctx.collect(record);\n}\nThread.sleep(1L);\n}\n}\n@Override\npublic void cancel() {\nisRunning = false;\n}\n@Override\npublic void notifyCheckpointComplete(long checkpointId) throws Exception {\nif (getRuntimeContext().getIndexOfThisSubtask() == 0) {\nLATEST_CHECKPOINT_ID.set(checkpointId);\n}\n}\n}\n/**\n* An operator that uses {@link AggregatingState}.\n*\n*
The operator exists for lack of possibility to get an\n* {@link AggregatingState} from the {@link org.apache.flink.api.common.functions.RuntimeContext}.\n* If this were not the case, we could have a {@link ProcessFunction}.\n*/\nprivate static class AggregatingTestOperator\nextends AbstractStreamOperator\nimplements OneInputStreamOperator, String> {\nprivate static final long serialVersionUID = 1L;\nprivate final AggregatingStateDescriptor, String, String> stateDescriptor;\nprivate transient AggregatingState, String> state;\nAggregatingTestOperator(AggregatingStateDescriptor, String, String> stateDesc) {\nthis.stateDescriptor = stateDesc;\n}\n@Override\npublic void open() throws Exception {\nsuper.open();\nthis.state = getKeyedStateBackend().getPartitionedState(\nVoidNamespace.INSTANCE,\nVoidNamespaceSerializer.INSTANCE,\nstateDescriptor);\n}\n@Override\npublic void processElement(StreamRecord> element) throws Exception {\nstate.add(element.getValue());\n}\n}\n/**\n* Test {@link AggregateFunction} concatenating the already stored string with the long passed as argument.\n*/\nprivate static class SumAggr implements AggregateFunction, String, String> {\nprivate static final long serialVersionUID = -6249227626701264599L;\n@Override\npublic String createAccumulator() {\nreturn \"0\";\n}\n@Override\npublic String add(Tuple2 value, String accumulator) {\nlong acc = Long.valueOf(accumulator);\nacc += value.f1;\nreturn Long.toString(acc);\n}\n@Override\npublic String getResult(String accumulator) {\nreturn accumulator;\n}\n@Override\npublic String merge(String a, String b) {\nreturn Long.toString(Long.valueOf(a) + Long.valueOf(b));\n}\n}\n/**\n* Test {@link FoldFunction} concatenating the already stored string with the long passed as argument.\n*/\nprivate static class SumFold implements FoldFunction, String> {\nprivate static final long serialVersionUID = -6249227626701264599L;\n@Override\npublic String fold(String accumulator, Tuple2 value) throws Exception {\nlong acc = Long.valueOf(accumulator);\nacc += value.f1;\nreturn Long.toString(acc);\n}\n}\n/**\n* Test {@link ReduceFunction} summing up its two arguments.\n*/\nprotected static class SumReduce implements ReduceFunction> {\nprivate static final long serialVersionUID = -8651235077342052336L;\n@Override\npublic Tuple2 reduce(Tuple2 value1, Tuple2 value2) throws Exception {\nvalue1.f1 += value2.f1;\nreturn value1;\n}\n}\nprivate CompletableFuture notifyWhenJobStatusIs(\nfinal JobID jobId, final JobStatus status, final Deadline deadline) {\nreturn FutureUtils.toJava(\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new TestingJobManagerMessages.NotifyWhenJobStatus(jobId, status), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(TestingJobManagerMessages.JobStatusIs.class)));\n}\nprivate static CompletableFuture getKvState(\nfinal Deadline deadline,\nfinal QueryableStateClient client,\nfinal JobID jobId,\nfinal String queryName,\nfinal K key,\nfinal TypeInformation keyTypeInfo,\nfinal StateDescriptor stateDescriptor,\nfinal boolean failForUnknownKeyOrNamespace,\nfinal ScheduledExecutor executor) throws InterruptedException {\nfinal CompletableFuture resultFuture = new CompletableFuture<>();\ngetKvStateIgnoringCertainExceptions(\ndeadline, resultFuture, client, jobId, queryName, key, keyTypeInfo,\nstateDescriptor, failForUnknownKeyOrNamespace, executor);\nreturn resultFuture;\n}\nprivate static void getKvStateIgnoringCertainExceptions(\nfinal Deadline deadline,\nfinal CompletableFuture resultFuture,\nfinal QueryableStateClient 
client,\nfinal JobID jobId,\nfinal String queryName,\nfinal K key,\nfinal TypeInformation keyTypeInfo,\nfinal StateDescriptor stateDescriptor,\nfinal boolean failForUnknownKeyOrNamespace,\nfinal ScheduledExecutor executor) throws InterruptedException {\nif (!resultFuture.isDone()) {\nThread.sleep(100L);\nCompletableFuture expected = client.getKvState(jobId, queryName, key, keyTypeInfo, stateDescriptor);\nexpected.whenCompleteAsync((result, throwable) -> {\nif (throwable != null) {\nif (\nthrowable.getCause() instanceof CancellationException ||\nthrowable.getCause() instanceof AssertionError ||\n(failForUnknownKeyOrNamespace && throwable.getCause() instanceof UnknownKeyOrNamespaceException)\n) {\nresultFuture.completeExceptionally(throwable.getCause());\n} else if (deadline.hasTimeLeft()) {\ntry {\ngetKvStateIgnoringCertainExceptions(\ndeadline, resultFuture, client, jobId, queryName, key, keyTypeInfo,\nstateDescriptor, failForUnknownKeyOrNamespace, executor);\n} catch (InterruptedException e) {\ne.printStackTrace();\n}\n}\n} else {\nresultFuture.complete(result);\n}\n}, executor);\nresultFuture.whenComplete((result, throwable) -> expected.cancel(false));\n}\n}\n/**\n* Retry a query for state for keys between 0 and {@link\n* expected equals the value of the result tuple's second field.\n*/\nprivate void executeValueQuery(\nfinal Deadline deadline,\nfinal QueryableStateClient client,\nfinal JobID jobId,\nfinal String queryableStateName,\nfinal ValueStateDescriptor> stateDescriptor,\nfinal long expected) throws Exception {\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nCompletableFuture>> future = getKvState(\ndeadline,\nclient,\njobId,\nqueryableStateName,\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nstateDescriptor,\nfalse,\nexecutor);\nTuple2 value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).value();\nassertEquals(\"Key mismatch\", key, value.f0.intValue());\nif (expected == value.f1) {\nsuccess = true;\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\n}\n}", + "context_after": "class AbstractQueryableStateTestBase extends TestLogger {\nprivate static final FiniteDuration TEST_TIMEOUT = new FiniteDuration(10000L, TimeUnit.SECONDS);\nprivate final ScheduledExecutorService executorService = Executors.newScheduledThreadPool(4);\nprivate final ScheduledExecutor executor = new ScheduledExecutorServiceAdapter(executorService);\n/**\n* State backend to use.\n*/\nprotected AbstractStateBackend stateBackend;\n/**\n* Shared between all the test. Make sure to have at least NUM_SLOTS\n* available after your test finishes, e.g. 
cancel the job you submitted.\n*/\nprotected static FlinkMiniCluster cluster;\n/**\n* Client shared between all the test.\n*/\nprotected static QueryableStateClient client;\nprotected static int maxParallelism;\n@Before\npublic void setUp() throws Exception {\nthis.stateBackend = createStateBackend();\nAssert.assertNotNull(cluster);\nmaxParallelism = cluster.configuration().getInteger(ConfigConstants.LOCAL_NUMBER_TASK_MANAGER, 1) *\ncluster.configuration().getInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, 1);\n}\n/**\n* Creates a state backend instance which is used in the {@link\n* test case.\n*\n* @return a state backend instance for each unit test\n*/\nprotected abstract AbstractStateBackend createStateBackend() throws Exception;\n/**\n* Runs a simple topology producing random (key, 1) pairs at the sources (where\n* number of keys is in fixed in range 0...numKeys). The records are keyed and\n* a reducing queryable state instance is created, which sums up the records.\n*\n*
After submitting the job in detached mode, the QueryableStateCLient is used\n* to query the counts of each key in rounds until all keys have non-zero counts.\n*/\n@Test\n@SuppressWarnings(\"unchecked\")\npublic void testQueryableState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal int numKeys = 256;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestKeyRangeSource(numKeys));\nReducingStateDescriptor> reducingState = new ReducingStateDescriptor<>(\n\"any-name\", new SumReduce(), \tsource.getType());\nfinal String queryName = \"hakuna-matata\";\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 7143749578983540352L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).asQueryableState(queryName, reducingState);\ntry (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(cluster, env, deadline)) {\nfinal JobID jobId = autoCancellableJob.getJobId();\nfinal JobGraph jobGraph = autoCancellableJob.getJobGraph();\ncluster.submitJobDetached(jobGraph);\nfinal AtomicLongArray counts = new AtomicLongArray(numKeys);\nboolean allNonZero = false;\nwhile (!allNonZero && deadline.hasTimeLeft()) {\nallNonZero = true;\nfinal List>>> futures = new ArrayList<>(numKeys);\nfor (int i = 0; i < numKeys; i++) {\nfinal int key = i;\nif (counts.get(key) > 0L) {\ncontinue;\n} else {\nallNonZero = false;\n}\nCompletableFuture>> result = getKvState(\ndeadline,\nclient,\njobId,\nqueryName,\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nreducingState,\nfalse,\nexecutor);\nresult.thenAccept(response -> {\ntry {\nTuple2 res = response.get();\ncounts.set(key, res.f1);\nassertEquals(\"Key mismatch\", key, res.f0.intValue());\n} catch (Exception e) {\nAssert.fail(e.getMessage());\n}\n});\nfutures.add(result);\n}\nCompletableFuture\n.allOf(futures.toArray(new CompletableFuture[futures.size()]))\n.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\nassertTrue(\"Not all keys are non-zero\", allNonZero);\nfor (int i = 0; i < numKeys; i++) {\nlong count = counts.get(i);\nassertTrue(\"Count at position \" + i + \" is \" + count, count > 0);\n}\n}\n}\n/**\n* Tests that duplicate query registrations fail the job at the JobManager.\n*/\n@Test\n/**\n* Tests simple value state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. 
The tests succeeds after each subtask index is queried with\n* value numElements (the latest element updated the state).\n*/\n@Test\npublic void testValueState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestAscendingValueSource(numElements));\nValueStateDescriptor> valueState = new ValueStateDescriptor<>(\"any\", source.getType());\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 7662520075515707428L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).asQueryableState(\"hakuna\", valueState);\ntry (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(cluster, env, deadline)) {\nfinal JobID jobId = autoCancellableJob.getJobId();\nfinal JobGraph jobGraph = autoCancellableJob.getJobGraph();\ncluster.submitJobDetached(jobGraph);\nexecuteValueQuery(deadline, client, jobId, \"hakuna\", valueState, numElements);\n}\n}\n/**\n* Tests that the correct exception is thrown if the query\n* contains a wrong jobId or wrong queryable state name.\n*/\n@Test\n@Ignore\npublic void testWrongJobIdAndWrongQueryableStateName() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestAscendingValueSource(numElements));\nValueStateDescriptor> valueState = new ValueStateDescriptor<>(\"any\", source.getType());\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 7662520075515707428L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).asQueryableState(\"hakuna\", valueState);\ntry (AutoCancellableJob closableJobGraph = new AutoCancellableJob(cluster, env, deadline)) {\nCompletableFuture runningFuture =\nnotifyWhenJobStatusIs(closableJobGraph.getJobId(), JobStatus.RUNNING, deadline);\ncluster.submitJobDetached(closableJobGraph.getJobGraph());\nTestingJobManagerMessages.JobStatusIs jobStatus =\nrunningFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\nassertEquals(JobStatus.RUNNING, jobStatus.state());\nfinal JobID wrongJobId = new JobID();\nCompletableFuture>> unknownJobFuture = client.getKvState(\nwrongJobId,\n\"hakuna\",\n0,\nBasicTypeInfo.INT_TYPE_INFO,\nvalueState);\ntry {\nunknownJobFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\nfail();\n} catch (ExecutionException e) {\nAssert.assertTrue(\"GOT: \" + e.getCause().getMessage(), e.getCause() instanceof RuntimeException);\nAssert.assertTrue(\"GOT: \" + e.getCause().getMessage(), e.getCause().getMessage().contains(\n\"FlinkJobNotFoundException: Could not find Flink job (\" + wrongJobId + \")\"));\n} catch (Exception f) {\nfail(\"Unexpected type of exception: \" + f.getMessage());\n}\nCompletableFuture>> unknownQSName = client.getKvState(\nclosableJobGraph.getJobId(),\n\"wrong-hakuna\",\n0,\nBasicTypeInfo.INT_TYPE_INFO,\nvalueState);\ntry {\nunknownQSName.get(deadline.timeLeft().toMillis(), 
TimeUnit.MILLISECONDS);\nfail();\n} catch (ExecutionException e) {\nAssert.assertTrue(\"GOT: \" + e.getCause().getMessage(), e.getCause() instanceof RuntimeException);\nAssert.assertTrue(\"GOT: \" + e.getCause().getMessage(), e.getCause().getMessage().contains(\n\"UnknownKvStateLocation: No KvStateLocation found for KvState instance with name 'wrong-hakuna'.\"));\n} catch (Exception f) {\nfail(\"Unexpected type of exception: \" + f.getMessage());\n}\n}\n}\n/**\n* Similar tests as {@link\n* job, we already issue one request which fails.\n*/\n@Test\npublic void testQueryNonStartedJobState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestAscendingValueSource(numElements));\nValueStateDescriptor> valueState = new ValueStateDescriptor<>(\n\"any\", source.getType(), \tnull);\nQueryableStateStream> queryableState =\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 7480503339992214681L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).asQueryableState(\"hakuna\", valueState);\ntry (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(cluster, env, deadline)) {\nfinal JobID jobId = autoCancellableJob.getJobId();\nfinal JobGraph jobGraph = autoCancellableJob.getJobGraph();\nlong expected = numElements;\nclient.getKvState(\nautoCancellableJob.getJobId(),\nqueryableState.getQueryableStateName(),\n0,\nBasicTypeInfo.INT_TYPE_INFO,\nvalueState);\ncluster.submitJobDetached(jobGraph);\nexecuteValueQuery(deadline, client, jobId, \"hakuna\", valueState, expected);\n}\n}\n/**\n* Tests simple value state queryable state instance with a default value\n* set. 
Each source emits (subtaskIndex, 0)..(subtaskIndex, numElements)\n* tuples, the key is mapped to 1 but key 0 is queried which should throw\n* a {@link UnknownKeyOrNamespaceException} exception.\n*\n* @throws UnknownKeyOrNamespaceException thrown due querying a non-existent key\n*/\n@Test(expected = UnknownKeyOrNamespaceException.class)\npublic void testValueStateDefault() throws Throwable {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestAscendingValueSource(numElements));\nValueStateDescriptor> valueState = new ValueStateDescriptor<>(\n\"any\", source.getType(), \tTuple2.of(0, 1337L));\nQueryableStateStream> queryableState = source.keyBy(\nnew KeySelector, Integer>() {\nprivate static final long serialVersionUID = 4509274556892655887L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn 1;\n}\n}).asQueryableState(\"hakuna\", valueState);\ntry (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(cluster, env, deadline)) {\nfinal JobID jobId = autoCancellableJob.getJobId();\nfinal JobGraph jobGraph = autoCancellableJob.getJobGraph();\ncluster.submitJobDetached(jobGraph);\nint key = 0;\nCompletableFuture>> future = getKvState(\ndeadline,\nclient,\njobId,\nqueryableState.getQueryableStateName(),\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nvalueState,\ntrue,\nexecutor);\ntry {\nfuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n} catch (ExecutionException | CompletionException e) {\nthrow e.getCause();\n}\n}\n}\n/**\n* Tests simple value state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. The tests succeeds after each subtask index is queried with\n* value numElements (the latest element updated the state).\n*\n*
This is the same as the simple value state test, but uses the API shortcut.\n*/\n@Test\npublic void testValueStateShortcut() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestAscendingValueSource(numElements));\nfinal QueryableStateStream> queryableState =\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 9168901838808830068L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).asQueryableState(\"matata\");\nfinal ValueStateDescriptor> stateDesc =\n(ValueStateDescriptor>) queryableState.getStateDescriptor();\ntry (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(cluster, env, deadline)) {\nfinal JobID jobId = autoCancellableJob.getJobId();\nfinal JobGraph jobGraph = autoCancellableJob.getJobGraph();\ncluster.submitJobDetached(jobGraph);\nexecuteValueQuery(deadline, client, jobId, \"matata\", stateDesc, numElements);\n}\n}\n/**\n* Tests simple folding state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. The folding state sums these up and maps them to Strings. The\n* test succeeds after each subtask index is queried with result n*(n+1)/2\n* (as a String).\n*/\n@Test\npublic void testFoldingState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal int numElements = 1024;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestAscendingValueSource(numElements));\nFoldingStateDescriptor, String> foldingState = new FoldingStateDescriptor<>(\n\"any\", \"0\", new SumFold(), StringSerializer.INSTANCE);\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = -842809958106747539L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).asQueryableState(\"pumba\", foldingState);\ntry (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(cluster, env, deadline)) {\nfinal JobID jobId = autoCancellableJob.getJobId();\nfinal JobGraph jobGraph = autoCancellableJob.getJobGraph();\ncluster.submitJobDetached(jobGraph);\nfinal String expected = Integer.toString(numElements * (numElements + 1) / 2);\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nCompletableFuture, String>> future = getKvState(\ndeadline,\nclient,\njobId,\n\"pumba\",\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nfoldingState,\nfalse,\nexecutor);\nString value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();\nif (expected.equals(value)) {\nsuccess = true;\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\n}\n}\n/**\n* Tests simple reducing state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. The reducing state instance sums these up. 
The test succeeds\n* after each subtask index is queried with result n*(n+1)/2.\n*/\n@Test\npublic void testReducingState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestAscendingValueSource(numElements));\nReducingStateDescriptor> reducingState = new ReducingStateDescriptor<>(\n\"any\", new SumReduce(), source.getType());\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 8470749712274833552L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).asQueryableState(\"jungle\", reducingState);\ntry (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(cluster, env, deadline)) {\nfinal JobID jobId = autoCancellableJob.getJobId();\nfinal JobGraph jobGraph = autoCancellableJob.getJobGraph();\ncluster.submitJobDetached(jobGraph);\nfinal long expected = numElements * (numElements + 1L) / 2L;\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nCompletableFuture>> future = getKvState(\ndeadline,\nclient,\njobId,\n\"jungle\",\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nreducingState,\nfalse,\nexecutor);\nTuple2 value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();\nassertEquals(\"Key mismatch\", key, value.f0.intValue());\nif (expected == value.f1) {\nsuccess = true;\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\n}\n}\n/**\n* Tests simple map state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. The map state instance sums the values up. 
The test succeeds\n* after each subtask index is queried with result n*(n+1)/2.\n*/\n@Test\npublic void testMapState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestAscendingValueSource(numElements));\nfinal MapStateDescriptor> mapStateDescriptor = new MapStateDescriptor<>(\n\"timon\", BasicTypeInfo.INT_TYPE_INFO, source.getType());\nmapStateDescriptor.setQueryable(\"timon-queryable\");\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 8470749712274833552L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).process(new ProcessFunction, Object>() {\nprivate static final long serialVersionUID = -805125545438296619L;\nprivate transient MapState> mapState;\n@Override\npublic void open(Configuration parameters) throws Exception {\nsuper.open(parameters);\nmapState = getRuntimeContext().getMapState(mapStateDescriptor);\n}\n@Override\npublic void processElement(Tuple2 value, Context ctx, Collector out) throws Exception {\nTuple2 v = mapState.get(value.f0);\nif (v == null) {\nv = new Tuple2<>(value.f0, 0L);\n}\nmapState.put(value.f0, new Tuple2<>(v.f0, v.f1 + value.f1));\n}\n});\ntry (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(cluster, env, deadline)) {\nfinal JobID jobId = autoCancellableJob.getJobId();\nfinal JobGraph jobGraph = autoCancellableJob.getJobGraph();\ncluster.submitJobDetached(jobGraph);\nfinal long expected = numElements * (numElements + 1L) / 2L;\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nCompletableFuture>> future = getKvState(\ndeadline,\nclient,\njobId,\n\"timon-queryable\",\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nmapStateDescriptor,\nfalse,\nexecutor);\nTuple2 value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get(key);\nassertEquals(\"Key mismatch\", key, value.f0.intValue());\nif (expected == value.f1) {\nsuccess = true;\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\n}\n}\n/**\n* Tests simple list state queryable state instance. Each source emits\n* (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then\n* queried. The list state instance add the values to the list. 
The test\n* succeeds after each subtask index is queried and the list contains\n* the correct number of distinct elements.\n*/\n@Test\npublic void testListState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestAscendingValueSource(numElements));\nfinal ListStateDescriptor listStateDescriptor = new ListStateDescriptor(\n\"list\", BasicTypeInfo.LONG_TYPE_INFO);\nlistStateDescriptor.setQueryable(\"list-queryable\");\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 8470749712274833552L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).process(new ProcessFunction, Object>() {\nprivate static final long serialVersionUID = -805125545438296619L;\nprivate transient ListState listState;\n@Override\npublic void open(Configuration parameters) throws Exception {\nsuper.open(parameters);\nlistState = getRuntimeContext().getListState(listStateDescriptor);\n}\n@Override\npublic void processElement(Tuple2 value, Context ctx, Collector out) throws Exception {\nlistState.add(value.f1);\n}\n});\ntry (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(cluster, env, deadline)) {\nfinal JobID jobId = autoCancellableJob.getJobId();\nfinal JobGraph jobGraph = autoCancellableJob.getJobGraph();\ncluster.submitJobDetached(jobGraph);\nfinal Map> results = new HashMap<>();\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nfinal CompletableFuture> future = getKvState(\ndeadline,\nclient,\njobId,\n\"list-queryable\",\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nlistStateDescriptor,\nfalse,\nexecutor);\nIterable value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();\nSet res = new HashSet<>();\nfor (Long v: value) {\nres.add(v);\n}\nif (res.size() == numElements + 1L) {\nsuccess = true;\nresults.put(key, res);\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\nfor (int key = 0; key < maxParallelism; key++) {\nSet values = results.get(key);\nfor (long i = 0L; i <= numElements; i++) {\nassertTrue(values.contains(i));\n}\n}\n}\n}\n@Test\npublic void testAggregatingState() throws Exception {\nfinal Deadline deadline = TEST_TIMEOUT.fromNow();\nfinal long numElements = 1024L;\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setStateBackend(stateBackend);\nenv.setParallelism(maxParallelism);\nenv.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));\nDataStream> source = env.addSource(new TestAscendingValueSource(numElements));\nfinal AggregatingStateDescriptor, String, String> aggrStateDescriptor =\nnew AggregatingStateDescriptor<>(\"aggregates\", new SumAggr(), String.class);\naggrStateDescriptor.setQueryable(\"aggr-queryable\");\nsource.keyBy(new KeySelector, Integer>() {\nprivate static final long serialVersionUID = 8470749712274833552L;\n@Override\npublic Integer getKey(Tuple2 value) {\nreturn value.f0;\n}\n}).transform(\n\"TestAggregatingOperator\",\nBasicTypeInfo.STRING_TYPE_INFO,\nnew AggregatingTestOperator(aggrStateDescriptor)\n);\ntry (AutoCancellableJob autoCancellableJob = new 
AutoCancellableJob(cluster, env, deadline)) {\nfinal JobID jobId = autoCancellableJob.getJobId();\nfinal JobGraph jobGraph = autoCancellableJob.getJobGraph();\ncluster.submitJobDetached(jobGraph);\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nCompletableFuture, String>> future = getKvState(\ndeadline,\nclient,\njobId,\n\"aggr-queryable\",\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\naggrStateDescriptor,\nfalse,\nexecutor);\nString value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).get();\nif (Long.parseLong(value) == numElements * (numElements + 1L) / 2L) {\nsuccess = true;\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\n}\n}\n/**\n* Test source producing (key, 0)..(key, maxValue) with key being the sub\n* task index.\n*\n*
After all tuples have been emitted, the source waits to be cancelled\n* and does not immediately finish.\n*/\nprivate static class TestAscendingValueSource extends RichParallelSourceFunction> {\nprivate static final long serialVersionUID = 1459935229498173245L;\nprivate final long maxValue;\nprivate volatile boolean isRunning = true;\nTestAscendingValueSource(long maxValue) {\nPreconditions.checkArgument(maxValue >= 0);\nthis.maxValue = maxValue;\n}\n@Override\npublic void open(Configuration parameters) throws Exception {\nsuper.open(parameters);\n}\n@Override\npublic void run(SourceContext> ctx) throws Exception {\nint key = getRuntimeContext().getIndexOfThisSubtask();\nTuple2 record = new Tuple2<>(key, 0L);\nlong currentValue = 0;\nwhile (isRunning && currentValue <= maxValue) {\nsynchronized (ctx.getCheckpointLock()) {\nrecord.f1 = currentValue;\nctx.collect(record);\n}\ncurrentValue++;\n}\nwhile (isRunning) {\nsynchronized (this) {\nwait();\n}\n}\n}\n@Override\npublic void cancel() {\nisRunning = false;\nsynchronized (this) {\nnotifyAll();\n}\n}\n}\n/**\n* Test source producing (key, 1) tuples with random key in key range (numKeys).\n*/\nprivate static class TestKeyRangeSource extends RichParallelSourceFunction> implements CheckpointListener {\nprivate static final long serialVersionUID = -5744725196953582710L;\nprivate static final AtomicLong LATEST_CHECKPOINT_ID = new AtomicLong();\nprivate final int numKeys;\nprivate final ThreadLocalRandom random = ThreadLocalRandom.current();\nprivate volatile boolean isRunning = true;\nTestKeyRangeSource(int numKeys) {\nthis.numKeys = numKeys;\n}\n@Override\npublic void open(Configuration parameters) throws Exception {\nsuper.open(parameters);\nif (getRuntimeContext().getIndexOfThisSubtask() == 0) {\nLATEST_CHECKPOINT_ID.set(0L);\n}\n}\n@Override\npublic void run(SourceContext> ctx) throws Exception {\nTuple2 record = new Tuple2<>(0, 1L);\nwhile (isRunning) {\nsynchronized (ctx.getCheckpointLock()) {\nrecord.f0 = random.nextInt(numKeys);\nctx.collect(record);\n}\nThread.sleep(1L);\n}\n}\n@Override\npublic void cancel() {\nisRunning = false;\n}\n@Override\npublic void notifyCheckpointComplete(long checkpointId) throws Exception {\nif (getRuntimeContext().getIndexOfThisSubtask() == 0) {\nLATEST_CHECKPOINT_ID.set(checkpointId);\n}\n}\n}\n/**\n* An operator that uses {@link AggregatingState}.\n*\n*
The operator exists for lack of possibility to get an\n* {@link AggregatingState} from the {@link org.apache.flink.api.common.functions.RuntimeContext}.\n* If this were not the case, we could have a {@link ProcessFunction}.\n*/\nprivate static class AggregatingTestOperator\nextends AbstractStreamOperator\nimplements OneInputStreamOperator, String> {\nprivate static final long serialVersionUID = 1L;\nprivate final AggregatingStateDescriptor, String, String> stateDescriptor;\nprivate transient AggregatingState, String> state;\nAggregatingTestOperator(AggregatingStateDescriptor, String, String> stateDesc) {\nthis.stateDescriptor = stateDesc;\n}\n@Override\npublic void open() throws Exception {\nsuper.open();\nthis.state = getKeyedStateBackend().getPartitionedState(\nVoidNamespace.INSTANCE,\nVoidNamespaceSerializer.INSTANCE,\nstateDescriptor);\n}\n@Override\npublic void processElement(StreamRecord> element) throws Exception {\nstate.add(element.getValue());\n}\n}\n/**\n* Test {@link AggregateFunction} concatenating the already stored string with the long passed as argument.\n*/\nprivate static class SumAggr implements AggregateFunction, String, String> {\nprivate static final long serialVersionUID = -6249227626701264599L;\n@Override\npublic String createAccumulator() {\nreturn \"0\";\n}\n@Override\npublic String add(Tuple2 value, String accumulator) {\nlong acc = Long.valueOf(accumulator);\nacc += value.f1;\nreturn Long.toString(acc);\n}\n@Override\npublic String getResult(String accumulator) {\nreturn accumulator;\n}\n@Override\npublic String merge(String a, String b) {\nreturn Long.toString(Long.valueOf(a) + Long.valueOf(b));\n}\n}\n/**\n* Test {@link FoldFunction} concatenating the already stored string with the long passed as argument.\n*/\nprivate static class SumFold implements FoldFunction, String> {\nprivate static final long serialVersionUID = -6249227626701264599L;\n@Override\npublic String fold(String accumulator, Tuple2 value) throws Exception {\nlong acc = Long.valueOf(accumulator);\nacc += value.f1;\nreturn Long.toString(acc);\n}\n}\n/**\n* Test {@link ReduceFunction} summing up its two arguments.\n*/\nprotected static class SumReduce implements ReduceFunction> {\nprivate static final long serialVersionUID = -8651235077342052336L;\n@Override\npublic Tuple2 reduce(Tuple2 value1, Tuple2 value2) throws Exception {\nvalue1.f1 += value2.f1;\nreturn value1;\n}\n}\n/**\n* A wrapper of the job graph that makes sure to cancel the job and wait for\n* termination after the execution of every test.\n*/\nprivate static class AutoCancellableJob implements AutoCloseable {\nprivate final FlinkMiniCluster cluster;\nprivate final Deadline deadline;\nprivate final JobGraph jobGraph;\nprivate final JobID jobId;\nprivate final CompletableFuture cancellationFuture;\nAutoCancellableJob(final FlinkMiniCluster cluster, final StreamExecutionEnvironment env, final Deadline deadline) {\nPreconditions.checkNotNull(env);\nthis.cluster = Preconditions.checkNotNull(cluster);\nthis.jobGraph = env.getStreamGraph().getJobGraph();\nthis.deadline = Preconditions.checkNotNull(deadline);\nthis.jobId = jobGraph.getJobID();\nthis.cancellationFuture = notifyWhenJobStatusIs(jobId, JobStatus.CANCELED, deadline);\n}\nJobGraph getJobGraph() {\nreturn jobGraph;\n}\nJobID getJobId() {\nreturn jobId;\n}\n@Override\npublic void close() throws Exception {\nif (jobId != null) {\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new JobManagerMessages.CancelJob(jobId), 
deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(CancellationSuccess.class));\ncancellationFuture.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);\n}\n}\n}\nprivate static CompletableFuture notifyWhenJobStatusIs(\nfinal JobID jobId, final JobStatus status, final Deadline deadline) {\nreturn FutureUtils.toJava(\ncluster.getLeaderGateway(deadline.timeLeft())\n.ask(new TestingJobManagerMessages.NotifyWhenJobStatus(jobId, status), deadline.timeLeft())\n.mapTo(ClassTag$.MODULE$.apply(TestingJobManagerMessages.JobStatusIs.class)));\n}\nprivate static CompletableFuture getKvState(\nfinal Deadline deadline,\nfinal QueryableStateClient client,\nfinal JobID jobId,\nfinal String queryName,\nfinal K key,\nfinal TypeInformation keyTypeInfo,\nfinal StateDescriptor stateDescriptor,\nfinal boolean failForUnknownKeyOrNamespace,\nfinal ScheduledExecutor executor) throws InterruptedException {\nfinal CompletableFuture resultFuture = new CompletableFuture<>();\ngetKvStateIgnoringCertainExceptions(\ndeadline, resultFuture, client, jobId, queryName, key, keyTypeInfo,\nstateDescriptor, failForUnknownKeyOrNamespace, executor);\nreturn resultFuture;\n}\nprivate static void getKvStateIgnoringCertainExceptions(\nfinal Deadline deadline,\nfinal CompletableFuture resultFuture,\nfinal QueryableStateClient client,\nfinal JobID jobId,\nfinal String queryName,\nfinal K key,\nfinal TypeInformation keyTypeInfo,\nfinal StateDescriptor stateDescriptor,\nfinal boolean failForUnknownKeyOrNamespace,\nfinal ScheduledExecutor executor) throws InterruptedException {\nif (!resultFuture.isDone()) {\nThread.sleep(100L);\nCompletableFuture expected = client.getKvState(jobId, queryName, key, keyTypeInfo, stateDescriptor);\nexpected.whenCompleteAsync((result, throwable) -> {\nif (throwable != null) {\nif (\nthrowable.getCause() instanceof CancellationException ||\nthrowable.getCause() instanceof AssertionError ||\n(failForUnknownKeyOrNamespace && throwable.getCause() instanceof UnknownKeyOrNamespaceException)\n) {\nresultFuture.completeExceptionally(throwable.getCause());\n} else if (deadline.hasTimeLeft()) {\ntry {\ngetKvStateIgnoringCertainExceptions(\ndeadline, resultFuture, client, jobId, queryName, key, keyTypeInfo,\nstateDescriptor, failForUnknownKeyOrNamespace, executor);\n} catch (InterruptedException e) {\ne.printStackTrace();\n}\n}\n} else {\nresultFuture.complete(result);\n}\n}, executor);\nresultFuture.whenComplete((result, throwable) -> expected.cancel(false));\n}\n}\n/**\n* Retry a query for state for keys between 0 and {@link\n* expected equals the value of the result tuple's second field.\n*/\nprivate void executeValueQuery(\nfinal Deadline deadline,\nfinal QueryableStateClient client,\nfinal JobID jobId,\nfinal String queryableStateName,\nfinal ValueStateDescriptor> stateDescriptor,\nfinal long expected) throws Exception {\nfor (int key = 0; key < maxParallelism; key++) {\nboolean success = false;\nwhile (deadline.hasTimeLeft() && !success) {\nCompletableFuture>> future = getKvState(\ndeadline,\nclient,\njobId,\nqueryableStateName,\nkey,\nBasicTypeInfo.INT_TYPE_INFO,\nstateDescriptor,\nfalse,\nexecutor);\nTuple2 value = future.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS).value();\nassertEquals(\"Key mismatch\", key, value.f0.intValue());\nif (expected == value.f1) {\nsuccess = true;\n} else {\nThread.sleep(50L);\n}\n}\nassertTrue(\"Did not succeed query\", success);\n}\n}\n}" + }, + { + "comment": "mvColumnIdxToOrigColumnIdx?", + "method_body": "private void analyzeSubquery(Analyzer analyzer) 
throws UserException {\nSet mentionedColumns = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);\nif (targetColumnNames == null) {\nfor (Column col : targetTable.getBaseSchema()) {\nmentionedColumns.add(col.getName());\ntargetColumns.add(col);\n}\n} else {\nfor (String colName : targetColumnNames) {\nColumn col = targetTable.getColumn(colName);\nif (col == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR, colName, targetTable.getName());\n}\nif (!mentionedColumns.add(colName)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_FIELD_SPECIFIED_TWICE, colName);\n}\ntargetColumns.add(col);\n}\nfor (Column col : targetTable.getBaseSchema()) {\nif (col.getType().isHllType() && !mentionedColumns.contains(col.getName())) {\nthrow new AnalysisException (\" hll column \" + col.getName() + \" mush in insert into columns\");\n}\nif (col.getType().isBitmapType() && !mentionedColumns.contains(col.getName())) {\nthrow new AnalysisException (\" object column \" + col.getName() + \" mush in insert into columns\");\n}\n}\n}\n/*\n* When doing schema change, there may be some shadow columns. we should add\n* them to the end of targetColumns. And use 'origColIdxsForShadowCols' to save\n* the index of column in 'targetColumns' which the shadow column related to.\n* eg: origin targetColumns: (A,B,C), shadow column: __doris_shadow_B after\n* processing, targetColumns: (A, B, C, __doris_shadow_B), and\n* origColIdxsForShadowCols has 1 element: \"1\", which is the index of column B\n* in targetColumns.\n*\n* Rule A: If the column which the shadow column related to is not mentioned,\n* then do not add the shadow column to targetColumns. They will be filled by\n* null or default value when loading.\n*/\nList origColIdxsForShadowCols = Lists.newArrayList();\nfor (Column column : targetTable.getFullSchema()) {\nif (column.isNameWithPrefix(SchemaChangeHandler.SHADOW_NAME_PRFIX)) {\nString origName = Column.removeNamePrefix(column.getName());\nfor (int i = 0; i < targetColumns.size(); i++) {\nif (targetColumns.get(i).nameEquals(origName, false)) {\norigColIdxsForShadowCols.add(i);\ntargetColumns.add(column);\nbreak;\n}\n}\n}\n}\nMap origColIdx2MvColIdx = Maps.newHashMap();\nfor (int mvColumnIdx = 0; mvColumnIdx < targetTable.getFullSchema().size(); ++mvColumnIdx) {\nColumn column = targetTable.getFullSchema().get(mvColumnIdx);\nif (column.isNameWithPrefix(CreateMaterializedViewStmt.MATERIALIZED_VIEW_NAME_PRFIX)) {\nList slots = new ArrayList<>();\ncolumn.getDefineExpr().collect(SlotRef.class, slots);\nPreconditions.checkArgument(slots.size() == 1);\nString origName = ((SlotRef) slots.get(0)).getColumnName();\nfor (int originColumnIdx = 0; originColumnIdx < targetColumns.size(); originColumnIdx++) {\nif (targetColumns.get(originColumnIdx).nameEquals(origName, false)) {\norigColIdx2MvColIdx.put(mvColumnIdx, originColumnIdx);\ntargetColumns.add(column);\nbreak;\n}\n}\n}\n}\nqueryStmt.setFromInsert(true);\nqueryStmt.analyze(analyzer);\nif (mentionedColumns.size() != queryStmt.getResultExprs().size()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_VALUE_COUNT);\n}\ncheckColumnCoverage(mentionedColumns, targetTable.getBaseSchema()) ;\nif (queryStmt instanceof SelectStmt && ((SelectStmt) queryStmt).getTableRefs().isEmpty()) {\nSelectStmt selectStmt = (SelectStmt) queryStmt;\nif (selectStmt.getValueList() != null) {\nList> rows = selectStmt.getValueList().getRows();\nfor (int rowIdx = 0; rowIdx < rows.size(); ++rowIdx) {\nanalyzeRow(analyzer, targetColumns, rows, rowIdx, 
origColIdxsForShadowCols, origColIdx2MvColIdx);\n}\nselectStmt.getResultExprs().clear();\nselectStmt.getBaseTblResultExprs().clear();\nfor (int i = 0; i < selectStmt.getValueList().getFirstRow().size(); ++i) {\nselectStmt.getResultExprs().add(selectStmt.getValueList().getFirstRow().get(i));\nselectStmt.getBaseTblResultExprs().add(selectStmt.getValueList().getFirstRow().get(i));\n}\n} else {\nList> rows = Lists.newArrayList();\nrows.add(selectStmt.getResultExprs());\nanalyzeRow(analyzer, targetColumns, rows, 0, origColIdxsForShadowCols, origColIdx2MvColIdx);\nselectStmt.getResultExprs().clear();\nfor (Expr expr : rows.get(0)) {\nselectStmt.getResultExprs().add(expr);\n}\n}\nisStreaming = true;\n} else {\nif (!origColIdxsForShadowCols.isEmpty()) {\nfor (Integer idx : origColIdxsForShadowCols) {\nqueryStmt.getResultExprs().add(queryStmt.getResultExprs().get(idx));\n}\n}\nif (!origColIdx2MvColIdx.isEmpty()) {\norigColIdx2MvColIdx.forEach((key, value) -> {\nColumn mvColumn = targetTable.getFullSchema().get(key);\nExpr expr = mvColumn.getDefineExpr();\nArrayList slots = new ArrayList<>();\nexpr.collect(SlotRef.class, slots);\nExprSubstitutionMap smap = new ExprSubstitutionMap();\nsmap.getLhs().add(slots.get(0));\nsmap.getRhs().add(queryStmt.getResultExprs().get(value));\nqueryStmt.getResultExprs().add(Expr.substituteList(Lists.newArrayList(expr), smap, analyzer, false).get(0));\n});\n}\nfor (int i = 0; i < targetColumns.size(); ++i) {\nColumn column = targetColumns.get(i);\nif (column.getType().isHllType()) {\nExpr expr = queryStmt.getResultExprs().get(i);\ncheckHllCompatibility(column, expr);\n}\nif (column.getAggregationType() == AggregateType.BITMAP_UNION) {\nExpr expr = queryStmt.getResultExprs().get(i);\ncheckBitmapCompatibility(column, expr);\n}\n}\n}\nif (!origColIdxsForShadowCols.isEmpty() || !origColIdx2MvColIdx.isEmpty()) {\nif (queryStmt.getResultExprs().size() != queryStmt.getBaseTblResultExprs().size()) {\nfor (Integer idx : origColIdxsForShadowCols) {\nqueryStmt.getBaseTblResultExprs().add(queryStmt.getBaseTblResultExprs().get(idx));\n}\nfor (Integer idx : origColIdx2MvColIdx.keySet()) {\nqueryStmt.getBaseTblResultExprs().add(queryStmt.getResultExprs().get(idx));\n}\n}\nif (queryStmt.getResultExprs().size() != queryStmt.getColLabels().size()) {\nfor (Integer idx : origColIdxsForShadowCols) {\nqueryStmt.getColLabels().add(queryStmt.getColLabels().get(idx));\n}\nfor (Integer idx : origColIdx2MvColIdx.values()) {\nqueryStmt.getColLabels().add(queryStmt.getColLabels().get(idx));\n}\n}\n}\nif (LOG.isDebugEnabled()) {\nfor (Expr expr : queryStmt.getResultExprs()) {\nLOG.debug(\"final result expr: {}, {}\", expr, System.identityHashCode(expr));\n}\nfor (Expr expr : queryStmt.getBaseTblResultExprs()) {\nLOG.debug(\"final base table result expr: {}, {}\", expr, System.identityHashCode(expr));\n}\nfor (String colLabel : queryStmt.getColLabels()) {\nLOG.debug(\"final col label: {}\", colLabel);\n}\n}\n}", + "target_code": "origColIdx2MvColIdx.put(mvColumnIdx, originColumnIdx);", + "method_body_after": "private void analyzeSubquery(Analyzer analyzer) throws UserException {\nSet mentionedColumns = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);\nif (targetColumnNames == null) {\nfor (Column col : targetTable.getBaseSchema()) {\nmentionedColumns.add(col.getName());\ntargetColumns.add(col);\n}\n} else {\nfor (String colName : targetColumnNames) {\nColumn col = targetTable.getColumn(colName);\nif (col == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR, colName, 
targetTable.getName());\n}\nif (!mentionedColumns.add(colName)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_FIELD_SPECIFIED_TWICE, colName);\n}\ntargetColumns.add(col);\n}\nfor (Column col : targetTable.getBaseSchema()) {\nif (col.getType().isHllType() && !mentionedColumns.contains(col.getName())) {\nthrow new AnalysisException (\" hll column \" + col.getName() + \" mush in insert into columns\");\n}\nif (col.getType().isBitmapType() && !mentionedColumns.contains(col.getName())) {\nthrow new AnalysisException (\" object column \" + col.getName() + \" mush in insert into columns\");\n}\n}\n}\n/*\n* When doing schema change, there may be some shadow columns. we should add\n* them to the end of targetColumns. And use 'origColIdxsForExtendCols' to save\n* the index of column in 'targetColumns' which the shadow column related to.\n* eg: origin targetColumns: (A,B,C), shadow column: __doris_shadow_B after\n* processing, targetColumns: (A, B, C, __doris_shadow_B), and\n* origColIdxsForExtendCols has 1 element: \"1\", which is the index of column B\n* in targetColumns.\n*\n* Rule A: If the column which the shadow column related to is not mentioned,\n* then do not add the shadow column to targetColumns. They will be filled by\n* null or default value when loading.\n*\n* When table have materialized view, there may be some materialized view columns.\n* we should add them to the end of targetColumns.\n* eg: origin targetColumns: (A,B,C), shadow column: mv_bitmap_union_C\n* after processing, targetColumns: (A, B, C, mv_bitmap_union_C), and\n* origColIdx2MVColumn has 1 element: \"2, mv_bitmap_union_C\"\n* will be used in as a mapping from queryStmt.getResultExprs() to targetColumns define expr\n*/\nList> origColIdxsForExtendCols = Lists.newArrayList();\nfor (Column column : targetTable.getFullSchema()) {\nif (column.isNameWithPrefix(SchemaChangeHandler.SHADOW_NAME_PRFIX)) {\nString origName = Column.removeNamePrefix(column.getName());\nfor (int i = 0; i < targetColumns.size(); i++) {\nif (targetColumns.get(i).nameEquals(origName, false)) {\norigColIdxsForExtendCols.add(new Pair<>(i, null));\ntargetColumns.add(column);\nbreak;\n}\n}\n}\nif (column.isNameWithPrefix(CreateMaterializedViewStmt.MATERIALIZED_VIEW_NAME_PRFIX)) {\nSlotRef refColumn = column.getRefColumn();\nif (refColumn == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR, column.getName(), targetTable.getName());\n}\nString origName = refColumn.getColumnName();\nfor (int originColumnIdx = 0; originColumnIdx < targetColumns.size(); originColumnIdx++) {\nif (targetColumns.get(originColumnIdx).nameEquals(origName, false)) {\norigColIdxsForExtendCols.add(new Pair<>(originColumnIdx, column));\ntargetColumns.add(column);\nbreak;\n}\n}\n}\n}\nqueryStmt.setFromInsert(true);\nqueryStmt.analyze(analyzer);\nif (mentionedColumns.size() != queryStmt.getResultExprs().size()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_VALUE_COUNT);\n}\ncheckColumnCoverage(mentionedColumns, targetTable.getBaseSchema()) ;\nif (queryStmt instanceof SelectStmt && ((SelectStmt) queryStmt).getTableRefs().isEmpty()) {\nSelectStmt selectStmt = (SelectStmt) queryStmt;\nif (selectStmt.getValueList() != null) {\nList> rows = selectStmt.getValueList().getRows();\nfor (int rowIdx = 0; rowIdx < rows.size(); ++rowIdx) {\nanalyzeRow(analyzer, targetColumns, rows, rowIdx, origColIdxsForExtendCols);\n}\nselectStmt.getResultExprs().clear();\nselectStmt.getBaseTblResultExprs().clear();\nfor (int i = 0; i < 
selectStmt.getValueList().getFirstRow().size(); ++i) {\nselectStmt.getResultExprs().add(selectStmt.getValueList().getFirstRow().get(i));\nselectStmt.getBaseTblResultExprs().add(selectStmt.getValueList().getFirstRow().get(i));\n}\n} else {\nList> rows = Lists.newArrayList();\nrows.add(selectStmt.getResultExprs());\nanalyzeRow(analyzer, targetColumns, rows, 0, origColIdxsForExtendCols);\nselectStmt.getResultExprs().clear();\nfor (Expr expr : rows.get(0)) {\nselectStmt.getResultExprs().add(expr);\n}\n}\nisStreaming = true;\n} else {\nif (!origColIdxsForExtendCols.isEmpty()) {\nfor (Pair entry : origColIdxsForExtendCols) {\nif (entry.second == null) {\nqueryStmt.getResultExprs().add(queryStmt.getResultExprs().get(entry.first));\n} else {\nExprSubstitutionMap smap = new ExprSubstitutionMap();\nsmap.getLhs().add(entry.second.getRefColumn());\nsmap.getRhs().add(queryStmt.getResultExprs().get(entry.first));\nExpr e = Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()), smap, analyzer, false).get(0);\nqueryStmt.getResultExprs().add(e);\n}\n}\n}\nfor (int i = 0; i < targetColumns.size(); ++i) {\nColumn column = targetColumns.get(i);\nif (column.getType().isHllType()) {\nExpr expr = queryStmt.getResultExprs().get(i);\ncheckHllCompatibility(column, expr);\n}\nif (column.getAggregationType() == AggregateType.BITMAP_UNION) {\nExpr expr = queryStmt.getResultExprs().get(i);\ncheckBitmapCompatibility(column, expr);\n}\n}\n}\nif (!origColIdxsForExtendCols.isEmpty()) {\nif (queryStmt.getResultExprs().size() != queryStmt.getBaseTblResultExprs().size()) {\nfor (Pair entry : origColIdxsForExtendCols) {\nif (entry.second == null) {\nqueryStmt.getBaseTblResultExprs().add(queryStmt.getBaseTblResultExprs().get(entry.first));\n} else {\nExprSubstitutionMap smap = new ExprSubstitutionMap();\nsmap.getLhs().add(entry.second.getRefColumn());\nsmap.getRhs().add(queryStmt.getResultExprs().get(entry.first));\nExpr e = Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()), smap, analyzer, false).get(0);\nqueryStmt.getBaseTblResultExprs().add(e);\n}\n}\n}\nif (queryStmt.getResultExprs().size() != queryStmt.getColLabels().size()) {\nfor (Pair entry : origColIdxsForExtendCols) {\nqueryStmt.getColLabels().add(queryStmt.getColLabels().get(entry.first));\n}\n}\n}\nif (LOG.isDebugEnabled()) {\nfor (Expr expr : queryStmt.getResultExprs()) {\nLOG.debug(\"final result expr: {}, {}\", expr, System.identityHashCode(expr));\n}\nfor (Expr expr : queryStmt.getBaseTblResultExprs()) {\nLOG.debug(\"final base table result expr: {}, {}\", expr, System.identityHashCode(expr));\n}\nfor (String colLabel : queryStmt.getColLabels()) {\nLOG.debug(\"final col label: {}\", colLabel);\n}\n}\n}", + "context_before": "class InsertStmt extends DdlStmt {\nprivate static final Logger LOG = LogManager.getLogger(InsertStmt.class);\npublic static final String SHUFFLE_HINT = \"SHUFFLE\";\npublic static final String NOSHUFFLE_HINT = \"NOSHUFFLE\";\npublic static final String STREAMING = \"STREAMING\";\nprivate final TableName tblName;\nprivate final PartitionNames targetPartitionNames;\nprivate List targetPartitionIds = Lists.newArrayList();\nprivate final List targetColumnNames;\nprivate QueryStmt queryStmt;\nprivate final List planHints;\nprivate Boolean isRepartition;\nprivate boolean isStreaming = false;\nprivate String label = null;\nprivate boolean isUserSpecifiedLabel = false;\nprivate UUID uuid;\nprivate Map indexIdToSchemaHash = null;\nprivate ArrayList resultExprs = Lists.newArrayList();\nprivate Map exprByName = 
Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);\nprivate Table targetTable;\nprivate Database db;\nprivate long transactionId;\nprivate TupleDescriptor olapTuple;\nprivate DataSink dataSink;\nprivate DataPartition dataPartition;\nprivate List targetColumns = Lists.newArrayList();\n/*\n* InsertStmt may be analyzed twice, but transaction must be only begun once.\n* So use a boolean to check if transaction already begun.\n*/\nprivate boolean isTransactionBegin = false;\npublic InsertStmt(InsertTarget target, String label, List cols, InsertSource source, List hints) {\nthis.tblName = target.getTblName();\nthis.targetPartitionNames = target.getPartitionNames();\nthis.label = label;\nthis.queryStmt = source.getQueryStmt();\nthis.planHints = hints;\nthis.targetColumnNames = cols;\nif (!Strings.isNullOrEmpty(label)) {\nisUserSpecifiedLabel = true;\n}\n}\npublic InsertStmt(TableName name, QueryStmt queryStmt) {\nthis.tblName = name;\nthis.targetPartitionNames = null;\nthis.targetColumnNames = null;\nthis.queryStmt = queryStmt;\nthis.planHints = null;\n}\npublic TupleDescriptor getOlapTuple() {\nreturn olapTuple;\n}\npublic Table getTargetTable() {\nreturn targetTable;\n}\npublic void setTargetTable(Table targetTable) {\nthis.targetTable = targetTable;\n}\npublic Map getIndexIdToSchemaHash() {\nreturn this.indexIdToSchemaHash;\n}\npublic long getTransactionId() {\nreturn this.transactionId;\n}\npublic Boolean isRepartition() {\nreturn isRepartition;\n}\npublic String getDb() {\nreturn tblName.getDb();\n}\npublic void getDbs(Analyzer analyzer, Map dbs) throws AnalysisException {\nqueryStmt.getDbs(analyzer, dbs);\ntblName.analyze(analyzer);\nString dbName = tblName.getDb();\nDatabase db = analyzer.getCatalog().getDb(dbName);\nif (db == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_DB_ERROR, dbName);\n}\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tblName.getDb(), tblName.getTbl(),\nPrivPredicate.LOAD)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, \"LOAD\",\nConnectContext.get().getQualifiedUser(),\nConnectContext.get().getRemoteIP(), tblName.getTbl());\n}\ndbs.put(dbName, db);\n}\npublic QueryStmt getQueryStmt() {\nreturn queryStmt;\n}\npublic void setQueryStmt(QueryStmt queryStmt) {\nthis.queryStmt = queryStmt;\n}\n@Override\npublic void rewriteExprs(ExprRewriter rewriter) throws AnalysisException {\nPreconditions.checkState(isAnalyzed());\nqueryStmt.rewriteExprs(rewriter);\n}\n@Override\npublic boolean isExplain() {\nreturn queryStmt.isExplain();\n}\npublic boolean isStreaming() {\nreturn isStreaming;\n}\npublic String getLabel() {\nreturn label;\n}\npublic boolean isUserSpecifiedLabel() {\nreturn isUserSpecifiedLabel;\n}\npublic UUID getUUID() {\nreturn uuid;\n}\npublic DataSink getDataSink() {\nreturn dataSink;\n}\npublic Database getDbObj() {\nreturn db;\n}\npublic boolean isTransactionBegin() {\nreturn isTransactionBegin;\n}\n@Override\npublic void analyze(Analyzer analyzer) throws UserException {\nsuper.analyze(analyzer);\nif (targetTable == null) {\ntblName.analyze(analyzer);\n}\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tblName.getDb(),\ntblName.getTbl(), PrivPredicate.LOAD)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, \"LOAD\",\nConnectContext.get().getQualifiedUser(),\nConnectContext.get().getRemoteIP(), tblName.getTbl());\n}\nif (targetPartitionNames != null) 
{\ntargetPartitionNames.analyze(analyzer);\n}\nanalyzeTargetTable(analyzer);\nanalyzeSubquery(analyzer);\nanalyzePlanHints(analyzer);\ncreateDataSink();\ndb = analyzer.getCatalog().getDb(tblName.getDb());\nuuid = UUID.randomUUID();\nlong timeoutSecond = ConnectContext.get().getSessionVariable().getQueryTimeoutS();\nif (!isExplain() && !isTransactionBegin) {\nif (Strings.isNullOrEmpty(label)) {\nlabel = \"insert_\" + uuid.toString();\n}\nif (targetTable instanceof OlapTable) {\nLoadJobSourceType sourceType = LoadJobSourceType.INSERT_STREAMING;\nMetricRepo.COUNTER_LOAD_ADD.increase(1L);\ntransactionId = Catalog.getCurrentGlobalTransactionMgr().beginTransaction(db.getId(),\nLists.newArrayList(targetTable.getId()), label,\nnew TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),\nsourceType, timeoutSecond);\n}\nisTransactionBegin = true;\n}\nif (!isExplain() && targetTable instanceof OlapTable) {\nOlapTableSink sink = (OlapTableSink) dataSink;\nTUniqueId loadId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());\nsink.init(loadId, transactionId, db.getId(), timeoutSecond);\n}\n}\nprivate void analyzeTargetTable(Analyzer analyzer) throws AnalysisException {\nif (targetTable == null) {\ntargetTable = analyzer.getTable(tblName);\nif (targetTable == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, tblName.getTbl());\n}\n}\nif (targetTable instanceof OlapTable) {\nOlapTable olapTable = (OlapTable) targetTable;\nif (targetPartitionNames != null) {\nif (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);\n}\nfor (String partName : targetPartitionNames.getPartitionNames()) {\nPartition part = olapTable.getPartition(partName, targetPartitionNames.isTemp());\nif (part == null) {\nErrorReport.reportAnalysisException(\nErrorCode.ERR_UNKNOWN_PARTITION, partName, targetTable.getName());\n}\ntargetPartitionIds.add(part.getId());\n}\n} else {\nfor (Partition partition : olapTable.getPartitions()) {\ntargetPartitionIds.add(partition.getId());\n}\n}\nDescriptorTable descTable = analyzer.getDescTbl();\nolapTuple = descTable.createTupleDescriptor();\nfor (Column col : olapTable.getFullSchema()) {\nSlotDescriptor slotDesc = descTable.addSlotDescriptor(olapTuple);\nslotDesc.setIsMaterialized(true);\nslotDesc.setType(col.getType());\nslotDesc.setColumn(col);\nif (col.isAllowNull()) {\nslotDesc.setIsNullable(true);\n} else {\nslotDesc.setIsNullable(false);\n}\n}\nindexIdToSchemaHash = olapTable.getIndexIdToSchemaHash();\n} else if (targetTable instanceof MysqlTable) {\nif (targetPartitionNames != null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);\n}\n} else if (targetTable instanceof BrokerTable) {\nif (targetPartitionNames != null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);\n}\nBrokerTable brokerTable = (BrokerTable) targetTable;\nif (!brokerTable.isWritable()) {\nthrow new AnalysisException(\"table \" + brokerTable.getName()\n+ \"is not writable. 
path should be an dir\");\n}\n} else {\nErrorReport.reportAnalysisException(\nErrorCode.ERR_NON_INSERTABLE_TABLE, targetTable.getName(), targetTable.getType());\n}\n}\nprivate void checkColumnCoverage(Set mentionedCols, List baseColumns)\nthrows AnalysisException {\nfor (Column col : baseColumns) {\nif (mentionedCols.contains(col.getName())) {\ncontinue;\n}\nif (col.getDefaultValue() == null && !col.isAllowNull()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_COL_NOT_MENTIONED, col.getName());\n}\n}\n}\nprivate void analyzeRow(Analyzer analyzer, List targetColumns, List> rows,\nint rowIdx, List origColIdxsForShadowCols, Map origColIdxsForMVCols) throws AnalysisException {\nif (rows.get(rowIdx).size() != targetColumns.size() - origColIdxsForShadowCols.size() - origColIdxsForMVCols.size()) {\nthrow new AnalysisException(\"Column count doesn't match value count at row \" + (rowIdx + 1));\n}\nArrayList row = rows.get(rowIdx);\nif (!origColIdxsForShadowCols.isEmpty()) {\n/**\n* we should extends the row for shadow columns.\n* eg:\n* the origin row has exprs: (expr1, expr2, expr3), and targetColumns is (A, B, C, __doris_shadow_b)\n* after processing, extentedRow is (expr1, expr2, expr3, expr2)\n*/\nArrayList extentedRow = Lists.newArrayList();\nextentedRow.addAll(row);\nfor (Integer idx : origColIdxsForShadowCols) {\nextentedRow.add(extentedRow.get(idx));\n}\nrow = extentedRow;\nrows.set(rowIdx, row);\n}\nfor (int i = 0; i < row.size(); ++i) {\nExpr expr = row.get(i);\nColumn col = targetColumns.get(i);\nif (col.getType().equals(Type.HLL)) {\ncheckHllCompatibility(col, expr);\n}\nif (expr instanceof DefaultValueExpr) {\nif (targetColumns.get(i).getDefaultValue() == null) {\nthrow new AnalysisException(\"Column has no default value, column=\" + targetColumns.get(i).getName());\n}\nexpr = new StringLiteral(targetColumns.get(i).getDefaultValue());\n}\nexpr.analyze(analyzer);\nif (col.getAggregationType() == AggregateType.BITMAP_UNION) {\ncheckBitmapCompatibility(col, expr);\n}\nrow.set(i, checkTypeCompatibility(col, expr));\n}\nif (!origColIdxsForMVCols.isEmpty()) {\nArrayList extentedRow = Lists.newArrayList();\nextentedRow.addAll(row);\nfor (Map.Entry entry : origColIdxsForMVCols.entrySet()) {\nColumn mvColumn = targetTable.getFullSchema().get(entry.getKey());\nExpr expr = mvColumn.getDefineExpr();\nArrayList slots = new ArrayList<>();\nexpr.collect(SlotRef.class, slots);\nExprSubstitutionMap smap = new ExprSubstitutionMap();\nsmap.getLhs().add(slots.get(0));\nsmap.getRhs().add(extentedRow.get( entry.getValue()));\nextentedRow.add(Expr.substituteList(Lists.newArrayList(expr), smap, analyzer, false).get(0));\n}\nrow = extentedRow;\nrows.set(rowIdx, row);\n}\n}\nprivate void analyzePlanHints(Analyzer analyzer) throws AnalysisException {\nif (planHints == null) {\nreturn;\n}\nfor (String hint : planHints) {\nif (SHUFFLE_HINT.equalsIgnoreCase(hint)) {\nif (!targetTable.isPartitioned()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_INSERT_HINT_NOT_SUPPORT);\n}\nif (isRepartition != null && !isRepartition) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_PLAN_HINT_CONFILT, hint);\n}\nisRepartition = Boolean.TRUE;\n} else if (NOSHUFFLE_HINT.equalsIgnoreCase(hint)) {\nif (!targetTable.isPartitioned()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_INSERT_HINT_NOT_SUPPORT);\n}\nif (isRepartition != null && isRepartition) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_PLAN_HINT_CONFILT, hint);\n}\nisRepartition = Boolean.FALSE;\n} else if (STREAMING.equalsIgnoreCase(hint)) 
{\nisStreaming = true;\n} else {\nErrorReport.reportAnalysisException(ErrorCode.ERR_UNKNOWN_PLAN_HINT, hint);\n}\n}\n}\nprivate void checkHllCompatibility(Column col, Expr expr) throws AnalysisException {\nfinal String hllMismatchLog = \"Column's type is HLL,\"\n+ \" SelectList must contains HLL or hll_hash or hll_empty function's result, column=\" + col.getName();\nif (expr instanceof SlotRef) {\nfinal SlotRef slot = (SlotRef) expr;\nif (!slot.getType().equals(Type.HLL)) {\nthrow new AnalysisException(hllMismatchLog);\n}\n} else if (expr instanceof FunctionCallExpr) {\nfinal FunctionCallExpr functionExpr = (FunctionCallExpr) expr;\nif (!functionExpr.getFnName().getFunction().equalsIgnoreCase(\"hll_hash\") &&\n!functionExpr.getFnName().getFunction().equalsIgnoreCase(\"hll_empty\")) {\nthrow new AnalysisException(hllMismatchLog);\n}\n} else {\nthrow new AnalysisException(hllMismatchLog);\n}\n}\nprivate void checkBitmapCompatibility(Column col, Expr expr) throws AnalysisException {\nString errorMsg = String.format(\"bitmap column %s require the function return type is BITMAP\",\ncol.getName());\nif (!expr.getType().isBitmapType()) {\nthrow new AnalysisException(errorMsg);\n}\n}\nprivate Expr checkTypeCompatibility(Column col, Expr expr) throws AnalysisException {\nif (col.getDataType().equals(expr.getType().getPrimitiveType())) {\nreturn expr;\n}\nreturn expr.castTo(col.getType());\n}\npublic void prepareExpressions() throws UserException {\nList selectList = Expr.cloneList(queryStmt.getBaseTblResultExprs());\nint numCols = targetColumns.size();\nfor (int i = 0; i < numCols; ++i) {\nColumn col = targetColumns.get(i);\nExpr expr = checkTypeCompatibility(col, selectList.get(i));\nselectList.set(i, expr);\nexprByName.put(col.getName(), expr);\n}\nfor (Column col : targetTable.getFullSchema()) {\nif (exprByName.containsKey(col.getName())) {\nresultExprs.add(exprByName.get(col.getName()));\n} else {\nif (col.getDefaultValue() == null) {\n/*\nThe import stmt has been filtered in function checkColumnCoverage when\nthe default value of column is null and column is not nullable.\nSo the default value of column may simply be null when column is nullable\n*/\nPreconditions.checkState(col.isAllowNull());\nresultExprs.add(NullLiteral.create(col.getType()));\n}\nelse {\nresultExprs.add(checkTypeCompatibility(col, new StringLiteral(col.getDefaultValue())));\n}\n}\n}\n}\nprivate DataSink createDataSink() throws AnalysisException {\nif (dataSink != null) {\nreturn dataSink;\n}\nif (targetTable instanceof OlapTable) {\ndataSink = new OlapTableSink((OlapTable) targetTable, olapTuple, targetPartitionIds);\ndataPartition = dataSink.getOutputPartition();\n} else if (targetTable instanceof BrokerTable) {\nBrokerTable table = (BrokerTable) targetTable;\nBrokerDesc brokerDesc = new BrokerDesc(table.getBrokerName(), table.getBrokerProperties());\ndataSink = new ExportSink(\ntable.getWritablePath(),\ntable.getColumnSeparator(),\ntable.getLineDelimiter(),\nbrokerDesc);\ndataPartition = dataSink.getOutputPartition();\n} else {\ndataSink = DataSink.createDataSink(targetTable);\ndataPartition = DataPartition.UNPARTITIONED;\n}\nreturn dataSink;\n}\npublic void complete() throws UserException {\nif (!isExplain() && targetTable instanceof OlapTable) {\n((OlapTableSink) dataSink).complete();\nTransactionState txnState = Catalog.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), transactionId);\nif (txnState == null) {\nthrow new DdlException(\"txn does not exist: \" + 
transactionId);\n}\ntxnState.addTableIndexes((OlapTable) targetTable);\n}\n}\n@Override\npublic ArrayList getResultExprs() {\nreturn resultExprs;\n}\npublic DataPartition getDataPartition() {\nreturn dataPartition;\n}\n@Override\npublic void reset() {\nsuper.reset();\nqueryStmt.reset();\nresultExprs.clear();\nexprByName.clear();\ndataSink = null;\ndataPartition = null;\ntargetColumns.clear();\n}\n@Override\npublic RedirectStatus getRedirectStatus() {\nif (isExplain()) {\nreturn RedirectStatus.NO_FORWARD;\n} else {\nreturn RedirectStatus.FORWARD_WITH_SYNC;\n}\n}\n}", + "context_after": "class InsertStmt extends DdlStmt {\nprivate static final Logger LOG = LogManager.getLogger(InsertStmt.class);\npublic static final String SHUFFLE_HINT = \"SHUFFLE\";\npublic static final String NOSHUFFLE_HINT = \"NOSHUFFLE\";\npublic static final String STREAMING = \"STREAMING\";\nprivate final TableName tblName;\nprivate final PartitionNames targetPartitionNames;\nprivate List targetPartitionIds = Lists.newArrayList();\nprivate final List targetColumnNames;\nprivate QueryStmt queryStmt;\nprivate final List planHints;\nprivate Boolean isRepartition;\nprivate boolean isStreaming = false;\nprivate String label = null;\nprivate boolean isUserSpecifiedLabel = false;\nprivate UUID uuid;\nprivate Map indexIdToSchemaHash = null;\nprivate ArrayList resultExprs = Lists.newArrayList();\nprivate Map exprByName = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);\nprivate Table targetTable;\nprivate Database db;\nprivate long transactionId;\nprivate TupleDescriptor olapTuple;\nprivate DataSink dataSink;\nprivate DataPartition dataPartition;\nprivate List targetColumns = Lists.newArrayList();\n/*\n* InsertStmt may be analyzed twice, but transaction must be only begun once.\n* So use a boolean to check if transaction already begun.\n*/\nprivate boolean isTransactionBegin = false;\npublic InsertStmt(InsertTarget target, String label, List cols, InsertSource source, List hints) {\nthis.tblName = target.getTblName();\nthis.targetPartitionNames = target.getPartitionNames();\nthis.label = label;\nthis.queryStmt = source.getQueryStmt();\nthis.planHints = hints;\nthis.targetColumnNames = cols;\nif (!Strings.isNullOrEmpty(label)) {\nisUserSpecifiedLabel = true;\n}\n}\npublic InsertStmt(TableName name, QueryStmt queryStmt) {\nthis.tblName = name;\nthis.targetPartitionNames = null;\nthis.targetColumnNames = null;\nthis.queryStmt = queryStmt;\nthis.planHints = null;\n}\npublic TupleDescriptor getOlapTuple() {\nreturn olapTuple;\n}\npublic Table getTargetTable() {\nreturn targetTable;\n}\npublic void setTargetTable(Table targetTable) {\nthis.targetTable = targetTable;\n}\npublic Map getIndexIdToSchemaHash() {\nreturn this.indexIdToSchemaHash;\n}\npublic long getTransactionId() {\nreturn this.transactionId;\n}\npublic Boolean isRepartition() {\nreturn isRepartition;\n}\npublic String getDb() {\nreturn tblName.getDb();\n}\npublic void getDbs(Analyzer analyzer, Map dbs) throws AnalysisException {\nqueryStmt.getDbs(analyzer, dbs);\ntblName.analyze(analyzer);\nString dbName = tblName.getDb();\nDatabase db = analyzer.getCatalog().getDb(dbName);\nif (db == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_DB_ERROR, dbName);\n}\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tblName.getDb(), tblName.getTbl(),\nPrivPredicate.LOAD)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, \"LOAD\",\nConnectContext.get().getQualifiedUser(),\nConnectContext.get().getRemoteIP(), 
tblName.getTbl());\n}\ndbs.put(dbName, db);\n}\npublic QueryStmt getQueryStmt() {\nreturn queryStmt;\n}\npublic void setQueryStmt(QueryStmt queryStmt) {\nthis.queryStmt = queryStmt;\n}\n@Override\npublic void rewriteExprs(ExprRewriter rewriter) throws AnalysisException {\nPreconditions.checkState(isAnalyzed());\nqueryStmt.rewriteExprs(rewriter);\n}\n@Override\npublic boolean isExplain() {\nreturn queryStmt.isExplain();\n}\npublic boolean isStreaming() {\nreturn isStreaming;\n}\npublic String getLabel() {\nreturn label;\n}\npublic boolean isUserSpecifiedLabel() {\nreturn isUserSpecifiedLabel;\n}\npublic UUID getUUID() {\nreturn uuid;\n}\npublic DataSink getDataSink() {\nreturn dataSink;\n}\npublic Database getDbObj() {\nreturn db;\n}\npublic boolean isTransactionBegin() {\nreturn isTransactionBegin;\n}\n@Override\npublic void analyze(Analyzer analyzer) throws UserException {\nsuper.analyze(analyzer);\nif (targetTable == null) {\ntblName.analyze(analyzer);\n}\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tblName.getDb(),\ntblName.getTbl(), PrivPredicate.LOAD)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, \"LOAD\",\nConnectContext.get().getQualifiedUser(),\nConnectContext.get().getRemoteIP(), tblName.getTbl());\n}\nif (targetPartitionNames != null) {\ntargetPartitionNames.analyze(analyzer);\n}\nanalyzeTargetTable(analyzer);\nanalyzeSubquery(analyzer);\nanalyzePlanHints(analyzer);\ncreateDataSink();\ndb = analyzer.getCatalog().getDb(tblName.getDb());\nuuid = UUID.randomUUID();\nlong timeoutSecond = ConnectContext.get().getSessionVariable().getQueryTimeoutS();\nif (!isExplain() && !isTransactionBegin) {\nif (Strings.isNullOrEmpty(label)) {\nlabel = \"insert_\" + uuid.toString();\n}\nif (targetTable instanceof OlapTable) {\nLoadJobSourceType sourceType = LoadJobSourceType.INSERT_STREAMING;\nMetricRepo.COUNTER_LOAD_ADD.increase(1L);\ntransactionId = Catalog.getCurrentGlobalTransactionMgr().beginTransaction(db.getId(),\nLists.newArrayList(targetTable.getId()), label,\nnew TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),\nsourceType, timeoutSecond);\n}\nisTransactionBegin = true;\n}\nif (!isExplain() && targetTable instanceof OlapTable) {\nOlapTableSink sink = (OlapTableSink) dataSink;\nTUniqueId loadId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());\nsink.init(loadId, transactionId, db.getId(), timeoutSecond);\n}\n}\nprivate void analyzeTargetTable(Analyzer analyzer) throws AnalysisException {\nif (targetTable == null) {\ntargetTable = analyzer.getTable(tblName);\nif (targetTable == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, tblName.getTbl());\n}\n}\nif (targetTable instanceof OlapTable) {\nOlapTable olapTable = (OlapTable) targetTable;\nif (targetPartitionNames != null) {\nif (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);\n}\nfor (String partName : targetPartitionNames.getPartitionNames()) {\nPartition part = olapTable.getPartition(partName, targetPartitionNames.isTemp());\nif (part == null) {\nErrorReport.reportAnalysisException(\nErrorCode.ERR_UNKNOWN_PARTITION, partName, targetTable.getName());\n}\ntargetPartitionIds.add(part.getId());\n}\n} else {\nfor (Partition partition : olapTable.getPartitions()) {\ntargetPartitionIds.add(partition.getId());\n}\n}\nDescriptorTable descTable = analyzer.getDescTbl();\nolapTuple = 
descTable.createTupleDescriptor();\nfor (Column col : olapTable.getFullSchema()) {\nSlotDescriptor slotDesc = descTable.addSlotDescriptor(olapTuple);\nslotDesc.setIsMaterialized(true);\nslotDesc.setType(col.getType());\nslotDesc.setColumn(col);\nif (col.isAllowNull()) {\nslotDesc.setIsNullable(true);\n} else {\nslotDesc.setIsNullable(false);\n}\n}\nindexIdToSchemaHash = olapTable.getIndexIdToSchemaHash();\n} else if (targetTable instanceof MysqlTable) {\nif (targetPartitionNames != null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);\n}\n} else if (targetTable instanceof BrokerTable) {\nif (targetPartitionNames != null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);\n}\nBrokerTable brokerTable = (BrokerTable) targetTable;\nif (!brokerTable.isWritable()) {\nthrow new AnalysisException(\"table \" + brokerTable.getName()\n+ \"is not writable. path should be an dir\");\n}\n} else {\nErrorReport.reportAnalysisException(\nErrorCode.ERR_NON_INSERTABLE_TABLE, targetTable.getName(), targetTable.getType());\n}\n}\nprivate void checkColumnCoverage(Set mentionedCols, List baseColumns)\nthrows AnalysisException {\nfor (Column col : baseColumns) {\nif (mentionedCols.contains(col.getName())) {\ncontinue;\n}\nif (col.getDefaultValue() == null && !col.isAllowNull()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_COL_NOT_MENTIONED, col.getName());\n}\n}\n}\nprivate void analyzeRow(Analyzer analyzer, List targetColumns, List> rows,\nint rowIdx, List> origColIdxsForExtendCols) throws AnalysisException {\nif (rows.get(rowIdx).size() != targetColumns.size() - origColIdxsForExtendCols.size()) {\nthrow new AnalysisException(\"Column count doesn't match value count at row \" + (rowIdx + 1));\n}\nArrayList row = rows.get(rowIdx);\nif (!origColIdxsForExtendCols.isEmpty()) {\n/**\n* we should extends the row for shadow columns.\n* eg:\n* the origin row has exprs: (expr1, expr2, expr3), and targetColumns is (A, B, C, __doris_shadow_b)\n* after processing, extentedRow is (expr1, expr2, expr3, expr2)\n*/\nArrayList extentedRow = Lists.newArrayList();\nextentedRow.addAll(row);\nfor (Pair entry : origColIdxsForExtendCols) {\nif (entry == null) {\nextentedRow.add(extentedRow.get(entry.first));\n} else {\nExprSubstitutionMap smap = new ExprSubstitutionMap();\nsmap.getLhs().add(entry.second.getRefColumn());\nsmap.getRhs().add(extentedRow.get(entry.first));\nextentedRow.add(Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()), smap, analyzer, false).get(0));\n}\n}\nrow = extentedRow;\nrows.set(rowIdx, row);\n}\nfor (int i = 0; i < row.size(); ++i) {\nExpr expr = row.get(i);\nColumn col = targetColumns.get(i);\nif (col.getType().equals(Type.HLL)) {\ncheckHllCompatibility(col, expr);\n}\nif (expr instanceof DefaultValueExpr) {\nif (targetColumns.get(i).getDefaultValue() == null) {\nthrow new AnalysisException(\"Column has no default value, column=\" + targetColumns.get(i).getName());\n}\nexpr = new StringLiteral(targetColumns.get(i).getDefaultValue());\n}\nexpr.analyze(analyzer);\nif (col.getAggregationType() == AggregateType.BITMAP_UNION) {\ncheckBitmapCompatibility(col, expr);\n}\nrow.set(i, checkTypeCompatibility(col, expr));\n}\n}\nprivate void analyzePlanHints(Analyzer analyzer) throws AnalysisException {\nif (planHints == null) {\nreturn;\n}\nfor (String hint : planHints) {\nif (SHUFFLE_HINT.equalsIgnoreCase(hint)) {\nif (!targetTable.isPartitioned()) 
{\nErrorReport.reportAnalysisException(ErrorCode.ERR_INSERT_HINT_NOT_SUPPORT);\n}\nif (isRepartition != null && !isRepartition) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_PLAN_HINT_CONFILT, hint);\n}\nisRepartition = Boolean.TRUE;\n} else if (NOSHUFFLE_HINT.equalsIgnoreCase(hint)) {\nif (!targetTable.isPartitioned()) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_INSERT_HINT_NOT_SUPPORT);\n}\nif (isRepartition != null && isRepartition) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_PLAN_HINT_CONFILT, hint);\n}\nisRepartition = Boolean.FALSE;\n} else if (STREAMING.equalsIgnoreCase(hint)) {\nisStreaming = true;\n} else {\nErrorReport.reportAnalysisException(ErrorCode.ERR_UNKNOWN_PLAN_HINT, hint);\n}\n}\n}\nprivate void checkHllCompatibility(Column col, Expr expr) throws AnalysisException {\nfinal String hllMismatchLog = \"Column's type is HLL,\"\n+ \" SelectList must contains HLL or hll_hash or hll_empty function's result, column=\" + col.getName();\nif (expr instanceof SlotRef) {\nfinal SlotRef slot = (SlotRef) expr;\nif (!slot.getType().equals(Type.HLL)) {\nthrow new AnalysisException(hllMismatchLog);\n}\n} else if (expr instanceof FunctionCallExpr) {\nfinal FunctionCallExpr functionExpr = (FunctionCallExpr) expr;\nif (!functionExpr.getFnName().getFunction().equalsIgnoreCase(\"hll_hash\") &&\n!functionExpr.getFnName().getFunction().equalsIgnoreCase(\"hll_empty\")) {\nthrow new AnalysisException(hllMismatchLog);\n}\n} else {\nthrow new AnalysisException(hllMismatchLog);\n}\n}\nprivate void checkBitmapCompatibility(Column col, Expr expr) throws AnalysisException {\nString errorMsg = String.format(\"bitmap column %s require the function return type is BITMAP\",\ncol.getName());\nif (!expr.getType().isBitmapType()) {\nthrow new AnalysisException(errorMsg);\n}\n}\nprivate Expr checkTypeCompatibility(Column col, Expr expr) throws AnalysisException {\nif (col.getDataType().equals(expr.getType().getPrimitiveType())) {\nreturn expr;\n}\nreturn expr.castTo(col.getType());\n}\npublic void prepareExpressions() throws UserException {\nList selectList = Expr.cloneList(queryStmt.getBaseTblResultExprs());\nint numCols = targetColumns.size();\nfor (int i = 0; i < numCols; ++i) {\nColumn col = targetColumns.get(i);\nExpr expr = checkTypeCompatibility(col, selectList.get(i));\nselectList.set(i, expr);\nexprByName.put(col.getName(), expr);\n}\nfor (Column col : targetTable.getFullSchema()) {\nif (exprByName.containsKey(col.getName())) {\nresultExprs.add(exprByName.get(col.getName()));\n} else {\nif (col.getDefaultValue() == null) {\n/*\nThe import stmt has been filtered in function checkColumnCoverage when\nthe default value of column is null and column is not nullable.\nSo the default value of column may simply be null when column is nullable\n*/\nPreconditions.checkState(col.isAllowNull());\nresultExprs.add(NullLiteral.create(col.getType()));\n}\nelse {\nresultExprs.add(checkTypeCompatibility(col, new StringLiteral(col.getDefaultValue())));\n}\n}\n}\n}\nprivate DataSink createDataSink() throws AnalysisException {\nif (dataSink != null) {\nreturn dataSink;\n}\nif (targetTable instanceof OlapTable) {\ndataSink = new OlapTableSink((OlapTable) targetTable, olapTuple, targetPartitionIds);\ndataPartition = dataSink.getOutputPartition();\n} else if (targetTable instanceof BrokerTable) {\nBrokerTable table = (BrokerTable) targetTable;\nBrokerDesc brokerDesc = new BrokerDesc(table.getBrokerName(), table.getBrokerProperties());\ndataSink = new 
ExportSink(\ntable.getWritablePath(),\ntable.getColumnSeparator(),\ntable.getLineDelimiter(),\nbrokerDesc);\ndataPartition = dataSink.getOutputPartition();\n} else {\ndataSink = DataSink.createDataSink(targetTable);\ndataPartition = DataPartition.UNPARTITIONED;\n}\nreturn dataSink;\n}\npublic void complete() throws UserException {\nif (!isExplain() && targetTable instanceof OlapTable) {\n((OlapTableSink) dataSink).complete();\nTransactionState txnState = Catalog.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), transactionId);\nif (txnState == null) {\nthrow new DdlException(\"txn does not exist: \" + transactionId);\n}\ntxnState.addTableIndexes((OlapTable) targetTable);\n}\n}\n@Override\npublic ArrayList getResultExprs() {\nreturn resultExprs;\n}\npublic DataPartition getDataPartition() {\nreturn dataPartition;\n}\n@Override\npublic void reset() {\nsuper.reset();\ntargetPartitionIds.clear();\nqueryStmt.reset();\nresultExprs.clear();\nexprByName.clear();\ndataSink = null;\ndataPartition = null;\ntargetColumns.clear();\n}\n@Override\npublic RedirectStatus getRedirectStatus() {\nif (isExplain()) {\nreturn RedirectStatus.NO_FORWARD;\n} else {\nreturn RedirectStatus.FORWARD_WITH_SYNC;\n}\n}\n}" + }, + { + "comment": "Why don't we have to handle variable reference fields?", + "method_body": "public void visit(BLangRecordLiteral recordLiteral) {\nfor (RecordLiteralNode.RecordField field : recordLiteral.fields) {\nif (field.isKeyValueField()) {\nBLangRecordLiteral.BLangRecordKeyValueField keyValueField =\n(BLangRecordLiteral.BLangRecordKeyValueField) field;\nkeyValueField.key.expr = rewriteExpr(keyValueField.key.expr);\nkeyValueField.valueExpr = rewriteExpr(keyValueField.valueExpr);\n} else if (field.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {\nBLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =\n(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;\nspreadOpField.expr = rewriteExpr(spreadOpField.expr);\n}\n}\nresult = recordLiteral;\n}", + "target_code": "} else if (field.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {", + "method_body_after": "public void visit(BLangRecordLiteral recordLiteral) {\nfor (RecordLiteralNode.RecordField field : recordLiteral.fields) {\nif (field.isKeyValueField()) {\nBLangRecordLiteral.BLangRecordKeyValueField keyValueField =\n(BLangRecordLiteral.BLangRecordKeyValueField) field;\nkeyValueField.key.expr = rewriteExpr(keyValueField.key.expr);\nkeyValueField.valueExpr = rewriteExpr(keyValueField.valueExpr);\n} else if (field.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {\nBLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =\n(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;\nspreadOpField.expr = rewriteExpr(spreadOpField.expr);\n}\n}\nresult = recordLiteral;\n}", + "context_before": "class ClosureGenerator extends BLangNodeVisitor {\nprivate static final CompilerContext.Key CLOSURE_GENERATOR_KEY = new CompilerContext.Key<>();\nprivate Queue queue;\nprivate List annotationClosureReferences;\nprivate SymbolTable symTable;\nprivate SymbolEnv env;\nprivate BLangNode result;\nprivate SymbolResolver symResolver;\nprivate AnnotationDesugar annotationDesugar;\npublic static ClosureGenerator getInstance(CompilerContext context) {\nClosureGenerator closureGenerator = context.get(CLOSURE_GENERATOR_KEY);\nif (closureGenerator == null) {\nclosureGenerator = new ClosureGenerator(context);\n}\nreturn closureGenerator;\n}\nprivate ClosureGenerator(CompilerContext context) {\ncontext.put(CLOSURE_GENERATOR_KEY, this);\nthis.symTable = 
SymbolTable.getInstance(context);\nthis.queue = new LinkedList<>();\nthis.annotationClosureReferences = new ArrayList<>();\nthis.symResolver = SymbolResolver.getInstance(context);\nthis.annotationDesugar = AnnotationDesugar.getInstance(context);\n}\n@Override\npublic void visit(BLangPackage pkgNode) {\nSymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);\nfor (int i = 0; i < pkgNode.functions.size(); i++) {\nBLangFunction bLangFunction = pkgNode.functions.get(i);\nif (!bLangFunction.flagSet.contains(Flag.LAMBDA)) {\nSymbolEnv funcEnv = SymbolEnv.createFunctionEnv(bLangFunction, bLangFunction.symbol.scope, pkgEnv);\nrewriteParamsAndReturnTypeOfFunction(bLangFunction, funcEnv);\n}\n}\npkgNode.services.forEach(service -> rewrite(service, pkgEnv));\npkgNode.typeDefinitions.forEach(typeDefinition -> rewrite(typeDefinition, pkgEnv));\npkgNode.xmlnsList.forEach(xmlns -> rewrite(xmlns, pkgEnv));\npkgNode.constants.forEach(constant -> rewrite(constant, pkgEnv));\npkgNode.annotations.forEach(annotation -> rewrite(annotation, pkgEnv));\npkgNode.initFunction = rewrite(pkgNode.initFunction, pkgEnv);\npkgNode.classDefinitions = rewrite(pkgNode.classDefinitions, pkgEnv);\npkgNode.globalVars.forEach(globalVar -> rewrite(globalVar, pkgEnv));\naddClosuresToGlobalVariableList(pkgEnv);\nfor (int i = 0; i < pkgNode.functions.size(); i++) {\nBLangFunction bLangFunction = pkgNode.functions.get(i);\nif (!bLangFunction.flagSet.contains(Flag.LAMBDA)) {\nrewrite(bLangFunction, pkgEnv);\n}\n}\nresult = pkgNode;\n}\nprivate void addClosuresToGlobalVariableList(SymbolEnv pkgEnv) {\nIterator iterator = queue.iterator();\nwhile (iterator.hasNext()) {\nBLangSimpleVariable simpleVariable = queue.poll().var;\nsimpleVariable.flagSet.add(Flag.PUBLIC);\nsimpleVariable.symbol.flags |= Flags.PUBLIC;\npkgEnv.enclPkg.globalVars.add(0, rewrite(simpleVariable, pkgEnv));\n}\nfor (BLangSimpleVariableDef closureReference : annotationClosureReferences) {\npkgEnv.enclPkg.globalVars.add(rewrite(closureReference.var, pkgEnv));\n}\n}\n@Override\npublic void visit(BLangFunction funcNode) {\nSymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);\nif (funcNode.flagSet.contains(Flag.LAMBDA)) {\nrewriteParamsAndReturnTypeOfFunction(funcNode, funcEnv);\n}\nfuncNode.body = rewrite(funcNode.body, funcEnv);\nresult = funcNode;\n}\npublic void rewriteParamsAndReturnTypeOfFunction(BLangFunction funcNode, SymbolEnv funcEnv) {\nfor (BLangSimpleVariable bLangSimpleVariable : funcNode.requiredParams) {\nrewrite(bLangSimpleVariable, funcEnv);\n}\nif (funcNode.restParam != null) {\nfuncNode.restParam = rewrite(funcNode.restParam, funcEnv);\n}\nif (funcNode.returnTypeNode != null && funcNode.returnTypeNode.getKind() != null) {\nfuncNode.returnTypeNode = rewrite(funcNode.returnTypeNode, funcEnv);\n}\n}\n@Override\npublic void visit(BLangBlockFunctionBody body) {\nSymbolEnv blockEnv = SymbolEnv.createFuncBodyEnv(body, env);\nbody.stmts = rewriteStmt(body.stmts, blockEnv);\nresult = body;\n}\n@Override\npublic void visit(BLangRawTemplateLiteral rawTemplateLiteral) {\nresult = rawTemplateLiteral;\n}\n@Override\npublic void visit(BLangExprFunctionBody exprBody) {\nexprBody.expr = rewriteExpr(exprBody.expr);\nresult = exprBody;\n}\n@Override\npublic void visit(BLangResourceFunction resourceFunction) {\nvisit((BLangFunction) resourceFunction);\n}\n@Override\npublic void visit(BLangExternalFunctionBody body) {\nresult = body;\n}\n@Override\npublic void visit(BLangBlockStmt blockNode) {\nSymbolEnv blockEnv = 
SymbolEnv.createBlockEnv(blockNode, env);\nblockNode.stmts = rewriteStmt(blockNode.stmts, blockEnv);\nresult = blockNode;\n}\n@Override\npublic void visit(BLangService serviceNode) {\nresult = serviceNode;\n}\n@Override\npublic void visit(BLangSimpleVariableDef varDefNode) {\nvarDefNode.var = rewrite(varDefNode.var, env);\nresult = varDefNode;\n}\n@Override\npublic void visit(BLangReturn returnNode) {\nif (returnNode.expr != null) {\nreturnNode.expr = rewriteExpr(returnNode.expr);\n}\nresult = returnNode;\n}\n@Override\npublic void visit(BLangTypeDefinition typeDef) {\ntypeDef.typeNode = rewrite(typeDef.typeNode, env);\nresult = typeDef;\n}\n@Override\npublic void visit(BLangIntersectionTypeNode intersectionTypeNode) {\nList rewrittenConstituents = new ArrayList<>();\nfor (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) {\nrewrittenConstituents.add(rewrite(constituentTypeNode, env));\n}\nintersectionTypeNode.constituentTypeNodes = rewrittenConstituents;\nresult = intersectionTypeNode;\n}\n@Override\npublic void visit(BLangClassDefinition classDefinition) {\nSymbolEnv classEnv = SymbolEnv.createClassEnv(classDefinition, classDefinition.symbol.scope, env);\nfor (BLangSimpleVariable bLangSimpleVariable : classDefinition.fields) {\nbLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, classEnv);\n}\nresult = classDefinition;\n}\n@Override\npublic void visit(BLangObjectTypeNode objectTypeNode) {\nfor (BLangSimpleVariable field : objectTypeNode.fields) {\nrewrite(field, env);\n}\nresult = objectTypeNode;\n}\n@Override\npublic void visit(BLangObjectConstructorExpression objectConstructorExpression) {\nobjectConstructorExpression.typeInit = rewriteExpr(objectConstructorExpression.typeInit);\nresult = objectConstructorExpression;\n}\n@Override\npublic void visit(BLangRecordTypeNode recordTypeNode) {\nBTypeSymbol typeSymbol = recordTypeNode.getBType().tsymbol;\nBSymbol owner = typeSymbol.owner;\ndesugarFieldAnnotations(owner, typeSymbol, recordTypeNode.fields, recordTypeNode.pos);\nfor (BLangSimpleVariable field : recordTypeNode.fields) {\nrewrite(field, env);\n}\nrecordTypeNode.restFieldType = rewrite(recordTypeNode.restFieldType, env);\nresult = recordTypeNode;\n}\n@Override\npublic void visit(BLangTupleTypeNode tupleTypeNode) {\nBTypeSymbol typeSymbol = tupleTypeNode.getBType().tsymbol;\nBSymbol owner = typeSymbol.owner;\ndesugarFieldAnnotations(owner, typeSymbol, tupleTypeNode.members, tupleTypeNode.pos);\nList rewrittenMembers = new ArrayList<>();\ntupleTypeNode.members.forEach(member -> rewrittenMembers.add(rewrite(member, env)));\ntupleTypeNode.members = rewrittenMembers;\ntupleTypeNode.restParamType = rewrite(tupleTypeNode.restParamType, env);\nresult = tupleTypeNode;\n}\nprivate void desugarFieldAnnotations(BSymbol owner, BTypeSymbol typeSymbol, List fields,\nLocation pos) {\nif (owner.getKind() != SymbolKind.PACKAGE) {\nowner = getOwner(env);\nBLangLambdaFunction lambdaFunction = annotationDesugar.defineFieldAnnotations(fields, pos, env.enclPkg, env,\ntypeSymbol.pkgID, owner);\nif (lambdaFunction != null) {\nboolean isPackageLevel = owner.getKind() == SymbolKind.PACKAGE;\nBInvokableSymbol invokableSymbol = createSimpleVariable(lambdaFunction.function, lambdaFunction,\nisPackageLevel);\ntypeSymbol.annotations = createSimpleVariable(invokableSymbol, isPackageLevel);\n}\n}\n}\n@Override\npublic void visit(BLangFiniteTypeNode finiteTypeNode) {\nfiniteTypeNode.valueSpace.forEach(param -> rewrite(param, env));\nresult = 
finiteTypeNode;\n}\n@Override\npublic void visit(BLangArrayType arrayType) {\narrayType.elemtype = rewrite(arrayType.elemtype, env);\nresult = arrayType;\n}\n@Override\npublic void visit(BLangUserDefinedType userDefinedType) {\nresult = userDefinedType;\n}\n@Override\npublic void visit(BLangUnionTypeNode unionTypeNode) {\nList rewrittenMembers = new ArrayList<>();\nunionTypeNode.memberTypeNodes.forEach(typeNode -> rewrittenMembers.add(rewrite(typeNode, env)));\nunionTypeNode.memberTypeNodes = rewrittenMembers;\nresult = unionTypeNode;\n}\n@Override\npublic void visit(BLangValueType valueType) {\nresult = valueType;\n}\n@Override\npublic void visit(BLangBuiltInRefTypeNode builtInRefTypeNode) {\nresult = builtInRefTypeNode;\n}\n@Override\npublic void visit(BLangStreamType streamType) {\nstreamType.constraint = rewrite(streamType.constraint, env);\nstreamType.error = rewrite(streamType.error, env);\nresult = streamType;\n}\n@Override\npublic void visit(BLangConstrainedType constrainedType) {\nconstrainedType.constraint = rewrite(constrainedType.constraint, env);\nresult = constrainedType;\n}\n@Override\npublic void visit(BLangErrorType errorType) {\nerrorType.detailType = rewrite(errorType.detailType, env);\nresult = errorType;\n}\n@Override\npublic void visit(BLangTableTypeNode tableTypeNode) {\ntableTypeNode.constraint = rewrite(tableTypeNode.constraint, env);\ntableTypeNode.tableKeyTypeConstraint = rewrite(tableTypeNode.tableKeyTypeConstraint, env);\nresult = tableTypeNode;\n}\n@Override\npublic void visit(BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) {\nresult = resourceAccessInvocation;\n}\n@Override\npublic void visit(BLangTableKeyTypeConstraint keyTypeConstraint) {\nkeyTypeConstraint.keyType = rewrite(keyTypeConstraint.keyType, env);\nresult = keyTypeConstraint;\n}\n@Override\npublic void visit(BLangFunctionTypeNode functionTypeNode) {\nSymbolEnv funcEnv = SymbolEnv.createTypeEnv(functionTypeNode, functionTypeNode.getBType().tsymbol.scope, env);\nfor (BLangSimpleVariable param : functionTypeNode.params) {\nrewrite(param, funcEnv);\n}\nif (functionTypeNode.restParam != null) {\nfunctionTypeNode.restParam.typeNode = rewrite(functionTypeNode.restParam.typeNode, env);\n}\nif (functionTypeNode.returnTypeNode != null) {\nfunctionTypeNode.returnTypeNode = rewrite(functionTypeNode.returnTypeNode, env);\n}\nresult = functionTypeNode;\n}\n@Override\npublic void visit(BLangSimpleVariable varNode) {\nif (varNode.typeNode != null && varNode.typeNode.getKind() != null) {\nvarNode.typeNode = rewrite(varNode.typeNode, env);\n}\nBLangExpression bLangExpression;\nif (varNode.symbol != null && Symbols.isFlagOn(varNode.symbol.flags, Flags.DEFAULTABLE_PARAM)) {\nString closureName = generateName(varNode.symbol.name.value, env.node);\nbLangExpression = createClosureForDefaultValue(closureName, varNode.name.value, varNode);\n} else {\nbLangExpression = rewriteExpr(varNode.expr);\n}\nvarNode.expr = bLangExpression;\nresult = varNode;\n}\nprivate BSymbol getOwner(SymbolEnv symbolEnv) {\nwhile (symbolEnv.node.getKind() != NodeKind.PACKAGE) {\nNodeKind kind = symbolEnv.node.getKind();\nif (kind != NodeKind.BLOCK_FUNCTION_BODY && kind != NodeKind.BLOCK) {\nsymbolEnv = symbolEnv.enclEnv;\ncontinue;\n}\nreturn symbolEnv.enclInvokable.symbol;\n}\nreturn symbolEnv.enclPkg.symbol;\n}\nprivate BLangExpression createClosureForDefaultValue(String closureName, String paramName,\nBLangSimpleVariable varNode) {\nBSymbol owner = getOwner(env);\nBInvokableTypeSymbol symbol = (BInvokableTypeSymbol) 
env.node.getBType().tsymbol;\nBLangFunction function = createFunction(closureName, varNode.pos, owner.pkgID, owner, varNode.getBType());\nupdateFunctionParams(function, symbol.params, paramName);\nBLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(function.pos, (BLangBlockFunctionBody) function.body);\nreturnStmt.expr = varNode.expr;\nBLangLambdaFunction lambdaFunction = createLambdaFunction(function);\nlambdaFunction.capturedClosureEnv = env.createClone();\nBInvokableSymbol varSymbol = createSimpleVariable(function, lambdaFunction, false);\nenv.enclPkg.symbol.scope.define(function.symbol.name, function.symbol);\nenv.enclPkg.functions.add(function);\nenv.enclPkg.topLevelNodes.add(function);\nsymbol.defaultValues.put(paramName, varSymbol);\nreturn returnStmt.expr;\n}\nprivate void updateFunctionParams(BLangFunction funcNode, List params, String paramName) {\nBInvokableSymbol funcSymbol = funcNode.symbol;\nfor (BVarSymbol symbol : params) {\nif (paramName.equals(symbol.name.value)) {\nbreak;\n}\nBInvokableType funcType = (BInvokableType) funcSymbol.type;\nBVarSymbol varSymbol = ASTBuilderUtil.duplicateParamSymbol(symbol, funcSymbol);\nvarSymbol.flags = 0;\nfuncSymbol.scope.define(varSymbol.name, varSymbol);\nfuncSymbol.params.add(varSymbol);\nfuncType.paramTypes.add(varSymbol.type);\nfuncNode.requiredParams.add(ASTBuilderUtil.createVariable(varSymbol.pos, varSymbol.name.value,\nvarSymbol.type, null, varSymbol));\n}\n}\nBLangLambdaFunction createLambdaFunction(BLangFunction function) {\nBLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();\nlambdaFunction.function = function;\nlambdaFunction.setBType(function.getBType());\nlambdaFunction.capturedClosureEnv = env;\nreturn lambdaFunction;\n}\npublic BInvokableSymbol createSimpleVariable(BLangFunction function, BLangLambdaFunction lambdaFunction,\nboolean isAnnotationClosure) {\nBInvokableSymbol invokableSymbol = function.symbol;\nBType type = function.getBType();\nBInvokableSymbol varSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, invokableSymbol.name,\ninvokableSymbol.pkgID, type,\ninvokableSymbol.owner, function.pos, VIRTUAL);\nvarSymbol.params = invokableSymbol.params;\nvarSymbol.restParam = invokableSymbol.restParam;\nvarSymbol.retType = invokableSymbol.retType;\nBLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(function.pos, function.name.value, type,\nlambdaFunction, varSymbol);\nBLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(function.pos);\nvariableDef.var = simpleVariable;\nvariableDef.setBType(type);\nif (isAnnotationClosure) {\nannotationClosureReferences.add(variableDef);\nreturn varSymbol;\n}\nqueue.add(variableDef);\nreturn varSymbol;\n}\npublic BVarSymbol createSimpleVariable(BInvokableSymbol invokableSymbol, boolean isAnnotationClosure) {\nBType type = invokableSymbol.retType;\nLocation pos = invokableSymbol.pos;\nName name = invokableSymbol.name;\nBVarSymbol varSymbol = new BVarSymbol(0, name, invokableSymbol.originalName, invokableSymbol.pkgID, type,\ninvokableSymbol.owner, pos, VIRTUAL);\nBLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(pos, name.value, type,\ngetInvocation(invokableSymbol), varSymbol);\nBLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(pos);\nvariableDef.var = simpleVariable;\nvariableDef.setBType(type);\nif (isAnnotationClosure) {\nannotationClosureReferences.add(variableDef);\nreturn varSymbol;\n}\nqueue.add(variableDef);\nreturn varSymbol;\n}\nprivate BLangInvocation 
getInvocation(BInvokableSymbol symbol) {\nBLangInvocation funcInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();\nfuncInvocation.setBType(symbol.retType);\nfuncInvocation.symbol = symbol;\nfuncInvocation.name = ASTBuilderUtil.createIdentifier(symbol.pos, symbol.name.value);\nfuncInvocation.functionPointerInvocation = true;\nreturn funcInvocation;\n}\nprivate BLangFunction createFunction(String funcName, Location pos, PackageID pkgID, BSymbol owner, BType bType) {\nBLangFunction function = ASTBuilderUtil.createFunction(pos, funcName);\nfunction.flagSet.add(Flag.PUBLIC);\nBInvokableTypeSymbol invokableTypeSymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, Flags.PUBLIC,\npkgID, bType, owner, pos, VIRTUAL);\nfunction.setBType(new BInvokableType(new ArrayList<>(), bType, invokableTypeSymbol));\nBLangBuiltInRefTypeNode typeNode = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();\ntypeNode.setBType(bType);\ntypeNode.typeKind = bType.getKind();\ntypeNode.pos = pos;\nfunction.returnTypeNode = typeNode;\nBInvokableSymbol functionSymbol = new BInvokableSymbol(SymTag.FUNCTION, Flags.PUBLIC, new Name(funcName), pkgID,\nfunction.getBType(), owner, pos, VIRTUAL);\nfunctionSymbol.bodyExist = true;\nfunctionSymbol.kind = SymbolKind.FUNCTION;\nfunctionSymbol.retType = function.returnTypeNode.getBType();\nfunctionSymbol.scope = new Scope(functionSymbol);\nfunction.symbol = functionSymbol;\nreturn function;\n}\nprivate String generateName(String name, BLangNode parent) {\nif (parent == null) {\nreturn DOLLAR + name;\n}\nswitch (parent.getKind()) {\ncase CLASS_DEFN:\nname = ((BLangClassDefinition) parent).name.getValue() + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ncase FUNCTION:\nname = ((BLangFunction) parent).symbol.name.value.replaceAll(\"\\\\.\", UNDERSCORE) + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ncase RESOURCE_FUNC:\nname = ((BLangResourceFunction) parent).name.value + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ncase VARIABLE:\nname = ((BLangSimpleVariable) parent).name.getValue() + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ncase TYPE_DEFINITION:\nname = ((BLangTypeDefinition) parent).name.getValue() + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ncase SERVICE:\nname = ((BLangService) parent).name.getValue() + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ndefault:\nreturn generateName(name, parent.parent);\n}\n}\n@Override\npublic void visit(BLangTupleVariable varNode) {\nrewrite(varNode.restVariable, env);\nresult = varNode;\n}\n@Override\npublic void visit(BLangRecordVariable varNode) {\nvarNode.expr = rewriteExpr(varNode.expr);\nresult = varNode;\n}\n@Override\npublic void visit(BLangErrorVariable varNode) {\nvarNode.expr = rewriteExpr(varNode.expr);\nresult = varNode;\n}\n@Override\npublic void visit(BLangTupleVariableDef varDefNode) {\nvarDefNode.var = rewrite(varDefNode.var, env);\nresult = varDefNode;\n}\n@Override\npublic void visit(BLangRecordVariableDef varDefNode) {\nvarDefNode.var = rewrite(varDefNode.var, env);\nresult = varDefNode;\n}\n@Override\npublic void visit(BLangErrorVariableDef varDefNode) {\nvarDefNode.errorVariable = rewrite(varDefNode.errorVariable, env);\nresult = varDefNode;\n}\n@Override\npublic void visit(BLangAssignment assignNode) {\nassignNode.varRef = rewriteExpr(assignNode.varRef);\nassignNode.expr = rewriteExpr(assignNode.expr);\nresult = assignNode;\n}\n@Override\npublic void 
visit(BLangTupleDestructure tupleDestructure) {\nresult = tupleDestructure;\n}\n@Override\npublic void visit(BLangRecordDestructure recordDestructure) {\nresult = recordDestructure;\n}\n@Override\npublic void visit(BLangErrorDestructure errorDestructure) {\nresult = errorDestructure;\n}\n@Override\npublic void visit(BLangRetry retryNode) {\nretryNode.retryBody = rewrite(retryNode.retryBody, env);\nresult = retryNode;\n}\n@Override\npublic void visit(BLangRetryTransaction retryTransaction) {\nretryTransaction.transaction = rewrite(retryTransaction.transaction, env);\nresult = retryTransaction;\n}\n@Override\npublic void visit(BLangContinue nextNode) {\nresult = nextNode;\n}\n@Override\npublic void visit(BLangBreak breakNode) {\nresult = breakNode;\n}\n@Override\npublic void visit(BLangPanic panicNode) {\npanicNode.expr = rewriteExpr(panicNode.expr);\nresult = panicNode;\n}\n@Override\npublic void visit(BLangDo doNode) {\ndoNode.body = rewrite(doNode.body, env);\nresult = doNode;\n}\n@Override\npublic void visit(BLangXMLNSStatement xmlnsStmtNode) {\nxmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env);\nresult = xmlnsStmtNode;\n}\n@Override\npublic void visit(BLangXMLNS xmlnsNode) {\nxmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);\nresult = xmlnsNode;\n}\n@Override\npublic void visit(BLangExpressionStmt exprStmtNode) {\nexprStmtNode.expr = rewriteExpr(exprStmtNode.expr);\nresult = exprStmtNode;\n}\n@Override\npublic void visit(BLangFail failNode) {\nif (failNode.exprStmt != null) {\nfailNode.exprStmt = rewrite(failNode.exprStmt, env);\n}\nresult = failNode;\n}\n@Override\npublic void visit(BLangIf ifNode) {\nifNode.expr = rewriteExpr(ifNode.expr);\nifNode.body = rewrite(ifNode.body, env);\nifNode.elseStmt = rewrite(ifNode.elseStmt, env);\nresult = ifNode;\n}\n@Override\npublic void visit(BLangForeach foreach) {\nresult = foreach;\n}\n@Override\npublic void visit(BLangWhile whileNode) {\nwhileNode.expr = rewriteExpr(whileNode.expr);\nwhileNode.body = rewrite(whileNode.body, env);\nresult = whileNode;\n}\n@Override\npublic void visit(BLangLock lockNode) {\nlockNode.body = rewrite(lockNode.body, env);\nresult = lockNode;\n}\n@Override\npublic void visit(BLangLock.BLangLockStmt lockNode) {\nresult = lockNode;\n}\n@Override\npublic void visit(BLangLock.BLangUnLockStmt unLockNode) {\nresult = unLockNode;\n}\n@Override\npublic void visit(BLangTransaction transactionNode) {\ntransactionNode.transactionBody = rewrite(transactionNode.transactionBody, env);\nresult = transactionNode;\n}\n@Override\npublic void visit(BLangRollback rollbackNode) {\nrollbackNode.expr = rewriteExpr(rollbackNode.expr);\nresult = rollbackNode;\n}\n@Override\npublic void visit(BLangTransactionalExpr transactionalExpr) {\nresult = transactionalExpr;\n}\n@Override\npublic void visit(BLangCommitExpr commitExpr) {\nresult = commitExpr;\n}\n@Override\npublic void visit(BLangForkJoin forkJoin) {\nresult = forkJoin;\n}\n@Override\npublic void visit(BLangLiteral literalExpr) {\nresult = literalExpr;\n}\n@Override\npublic void visit(BLangListConstructorExpr listConstructorExpr) {\nlistConstructorExpr.exprs = rewriteExprs(listConstructorExpr.exprs);\nresult = listConstructorExpr;\n}\n@Override\npublic void visit(BLangTableConstructorExpr tableConstructorExpr) {\nrewriteExprs(tableConstructorExpr.recordLiteralList);\nresult = tableConstructorExpr;\n}\n@Override\npublic void visit(BLangListConstructorExpr.BLangJSONArrayLiteral jsonArrayLiteral) {\njsonArrayLiteral.exprs = 
rewriteExprs(jsonArrayLiteral.exprs);\nresult = jsonArrayLiteral;\n}\n@Override\npublic void visit(BLangListConstructorExpr.BLangTupleLiteral tupleLiteral) {\ntupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);\nresult = tupleLiteral;\n}\n@Override\npublic void visit(BLangListConstructorExpr.BLangArrayLiteral arrayLiteral) {\narrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);\nresult = arrayLiteral;\n}\n@Override\npublic void visit(BLangSimpleVarRef varRefExpr) {\nBSymbol varRefSym = varRefExpr.symbol;\nif (varRefSym != null) {\nboolean isMemberOfFunction = Symbols.isFlagOn(varRefSym.flags, Flags.REQUIRED_PARAM) ||\nSymbols.isFlagOn(varRefSym.flags, Flags.DEFAULTABLE_PARAM);\nif (isMemberOfFunction) {\nupdateFunctionParamsOfClosures(env, varRefExpr);\n}\n}\nBLangInvokableNode encInvokable = env.enclInvokable;\nBSymbol symbol = varRefExpr.symbol;\nif (varRefSym == null || encInvokable == null || (symbol.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {\nresult = varRefExpr;\nreturn;\n}\nupdateClosureVariable((BVarSymbol) symbol, encInvokable, varRefExpr.pos);\nresult = varRefExpr;\n}\nprivate void updateFunctionParamsOfClosures(SymbolEnv symbolEnv, BLangSimpleVarRef varRefExpr) {\nBLangFunction closure = null;\nwhile (symbolEnv != null && symbolEnv.node.getKind() != NodeKind.PACKAGE) {\nif (symbolEnv.node.getKind() != NodeKind.FUNCTION) {\nsymbolEnv = symbolEnv.enclEnv;\ncontinue;\n}\nBLangFunction bLangFunction = (BLangFunction) symbolEnv.node;\nBLangInvokableNode enclInvokable = symbolEnv.enclInvokable;\nif (enclInvokable.flagSet.contains(Flag.LAMBDA) && !enclInvokable.flagSet.contains(Flag.QUERY_LAMBDA) &&\n!enclInvokable.flagSet.contains(Flag.ANONYMOUS)) {\nclosure = bLangFunction;\n}\nsymbolEnv = symbolEnv.enclEnv;\n}\nif (closure != null) {\nupdateFunctionParams(closure, varRefExpr);\n}\n}\nprivate void updateFunctionParams(BLangFunction funcNode, BLangSimpleVarRef varRefExpr) {\nBInvokableSymbol funcSymbol = funcNode.symbol;\nfor (BVarSymbol varSymbol : funcSymbol.params) {\nif (varSymbol.name.value.equals(varRefExpr.symbol.name.value)) {\nvarRefExpr.symbol = varSymbol;\nreturn;\n}\n}\n}\nprivate SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {\nif (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {\nreturn env.enclEnv;\n}\nif (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) {\nreturn env.enclEnv;\n}\nif (env.enclInvokable != null && env.enclInvokable == encInvokable) {\nreturn findEnclosingInvokableEnv(env.enclEnv, encInvokable);\n}\nreturn env;\n}\n@Override\npublic void visit(BLangFieldBasedAccess fieldAccessExpr) {\nfieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);\nresult = fieldAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess indexAccessExpr) {\nindexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);\nindexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);\nresult = indexAccessExpr;\n}\n@Override\npublic void visit(BLangCompoundAssignment compoundAssignment) {\nresult = compoundAssignment;\n}\n@Override\npublic void visit(BLangInvocation invocation) {\nrewriteInvocationExpr(invocation);\nBLangInvokableNode encInvokable = env.enclInvokable;\nif (encInvokable == null || !invocation.functionPointerInvocation) {\nreturn;\n}\nupdateClosureVariable((BVarSymbol) invocation.symbol, encInvokable, invocation.pos);\n}\npublic void rewriteInvocationExpr(BLangInvocation invocation) {\ninvocation.requiredArgs = 
rewriteExprs(invocation.requiredArgs);\nresult = invocation;\n}\n@Override\npublic void visit(BLangQueryAction queryAction) {\nresult = queryAction;\n}\n@Override\npublic void visit(BLangCheckPanickedExpr checkedExpr) {\nresult = checkedExpr;\n}\n@Override\npublic void visit(BLangErrorConstructorExpr errorConstructorExpr) {\nrewriteExprs(errorConstructorExpr.positionalArgs);\nerrorConstructorExpr.errorDetail = rewriteExpr(errorConstructorExpr.errorDetail);\nresult = errorConstructorExpr;\n}\n@Override\npublic void visit(BLangTypeInit typeInitExpr) {\ntypeInitExpr.initInvocation = rewriteExpr(typeInitExpr.initInvocation);\nresult = typeInitExpr;\n}\n@Override\npublic void visit(BLangTernaryExpr ternaryExpr) {\nternaryExpr.expr = rewriteExpr(ternaryExpr.expr);\nternaryExpr.thenExpr = rewriteExpr(ternaryExpr.thenExpr);\nternaryExpr.elseExpr = rewriteExpr(ternaryExpr.elseExpr);\nresult = ternaryExpr;\n}\n@Override\npublic void visit(BLangWaitExpr waitExpr) {\nList exprList = new ArrayList<>();\nwaitExpr.exprList.forEach(expression -> exprList.add(rewriteExpr(expression)));\nwaitExpr.exprList = exprList;\nresult = waitExpr;\n}\n@Override\npublic void visit(BLangWaitForAllExpr waitExpr) {\nresult = waitExpr;\n}\n@Override\npublic void visit(BLangTrapExpr trapExpr) {\ntrapExpr.expr = rewriteExpr(trapExpr.expr);\nresult = trapExpr;\n}\n@Override\npublic void visit(BLangBinaryExpr binaryExpr) {\nbinaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);\nbinaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);\nresult = binaryExpr;\n}\n@Override\npublic void visit(BLangElvisExpr elvisExpr) {\nresult = elvisExpr;\n}\n@Override\npublic void visit(BLangGroupExpr groupExpr) {\ngroupExpr.expression = rewriteExpr(groupExpr.expression);\nresult = groupExpr;\n}\n@Override\npublic void visit(BLangUnaryExpr unaryExpr) {\nunaryExpr.expr = rewriteExpr(unaryExpr.expr);\nresult = unaryExpr;\n}\n@Override\npublic void visit(BLangTypeConversionExpr conversionExpr) {\nconversionExpr.expr = rewriteExpr(conversionExpr.expr);\nconversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);\nresult = conversionExpr;\n}\n@Override\npublic void visit(BLangLambdaFunction bLangLambdaFunction) {\nbLangLambdaFunction.function = rewrite(bLangLambdaFunction.function, bLangLambdaFunction.capturedClosureEnv);\nresult = bLangLambdaFunction;\n}\n@Override\npublic void visit(BLangArrowFunction bLangArrowFunction) {\nresult = bLangArrowFunction;\n}\n@Override\npublic void visit(BLangXMLQName xmlQName) {\nresult = xmlQName;\n}\n@Override\npublic void visit(BLangXMLAttribute xmlAttribute) {\nxmlAttribute.name = rewriteExpr(xmlAttribute.name);\nxmlAttribute.value = rewriteExpr(xmlAttribute.value);\nresult = xmlAttribute;\n}\n@Override\npublic void visit(BLangXMLElementLiteral xmlElementLiteral) {\nxmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);\nxmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);\nxmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);\nxmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);\nresult = xmlElementLiteral;\n}\n@Override\npublic void visit(BLangXMLTextLiteral xmlTextLiteral) {\nxmlTextLiteral.textFragments.forEach(this::rewriteExpr);\nxmlTextLiteral.concatExpr = rewriteExpr(xmlTextLiteral.concatExpr);\nresult = xmlTextLiteral;\n}\n@Override\npublic void visit(BLangXMLCommentLiteral xmlCommentLiteral) {\nxmlCommentLiteral.textFragments.forEach(this::rewriteExpr);\nresult = xmlCommentLiteral;\n}\n@Override\npublic 
void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {\nxmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);\nxmlProcInsLiteral.dataFragments.forEach(this::rewriteExpr);\nresult = xmlProcInsLiteral;\n}\n@Override\npublic void visit(BLangXMLQuotedString xmlQuotedString) {\nxmlQuotedString.textFragments.forEach(this::rewriteExpr);\nresult = xmlQuotedString;\n}\n@Override\npublic void visit(BLangStringTemplateLiteral stringTemplateLiteral) {\nstringTemplateLiteral.exprs.forEach(this::rewriteExpr);\nresult = stringTemplateLiteral;\n}\n@Override\npublic void visit(BLangWorkerSend workerSendNode) {\nworkerSendNode.expr = rewriteExpr(workerSendNode.expr);\nresult = workerSendNode;\n}\n@Override\npublic void visit(BLangWorkerSyncSendExpr syncSendExpr) {\nsyncSendExpr.expr = rewriteExpr(syncSendExpr.expr);\nresult = syncSendExpr;\n}\n@Override\npublic void visit(BLangWorkerReceive workerReceiveNode) {\nresult = workerReceiveNode;\n}\n@Override\npublic void visit(BLangWorkerFlushExpr workerFlushExpr) {\nresult = workerFlushExpr;\n}\n@Override\npublic void visit(BLangSimpleVarRef.BLangLocalVarRef localVarRef) {\nBLangInvokableNode encInvokable = env.enclInvokable;\nBSymbol symbol = localVarRef.symbol;\nif (encInvokable == null || (symbol.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {\nresult = localVarRef;\nreturn;\n}\nupdateClosureVariable((BVarSymbol) symbol, encInvokable, localVarRef.pos);\nresult = localVarRef;\n}\nprivate void updateClosureVariable(BVarSymbol varSymbol, BLangInvokableNode encInvokable, Location pos) {\nSet flagSet = encInvokable.flagSet;\nboolean isClosure = !flagSet.contains(Flag.QUERY_LAMBDA) && flagSet.contains(Flag.LAMBDA) &&\n!flagSet.contains(Flag.ATTACHED);\nif (!varSymbol.closure && isClosure) {\nSymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);\nBSymbol resolvedSymbol =\nsymResolver.lookupClosureVarSymbol(encInvokableEnv, varSymbol.name, SymTag.VARIABLE);\nif (resolvedSymbol != symTable.notFoundSymbol) {\nvarSymbol.closure = true;\n((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(varSymbol, pos));\n}\n}\n}\n@Override\npublic void visit(BLangIgnoreExpr ignoreExpr) {\nresult = ignoreExpr;\n}\n@Override\npublic void visit(BLangDynamicArgExpr dynamicParamExpr) {\ndynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);\ndynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);\nresult = dynamicParamExpr;\n}\n@Override\npublic void visit(BLangSimpleVarRef.BLangPackageVarRef packageVarRef) {\nresult = packageVarRef;\n}\n@Override\npublic void visit(BLangConstRef constRef) {\nresult = constRef;\n}\n@Override\npublic void visit(BLangSimpleVarRef.BLangFunctionVarRef functionVarRef) {\nresult = functionVarRef;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangStructFieldAccessExpr fieldAccessExpr) {\nfieldAccessExpr.indexExpr = rewriteExpr(fieldAccessExpr.indexExpr);\nfieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);\nresult = fieldAccessExpr;\n}\n@Override\npublic void visit(BLangFieldBasedAccess.BLangStructFunctionVarRef functionVarRef) {\nfunctionVarRef.expr = rewriteExpr(functionVarRef.expr);\nresult = functionVarRef;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangMapAccessExpr mapKeyAccessExpr) {\nmapKeyAccessExpr.indexExpr = rewriteExpr(mapKeyAccessExpr.indexExpr);\nmapKeyAccessExpr.expr = rewriteExpr(mapKeyAccessExpr.expr);\nresult = mapKeyAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangTableAccessExpr 
tableKeyAccessExpr) {\ntableKeyAccessExpr.indexExpr = rewriteExpr(tableKeyAccessExpr.indexExpr);\ntableKeyAccessExpr.expr = rewriteExpr(tableKeyAccessExpr.expr);\nresult = tableKeyAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangArrayAccessExpr arrayIndexAccessExpr) {\narrayIndexAccessExpr.indexExpr = rewriteExpr(arrayIndexAccessExpr.indexExpr);\narrayIndexAccessExpr.expr = rewriteExpr(arrayIndexAccessExpr.expr);\nresult = arrayIndexAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangTupleAccessExpr arrayIndexAccessExpr) {\narrayIndexAccessExpr.indexExpr = rewriteExpr(arrayIndexAccessExpr.indexExpr);\narrayIndexAccessExpr.expr = rewriteExpr(arrayIndexAccessExpr.expr);\nresult = arrayIndexAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangXMLAccessExpr xmlIndexAccessExpr) {\nxmlIndexAccessExpr.indexExpr = rewriteExpr(xmlIndexAccessExpr.indexExpr);\nxmlIndexAccessExpr.expr = rewriteExpr(xmlIndexAccessExpr.expr);\nresult = xmlIndexAccessExpr;\n}\n@Override\npublic void visit(BLangXMLElementAccess xmlElementAccess) {\nxmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);\nresult = xmlElementAccess;\n}\n@Override\npublic void visit(BLangXMLNavigationAccess xmlNavigation) {\nxmlNavigation.expr = rewriteExpr(xmlNavigation.expr);\nxmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);\nresult = xmlNavigation;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangJSONAccessExpr jsonAccessExpr) {\njsonAccessExpr.indexExpr = rewriteExpr(jsonAccessExpr.indexExpr);\njsonAccessExpr.expr = rewriteExpr(jsonAccessExpr.expr);\nresult = jsonAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangStringAccessExpr stringAccessExpr) {\nstringAccessExpr.indexExpr = rewriteExpr(stringAccessExpr.indexExpr);\nstringAccessExpr.expr = rewriteExpr(stringAccessExpr.expr);\nresult = stringAccessExpr;\n}\n@Override\npublic void visit(BLangRecordLiteral.BLangMapLiteral mapLiteral) {\nfor (RecordLiteralNode.RecordField field : mapLiteral.fields) {\nif (field.isKeyValueField()) {\nBLangRecordLiteral.BLangRecordKeyValueField keyValueField =\n(BLangRecordLiteral.BLangRecordKeyValueField) field;\nkeyValueField.key.expr = rewriteExpr(keyValueField.key.expr);\nkeyValueField.valueExpr = rewriteExpr(keyValueField.valueExpr);\ncontinue;\n}\nBLangRecordLiteral.BLangRecordSpreadOperatorField spreadField =\n(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;\nspreadField.expr = rewriteExpr(spreadField.expr);\n}\nresult = mapLiteral;\n}\n@Override\npublic void visit(BLangRecordLiteral.BLangStructLiteral structLiteral) {\nfor (RecordLiteralNode.RecordField field : structLiteral.fields) {\nif (field.isKeyValueField()) {\nBLangRecordLiteral.BLangRecordKeyValueField keyValueField =\n(BLangRecordLiteral.BLangRecordKeyValueField) field;\nkeyValueField.key.expr = rewriteExpr(keyValueField.key.expr);\nkeyValueField.valueExpr = rewriteExpr(keyValueField.valueExpr);\ncontinue;\n}\nBLangRecordLiteral.BLangRecordSpreadOperatorField spreadField =\n(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;\nspreadField.expr = rewriteExpr(spreadField.expr);\n}\nresult = structLiteral;\n}\n@Override\npublic void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {\nwaitLiteral.keyValuePairs.forEach(keyValue -> {\nif (keyValue.valueExpr != null) {\nkeyValue.valueExpr = rewriteExpr(keyValue.valueExpr);\n} else {\nkeyValue.keyExpr = rewriteExpr(keyValue.keyExpr);\n}\n});\nresult = waitLiteral;\n}\n@Override\npublic void 
visit(BLangIsAssignableExpr assignableExpr) {\nassignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);\nresult = assignableExpr;\n}\n@Override\npublic void visit(BLangInvocation.BFunctionPointerInvocation fpInvocation) {\nfpInvocation.expr = rewriteExpr(fpInvocation.expr);\nrewriteInvocationExpr(fpInvocation);\n}\n@Override\npublic void visit(BLangTypedescExpr accessExpr) {\nresult = accessExpr;\n}\n@Override\npublic void visit(BLangRestArgsExpression bLangVarArgsExpression) {\nresult = rewriteExpr(bLangVarArgsExpression.expr);\n}\n@Override\npublic void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {\nbLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);\nresult = bLangNamedArgsExpression;\n}\n@Override\npublic void visit(BLangCheckedExpr checkedExpr) {\nresult = checkedExpr;\n}\n@Override\npublic void visit(BLangServiceConstructorExpr serviceConstructorExpr) {\nresult = serviceConstructorExpr;\n}\n@Override\npublic void visit(BLangTypeTestExpr typeTestExpr) {\ntypeTestExpr.expr = rewriteExpr(typeTestExpr.expr);\nresult = typeTestExpr;\n}\n@Override\npublic void visit(BLangIsLikeExpr isLikeExpr) {\nisLikeExpr.expr = rewriteExpr(isLikeExpr.expr);\nresult = isLikeExpr;\n}\npublic void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {\nresult = nsPrefixedFieldBasedAccess;\n}\n@Override\npublic void visit(BLangLetExpression letExpression) {\nfor (BLangLetVariable letVariable : letExpression.letVarDeclarations) {\nrewrite((BLangNode) letVariable.definitionNode, env);\n}\nletExpression.expr = rewriteExpr(letExpression.expr);\nresult = letExpression;\n}\n@Override\npublic void visit(BLangAnnotAccessExpr annotAccessExpr) {\nannotAccessExpr.expr = rewriteExpr(annotAccessExpr.expr);\nresult = annotAccessExpr;\n}\n@Override\npublic void visit(BLangStatementExpression bLangStatementExpression) {\nif (bLangStatementExpression.stmt.getKind() == NodeKind.BLOCK) {\nBLangBlockStmt bLangBlockStmt = (BLangBlockStmt) bLangStatementExpression.stmt;\nfor (int i = 0; i < bLangBlockStmt.stmts.size(); i++) {\nBLangStatement stmt = bLangBlockStmt.stmts.remove(i);\nbLangBlockStmt.stmts.add(i, rewrite(stmt, env));\n}\n} else {\nbLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);\n}\nbLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);\nresult = bLangStatementExpression;\n}\n@Override\npublic void visit(BLangInvocation.BLangActionInvocation invocation) {\nrewriteInvocationExpr(invocation);\n}\n@Override\npublic void visit(BLangIdentifier identifierNode) {\n/* ignore */\n}\n@Override\npublic void visit(BLangAnnotation annotationNode) {\n/* ignore */\n}\n@Override\npublic void visit(BLangAnnotationAttachment annAttachmentNode) {\n/* ignore */\n}\n@Override\npublic void visit(BLangConstant constant) {\nresult = constant;\n}\n@Override\npublic void visit(BLangNumericLiteral literalExpr) {\nresult = literalExpr;\n}\n@Override\npublic void visit(BLangTupleVarRef varRefExpr) {\nresult = varRefExpr;\n}\n@Override\npublic void visit(BLangRecordVarRef varRefExpr) {\nresult = varRefExpr;\n}\n@Override\npublic void visit(BLangErrorVarRef varRefExpr) {\nresult = varRefExpr;\n}\n@Override\npublic void visit(BLangSimpleVarRef.BLangTypeLoad typeLoad) {\nresult = typeLoad;\n}\n@Override\npublic void visit(BLangRecordLiteral.BLangChannelLiteral channelLiteral) {\nchannelLiteral.fields.forEach(field -> {\nBLangRecordLiteral.BLangRecordKeyValueField keyValue = (BLangRecordLiteral.BLangRecordKeyValueField) 
field;\nkeyValue.key.expr = rewriteExpr(keyValue.key.expr);\nkeyValue.valueExpr = rewriteExpr(keyValue.valueExpr);\n});\nresult = channelLiteral;\n}\n@Override\npublic void visit(BLangXMLNS.BLangLocalXMLNS xmlnsNode) {\nxmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);\nresult = xmlnsNode;\n}\n@Override\npublic void visit(BLangXMLNS.BLangPackageXMLNS xmlnsNode) {\nxmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);\nresult = xmlnsNode;\n}\n@Override\npublic void visit(BLangListConstructorExpr.BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {\nlistConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);\nresult = listConstructorSpreadOpExpr;\n}\n@Override\npublic void visit(BLangQueryExpr queryExpr) {\nresult = queryExpr;\n}\n@Override\npublic void visit(BLangMatchStatement matchStatement) {\nresult = matchStatement;\n}\n@Override\npublic void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {\nxmlSequenceLiteral.xmlItems.forEach(this::rewriteExpr);\nresult = xmlSequenceLiteral;\n}\n@Override\npublic void visit(BLangRegExpTemplateLiteral regExpTemplateLiteral) {\nList interpolationsList =\nsymResolver.getListOfInterpolations(regExpTemplateLiteral.reDisjunction.sequenceList);\ninterpolationsList.forEach(this::rewriteExpr);\nresult = regExpTemplateLiteral;\n}\n@Override\npublic void visit(BLangMarkdownDocumentationLine bLangMarkdownDocumentationLine) {\n/* Ignore */\n}\n@Override\npublic void visit(BLangMarkdownParameterDocumentation bLangDocumentationParameter) {\n/* Ignore */\n}\n@Override\npublic void visit(BLangMarkdownReturnParameterDocumentation bLangMarkdownReturnParameterDocumentation) {\n/* Ignore */\n}\n@Override\npublic void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {\nresult = inferTypedescExpr;\n}\n@Override\npublic void visit(BLangMarkdownDocumentation bLangMarkdownDocumentation) {\n/* Ignore */\n}\n@SuppressWarnings(\"unchecked\")\nprivate E rewrite(E node, SymbolEnv env) {\nif (node == null) {\nreturn null;\n}\nSymbolEnv previousEnv = this.env;\nthis.env = env;\nnode.accept(this);\nBLangNode resultNode = this.result;\nthis.result = null;\nthis.env = previousEnv;\nreturn (E) resultNode;\n}\nprivate List rewrite(List nodeList, SymbolEnv env) {\nfor (int i = 0; i < nodeList.size(); i++) {\nnodeList.set(i, rewrite(nodeList.get(i), env));\n}\nreturn nodeList;\n}\n@SuppressWarnings(\"unchecked\")\nprivate E rewriteExpr(E node) {\nif (node == null) {\nreturn null;\n}\nnode.accept(this);\nBLangNode resultNode = this.result;\nthis.result = null;\nreturn (E) resultNode;\n}\n@SuppressWarnings(\"unchecked\")\nprivate List rewriteStmt(List nodeList, SymbolEnv env) {\nQueue previousQueue = this.queue;\nthis.queue = new LinkedList<>();\nint size = nodeList.size();\nfor (int i = 0; i < size; i++) {\nE node = rewrite(nodeList.remove(0), env);\nIterator iterator = queue.iterator();\nwhile (iterator.hasNext()) {\nnodeList.add(rewrite((E) queue.poll(), env));\n}\nnodeList.add(node);\n}\nthis.queue = previousQueue;\nreturn nodeList;\n}\n@SuppressWarnings(\"unchecked\")\nprivate List rewriteExprs(List nodeList) {\nfor (int i = 0; i < nodeList.size(); i++) {\nnodeList.set(i, rewriteExpr(nodeList.get(i)));\n}\nreturn nodeList;\n}\n}", + "context_after": "class ClosureGenerator extends BLangNodeVisitor {\nprivate static final CompilerContext.Key CLOSURE_GENERATOR_KEY = new CompilerContext.Key<>();\nprivate Queue queue;\nprivate List annotationClosureReferences;\nprivate SymbolTable symTable;\nprivate SymbolEnv env;\nprivate 
BLangNode result;\nprivate SymbolResolver symResolver;\nprivate AnnotationDesugar annotationDesugar;\npublic static ClosureGenerator getInstance(CompilerContext context) {\nClosureGenerator closureGenerator = context.get(CLOSURE_GENERATOR_KEY);\nif (closureGenerator == null) {\nclosureGenerator = new ClosureGenerator(context);\n}\nreturn closureGenerator;\n}\nprivate ClosureGenerator(CompilerContext context) {\ncontext.put(CLOSURE_GENERATOR_KEY, this);\nthis.symTable = SymbolTable.getInstance(context);\nthis.queue = new LinkedList<>();\nthis.annotationClosureReferences = new ArrayList<>();\nthis.symResolver = SymbolResolver.getInstance(context);\nthis.annotationDesugar = AnnotationDesugar.getInstance(context);\n}\n@Override\npublic void visit(BLangPackage pkgNode) {\nSymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);\nfor (int i = 0; i < pkgNode.functions.size(); i++) {\nBLangFunction bLangFunction = pkgNode.functions.get(i);\nif (!bLangFunction.flagSet.contains(Flag.LAMBDA)) {\nSymbolEnv funcEnv = SymbolEnv.createFunctionEnv(bLangFunction, bLangFunction.symbol.scope, pkgEnv);\nrewriteParamsAndReturnTypeOfFunction(bLangFunction, funcEnv);\n}\n}\npkgNode.services.forEach(service -> rewrite(service, pkgEnv));\npkgNode.typeDefinitions.forEach(typeDefinition -> rewrite(typeDefinition, pkgEnv));\npkgNode.xmlnsList.forEach(xmlns -> rewrite(xmlns, pkgEnv));\npkgNode.constants.forEach(constant -> rewrite(constant, pkgEnv));\npkgNode.annotations.forEach(annotation -> rewrite(annotation, pkgEnv));\npkgNode.initFunction = rewrite(pkgNode.initFunction, pkgEnv);\npkgNode.classDefinitions = rewrite(pkgNode.classDefinitions, pkgEnv);\npkgNode.globalVars.forEach(globalVar -> rewrite(globalVar, pkgEnv));\naddClosuresToGlobalVariableList(pkgEnv);\nfor (int i = 0; i < pkgNode.functions.size(); i++) {\nBLangFunction bLangFunction = pkgNode.functions.get(i);\nif (!bLangFunction.flagSet.contains(Flag.LAMBDA)) {\nrewrite(bLangFunction, pkgEnv);\n}\n}\nresult = pkgNode;\n}\nprivate void addClosuresToGlobalVariableList(SymbolEnv pkgEnv) {\nIterator iterator = queue.iterator();\nwhile (iterator.hasNext()) {\nBLangSimpleVariable simpleVariable = queue.poll().var;\nsimpleVariable.flagSet.add(Flag.PUBLIC);\nsimpleVariable.symbol.flags |= Flags.PUBLIC;\npkgEnv.enclPkg.globalVars.add(0, rewrite(simpleVariable, pkgEnv));\n}\nfor (BLangSimpleVariableDef closureReference : annotationClosureReferences) {\npkgEnv.enclPkg.globalVars.add(rewrite(closureReference.var, pkgEnv));\n}\n}\n@Override\npublic void visit(BLangFunction funcNode) {\nSymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);\nif (funcNode.flagSet.contains(Flag.LAMBDA)) {\nrewriteParamsAndReturnTypeOfFunction(funcNode, funcEnv);\n}\nfuncNode.body = rewrite(funcNode.body, funcEnv);\nresult = funcNode;\n}\npublic void rewriteParamsAndReturnTypeOfFunction(BLangFunction funcNode, SymbolEnv funcEnv) {\nfor (BLangSimpleVariable bLangSimpleVariable : funcNode.requiredParams) {\nrewrite(bLangSimpleVariable, funcEnv);\n}\nif (funcNode.restParam != null) {\nfuncNode.restParam = rewrite(funcNode.restParam, funcEnv);\n}\nif (funcNode.returnTypeNode != null && funcNode.returnTypeNode.getKind() != null) {\nfuncNode.returnTypeNode = rewrite(funcNode.returnTypeNode, funcEnv);\n}\n}\n@Override\npublic void visit(BLangBlockFunctionBody body) {\nSymbolEnv blockEnv = SymbolEnv.createFuncBodyEnv(body, env);\nbody.stmts = rewriteStmt(body.stmts, blockEnv);\nresult = body;\n}\n@Override\npublic void visit(BLangRawTemplateLiteral 
rawTemplateLiteral) {\nresult = rawTemplateLiteral;\n}\n@Override\npublic void visit(BLangExprFunctionBody exprBody) {\nexprBody.expr = rewriteExpr(exprBody.expr);\nresult = exprBody;\n}\n@Override\npublic void visit(BLangResourceFunction resourceFunction) {\nvisit((BLangFunction) resourceFunction);\n}\n@Override\npublic void visit(BLangExternalFunctionBody body) {\nresult = body;\n}\n@Override\npublic void visit(BLangBlockStmt blockNode) {\nSymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env);\nblockNode.stmts = rewriteStmt(blockNode.stmts, blockEnv);\nresult = blockNode;\n}\n@Override\npublic void visit(BLangService serviceNode) {\nresult = serviceNode;\n}\n@Override\npublic void visit(BLangSimpleVariableDef varDefNode) {\nvarDefNode.var = rewrite(varDefNode.var, env);\nresult = varDefNode;\n}\n@Override\npublic void visit(BLangReturn returnNode) {\nif (returnNode.expr != null) {\nreturnNode.expr = rewriteExpr(returnNode.expr);\n}\nresult = returnNode;\n}\n@Override\npublic void visit(BLangTypeDefinition typeDef) {\ntypeDef.typeNode = rewrite(typeDef.typeNode, env);\nresult = typeDef;\n}\n@Override\npublic void visit(BLangIntersectionTypeNode intersectionTypeNode) {\nList rewrittenConstituents = new ArrayList<>();\nfor (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) {\nrewrittenConstituents.add(rewrite(constituentTypeNode, env));\n}\nintersectionTypeNode.constituentTypeNodes = rewrittenConstituents;\nresult = intersectionTypeNode;\n}\n@Override\npublic void visit(BLangClassDefinition classDefinition) {\nSymbolEnv classEnv = SymbolEnv.createClassEnv(classDefinition, classDefinition.symbol.scope, env);\nfor (BLangSimpleVariable bLangSimpleVariable : classDefinition.fields) {\nbLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, classEnv);\n}\nresult = classDefinition;\n}\n@Override\npublic void visit(BLangObjectTypeNode objectTypeNode) {\nfor (BLangSimpleVariable field : objectTypeNode.fields) {\nrewrite(field, env);\n}\nresult = objectTypeNode;\n}\n@Override\npublic void visit(BLangObjectConstructorExpression objectConstructorExpression) {\nobjectConstructorExpression.typeInit = rewriteExpr(objectConstructorExpression.typeInit);\nresult = objectConstructorExpression;\n}\n@Override\npublic void visit(BLangRecordTypeNode recordTypeNode) {\nBTypeSymbol typeSymbol = recordTypeNode.getBType().tsymbol;\nBSymbol owner = typeSymbol.owner;\ndesugarFieldAnnotations(owner, typeSymbol, recordTypeNode.fields, recordTypeNode.pos);\nfor (BLangSimpleVariable field : recordTypeNode.fields) {\nrewrite(field, env);\n}\nrecordTypeNode.restFieldType = rewrite(recordTypeNode.restFieldType, env);\nresult = recordTypeNode;\n}\n@Override\npublic void visit(BLangTupleTypeNode tupleTypeNode) {\nBTypeSymbol typeSymbol = tupleTypeNode.getBType().tsymbol;\nBSymbol owner = typeSymbol.owner;\ndesugarFieldAnnotations(owner, typeSymbol, tupleTypeNode.members, tupleTypeNode.pos);\nList rewrittenMembers = new ArrayList<>();\ntupleTypeNode.members.forEach(member -> rewrittenMembers.add(rewrite(member, env)));\ntupleTypeNode.members = rewrittenMembers;\ntupleTypeNode.restParamType = rewrite(tupleTypeNode.restParamType, env);\nresult = tupleTypeNode;\n}\nprivate void desugarFieldAnnotations(BSymbol owner, BTypeSymbol typeSymbol, List fields,\nLocation pos) {\nif (owner.getKind() != SymbolKind.PACKAGE) {\nowner = getOwner(env);\nBLangLambdaFunction lambdaFunction = annotationDesugar.defineFieldAnnotations(fields, pos, env.enclPkg, env,\ntypeSymbol.pkgID, owner);\nif 
(lambdaFunction != null) {\nboolean isPackageLevel = owner.getKind() == SymbolKind.PACKAGE;\nBInvokableSymbol invokableSymbol = createSimpleVariable(lambdaFunction.function, lambdaFunction,\nisPackageLevel);\ntypeSymbol.annotations = createSimpleVariable(invokableSymbol, isPackageLevel);\n}\n}\n}\n@Override\npublic void visit(BLangFiniteTypeNode finiteTypeNode) {\nfiniteTypeNode.valueSpace.forEach(param -> rewrite(param, env));\nresult = finiteTypeNode;\n}\n@Override\npublic void visit(BLangArrayType arrayType) {\narrayType.elemtype = rewrite(arrayType.elemtype, env);\nresult = arrayType;\n}\n@Override\npublic void visit(BLangUserDefinedType userDefinedType) {\nresult = userDefinedType;\n}\n@Override\npublic void visit(BLangUnionTypeNode unionTypeNode) {\nList rewrittenMembers = new ArrayList<>();\nunionTypeNode.memberTypeNodes.forEach(typeNode -> rewrittenMembers.add(rewrite(typeNode, env)));\nunionTypeNode.memberTypeNodes = rewrittenMembers;\nresult = unionTypeNode;\n}\n@Override\npublic void visit(BLangValueType valueType) {\nresult = valueType;\n}\n@Override\npublic void visit(BLangBuiltInRefTypeNode builtInRefTypeNode) {\nresult = builtInRefTypeNode;\n}\n@Override\npublic void visit(BLangStreamType streamType) {\nstreamType.constraint = rewrite(streamType.constraint, env);\nstreamType.error = rewrite(streamType.error, env);\nresult = streamType;\n}\n@Override\npublic void visit(BLangConstrainedType constrainedType) {\nconstrainedType.constraint = rewrite(constrainedType.constraint, env);\nresult = constrainedType;\n}\n@Override\npublic void visit(BLangErrorType errorType) {\nerrorType.detailType = rewrite(errorType.detailType, env);\nresult = errorType;\n}\n@Override\npublic void visit(BLangTableTypeNode tableTypeNode) {\ntableTypeNode.constraint = rewrite(tableTypeNode.constraint, env);\ntableTypeNode.tableKeyTypeConstraint = rewrite(tableTypeNode.tableKeyTypeConstraint, env);\nresult = tableTypeNode;\n}\n@Override\npublic void visit(BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) {\nresult = resourceAccessInvocation;\n}\n@Override\npublic void visit(BLangTableKeyTypeConstraint keyTypeConstraint) {\nkeyTypeConstraint.keyType = rewrite(keyTypeConstraint.keyType, env);\nresult = keyTypeConstraint;\n}\n@Override\npublic void visit(BLangFunctionTypeNode functionTypeNode) {\nSymbolEnv funcEnv = SymbolEnv.createTypeEnv(functionTypeNode, functionTypeNode.getBType().tsymbol.scope, env);\nfor (BLangSimpleVariable param : functionTypeNode.params) {\nrewrite(param, funcEnv);\n}\nif (functionTypeNode.restParam != null) {\nfunctionTypeNode.restParam.typeNode = rewrite(functionTypeNode.restParam.typeNode, env);\n}\nif (functionTypeNode.returnTypeNode != null) {\nfunctionTypeNode.returnTypeNode = rewrite(functionTypeNode.returnTypeNode, env);\n}\nresult = functionTypeNode;\n}\n@Override\npublic void visit(BLangSimpleVariable varNode) {\nif (varNode.typeNode != null && varNode.typeNode.getKind() != null) {\nvarNode.typeNode = rewrite(varNode.typeNode, env);\n}\nBLangExpression bLangExpression;\nif (varNode.symbol != null && Symbols.isFlagOn(varNode.symbol.flags, Flags.DEFAULTABLE_PARAM)) {\nString closureName = generateName(varNode.symbol.name.value, env.node);\nbLangExpression = createClosureForDefaultValue(closureName, varNode.name.value, varNode);\n} else {\nbLangExpression = rewriteExpr(varNode.expr);\n}\nvarNode.expr = bLangExpression;\nresult = varNode;\n}\nprivate BSymbol getOwner(SymbolEnv symbolEnv) {\nwhile (symbolEnv.node.getKind() != NodeKind.PACKAGE) {\nNodeKind 
kind = symbolEnv.node.getKind();\nif (kind != NodeKind.BLOCK_FUNCTION_BODY && kind != NodeKind.BLOCK) {\nsymbolEnv = symbolEnv.enclEnv;\ncontinue;\n}\nreturn symbolEnv.enclInvokable.symbol;\n}\nreturn symbolEnv.enclPkg.symbol;\n}\nprivate BLangExpression createClosureForDefaultValue(String closureName, String paramName,\nBLangSimpleVariable varNode) {\nBSymbol owner = getOwner(env);\nBInvokableTypeSymbol symbol = (BInvokableTypeSymbol) env.node.getBType().tsymbol;\nBLangFunction function = createFunction(closureName, varNode.pos, owner.pkgID, owner, varNode.getBType());\nupdateFunctionParams(function, symbol.params, paramName);\nBLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(function.pos, (BLangBlockFunctionBody) function.body);\nreturnStmt.expr = varNode.expr;\nBLangLambdaFunction lambdaFunction = createLambdaFunction(function);\nlambdaFunction.capturedClosureEnv = env.createClone();\nBInvokableSymbol varSymbol = createSimpleVariable(function, lambdaFunction, false);\nenv.enclPkg.symbol.scope.define(function.symbol.name, function.symbol);\nenv.enclPkg.functions.add(function);\nenv.enclPkg.topLevelNodes.add(function);\nsymbol.defaultValues.put(paramName, varSymbol);\nreturn returnStmt.expr;\n}\nprivate void updateFunctionParams(BLangFunction funcNode, List params, String paramName) {\nBInvokableSymbol funcSymbol = funcNode.symbol;\nfor (BVarSymbol symbol : params) {\nif (paramName.equals(symbol.name.value)) {\nbreak;\n}\nBInvokableType funcType = (BInvokableType) funcSymbol.type;\nBVarSymbol varSymbol = ASTBuilderUtil.duplicateParamSymbol(symbol, funcSymbol);\nvarSymbol.flags = 0;\nfuncSymbol.scope.define(varSymbol.name, varSymbol);\nfuncSymbol.params.add(varSymbol);\nfuncType.paramTypes.add(varSymbol.type);\nfuncNode.requiredParams.add(ASTBuilderUtil.createVariable(varSymbol.pos, varSymbol.name.value,\nvarSymbol.type, null, varSymbol));\n}\n}\nBLangLambdaFunction createLambdaFunction(BLangFunction function) {\nBLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();\nlambdaFunction.function = function;\nlambdaFunction.setBType(function.getBType());\nlambdaFunction.capturedClosureEnv = env;\nreturn lambdaFunction;\n}\npublic BInvokableSymbol createSimpleVariable(BLangFunction function, BLangLambdaFunction lambdaFunction,\nboolean isAnnotationClosure) {\nBInvokableSymbol invokableSymbol = function.symbol;\nBType type = function.getBType();\nBInvokableSymbol varSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, invokableSymbol.name,\ninvokableSymbol.pkgID, type,\ninvokableSymbol.owner, function.pos, VIRTUAL);\nvarSymbol.params = invokableSymbol.params;\nvarSymbol.restParam = invokableSymbol.restParam;\nvarSymbol.retType = invokableSymbol.retType;\nBLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(function.pos, function.name.value, type,\nlambdaFunction, varSymbol);\nBLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(function.pos);\nvariableDef.var = simpleVariable;\nvariableDef.setBType(type);\nif (isAnnotationClosure) {\nannotationClosureReferences.add(variableDef);\nreturn varSymbol;\n}\nqueue.add(variableDef);\nreturn varSymbol;\n}\npublic BVarSymbol createSimpleVariable(BInvokableSymbol invokableSymbol, boolean isAnnotationClosure) {\nBType type = invokableSymbol.retType;\nLocation pos = invokableSymbol.pos;\nName name = invokableSymbol.name;\nBVarSymbol varSymbol = new BVarSymbol(0, name, invokableSymbol.originalName, invokableSymbol.pkgID, type,\ninvokableSymbol.owner, pos, 
VIRTUAL);\nBLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(pos, name.value, type,\ngetInvocation(invokableSymbol), varSymbol);\nBLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(pos);\nvariableDef.var = simpleVariable;\nvariableDef.setBType(type);\nif (isAnnotationClosure) {\nannotationClosureReferences.add(variableDef);\nreturn varSymbol;\n}\nqueue.add(variableDef);\nreturn varSymbol;\n}\nprivate BLangInvocation getInvocation(BInvokableSymbol symbol) {\nBLangInvocation funcInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();\nfuncInvocation.setBType(symbol.retType);\nfuncInvocation.symbol = symbol;\nfuncInvocation.name = ASTBuilderUtil.createIdentifier(symbol.pos, symbol.name.value);\nfuncInvocation.functionPointerInvocation = true;\nreturn funcInvocation;\n}\nprivate BLangFunction createFunction(String funcName, Location pos, PackageID pkgID, BSymbol owner, BType bType) {\nBLangFunction function = ASTBuilderUtil.createFunction(pos, funcName);\nfunction.flagSet.add(Flag.PUBLIC);\nBInvokableTypeSymbol invokableTypeSymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, Flags.PUBLIC,\npkgID, bType, owner, pos, VIRTUAL);\nfunction.setBType(new BInvokableType(new ArrayList<>(), bType, invokableTypeSymbol));\nBLangBuiltInRefTypeNode typeNode = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();\ntypeNode.setBType(bType);\ntypeNode.typeKind = bType.getKind();\ntypeNode.pos = pos;\nfunction.returnTypeNode = typeNode;\nBInvokableSymbol functionSymbol = new BInvokableSymbol(SymTag.FUNCTION, Flags.PUBLIC, new Name(funcName), pkgID,\nfunction.getBType(), owner, pos, VIRTUAL);\nfunctionSymbol.bodyExist = true;\nfunctionSymbol.kind = SymbolKind.FUNCTION;\nfunctionSymbol.retType = function.returnTypeNode.getBType();\nfunctionSymbol.scope = new Scope(functionSymbol);\nfunction.symbol = functionSymbol;\nreturn function;\n}\nprivate String generateName(String name, BLangNode parent) {\nif (parent == null) {\nreturn DOLLAR + name;\n}\nswitch (parent.getKind()) {\ncase CLASS_DEFN:\nname = ((BLangClassDefinition) parent).name.getValue() + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ncase FUNCTION:\nname = ((BLangFunction) parent).symbol.name.value.replaceAll(\"\\\\.\", UNDERSCORE) + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ncase RESOURCE_FUNC:\nname = ((BLangResourceFunction) parent).name.value + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ncase VARIABLE:\nname = ((BLangSimpleVariable) parent).name.getValue() + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ncase TYPE_DEFINITION:\nname = ((BLangTypeDefinition) parent).name.getValue() + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ncase SERVICE:\nname = ((BLangService) parent).name.getValue() + UNDERSCORE + name;\nreturn generateName(name, parent.parent);\ndefault:\nreturn generateName(name, parent.parent);\n}\n}\n@Override\npublic void visit(BLangTupleVariable varNode) {\nrewrite(varNode.restVariable, env);\nresult = varNode;\n}\n@Override\npublic void visit(BLangRecordVariable varNode) {\nvarNode.expr = rewriteExpr(varNode.expr);\nresult = varNode;\n}\n@Override\npublic void visit(BLangErrorVariable varNode) {\nvarNode.expr = rewriteExpr(varNode.expr);\nresult = varNode;\n}\n@Override\npublic void visit(BLangTupleVariableDef varDefNode) {\nvarDefNode.var = rewrite(varDefNode.var, env);\nresult = varDefNode;\n}\n@Override\npublic void visit(BLangRecordVariableDef varDefNode) 
{\nvarDefNode.var = rewrite(varDefNode.var, env);\nresult = varDefNode;\n}\n@Override\npublic void visit(BLangErrorVariableDef varDefNode) {\nvarDefNode.errorVariable = rewrite(varDefNode.errorVariable, env);\nresult = varDefNode;\n}\n@Override\npublic void visit(BLangAssignment assignNode) {\nassignNode.varRef = rewriteExpr(assignNode.varRef);\nassignNode.expr = rewriteExpr(assignNode.expr);\nresult = assignNode;\n}\n@Override\npublic void visit(BLangTupleDestructure tupleDestructure) {\nresult = tupleDestructure;\n}\n@Override\npublic void visit(BLangRecordDestructure recordDestructure) {\nresult = recordDestructure;\n}\n@Override\npublic void visit(BLangErrorDestructure errorDestructure) {\nresult = errorDestructure;\n}\n@Override\npublic void visit(BLangRetry retryNode) {\nretryNode.retryBody = rewrite(retryNode.retryBody, env);\nresult = retryNode;\n}\n@Override\npublic void visit(BLangRetryTransaction retryTransaction) {\nretryTransaction.transaction = rewrite(retryTransaction.transaction, env);\nresult = retryTransaction;\n}\n@Override\npublic void visit(BLangContinue nextNode) {\nresult = nextNode;\n}\n@Override\npublic void visit(BLangBreak breakNode) {\nresult = breakNode;\n}\n@Override\npublic void visit(BLangPanic panicNode) {\npanicNode.expr = rewriteExpr(panicNode.expr);\nresult = panicNode;\n}\n@Override\npublic void visit(BLangDo doNode) {\ndoNode.body = rewrite(doNode.body, env);\nresult = doNode;\n}\n@Override\npublic void visit(BLangXMLNSStatement xmlnsStmtNode) {\nxmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env);\nresult = xmlnsStmtNode;\n}\n@Override\npublic void visit(BLangXMLNS xmlnsNode) {\nxmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);\nresult = xmlnsNode;\n}\n@Override\npublic void visit(BLangExpressionStmt exprStmtNode) {\nexprStmtNode.expr = rewriteExpr(exprStmtNode.expr);\nresult = exprStmtNode;\n}\n@Override\npublic void visit(BLangFail failNode) {\nif (failNode.exprStmt != null) {\nfailNode.exprStmt = rewrite(failNode.exprStmt, env);\n}\nresult = failNode;\n}\n@Override\npublic void visit(BLangIf ifNode) {\nifNode.expr = rewriteExpr(ifNode.expr);\nifNode.body = rewrite(ifNode.body, env);\nifNode.elseStmt = rewrite(ifNode.elseStmt, env);\nresult = ifNode;\n}\n@Override\npublic void visit(BLangForeach foreach) {\nresult = foreach;\n}\n@Override\npublic void visit(BLangWhile whileNode) {\nwhileNode.expr = rewriteExpr(whileNode.expr);\nwhileNode.body = rewrite(whileNode.body, env);\nresult = whileNode;\n}\n@Override\npublic void visit(BLangLock lockNode) {\nlockNode.body = rewrite(lockNode.body, env);\nresult = lockNode;\n}\n@Override\npublic void visit(BLangLock.BLangLockStmt lockNode) {\nresult = lockNode;\n}\n@Override\npublic void visit(BLangLock.BLangUnLockStmt unLockNode) {\nresult = unLockNode;\n}\n@Override\npublic void visit(BLangTransaction transactionNode) {\ntransactionNode.transactionBody = rewrite(transactionNode.transactionBody, env);\nresult = transactionNode;\n}\n@Override\npublic void visit(BLangRollback rollbackNode) {\nrollbackNode.expr = rewriteExpr(rollbackNode.expr);\nresult = rollbackNode;\n}\n@Override\npublic void visit(BLangTransactionalExpr transactionalExpr) {\nresult = transactionalExpr;\n}\n@Override\npublic void visit(BLangCommitExpr commitExpr) {\nresult = commitExpr;\n}\n@Override\npublic void visit(BLangForkJoin forkJoin) {\nresult = forkJoin;\n}\n@Override\npublic void visit(BLangLiteral literalExpr) {\nresult = literalExpr;\n}\n@Override\npublic void visit(BLangListConstructorExpr 
listConstructorExpr) {\nlistConstructorExpr.exprs = rewriteExprs(listConstructorExpr.exprs);\nresult = listConstructorExpr;\n}\n@Override\npublic void visit(BLangTableConstructorExpr tableConstructorExpr) {\nrewriteExprs(tableConstructorExpr.recordLiteralList);\nresult = tableConstructorExpr;\n}\n@Override\npublic void visit(BLangListConstructorExpr.BLangJSONArrayLiteral jsonArrayLiteral) {\njsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);\nresult = jsonArrayLiteral;\n}\n@Override\npublic void visit(BLangListConstructorExpr.BLangTupleLiteral tupleLiteral) {\ntupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);\nresult = tupleLiteral;\n}\n@Override\npublic void visit(BLangListConstructorExpr.BLangArrayLiteral arrayLiteral) {\narrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);\nresult = arrayLiteral;\n}\n@Override\npublic void visit(BLangSimpleVarRef varRefExpr) {\nBSymbol varRefSym = varRefExpr.symbol;\nif (varRefSym != null) {\nboolean isMemberOfFunction = Symbols.isFlagOn(varRefSym.flags, Flags.REQUIRED_PARAM) ||\nSymbols.isFlagOn(varRefSym.flags, Flags.DEFAULTABLE_PARAM);\nif (isMemberOfFunction) {\nupdateFunctionParamsOfClosures(env, varRefExpr);\n}\n}\nBLangInvokableNode encInvokable = env.enclInvokable;\nBSymbol symbol = varRefExpr.symbol;\nif (varRefSym == null || encInvokable == null || (symbol.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {\nresult = varRefExpr;\nreturn;\n}\nupdateClosureVariable((BVarSymbol) symbol, encInvokable, varRefExpr.pos);\nresult = varRefExpr;\n}\nprivate void updateFunctionParamsOfClosures(SymbolEnv symbolEnv, BLangSimpleVarRef varRefExpr) {\nBLangFunction closure = null;\nwhile (symbolEnv != null && symbolEnv.node.getKind() != NodeKind.PACKAGE) {\nif (symbolEnv.node.getKind() != NodeKind.FUNCTION) {\nsymbolEnv = symbolEnv.enclEnv;\ncontinue;\n}\nBLangFunction bLangFunction = (BLangFunction) symbolEnv.node;\nBLangInvokableNode enclInvokable = symbolEnv.enclInvokable;\nif (enclInvokable.flagSet.contains(Flag.LAMBDA) && !enclInvokable.flagSet.contains(Flag.QUERY_LAMBDA) &&\n!enclInvokable.flagSet.contains(Flag.ANONYMOUS)) {\nclosure = bLangFunction;\n}\nsymbolEnv = symbolEnv.enclEnv;\n}\nif (closure != null) {\nupdateFunctionParams(closure, varRefExpr);\n}\n}\nprivate void updateFunctionParams(BLangFunction funcNode, BLangSimpleVarRef varRefExpr) {\nBInvokableSymbol funcSymbol = funcNode.symbol;\nfor (BVarSymbol varSymbol : funcSymbol.params) {\nif (varSymbol.name.value.equals(varRefExpr.symbol.name.value)) {\nvarRefExpr.symbol = varSymbol;\nreturn;\n}\n}\n}\nprivate SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {\nif (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {\nreturn env.enclEnv;\n}\nif (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) {\nreturn env.enclEnv;\n}\nif (env.enclInvokable != null && env.enclInvokable == encInvokable) {\nreturn findEnclosingInvokableEnv(env.enclEnv, encInvokable);\n}\nreturn env;\n}\n@Override\npublic void visit(BLangFieldBasedAccess fieldAccessExpr) {\nfieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);\nresult = fieldAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess indexAccessExpr) {\nindexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);\nindexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);\nresult = indexAccessExpr;\n}\n@Override\npublic void visit(BLangCompoundAssignment compoundAssignment) {\nresult = compoundAssignment;\n}\n@Override\npublic void
visit(BLangInvocation invocation) {\nrewriteInvocationExpr(invocation);\nBLangInvokableNode encInvokable = env.enclInvokable;\nif (encInvokable == null || !invocation.functionPointerInvocation) {\nreturn;\n}\nupdateClosureVariable((BVarSymbol) invocation.symbol, encInvokable, invocation.pos);\n}\npublic void rewriteInvocationExpr(BLangInvocation invocation) {\ninvocation.requiredArgs = rewriteExprs(invocation.requiredArgs);\nresult = invocation;\n}\n@Override\npublic void visit(BLangQueryAction queryAction) {\nresult = queryAction;\n}\n@Override\npublic void visit(BLangCheckPanickedExpr checkedExpr) {\nresult = checkedExpr;\n}\n@Override\npublic void visit(BLangErrorConstructorExpr errorConstructorExpr) {\nrewriteExprs(errorConstructorExpr.positionalArgs);\nerrorConstructorExpr.errorDetail = rewriteExpr(errorConstructorExpr.errorDetail);\nresult = errorConstructorExpr;\n}\n@Override\npublic void visit(BLangTypeInit typeInitExpr) {\ntypeInitExpr.initInvocation = rewriteExpr(typeInitExpr.initInvocation);\nresult = typeInitExpr;\n}\n@Override\npublic void visit(BLangTernaryExpr ternaryExpr) {\nternaryExpr.expr = rewriteExpr(ternaryExpr.expr);\nternaryExpr.thenExpr = rewriteExpr(ternaryExpr.thenExpr);\nternaryExpr.elseExpr = rewriteExpr(ternaryExpr.elseExpr);\nresult = ternaryExpr;\n}\n@Override\npublic void visit(BLangWaitExpr waitExpr) {\nList exprList = new ArrayList<>();\nwaitExpr.exprList.forEach(expression -> exprList.add(rewriteExpr(expression)));\nwaitExpr.exprList = exprList;\nresult = waitExpr;\n}\n@Override\npublic void visit(BLangWaitForAllExpr waitExpr) {\nresult = waitExpr;\n}\n@Override\npublic void visit(BLangTrapExpr trapExpr) {\ntrapExpr.expr = rewriteExpr(trapExpr.expr);\nresult = trapExpr;\n}\n@Override\npublic void visit(BLangBinaryExpr binaryExpr) {\nbinaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);\nbinaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);\nresult = binaryExpr;\n}\n@Override\npublic void visit(BLangElvisExpr elvisExpr) {\nresult = elvisExpr;\n}\n@Override\npublic void visit(BLangGroupExpr groupExpr) {\ngroupExpr.expression = rewriteExpr(groupExpr.expression);\nresult = groupExpr;\n}\n@Override\npublic void visit(BLangUnaryExpr unaryExpr) {\nunaryExpr.expr = rewriteExpr(unaryExpr.expr);\nresult = unaryExpr;\n}\n@Override\npublic void visit(BLangTypeConversionExpr conversionExpr) {\nconversionExpr.expr = rewriteExpr(conversionExpr.expr);\nconversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);\nresult = conversionExpr;\n}\n@Override\npublic void visit(BLangLambdaFunction bLangLambdaFunction) {\nbLangLambdaFunction.function = rewrite(bLangLambdaFunction.function, bLangLambdaFunction.capturedClosureEnv);\nresult = bLangLambdaFunction;\n}\n@Override\npublic void visit(BLangArrowFunction bLangArrowFunction) {\nresult = bLangArrowFunction;\n}\n@Override\npublic void visit(BLangXMLQName xmlQName) {\nresult = xmlQName;\n}\n@Override\npublic void visit(BLangXMLAttribute xmlAttribute) {\nxmlAttribute.name = rewriteExpr(xmlAttribute.name);\nxmlAttribute.value = rewriteExpr(xmlAttribute.value);\nresult = xmlAttribute;\n}\n@Override\npublic void visit(BLangXMLElementLiteral xmlElementLiteral) {\nxmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);\nxmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);\nxmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);\nxmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);\nresult = xmlElementLiteral;\n}\n@Override\npublic 
void visit(BLangXMLTextLiteral xmlTextLiteral) {\nxmlTextLiteral.textFragments.forEach(this::rewriteExpr);\nxmlTextLiteral.concatExpr = rewriteExpr(xmlTextLiteral.concatExpr);\nresult = xmlTextLiteral;\n}\n@Override\npublic void visit(BLangXMLCommentLiteral xmlCommentLiteral) {\nxmlCommentLiteral.textFragments.forEach(this::rewriteExpr);\nresult = xmlCommentLiteral;\n}\n@Override\npublic void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {\nxmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);\nxmlProcInsLiteral.dataFragments.forEach(this::rewriteExpr);\nresult = xmlProcInsLiteral;\n}\n@Override\npublic void visit(BLangXMLQuotedString xmlQuotedString) {\nxmlQuotedString.textFragments.forEach(this::rewriteExpr);\nresult = xmlQuotedString;\n}\n@Override\npublic void visit(BLangStringTemplateLiteral stringTemplateLiteral) {\nstringTemplateLiteral.exprs.forEach(this::rewriteExpr);\nresult = stringTemplateLiteral;\n}\n@Override\npublic void visit(BLangWorkerSend workerSendNode) {\nworkerSendNode.expr = rewriteExpr(workerSendNode.expr);\nresult = workerSendNode;\n}\n@Override\npublic void visit(BLangWorkerSyncSendExpr syncSendExpr) {\nsyncSendExpr.expr = rewriteExpr(syncSendExpr.expr);\nresult = syncSendExpr;\n}\n@Override\npublic void visit(BLangWorkerReceive workerReceiveNode) {\nresult = workerReceiveNode;\n}\n@Override\npublic void visit(BLangWorkerFlushExpr workerFlushExpr) {\nresult = workerFlushExpr;\n}\n@Override\npublic void visit(BLangSimpleVarRef.BLangLocalVarRef localVarRef) {\nBLangInvokableNode encInvokable = env.enclInvokable;\nBSymbol symbol = localVarRef.symbol;\nif (encInvokable == null || (symbol.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {\nresult = localVarRef;\nreturn;\n}\nupdateClosureVariable((BVarSymbol) symbol, encInvokable, localVarRef.pos);\nresult = localVarRef;\n}\nprivate void updateClosureVariable(BVarSymbol varSymbol, BLangInvokableNode encInvokable, Location pos) {\nSet flagSet = encInvokable.flagSet;\nboolean isClosure = !flagSet.contains(Flag.QUERY_LAMBDA) && flagSet.contains(Flag.LAMBDA) &&\n!flagSet.contains(Flag.ATTACHED);\nif (!varSymbol.closure && isClosure) {\nSymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);\nBSymbol resolvedSymbol =\nsymResolver.lookupClosureVarSymbol(encInvokableEnv, varSymbol.name, SymTag.VARIABLE);\nif (resolvedSymbol != symTable.notFoundSymbol) {\nvarSymbol.closure = true;\n((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(varSymbol, pos));\n}\n}\n}\n@Override\npublic void visit(BLangIgnoreExpr ignoreExpr) {\nresult = ignoreExpr;\n}\n@Override\npublic void visit(BLangDynamicArgExpr dynamicParamExpr) {\ndynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);\ndynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);\nresult = dynamicParamExpr;\n}\n@Override\npublic void visit(BLangSimpleVarRef.BLangPackageVarRef packageVarRef) {\nresult = packageVarRef;\n}\n@Override\npublic void visit(BLangConstRef constRef) {\nresult = constRef;\n}\n@Override\npublic void visit(BLangSimpleVarRef.BLangFunctionVarRef functionVarRef) {\nresult = functionVarRef;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangStructFieldAccessExpr fieldAccessExpr) {\nfieldAccessExpr.indexExpr = rewriteExpr(fieldAccessExpr.indexExpr);\nfieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);\nresult = fieldAccessExpr;\n}\n@Override\npublic void visit(BLangFieldBasedAccess.BLangStructFunctionVarRef functionVarRef) {\nfunctionVarRef.expr = 
rewriteExpr(functionVarRef.expr);\nresult = functionVarRef;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangMapAccessExpr mapKeyAccessExpr) {\nmapKeyAccessExpr.indexExpr = rewriteExpr(mapKeyAccessExpr.indexExpr);\nmapKeyAccessExpr.expr = rewriteExpr(mapKeyAccessExpr.expr);\nresult = mapKeyAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangTableAccessExpr tableKeyAccessExpr) {\ntableKeyAccessExpr.indexExpr = rewriteExpr(tableKeyAccessExpr.indexExpr);\ntableKeyAccessExpr.expr = rewriteExpr(tableKeyAccessExpr.expr);\nresult = tableKeyAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangArrayAccessExpr arrayIndexAccessExpr) {\narrayIndexAccessExpr.indexExpr = rewriteExpr(arrayIndexAccessExpr.indexExpr);\narrayIndexAccessExpr.expr = rewriteExpr(arrayIndexAccessExpr.expr);\nresult = arrayIndexAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangTupleAccessExpr arrayIndexAccessExpr) {\narrayIndexAccessExpr.indexExpr = rewriteExpr(arrayIndexAccessExpr.indexExpr);\narrayIndexAccessExpr.expr = rewriteExpr(arrayIndexAccessExpr.expr);\nresult = arrayIndexAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangXMLAccessExpr xmlIndexAccessExpr) {\nxmlIndexAccessExpr.indexExpr = rewriteExpr(xmlIndexAccessExpr.indexExpr);\nxmlIndexAccessExpr.expr = rewriteExpr(xmlIndexAccessExpr.expr);\nresult = xmlIndexAccessExpr;\n}\n@Override\npublic void visit(BLangXMLElementAccess xmlElementAccess) {\nxmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);\nresult = xmlElementAccess;\n}\n@Override\npublic void visit(BLangXMLNavigationAccess xmlNavigation) {\nxmlNavigation.expr = rewriteExpr(xmlNavigation.expr);\nxmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);\nresult = xmlNavigation;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangJSONAccessExpr jsonAccessExpr) {\njsonAccessExpr.indexExpr = rewriteExpr(jsonAccessExpr.indexExpr);\njsonAccessExpr.expr = rewriteExpr(jsonAccessExpr.expr);\nresult = jsonAccessExpr;\n}\n@Override\npublic void visit(BLangIndexBasedAccess.BLangStringAccessExpr stringAccessExpr) {\nstringAccessExpr.indexExpr = rewriteExpr(stringAccessExpr.indexExpr);\nstringAccessExpr.expr = rewriteExpr(stringAccessExpr.expr);\nresult = stringAccessExpr;\n}\n@Override\npublic void visit(BLangRecordLiteral.BLangMapLiteral mapLiteral) {\nfor (RecordLiteralNode.RecordField field : mapLiteral.fields) {\nif (field.isKeyValueField()) {\nBLangRecordLiteral.BLangRecordKeyValueField keyValueField =\n(BLangRecordLiteral.BLangRecordKeyValueField) field;\nkeyValueField.key.expr = rewriteExpr(keyValueField.key.expr);\nkeyValueField.valueExpr = rewriteExpr(keyValueField.valueExpr);\ncontinue;\n}\nBLangRecordLiteral.BLangRecordSpreadOperatorField spreadField =\n(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;\nspreadField.expr = rewriteExpr(spreadField.expr);\n}\nresult = mapLiteral;\n}\n@Override\npublic void visit(BLangRecordLiteral.BLangStructLiteral structLiteral) {\nfor (RecordLiteralNode.RecordField field : structLiteral.fields) {\nif (field.isKeyValueField()) {\nBLangRecordLiteral.BLangRecordKeyValueField keyValueField =\n(BLangRecordLiteral.BLangRecordKeyValueField) field;\nkeyValueField.key.expr = rewriteExpr(keyValueField.key.expr);\nkeyValueField.valueExpr = rewriteExpr(keyValueField.valueExpr);\ncontinue;\n}\nBLangRecordLiteral.BLangRecordSpreadOperatorField spreadField =\n(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;\nspreadField.expr = 
rewriteExpr(spreadField.expr);\n}\nresult = structLiteral;\n}\n@Override\npublic void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {\nwaitLiteral.keyValuePairs.forEach(keyValue -> {\nif (keyValue.valueExpr != null) {\nkeyValue.valueExpr = rewriteExpr(keyValue.valueExpr);\n} else {\nkeyValue.keyExpr = rewriteExpr(keyValue.keyExpr);\n}\n});\nresult = waitLiteral;\n}\n@Override\npublic void visit(BLangIsAssignableExpr assignableExpr) {\nassignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);\nresult = assignableExpr;\n}\n@Override\npublic void visit(BLangInvocation.BFunctionPointerInvocation fpInvocation) {\nfpInvocation.expr = rewriteExpr(fpInvocation.expr);\nrewriteInvocationExpr(fpInvocation);\n}\n@Override\npublic void visit(BLangTypedescExpr accessExpr) {\nresult = accessExpr;\n}\n@Override\npublic void visit(BLangRestArgsExpression bLangVarArgsExpression) {\nresult = rewriteExpr(bLangVarArgsExpression.expr);\n}\n@Override\npublic void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {\nbLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);\nresult = bLangNamedArgsExpression;\n}\n@Override\npublic void visit(BLangCheckedExpr checkedExpr) {\nresult = checkedExpr;\n}\n@Override\npublic void visit(BLangServiceConstructorExpr serviceConstructorExpr) {\nresult = serviceConstructorExpr;\n}\n@Override\npublic void visit(BLangTypeTestExpr typeTestExpr) {\ntypeTestExpr.expr = rewriteExpr(typeTestExpr.expr);\nresult = typeTestExpr;\n}\n@Override\npublic void visit(BLangIsLikeExpr isLikeExpr) {\nisLikeExpr.expr = rewriteExpr(isLikeExpr.expr);\nresult = isLikeExpr;\n}\npublic void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {\nresult = nsPrefixedFieldBasedAccess;\n}\n@Override\npublic void visit(BLangLetExpression letExpression) {\nfor (BLangLetVariable letVariable : letExpression.letVarDeclarations) {\nrewrite((BLangNode) letVariable.definitionNode, env);\n}\nletExpression.expr = rewriteExpr(letExpression.expr);\nresult = letExpression;\n}\n@Override\npublic void visit(BLangAnnotAccessExpr annotAccessExpr) {\nannotAccessExpr.expr = rewriteExpr(annotAccessExpr.expr);\nresult = annotAccessExpr;\n}\n@Override\npublic void visit(BLangStatementExpression bLangStatementExpression) {\nif (bLangStatementExpression.stmt.getKind() == NodeKind.BLOCK) {\nBLangBlockStmt bLangBlockStmt = (BLangBlockStmt) bLangStatementExpression.stmt;\nfor (int i = 0; i < bLangBlockStmt.stmts.size(); i++) {\nBLangStatement stmt = bLangBlockStmt.stmts.remove(i);\nbLangBlockStmt.stmts.add(i, rewrite(stmt, env));\n}\n} else {\nbLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);\n}\nbLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);\nresult = bLangStatementExpression;\n}\n@Override\npublic void visit(BLangInvocation.BLangActionInvocation invocation) {\nrewriteInvocationExpr(invocation);\n}\n@Override\npublic void visit(BLangIdentifier identifierNode) {\n/* ignore */\n}\n@Override\npublic void visit(BLangAnnotation annotationNode) {\n/* ignore */\n}\n@Override\npublic void visit(BLangAnnotationAttachment annAttachmentNode) {\n/* ignore */\n}\n@Override\npublic void visit(BLangConstant constant) {\nresult = constant;\n}\n@Override\npublic void visit(BLangNumericLiteral literalExpr) {\nresult = literalExpr;\n}\n@Override\npublic void visit(BLangTupleVarRef varRefExpr) {\nresult = varRefExpr;\n}\n@Override\npublic void visit(BLangRecordVarRef varRefExpr) {\nresult = varRefExpr;\n}\n@Override\npublic 
void visit(BLangErrorVarRef varRefExpr) {\nresult = varRefExpr;\n}\n@Override\npublic void visit(BLangSimpleVarRef.BLangTypeLoad typeLoad) {\nresult = typeLoad;\n}\n@Override\npublic void visit(BLangRecordLiteral.BLangChannelLiteral channelLiteral) {\nchannelLiteral.fields.forEach(field -> {\nBLangRecordLiteral.BLangRecordKeyValueField keyValue = (BLangRecordLiteral.BLangRecordKeyValueField) field;\nkeyValue.key.expr = rewriteExpr(keyValue.key.expr);\nkeyValue.valueExpr = rewriteExpr(keyValue.valueExpr);\n});\nresult = channelLiteral;\n}\n@Override\npublic void visit(BLangXMLNS.BLangLocalXMLNS xmlnsNode) {\nxmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);\nresult = xmlnsNode;\n}\n@Override\npublic void visit(BLangXMLNS.BLangPackageXMLNS xmlnsNode) {\nxmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);\nresult = xmlnsNode;\n}\n@Override\npublic void visit(BLangListConstructorExpr.BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {\nlistConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);\nresult = listConstructorSpreadOpExpr;\n}\n@Override\npublic void visit(BLangQueryExpr queryExpr) {\nresult = queryExpr;\n}\n@Override\npublic void visit(BLangMatchStatement matchStatement) {\nresult = matchStatement;\n}\n@Override\npublic void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {\nxmlSequenceLiteral.xmlItems.forEach(this::rewriteExpr);\nresult = xmlSequenceLiteral;\n}\n@Override\npublic void visit(BLangRegExpTemplateLiteral regExpTemplateLiteral) {\nList interpolationsList =\nsymResolver.getListOfInterpolations(regExpTemplateLiteral.reDisjunction.sequenceList);\ninterpolationsList.forEach(this::rewriteExpr);\nresult = regExpTemplateLiteral;\n}\n@Override\npublic void visit(BLangMarkdownDocumentationLine bLangMarkdownDocumentationLine) {\n/* Ignore */\n}\n@Override\npublic void visit(BLangMarkdownParameterDocumentation bLangDocumentationParameter) {\n/* Ignore */\n}\n@Override\npublic void visit(BLangMarkdownReturnParameterDocumentation bLangMarkdownReturnParameterDocumentation) {\n/* Ignore */\n}\n@Override\npublic void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {\nresult = inferTypedescExpr;\n}\n@Override\npublic void visit(BLangMarkdownDocumentation bLangMarkdownDocumentation) {\n/* Ignore */\n}\n@SuppressWarnings(\"unchecked\")\nprivate E rewrite(E node, SymbolEnv env) {\nif (node == null) {\nreturn null;\n}\nSymbolEnv previousEnv = this.env;\nthis.env = env;\nnode.accept(this);\nBLangNode resultNode = this.result;\nthis.result = null;\nthis.env = previousEnv;\nreturn (E) resultNode;\n}\nprivate List rewrite(List nodeList, SymbolEnv env) {\nfor (int i = 0; i < nodeList.size(); i++) {\nnodeList.set(i, rewrite(nodeList.get(i), env));\n}\nreturn nodeList;\n}\n@SuppressWarnings(\"unchecked\")\nprivate E rewriteExpr(E node) {\nif (node == null) {\nreturn null;\n}\nnode.accept(this);\nBLangNode resultNode = this.result;\nthis.result = null;\nreturn (E) resultNode;\n}\n@SuppressWarnings(\"unchecked\")\nprivate List rewriteStmt(List nodeList, SymbolEnv env) {\nQueue previousQueue = this.queue;\nthis.queue = new LinkedList<>();\nint size = nodeList.size();\nfor (int i = 0; i < size; i++) {\nE node = rewrite(nodeList.remove(0), env);\nIterator iterator = queue.iterator();\nwhile (iterator.hasNext()) {\nnodeList.add(rewrite((E) queue.poll(), env));\n}\nnodeList.add(node);\n}\nthis.queue = previousQueue;\nreturn nodeList;\n}\n@SuppressWarnings(\"unchecked\")\nprivate List rewriteExprs(List nodeList) {\nfor (int i = 0; i < 
nodeList.size(); i++) {\nnodeList.set(i, rewriteExpr(nodeList.get(i)));\n}\nreturn nodeList;\n}\n}" + }, + { + "comment": "The main reason is that the CatalogManager and FunctionManager use sessionConfiguration. If we rebuilt a new Configuration, we couldn't modify the option values seen by the CatalogManager and FunctionManager.", + "method_body": "public void set(String key, String value) {\ntry {\nthis.sessionEnv = Environment.enrich(sessionEnv, Collections.singletonMap(key, value));\nsessionConfiguration.setString(key, value);\n} catch (Throwable t) {\nthrow new SqlExecutionException(\"Could not set session property.\", t);\n}\nthis.executionContext = new ExecutionContext(sessionEnv, executionContext);\n}", + "target_code": "sessionConfiguration.setString(key, value);", + "method_body_after": "public void set(String key, String value) {\ntry {\nthis.sessionEnv = Environment.enrich(sessionEnv, Collections.singletonMap(key, value));\nsessionConfiguration.setString(key, value);\n} catch (Throwable t) {\nthrow new SqlExecutionException(\"Could not set session property.\", t);\n}\nthis.executionContext = new ExecutionContext(sessionEnv, executionContext);\n}", + "context_before": "class SessionContext {\nprivate static final Logger LOG = LoggerFactory.getLogger(SessionContext.class);\nprivate final String sessionId;\nprivate final DefaultContext defaultContext;\nprivate Environment sessionEnv;\nprivate final Configuration sessionConfiguration;\nprivate final SessionState sessionState;\nprivate final URLClassLoader classLoader;\nprivate ExecutionContext executionContext;\npublic SessionContext(\nDefaultContext defaultContext,\nString sessionId,\nEnvironment sessionEnv,\nConfiguration sessionConfiguration,\nURLClassLoader classLoader,\nSessionState sessionState,\nExecutionContext executionContext) {\nthis.defaultContext = defaultContext;\nthis.sessionId = sessionId;\nthis.sessionEnv = sessionEnv;\nthis.sessionConfiguration = sessionConfiguration;\nthis.classLoader = classLoader;\nthis.sessionState = sessionState;\nthis.executionContext = executionContext;\n}\npublic String getSessionId() {\nreturn this.sessionId;\n}\npublic Environment getSessionEnvironment() {\nreturn this.sessionEnv;\n}\npublic ExecutionContext getExecutionContext() {\nreturn this.executionContext;\n}\n/** Reset properties to default. It will rebuild a new {@link ExecutionContext}. */\npublic void reset() {\nsessionEnv = defaultContext.getDefaultEnv().clone();\nfor (String key : sessionConfiguration.toMap().keySet()) {\nConfigOption keyToDelete = ConfigOptions.key(key).stringType().noDefaultValue();\nsessionConfiguration.removeConfig(keyToDelete);\n}\nsessionConfiguration.addAll(defaultContext.getFlinkConfig());\nexecutionContext = new ExecutionContext(sessionEnv, executionContext);\n}\n/** Set properties. It will rebuild a new {@link ExecutionContext} */\n/** Close resources, e.g. catalogs. 
*/\npublic void close() {\ntry (TemporaryClassLoaderContext ignored = TemporaryClassLoaderContext.of(classLoader)) {\nfor (String name : sessionState.catalogManager.listCatalogs()) {\nsessionState.catalogManager.getCatalog(name).ifPresent(Catalog::close);\n}\n}\ntry {\nclassLoader.close();\n} catch (IOException e) {\nLOG.debug(\"Error while closing class loader.\", e);\n}\n}\npublic static SessionContext create(DefaultContext defaultContext, String sessionId) {\nEnvironment sessionEnv = defaultContext.getDefaultEnv().clone();\nConfiguration configuration = defaultContext.getFlinkConfig().clone();\nURLClassLoader classLoader =\nClientUtils.buildUserCodeClassLoader(\ndefaultContext.getDependencies(),\nCollections.emptyList(),\nSessionContext.class.getClassLoader(),\nconfiguration);\nModuleManager moduleManager = new ModuleManager();\nfinal EnvironmentSettings settings = sessionEnv.getExecution().getEnvironmentSettings();\nCatalogManager catalogManager =\nCatalogManager.newBuilder()\n.classLoader(classLoader)\n.config(configuration)\n.defaultCatalog(\nsettings.getBuiltInCatalogName(),\nnew GenericInMemoryCatalog(\nsettings.getBuiltInCatalogName(),\nsettings.getBuiltInDatabaseName()))\n.build();\nFunctionCatalog functionCatalog =\nnew FunctionCatalog(configuration, catalogManager, moduleManager);\nSessionState sessionState =\nnew SessionState(catalogManager, moduleManager, functionCatalog);\nreturn new SessionContext(\ndefaultContext,\nsessionId,\nsessionEnv,\nconfiguration,\nclassLoader,\nsessionState,\nnew ExecutionContext(sessionEnv, configuration, classLoader, sessionState));\n}\n@Override\npublic boolean equals(Object o) {\nif (this == o) {\nreturn true;\n}\nif (!(o instanceof SessionContext)) {\nreturn false;\n}\nSessionContext context = (SessionContext) o;\nreturn Objects.equals(sessionId, context.sessionId)\n&& Objects.equals(defaultContext, context.defaultContext)\n&& Objects.equals(sessionEnv, context.sessionEnv)\n&& Objects.equals(sessionConfiguration, context.sessionConfiguration)\n&& Objects.equals(classLoader, context.classLoader)\n&& Objects.equals(sessionState, context.sessionState)\n&& Objects.equals(executionContext, context.executionContext);\n}\n@Override\npublic int hashCode() {\nreturn Objects.hash(\nsessionId,\ndefaultContext,\nsessionEnv,\nsessionConfiguration,\nclassLoader,\nsessionState,\nexecutionContext);\n}\n/** session state. 
*/\npublic static class SessionState {\npublic final CatalogManager catalogManager;\npublic final FunctionCatalog functionCatalog;\npublic final ModuleManager moduleManager;\npublic SessionState(\nCatalogManager catalogManager,\nModuleManager moduleManager,\nFunctionCatalog functionCatalog) {\nthis.catalogManager = catalogManager;\nthis.moduleManager = moduleManager;\nthis.functionCatalog = functionCatalog;\n}\n@Override\npublic boolean equals(Object obj) {\nif (this == obj) {\nreturn true;\n}\nif (!(obj instanceof SessionState)) {\nreturn false;\n}\nSessionState sessionState = (SessionState) obj;\nreturn catalogManager.equals(sessionState.catalogManager)\n&& moduleManager.equals(sessionState.moduleManager)\n&& functionCatalog.equals(sessionState.functionCatalog);\n}\n}\n}", + "context_after": "class SessionContext {\nprivate static final Logger LOG = LoggerFactory.getLogger(SessionContext.class);\nprivate final String sessionId;\nprivate final DefaultContext defaultContext;\nprivate Environment sessionEnv;\nprivate final Configuration sessionConfiguration;\nprivate final SessionState sessionState;\nprivate final URLClassLoader classLoader;\nprivate ExecutionContext executionContext;\nprivate SessionContext(\nDefaultContext defaultContext,\nString sessionId,\nEnvironment sessionEnv,\nConfiguration sessionConfiguration,\nURLClassLoader classLoader,\nSessionState sessionState,\nExecutionContext executionContext) {\nthis.defaultContext = defaultContext;\nthis.sessionId = sessionId;\nthis.sessionEnv = sessionEnv;\nthis.sessionConfiguration = sessionConfiguration;\nthis.classLoader = classLoader;\nthis.sessionState = sessionState;\nthis.executionContext = executionContext;\n}\npublic String getSessionId() {\nreturn this.sessionId;\n}\npublic Environment getSessionEnvironment() {\nreturn this.sessionEnv;\n}\npublic ExecutionContext getExecutionContext() {\nreturn this.executionContext;\n}\n/**\n* Reset properties to default. It will rebuild a new {@link ExecutionContext}.\n*\n*
Reset runtime configurations specific to the current session which were set via the SET\n* command to their default values.\n*/\npublic void reset() {\nsessionEnv = defaultContext.getDefaultEnv().clone();\nfor (String key : sessionConfiguration.toMap().keySet()) {\nConfigOption keyToDelete = ConfigOptions.key(key).stringType().noDefaultValue();\nsessionConfiguration.removeConfig(keyToDelete);\n}\nsessionConfiguration.addAll(defaultContext.getFlinkConfig());\nexecutionContext = new ExecutionContext(sessionEnv, executionContext);\n}\n/** Set properties. It will rebuild a new {@link ExecutionContext} */\n/** Close resources, e.g. catalogs. */\npublic void close() {\ntry (TemporaryClassLoaderContext ignored = TemporaryClassLoaderContext.of(classLoader)) {\nfor (String name : sessionState.catalogManager.listCatalogs()) {\nsessionState.catalogManager.getCatalog(name).ifPresent(Catalog::close);\n}\n}\ntry {\nclassLoader.close();\n} catch (IOException e) {\nLOG.debug(\"Error while closing class loader.\", e);\n}\n}\npublic static SessionContext create(DefaultContext defaultContext, String sessionId) {\nEnvironment sessionEnv = defaultContext.getDefaultEnv().clone();\nConfiguration configuration = defaultContext.getFlinkConfig().clone();\nURLClassLoader classLoader =\nClientUtils.buildUserCodeClassLoader(\ndefaultContext.getDependencies(),\nCollections.emptyList(),\nSessionContext.class.getClassLoader(),\nconfiguration);\nModuleManager moduleManager = new ModuleManager();\nfinal EnvironmentSettings settings = sessionEnv.getExecution().getEnvironmentSettings();\nCatalogManager catalogManager =\nCatalogManager.newBuilder()\n.classLoader(classLoader)\n.config(configuration)\n.defaultCatalog(\nsettings.getBuiltInCatalogName(),\nnew GenericInMemoryCatalog(\nsettings.getBuiltInCatalogName(),\nsettings.getBuiltInDatabaseName()))\n.build();\nFunctionCatalog functionCatalog =\nnew FunctionCatalog(configuration, catalogManager, moduleManager);\nSessionState sessionState =\nnew SessionState(catalogManager, moduleManager, functionCatalog);\nExecutionContext executionContext =\nnew ExecutionContext(sessionEnv, configuration, classLoader, sessionState);\nLegacyTableEnvironmentInitializer.initializeSessionState(\nexecutionContext.getTableEnvironment(), sessionEnv, classLoader);\nreturn new SessionContext(\ndefaultContext,\nsessionId,\nsessionEnv,\nconfiguration,\nclassLoader,\nsessionState,\nexecutionContext);\n}\n/** session state. 
*/\npublic static class SessionState {\npublic final CatalogManager catalogManager;\npublic final FunctionCatalog functionCatalog;\npublic final ModuleManager moduleManager;\npublic SessionState(\nCatalogManager catalogManager,\nModuleManager moduleManager,\nFunctionCatalog functionCatalog) {\nthis.catalogManager = catalogManager;\nthis.moduleManager = moduleManager;\nthis.functionCatalog = functionCatalog;\n}\n}\n}" + }, + { + "comment": "`this` is unnecessary and can be removed", + "method_body": "public ShardingSphereSavepoint(final String name) throws SQLException {\nif (name == null || name.length() == 0) {\nthrow new SQLException(\"Savepoint name can not be NULL or empty\");\n}\nthis.savepointName = name;\n}", + "target_code": "this.savepointName = name;", + "method_body_after": "public ShardingSphereSavepoint(final String name) throws SQLException {\nif (null == name || 0 == name.length()) {\nthrow new SQLException(\"Savepoint name can not be NULL or empty\");\n}\nsavepointName = name;\n}", + "context_before": "class ShardingSphereSavepoint implements Savepoint {\nprivate final String savepointName;\npublic ShardingSphereSavepoint() {\nthis.savepointName = getUniqueId();\n}\n@Override\npublic int getSavepointId() throws SQLException {\nthrow new SQLException(\"Only named savepoint are supported.\");\n}\n@Override\npublic String getSavepointName() {\nreturn savepointName;\n}\nprivate static String getUniqueId() {\nString uidStr = new UID().toString();\nint uidLength = uidStr.length();\nStringBuilder safeString = new StringBuilder(uidLength + 1);\nsafeString.append('_');\nfor (int i = 0; i < uidLength; i++) {\nchar c = uidStr.charAt(i);\nif (Character.isLetter(c) || Character.isDigit(c)) {\nsafeString.append(c);\n} else {\nsafeString.append('_');\n}\n}\nreturn safeString.toString();\n}\n}", + "context_after": "class ShardingSphereSavepoint implements Savepoint {\nprivate final String savepointName;\npublic ShardingSphereSavepoint() {\nsavepointName = getUniqueId();\n}\n@Override\npublic int getSavepointId() throws SQLException {\nthrow new SQLException(\"Only named savepoint are supported.\");\n}\n@Override\npublic String getSavepointName() {\nreturn savepointName;\n}\nprivate static String getUniqueId() {\nString uidStr = new UID().toString();\nint uidLength = uidStr.length();\nStringBuilder safeString = new StringBuilder(uidLength + 1);\nsafeString.append('_');\nfor (int i = 0; i < uidLength; i++) {\nchar c = uidStr.charAt(i);\nif (Character.isLetter(c) || Character.isDigit(c)) {\nsafeString.append(c);\n} else {\nsafeString.append('_');\n}\n}\nreturn safeString.toString();\n}\n}" + }, + { + "comment": "Nit: don't include the name of the type in the variable name. 
```suggestion Map oneOfFieldLocation = Maps.newHashMap(); ```", + "method_body": "static Schema getSchema(Descriptors.Descriptor descriptor) {\nSet oneOfFields = Sets.newHashSet();\nMap oneOfFieldLocationMap = Maps.newHashMap();\nList fields = Lists.newArrayListWithCapacity(descriptor.getFields().size());\nfor (OneofDescriptor oneofDescriptor : descriptor.getOneofs()) {\nList subFields = Lists.newArrayListWithCapacity(oneofDescriptor.getFieldCount());\nMap enumIds = Maps.newHashMap();\nfor (FieldDescriptor fieldDescriptor : oneofDescriptor.getFields()) {\noneOfFields.add(fieldDescriptor.getNumber());\nFieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor);\nsubFields.add(\nwithFieldNumber(\nField.nullable(fieldDescriptor.getName(), fieldType), fieldDescriptor.getNumber()));\ncheckArgument(\nenumIds.putIfAbsent(fieldDescriptor.getName(), fieldDescriptor.getNumber()) == null);\n}\nFieldType oneOfType = FieldType.logicalType(OneOfType.create(subFields, enumIds));\noneOfFieldLocationMap.put(\noneofDescriptor.getFields().get(0).getNumber(),\nField.of(oneofDescriptor.getName(), oneOfType));\n}\nfor (Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields()) {\nint fieldDescriptorNumber = fieldDescriptor.getNumber();\nif (!oneOfFields.contains(fieldDescriptorNumber)) {\nFieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor);\nfields.add(\nwithFieldNumber(Field.of(fieldDescriptor.getName(), fieldType), fieldDescriptorNumber)\n.withOptions(getFieldOptions(fieldDescriptor)));\n} else if (oneOfFieldLocationMap.containsKey(fieldDescriptorNumber)) {\nField oneOfField = oneOfFieldLocationMap.get(fieldDescriptorNumber);\nif (oneOfField != null) {\nfields.add(oneOfField);\n}\n}\n}\nreturn Schema.builder()\n.addFields(fields)\n.setOptions(\ngetSchemaOptions(descriptor)\n.setOption(\nSCHEMA_OPTION_META_TYPE_NAME, FieldType.STRING, descriptor.getFullName()))\n.build();\n}", + "target_code": "Map oneOfFieldLocationMap = Maps.newHashMap();", + "method_body_after": "static Schema getSchema(Descriptors.Descriptor descriptor) {\n/* OneOfComponentFields refers to the field number in the protobuf where the component subfields\n* are. This is needed to prevent double inclusion of the component fields.*/\nSet oneOfComponentFields = Sets.newHashSet();\n/* OneOfFieldLocation stores the field number of the first field in the OneOf. 
Using this, we can use the location\nof the first field in the OneOf as the location of the entire OneOf.*/\nMap oneOfFieldLocation = Maps.newHashMap();\nList fields = Lists.newArrayListWithCapacity(descriptor.getFields().size());\nfor (OneofDescriptor oneofDescriptor : descriptor.getOneofs()) {\nList subFields = Lists.newArrayListWithCapacity(oneofDescriptor.getFieldCount());\nMap enumIds = Maps.newHashMap();\nfor (FieldDescriptor fieldDescriptor : oneofDescriptor.getFields()) {\noneOfComponentFields.add(fieldDescriptor.getNumber());\nFieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor);\nsubFields.add(\nwithFieldNumber(\nField.nullable(fieldDescriptor.getName(), fieldType), fieldDescriptor.getNumber()));\ncheckArgument(\nenumIds.putIfAbsent(fieldDescriptor.getName(), fieldDescriptor.getNumber()) == null);\n}\nFieldType oneOfType = FieldType.logicalType(OneOfType.create(subFields, enumIds));\noneOfFieldLocation.put(\noneofDescriptor.getFields().get(0).getNumber(),\nField.of(oneofDescriptor.getName(), oneOfType));\n}\nfor (Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields()) {\nint fieldDescriptorNumber = fieldDescriptor.getNumber();\nif (!oneOfComponentFields.contains(fieldDescriptorNumber)) {\nFieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor);\nfields.add(\nwithFieldNumber(Field.of(fieldDescriptor.getName(), fieldType), fieldDescriptorNumber)\n.withOptions(getFieldOptions(fieldDescriptor)));\n/* Note that descriptor.getFields() returns an iterator in the order of the fields in the .proto file, not\n* in field number order. Therefore we can safely insert the OneOfField at the field of its first component.*/\n} else {\nField oneOfField = oneOfFieldLocation.get(fieldDescriptorNumber);\nif (oneOfField != null) {\nfields.add(oneOfField);\n}\n}\n}\nreturn Schema.builder()\n.addFields(fields)\n.setOptions(\ngetSchemaOptions(descriptor)\n.setOption(\nSCHEMA_OPTION_META_TYPE_NAME, FieldType.STRING, descriptor.getFullName()))\n.build();\n}", + "context_before": "class ProtoSchemaTranslator {\npublic static final String SCHEMA_OPTION_META_NUMBER = \"beam:option:proto:meta:number\";\npublic static final String SCHEMA_OPTION_META_TYPE_NAME = \"beam:option:proto:meta:type_name\";\n/** Option prefix for options on messages. */\npublic static final String SCHEMA_OPTION_MESSAGE_PREFIX = \"beam:option:proto:message:\";\n/** Option prefix for options on fields. */\npublic static final String SCHEMA_OPTION_FIELD_PREFIX = \"beam:option:proto:field:\";\n/** Attach a proto field number to a type. */\nstatic Field withFieldNumber(Field field, int number) {\nreturn field.withOptions(\nSchema.Options.builder().setOption(SCHEMA_OPTION_META_NUMBER, FieldType.INT32, number));\n}\n/** Return the proto field number for a type. */\nstatic int getFieldNumber(Field field) {\nreturn field.getOptions().getValue(SCHEMA_OPTION_META_NUMBER);\n}\n/** Return a Beam schema representing a proto class. 
*/\nstatic Schema getSchema(Class clazz) {\nreturn getSchema(ProtobufUtil.getDescriptorForClass(clazz));\n}\nprivate static FieldType beamFieldTypeFromProtoField(\nDescriptors.FieldDescriptor protoFieldDescriptor) {\nFieldType fieldType = null;\nif (protoFieldDescriptor.isMapField()) {\nFieldDescriptor keyFieldDescriptor =\nprotoFieldDescriptor.getMessageType().findFieldByName(\"key\");\nFieldDescriptor valueFieldDescriptor =\nprotoFieldDescriptor.getMessageType().findFieldByName(\"value\");\nfieldType =\nFieldType.map(\nbeamFieldTypeFromProtoField(keyFieldDescriptor).withNullable(false),\nbeamFieldTypeFromProtoField(valueFieldDescriptor).withNullable(false));\n} else if (protoFieldDescriptor.isRepeated()) {\nfieldType =\nFieldType.array(\nbeamFieldTypeFromSingularProtoField(protoFieldDescriptor).withNullable(false));\n} else {\nfieldType = beamFieldTypeFromSingularProtoField(protoFieldDescriptor);\n}\nreturn fieldType;\n}\nprivate static FieldType beamFieldTypeFromSingularProtoField(\nDescriptors.FieldDescriptor protoFieldDescriptor) {\nDescriptors.FieldDescriptor.Type fieldDescriptor = protoFieldDescriptor.getType();\nFieldType fieldType;\nswitch (fieldDescriptor) {\ncase INT32:\nfieldType = FieldType.INT32;\nbreak;\ncase INT64:\nfieldType = FieldType.INT64;\nbreak;\ncase FLOAT:\nfieldType = FieldType.FLOAT;\nbreak;\ncase DOUBLE:\nfieldType = FieldType.DOUBLE;\nbreak;\ncase BOOL:\nfieldType = FieldType.BOOLEAN;\nbreak;\ncase STRING:\nfieldType = FieldType.STRING;\nbreak;\ncase BYTES:\nfieldType = FieldType.BYTES;\nbreak;\ncase UINT32:\nfieldType = FieldType.logicalType(new UInt32());\nbreak;\ncase SINT32:\nfieldType = FieldType.logicalType(new SInt32());\nbreak;\ncase FIXED32:\nfieldType = FieldType.logicalType(new Fixed32());\nbreak;\ncase SFIXED32:\nfieldType = FieldType.logicalType(new SFixed32());\nbreak;\ncase UINT64:\nfieldType = FieldType.logicalType(new UInt64());\nbreak;\ncase SINT64:\nfieldType = FieldType.logicalType(new SInt64());\nbreak;\ncase FIXED64:\nfieldType = FieldType.logicalType(new Fixed64());\nbreak;\ncase SFIXED64:\nfieldType = FieldType.logicalType(new SFixed64());\nbreak;\ncase ENUM:\nMap enumValues = Maps.newHashMap();\nfor (EnumValueDescriptor enumValue : protoFieldDescriptor.getEnumType().getValues()) {\nif (enumValues.putIfAbsent(enumValue.getName(), enumValue.getNumber()) != null) {\nthrow new RuntimeException(\"Aliased enumerations not currently supported.\");\n}\n}\nfieldType = FieldType.logicalType(EnumerationType.create(enumValues));\nbreak;\ncase MESSAGE:\ncase GROUP:\nString fullName = protoFieldDescriptor.getMessageType().getFullName();\nswitch (fullName) {\ncase \"google.protobuf.Timestamp\":\nfieldType = FieldType.logicalType(new NanosInstant());\nbreak;\ncase \"google.protobuf.Int32Value\":\ncase \"google.protobuf.UInt32Value\":\ncase \"google.protobuf.Int64Value\":\ncase \"google.protobuf.UInt64Value\":\ncase \"google.protobuf.FloatValue\":\ncase \"google.protobuf.DoubleValue\":\ncase \"google.protobuf.StringValue\":\ncase \"google.protobuf.BoolValue\":\ncase \"google.protobuf.BytesValue\":\nfieldType =\nbeamFieldTypeFromSingularProtoField(\nprotoFieldDescriptor.getMessageType().findFieldByNumber(1));\nbreak;\ncase \"google.protobuf.Duration\":\nfieldType = FieldType.logicalType(new NanosDuration());\nbreak;\ncase \"google.protobuf.Any\":\nthrow new RuntimeException(\"Any not yet supported\");\ndefault:\nfieldType = FieldType.row(getSchema(protoFieldDescriptor.getMessageType()));\n}\nif (protoFieldDescriptor.isOptional()) {\nfieldType = 
fieldType.withNullable(true);\n}\nbreak;\ndefault:\nthrow new RuntimeException(\"Field type not matched.\");\n}\nreturn fieldType;\n}\nprivate static Schema.Options.Builder getFieldOptions(FieldDescriptor fieldDescriptor) {\nreturn getOptions(SCHEMA_OPTION_FIELD_PREFIX, fieldDescriptor.getOptions().getAllFields());\n}\nprivate static Schema.Options.Builder getSchemaOptions(Descriptors.Descriptor descriptor) {\nreturn getOptions(SCHEMA_OPTION_MESSAGE_PREFIX, descriptor.getOptions().getAllFields());\n}\nprivate static Schema.Options.Builder getOptions(\nString prefix, Map allFields) {\nSchema.Options.Builder optionsBuilder = Schema.Options.builder();\nfor (Map.Entry entry : allFields.entrySet()) {\nFieldDescriptor fieldDescriptor = entry.getKey();\nFieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor);\nswitch (fieldType.getTypeName()) {\ncase BYTE:\ncase BYTES:\ncase INT16:\ncase INT32:\ncase INT64:\ncase DECIMAL:\ncase FLOAT:\ncase DOUBLE:\ncase STRING:\ncase BOOLEAN:\ncase LOGICAL_TYPE:\ncase ROW:\ncase ARRAY:\ncase ITERABLE:\nField field = Field.of(\"OPTION\", fieldType);\nProtoDynamicMessageSchema schema = ProtoDynamicMessageSchema.forSchema(Schema.of(field));\noptionsBuilder.setOption(\nprefix + fieldDescriptor.getFullName(),\nfieldType,\nschema.createConverter(field).convertFromProtoValue(entry.getValue()));\nbreak;\ncase MAP:\ncase DATETIME:\ndefault:\nthrow new IllegalStateException(\"These datatypes are not possible in extensions.\");\n}\n}\nreturn optionsBuilder;\n}\n}", + "context_after": "class ProtoSchemaTranslator {\npublic static final String SCHEMA_OPTION_META_NUMBER = \"beam:option:proto:meta:number\";\npublic static final String SCHEMA_OPTION_META_TYPE_NAME = \"beam:option:proto:meta:type_name\";\n/** Option prefix for options on messages. */\npublic static final String SCHEMA_OPTION_MESSAGE_PREFIX = \"beam:option:proto:message:\";\n/** Option prefix for options on fields. */\npublic static final String SCHEMA_OPTION_FIELD_PREFIX = \"beam:option:proto:field:\";\n/** Attach a proto field number to a type. */\nstatic Field withFieldNumber(Field field, int number) {\nreturn field.withOptions(\nSchema.Options.builder().setOption(SCHEMA_OPTION_META_NUMBER, FieldType.INT32, number));\n}\n/** Return the proto field number for a type. */\nstatic int getFieldNumber(Field field) {\nreturn field.getOptions().getValue(SCHEMA_OPTION_META_NUMBER);\n}\n/** Return a Beam schema representing a proto class. 
*/\nstatic Schema getSchema(Class clazz) {\nreturn getSchema(ProtobufUtil.getDescriptorForClass(clazz));\n}\nprivate static FieldType beamFieldTypeFromProtoField(\nDescriptors.FieldDescriptor protoFieldDescriptor) {\nFieldType fieldType = null;\nif (protoFieldDescriptor.isMapField()) {\nFieldDescriptor keyFieldDescriptor =\nprotoFieldDescriptor.getMessageType().findFieldByName(\"key\");\nFieldDescriptor valueFieldDescriptor =\nprotoFieldDescriptor.getMessageType().findFieldByName(\"value\");\nfieldType =\nFieldType.map(\nbeamFieldTypeFromProtoField(keyFieldDescriptor).withNullable(false),\nbeamFieldTypeFromProtoField(valueFieldDescriptor).withNullable(false));\n} else if (protoFieldDescriptor.isRepeated()) {\nfieldType =\nFieldType.array(\nbeamFieldTypeFromSingularProtoField(protoFieldDescriptor).withNullable(false));\n} else {\nfieldType = beamFieldTypeFromSingularProtoField(protoFieldDescriptor);\n}\nreturn fieldType;\n}\nprivate static FieldType beamFieldTypeFromSingularProtoField(\nDescriptors.FieldDescriptor protoFieldDescriptor) {\nDescriptors.FieldDescriptor.Type fieldDescriptor = protoFieldDescriptor.getType();\nFieldType fieldType;\nswitch (fieldDescriptor) {\ncase INT32:\nfieldType = FieldType.INT32;\nbreak;\ncase INT64:\nfieldType = FieldType.INT64;\nbreak;\ncase FLOAT:\nfieldType = FieldType.FLOAT;\nbreak;\ncase DOUBLE:\nfieldType = FieldType.DOUBLE;\nbreak;\ncase BOOL:\nfieldType = FieldType.BOOLEAN;\nbreak;\ncase STRING:\nfieldType = FieldType.STRING;\nbreak;\ncase BYTES:\nfieldType = FieldType.BYTES;\nbreak;\ncase UINT32:\nfieldType = FieldType.logicalType(new UInt32());\nbreak;\ncase SINT32:\nfieldType = FieldType.logicalType(new SInt32());\nbreak;\ncase FIXED32:\nfieldType = FieldType.logicalType(new Fixed32());\nbreak;\ncase SFIXED32:\nfieldType = FieldType.logicalType(new SFixed32());\nbreak;\ncase UINT64:\nfieldType = FieldType.logicalType(new UInt64());\nbreak;\ncase SINT64:\nfieldType = FieldType.logicalType(new SInt64());\nbreak;\ncase FIXED64:\nfieldType = FieldType.logicalType(new Fixed64());\nbreak;\ncase SFIXED64:\nfieldType = FieldType.logicalType(new SFixed64());\nbreak;\ncase ENUM:\nMap enumValues = Maps.newHashMap();\nfor (EnumValueDescriptor enumValue : protoFieldDescriptor.getEnumType().getValues()) {\nif (enumValues.putIfAbsent(enumValue.getName(), enumValue.getNumber()) != null) {\nthrow new RuntimeException(\"Aliased enumerations not currently supported.\");\n}\n}\nfieldType = FieldType.logicalType(EnumerationType.create(enumValues));\nbreak;\ncase MESSAGE:\ncase GROUP:\nString fullName = protoFieldDescriptor.getMessageType().getFullName();\nswitch (fullName) {\ncase \"google.protobuf.Timestamp\":\nfieldType = FieldType.logicalType(new NanosInstant());\nbreak;\ncase \"google.protobuf.Int32Value\":\ncase \"google.protobuf.UInt32Value\":\ncase \"google.protobuf.Int64Value\":\ncase \"google.protobuf.UInt64Value\":\ncase \"google.protobuf.FloatValue\":\ncase \"google.protobuf.DoubleValue\":\ncase \"google.protobuf.StringValue\":\ncase \"google.protobuf.BoolValue\":\ncase \"google.protobuf.BytesValue\":\nfieldType =\nbeamFieldTypeFromSingularProtoField(\nprotoFieldDescriptor.getMessageType().findFieldByNumber(1));\nbreak;\ncase \"google.protobuf.Duration\":\nfieldType = FieldType.logicalType(new NanosDuration());\nbreak;\ncase \"google.protobuf.Any\":\nthrow new RuntimeException(\"Any not yet supported\");\ndefault:\nfieldType = FieldType.row(getSchema(protoFieldDescriptor.getMessageType()));\n}\nif (protoFieldDescriptor.isOptional()) {\nfieldType = 
fieldType.withNullable(true);\n}\nbreak;\ndefault:\nthrow new RuntimeException(\"Field type not matched.\");\n}\nreturn fieldType;\n}\nprivate static Schema.Options.Builder getFieldOptions(FieldDescriptor fieldDescriptor) {\nreturn getOptions(SCHEMA_OPTION_FIELD_PREFIX, fieldDescriptor.getOptions().getAllFields());\n}\nprivate static Schema.Options.Builder getSchemaOptions(Descriptors.Descriptor descriptor) {\nreturn getOptions(SCHEMA_OPTION_MESSAGE_PREFIX, descriptor.getOptions().getAllFields());\n}\nprivate static Schema.Options.Builder getOptions(\nString prefix, Map allFields) {\nSchema.Options.Builder optionsBuilder = Schema.Options.builder();\nfor (Map.Entry entry : allFields.entrySet()) {\nFieldDescriptor fieldDescriptor = entry.getKey();\nFieldType fieldType = beamFieldTypeFromProtoField(fieldDescriptor);\nswitch (fieldType.getTypeName()) {\ncase BYTE:\ncase BYTES:\ncase INT16:\ncase INT32:\ncase INT64:\ncase DECIMAL:\ncase FLOAT:\ncase DOUBLE:\ncase STRING:\ncase BOOLEAN:\ncase LOGICAL_TYPE:\ncase ROW:\ncase ARRAY:\ncase ITERABLE:\nField field = Field.of(\"OPTION\", fieldType);\nProtoDynamicMessageSchema schema = ProtoDynamicMessageSchema.forSchema(Schema.of(field));\noptionsBuilder.setOption(\nprefix + fieldDescriptor.getFullName(),\nfieldType,\nschema.createConverter(field).convertFromProtoValue(entry.getValue()));\nbreak;\ncase MAP:\ncase DATETIME:\ndefault:\nthrow new IllegalStateException(\"These datatypes are not possible in extensions.\");\n}\n}\nreturn optionsBuilder;\n}\n}" + }, + { + "comment": "Ah, endpoints.", + "method_body": "public void testApplicationApi() {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\ntester.assertResponse(request(\"/application/v4/\", GET).userIdentity(USER_ID),\nnew File(\"root.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", PUT)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\"),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/user\", GET).userIdentity(USER_ID),\nnew File(\"user.json\"));\ntester.assertResponse(request(\"/application/v4/user\", PUT).userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Created user 'by-myuser'\\\"}\");\ntester.assertResponse(request(\"/application/v4/user\", GET).userIdentity(USER_ID),\nnew File(\"user-which-exists.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/by-myuser\", DELETE).userIdentity(USER_ID),\n\"{\\\"tenant\\\":\\\"by-myuser\\\",\\\"type\\\":\\\"USER\\\",\\\"applications\\\":[]}\");\ntester.assertResponse(request(\"/application/v4/tenant/\", GET).userIdentity(USER_ID),\nnew File(\"tenant-list.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/cost\", GET).userIdentity(USER_ID).oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"months\\\":[]}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/cost/2018-01\", GET).userIdentity(USER_ID).oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"month\\\":\\\"2018-01\\\",\\\"items\\\":[]}\");\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN_2, 
USER_ID);\nregisterContact(1234);\ntester.assertResponse(request(\"/application/v4/tenant/tenant2\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.data(\"{\\\"athensDomain\\\":\\\"domain2\\\", \\\"property\\\":\\\"property2\\\", \\\"propertyId\\\":\\\"1234\\\"}\"),\nnew File(\"tenant-without-applications-with-id.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2\", PUT)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.data(\"{\\\"athensDomain\\\":\\\"domain2\\\", \\\"property\\\":\\\"property2\\\", \\\"propertyId\\\":\\\"1234\\\"}\"),\nnew File(\"tenant-without-applications-with-id.json\"));\nupdateContactInformation();\ntester.assertResponse(request(\"/application/v4/tenant/tenant2\", GET).userIdentity(USER_ID),\nnew File(\"tenant-with-contact-info.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", GET).userIdentity(USER_ID),\nnew File(\"tenant-with-application.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/\", GET).userIdentity(USER_ID),\nnew File(\"instance-list.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/\", GET).userIdentity(USER_ID),\nnew File(\"instance-list.json\"));\naddUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));\nApplicationId id = ApplicationId.from(\"tenant1\", \"application1\", \"instance1\");\nvar app1 = deploymentTester.newDeploymentContext(id);\nMultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploy/dev-us-east-1/\", POST)\n.data(entity)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Deployment started in run 1 of dev-us-east-1 for tenant1.application1.instance1. 
This may take about 15 minutes the first time.\\\",\\\"run\\\":1}\");\napp1.runJob(JobType.devUsEast1);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/otheruser/deploy/dev-us-east-1\", POST)\n.userIdentity(OTHER_USER_ID)\n.data(createApplicationDeployData(applicationPackageInstance1, false)),\nnew File(\"deployment-job-accepted-2.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/otheruser/environment/dev/region/us-east-1\", DELETE)\n.userIdentity(OTHER_USER_ID),\n\"{\\\"message\\\":\\\"Deactivated tenant1.application1.otheruser in dev.us-east-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/otheruser\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.otheruser\\\"}\");\naddScrewdriverUserToDeployRole(SCREWDRIVER_ID,\nATHENZ_TENANT_DOMAIN,\nid.application());\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.data(createApplicationSubmissionData(applicationPackageInstance1, 123)),\n\"{\\\"message\\\":\\\"Application package version: 1.0.1-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\napp1.runJob(JobType.systemTest).runJob(JobType.stagingTest).runJob(JobType.productionUsCentral1);\nentity = createApplicationDeployData(Optional.empty(),\nOptional.of(ApplicationVersion.from(DeploymentContext.defaultSourceRevision, 666)),\ntrue);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/\", POST)\n.data(entity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"No application package found for tenant1.application1 with version 1.0.666-commit1\\\"}\",\n400);\nentity = createApplicationDeployData(Optional.empty(),\nOptional.of(ApplicationVersion.from(DeploymentContext.defaultSourceRevision, 1)),\ntrue);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/\", POST)\n.data(entity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\nnew File(\"deploy-result.json\"));\nentity = createApplicationDeployData(Optional.empty(), true);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/\", POST)\n.data(entity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\nnew File(\"deploy-result.json\"));\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.globalServiceId(\"foo\")\n.environment(Environment.prod)\n.region(\"us-west-1\")\n.region(\"us-east-3\")\n.allow(ValidationId.globalEndpointChange)\n.build();\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/instance/default\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference-2.json\"));\nApplicationId id2 = ApplicationId.from(\"tenant2\", \"application2\", \"instance1\");\nvar app2 = 
deploymentTester.newDeploymentContext(id2);\naddScrewdriverUserToDeployRole(SCREWDRIVER_ID,\nATHENZ_TENANT_DOMAIN_2,\nid2.application());\ndeploymentTester.applications().deploymentTrigger().triggerChange(TenantAndApplicationId.from(id2), Change.of(Version.fromString(\"7.0\")));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.data(createApplicationSubmissionData(applicationPackage, 1000)),\n\"{\\\"message\\\":\\\"Application package version: 1.0.1-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\ndeploymentTester.triggerJobs();\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", GET)\n.userIdentity(USER_ID),\nnew File(\"application2.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", GET)\n.screwdriverIdentity(SCREWDRIVER_ID),\nnew File(\"application2.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", PATCH)\n.userIdentity(USER_ID)\n.data(\"{\\\"majorVersion\\\":7}\"),\n\"{\\\"message\\\":\\\"Set major version to 7\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/key\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"key\\\":\\\"\" + pemPublicKey + \"\\\"}\"),\n\"{\\\"keys\\\":[\\\"-----BEGIN PUBLIC KEY-----\\\\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuKVFA8dXk43kVfYKzkUqhEY2rDT9\\\\nz/4jKSTHwbYR8wdsOSrJGVEUPbS2nguIJ64OJH7gFnxM6sxUVj+Nm2HlXw==\\\\n-----END PUBLIC KEY-----\\\\n\\\"]}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/instance/default\", PATCH)\n.userIdentity(USER_ID)\n.data(\"{\\\"pemDeployKey\\\":\\\"\" + pemPublicKey + \"\\\"}\"),\n\"{\\\"message\\\":\\\"Added deploy key \" + quotedPemPublicKey + \"\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", GET)\n.userIdentity(USER_ID),\nnew File(\"application2-with-patches.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", PATCH)\n.userIdentity(USER_ID)\n.data(\"{\\\"majorVersion\\\":null}\"),\n\"{\\\"message\\\":\\\"Set major version to empty\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/key\", DELETE)\n.userIdentity(USER_ID)\n.data(\"{\\\"key\\\":\\\"\" + pemPublicKey + \"\\\"}\"),\n\"{\\\"keys\\\":[]}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", GET)\n.userIdentity(USER_ID),\nnew File(\"application2.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/instance/default\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant2.application2.default\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted application tenant2.application2\\\"}\");\ndeploymentTester.upgrader().overrideConfidence(Version.fromString(\"6.1\"), VespaVersion.Confidence.broken);\ndeploymentTester.controllerTester().computeVersionStatus();\nsetDeploymentMaintainedInfo();\nsetZoneInRotation(\"rotation-fqdn-1\", ZoneId.from(\"prod\", 
\"us-central-1\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", GET)\n.userIdentity(USER_ID),\nnew File(\"instance.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1\", GET)\n.userIdentity(USER_ID),\nnew File(\"deployment.json\"));\naddIssues(deploymentTester, TenantAndApplicationId.from(\"tenant1\", \"application1\"));\ntester.assertResponse(request(\"/application/v4/\", GET)\n.userIdentity(USER_ID)\n.recursive(\"deployment\"),\nnew File(\"recursive-root.json\"));\ntester.assertResponse(request(\"/application/v4/\", GET)\n.userIdentity(USER_ID)\n.recursive(\"tenant\"),\nnew File(\"recursive-until-tenant-root.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/\", GET)\n.userIdentity(USER_ID)\n.recursive(\"true\"),\nnew File(\"tenant1-recursive.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", GET)\n.userIdentity(USER_ID)\n.recursive(\"true\"),\nnew File(\"instance1-recursive.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/nodes\", GET)\n.userIdentity(USER_ID),\nnew File(\"application-nodes.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application1/environment/dev/region/us-central-1/instance/default/logs?from=1233&to=3214\", GET)\n.userIdentity(USER_ID),\n\"INFO - All good\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", DELETE)\n.userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"message\\\":\\\"Changed deployment from 'application change to 1.0.1-commit1' to 'no change' for application 'tenant1.application1'\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", DELETE)\n.userIdentity(USER_ID)\n.data(\"{\\\"cancel\\\":\\\"all\\\"}\"),\n\"{\\\"message\\\":\\\"No deployment in progress for application 'tenant1.application1' at this time\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\", POST)\n.userIdentity(USER_ID)\n.data(\"6.1.0\"),\n\"{\\\"message\\\":\\\"Triggered pin to 6.1 for tenant1.application1\\\"}\");\nassertTrue(\"Action is logged to audit log\",\ntester.controller().auditLogger().readLog().entries().stream()\n.anyMatch(entry -> entry.resource().equals(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\")));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", GET)\n.userIdentity(USER_ID), \"{\\\"platform\\\":\\\"6.1\\\",\\\"pinned\\\":true}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\", GET)\n.userIdentity(USER_ID), \"{\\\"platform\\\":\\\"6.1\\\",\\\"pinned\\\":true}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\", DELETE)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Changed deployment from 'pin to 6.1' to 'upgrade to 6.1' for application 
'tenant1.application1'\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", GET)\n.userIdentity(USER_ID), \"{\\\"platform\\\":\\\"6.1\\\",\\\"pinned\\\":false}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\", POST)\n.userIdentity(USER_ID)\n.data(\"6.1\"),\n\"{\\\"message\\\":\\\"Triggered pin to 6.1 for tenant1.application1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", GET)\n.userIdentity(USER_ID), \"{\\\"platform\\\":\\\"6.1\\\",\\\"pinned\\\":true}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/platform\", DELETE)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Changed deployment from 'pin to 6.1' to 'pin to current platform' for application 'tenant1.application1'\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", GET)\n.userIdentity(USER_ID), \"{\\\"pinned\\\":true}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\", DELETE)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Changed deployment from 'pin to current platform' to 'no change' for application 'tenant1.application1'\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", GET)\n.userIdentity(USER_ID), \"{}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job/production-us-west-1/pause\", POST)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"production-us-west-1 for tenant1.application1.instance1 paused for \" + DeploymentTrigger.maxPause + \"\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job/production-us-west-1\", POST)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Triggered production-us-west-1 for tenant1.application1.instance1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart\", POST)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in prod.us-central-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in prod.us-central-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/staging/region/us-central-1/instance/instance1/restart\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in staging.us-central-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/test/region/us-central-1/instance/instance1/restart\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in 
test.us-central-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-central-1/instance/instance1/restart\", POST)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in dev.us-central-1\\\"}\");\ndeploymentTester.configServer().nodeRepository().addFixedNodes(ZoneId.from(\"prod\", \"us-central-1\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart?hostname=hostA\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in prod.us-central-1\\\"}\", 200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/suspended\", GET)\n.userIdentity(USER_ID),\nnew File(\"suspended.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/service\", GET)\n.userIdentity(USER_ID),\nnew File(\"services.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/service/storagenode-awe3slno6mmq2fye191y324jl/state/v1/\", GET)\n.userIdentity(USER_ID),\nnew File(\"service.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"delete-with-active-deployments.json\"), 400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-east-1/instance/instance1\", DELETE)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Deactivated tenant1.application1.instance1 in dev.us-east-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1\", DELETE)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Deactivated tenant1.application1.instance1 in prod.us-central-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1\", DELETE)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Deactivated tenant1.application1.instance1 in prod.us-central-1\\\"}\");\ntester.controller().applications().deploy(ApplicationId.from(\"tenant1\", \"application1\", \"default\"),\nZoneId.from(\"prod\", \"us-central-1\"),\nOptional.of(applicationPackageDefault),\nnew DeployOptions(true, Optional.empty(), false, false));\ntester.controller().applications().deploy(ApplicationId.from(\"tenant1\", \"application1\", \"my-user\"),\nZoneId.from(\"dev\", \"us-east-1\"),\nOptional.of(applicationPackageDefault),\nnew DeployOptions(false, Optional.empty(), false, false));\ntester.serviceRegistry().routingGeneratorMock().putEndpoints(new DeploymentId(ApplicationId.from(\"tenant1\", \"application1\", \"default\"), ZoneId.from(\"prod\", \"us-central-1\")),\nList.of(new RoutingEndpoint(\"https:\ntester.serviceRegistry().routingGeneratorMock().putEndpoints(new DeploymentId(ApplicationId.from(\"tenant1\", \"application1\", \"my-user\"), ZoneId.from(\"dev\", \"us-east-1\")),\nList.of(new 
RoutingEndpoint(\"https:\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/my-user/job/dev-us-east-1/test-config\", GET)\n.userIdentity(USER_ID),\nnew File(\"test-config-dev.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/my-user/job/production-us-central-1/test-config\", GET)\n.userIdentity(USER_ID),\nnew File(\"test-config.json\"));\ntester.controller().applications().deactivate(ApplicationId.from(\"tenant1\", \"application1\", \"default\"),\nZoneId.from(\"prod\", \"us-central-1\"));\ntester.controller().applications().deactivate(ApplicationId.from(\"tenant1\", \"application1\", \"my-user\"),\nZoneId.from(\"dev\", \"us-east-1\"));\nApplicationPackage packageWithServiceForWrongDomain = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.environment(Environment.prod)\n.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(ATHENZ_TENANT_DOMAIN_2.getName()), AthenzService.from(\"service\"))\n.region(\"us-west-1\")\n.build();\nallowLaunchOfService(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN_2, \"service\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.data(createApplicationSubmissionData(packageWithServiceForWrongDomain, 123)),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Athenz domain in deployment.xml: [domain2] must match tenant domain: [domain1]\\\"}\", 400);\nApplicationPackage packageWithService = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.globalServiceId(\"foo\")\n.environment(Environment.prod)\n.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(ATHENZ_TENANT_DOMAIN.getName()), AthenzService.from(\"service\"))\n.region(\"us-central-1\")\n.parallel(\"us-west-1\", \"us-east-3\")\n.build();\nallowLaunchOfService(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, \"service\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.data(createApplicationSubmissionData(packageWithService, 123)),\n\"{\\\"message\\\":\\\"Application package version: 1.0.2-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/package\", GET).userIdentity(HOSTED_VESPA_OPERATOR),\n(response) -> {\nassertEquals(\"attachment; filename=\\\"tenant1.application1-build2.zip\\\"\", response.getHeaders().getFirst(\"Content-Disposition\"));\nassertArrayEquals(packageWithService.zippedContent(), response.getBody());\n},\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/package?build=1\", GET).userIdentity(HOSTED_VESPA_OPERATOR),\n(response) -> {\nassertEquals(\"attachment; filename=\\\"tenant1.application1-build1.zip\\\"\", response.getHeaders().getFirst(\"Content-Disposition\"));\nassertArrayEquals(applicationPackageInstance1.zippedContent(), response.getBody());\n},\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.header(\"X-Content-Hash\", \"not/the/right/hash\")\n.data(createApplicationSubmissionData(packageWithService, 
123)),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Value of X-Content-Hash header does not match computed content hash\\\"}\", 400);\nMultiPartStreamer streamer = createApplicationSubmissionData(packageWithService, 123);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.header(\"X-Content-Hash\", Base64.getEncoder().encodeToString(Signatures.sha256Digest(streamer::data)))\n.data(streamer),\n\"{\\\"message\\\":\\\"Application package version: 1.0.3-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\nApplicationPackage multiInstanceSpec = new ApplicationPackageBuilder()\n.instances(\"instance1,instance2\")\n.systemTest()\n.stagingTest()\n.environment(Environment.prod)\n.region(\"us-central-1\")\n.parallel(\"us-west-1\", \"us-east-3\")\n.endpoint(\"default\", \"foo\", \"us-central-1\", \"us-west-1\", \"us-east-3\")\n.build();\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.data(createApplicationSubmissionData(multiInstanceSpec, 123)),\n\"{\\\"message\\\":\\\"Application package version: 1.0.4-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\nassertEquals(2, tester.controller().applications().deploymentTrigger().triggerReadyJobs());\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job\", GET)\n.userIdentity(USER_ID),\nnew File(\"jobs.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job/system-test\", GET)\n.userIdentity(USER_ID),\nnew File(\"system-test-job.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job/system-test/run/1\", GET)\n.userIdentity(USER_ID),\nnew File(\"system-test-details.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job/staging-test\", DELETE)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Aborting run 2 of staging-test for tenant1.application1.instance1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", DELETE)\n.userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"message\\\":\\\"Unregistered 'tenant1.application1' from internal deployment pipeline.\\\"}\");\nbyte[] data = new byte[0];\ntester.assertResponse(request(\"/application/v4/user?user=new_user&domain=by\", PUT)\n.data(data)\n.userIdentity(new UserId(\"new_user\")),\nnew File(\"create-user-response.json\"));\ntester.assertResponse(request(\"/application/v4/user\", GET)\n.userIdentity(new UserId(\"other_user\")),\n\"{\\\"user\\\":\\\"other_user\\\",\\\"tenants\\\":[],\\\"tenantExists\\\":false}\");\ntester.assertResponse(request(\"/application/v4/\", Request.Method.OPTIONS)\n.userIdentity(USER_ID),\n\"\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/default\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance 
tenant1.application1.default\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/my-user\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.my-user\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.instance1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance2\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.instance2\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", DELETE).userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\n}", + "target_code": ".region(\"us-central-1\")", + "method_body_after": "public void testApplicationApi() {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\ntester.assertResponse(request(\"/application/v4/\", GET).userIdentity(USER_ID),\nnew File(\"root.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", PUT)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\"),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/user\", GET).userIdentity(USER_ID),\nnew File(\"user.json\"));\ntester.assertResponse(request(\"/application/v4/user\", PUT).userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Created user 'by-myuser'\\\"}\");\ntester.assertResponse(request(\"/application/v4/user\", GET).userIdentity(USER_ID),\nnew File(\"user-which-exists.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/by-myuser\", DELETE).userIdentity(USER_ID),\n\"{\\\"tenant\\\":\\\"by-myuser\\\",\\\"type\\\":\\\"USER\\\",\\\"applications\\\":[]}\");\ntester.assertResponse(request(\"/application/v4/tenant/\", GET).userIdentity(USER_ID),\nnew File(\"tenant-list.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/cost\", GET).userIdentity(USER_ID).oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"months\\\":[]}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/cost/2018-01\", GET).userIdentity(USER_ID).oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"month\\\":\\\"2018-01\\\",\\\"items\\\":[]}\");\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN_2, USER_ID);\nregisterContact(1234);\ntester.assertResponse(request(\"/application/v4/tenant/tenant2\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.data(\"{\\\"athensDomain\\\":\\\"domain2\\\", \\\"property\\\":\\\"property2\\\", \\\"propertyId\\\":\\\"1234\\\"}\"),\nnew File(\"tenant-without-applications-with-id.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2\", 
PUT)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.data(\"{\\\"athensDomain\\\":\\\"domain2\\\", \\\"property\\\":\\\"property2\\\", \\\"propertyId\\\":\\\"1234\\\"}\"),\nnew File(\"tenant-without-applications-with-id.json\"));\nupdateContactInformation();\ntester.assertResponse(request(\"/application/v4/tenant/tenant2\", GET).userIdentity(USER_ID),\nnew File(\"tenant-with-contact-info.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", GET).userIdentity(USER_ID),\nnew File(\"tenant-with-application.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/\", GET).userIdentity(USER_ID),\nnew File(\"instance-list.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/\", GET).userIdentity(USER_ID),\nnew File(\"instance-list.json\"));\naddUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));\nApplicationId id = ApplicationId.from(\"tenant1\", \"application1\", \"instance1\");\nvar app1 = deploymentTester.newDeploymentContext(id);\nMultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploy/dev-us-east-1/\", POST)\n.data(entity)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Deployment started in run 1 of dev-us-east-1 for tenant1.application1.instance1. This may take about 15 minutes the first time.\\\",\\\"run\\\":1}\");\napp1.runJob(JobType.devUsEast1);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/otheruser/deploy/dev-us-east-1\", POST)\n.userIdentity(OTHER_USER_ID)\n.data(createApplicationDeployData(applicationPackageInstance1, false)),\nnew File(\"deployment-job-accepted-2.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/otheruser/environment/dev/region/us-east-1\", DELETE)\n.userIdentity(OTHER_USER_ID),\n\"{\\\"message\\\":\\\"Deactivated tenant1.application1.otheruser in dev.us-east-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/otheruser\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.otheruser\\\"}\");\naddScrewdriverUserToDeployRole(SCREWDRIVER_ID,\nATHENZ_TENANT_DOMAIN,\nid.application());\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.data(createApplicationSubmissionData(applicationPackageInstance1, 123)),\n\"{\\\"message\\\":\\\"Application package version: 1.0.1-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\napp1.runJob(JobType.systemTest).runJob(JobType.stagingTest).runJob(JobType.productionUsCentral1);\nentity = createApplicationDeployData(Optional.empty(),\nOptional.of(ApplicationVersion.from(DeploymentContext.defaultSourceRevision, 
666)),\ntrue);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/\", POST)\n.data(entity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"No application package found for tenant1.application1 with version 1.0.666-commit1\\\"}\",\n400);\nentity = createApplicationDeployData(Optional.empty(),\nOptional.of(ApplicationVersion.from(DeploymentContext.defaultSourceRevision, 1)),\ntrue);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/\", POST)\n.data(entity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\nnew File(\"deploy-result.json\"));\nentity = createApplicationDeployData(Optional.empty(), true);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/\", POST)\n.data(entity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\nnew File(\"deploy-result.json\"));\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.globalServiceId(\"foo\")\n.environment(Environment.prod)\n.region(\"us-west-1\")\n.region(\"us-east-3\")\n.allow(ValidationId.globalEndpointChange)\n.build();\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/instance/default\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference-2.json\"));\nApplicationId id2 = ApplicationId.from(\"tenant2\", \"application2\", \"instance1\");\nvar app2 = deploymentTester.newDeploymentContext(id2);\naddScrewdriverUserToDeployRole(SCREWDRIVER_ID,\nATHENZ_TENANT_DOMAIN_2,\nid2.application());\ndeploymentTester.applications().deploymentTrigger().triggerChange(TenantAndApplicationId.from(id2), Change.of(Version.fromString(\"7.0\")));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.data(createApplicationSubmissionData(applicationPackage, 1000)),\n\"{\\\"message\\\":\\\"Application package version: 1.0.1-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\ndeploymentTester.triggerJobs();\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", GET)\n.userIdentity(USER_ID),\nnew File(\"application2.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", GET)\n.screwdriverIdentity(SCREWDRIVER_ID),\nnew File(\"application2.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", PATCH)\n.userIdentity(USER_ID)\n.data(\"{\\\"majorVersion\\\":7}\"),\n\"{\\\"message\\\":\\\"Set major version to 7\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/key\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"key\\\":\\\"\" + pemPublicKey + \"\\\"}\"),\n\"{\\\"keys\\\":[\\\"-----BEGIN PUBLIC KEY-----\\\\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuKVFA8dXk43kVfYKzkUqhEY2rDT9\\\\nz/4jKSTHwbYR8wdsOSrJGVEUPbS2nguIJ64OJH7gFnxM6sxUVj+Nm2HlXw==\\\\n-----END PUBLIC KEY-----\\\\n\\\"]}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/instance/default\", PATCH)\n.userIdentity(USER_ID)\n.data(\"{\\\"pemDeployKey\\\":\\\"\" + 
pemPublicKey + \"\\\"}\"),\n\"{\\\"message\\\":\\\"Added deploy key \" + quotedPemPublicKey + \"\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", GET)\n.userIdentity(USER_ID),\nnew File(\"application2-with-patches.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", PATCH)\n.userIdentity(USER_ID)\n.data(\"{\\\"majorVersion\\\":null}\"),\n\"{\\\"message\\\":\\\"Set major version to empty\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/key\", DELETE)\n.userIdentity(USER_ID)\n.data(\"{\\\"key\\\":\\\"\" + pemPublicKey + \"\\\"}\"),\n\"{\\\"keys\\\":[]}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", GET)\n.userIdentity(USER_ID),\nnew File(\"application2.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/instance/default\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant2.application2.default\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted application tenant2.application2\\\"}\");\ndeploymentTester.upgrader().overrideConfidence(Version.fromString(\"6.1\"), VespaVersion.Confidence.broken);\ndeploymentTester.controllerTester().computeVersionStatus();\nsetDeploymentMaintainedInfo();\nsetZoneInRotation(\"rotation-fqdn-1\", ZoneId.from(\"prod\", \"us-central-1\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", GET)\n.userIdentity(USER_ID),\nnew File(\"instance.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1\", GET)\n.userIdentity(USER_ID),\nnew File(\"deployment.json\"));\naddIssues(deploymentTester, TenantAndApplicationId.from(\"tenant1\", \"application1\"));\ntester.assertResponse(request(\"/application/v4/\", GET)\n.userIdentity(USER_ID)\n.recursive(\"deployment\"),\nnew File(\"recursive-root.json\"));\ntester.assertResponse(request(\"/application/v4/\", GET)\n.userIdentity(USER_ID)\n.recursive(\"tenant\"),\nnew File(\"recursive-until-tenant-root.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/\", GET)\n.userIdentity(USER_ID)\n.recursive(\"true\"),\nnew File(\"tenant1-recursive.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", GET)\n.userIdentity(USER_ID)\n.recursive(\"true\"),\nnew File(\"instance1-recursive.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/nodes\", GET)\n.userIdentity(USER_ID),\nnew File(\"application-nodes.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application1/environment/dev/region/us-central-1/instance/default/logs?from=1233&to=3214\", GET)\n.userIdentity(USER_ID),\n\"INFO - All good\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", DELETE)\n.userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"message\\\":\\\"Changed deployment from 'application change to 1.0.1-commit1' to 'no change' for application 
'tenant1.application1'\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", DELETE)\n.userIdentity(USER_ID)\n.data(\"{\\\"cancel\\\":\\\"all\\\"}\"),\n\"{\\\"message\\\":\\\"No deployment in progress for application 'tenant1.application1' at this time\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\", POST)\n.userIdentity(USER_ID)\n.data(\"6.1.0\"),\n\"{\\\"message\\\":\\\"Triggered pin to 6.1 for tenant1.application1\\\"}\");\nassertTrue(\"Action is logged to audit log\",\ntester.controller().auditLogger().readLog().entries().stream()\n.anyMatch(entry -> entry.resource().equals(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\")));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", GET)\n.userIdentity(USER_ID), \"{\\\"platform\\\":\\\"6.1\\\",\\\"pinned\\\":true}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\", GET)\n.userIdentity(USER_ID), \"{\\\"platform\\\":\\\"6.1\\\",\\\"pinned\\\":true}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\", DELETE)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Changed deployment from 'pin to 6.1' to 'upgrade to 6.1' for application 'tenant1.application1'\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", GET)\n.userIdentity(USER_ID), \"{\\\"platform\\\":\\\"6.1\\\",\\\"pinned\\\":false}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\", POST)\n.userIdentity(USER_ID)\n.data(\"6.1\"),\n\"{\\\"message\\\":\\\"Triggered pin to 6.1 for tenant1.application1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", GET)\n.userIdentity(USER_ID), \"{\\\"platform\\\":\\\"6.1\\\",\\\"pinned\\\":true}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/platform\", DELETE)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Changed deployment from 'pin to 6.1' to 'pin to current platform' for application 'tenant1.application1'\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", GET)\n.userIdentity(USER_ID), \"{\\\"pinned\\\":true}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin\", DELETE)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Changed deployment from 'pin to current platform' to 'no change' for application 'tenant1.application1'\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying\", GET)\n.userIdentity(USER_ID), \"{}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job/production-us-west-1/pause\", POST)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"production-us-west-1 for tenant1.application1.instance1 paused for \" + DeploymentTrigger.maxPause + 
\"\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job/production-us-west-1\", POST)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Triggered production-us-west-1 for tenant1.application1.instance1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart\", POST)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in prod.us-central-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in prod.us-central-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/staging/region/us-central-1/instance/instance1/restart\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in staging.us-central-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/test/region/us-central-1/instance/instance1/restart\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in test.us-central-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-central-1/instance/instance1/restart\", POST)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in dev.us-central-1\\\"}\");\ndeploymentTester.configServer().nodeRepository().addFixedNodes(ZoneId.from(\"prod\", \"us-central-1\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart?hostname=hostA\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Requested restart of tenant1.application1.instance1 in prod.us-central-1\\\"}\", 200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/suspended\", GET)\n.userIdentity(USER_ID),\nnew File(\"suspended.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/service\", GET)\n.userIdentity(USER_ID),\nnew File(\"services.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/service/storagenode-awe3slno6mmq2fye191y324jl/state/v1/\", GET)\n.userIdentity(USER_ID),\nnew File(\"service.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"delete-with-active-deployments.json\"), 400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-east-1/instance/instance1\", DELETE)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Deactivated tenant1.application1.instance1 in 
dev.us-east-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1\", DELETE)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Deactivated tenant1.application1.instance1 in prod.us-central-1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1\", DELETE)\n.screwdriverIdentity(SCREWDRIVER_ID),\n\"{\\\"message\\\":\\\"Deactivated tenant1.application1.instance1 in prod.us-central-1\\\"}\");\ntester.controller().applications().deploy(ApplicationId.from(\"tenant1\", \"application1\", \"default\"),\nZoneId.from(\"prod\", \"us-central-1\"),\nOptional.of(applicationPackageDefault),\nnew DeployOptions(true, Optional.empty(), false, false));\ntester.controller().applications().deploy(ApplicationId.from(\"tenant1\", \"application1\", \"my-user\"),\nZoneId.from(\"dev\", \"us-east-1\"),\nOptional.of(applicationPackageDefault),\nnew DeployOptions(false, Optional.empty(), false, false));\ntester.serviceRegistry().routingGeneratorMock().putEndpoints(new DeploymentId(ApplicationId.from(\"tenant1\", \"application1\", \"default\"), ZoneId.from(\"prod\", \"us-central-1\")),\nList.of(new RoutingEndpoint(\"https:\ntester.serviceRegistry().routingGeneratorMock().putEndpoints(new DeploymentId(ApplicationId.from(\"tenant1\", \"application1\", \"my-user\"), ZoneId.from(\"dev\", \"us-east-1\")),\nList.of(new RoutingEndpoint(\"https:\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/my-user/job/dev-us-east-1/test-config\", GET)\n.userIdentity(USER_ID),\nnew File(\"test-config-dev.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/my-user/job/production-us-central-1/test-config\", GET)\n.userIdentity(USER_ID),\nnew File(\"test-config.json\"));\ntester.controller().applications().deactivate(ApplicationId.from(\"tenant1\", \"application1\", \"default\"),\nZoneId.from(\"prod\", \"us-central-1\"));\ntester.controller().applications().deactivate(ApplicationId.from(\"tenant1\", \"application1\", \"my-user\"),\nZoneId.from(\"dev\", \"us-east-1\"));\nApplicationPackage packageWithServiceForWrongDomain = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.environment(Environment.prod)\n.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(ATHENZ_TENANT_DOMAIN_2.getName()), AthenzService.from(\"service\"))\n.region(\"us-west-1\")\n.build();\nallowLaunchOfService(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN_2, \"service\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.data(createApplicationSubmissionData(packageWithServiceForWrongDomain, 123)),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Athenz domain in deployment.xml: [domain2] must match tenant domain: [domain1]\\\"}\", 400);\nApplicationPackage packageWithService = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.globalServiceId(\"foo\")\n.environment(Environment.prod)\n.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(ATHENZ_TENANT_DOMAIN.getName()), AthenzService.from(\"service\"))\n.region(\"us-central-1\")\n.parallel(\"us-west-1\", \"us-east-3\")\n.build();\nallowLaunchOfService(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, 
\"service\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.data(createApplicationSubmissionData(packageWithService, 123)),\n\"{\\\"message\\\":\\\"Application package version: 1.0.2-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/package\", GET).userIdentity(HOSTED_VESPA_OPERATOR),\n(response) -> {\nassertEquals(\"attachment; filename=\\\"tenant1.application1-build2.zip\\\"\", response.getHeaders().getFirst(\"Content-Disposition\"));\nassertArrayEquals(packageWithService.zippedContent(), response.getBody());\n},\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/package?build=1\", GET).userIdentity(HOSTED_VESPA_OPERATOR),\n(response) -> {\nassertEquals(\"attachment; filename=\\\"tenant1.application1-build1.zip\\\"\", response.getHeaders().getFirst(\"Content-Disposition\"));\nassertArrayEquals(applicationPackageInstance1.zippedContent(), response.getBody());\n},\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.header(\"X-Content-Hash\", \"not/the/right/hash\")\n.data(createApplicationSubmissionData(packageWithService, 123)),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Value of X-Content-Hash header does not match computed content hash\\\"}\", 400);\nMultiPartStreamer streamer = createApplicationSubmissionData(packageWithService, 123);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.header(\"X-Content-Hash\", Base64.getEncoder().encodeToString(Signatures.sha256Digest(streamer::data)))\n.data(streamer),\n\"{\\\"message\\\":\\\"Application package version: 1.0.3-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\nApplicationPackage multiInstanceSpec = new ApplicationPackageBuilder()\n.instances(\"instance1,instance2\")\n.environment(Environment.prod)\n.region(\"us-central-1\")\n.parallel(\"us-west-1\", \"us-east-3\")\n.endpoint(\"default\", \"foo\", \"us-central-1\", \"us-west-1\", \"us-east-3\")\n.build();\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", POST)\n.screwdriverIdentity(SCREWDRIVER_ID)\n.data(createApplicationSubmissionData(multiInstanceSpec, 123)),\n\"{\\\"message\\\":\\\"Application package version: 1.0.4-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\nassertEquals(2, tester.controller().applications().deploymentTrigger().triggerReadyJobs());\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job\", GET)\n.userIdentity(USER_ID),\nnew File(\"jobs.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job/system-test\", GET)\n.userIdentity(USER_ID),\nnew 
File(\"system-test-job.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job/system-test/run/1\", GET)\n.userIdentity(USER_ID),\nnew File(\"system-test-details.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/job/staging-test\", DELETE)\n.userIdentity(USER_ID),\n\"{\\\"message\\\":\\\"Aborting run 2 of staging-test for tenant1.application1.instance1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/submit\", DELETE)\n.userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"message\\\":\\\"Unregistered 'tenant1.application1' from internal deployment pipeline.\\\"}\");\nbyte[] data = new byte[0];\ntester.assertResponse(request(\"/application/v4/user?user=new_user&domain=by\", PUT)\n.data(data)\n.userIdentity(new UserId(\"new_user\")),\nnew File(\"create-user-response.json\"));\ntester.assertResponse(request(\"/application/v4/user\", GET)\n.userIdentity(new UserId(\"other_user\")),\n\"{\\\"user\\\":\\\"other_user\\\",\\\"tenants\\\":[],\\\"tenantExists\\\":false}\");\ntester.assertResponse(request(\"/application/v4/\", Request.Method.OPTIONS)\n.userIdentity(USER_ID),\n\"\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/default\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.default\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/my-user\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.my-user\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.instance1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance2\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.instance2\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", DELETE).userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\n}", + "context_before": "class ApplicationApiTest extends ControllerContainerTest {\nprivate static final String responseFiles = \"src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/\";\nprivate static final String pemPublicKey = \"-----BEGIN PUBLIC KEY-----\\n\" +\n\"MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuKVFA8dXk43kVfYKzkUqhEY2rDT9\\n\" +\n\"z/4jKSTHwbYR8wdsOSrJGVEUPbS2nguIJ64OJH7gFnxM6sxUVj+Nm2HlXw==\\n\" +\n\"-----END PUBLIC KEY-----\\n\";\nprivate static final String quotedPemPublicKey = pemPublicKey.replaceAll(\"\\\\n\", \"\\\\\\\\n\");\nprivate static final ApplicationPackage applicationPackageDefault = new ApplicationPackageBuilder()\n.instances(\"default\")\n.environment(Environment.prod)\n.globalServiceId(\"foo\")\n.region(\"us-central-1\")\n.region(\"us-east-3\")\n.region(\"us-west-1\")\n.blockChange(false, true, \"mon-fri\", \"0-8\", \"UTC\")\n.build();\nprivate static final ApplicationPackage 
applicationPackageInstance1 = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.environment(Environment.prod)\n.globalServiceId(\"foo\")\n.region(\"us-central-1\")\n.region(\"us-east-3\")\n.region(\"us-west-1\")\n.blockChange(false, true, \"mon-fri\", \"0-8\", \"UTC\")\n.build();\nprivate static final AthenzDomain ATHENZ_TENANT_DOMAIN = new AthenzDomain(\"domain1\");\nprivate static final AthenzDomain ATHENZ_TENANT_DOMAIN_2 = new AthenzDomain(\"domain2\");\nprivate static final ScrewdriverId SCREWDRIVER_ID = new ScrewdriverId(\"12345\");\nprivate static final UserId USER_ID = new UserId(\"myuser\");\nprivate static final UserId OTHER_USER_ID = new UserId(\"otheruser\");\nprivate static final UserId HOSTED_VESPA_OPERATOR = new UserId(\"johnoperator\");\nprivate static final OktaIdentityToken OKTA_IT = new OktaIdentityToken(\"okta-it\");\nprivate static final OktaAccessToken OKTA_AT = new OktaAccessToken(\"okta-at\");\nprivate ContainerTester tester;\nprivate DeploymentTester deploymentTester;\n@Before\npublic void before() {\ntester = new ContainerTester(container, responseFiles);\ndeploymentTester = new DeploymentTester(new ControllerTester(tester));\ndeploymentTester.controllerTester().computeVersionStatus();\n}\n@Test\nprivate void addIssues(DeploymentTester tester, TenantAndApplicationId id) {\ntester.applications().lockApplicationOrThrow(id, application ->\ntester.controller().applications().store(application.withDeploymentIssueId(IssueId.from(\"123\"))\n.withOwnershipIssueId(IssueId.from(\"321\"))\n.withOwner(User.from(\"owner-username\"))));\n}\n@Test\npublic void testRotationOverride() {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.globalServiceId(\"foo\")\n.region(\"us-west-1\")\n.region(\"us-east-3\")\n.build();\nvar app = deploymentTester.newDeploymentContext(createTenantAndApplication());\napp.submit(applicationPackage).runJob(JobType.systemTest).runJob(JobType.stagingTest).runJob(JobType.productionUsWest1);\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/environment/prod/region/us-west-1/instance/default/global-rotation\", GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"tenant2.application2 not found\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/global-rotation\", GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"application 'tenant1.application1.instance1' has no deployment in prod.us-east-3\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/global-rotation/override\", PUT)\n.userIdentity(USER_ID)\n.data(\"{\\\"reason\\\":\\\"unit-test\\\"}\"),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"application 'tenant1.application1.instance1' has no deployment in prod.us-east-3\\\"}\",\n404);\nsetZoneInRotation(\"rotation-fqdn-1\", ZoneId.from(\"prod\", \"us-west-1\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation\", GET)\n.userIdentity(USER_ID),\nnew 
File(\"global-rotation.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override\", GET)\n.userIdentity(USER_ID),\nnew File(\"global-rotation-get.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override\", PUT)\n.userIdentity(USER_ID)\n.data(\"{\\\"reason\\\":\\\"unit-test\\\"}\"),\nnew File(\"global-rotation-put.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override\", DELETE)\n.userIdentity(USER_ID)\n.data(\"{\\\"reason\\\":\\\"unit-test\\\"}\"),\nnew File(\"global-rotation-delete.json\"));\n}\n@Test\npublic void multiple_endpoints() {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.region(\"us-west-1\")\n.region(\"us-east-3\")\n.region(\"eu-west-1\")\n.endpoint(\"eu\", \"default\", \"eu-west-1\")\n.endpoint(\"default\", \"default\", \"us-west-1\", \"us-east-3\")\n.build();\nvar app = deploymentTester.newDeploymentContext(\"tenant1\", \"application1\", \"instance1\");\napp.submit(applicationPackage).deploy();\nsetZoneInRotation(\"rotation-fqdn-2\", ZoneId.from(\"prod\", \"us-west-1\"));\nsetZoneInRotation(\"rotation-fqdn-2\", ZoneId.from(\"prod\", \"us-east-3\"));\nsetZoneInRotation(\"rotation-fqdn-1\", ZoneId.from(\"prod\", \"eu-west-1\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation\", GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"application 'tenant1.application1.instance1' has multiple rotations. 
Query parameter 'endpointId' must be given\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation?endpointId=default\", GET)\n.userIdentity(USER_ID),\n\"{\\\"bcpStatus\\\":{\\\"rotationStatus\\\":\\\"IN\\\"}}\",\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation?endpointId=eu\", GET)\n.userIdentity(USER_ID),\n\"{\\\"bcpStatus\\\":{\\\"rotationStatus\\\":\\\"UNKNOWN\\\"}}\",\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/eu-west-1/global-rotation?endpointId=eu\", GET)\n.userIdentity(USER_ID),\n\"{\\\"bcpStatus\\\":{\\\"rotationStatus\\\":\\\"IN\\\"}}\",\n200);\n}\n@Test\npublic void testDeployDirectly() {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\naddUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST).userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference.json\"));\naddScrewdriverUserToDeployRole(SCREWDRIVER_ID,\nATHENZ_TENANT_DOMAIN,\nApplicationName.from(\"application1\"));\nMultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/deploy\", POST)\n.data(entity)\n.screwdriverIdentity(SCREWDRIVER_ID),\nnew File(\"deploy-result.json\"));\nMultiPartStreamer noAppEntity = createApplicationDeployData(Optional.empty(), true);\ntester.assertResponse(request(\"/application/v4/tenant/hosted-vespa/application/routing/environment/prod/region/us-central-1/instance/default/deploy\", POST)\n.data(noAppEntity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Deployment of system applications during a system upgrade is not allowed\\\"}\",\n400);\ndeploymentTester.controllerTester().upgradeSystem(deploymentTester.controller().versionStatus().controllerVersion().get().versionNumber());\ntester.assertResponse(request(\"/application/v4/tenant/hosted-vespa/application/routing/environment/prod/region/us-central-1/instance/default/deploy\", POST)\n.data(noAppEntity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\nnew File(\"deploy-result.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/hosted-vespa/application/proxy-host/environment/prod/region/us-central-1/instance/instance1/deploy\", POST)\n.data(noAppEntity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\nnew File(\"deploy-no-deployment.json\"), 400);\n}\n@Test\npublic void testSortsDeploymentsAndJobs() {\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.region(\"us-east-3\")\n.build();\nvar app = deploymentTester.newDeploymentContext(\"tenant1\", \"application1\", \"instance1\");\napp.submit(applicationPackage).deploy();\napplicationPackage = new 
ApplicationPackageBuilder()\n.instances(\"instance1\")\n.globalServiceId(\"foo\")\n.region(\"us-west-1\")\n.region(\"us-east-3\")\n.build();\napp.submit(applicationPackage).runJob(JobType.systemTest).runJob(JobType.stagingTest).runJob(JobType.productionUsWest1);\nsetZoneInRotation(\"rotation-fqdn-1\", ZoneId.from(\"prod\", \"us-west-1\"));\napp.runJob(JobType.stagingTest).runJob(JobType.productionUsEast3);\nsetDeploymentMaintainedInfo();\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", GET)\n.userIdentity(USER_ID),\nnew File(\"instance-without-change-multiple-deployments.json\"));\n}\n@Test\npublic void testMeteringResponses() {\nMockMeteringClient mockMeteringClient = tester.serviceRegistry().meteringService();\nResourceAllocation currentSnapshot = new ResourceAllocation(1, 2, 3);\nResourceAllocation thisMonth = new ResourceAllocation(12, 24, 1000);\nResourceAllocation lastMonth = new ResourceAllocation(24, 48, 2000);\nApplicationId applicationId = ApplicationId.from(\"doesnotexist\", \"doesnotexist\", \"default\");\nMap<ApplicationId, List<ResourceSnapshot>> snapshotHistory = Map.of(applicationId, List.of(\nnew ResourceSnapshot(applicationId, 1, 2, 3, Instant.ofEpochMilli(123), ZoneId.defaultId()),\nnew ResourceSnapshot(applicationId, 1, 2, 3, Instant.ofEpochMilli(246), ZoneId.defaultId()),\nnew ResourceSnapshot(applicationId, 1, 2, 3, Instant.ofEpochMilli(492), ZoneId.defaultId())));\nmockMeteringClient.setMeteringInfo(new MeteringInfo(thisMonth, lastMonth, currentSnapshot, snapshotHistory));\ntester.assertResponse(request(\"/application/v4/tenant/doesnotexist/application/doesnotexist/metering\", GET)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance1-metering.json\"));\n}\n@Test\npublic void testTenantCostResponse() {\nApplicationId applicationId = createTenantAndApplication();\nMockTenantCost mockTenantCost = deploymentTester.controllerTester().serviceRegistry().tenantCost();\nmockTenantCost.setMonthsWithMetering(\nnew TreeSet<>(Set.of(\nYearMonth.of(2019, 10),\nYearMonth.of(2019, 9)\n))\n);\ntester.assertResponse(request(\"/application/v4/tenant/\" + applicationId.tenant().value() + \"/cost\", GET)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"months\\\":[\\\"2019-09\\\",\\\"2019-10\\\"]}\");\nCostInfo costInfo1 = new CostInfo(applicationId, ZoneId.from(\"prod\", \"us-south-1\"),\nnew BigDecimal(\"7.0\"),\nnew BigDecimal(\"600.0\"),\nnew BigDecimal(\"1000.0\"),\n35, 23, 10);\nCostInfo costInfo2 = new CostInfo(applicationId, ZoneId.from(\"prod\", \"us-north-1\"),\nnew BigDecimal(\"2.0\"),\nnew BigDecimal(\"3.0\"),\nnew BigDecimal(\"4.0\"),\n10, 20, 30);\nmockTenantCost.setCostInfoList(\nList.of(costInfo1, costInfo2)\n);\ntester.assertResponse(request(\"/application/v4/tenant/\" + applicationId.tenant().value() + \"/cost/2019-09\", GET)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"cost-report.json\"));\n}\n@Test\npublic void testErrorResponses() throws Exception {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", PUT)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\"),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", 
GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"Tenant 'tenant1' does not exist\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1\", GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"tenant1.application1 not found\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-east/instance/default\", GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"tenant1.application1 not found\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Could not create tenant 'tenant2': The Athens domain 'domain1' is already connected to tenant 'tenant1'\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\"),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Tenant 'tenant1' already exists\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/my_tenant_2\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"New tenant or application names must start with a letter, may contain no more than 20 characters, and may only contain lowercase letters, digits or dashes, but no double-dashes.\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/by-tenant2\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Athenz tenant name cannot have prefix 'by-'\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/hosted-vespa\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Tenant 'hosted-vespa' already exists\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Could not create 'tenant1.application1.instance1': Instance already exists\\\"}\",\n400);\nConfigServerMock configServer = 
tester.serviceRegistry().configServerMock();\nconfigServer.throwOnNextPrepare(new ConfigServerException(new URI(\"server-url\"), \"Failed to prepare application\", ConfigServerException.ErrorCode.INVALID_APPLICATION_PACKAGE, null));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/package\", GET).userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"No application package has been submitted for 'tenant1.application1'\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/package?build=42\", GET).userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"No application package found for 'tenant1.application1' with build number 42\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/package?build=foobar\", GET).userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Invalid build number: For input string: \\\\\\\"foobar\\\\\\\"\\\"}\",\n400);\nMultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy\", POST)\n.data(entity)\n.userIdentity(USER_ID),\nnew File(\"deploy-failure.json\"), 400);\nconfigServer.throwOnNextPrepare(new ConfigServerException(new URI(\"server-url\"), \"Failed to prepare application\", ConfigServerException.ErrorCode.OUT_OF_CAPACITY, null));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy\", POST)\n.data(entity)\n.userIdentity(USER_ID),\nnew File(\"deploy-out-of-capacity.json\"), 400);\nconfigServer.throwOnNextPrepare(new ConfigServerException(new URI(\"server-url\"), \"Failed to activate application\", ConfigServerException.ErrorCode.ACTIVATION_CONFLICT, null));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy\", POST)\n.data(entity)\n.userIdentity(USER_ID),\nnew File(\"deploy-activation-conflict.json\"), 409);\nconfigServer.throwOnNextPrepare(new ConfigServerException(new URI(\"server-url\"), \"Internal server error\", ConfigServerException.ErrorCode.INTERNAL_SERVER_ERROR, null));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy\", POST)\n.data(entity)\n.userIdentity(USER_ID),\nnew File(\"deploy-internal-server-error.json\"), 500);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Could not delete tenant 'tenant1': This tenant has active applications\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.instance1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", 
DELETE)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"Could not delete instance 'tenant1.application1.instance1': Instance not found\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/no-such-tenant/cost\", GET).userIdentity(USER_ID).oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"Tenant 'no-such-tenant' does not exist\\\"}\", 404);\ntester.assertResponse(request(\"/application/v4/tenant/no-such-tenant/cost/2018-01-01\", GET).userIdentity(USER_ID).oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"Tenant 'no-such-tenant' does not exist\\\"}\", 404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/cost/not-a-valid-date\", GET).userIdentity(USER_ID).oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Could not parse year-month 'not-a-valid-date'\\\"}\", 400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", DELETE)\n.userIdentity(USER_ID),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ntester.controller().curator().writeTenant(new AthenzTenant(TenantName.from(\"my_tenant\"), ATHENZ_TENANT_DOMAIN,\nnew Property(\"property1\"), Optional.empty(), Optional.empty()));\ntester.assertResponse(request(\"/application/v4/tenant/my-tenant\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Tenant 'my-tenant' already exists\\\"}\",\n400);\n}\n@Test\npublic void testAuthorization() {\nUserId authorizedUser = USER_ID;\nUserId unauthorizedUser = new UserId(\"othertenant\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\"),\n\"{\\n \\\"message\\\" : \\\"Not authenticated\\\"\\n}\",\n401);\ntester.assertResponse(request(\"/application/v4/tenant/\", GET)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\"),\n\"[]\",\n200);\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.userIdentity(unauthorizedUser),\n\"{\\\"error-code\\\":\\\"FORBIDDEN\\\",\\\"message\\\":\\\"The user 'user.othertenant' is not admin in Athenz domain 'domain1'\\\"}\",\n403);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"),\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(unauthorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\n \\\"code\\\" : 
403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference.json\"),\n200);\nMultiPartStreamer entity = createApplicationDeployData(applicationPackageDefault, true);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-west-1/instance/default/deploy\", POST)\n.data(entity)\n.userIdentity(USER_ID),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1\", DELETE)\n.userIdentity(unauthorizedUser),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/default\", POST)\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference-default.json\"),\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1\", DELETE)\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Could not delete application; more than one instance present: [tenant1.application1, tenant1.application1.instance1]\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/default\", DELETE)\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.default\\\"}\",\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1\", DELETE)\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted application tenant1.application1\\\"}\",\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", PUT)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.userIdentity(unauthorizedUser),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ncreateAthenzDomainWithAdmin(new AthenzDomain(\"domain2\"), USER_ID);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", PUT)\n.data(\"{\\\"athensDomain\\\":\\\"domain2\\\", \\\"property\\\":\\\"property1\\\"}\")\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"tenant\\\":\\\"tenant1\\\",\\\"type\\\":\\\"ATHENS\\\",\\\"athensDomain\\\":\\\"domain2\\\",\\\"property\\\":\\\"property1\\\",\\\"applications\\\":[]}\",\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", DELETE)\n.userIdentity(unauthorizedUser),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\n}\n@Test\npublic void athenz_service_must_be_allowed_to_launch_and_be_under_tenant_domain() {\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.upgradePolicy(\"default\")\n.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(\"another.domain\"), com.yahoo.config.provision.AthenzService.from(\"service\"))\n.environment(Environment.prod)\n.region(\"us-west-1\")\n.build();\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, 
USER_ID);\ndeploymentTester.controllerTester().createTenant(\"tenant1\", ATHENZ_TENANT_DOMAIN.getName(), 1234L);\nvar application = deploymentTester.newDeploymentContext(\"tenant1\", \"application1\", \"default\");\nScrewdriverId screwdriverId = new ScrewdriverId(\"123\");\naddScrewdriverUserToDeployRole(screwdriverId, ATHENZ_TENANT_DOMAIN, application.instanceId().application());\nallowLaunchOfService(new com.yahoo.vespa.athenz.api.AthenzService(new AthenzDomain(\"another.domain\"), \"service\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/submit/\", POST)\n.data(createApplicationSubmissionData(applicationPackage, 123))\n.screwdriverIdentity(screwdriverId),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Athenz domain in deployment.xml: [another.domain] must match tenant domain: [domain1]\\\"}\",\n400);\napplicationPackage = new ApplicationPackageBuilder()\n.upgradePolicy(\"default\")\n.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(\"domain1\"), com.yahoo.config.provision.AthenzService.from(\"service\"))\n.environment(Environment.prod)\n.region(\"us-west-1\")\n.build();\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/submit\", POST)\n.data(createApplicationSubmissionData(applicationPackage, 123))\n.screwdriverIdentity(screwdriverId),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Not allowed to launch Athenz service domain1.service\\\"}\",\n400);\nallowLaunchOfService(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, \"service\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/submit/\", POST)\n.data(createApplicationSubmissionData(applicationPackage, 123))\n.screwdriverIdentity(screwdriverId),\n\"{\\\"message\\\":\\\"Application package version: 1.0.1-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\n}\n@Test\npublic void personal_deployment_with_athenz_service_requires_user_is_admin() {\nUserId tenantAdmin = new UserId(\"tenant-admin\");\nUserId userId = new UserId(\"new-user\");\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, tenantAdmin);\nallowLaunchOfService(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, \"service\"));\ntester.assertResponse(request(\"/application/v4/user?user=new_user&domain=by\", PUT)\n.userIdentity(userId),\nnew File(\"create-user-response.json\"));\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(\"domain1\"), com.yahoo.config.provision.AthenzService.from(\"service\"))\n.build();\nString expectedResult=\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"User user.new-user is not allowed to launch services in Athenz domain domain1. 
Please reach out to the domain admin.\\\"}\";\nMultiPartStreamer entity = createApplicationDeployData(applicationPackage, true);\ntester.assertResponse(request(\"/application/v4/tenant/by-new-user/application/application1/environment/dev/region/us-west-1/instance/default\", POST)\n.data(entity)\n.userIdentity(userId),\nexpectedResult,\n400);\ncreateTenantAndApplication();\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/new-user/deploy/dev-us-east-1\", POST)\n.data(entity)\n.userIdentity(userId),\nexpectedResult,\n400);\ntester.athenzClientFactory().getSetup()\n.domains.get(ATHENZ_TENANT_DOMAIN)\n.admin(HostedAthenzIdentities.from(userId));\ntester.assertResponse(request(\"/application/v4/tenant/by-new-user/application/application1/environment/dev/region/us-west-1/instance/default\", POST)\n.data(entity)\n.userIdentity(userId),\nnew File(\"deploy-result.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/new-user/deploy/dev-us-east-1\", POST)\n.data(entity)\n.userIdentity(userId),\n\"{\\\"message\\\":\\\"Deployment started in run 1 of dev-us-east-1 for tenant1.application1.new-user. This may take about 15 minutes the first time.\\\",\\\"run\\\":1}\");\n}\n@Test\npublic void testJobStatusReporting() {\naddUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));\nvar app = deploymentTester.newDeploymentContext(createTenantAndApplication());\nVersion vespaVersion = tester.configServer().initialVersion();\napp.submit(applicationPackageInstance1);\nString data = \"{\\\"jobName\\\":\\\"system-test\\\",\\\"instance\\\":\\\"instance1\\\"}\";\nvar request = request(\"/application/v4/tenant/tenant1/application/application1/jobreport\", POST)\n.data(data)\n.userIdentity(HOSTED_VESPA_OPERATOR);\ntester.assertResponse(request, \"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Notified of completion \" +\n\"of system-test for tenant1.application1.instance1, but that has not been triggered; last was never\\\"}\",\n400);\ndeploymentTester.triggerJobs();\ntester.assertResponse(request, \"{\\\"message\\\":\\\"ok\\\"}\");\nJobStatus recordedStatus = app.instance().deploymentJobs().jobStatus().get(JobType.systemTest);\nassertNotNull(\"Status was recorded\", recordedStatus);\nassertTrue(recordedStatus.isSuccess());\nassertEquals(vespaVersion, recordedStatus.lastCompleted().get().platform());\n}\n@Test\npublic void applicationWithRoutingPolicy() {\nvar app = deploymentTester.newDeploymentContext(createTenantAndApplication());\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.environment(Environment.prod)\n.instances(\"instance1\")\n.region(\"us-west-1\")\n.build();\napp.submit(applicationPackage).deploy();\nRoutingPolicy policy = new RoutingPolicy(app.instanceId(),\nClusterSpec.Id.from(\"default\"),\nZoneId.from(Environment.prod, RegionName.from(\"us-west-1\")),\nHostName.from(\"lb-0-canonical-name\"),\nOptional.of(\"dns-zone-1\"), Set.of(EndpointId.of(\"c0\")));\ntester.controller().curator().writeRoutingPolicies(app.instanceId(), Set.of(policy));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", GET)\n.userIdentity(USER_ID),\nnew File(\"instance-with-routing-policy.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-west-1/instance/instance1\", GET)\n.userIdentity(USER_ID),\nnew 
File(\"deployment-with-routing-policy.json\"));\n}\nprivate MultiPartStreamer createApplicationDeployData(ApplicationPackage applicationPackage, boolean deployDirectly) {\nreturn createApplicationDeployData(Optional.of(applicationPackage), deployDirectly);\n}\nprivate MultiPartStreamer createApplicationDeployData(Optional applicationPackage, boolean deployDirectly) {\nreturn createApplicationDeployData(applicationPackage, Optional.empty(), deployDirectly);\n}\nprivate MultiPartStreamer createApplicationDeployData(Optional applicationPackage,\nOptional applicationVersion, boolean deployDirectly) {\nMultiPartStreamer streamer = new MultiPartStreamer();\nstreamer.addJson(\"deployOptions\", deployOptions(deployDirectly, applicationVersion));\napplicationPackage.ifPresent(ap -> streamer.addBytes(\"applicationZip\", ap.zippedContent()));\nreturn streamer;\n}\nprivate MultiPartStreamer createApplicationSubmissionData(ApplicationPackage applicationPackage, long projectId) {\nreturn new MultiPartStreamer().addJson(EnvironmentResource.SUBMIT_OPTIONS, \"{\\\"repository\\\":\\\"repository1\\\",\\\"branch\\\":\\\"master\\\",\\\"commit\\\":\\\"commit1\\\",\"\n+ \"\\\"projectId\\\":\" + projectId + \",\\\"authorEmail\\\":\\\"a@b\\\"}\")\n.addBytes(EnvironmentResource.APPLICATION_ZIP, applicationPackage.zippedContent())\n.addBytes(EnvironmentResource.APPLICATION_TEST_ZIP, \"content\".getBytes());\n}\nprivate String deployOptions(boolean deployDirectly, Optional applicationVersion) {\nreturn \"{\\\"vespaVersion\\\":null,\" +\n\"\\\"ignoreValidationErrors\\\":false,\" +\n\"\\\"deployDirectly\\\":\" + deployDirectly +\napplicationVersion.map(version ->\n\",\" +\n\"\\\"buildNumber\\\":\" + version.buildNumber().getAsLong() + \",\" +\n\"\\\"sourceRevision\\\":{\" +\n\"\\\"repository\\\":\\\"\" + version.source().get().repository() + \"\\\",\" +\n\"\\\"branch\\\":\\\"\" + version.source().get().branch() + \"\\\",\" +\n\"\\\"commit\\\":\\\"\" + version.source().get().commit() + \"\\\"\" +\n\"}\"\n).orElse(\"\") +\n\"}\";\n}\n/** Make a request with (athens) user domain1.mytenant */\nprivate RequestBuilder request(String path, Request.Method method) {\nreturn new RequestBuilder(path, method);\n}\n/**\n* In production this happens outside hosted Vespa, so there is no API for it and we need to reach down into the\n* mock setup to replicate the action.\n*/\nprivate void createAthenzDomainWithAdmin(AthenzDomain domain, UserId userId) {\nAthenzDbMock.Domain domainMock = tester.athenzClientFactory().getSetup().getOrCreateDomain(domain);\ndomainMock.markAsVespaTenant();\ndomainMock.admin(AthenzUser.fromUserId(userId.id()));\n}\n/**\n* Mock athenz service identity configuration. 
Simulates that configserver is allowed to launch a service\n*/\nprivate void allowLaunchOfService(com.yahoo.vespa.athenz.api.AthenzService service) {\nAthenzDbMock.Domain domainMock = tester.athenzClientFactory().getSetup().getOrCreateDomain(service.getDomain());\ndomainMock.services.put(service.getName(), new AthenzDbMock.Service(true));\n}\n/**\n* In production this happens outside hosted Vespa, so there is no API for it and we need to reach down into the\n* mock setup to replicate the action.\n*/\nprivate void addScrewdriverUserToDeployRole(ScrewdriverId screwdriverId,\nAthenzDomain domain,\nApplicationName application) {\ntester.authorize(domain, HostedAthenzIdentities.from(screwdriverId), ApplicationAction.deploy, application);\n}\nprivate ApplicationId createTenantAndApplication() {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference.json\"));\naddScrewdriverUserToDeployRole(SCREWDRIVER_ID, ATHENZ_TENANT_DOMAIN, ApplicationName.from(\"application1\"));\nreturn ApplicationId.from(\"tenant1\", \"application1\", \"instance1\");\n}\n/**\n* Cluster info, utilization and application and deployment metrics are maintained async by maintainers.\n*\n* This sets these values as if the maintainers had been run.\n*/\nprivate void setDeploymentMaintainedInfo() {\nfor (Application application : deploymentTester.applications().asList()) {\ndeploymentTester.applications().lockApplicationOrThrow(application.id(), lockedApplication -> {\nlockedApplication = lockedApplication.with(new ApplicationMetrics(0.5, 0.7));\nfor (Instance instance : application.instances().values()) {\nfor (Deployment deployment : instance.deployments().values()) {\nMap<ClusterSpec.Id, ClusterInfo> clusterInfo = new HashMap<>();\nList<String> hostnames = new ArrayList<>();\nhostnames.add(\"host1\");\nhostnames.add(\"host2\");\nclusterInfo.put(ClusterSpec.Id.from(\"cluster1\"),\nnew ClusterInfo(\"flavor1\", 37, 2, 4, 50,\nClusterSpec.Type.content, hostnames));\nDeploymentMetrics metrics = new DeploymentMetrics(1, 2, 3, 4, 5,\nOptional.of(Instant.ofEpochMilli(123123)), Map.of());\nlockedApplication = lockedApplication.with(instance.name(),\nlockedInstance -> lockedInstance.withClusterInfo(deployment.zone(), clusterInfo)\n.with(deployment.zone(), metrics)\n.recordActivityAt(Instant.parse(\"2018-06-01T10:15:30.00Z\"), deployment.zone()));\n}\ndeploymentTester.applications().store(lockedApplication);\n}\n});\n}\n}\nprivate void setZoneInRotation(String rotationName, ZoneId zone) {\ntester.serviceRegistry().globalRoutingServiceMock().setStatus(rotationName, zone, com.yahoo.vespa.hosted.controller.api.integration.routing.RotationStatus.IN);\nnew RotationStatusUpdater(tester.controller(), Duration.ofDays(1), new JobControl(tester.controller().curator())).run();\n}\nprivate void updateContactInformation() {\nContact contact = new Contact(URI.create(\"www.contacts.tld/1234\"),\nURI.create(\"www.properties.tld/1234\"),\nURI.create(\"www.issues.tld/1234\"),\nList.of(List.of(\"alice\"), List.of(\"bob\")), \"queue\", 
Optional.empty());\ntester.controller().tenants().lockIfPresent(TenantName.from(\"tenant2\"),\nLockedTenant.Athenz.class,\nlockedTenant -> tester.controller().tenants().store(lockedTenant.with(contact)));\n}\nprivate void registerContact(long propertyId) {\nPropertyId p = new PropertyId(String.valueOf(propertyId));\ntester.serviceRegistry().contactRetrieverMock().addContact(p, new Contact(URI.create(\"www.issues.tld/\" + p.id()),\nURI.create(\"www.contacts.tld/\" + p.id()),\nURI.create(\"www.properties.tld/\" + p.id()),\nList.of(Collections.singletonList(\"alice\"),\nCollections.singletonList(\"bob\")),\n\"queue\", Optional.empty()));\n}\nprivate static class RequestBuilder implements Supplier<Request> {\nprivate final String path;\nprivate final Request.Method method;\nprivate byte[] data = new byte[0];\nprivate AthenzIdentity identity;\nprivate OktaIdentityToken oktaIdentityToken;\nprivate OktaAccessToken oktaAccessToken;\nprivate String contentType = \"application/json\";\nprivate Map<String, List<String>> headers = new HashMap<>();\nprivate String recursive;\nprivate RequestBuilder(String path, Request.Method method) {\nthis.path = path;\nthis.method = method;\n}\nprivate RequestBuilder data(byte[] data) { this.data = data; return this; }\nprivate RequestBuilder data(String data) { return data(data.getBytes(StandardCharsets.UTF_8)); }\nprivate RequestBuilder data(MultiPartStreamer streamer) {\nreturn Exceptions.uncheck(() -> data(streamer.data().readAllBytes()).contentType(streamer.contentType()));\n}\nprivate RequestBuilder userIdentity(UserId userId) { this.identity = HostedAthenzIdentities.from(userId); return this; }\nprivate RequestBuilder screwdriverIdentity(ScrewdriverId screwdriverId) { this.identity = HostedAthenzIdentities.from(screwdriverId); return this; }\nprivate RequestBuilder oktaIdentityToken(OktaIdentityToken oktaIdentityToken) { this.oktaIdentityToken = oktaIdentityToken; return this; }\nprivate RequestBuilder oktaAccessToken(OktaAccessToken oktaAccessToken) { this.oktaAccessToken = oktaAccessToken; return this; }\nprivate RequestBuilder contentType(String contentType) { this.contentType = contentType; return this; }\nprivate RequestBuilder recursive(String recursive) { this.recursive = recursive; return this; }\nprivate RequestBuilder header(String name, String value) {\nthis.headers.putIfAbsent(name, new ArrayList<>());\nthis.headers.get(name).add(value);\nreturn this;\n}\n@Override\npublic Request get() {\nRequest request = new Request(\"http://localhost:8080\" + path +\n(recursive == null ? 
\"\" : \"?recursive=\" + recursive),\ndata, method);\nrequest.getHeaders().addAll(headers);\nrequest.getHeaders().put(\"Content-Type\", contentType);\nif (identity != null) {\naddIdentityToRequest(request, identity);\n}\nif (oktaIdentityToken != null) {\naddOktaIdentityToken(request, oktaIdentityToken);\n}\nif (oktaAccessToken != null) {\naddOktaAccessToken(request, oktaAccessToken);\n}\nreturn request;\n}\n}\n}", + "context_after": "class ApplicationApiTest extends ControllerContainerTest {\nprivate static final String responseFiles = \"src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/\";\nprivate static final String pemPublicKey = \"-----BEGIN PUBLIC KEY-----\\n\" +\n\"MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuKVFA8dXk43kVfYKzkUqhEY2rDT9\\n\" +\n\"z/4jKSTHwbYR8wdsOSrJGVEUPbS2nguIJ64OJH7gFnxM6sxUVj+Nm2HlXw==\\n\" +\n\"-----END PUBLIC KEY-----\\n\";\nprivate static final String quotedPemPublicKey = pemPublicKey.replaceAll(\"\\\\n\", \"\\\\\\\\n\");\nprivate static final ApplicationPackage applicationPackageDefault = new ApplicationPackageBuilder()\n.instances(\"default\")\n.environment(Environment.prod)\n.globalServiceId(\"foo\")\n.region(\"us-central-1\")\n.region(\"us-east-3\")\n.region(\"us-west-1\")\n.blockChange(false, true, \"mon-fri\", \"0-8\", \"UTC\")\n.build();\nprivate static final ApplicationPackage applicationPackageInstance1 = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.environment(Environment.prod)\n.globalServiceId(\"foo\")\n.region(\"us-central-1\")\n.region(\"us-east-3\")\n.region(\"us-west-1\")\n.blockChange(false, true, \"mon-fri\", \"0-8\", \"UTC\")\n.build();\nprivate static final AthenzDomain ATHENZ_TENANT_DOMAIN = new AthenzDomain(\"domain1\");\nprivate static final AthenzDomain ATHENZ_TENANT_DOMAIN_2 = new AthenzDomain(\"domain2\");\nprivate static final ScrewdriverId SCREWDRIVER_ID = new ScrewdriverId(\"12345\");\nprivate static final UserId USER_ID = new UserId(\"myuser\");\nprivate static final UserId OTHER_USER_ID = new UserId(\"otheruser\");\nprivate static final UserId HOSTED_VESPA_OPERATOR = new UserId(\"johnoperator\");\nprivate static final OktaIdentityToken OKTA_IT = new OktaIdentityToken(\"okta-it\");\nprivate static final OktaAccessToken OKTA_AT = new OktaAccessToken(\"okta-at\");\nprivate ContainerTester tester;\nprivate DeploymentTester deploymentTester;\n@Before\npublic void before() {\ntester = new ContainerTester(container, responseFiles);\ndeploymentTester = new DeploymentTester(new ControllerTester(tester));\ndeploymentTester.controllerTester().computeVersionStatus();\n}\n@Test\nprivate void addIssues(DeploymentTester tester, TenantAndApplicationId id) {\ntester.applications().lockApplicationOrThrow(id, application ->\ntester.controller().applications().store(application.withDeploymentIssueId(IssueId.from(\"123\"))\n.withOwnershipIssueId(IssueId.from(\"321\"))\n.withOwner(User.from(\"owner-username\"))));\n}\n@Test\npublic void testRotationOverride() {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.globalServiceId(\"foo\")\n.region(\"us-west-1\")\n.region(\"us-east-3\")\n.build();\nvar app = 
deploymentTester.newDeploymentContext(createTenantAndApplication());\napp.submit(applicationPackage).runJob(JobType.systemTest).runJob(JobType.stagingTest).runJob(JobType.productionUsWest1);\ntester.assertResponse(request(\"/application/v4/tenant/tenant2/application/application2/environment/prod/region/us-west-1/instance/default/global-rotation\", GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"tenant2.application2 not found\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/global-rotation\", GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"application 'tenant1.application1.instance1' has no deployment in prod.us-east-3\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/global-rotation/override\", PUT)\n.userIdentity(USER_ID)\n.data(\"{\\\"reason\\\":\\\"unit-test\\\"}\"),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"application 'tenant1.application1.instance1' has no deployment in prod.us-east-3\\\"}\",\n404);\nsetZoneInRotation(\"rotation-fqdn-1\", ZoneId.from(\"prod\", \"us-west-1\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation\", GET)\n.userIdentity(USER_ID),\nnew File(\"global-rotation.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override\", GET)\n.userIdentity(USER_ID),\nnew File(\"global-rotation-get.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override\", PUT)\n.userIdentity(USER_ID)\n.data(\"{\\\"reason\\\":\\\"unit-test\\\"}\"),\nnew File(\"global-rotation-put.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override\", DELETE)\n.userIdentity(USER_ID)\n.data(\"{\\\"reason\\\":\\\"unit-test\\\"}\"),\nnew File(\"global-rotation-delete.json\"));\n}\n@Test\npublic void multiple_endpoints() {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.region(\"us-west-1\")\n.region(\"us-east-3\")\n.region(\"eu-west-1\")\n.endpoint(\"eu\", \"default\", \"eu-west-1\")\n.endpoint(\"default\", \"default\", \"us-west-1\", \"us-east-3\")\n.build();\nvar app = deploymentTester.newDeploymentContext(\"tenant1\", \"application1\", \"instance1\");\napp.submit(applicationPackage).deploy();\nsetZoneInRotation(\"rotation-fqdn-2\", ZoneId.from(\"prod\", \"us-west-1\"));\nsetZoneInRotation(\"rotation-fqdn-2\", ZoneId.from(\"prod\", \"us-east-3\"));\nsetZoneInRotation(\"rotation-fqdn-1\", ZoneId.from(\"prod\", \"eu-west-1\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation\", GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"application 'tenant1.application1.instance1' has multiple rotations. 
Query parameter 'endpointId' must be given\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation?endpointId=default\", GET)\n.userIdentity(USER_ID),\n\"{\\\"bcpStatus\\\":{\\\"rotationStatus\\\":\\\"IN\\\"}}\",\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation?endpointId=eu\", GET)\n.userIdentity(USER_ID),\n\"{\\\"bcpStatus\\\":{\\\"rotationStatus\\\":\\\"UNKNOWN\\\"}}\",\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/eu-west-1/global-rotation?endpointId=eu\", GET)\n.userIdentity(USER_ID),\n\"{\\\"bcpStatus\\\":{\\\"rotationStatus\\\":\\\"IN\\\"}}\",\n200);\n}\n@Test\npublic void testDeployDirectly() {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\naddUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST).userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference.json\"));\naddScrewdriverUserToDeployRole(SCREWDRIVER_ID,\nATHENZ_TENANT_DOMAIN,\nApplicationName.from(\"application1\"));\nMultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/deploy\", POST)\n.data(entity)\n.screwdriverIdentity(SCREWDRIVER_ID),\nnew File(\"deploy-result.json\"));\nMultiPartStreamer noAppEntity = createApplicationDeployData(Optional.empty(), true);\ntester.assertResponse(request(\"/application/v4/tenant/hosted-vespa/application/routing/environment/prod/region/us-central-1/instance/default/deploy\", POST)\n.data(noAppEntity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Deployment of system applications during a system upgrade is not allowed\\\"}\",\n400);\ndeploymentTester.controllerTester().upgradeSystem(deploymentTester.controller().versionStatus().controllerVersion().get().versionNumber());\ntester.assertResponse(request(\"/application/v4/tenant/hosted-vespa/application/routing/environment/prod/region/us-central-1/instance/default/deploy\", POST)\n.data(noAppEntity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\nnew File(\"deploy-result.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/hosted-vespa/application/proxy-host/environment/prod/region/us-central-1/instance/instance1/deploy\", POST)\n.data(noAppEntity)\n.userIdentity(HOSTED_VESPA_OPERATOR),\nnew File(\"deploy-no-deployment.json\"), 400);\n}\n@Test\npublic void testSortsDeploymentsAndJobs() {\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.instances(\"instance1\")\n.region(\"us-east-3\")\n.build();\nvar app = deploymentTester.newDeploymentContext(\"tenant1\", \"application1\", \"instance1\");\napp.submit(applicationPackage).deploy();\napplicationPackage = new 
ApplicationPackageBuilder()\n.instances(\"instance1\")\n.globalServiceId(\"foo\")\n.region(\"us-west-1\")\n.region(\"us-east-3\")\n.build();\napp.submit(applicationPackage).runJob(JobType.systemTest).runJob(JobType.stagingTest).runJob(JobType.productionUsWest1);\nsetZoneInRotation(\"rotation-fqdn-1\", ZoneId.from(\"prod\", \"us-west-1\"));\napp.runJob(JobType.stagingTest).runJob(JobType.productionUsEast3);\nsetDeploymentMaintainedInfo();\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", GET)\n.userIdentity(USER_ID),\nnew File(\"instance-without-change-multiple-deployments.json\"));\n}\n@Test\npublic void testMeteringResponses() {\nMockMeteringClient mockMeteringClient = tester.serviceRegistry().meteringService();\nResourceAllocation currentSnapshot = new ResourceAllocation(1, 2, 3);\nResourceAllocation thisMonth = new ResourceAllocation(12, 24, 1000);\nResourceAllocation lastMonth = new ResourceAllocation(24, 48, 2000);\nApplicationId applicationId = ApplicationId.from(\"doesnotexist\", \"doesnotexist\", \"default\");\nMap<ApplicationId, List<ResourceSnapshot>> snapshotHistory = Map.of(applicationId, List.of(\nnew ResourceSnapshot(applicationId, 1, 2, 3, Instant.ofEpochMilli(123), ZoneId.defaultId()),\nnew ResourceSnapshot(applicationId, 1, 2, 3, Instant.ofEpochMilli(246), ZoneId.defaultId()),\nnew ResourceSnapshot(applicationId, 1, 2, 3, Instant.ofEpochMilli(492), ZoneId.defaultId())));\nmockMeteringClient.setMeteringInfo(new MeteringInfo(thisMonth, lastMonth, currentSnapshot, snapshotHistory));\ntester.assertResponse(request(\"/application/v4/tenant/doesnotexist/application/doesnotexist/metering\", GET)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance1-metering.json\"));\n}\n@Test\npublic void testTenantCostResponse() {\nApplicationId applicationId = createTenantAndApplication();\nMockTenantCost mockTenantCost = deploymentTester.controllerTester().serviceRegistry().tenantCost();\nmockTenantCost.setMonthsWithMetering(\nnew TreeSet<>(Set.of(\nYearMonth.of(2019, 10),\nYearMonth.of(2019, 9)\n))\n);\ntester.assertResponse(request(\"/application/v4/tenant/\" + applicationId.tenant().value() + \"/cost\", GET)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"months\\\":[\\\"2019-09\\\",\\\"2019-10\\\"]}\");\nCostInfo costInfo1 = new CostInfo(applicationId, ZoneId.from(\"prod\", \"us-south-1\"),\nnew BigDecimal(\"7.0\"),\nnew BigDecimal(\"600.0\"),\nnew BigDecimal(\"1000.0\"),\n35, 23, 10);\nCostInfo costInfo2 = new CostInfo(applicationId, ZoneId.from(\"prod\", \"us-north-1\"),\nnew BigDecimal(\"2.0\"),\nnew BigDecimal(\"3.0\"),\nnew BigDecimal(\"4.0\"),\n10, 20, 30);\nmockTenantCost.setCostInfoList(\nList.of(costInfo1, costInfo2)\n);\ntester.assertResponse(request(\"/application/v4/tenant/\" + applicationId.tenant().value() + \"/cost/2019-09\", GET)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"cost-report.json\"));\n}\n@Test\npublic void testErrorResponses() throws Exception {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", PUT)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\"),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", 
GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"Tenant 'tenant1' does not exist\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1\", GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"tenant1.application1 not found\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-east/instance/default\", GET)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"tenant1.application1 not found\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant2\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Could not create tenant 'tenant2': The Athens domain 'domain1' is already connected to tenant 'tenant1'\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\"),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Tenant 'tenant1' already exists\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/my_tenant_2\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"New tenant or application names must start with a letter, may contain no more than 20 characters, and may only contain lowercase letters, digits or dashes, but no double-dashes.\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/by-tenant2\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Athenz tenant name cannot have prefix 'by-'\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/hosted-vespa\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Tenant 'hosted-vespa' already exists\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Could not create 'tenant1.application1.instance1': Instance already exists\\\"}\",\n400);\nConfigServerMock configServer = 
tester.serviceRegistry().configServerMock();\nconfigServer.throwOnNextPrepare(new ConfigServerException(new URI(\"server-url\"), \"Failed to prepare application\", ConfigServerException.ErrorCode.INVALID_APPLICATION_PACKAGE, null));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/package\", GET).userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"No application package has been submitted for 'tenant1.application1'\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/package?build=42\", GET).userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"No application package found for 'tenant1.application1' with build number 42\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/package?build=foobar\", GET).userIdentity(HOSTED_VESPA_OPERATOR),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Invalid build number: For input string: \\\\\\\"foobar\\\\\\\"\\\"}\",\n400);\nMultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy\", POST)\n.data(entity)\n.userIdentity(USER_ID),\nnew File(\"deploy-failure.json\"), 400);\nconfigServer.throwOnNextPrepare(new ConfigServerException(new URI(\"server-url\"), \"Failed to prepare application\", ConfigServerException.ErrorCode.OUT_OF_CAPACITY, null));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy\", POST)\n.data(entity)\n.userIdentity(USER_ID),\nnew File(\"deploy-out-of-capacity.json\"), 400);\nconfigServer.throwOnNextPrepare(new ConfigServerException(new URI(\"server-url\"), \"Failed to activate application\", ConfigServerException.ErrorCode.ACTIVATION_CONFLICT, null));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy\", POST)\n.data(entity)\n.userIdentity(USER_ID),\nnew File(\"deploy-activation-conflict.json\"), 409);\nconfigServer.throwOnNextPrepare(new ConfigServerException(new URI(\"server-url\"), \"Internal server error\", ConfigServerException.ErrorCode.INTERNAL_SERVER_ERROR, null));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy\", POST)\n.data(entity)\n.userIdentity(USER_ID),\nnew File(\"deploy-internal-server-error.json\"), 500);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Could not delete tenant 'tenant1': This tenant has active applications\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.instance1\\\"}\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", 
DELETE)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.userIdentity(USER_ID),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"Could not delete instance 'tenant1.application1.instance1': Instance not found\\\"}\",\n404);\ntester.assertResponse(request(\"/application/v4/tenant/no-such-tenant/cost\", GET).userIdentity(USER_ID).oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"Tenant 'no-such-tenant' does not exist\\\"}\", 404);\ntester.assertResponse(request(\"/application/v4/tenant/no-such-tenant/cost/2018-01-01\", GET).userIdentity(USER_ID).oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"NOT_FOUND\\\",\\\"message\\\":\\\"Tenant 'no-such-tenant' does not exist\\\"}\", 404);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/cost/not-a-valid-date\", GET).userIdentity(USER_ID).oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Could not parse year-month 'not-a-valid-date'\\\"}\", 400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", DELETE)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", DELETE)\n.userIdentity(USER_ID),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ntester.controller().curator().writeTenant(new AthenzTenant(TenantName.from(\"my_tenant\"), ATHENZ_TENANT_DOMAIN,\nnew Property(\"property1\"), Optional.empty(), Optional.empty()));\ntester.assertResponse(request(\"/application/v4/tenant/my-tenant\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Tenant 'my-tenant' already exists\\\"}\",\n400);\n}\n@Test\npublic void testAuthorization() {\nUserId authorizedUser = USER_ID;\nUserId unauthorizedUser = new UserId(\"othertenant\");\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\"),\n\"{\\n \\\"message\\\" : \\\"Not authenticated\\\"\\n}\",\n401);\ntester.assertResponse(request(\"/application/v4/tenant/\", GET)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\"),\n\"[]\",\n200);\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT)\n.userIdentity(unauthorizedUser),\n\"{\\\"error-code\\\":\\\"FORBIDDEN\\\",\\\"message\\\":\\\"The user 'user.othertenant' is not admin in Athenz domain 'domain1'\\\"}\",\n403);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"),\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(unauthorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\n \\\"code\\\" : 
403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference.json\"),\n200);\nMultiPartStreamer entity = createApplicationDeployData(applicationPackageDefault, true);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-west-1/instance/default/deploy\", POST)\n.data(entity)\n.userIdentity(USER_ID),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1\", DELETE)\n.userIdentity(unauthorizedUser),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/default\", POST)\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference-default.json\"),\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1\", DELETE)\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Could not delete application; more than one instance present: [tenant1.application1, tenant1.application1.instance1]\\\"}\",\n400);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/default\", DELETE)\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted instance tenant1.application1.default\\\"}\",\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1\", DELETE)\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"message\\\":\\\"Deleted application tenant1.application1\\\"}\",\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", PUT)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.userIdentity(unauthorizedUser),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\ncreateAthenzDomainWithAdmin(new AthenzDomain(\"domain2\"), USER_ID);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", PUT)\n.data(\"{\\\"athensDomain\\\":\\\"domain2\\\", \\\"property\\\":\\\"property1\\\"}\")\n.userIdentity(authorizedUser)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\n\"{\\\"tenant\\\":\\\"tenant1\\\",\\\"type\\\":\\\"ATHENS\\\",\\\"athensDomain\\\":\\\"domain2\\\",\\\"property\\\":\\\"property1\\\",\\\"applications\\\":[]}\",\n200);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", DELETE)\n.userIdentity(unauthorizedUser),\n\"{\\n \\\"code\\\" : 403,\\n \\\"message\\\" : \\\"Access denied\\\"\\n}\",\n403);\n}\n@Test\npublic void athenz_service_must_be_allowed_to_launch_and_be_under_tenant_domain() {\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.upgradePolicy(\"default\")\n.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(\"another.domain\"), com.yahoo.config.provision.AthenzService.from(\"service\"))\n.environment(Environment.prod)\n.region(\"us-west-1\")\n.build();\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, 
USER_ID);\ndeploymentTester.controllerTester().createTenant(\"tenant1\", ATHENZ_TENANT_DOMAIN.getName(), 1234L);\nvar application = deploymentTester.newDeploymentContext(\"tenant1\", \"application1\", \"default\");\nScrewdriverId screwdriverId = new ScrewdriverId(\"123\");\naddScrewdriverUserToDeployRole(screwdriverId, ATHENZ_TENANT_DOMAIN, application.instanceId().application());\nallowLaunchOfService(new com.yahoo.vespa.athenz.api.AthenzService(new AthenzDomain(\"another.domain\"), \"service\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/submit/\", POST)\n.data(createApplicationSubmissionData(applicationPackage, 123))\n.screwdriverIdentity(screwdriverId),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Athenz domain in deployment.xml: [another.domain] must match tenant domain: [domain1]\\\"}\",\n400);\napplicationPackage = new ApplicationPackageBuilder()\n.upgradePolicy(\"default\")\n.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(\"domain1\"), com.yahoo.config.provision.AthenzService.from(\"service\"))\n.environment(Environment.prod)\n.region(\"us-west-1\")\n.build();\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/submit\", POST)\n.data(createApplicationSubmissionData(applicationPackage, 123))\n.screwdriverIdentity(screwdriverId),\n\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Not allowed to launch Athenz service domain1.service\\\"}\",\n400);\nallowLaunchOfService(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, \"service\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/submit/\", POST)\n.data(createApplicationSubmissionData(applicationPackage, 123))\n.screwdriverIdentity(screwdriverId),\n\"{\\\"message\\\":\\\"Application package version: 1.0.1-commit1, source revision of repository 'repository1', branch 'master' with commit 'commit1', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\\\"}\");\n}\n@Test\npublic void personal_deployment_with_athenz_service_requires_user_is_admin() {\nUserId tenantAdmin = new UserId(\"tenant-admin\");\nUserId userId = new UserId(\"new-user\");\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, tenantAdmin);\nallowLaunchOfService(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, \"service\"));\ntester.assertResponse(request(\"/application/v4/user?user=new_user&domain=by\", PUT)\n.userIdentity(userId),\nnew File(\"create-user-response.json\"));\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(\"domain1\"), com.yahoo.config.provision.AthenzService.from(\"service\"))\n.build();\nString expectedResult=\"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"User user.new-user is not allowed to launch services in Athenz domain domain1. 
Please reach out to the domain admin.\\\"}\";\nMultiPartStreamer entity = createApplicationDeployData(applicationPackage, true);\ntester.assertResponse(request(\"/application/v4/tenant/by-new-user/application/application1/environment/dev/region/us-west-1/instance/default\", POST)\n.data(entity)\n.userIdentity(userId),\nexpectedResult,\n400);\ncreateTenantAndApplication();\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/new-user/deploy/dev-us-east-1\", POST)\n.data(entity)\n.userIdentity(userId),\nexpectedResult,\n400);\ntester.athenzClientFactory().getSetup()\n.domains.get(ATHENZ_TENANT_DOMAIN)\n.admin(HostedAthenzIdentities.from(userId));\ntester.assertResponse(request(\"/application/v4/tenant/by-new-user/application/application1/environment/dev/region/us-west-1/instance/default\", POST)\n.data(entity)\n.userIdentity(userId),\nnew File(\"deploy-result.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/new-user/deploy/dev-us-east-1\", POST)\n.data(entity)\n.userIdentity(userId),\n\"{\\\"message\\\":\\\"Deployment started in run 1 of dev-us-east-1 for tenant1.application1.new-user. This may take about 15 minutes the first time.\\\",\\\"run\\\":1}\");\n}\n@Test\npublic void testJobStatusReporting() {\naddUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));\nvar app = deploymentTester.newDeploymentContext(createTenantAndApplication());\nVersion vespaVersion = tester.configServer().initialVersion();\napp.submit(applicationPackageInstance1);\nString data = \"{\\\"jobName\\\":\\\"system-test\\\",\\\"instance\\\":\\\"instance1\\\"}\";\nvar request = request(\"/application/v4/tenant/tenant1/application/application1/jobreport\", POST)\n.data(data)\n.userIdentity(HOSTED_VESPA_OPERATOR);\ntester.assertResponse(request, \"{\\\"error-code\\\":\\\"BAD_REQUEST\\\",\\\"message\\\":\\\"Notified of completion \" +\n\"of system-test for tenant1.application1.instance1, but that has not been triggered; last was never\\\"}\",\n400);\ndeploymentTester.triggerJobs();\ntester.assertResponse(request, \"{\\\"message\\\":\\\"ok\\\"}\");\nJobStatus recordedStatus = app.instance().deploymentJobs().jobStatus().get(JobType.systemTest);\nassertNotNull(\"Status was recorded\", recordedStatus);\nassertTrue(recordedStatus.isSuccess());\nassertEquals(vespaVersion, recordedStatus.lastCompleted().get().platform());\n}\n@Test\npublic void applicationWithRoutingPolicy() {\nvar app = deploymentTester.newDeploymentContext(createTenantAndApplication());\nApplicationPackage applicationPackage = new ApplicationPackageBuilder()\n.environment(Environment.prod)\n.instances(\"instance1\")\n.region(\"us-west-1\")\n.build();\napp.submit(applicationPackage).deploy();\nRoutingPolicy policy = new RoutingPolicy(app.instanceId(),\nClusterSpec.Id.from(\"default\"),\nZoneId.from(Environment.prod, RegionName.from(\"us-west-1\")),\nHostName.from(\"lb-0-canonical-name\"),\nOptional.of(\"dns-zone-1\"), Set.of(EndpointId.of(\"c0\")));\ntester.controller().curator().writeRoutingPolicies(app.instanceId(), Set.of(policy));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", GET)\n.userIdentity(USER_ID),\nnew File(\"instance-with-routing-policy.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-west-1/instance/instance1\", GET)\n.userIdentity(USER_ID),\nnew 
File(\"deployment-with-routing-policy.json\"));\n}\nprivate MultiPartStreamer createApplicationDeployData(ApplicationPackage applicationPackage, boolean deployDirectly) {\nreturn createApplicationDeployData(Optional.of(applicationPackage), deployDirectly);\n}\nprivate MultiPartStreamer createApplicationDeployData(Optional applicationPackage, boolean deployDirectly) {\nreturn createApplicationDeployData(applicationPackage, Optional.empty(), deployDirectly);\n}\nprivate MultiPartStreamer createApplicationDeployData(Optional applicationPackage,\nOptional applicationVersion, boolean deployDirectly) {\nMultiPartStreamer streamer = new MultiPartStreamer();\nstreamer.addJson(\"deployOptions\", deployOptions(deployDirectly, applicationVersion));\napplicationPackage.ifPresent(ap -> streamer.addBytes(\"applicationZip\", ap.zippedContent()));\nreturn streamer;\n}\nprivate MultiPartStreamer createApplicationSubmissionData(ApplicationPackage applicationPackage, long projectId) {\nreturn new MultiPartStreamer().addJson(EnvironmentResource.SUBMIT_OPTIONS, \"{\\\"repository\\\":\\\"repository1\\\",\\\"branch\\\":\\\"master\\\",\\\"commit\\\":\\\"commit1\\\",\"\n+ \"\\\"projectId\\\":\" + projectId + \",\\\"authorEmail\\\":\\\"a@b\\\"}\")\n.addBytes(EnvironmentResource.APPLICATION_ZIP, applicationPackage.zippedContent())\n.addBytes(EnvironmentResource.APPLICATION_TEST_ZIP, \"content\".getBytes());\n}\nprivate String deployOptions(boolean deployDirectly, Optional applicationVersion) {\nreturn \"{\\\"vespaVersion\\\":null,\" +\n\"\\\"ignoreValidationErrors\\\":false,\" +\n\"\\\"deployDirectly\\\":\" + deployDirectly +\napplicationVersion.map(version ->\n\",\" +\n\"\\\"buildNumber\\\":\" + version.buildNumber().getAsLong() + \",\" +\n\"\\\"sourceRevision\\\":{\" +\n\"\\\"repository\\\":\\\"\" + version.source().get().repository() + \"\\\",\" +\n\"\\\"branch\\\":\\\"\" + version.source().get().branch() + \"\\\",\" +\n\"\\\"commit\\\":\\\"\" + version.source().get().commit() + \"\\\"\" +\n\"}\"\n).orElse(\"\") +\n\"}\";\n}\n/** Make a request with (athens) user domain1.mytenant */\nprivate RequestBuilder request(String path, Request.Method method) {\nreturn new RequestBuilder(path, method);\n}\n/**\n* In production this happens outside hosted Vespa, so there is no API for it and we need to reach down into the\n* mock setup to replicate the action.\n*/\nprivate void createAthenzDomainWithAdmin(AthenzDomain domain, UserId userId) {\nAthenzDbMock.Domain domainMock = tester.athenzClientFactory().getSetup().getOrCreateDomain(domain);\ndomainMock.markAsVespaTenant();\ndomainMock.admin(AthenzUser.fromUserId(userId.id()));\n}\n/**\n* Mock athenz service identity configuration. 
Simulates that configserver is allowed to launch a service\n*/\nprivate void allowLaunchOfService(com.yahoo.vespa.athenz.api.AthenzService service) {\nAthenzDbMock.Domain domainMock = tester.athenzClientFactory().getSetup().getOrCreateDomain(service.getDomain());\ndomainMock.services.put(service.getName(), new AthenzDbMock.Service(true));\n}\n/**\n* In production this happens outside hosted Vespa, so there is no API for it and we need to reach down into the\n* mock setup to replicate the action.\n*/\nprivate void addScrewdriverUserToDeployRole(ScrewdriverId screwdriverId,\nAthenzDomain domain,\nApplicationName application) {\ntester.authorize(domain, HostedAthenzIdentities.from(screwdriverId), ApplicationAction.deploy, application);\n}\nprivate ApplicationId createTenantAndApplication() {\ncreateAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);\ntester.assertResponse(request(\"/application/v4/tenant/tenant1\", POST)\n.userIdentity(USER_ID)\n.data(\"{\\\"athensDomain\\\":\\\"domain1\\\", \\\"property\\\":\\\"property1\\\"}\")\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"tenant-without-applications.json\"));\ntester.assertResponse(request(\"/application/v4/tenant/tenant1/application/application1/instance/instance1\", POST)\n.userIdentity(USER_ID)\n.oktaAccessToken(OKTA_AT).oktaIdentityToken(OKTA_IT),\nnew File(\"instance-reference.json\"));\naddScrewdriverUserToDeployRole(SCREWDRIVER_ID, ATHENZ_TENANT_DOMAIN, ApplicationName.from(\"application1\"));\nreturn ApplicationId.from(\"tenant1\", \"application1\", \"instance1\");\n}\n/**\n* Cluster info, utilization and application and deployment metrics are maintained async by maintainers.\n*\n* This sets these values as if the maintainers had been run.\n*/\nprivate void setDeploymentMaintainedInfo() {\nfor (Application application : deploymentTester.applications().asList()) {\ndeploymentTester.applications().lockApplicationOrThrow(application.id(), lockedApplication -> {\nlockedApplication = lockedApplication.with(new ApplicationMetrics(0.5, 0.7));\nfor (Instance instance : application.instances().values()) {\nfor (Deployment deployment : instance.deployments().values()) {\nMap clusterInfo = new HashMap<>();\nList hostnames = new ArrayList<>();\nhostnames.add(\"host1\");\nhostnames.add(\"host2\");\nclusterInfo.put(ClusterSpec.Id.from(\"cluster1\"),\nnew ClusterInfo(\"flavor1\", 37, 2, 4, 50,\nClusterSpec.Type.content, hostnames));\nDeploymentMetrics metrics = new DeploymentMetrics(1, 2, 3, 4, 5,\nOptional.of(Instant.ofEpochMilli(123123)), Map.of());\nlockedApplication = lockedApplication.with(instance.name(),\nlockedInstance -> lockedInstance.withClusterInfo(deployment.zone(), clusterInfo)\n.with(deployment.zone(), metrics)\n.recordActivityAt(Instant.parse(\"2018-06-01T10:15:30.00Z\"), deployment.zone()));\n}\ndeploymentTester.applications().store(lockedApplication);\n}\n});\n}\n}\nprivate void setZoneInRotation(String rotationName, ZoneId zone) {\ntester.serviceRegistry().globalRoutingServiceMock().setStatus(rotationName, zone, com.yahoo.vespa.hosted.controller.api.integration.routing.RotationStatus.IN);\nnew RotationStatusUpdater(tester.controller(), Duration.ofDays(1), new JobControl(tester.controller().curator())).run();\n}\nprivate void updateContactInformation() {\nContact contact = new Contact(URI.create(\"www.contacts.tld/1234\"),\nURI.create(\"www.properties.tld/1234\"),\nURI.create(\"www.issues.tld/1234\"),\nList.of(List.of(\"alice\"), List.of(\"bob\")), \"queue\", 
Optional.empty());\ntester.controller().tenants().lockIfPresent(TenantName.from(\"tenant2\"),\nLockedTenant.Athenz.class,\nlockedTenant -> tester.controller().tenants().store(lockedTenant.with(contact)));\n}\nprivate void registerContact(long propertyId) {\nPropertyId p = new PropertyId(String.valueOf(propertyId));\ntester.serviceRegistry().contactRetrieverMock().addContact(p, new Contact(URI.create(\"www.issues.tld/\" + p.id()),\nURI.create(\"www.contacts.tld/\" + p.id()),\nURI.create(\"www.properties.tld/\" + p.id()),\nList.of(Collections.singletonList(\"alice\"),\nCollections.singletonList(\"bob\")),\n\"queue\", Optional.empty()));\n}\nprivate static class RequestBuilder implements Supplier {\nprivate final String path;\nprivate final Request.Method method;\nprivate byte[] data = new byte[0];\nprivate AthenzIdentity identity;\nprivate OktaIdentityToken oktaIdentityToken;\nprivate OktaAccessToken oktaAccessToken;\nprivate String contentType = \"application/json\";\nprivate Map> headers = new HashMap<>();\nprivate String recursive;\nprivate RequestBuilder(String path, Request.Method method) {\nthis.path = path;\nthis.method = method;\n}\nprivate RequestBuilder data(byte[] data) { this.data = data; return this; }\nprivate RequestBuilder data(String data) { return data(data.getBytes(StandardCharsets.UTF_8)); }\nprivate RequestBuilder data(MultiPartStreamer streamer) {\nreturn Exceptions.uncheck(() -> data(streamer.data().readAllBytes()).contentType(streamer.contentType()));\n}\nprivate RequestBuilder userIdentity(UserId userId) { this.identity = HostedAthenzIdentities.from(userId); return this; }\nprivate RequestBuilder screwdriverIdentity(ScrewdriverId screwdriverId) { this.identity = HostedAthenzIdentities.from(screwdriverId); return this; }\nprivate RequestBuilder oktaIdentityToken(OktaIdentityToken oktaIdentityToken) { this.oktaIdentityToken = oktaIdentityToken; return this; }\nprivate RequestBuilder oktaAccessToken(OktaAccessToken oktaAccessToken) { this.oktaAccessToken = oktaAccessToken; return this; }\nprivate RequestBuilder contentType(String contentType) { this.contentType = contentType; return this; }\nprivate RequestBuilder recursive(String recursive) { this.recursive = recursive; return this; }\nprivate RequestBuilder header(String name, String value) {\nthis.headers.putIfAbsent(name, new ArrayList<>());\nthis.headers.get(name).add(value);\nreturn this;\n}\n@Override\npublic Request get() {\nRequest request = new Request(\"http:\n(recursive == null ? 
\"\" : \"?recursive=\" + recursive),\ndata, method);\nrequest.getHeaders().addAll(headers);\nrequest.getHeaders().put(\"Content-Type\", contentType);\nif (identity != null) {\naddIdentityToRequest(request, identity);\n}\nif (oktaIdentityToken != null) {\naddOktaIdentityToken(request, oktaIdentityToken);\n}\nif (oktaAccessToken != null) {\naddOktaAccessToken(request, oktaAccessToken);\n}\nreturn request;\n}\n}\n}" + }, + { + "comment": "assertNotNull is unnecessary assertion in this case, it is better to remove it", + "method_body": "public void assertGetInstancesWithAssignedGlobalRuleBuilderClasses() {\nList configuredBuilders = new ArrayList<>(1);\nFixtureGlobalRuleBuilder builder = mock(FixtureGlobalRuleBuilder.class);\nconfiguredBuilders.add(builder);\nCollection> configuredBuilderClasses = configuredBuilders.stream().map(each -> (Class) each.getClass()).collect(Collectors.toSet());\nCollection actual = GlobalRuleBuilderFactory.getInstances(configuredBuilderClasses);\nassertNotNull(actual);\nassertThat(actual.size(), is(0));\n}", + "target_code": "assertNotNull(actual);", + "method_body_after": "public void assertGetInstancesWithAssignedGlobalRuleBuilderClasses() {\nList configuredBuilders = new ArrayList<>(1);\nFixtureGlobalRuleBuilder builder = mock(FixtureGlobalRuleBuilder.class);\nconfiguredBuilders.add(builder);\nCollection> configuredBuilderClasses = configuredBuilders.stream().map(each -> (Class) each.getClass()).collect(Collectors.toSet());\nCollection actual = GlobalRuleBuilderFactory.getInstances(configuredBuilderClasses);\nassertTrue(actual.isEmpty());\n}", + "context_before": "class GlobalRuleBuilderFactoryTest {\n@SuppressWarnings(\"rawtypes\")\n@Test\npublic void assertGetInstanceMap() {\nFixtureGlobalRuleConfiguration configuration = new FixtureGlobalRuleConfiguration();\nMap actual = GlobalRuleBuilderFactory.getInstanceMap(Collections.singletonList(configuration));\nassertNotNull(actual);\nassertFalse(actual.isEmpty());\nassertTrue(actual.containsKey(configuration));\nassertThat(actual.get(configuration), instanceOf(FixtureGlobalRuleBuilder.class));\n}\n@SuppressWarnings(\"rawtypes\")\n@Test\npublic void assertGetInstancesWithoutAssignedGlobalRuleBuilderClasses() {\nCollection actual = GlobalRuleBuilderFactory.getInstances(Collections.emptyList());\nassertNotNull(actual);\nassertThat(actual.size(), is(1));\nassertThat(actual.iterator().next(), instanceOf(FixtureGlobalRuleBuilder.class));\n}\n@SuppressWarnings({\"rawtypes\", \"unchecked\"})\n@Test\n}", + "context_after": "class GlobalRuleBuilderFactoryTest {\n@SuppressWarnings(\"rawtypes\")\n@Test\npublic void assertGetInstanceMap() {\nFixtureGlobalRuleConfiguration configuration = new FixtureGlobalRuleConfiguration();\nMap actual = GlobalRuleBuilderFactory.getInstanceMap(Collections.singletonList(configuration));\nassertThat(actual.get(configuration), instanceOf(FixtureGlobalRuleBuilder.class));\n}\n@SuppressWarnings(\"rawtypes\")\n@Test\npublic void assertGetInstancesWithoutAssignedGlobalRuleBuilderClasses() {\nCollection actual = GlobalRuleBuilderFactory.getInstances(Collections.emptyList());\nassertThat(actual.size(), is(1));\nassertThat(actual.iterator().next(), instanceOf(FixtureGlobalRuleBuilder.class));\n}\n@SuppressWarnings({\"rawtypes\", \"unchecked\"})\n@Test\n}" + }, + { + "comment": "Same as https://github.com/ballerina-platform/ballerina-lang/pull/35135#discussion_r819257450", + "method_body": "public void visit(BLangExpressionStmt exprStmtNode, AnalyzerData data) {\nSymbolEnv env = data.env;\nSymbolEnv 
stmtEnv = new SymbolEnv(exprStmtNode, env.scope);\nenv.copyTo(stmtEnv);\nBLangExpression expr = exprStmtNode.expr;\nBType bType = typeChecker.checkExpr(expr, stmtEnv, symTable.noType, data.prevEnvs);\nif (bType != symTable.nilType && bType != symTable.semanticError &&\nexpr.getKind() != NodeKind.FAIL &&\n!types.isNeverTypeOrStructureTypeWithARequiredNeverMember(bType)) {\ndlog.error(exprStmtNode.pos, DiagnosticErrorCode.ASSIGNMENT_REQUIRED, bType);\n} else if (expr.getKind() == NodeKind.INVOCATION &&\ntypes.isNeverTypeOrStructureTypeWithARequiredNeverMember(expr.getBType())) {\ndata.notCompletedNormally = true;\n}\nvalidateWorkerAnnAttachments(exprStmtNode.expr, data);\n}", + "target_code": "BType bType = typeChecker.checkExpr(expr, stmtEnv, symTable.noType, data.prevEnvs);", + "method_body_after": "public void visit(BLangExpressionStmt exprStmtNode, AnalyzerData data) {\nSymbolEnv currentEnv = data.env;\nSymbolEnv stmtEnv = new SymbolEnv(exprStmtNode, currentEnv.scope);\ncurrentEnv.copyTo(stmtEnv);\nBLangExpression expr = exprStmtNode.expr;\nBType bType = typeChecker.checkExpr(expr, stmtEnv, data.prevEnvs);\nif (bType != symTable.nilType && bType != symTable.semanticError &&\nexpr.getKind() != NodeKind.FAIL &&\n!types.isNeverTypeOrStructureTypeWithARequiredNeverMember(bType)) {\ndlog.error(exprStmtNode.pos, DiagnosticErrorCode.ASSIGNMENT_REQUIRED, bType);\n} else if (expr.getKind() == NodeKind.INVOCATION &&\ntypes.isNeverTypeOrStructureTypeWithARequiredNeverMember(expr.getBType())) {\ndata.notCompletedNormally = true;\n}\nvalidateWorkerAnnAttachments(exprStmtNode.expr, data);\n}", + "context_before": "class representing a service-decl or object-ctor with service prefix\nAttachPoint.Point attachedPoint;\nSet flagSet = classDefinition.flagSet;\nif (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) {\nattachedPoint = AttachPoint.Point.SERVICE;\n}", + "context_after": "class representing a service-decl or object-ctor with service prefix\nAttachPoint.Point attachedPoint;\nSet flagSet = classDefinition.flagSet;\nif (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) {\nattachedPoint = AttachPoint.Point.SERVICE;\n}" + }, + { + "comment": "Gotcha, thanks", + "method_body": "private int computeCapacity(SectionBlock block) {\nint ret = 0;\nfor (TemplateNode node : block.nodes) {\nif (Parser.isDummyNode(node)) {\ncontinue;\n}\nif (node.isText()) {\nret += node.asText().getValue().length();\n} else if (node.isExpression()) {\nret += 10;\n} else if (node.isSection()) {\nSectionHelper helper = node.asSection().getHelper();\nif (LoopSectionHelper.class.isInstance(helper)) {\nret += 10 * computeCapacity(node.asSection().blocks.get(0));\n} else if (IncludeSectionHelper.class.isInstance(helper)) {\nret += 500;\n} else if (UserTagSectionHelper.class.isInstance(helper)) {\nret += 200;\n} else {\nfor (SectionBlock b : node.asSection().blocks) {\nret += computeCapacity(b);\n}\n}\n}\n}\nreturn ret;\n}", + "target_code": "for (TemplateNode node : block.nodes) {", + "method_body_after": "private int computeCapacity(SectionBlock block) {\nint ret = 0;\nfor (TemplateNode node : block.nodes) {\nif (Parser.isDummyNode(node)) {\ncontinue;\n}\nif (node.isText()) {\nret += node.asText().getValue().length();\n} else if (node.isExpression()) {\nret += 10;\n} else if (node.isSection()) {\nSectionHelper helper = node.asSection().getHelper();\nif (LoopSectionHelper.class.isInstance(helper)) {\nret += 10 * computeCapacity(node.asSection().blocks.get(0));\n} else if 
(IncludeSectionHelper.class.isInstance(helper)) {\nret += 500;\n} else if (UserTagSectionHelper.class.isInstance(helper)) {\nret += 200;\n} else {\nfor (SectionBlock b : node.asSection().blocks) {\nret += computeCapacity(b);\n}\n}\n}\n}\nreturn ret;\n}", + "context_before": "class Capacity {\nstatic final int LIMIT = 64 * 1024;\nfinal int computed;\nint max;\nCapacity() {\nthis.computed = Math.min(computeCapacity(root.blocks.get(0)), LIMIT);\n}\nvoid update(int length) {\nif (length > max) {\nmax = length < LIMIT ? length : LIMIT;\n}\n}\nint get() {\nreturn Math.max(max, computed);\n}\n}", + "context_after": "class Capacity {\nstatic final int LIMIT = 64 * 1024;\nfinal int computed;\nint max;\nCapacity() {\nthis.computed = Math.min(computeCapacity(root.blocks.get(0)), LIMIT);\n}\nvoid update(int length) {\nif (length > max) {\nmax = length < LIMIT ? length : LIMIT;\n}\n}\nint get() {\nreturn Math.max(max, computed);\n}\n}" + }, + { + "comment": "`LocalInputChannel` only spills when it awaits a barrier. So it spills the buffer on first sight, and it cannot be done better at the downstream level. We could of course also move spilling of lingering buffers to the upstream. It might also be an improvement for later, but it adds quite a bit of complexity as barriers also need to be propagated upstream.", + "method_body": "Optional getNextBuffer() throws IOException {\ncheckError();\nResultSubpartitionView subpartitionView = this.subpartitionView;\nif (subpartitionView == null) {\nif (isReleased) {\nreturn Optional.empty();\n}\nsubpartitionView = checkAndWaitForSubpartitionView();\n}\nBufferAndBacklog next = subpartitionView.getNextBuffer();\nif (next == null) {\nif (subpartitionView.isReleased()) {\nthrow new CancelTaskException(\"Consumed partition \" + subpartitionView + \" has been released.\");\n} else {\nreturn Optional.empty();\n}\n}\nBuffer buffer = next.buffer();\nnumBytesIn.inc(buffer.getSize());\nnumBuffersIn.inc();\nif (buffer.isBuffer()) {\nfor (final long barrierId : pendingCheckpointBarriers) {\nchannelStateWriter.addInputData(\nbarrierId,\ngetChannelInfo(),\nChannelStateWriter.SEQUENCE_NUMBER_UNKNOWN,\nCloseableIterator.ofElement(buffer.retainBuffer(), Buffer::recycleBuffer));\n}\n} else if (buffer.getDataType().hasPriority()) {\nfinal AbstractEvent priorityEvent = parsePriorityEvent(buffer);\nif (priorityEvent instanceof CheckpointBarrier) {\nfinal long barrierId = ((CheckpointBarrier) priorityEvent).getId();\nseenCheckpointBarriers.add(barrierId);\npendingCheckpointBarriers.remove(barrierId);\n}\n}\nreturn Optional.of(new BufferAndAvailability(buffer, next.getNextDataType(), next.buffersInBacklog()));\n}", + "target_code": "CloseableIterator.ofElement(buffer.retainBuffer(), Buffer::recycleBuffer));", + "method_body_after": "Optional getNextBuffer() throws IOException {\ncheckError();\nResultSubpartitionView subpartitionView = this.subpartitionView;\nif (subpartitionView == null) {\nif (isReleased) {\nreturn Optional.empty();\n}\nsubpartitionView = checkAndWaitForSubpartitionView();\n}\nBufferAndBacklog next = subpartitionView.getNextBuffer();\nif (next == null) {\nif (subpartitionView.isReleased()) {\nthrow new CancelTaskException(\"Consumed partition \" + subpartitionView + \" has been released.\");\n} else {\nreturn Optional.empty();\n}\n}\nBuffer buffer = next.buffer();\nnumBytesIn.inc(buffer.getSize());\nnumBuffersIn.inc();\nif (buffer.getDataType().hasPriority()) {\nchannelStatePersister.checkForBarrier(buffer);\n} else {\nchannelStatePersister.maybePersist(buffer);\n}\nreturn 
Optional.of(new BufferAndAvailability(\nbuffer,\nnext.getNextDataType(),\nnext.buffersInBacklog(),\nnext.getSequenceNumber()));\n}", + "context_before": "class LocalInputChannel extends InputChannel implements BufferAvailabilityListener {\nprivate static final Logger LOG = LoggerFactory.getLogger(LocalInputChannel.class);\nprivate final Object requestLock = new Object();\n/** The local partition manager. */\nprivate final ResultPartitionManager partitionManager;\n/** Task event dispatcher for backwards events. */\nprivate final TaskEventPublisher taskEventPublisher;\n/** The consumed subpartition. */\nprivate volatile ResultSubpartitionView subpartitionView;\nprivate volatile boolean isReleased;\n/** All started checkpoints where a barrier has not been received yet. */\nprivate Deque pendingCheckpointBarriers = new ArrayDeque<>(2);\nprivate Deque seenCheckpointBarriers = new ArrayDeque<>(2);\npublic LocalInputChannel(\nSingleInputGate inputGate,\nint channelIndex,\nResultPartitionID partitionId,\nResultPartitionManager partitionManager,\nTaskEventPublisher taskEventPublisher,\nCounter numBytesIn,\nCounter numBuffersIn) {\nthis(inputGate, channelIndex, partitionId, partitionManager, taskEventPublisher, 0, 0, numBytesIn, numBuffersIn);\n}\npublic LocalInputChannel(\nSingleInputGate inputGate,\nint channelIndex,\nResultPartitionID partitionId,\nResultPartitionManager partitionManager,\nTaskEventPublisher taskEventPublisher,\nint initialBackoff,\nint maxBackoff,\nCounter numBytesIn,\nCounter numBuffersIn) {\nsuper(inputGate, channelIndex, partitionId, initialBackoff, maxBackoff, numBytesIn, numBuffersIn);\nthis.partitionManager = checkNotNull(partitionManager);\nthis.taskEventPublisher = checkNotNull(taskEventPublisher);\n}\npublic void checkpointStarted(CheckpointBarrier barrier) {\ncheckState(channelStateWriter != null, \"Channel state writer not injected\");\nif (!seenCheckpointBarriers.contains(barrier.getId())) {\npendingCheckpointBarriers.add(barrier.getId());\n}\n}\npublic void checkpointStopped(long checkpointId) {\npendingCheckpointBarriers.remove(checkpointId);\nseenCheckpointBarriers.remove(checkpointId);\n}\n@Override\nprotected void requestSubpartition(int subpartitionIndex) throws IOException {\nboolean retriggerRequest = false;\nsynchronized (requestLock) {\ncheckState(!isReleased, \"LocalInputChannel has been released already\");\nif (subpartitionView == null) {\nLOG.debug(\"{}: Requesting LOCAL subpartition {} of partition {}.\",\nthis, subpartitionIndex, partitionId);\ntry {\nResultSubpartitionView subpartitionView = partitionManager.createSubpartitionView(\npartitionId, subpartitionIndex, this);\nif (subpartitionView == null) {\nthrow new IOException(\"Error requesting subpartition.\");\n}\nthis.subpartitionView = subpartitionView;\nif (isReleased) {\nsubpartitionView.releaseAllResources();\nthis.subpartitionView = null;\n}\n} catch (PartitionNotFoundException notFound) {\nif (increaseBackoff()) {\nretriggerRequest = true;\n} else {\nthrow notFound;\n}\n}\n}\n}\nif (retriggerRequest) {\ninputGate.retriggerPartitionRequest(partitionId.getPartitionId());\n}\n}\n/**\n* Retriggers a subpartition request.\n*/\nvoid retriggerSubpartitionRequest(Timer timer, final int subpartitionIndex) {\nsynchronized (requestLock) {\ncheckState(subpartitionView == null, \"already requested partition\");\ntimer.schedule(new TimerTask() {\n@Override\npublic void run() {\ntry {\nrequestSubpartition(subpartitionIndex);\n} catch (Throwable t) {\nsetError(t);\n}\n}\n}, 
getCurrentBackoff());\n}\n}\n@Override\n@Override\npublic void notifyDataAvailable() {\nnotifyChannelNonEmpty();\n}\nprivate ResultSubpartitionView checkAndWaitForSubpartitionView() {\nsynchronized (requestLock) {\ncheckState(!isReleased, \"released\");\ncheckState(subpartitionView != null, \"Queried for a buffer before requesting the subpartition.\");\nreturn subpartitionView;\n}\n}\n@Override\npublic void resumeConsumption() {\ncheckState(!isReleased, \"Channel released.\");\nsubpartitionView.resumeConsumption();\nif (subpartitionView.isAvailable(Integer.MAX_VALUE)) {\nnotifyChannelNonEmpty();\n}\n}\n@Override\nvoid sendTaskEvent(TaskEvent event) throws IOException {\ncheckError();\ncheckState(subpartitionView != null, \"Tried to send task event to producer before requesting the subpartition.\");\nif (!taskEventPublisher.publish(partitionId, event)) {\nthrow new IOException(\"Error while publishing event \" + event + \" to producer. The producer could not be found.\");\n}\n}\n@Override\nboolean isReleased() {\nreturn isReleased;\n}\n/**\n* Releases the partition reader.\n*/\n@Override\nvoid releaseAllResources() throws IOException {\nif (!isReleased) {\nisReleased = true;\nResultSubpartitionView view = subpartitionView;\nif (view != null) {\nview.releaseAllResources();\nsubpartitionView = null;\n}\n}\n}\n@Override\npublic int unsynchronizedGetNumberOfQueuedBuffers() {\nResultSubpartitionView view = subpartitionView;\nif (view != null) {\nreturn view.unsynchronizedGetNumberOfQueuedBuffers();\n}\nreturn 0;\n}\n@Override\npublic String toString() {\nreturn \"LocalInputChannel [\" + partitionId + \"]\";\n}\n@VisibleForTesting\nResultSubpartitionView getSubpartitionView() {\nreturn subpartitionView;\n}\n}", + "context_after": "class LocalInputChannel extends InputChannel implements BufferAvailabilityListener, ChannelStateHolder {\nprivate static final Logger LOG = LoggerFactory.getLogger(LocalInputChannel.class);\nprivate final Object requestLock = new Object();\n/** The local partition manager. */\nprivate final ResultPartitionManager partitionManager;\n/** Task event dispatcher for backwards events. */\nprivate final TaskEventPublisher taskEventPublisher;\n/** The consumed subpartition. 
*/\nprivate volatile ResultSubpartitionView subpartitionView;\nprivate volatile boolean isReleased;\nprivate ChannelStatePersister channelStatePersister = new ChannelStatePersister(null);\npublic LocalInputChannel(\nSingleInputGate inputGate,\nint channelIndex,\nResultPartitionID partitionId,\nResultPartitionManager partitionManager,\nTaskEventPublisher taskEventPublisher,\nCounter numBytesIn,\nCounter numBuffersIn) {\nthis(inputGate, channelIndex, partitionId, partitionManager, taskEventPublisher, 0, 0, numBytesIn, numBuffersIn);\n}\npublic LocalInputChannel(\nSingleInputGate inputGate,\nint channelIndex,\nResultPartitionID partitionId,\nResultPartitionManager partitionManager,\nTaskEventPublisher taskEventPublisher,\nint initialBackoff,\nint maxBackoff,\nCounter numBytesIn,\nCounter numBuffersIn) {\nsuper(inputGate, channelIndex, partitionId, initialBackoff, maxBackoff, numBytesIn, numBuffersIn);\nthis.partitionManager = checkNotNull(partitionManager);\nthis.taskEventPublisher = checkNotNull(taskEventPublisher);\n}\npublic void setChannelStateWriter(ChannelStateWriter channelStateWriter) {\ncheckState(!channelStatePersister.isInitialized(), \"Already initialized\");\nchannelStatePersister = new ChannelStatePersister(checkNotNull(channelStateWriter));\n}\npublic void checkpointStarted(CheckpointBarrier barrier) {\nchannelStatePersister.startPersisting(barrier.getId(), Collections.emptyList());\n}\npublic void checkpointStopped(long checkpointId) {\nchannelStatePersister.stopPersisting();\n}\n@Override\nprotected void requestSubpartition(int subpartitionIndex) throws IOException {\nboolean retriggerRequest = false;\nsynchronized (requestLock) {\ncheckState(!isReleased, \"LocalInputChannel has been released already\");\nif (subpartitionView == null) {\nLOG.debug(\"{}: Requesting LOCAL subpartition {} of partition {}.\",\nthis, subpartitionIndex, partitionId);\ntry {\nResultSubpartitionView subpartitionView = partitionManager.createSubpartitionView(\npartitionId, subpartitionIndex, this);\nif (subpartitionView == null) {\nthrow new IOException(\"Error requesting subpartition.\");\n}\nthis.subpartitionView = subpartitionView;\nif (isReleased) {\nsubpartitionView.releaseAllResources();\nthis.subpartitionView = null;\n}\n} catch (PartitionNotFoundException notFound) {\nif (increaseBackoff()) {\nretriggerRequest = true;\n} else {\nthrow notFound;\n}\n}\n}\n}\nif (retriggerRequest) {\ninputGate.retriggerPartitionRequest(partitionId.getPartitionId());\n}\n}\n/**\n* Retriggers a subpartition request.\n*/\nvoid retriggerSubpartitionRequest(Timer timer, final int subpartitionIndex) {\nsynchronized (requestLock) {\ncheckState(subpartitionView == null, \"already requested partition\");\ntimer.schedule(new TimerTask() {\n@Override\npublic void run() {\ntry {\nrequestSubpartition(subpartitionIndex);\n} catch (Throwable t) {\nsetError(t);\n}\n}\n}, getCurrentBackoff());\n}\n}\n@Override\n@Override\npublic void notifyDataAvailable() {\nnotifyChannelNonEmpty();\n}\nprivate ResultSubpartitionView checkAndWaitForSubpartitionView() {\nsynchronized (requestLock) {\ncheckState(!isReleased, \"released\");\ncheckState(subpartitionView != null, \"Queried for a buffer before requesting the subpartition.\");\nreturn subpartitionView;\n}\n}\n@Override\npublic void resumeConsumption() {\ncheckState(!isReleased, \"Channel released.\");\nsubpartitionView.resumeConsumption();\nif (subpartitionView.isAvailable(Integer.MAX_VALUE)) {\nnotifyChannelNonEmpty();\n}\n}\n@Override\nvoid sendTaskEvent(TaskEvent event) throws 
IOException {\ncheckError();\ncheckState(subpartitionView != null, \"Tried to send task event to producer before requesting the subpartition.\");\nif (!taskEventPublisher.publish(partitionId, event)) {\nthrow new IOException(\"Error while publishing event \" + event + \" to producer. The producer could not be found.\");\n}\n}\n@Override\nboolean isReleased() {\nreturn isReleased;\n}\n/**\n* Releases the partition reader.\n*/\n@Override\nvoid releaseAllResources() throws IOException {\nif (!isReleased) {\nisReleased = true;\nResultSubpartitionView view = subpartitionView;\nif (view != null) {\nview.releaseAllResources();\nsubpartitionView = null;\n}\n}\n}\n@Override\npublic int unsynchronizedGetNumberOfQueuedBuffers() {\nResultSubpartitionView view = subpartitionView;\nif (view != null) {\nreturn view.unsynchronizedGetNumberOfQueuedBuffers();\n}\nreturn 0;\n}\n@Override\npublic String toString() {\nreturn \"LocalInputChannel [\" + partitionId + \"]\";\n}\n@VisibleForTesting\nResultSubpartitionView getSubpartitionView() {\nreturn subpartitionView;\n}\n}" + }, + { + "comment": "`Connection` and `Channel` are not `AutoCloseable`. I'm adding a try/catch anyway.", + "method_body": "public void testReadQueue() throws Exception {\nfinal int maxNumRecords = 10;\nPCollection raw =\np.apply(\nRabbitMqIO.read()\n.withUri(\"amqp:\n.withQueue(\"READ\")\n.withMaxNumRecords(maxNumRecords));\nPCollection output = raw.apply(ParDo.of(new ConverterFn()));\nList records = generateRecords(maxNumRecords);\nPAssert.that(output).containsInAnyOrder(records);\nConnectionFactory connectionFactory = new ConnectionFactory();\nconnectionFactory.setUri(\"amqp:\nConnection connection = connectionFactory.newConnection();\nChannel channel = connection.createChannel();\nchannel.queueDeclare(\"READ\", false, false, false, null);\nfor (byte[] record : records) {\nchannel.basicPublish(\"\", \"READ\", null, record);\n}\np.run();\nchannel.close();\nconnection.close();\n}", + "target_code": "channel.close();", + "method_body_after": "public void testReadQueue() throws Exception {\nfinal int maxNumRecords = 10;\nPCollection raw =\np.apply(\nRabbitMqIO.read()\n.withUri(\"amqp:\n.withQueue(\"READ\")\n.withMaxNumRecords(maxNumRecords));\nPCollection output =\nraw.apply(\nMapElements.into(TypeDescriptors.strings())\n.via(\n(RabbitMqMessage message) ->\nnew String(message.getBody(), StandardCharsets.UTF_8)));\nList records =\ngenerateRecords(maxNumRecords)\n.stream()\n.map(record -> new String(record, StandardCharsets.UTF_8))\n.collect(Collectors.toList());\nPAssert.that(output).containsInAnyOrder(records);\nConnectionFactory connectionFactory = new ConnectionFactory();\nconnectionFactory.setUri(\"amqp:\nConnection connection = null;\nChannel channel = null;\ntry {\nconnection = connectionFactory.newConnection();\nchannel = connection.createChannel();\nchannel.queueDeclare(\"READ\", false, false, false, null);\nfor (String record : records) {\nchannel.basicPublish(\"\", \"READ\", null, record.getBytes(StandardCharsets.UTF_8));\n}\np.run();\n} finally {\nif (channel != null) {\nchannel.close();\n}\nif (connection != null) {\nconnection.close();\n}\n}\n}", + "context_before": "class RabbitMqIOTest implements Serializable {\nprivate static final Logger LOG = LoggerFactory.getLogger(RabbitMqIOTest.class);\nprivate static int port;\n@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();\n@Rule public transient TestPipeline p = TestPipeline.create();\nprivate static transient Broker broker;\n@BeforeClass\npublic 
static void startBroker() throws Exception {\ntry (ServerSocket serverSocket = new ServerSocket(0)) {\nport = serverSocket.getLocalPort();\n}\nSystem.setProperty(\"derby.stream.error.field\", \"MyApp.DEV_NULL\");\nbroker = new Broker();\nBrokerOptions options = new BrokerOptions();\noptions.setConfigProperty(BrokerOptions.QPID_AMQP_PORT, String.valueOf(port));\noptions.setConfigProperty(BrokerOptions.QPID_WORK_DIR, temporaryFolder.newFolder().toString());\noptions.setConfigProperty(BrokerOptions.QPID_HOME_DIR, \"src/test/qpid\");\nbroker.startup(options);\n}\n@AfterClass\npublic static void stopBroker() {\nbroker.shutdown();\n}\n@Test\n@Test(timeout = 60 * 1000)\npublic void testReadExchange() throws Exception {\nfinal int maxNumRecords = 10;\nPCollection raw =\np.apply(\nRabbitMqIO.read()\n.withUri(\"amqp:\n.withExchange(\"READEXCHANGE\", \"fanout\", \"test\")\n.withMaxNumRecords(maxNumRecords));\nPCollection output = raw.apply(ParDo.of(new ConverterFn()));\nList records = generateRecords(maxNumRecords);\nPAssert.that(output).containsInAnyOrder(records);\nConnectionFactory connectionFactory = new ConnectionFactory();\nconnectionFactory.setUri(\"amqp:\nConnection connection = connectionFactory.newConnection();\nfinal Channel channel = connection.createChannel();\nchannel.exchangeDeclare(\"READEXCHANGE\", \"fanout\");\nThread publisher =\nnew Thread(\n() -> {\ntry {\nThread.sleep(5000);\n} catch (Exception e) {\nLOG.error(e.getMessage(), e);\n}\nfor (int i = 0; i < 10; i++) {\ntry {\nchannel.basicPublish(\n\"READEXCHANGE\", \"test\", null, (\"Test \" + i).getBytes(StandardCharsets.UTF_8));\n} catch (Exception e) {\nLOG.error(e.getMessage(), e);\n}\n}\n});\npublisher.start();\np.run();\npublisher.join();\nchannel.close();\nconnection.close();\n}\n@Test\npublic void testWriteQueue() throws Exception {\nfinal int maxNumRecords = 1000;\nList data =\nIntStream.range(0, maxNumRecords)\n.mapToObj(i -> new RabbitMqMessage((\"Test \" + i).getBytes(StandardCharsets.UTF_8)))\n.collect(Collectors.toList());\np.apply(Create.of(data))\n.apply(\nRabbitMqIO.write().withUri(\"amqp:\nfinal List received = new ArrayList<>();\nConnectionFactory connectionFactory = new ConnectionFactory();\nconnectionFactory.setUri(\"amqp:\nConnection connection = connectionFactory.newConnection();\nChannel channel = connection.createChannel();\nchannel.queueDeclare(\"TEST\", true, false, false, null);\nConsumer consumer =\nnew DefaultConsumer(channel) {\n@Override\npublic void handleDelivery(\nString consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body)\nthrows IOException {\nString message = new String(body, \"UTF-8\");\nreceived.add(message);\n}\n};\nchannel.basicConsume(\"TEST\", true, consumer);\np.run();\nwhile (received.size() < maxNumRecords) {\nThread.sleep(500);\n}\nassertEquals(maxNumRecords, received.size());\nfor (int i = 0; i < maxNumRecords; i++) {\nassertTrue(received.contains(\"Test \" + i));\n}\nchannel.close();\nconnection.close();\n}\n@Test\npublic void testWriteExchange() throws Exception {\nfinal int maxNumRecords = 1000;\nList data =\nIntStream.range(0, maxNumRecords)\n.mapToObj(i -> new RabbitMqMessage((\"Test \" + i).getBytes(StandardCharsets.UTF_8)))\n.collect(Collectors.toList());\np.apply(Create.of(data))\n.apply(\nRabbitMqIO.write()\n.withUri(\"amqp:\n.withExchange(\"WRITE\", \"fanout\"));\nfinal List received = new ArrayList<>();\nConnectionFactory connectionFactory = new ConnectionFactory();\nconnectionFactory.setUri(\"amqp:\nConnection connection = 
connectionFactory.newConnection();\nChannel channel = connection.createChannel();\nchannel.exchangeDeclare(\"WRITE\", \"fanout\");\nString queueName = channel.queueDeclare().getQueue();\nchannel.queueBind(queueName, \"WRITE\", \"\");\nConsumer consumer =\nnew DefaultConsumer(channel) {\n@Override\npublic void handleDelivery(\nString consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body)\nthrows IOException {\nString message = new String(body, \"UTF-8\");\nreceived.add(message);\n}\n};\nchannel.basicConsume(queueName, true, consumer);\np.run();\nwhile (received.size() < maxNumRecords) {\nThread.sleep(500);\n}\nassertEquals(maxNumRecords, received.size());\nfor (int i = 0; i < maxNumRecords; i++) {\nassertTrue(received.contains(\"Test \" + i));\n}\nchannel.close();\nconnection.close();\n}\nprivate static class ConverterFn extends DoFn {\nConverterFn() {}\n@ProcessElement\npublic void processElement(ProcessContext c) {\nRabbitMqMessage message = c.element();\nc.output(message.getBody());\n}\n}\nprivate static List generateRecords(int maxNumRecords) {\nreturn IntStream.range(0, maxNumRecords)\n.mapToObj(i -> (\"Test \" + i).getBytes(StandardCharsets.UTF_8))\n.collect(Collectors.toList());\n}\n}", + "context_after": "class RabbitMqIOTest implements Serializable {\nprivate static final Logger LOG = LoggerFactory.getLogger(RabbitMqIOTest.class);\nprivate static int port;\n@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();\n@Rule public transient TestPipeline p = TestPipeline.create();\nprivate static transient Broker broker;\n@BeforeClass\npublic static void startBroker() throws Exception {\ntry (ServerSocket serverSocket = new ServerSocket(0)) {\nport = serverSocket.getLocalPort();\n}\nSystem.setProperty(\"derby.stream.error.field\", \"MyApp.DEV_NULL\");\nbroker = new Broker();\nBrokerOptions options = new BrokerOptions();\noptions.setConfigProperty(BrokerOptions.QPID_AMQP_PORT, String.valueOf(port));\noptions.setConfigProperty(BrokerOptions.QPID_WORK_DIR, temporaryFolder.newFolder().toString());\noptions.setConfigProperty(BrokerOptions.QPID_HOME_DIR, \"src/test/qpid\");\nbroker.startup(options);\n}\n@AfterClass\npublic static void stopBroker() {\nbroker.shutdown();\n}\n@Test\n@Test(timeout = 60 * 1000)\npublic void testReadExchange() throws Exception {\nfinal int maxNumRecords = 10;\nPCollection raw =\np.apply(\nRabbitMqIO.read()\n.withUri(\"amqp:\n.withExchange(\"READEXCHANGE\", \"fanout\", \"test\")\n.withMaxNumRecords(maxNumRecords));\nPCollection output =\nraw.apply(\nMapElements.into(TypeDescriptors.strings())\n.via(\n(RabbitMqMessage message) ->\nnew String(message.getBody(), StandardCharsets.UTF_8)));\nList records =\ngenerateRecords(maxNumRecords)\n.stream()\n.map(record -> new String(record, StandardCharsets.UTF_8))\n.collect(Collectors.toList());\nPAssert.that(output).containsInAnyOrder(records);\nConnectionFactory connectionFactory = new ConnectionFactory();\nconnectionFactory.setUri(\"amqp:\nConnection connection = null;\nChannel channel = null;\ntry {\nconnection = connectionFactory.newConnection();\nchannel = connection.createChannel();\nchannel.exchangeDeclare(\"READEXCHANGE\", \"fanout\");\nChannel finalChannel = channel;\nThread publisher =\nnew Thread(\n() -> {\ntry {\nThread.sleep(5000);\n} catch (Exception e) {\nLOG.error(e.getMessage(), e);\n}\nfor (int i = 0; i < maxNumRecords; i++) {\ntry {\nfinalChannel.basicPublish(\n\"READEXCHANGE\",\n\"test\",\nnull,\n(\"Test \" + i).getBytes(StandardCharsets.UTF_8));\n} catch 
(Exception e) {\nLOG.error(e.getMessage(), e);\n}\n}\n});\npublisher.start();\np.run();\npublisher.join();\n} finally {\nif (channel != null) {\nchannel.close();\n}\nif (connection != null) {\nconnection.close();\n}\n}\n}\n@Test\npublic void testWriteQueue() throws Exception {\nfinal int maxNumRecords = 1000;\nList data =\ngenerateRecords(maxNumRecords)\n.stream()\n.map(bytes -> new RabbitMqMessage(bytes))\n.collect(Collectors.toList());\np.apply(Create.of(data))\n.apply(\nRabbitMqIO.write().withUri(\"amqp:\nfinal List received = new ArrayList<>();\nConnectionFactory connectionFactory = new ConnectionFactory();\nconnectionFactory.setUri(\"amqp:\nConnection connection = null;\nChannel channel = null;\ntry {\nconnection = connectionFactory.newConnection();\nchannel = connection.createChannel();\nchannel.queueDeclare(\"TEST\", true, false, false, null);\nConsumer consumer = new TestConsumer(channel, received);\nchannel.basicConsume(\"TEST\", true, consumer);\np.run();\nwhile (received.size() < maxNumRecords) {\nThread.sleep(500);\n}\nassertEquals(maxNumRecords, received.size());\nfor (int i = 0; i < maxNumRecords; i++) {\nassertTrue(received.contains(\"Test \" + i));\n}\n} finally {\nif (channel != null) {\nchannel.close();\n}\nif (connection != null) {\nconnection.close();\n}\n}\n}\n@Test\npublic void testWriteExchange() throws Exception {\nfinal int maxNumRecords = 1000;\nList data =\ngenerateRecords(maxNumRecords)\n.stream()\n.map(bytes -> new RabbitMqMessage(bytes))\n.collect(Collectors.toList());\np.apply(Create.of(data))\n.apply(\nRabbitMqIO.write()\n.withUri(\"amqp:\n.withExchange(\"WRITE\", \"fanout\"));\nfinal List received = new ArrayList<>();\nConnectionFactory connectionFactory = new ConnectionFactory();\nconnectionFactory.setUri(\"amqp:\nConnection connection = null;\nChannel channel = null;\ntry {\nconnection = connectionFactory.newConnection();\nchannel = connection.createChannel();\nchannel.exchangeDeclare(\"WRITE\", \"fanout\");\nString queueName = channel.queueDeclare().getQueue();\nchannel.queueBind(queueName, \"WRITE\", \"\");\nConsumer consumer = new TestConsumer(channel, received);\nchannel.basicConsume(queueName, true, consumer);\np.run();\nwhile (received.size() < maxNumRecords) {\nThread.sleep(500);\n}\nassertEquals(maxNumRecords, received.size());\nfor (int i = 0; i < maxNumRecords; i++) {\nassertTrue(received.contains(\"Test \" + i));\n}\n} finally {\nif (channel != null) {\nchannel.close();\n}\nif (connection != null) {\nconnection.close();\n}\n}\n}\nprivate static List generateRecords(int maxNumRecords) {\nreturn IntStream.range(0, maxNumRecords)\n.mapToObj(i -> (\"Test \" + i).getBytes(StandardCharsets.UTF_8))\n.collect(Collectors.toList());\n}\nprivate static class TestConsumer extends DefaultConsumer {\nprivate final List received;\npublic TestConsumer(Channel channel, List received) {\nsuper(channel);\nthis.received = received;\n}\n@Override\npublic void handleDelivery(\nString consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body)\nthrows IOException {\nString message = new String(body, \"UTF-8\");\nreceived.add(message);\n}\n}\n}" + }, + { + "comment": "In async samples, we don't generally use `block()`.", + "method_body": "public static void main(String[] args) throws InterruptedException {\nString azureOpenaiKey = \"{azure-open-ai-key}\";\nString endpoint = \"{azure-open-ai-endpoint}\";\nString deploymentOrModelId = \"{azure-open-ai-deployment-model-id}\";\nOpenAIAsyncClient client = new 
OpenAIClientBuilder()\n.endpoint(endpoint)\n.credential(new AzureKeyCredential(azureOpenaiKey))\n.buildAsyncClient();\nString prompt = \"Tell me 3 facts about pineapples\";\nSinks.Empty completionSink = Sinks.empty();\nclient.getCompletions(deploymentOrModelId, prompt)\n.timeout(Duration.ofSeconds(10))\n.subscribe(\ncompletions -> {\nfor (Choice choice : completions.getChoices()) {\nSystem.out.printf(\"%s.%n\", choice.getText());\n}\n},\nerror -> System.err.println(\"There was an error getting completions.\" + error),\n() -> completionSink.emitEmpty(Sinks.EmitFailureHandler.FAIL_FAST)\n);\ncompletionSink.asMono().block();\n}", + "target_code": "completionSink.asMono().block();", + "method_body_after": "public static void main(String[] args) throws InterruptedException {\nString azureOpenAIKey = \"{azure-open-ai-key}\";\nString endpoint = \"{azure-open-ai-endpoint}\";\nString deploymentOrModelId = \"{azure-open-ai-deployment-model-id}\";\nOpenAIAsyncClient client = new OpenAIClientBuilder()\n.endpoint(endpoint)\n.credential(new AzureKeyCredential(azureOpenAIKey))\n.buildAsyncClient();\nString prompt = \"Tell me 3 facts about pineapples\";\nclient.getCompletions(deploymentOrModelId, prompt)\n.subscribe(\ncompletions -> {\nfor (Choice choice : completions.getChoices()) {\nSystem.out.printf(\"%s.%n\", choice.getText());\n}\n},\nerror -> System.err.println(\"There was an error getting completions.\" + error),\n() -> System.out.println(\"Completed called getCompletions.\")\n);\nTimeUnit.SECONDS.sleep(10);\n}", + "context_before": "class GetCompletionsFromPromptAsync {\n/**\n* The sample will return the text choices that are generated based on the prompt provided by the user\n*\n* @param args Unused. Arguments to the program.\n*/\n}", + "context_after": "class GetCompletionsFromPromptAsync {\n/**\n* The sample will return the text choices that are generated based on the prompt provided by the user\n*\n* @param args Unused. 
Arguments to the program.\n*/\n}" + }, + { + "comment": "It is inconsistent to use `absoluteTimeMillis()` here and `relativeTimeMillis()` within `rescheduleTrigger()`.", + "method_body": "public void setIsProcessingBacklog(OperatorID operatorID, boolean isProcessingBacklog) {\nif (isProcessingBacklog) {\nbacklogOperators.add(operatorID);\n} else {\nbacklogOperators.remove(operatorID);\n}\nif (getCurrentCheckpointInterval() != Long.MAX_VALUE) {\nlong newNextCheckpointTriggeringTime =\nclock.absoluteTimeMillis() + getCurrentCheckpointInterval();\nif (newNextCheckpointTriggeringTime < nextCheckpointTriggeringRelativeTime) {\nrescheduleTrigger(getCurrentCheckpointInterval());\n}\n}\n}", + "target_code": "clock.absoluteTimeMillis() + getCurrentCheckpointInterval();", + "method_body_after": "public void setIsProcessingBacklog(OperatorID operatorID, boolean isProcessingBacklog) {\nsynchronized (lock) {\nif (isProcessingBacklog) {\nbacklogOperators.add(operatorID);\n} else {\nbacklogOperators.remove(operatorID);\n}\nlong currentCheckpointInterval = getCurrentCheckpointInterval();\nif (currentCheckpointInterval\n!= CheckpointCoordinatorConfiguration.DISABLED_CHECKPOINT_INTERVAL) {\nlong currentRelativeTime = clock.relativeTimeMillis();\nif (currentRelativeTime + currentCheckpointInterval\n< nextCheckpointTriggeringRelativeTime) {\nrescheduleTrigger(currentRelativeTime, currentCheckpointInterval);\n}\n}\n}\n}", + "context_before": "class CheckpointCoordinator {\nprivate static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class);\n/** The number of recent checkpoints whose IDs are remembered. */\nprivate static final int NUM_GHOST_CHECKPOINT_IDS = 16;\n/** Coordinator-wide lock to safeguard the checkpoint updates. */\nprivate final Object lock = new Object();\n/** The job whose checkpoint this coordinator coordinates. */\nprivate final JobID job;\n/** Default checkpoint properties. */\nprivate final CheckpointProperties checkpointProperties;\n/** The executor used for asynchronous calls, like potentially blocking I/O. */\nprivate final Executor executor;\nprivate final CheckpointsCleaner checkpointsCleaner;\n/** The operator coordinators that need to be checkpointed. */\nprivate final Collection coordinatorsToCheckpoint;\n/** Map from checkpoint ID to the pending checkpoint. */\n@GuardedBy(\"lock\")\nprivate final Map pendingCheckpoints;\n/**\n* Completed checkpoints. Implementations can be blocking. Make sure calls to methods accessing\n* this don't block the job manager actor and run asynchronously.\n*/\nprivate final CompletedCheckpointStore completedCheckpointStore;\n/**\n* The root checkpoint state backend, which is responsible for initializing the checkpoint,\n* storing the metadata, and cleaning up the checkpoint.\n*/\nprivate final CheckpointStorageCoordinatorView checkpointStorageView;\n/** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */\nprivate final ArrayDeque recentPendingCheckpoints;\n/**\n* Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these need to\n* be ascending across job managers.\n*/\nprivate final CheckpointIDCounter checkpointIdCounter;\n/**\n* The checkpoint interval in normal situations. Actual trigger time may be affected by the max\n* concurrent checkpoints, minimum-pause values and checkpoint interval during backlog.\n*/\nprivate final long baseInterval;\n/**\n* The checkpoint interval when any source reports isProcessingBacklog=true. 
Actual trigger time\n* may be affected by the max concurrent checkpoints and minimum-pause values.\n*/\nprivate final long baseIntervalDuringBacklog;\n/** The max time (in ms) that a checkpoint may take. */\nprivate final long checkpointTimeout;\n/**\n* The min time(in ms) to delay after a checkpoint could be triggered. Allows to enforce minimum\n* processing time between checkpoint attempts\n*/\nprivate final long minPauseBetweenCheckpoints;\n/**\n* The timer that handles the checkpoint timeouts and triggers periodic checkpoints. It must be\n* single-threaded. Eventually it will be replaced by main thread executor.\n*/\nprivate final ScheduledExecutor timer;\n/** The master checkpoint hooks executed by this checkpoint coordinator. */\nprivate final HashMap> masterHooks;\nprivate final boolean unalignedCheckpointsEnabled;\nprivate final long alignedCheckpointTimeout;\n/** Actor that receives status updates from the execution graph this coordinator works for. */\nprivate JobStatusListener jobStatusListener;\n/** A handle to the current periodic trigger, to cancel it when necessary. */\nprivate Future currentPeriodicTrigger;\n/**\n* The timestamp (via {@link Clock\n* triggered.\n*\n*
If it's value is {@link Long\n* scheduled.\n*/\nprivate long nextCheckpointTriggeringRelativeTime;\n/**\n* The timestamp (via {@link Clock\n*/\nprivate long lastCheckpointCompletionRelativeTime;\n/**\n* Flag whether a triggered checkpoint should immediately schedule the next checkpoint.\n* Non-volatile, because only accessed in synchronized scope\n*/\nprivate boolean periodicScheduling;\n/** Flag marking the coordinator as shut down (not accepting any messages any more). */\nprivate volatile boolean shutdown;\n/** Optional tracker for checkpoint statistics. */\nprivate final CheckpointStatsTracker statsTracker;\nprivate final BiFunction<\nSet,\nMap,\nVertexFinishedStateChecker>\nvertexFinishedStateCheckerFactory;\n/** Id of checkpoint for which in-flight data should be ignored on recovery. */\nprivate final long checkpointIdOfIgnoredInFlightData;\nprivate final CheckpointFailureManager failureManager;\nprivate final Clock clock;\nprivate final boolean isExactlyOnceMode;\n/** Flag represents there is an in-flight trigger request. */\nprivate boolean isTriggering = false;\nprivate final CheckpointRequestDecider requestDecider;\nprivate final CheckpointPlanCalculator checkpointPlanCalculator;\n/** IDs of the source operators that are currently processing backlog. */\nprivate final Set backlogOperators = Collections.synchronizedSet(new HashSet<>());\nprivate boolean baseLocationsForCheckpointInitialized = false;\nprivate boolean forceFullSnapshot;\npublic CheckpointCoordinator(\nJobID job,\nCheckpointCoordinatorConfiguration chkConfig,\nCollection coordinatorsToCheckpoint,\nCheckpointIDCounter checkpointIDCounter,\nCompletedCheckpointStore completedCheckpointStore,\nCheckpointStorage checkpointStorage,\nExecutor executor,\nCheckpointsCleaner checkpointsCleaner,\nScheduledExecutor timer,\nCheckpointFailureManager failureManager,\nCheckpointPlanCalculator checkpointPlanCalculator,\nCheckpointStatsTracker statsTracker) {\nthis(\njob,\nchkConfig,\ncoordinatorsToCheckpoint,\ncheckpointIDCounter,\ncompletedCheckpointStore,\ncheckpointStorage,\nexecutor,\ncheckpointsCleaner,\ntimer,\nfailureManager,\ncheckpointPlanCalculator,\nSystemClock.getInstance(),\nstatsTracker,\nVertexFinishedStateChecker::new);\n}\n@VisibleForTesting\npublic CheckpointCoordinator(\nJobID job,\nCheckpointCoordinatorConfiguration chkConfig,\nCollection coordinatorsToCheckpoint,\nCheckpointIDCounter checkpointIDCounter,\nCompletedCheckpointStore completedCheckpointStore,\nCheckpointStorage checkpointStorage,\nExecutor executor,\nCheckpointsCleaner checkpointsCleaner,\nScheduledExecutor timer,\nCheckpointFailureManager failureManager,\nCheckpointPlanCalculator checkpointPlanCalculator,\nClock clock,\nCheckpointStatsTracker statsTracker,\nBiFunction<\nSet,\nMap,\nVertexFinishedStateChecker>\nvertexFinishedStateCheckerFactory) {\ncheckNotNull(checkpointStorage);\nlong minPauseBetweenCheckpoints = chkConfig.getMinPauseBetweenCheckpoints();\nif (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) {\nminPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000;\n}\nlong baseInterval = chkConfig.getCheckpointInterval();\nif (baseInterval < minPauseBetweenCheckpoints) {\nbaseInterval = minPauseBetweenCheckpoints;\n}\nthis.job = checkNotNull(job);\nthis.baseInterval = baseInterval;\nthis.baseIntervalDuringBacklog = chkConfig.getCheckpointIntervalDuringBacklog();\nthis.checkpointTimeout = chkConfig.getCheckpointTimeout();\nthis.minPauseBetweenCheckpoints = minPauseBetweenCheckpoints;\nthis.coordinatorsToCheckpoint 
=\nCollections.unmodifiableCollection(coordinatorsToCheckpoint);\nthis.pendingCheckpoints = new LinkedHashMap<>();\nthis.checkpointIdCounter = checkNotNull(checkpointIDCounter);\nthis.completedCheckpointStore = checkNotNull(completedCheckpointStore);\nthis.executor = checkNotNull(executor);\nthis.checkpointsCleaner = checkNotNull(checkpointsCleaner);\nthis.failureManager = checkNotNull(failureManager);\nthis.checkpointPlanCalculator = checkNotNull(checkpointPlanCalculator);\nthis.clock = checkNotNull(clock);\nthis.isExactlyOnceMode = chkConfig.isExactlyOnce();\nthis.unalignedCheckpointsEnabled = chkConfig.isUnalignedCheckpointsEnabled();\nthis.alignedCheckpointTimeout = chkConfig.getAlignedCheckpointTimeout();\nthis.checkpointIdOfIgnoredInFlightData = chkConfig.getCheckpointIdOfIgnoredInFlightData();\nthis.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS);\nthis.masterHooks = new HashMap<>();\nthis.timer = timer;\nthis.checkpointProperties =\nCheckpointProperties.forCheckpoint(chkConfig.getCheckpointRetentionPolicy());\ntry {\nthis.checkpointStorageView = checkpointStorage.createCheckpointStorage(job);\nif (isPeriodicCheckpointingConfigured()) {\ncheckpointStorageView.initializeBaseLocationsForCheckpoint();\nbaseLocationsForCheckpointInitialized = true;\n}\n} catch (IOException e) {\nthrow new FlinkRuntimeException(\n\"Failed to create checkpoint storage at checkpoint coordinator side.\", e);\n}\ntry {\ncheckpointIDCounter.start();\n} catch (Throwable t) {\nthrow new RuntimeException(\n\"Failed to start checkpoint ID counter: \" + t.getMessage(), t);\n}\nthis.requestDecider =\nnew CheckpointRequestDecider(\nchkConfig.getMaxConcurrentCheckpoints(),\nthis::rescheduleTrigger,\nthis.clock,\nthis.minPauseBetweenCheckpoints,\nthis.pendingCheckpoints::size,\nthis.checkpointsCleaner::getNumberOfCheckpointsToClean);\nthis.statsTracker = checkNotNull(statsTracker, \"Statistic tracker can not be null\");\nthis.vertexFinishedStateCheckerFactory = checkNotNull(vertexFinishedStateCheckerFactory);\n}\n/**\n* Adds the given master hook to the checkpoint coordinator. This method does nothing, if the\n* checkpoint coordinator already contained a hook with the same ID (as defined via {@link\n* MasterTriggerRestoreHook\n*\n* @param hook The hook to add.\n* @return True, if the hook was added, false if the checkpoint coordinator already contained a\n* hook with the same ID.\n*/\npublic boolean addMasterHook(MasterTriggerRestoreHook hook) {\ncheckNotNull(hook);\nfinal String id = hook.getIdentifier();\ncheckArgument(!StringUtils.isNullOrWhitespaceOnly(id), \"The hook has a null or empty id\");\nsynchronized (lock) {\nif (!masterHooks.containsKey(id)) {\nmasterHooks.put(id, hook);\nreturn true;\n} else {\nreturn false;\n}\n}\n}\n/** Gets the number of currently register master hooks. */\npublic int getNumberOfRegisteredMasterHooks() {\nsynchronized (lock) {\nreturn masterHooks.size();\n}\n}\n/**\n* Shuts down the checkpoint coordinator.\n*\n*
After this method has been called, the coordinator does not accept any further messages\n* and cannot trigger any further checkpoints.\n*/\npublic void shutdown() throws Exception {\nsynchronized (lock) {\nif (!shutdown) {\nshutdown = true;\nLOG.info(\"Stopping checkpoint coordinator for job {}.\", job);\nperiodicScheduling = false;\nMasterHooks.close(masterHooks.values(), LOG);\nmasterHooks.clear();\nfinal CheckpointException reason =\nnew CheckpointException(\nCheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);\nabortPendingAndQueuedCheckpoints(reason);\n}\n}\n}\npublic boolean isShutdown() {\nreturn shutdown;\n}\n/**\n* Reports whether a source operator is currently processing backlog.\n*\n*
If any source operator is processing backlog, the checkpoint interval is determined by\n* {@code execution.checkpointing.interval-during-backlog} instead of {@code\n* execution.checkpointing.interval}.\n*\n*
If a source has not invoked this method, the source is considered to have\n* isProcessingBacklog=false. If a source operator has invoked this method multiple times, the\n* last reported value is used.\n*\n* @param operatorID the operator ID of the source operator.\n* @param isProcessingBacklog whether the source operator is processing backlog.\n*/\n/**\n* Triggers a savepoint with the given savepoint directory as a target.\n*\n* @param targetLocation Target location for the savepoint, optional. If null, the state\n* backend's configured default will be used.\n* @return A future to the completed checkpoint\n* @throws IllegalStateException If no savepoint directory has been specified and no default\n* savepoint directory has been configured\n*/\npublic CompletableFuture triggerSavepoint(\n@Nullable final String targetLocation, final SavepointFormatType formatType) {\nfinal CheckpointProperties properties =\nCheckpointProperties.forSavepoint(!unalignedCheckpointsEnabled, formatType);\nreturn triggerSavepointInternal(properties, targetLocation);\n}\n/**\n* Triggers a synchronous savepoint with the given savepoint directory as a target.\n*\n* @param terminate flag indicating if the job should terminate or just suspend\n* @param targetLocation Target location for the savepoint, optional. If null, the state\n* backend's configured default will be used.\n* @return A future to the completed checkpoint\n* @throws IllegalStateException If no savepoint directory has been specified and no default\n* savepoint directory has been configured\n*/\npublic CompletableFuture triggerSynchronousSavepoint(\nfinal boolean terminate,\n@Nullable final String targetLocation,\nSavepointFormatType formatType) {\nfinal CheckpointProperties properties =\nCheckpointProperties.forSyncSavepoint(\n!unalignedCheckpointsEnabled, terminate, formatType);\nreturn triggerSavepointInternal(properties, targetLocation);\n}\nprivate CompletableFuture triggerSavepointInternal(\nfinal CheckpointProperties checkpointProperties,\n@Nullable final String targetLocation) {\ncheckNotNull(checkpointProperties);\nreturn triggerCheckpointFromCheckpointThread(checkpointProperties, targetLocation, false);\n}\nprivate CompletableFuture triggerCheckpointFromCheckpointThread(\nCheckpointProperties checkpointProperties, String targetLocation, boolean isPeriodic) {\nfinal CompletableFuture resultFuture = new CompletableFuture<>();\ntimer.execute(\n() ->\ntriggerCheckpoint(checkpointProperties, targetLocation, isPeriodic)\n.whenComplete(\n(completedCheckpoint, throwable) -> {\nif (throwable == null) {\nresultFuture.complete(completedCheckpoint);\n} else {\nresultFuture.completeExceptionally(throwable);\n}\n}));\nreturn resultFuture;\n}\n/**\n* Triggers a new standard checkpoint and uses the given timestamp as the checkpoint timestamp.\n* The return value is a future. It completes when the checkpoint triggered finishes or an error\n* occurred.\n*\n* @param isPeriodic Flag indicating whether this triggered checkpoint is periodic.\n* @return a future to the completed checkpoint.\n*/\npublic CompletableFuture triggerCheckpoint(boolean isPeriodic) {\nreturn triggerCheckpointFromCheckpointThread(checkpointProperties, null, isPeriodic);\n}\n/**\n* Triggers one new checkpoint with the given checkpointType. 
The returned future completes when\n* the triggered checkpoint finishes or an error occurred.\n*\n* @param checkpointType specifies the back up type of the checkpoint to trigger.\n* @return a future to the completed checkpoint.\n*/\npublic CompletableFuture triggerCheckpoint(CheckpointType checkpointType) {\nif (checkpointType == null) {\nthrow new IllegalArgumentException(\"checkpointType cannot be null\");\n}\nfinal SnapshotType snapshotType;\nswitch (checkpointType) {\ncase CONFIGURED:\nsnapshotType = checkpointProperties.getCheckpointType();\nbreak;\ncase FULL:\nsnapshotType = FULL_CHECKPOINT;\nbreak;\ncase INCREMENTAL:\nsnapshotType = CHECKPOINT;\nbreak;\ndefault:\nthrow new IllegalArgumentException(\"unknown checkpointType: \" + checkpointType);\n}\nfinal CheckpointProperties properties =\nnew CheckpointProperties(\ncheckpointProperties.forceCheckpoint(),\nsnapshotType,\ncheckpointProperties.discardOnSubsumed(),\ncheckpointProperties.discardOnJobFinished(),\ncheckpointProperties.discardOnJobCancelled(),\ncheckpointProperties.discardOnJobFailed(),\ncheckpointProperties.discardOnJobSuspended(),\ncheckpointProperties.isUnclaimed());\nreturn triggerCheckpointFromCheckpointThread(properties, null, false);\n}\n@VisibleForTesting\nCompletableFuture triggerCheckpoint(\nCheckpointProperties props,\n@Nullable String externalSavepointLocation,\nboolean isPeriodic) {\nCheckpointTriggerRequest request =\nnew CheckpointTriggerRequest(props, externalSavepointLocation, isPeriodic);\nchooseRequestToExecute(request).ifPresent(this::startTriggeringCheckpoint);\nreturn request.onCompletionPromise;\n}\nprivate void startTriggeringCheckpoint(CheckpointTriggerRequest request) {\ntry {\nsynchronized (lock) {\npreCheckGlobalState(request.isPeriodic);\n}\nPreconditions.checkState(!isTriggering);\nisTriggering = true;\nfinal long timestamp = System.currentTimeMillis();\nCompletableFuture checkpointPlanFuture =\ncheckpointPlanCalculator.calculateCheckpointPlan();\nboolean initializeBaseLocations = !baseLocationsForCheckpointInitialized;\nbaseLocationsForCheckpointInitialized = true;\nCompletableFuture masterTriggerCompletionPromise = new CompletableFuture<>();\nfinal CompletableFuture pendingCheckpointCompletableFuture =\ncheckpointPlanFuture\n.thenApplyAsync(\nplan -> {\ntry {\nlong checkpointID =\ncheckpointIdCounter.getAndIncrement();\nreturn new Tuple2<>(plan, checkpointID);\n} catch (Throwable e) {\nthrow new CompletionException(e);\n}\n},\nexecutor)\n.thenApplyAsync(\n(checkpointInfo) ->\ncreatePendingCheckpoint(\ntimestamp,\nrequest.props,\ncheckpointInfo.f0,\nrequest.isPeriodic,\ncheckpointInfo.f1,\nrequest.getOnCompletionFuture(),\nmasterTriggerCompletionPromise),\ntimer);\nfinal CompletableFuture coordinatorCheckpointsComplete =\npendingCheckpointCompletableFuture\n.thenApplyAsync(\npendingCheckpoint -> {\ntry {\nCheckpointStorageLocation checkpointStorageLocation =\ninitializeCheckpointLocation(\npendingCheckpoint.getCheckpointID(),\nrequest.props,\nrequest.externalSavepointLocation,\ninitializeBaseLocations);\nreturn Tuple2.of(\npendingCheckpoint, checkpointStorageLocation);\n} catch (Throwable e) {\nthrow new CompletionException(e);\n}\n},\nexecutor)\n.thenComposeAsync(\n(checkpointInfo) -> {\nPendingCheckpoint pendingCheckpoint = checkpointInfo.f0;\nif (pendingCheckpoint.isDisposed()) {\nreturn null;\n}\nsynchronized (lock) {\npendingCheckpoint.setCheckpointTargetLocation(\ncheckpointInfo.f1);\n}\nreturn 
OperatorCoordinatorCheckpoints\n.triggerAndAcknowledgeAllCoordinatorCheckpointsWithCompletion(\ncoordinatorsToCheckpoint,\npendingCheckpoint,\ntimer);\n},\ntimer);\nfinal CompletableFuture masterStatesComplete =\ncoordinatorCheckpointsComplete.thenComposeAsync(\nignored -> {\nPendingCheckpoint checkpoint =\nFutureUtils.getWithoutException(\npendingCheckpointCompletableFuture);\nif (checkpoint == null || checkpoint.isDisposed()) {\nreturn null;\n}\nreturn snapshotMasterState(checkpoint);\n},\ntimer);\nFutureUtils.forward(\nCompletableFuture.allOf(masterStatesComplete, coordinatorCheckpointsComplete),\nmasterTriggerCompletionPromise);\nFutureUtils.assertNoException(\nmasterTriggerCompletionPromise\n.handleAsync(\n(ignored, throwable) -> {\nfinal PendingCheckpoint checkpoint =\nFutureUtils.getWithoutException(\npendingCheckpointCompletableFuture);\nPreconditions.checkState(\ncheckpoint != null || throwable != null,\n\"Either the pending checkpoint needs to be created or an error must have occurred.\");\nif (throwable != null) {\nif (checkpoint == null) {\nonTriggerFailure(request, throwable);\n} else {\nonTriggerFailure(checkpoint, throwable);\n}\n} else {\ntriggerCheckpointRequest(\nrequest, timestamp, checkpoint);\n}\nreturn null;\n},\ntimer)\n.exceptionally(\nerror -> {\nif (!isShutdown()) {\nthrow new CompletionException(error);\n} else if (findThrowable(\nerror, RejectedExecutionException.class)\n.isPresent()) {\nLOG.debug(\"Execution rejected during shutdown\");\n} else {\nLOG.warn(\"Error encountered during shutdown\", error);\n}\nreturn null;\n}));\n} catch (Throwable throwable) {\nonTriggerFailure(request, throwable);\n}\n}\nprivate void triggerCheckpointRequest(\nCheckpointTriggerRequest request, long timestamp, PendingCheckpoint checkpoint) {\nif (checkpoint.isDisposed()) {\nonTriggerFailure(\ncheckpoint,\nnew CheckpointException(\nCheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE,\ncheckpoint.getFailureCause()));\n} else {\ntriggerTasks(request, timestamp, checkpoint)\n.exceptionally(\nfailure -> {\nLOG.info(\n\"Triggering Checkpoint {} for job {} failed due to {}\",\ncheckpoint.getCheckpointID(),\njob,\nfailure);\nfinal CheckpointException cause;\nif (failure instanceof CheckpointException) {\ncause = (CheckpointException) failure;\n} else {\ncause =\nnew CheckpointException(\nCheckpointFailureReason\n.TRIGGER_CHECKPOINT_FAILURE,\nfailure);\n}\ntimer.execute(\n() -> {\nsynchronized (lock) {\nabortPendingCheckpoint(checkpoint, cause);\n}\n});\nreturn null;\n});\nif (maybeCompleteCheckpoint(checkpoint)) {\nonTriggerSuccess();\n}\n}\n}\nprivate CompletableFuture triggerTasks(\nCheckpointTriggerRequest request, long timestamp, PendingCheckpoint checkpoint) {\nfinal long checkpointId = checkpoint.getCheckpointID();\nfinal SnapshotType type;\nif (this.forceFullSnapshot && !request.props.isSavepoint()) {\ntype = FULL_CHECKPOINT;\n} else {\ntype = request.props.getCheckpointType();\n}\nfinal CheckpointOptions checkpointOptions =\nCheckpointOptions.forConfig(\ntype,\ncheckpoint.getCheckpointStorageLocation().getLocationReference(),\nisExactlyOnceMode,\nunalignedCheckpointsEnabled,\nalignedCheckpointTimeout);\nList> acks = new ArrayList<>();\nfor (Execution execution : checkpoint.getCheckpointPlan().getTasksToTrigger()) {\nif (request.props.isSynchronous()) {\nacks.add(\nexecution.triggerSynchronousSavepoint(\ncheckpointId, timestamp, checkpointOptions));\n} else {\nacks.add(execution.triggerCheckpoint(checkpointId, timestamp, checkpointOptions));\n}\n}\nreturn 
FutureUtils.waitForAll(acks);\n}\n/**\n* Initialize the checkpoint location asynchronously. It will be expected to be executed in io\n* thread due to it might be time-consuming.\n*\n* @param checkpointID checkpoint id\n* @param props checkpoint properties\n* @param externalSavepointLocation the external savepoint location, it might be null\n* @return the checkpoint location\n*/\nprivate CheckpointStorageLocation initializeCheckpointLocation(\nlong checkpointID,\nCheckpointProperties props,\n@Nullable String externalSavepointLocation,\nboolean initializeBaseLocations)\nthrows Exception {\nfinal CheckpointStorageLocation checkpointStorageLocation;\nif (props.isSavepoint()) {\ncheckpointStorageLocation =\ncheckpointStorageView.initializeLocationForSavepoint(\ncheckpointID, externalSavepointLocation);\n} else {\nif (initializeBaseLocations) {\ncheckpointStorageView.initializeBaseLocationsForCheckpoint();\n}\ncheckpointStorageLocation =\ncheckpointStorageView.initializeLocationForCheckpoint(checkpointID);\n}\nreturn checkpointStorageLocation;\n}\nprivate PendingCheckpoint createPendingCheckpoint(\nlong timestamp,\nCheckpointProperties props,\nCheckpointPlan checkpointPlan,\nboolean isPeriodic,\nlong checkpointID,\nCompletableFuture onCompletionPromise,\nCompletableFuture masterTriggerCompletionPromise) {\nsynchronized (lock) {\ntry {\npreCheckGlobalState(isPeriodic);\n} catch (Throwable t) {\nthrow new CompletionException(t);\n}\n}\nPendingCheckpointStats pendingCheckpointStats =\ntrackPendingCheckpointStats(checkpointID, checkpointPlan, props, timestamp);\nfinal PendingCheckpoint checkpoint =\nnew PendingCheckpoint(\njob,\ncheckpointID,\ntimestamp,\ncheckpointPlan,\nOperatorInfo.getIds(coordinatorsToCheckpoint),\nmasterHooks.keySet(),\nprops,\nonCompletionPromise,\npendingCheckpointStats,\nmasterTriggerCompletionPromise);\nsynchronized (lock) {\npendingCheckpoints.put(checkpointID, checkpoint);\nScheduledFuture cancellerHandle =\ntimer.schedule(\nnew CheckpointCanceller(checkpoint),\ncheckpointTimeout,\nTimeUnit.MILLISECONDS);\nif (!checkpoint.setCancellerHandle(cancellerHandle)) {\ncancellerHandle.cancel(false);\n}\n}\nLOG.info(\n\"Triggering checkpoint {} (type={}) @ {} for job {}.\",\ncheckpointID,\ncheckpoint.getProps().getCheckpointType(),\ntimestamp,\njob);\nreturn checkpoint;\n}\n/**\n* Snapshot master hook states asynchronously.\n*\n* @param checkpoint the pending checkpoint\n* @return the future represents master hook states are finished or not\n*/\nprivate CompletableFuture snapshotMasterState(PendingCheckpoint checkpoint) {\nif (masterHooks.isEmpty()) {\nreturn CompletableFuture.completedFuture(null);\n}\nfinal long checkpointID = checkpoint.getCheckpointID();\nfinal long timestamp = checkpoint.getCheckpointTimestamp();\nfinal CompletableFuture masterStateCompletableFuture = new CompletableFuture<>();\nfor (MasterTriggerRestoreHook masterHook : masterHooks.values()) {\nMasterHooks.triggerHook(masterHook, checkpointID, timestamp, executor)\n.whenCompleteAsync(\n(masterState, throwable) -> {\ntry {\nsynchronized (lock) {\nif (masterStateCompletableFuture.isDone()) {\nreturn;\n}\nif (checkpoint.isDisposed()) {\nthrow new IllegalStateException(\n\"Checkpoint \"\n+ checkpointID\n+ \" has been discarded\");\n}\nif (throwable == null) {\ncheckpoint.acknowledgeMasterState(\nmasterHook.getIdentifier(), masterState);\nif (checkpoint.areMasterStatesFullyAcknowledged()) {\nmasterStateCompletableFuture.complete(null);\n}\n} else 
{\nmasterStateCompletableFuture.completeExceptionally(\nthrowable);\n}\n}\n} catch (Throwable t) {\nmasterStateCompletableFuture.completeExceptionally(t);\n}\n},\ntimer);\n}\nreturn masterStateCompletableFuture;\n}\n/** Trigger request is successful. NOTE, it must be invoked if trigger request is successful. */\nprivate void onTriggerSuccess() {\nisTriggering = false;\nexecuteQueuedRequest();\n}\n/**\n* The trigger request is failed prematurely without a proper initialization. There is no\n* resource to release, but the completion promise needs to fail manually here.\n*\n* @param onCompletionPromise the completion promise of the checkpoint/savepoint\n* @param throwable the reason of trigger failure\n*/\nprivate void onTriggerFailure(\nCheckpointTriggerRequest onCompletionPromise, Throwable throwable) {\nfinal CheckpointException checkpointException =\ngetCheckpointException(\nCheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable);\nonCompletionPromise.completeExceptionally(checkpointException);\nonTriggerFailure((PendingCheckpoint) null, onCompletionPromise.props, checkpointException);\n}\nprivate void onTriggerFailure(PendingCheckpoint checkpoint, Throwable throwable) {\ncheckArgument(checkpoint != null, \"Pending checkpoint can not be null.\");\nonTriggerFailure(checkpoint, checkpoint.getProps(), throwable);\n}\n/**\n* The trigger request is failed. NOTE, it must be invoked if trigger request is failed.\n*\n* @param checkpoint the pending checkpoint which is failed. It could be null if it's failed\n* prematurely without a proper initialization.\n* @param throwable the reason of trigger failure\n*/\nprivate void onTriggerFailure(\n@Nullable PendingCheckpoint checkpoint,\nCheckpointProperties checkpointProperties,\nThrowable throwable) {\nthrowable = ExceptionUtils.stripCompletionException(throwable);\ntry {\ncoordinatorsToCheckpoint.forEach(\nOperatorCoordinatorCheckpointContext::abortCurrentTriggering);\nfinal CheckpointException cause =\ngetCheckpointException(\nCheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable);\nif (checkpoint != null && !checkpoint.isDisposed()) {\nsynchronized (lock) {\nabortPendingCheckpoint(checkpoint, cause);\n}\n} else {\nfailureManager.handleCheckpointException(\ncheckpoint, checkpointProperties, cause, null, job, null, statsTracker);\n}\n} finally {\nisTriggering = false;\nexecuteQueuedRequest();\n}\n}\nprivate void executeQueuedRequest() {\nchooseQueuedRequestToExecute().ifPresent(this::startTriggeringCheckpoint);\n}\nprivate Optional chooseQueuedRequestToExecute() {\nsynchronized (lock) {\nreturn requestDecider.chooseQueuedRequestToExecute(\nisTriggering, lastCheckpointCompletionRelativeTime);\n}\n}\nprivate Optional chooseRequestToExecute(\nCheckpointTriggerRequest request) {\nsynchronized (lock) {\nOptional checkpointTriggerRequest =\nrequestDecider.chooseRequestToExecute(\nrequest, isTriggering, lastCheckpointCompletionRelativeTime);\nreturn checkpointTriggerRequest;\n}\n}\nprivate boolean maybeCompleteCheckpoint(PendingCheckpoint checkpoint) {\nsynchronized (lock) {\nif (checkpoint.isFullyAcknowledged()) {\ntry {\nif (shutdown) {\nreturn false;\n}\ncompletePendingCheckpoint(checkpoint);\n} catch (CheckpointException ce) {\nonTriggerFailure(checkpoint, ce);\nreturn false;\n}\n}\n}\nreturn true;\n}\n/**\n* Receives a {@link DeclineCheckpoint} message for a pending checkpoint.\n*\n* @param message Checkpoint decline from the task manager\n* @param taskManagerLocationInfo The location info of the decline checkpoint message's 
sender\n*/\npublic void receiveDeclineMessage(DeclineCheckpoint message, String taskManagerLocationInfo) {\nif (shutdown || message == null) {\nreturn;\n}\nif (!job.equals(message.getJob())) {\nthrow new IllegalArgumentException(\n\"Received DeclineCheckpoint message for job \"\n+ message.getJob()\n+ \" from \"\n+ taskManagerLocationInfo\n+ \" while this coordinator handles job \"\n+ job);\n}\nfinal long checkpointId = message.getCheckpointId();\nfinal CheckpointException checkpointException =\nmessage.getSerializedCheckpointException().unwrap();\nfinal String reason = checkpointException.getMessage();\nPendingCheckpoint checkpoint;\nsynchronized (lock) {\nif (shutdown) {\nreturn;\n}\ncheckpoint = pendingCheckpoints.get(checkpointId);\nif (checkpoint != null) {\nPreconditions.checkState(\n!checkpoint.isDisposed(),\n\"Received message for discarded but non-removed checkpoint \"\n+ checkpointId);\nLOG.info(\n\"Decline checkpoint {} by task {} of job {} at {}.\",\ncheckpointId,\nmessage.getTaskExecutionId(),\njob,\ntaskManagerLocationInfo,\ncheckpointException.getCause());\nabortPendingCheckpoint(\ncheckpoint, checkpointException, message.getTaskExecutionId());\n} else if (LOG.isDebugEnabled()) {\nif (recentPendingCheckpoints.contains(checkpointId)) {\nLOG.debug(\n\"Received another decline message for now expired checkpoint attempt {} from task {} of job {} at {} : {}\",\ncheckpointId,\nmessage.getTaskExecutionId(),\njob,\ntaskManagerLocationInfo,\nreason);\n} else {\nLOG.debug(\n\"Received decline message for unknown (too old?) checkpoint attempt {} from task {} of job {} at {} : {}\",\ncheckpointId,\nmessage.getTaskExecutionId(),\njob,\ntaskManagerLocationInfo,\nreason);\n}\n}\n}\n}\n/**\n* Receives an AcknowledgeCheckpoint message and returns whether the message was associated with\n* a pending checkpoint.\n*\n* @param message Checkpoint ack from the task manager\n* @param taskManagerLocationInfo The location of the acknowledge checkpoint message's sender\n* @return Flag indicating whether the ack'd checkpoint was associated with a pending\n* checkpoint.\n* @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint\n* store.\n*/\npublic boolean receiveAcknowledgeMessage(\nAcknowledgeCheckpoint message, String taskManagerLocationInfo)\nthrows CheckpointException {\nif (shutdown || message == null) {\nreturn false;\n}\nif (!job.equals(message.getJob())) {\nLOG.error(\n\"Received wrong AcknowledgeCheckpoint message for job {} from {} : {}\",\njob,\ntaskManagerLocationInfo,\nmessage);\nreturn false;\n}\nfinal long checkpointId = message.getCheckpointId();\nsynchronized (lock) {\nif (shutdown) {\nreturn false;\n}\nfinal PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId);\nif (message.getSubtaskState() != null) {\nif (checkpoint == null || !checkpoint.getProps().isSavepoint()) {\nmessage.getSubtaskState()\n.registerSharedStates(\ncompletedCheckpointStore.getSharedStateRegistry(),\ncheckpointId);\n}\n}\nif (checkpoint != null && !checkpoint.isDisposed()) {\nswitch (checkpoint.acknowledgeTask(\nmessage.getTaskExecutionId(),\nmessage.getSubtaskState(),\nmessage.getCheckpointMetrics())) {\ncase SUCCESS:\nLOG.debug(\n\"Received acknowledge message for checkpoint {} from task {} of job {} at {}.\",\ncheckpointId,\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\nif (checkpoint.isFullyAcknowledged()) {\ncompletePendingCheckpoint(checkpoint);\n}\nbreak;\ncase DUPLICATE:\nLOG.debug(\n\"Received a duplicate acknowledge message 
for checkpoint {}, task {}, job {}, location {}.\",\nmessage.getCheckpointId(),\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\nbreak;\ncase UNKNOWN:\nLOG.warn(\n\"Could not acknowledge the checkpoint {} for task {} of job {} at {}, \"\n+ \"because the task's execution attempt id was unknown. Discarding \"\n+ \"the state handle to avoid lingering state.\",\nmessage.getCheckpointId(),\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\ndiscardSubtaskState(\nmessage.getJob(),\nmessage.getTaskExecutionId(),\nmessage.getCheckpointId(),\nmessage.getSubtaskState());\nbreak;\ncase DISCARDED:\nLOG.warn(\n\"Could not acknowledge the checkpoint {} for task {} of job {} at {}, \"\n+ \"because the pending checkpoint had been discarded. Discarding the \"\n+ \"state handle tp avoid lingering state.\",\nmessage.getCheckpointId(),\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\ndiscardSubtaskState(\nmessage.getJob(),\nmessage.getTaskExecutionId(),\nmessage.getCheckpointId(),\nmessage.getSubtaskState());\n}\nreturn true;\n} else if (checkpoint != null) {\nthrow new IllegalStateException(\n\"Received message for discarded but non-removed checkpoint \"\n+ checkpointId);\n} else {\nreportStats(\nmessage.getCheckpointId(),\nmessage.getTaskExecutionId(),\nmessage.getCheckpointMetrics());\nboolean wasPendingCheckpoint;\nif (recentPendingCheckpoints.contains(checkpointId)) {\nwasPendingCheckpoint = true;\nLOG.warn(\n\"Received late message for now expired checkpoint attempt {} from task \"\n+ \"{} of job {} at {}.\",\ncheckpointId,\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\n} else {\nLOG.debug(\n\"Received message for an unknown checkpoint {} from task {} of job {} at {}.\",\ncheckpointId,\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\nwasPendingCheckpoint = false;\n}\ndiscardSubtaskState(\nmessage.getJob(),\nmessage.getTaskExecutionId(),\nmessage.getCheckpointId(),\nmessage.getSubtaskState());\nreturn wasPendingCheckpoint;\n}\n}\n}\n/**\n* Try to complete the given pending checkpoint.\n*\n*
Important: This method should only be called in the checkpoint lock scope.\n*\n* @param pendingCheckpoint to complete\n* @throws CheckpointException if the completion failed\n*/\nprivate void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint)\nthrows CheckpointException {\nfinal long checkpointId = pendingCheckpoint.getCheckpointID();\nfinal CompletedCheckpoint completedCheckpoint;\nfinal CompletedCheckpoint lastSubsumed;\nfinal CheckpointProperties props = pendingCheckpoint.getProps();\ncompletedCheckpointStore.getSharedStateRegistry().checkpointCompleted(checkpointId);\ntry {\ncompletedCheckpoint = finalizeCheckpoint(pendingCheckpoint);\nPreconditions.checkState(pendingCheckpoint.isDisposed() && completedCheckpoint != null);\nif (!props.isSavepoint()) {\nlastSubsumed =\naddCompletedCheckpointToStoreAndSubsumeOldest(\ncheckpointId, completedCheckpoint, pendingCheckpoint);\n} else {\nlastSubsumed = null;\n}\npendingCheckpoint.getCompletionFuture().complete(completedCheckpoint);\nreportCompletedCheckpoint(completedCheckpoint);\n} catch (Exception exception) {\npendingCheckpoint.getCompletionFuture().completeExceptionally(exception);\nthrow exception;\n} finally {\npendingCheckpoints.remove(checkpointId);\nscheduleTriggerRequest();\n}\ncleanupAfterCompletedCheckpoint(\npendingCheckpoint, checkpointId, completedCheckpoint, lastSubsumed, props);\n}\nprivate void reportCompletedCheckpoint(CompletedCheckpoint completedCheckpoint) {\nfailureManager.handleCheckpointSuccess(completedCheckpoint.getCheckpointID());\nCompletedCheckpointStats completedCheckpointStats = completedCheckpoint.getStatistic();\nif (completedCheckpointStats != null) {\nLOG.trace(\n\"Checkpoint {} size: {}Kb, duration: {}ms\",\ncompletedCheckpoint.getCheckpointID(),\ncompletedCheckpointStats.getStateSize() == 0\n? 
0\n: completedCheckpointStats.getStateSize() / 1024,\ncompletedCheckpointStats.getEndToEndDuration());\nstatsTracker.reportCompletedCheckpoint(completedCheckpointStats);\n}\n}\nprivate void cleanupAfterCompletedCheckpoint(\nPendingCheckpoint pendingCheckpoint,\nlong checkpointId,\nCompletedCheckpoint completedCheckpoint,\nCompletedCheckpoint lastSubsumed,\nCheckpointProperties props) {\nrememberRecentCheckpointId(checkpointId);\nlastCheckpointCompletionRelativeTime = clock.relativeTimeMillis();\nlogCheckpointInfo(completedCheckpoint);\nif (!props.isSavepoint() || props.isSynchronous()) {\ndropSubsumedCheckpoints(checkpointId);\nsendAcknowledgeMessages(\npendingCheckpoint.getCheckpointPlan().getTasksToCommitTo(),\ncheckpointId,\ncompletedCheckpoint.getTimestamp(),\nextractIdIfDiscardedOnSubsumed(lastSubsumed));\n}\n}\nprivate void logCheckpointInfo(CompletedCheckpoint completedCheckpoint) {\nLOG.info(\n\"Completed checkpoint {} for job {} ({} bytes, checkpointDuration={} ms, finalizationTime={} ms).\",\ncompletedCheckpoint.getCheckpointID(),\njob,\ncompletedCheckpoint.getStateSize(),\ncompletedCheckpoint.getCompletionTimestamp() - completedCheckpoint.getTimestamp(),\nSystem.currentTimeMillis() - completedCheckpoint.getCompletionTimestamp());\nif (LOG.isDebugEnabled()) {\nStringBuilder builder = new StringBuilder();\nbuilder.append(\"Checkpoint state: \");\nfor (OperatorState state : completedCheckpoint.getOperatorStates().values()) {\nbuilder.append(state);\nbuilder.append(\", \");\n}\nbuilder.setLength(builder.length() - 2);\nLOG.debug(builder.toString());\n}\n}\nprivate CompletedCheckpoint finalizeCheckpoint(PendingCheckpoint pendingCheckpoint)\nthrows CheckpointException {\ntry {\nfinal CompletedCheckpoint completedCheckpoint =\npendingCheckpoint.finalizeCheckpoint(\ncheckpointsCleaner, this::scheduleTriggerRequest, executor);\nreturn completedCheckpoint;\n} catch (Exception e1) {\nfinal CheckpointFailureReason failureReason =\ne1 instanceof PartialFinishingNotSupportedByStateException\n? 
CheckpointFailureReason.CHECKPOINT_DECLINED_TASK_CLOSING\n: CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE;\nif (!pendingCheckpoint.isDisposed()) {\nabortPendingCheckpoint(\npendingCheckpoint, new CheckpointException(failureReason, e1));\n}\nthrow new CheckpointException(\n\"Could not finalize the pending checkpoint \"\n+ pendingCheckpoint.getCheckpointID()\n+ '.',\nfailureReason,\ne1);\n}\n}\nprivate long extractIdIfDiscardedOnSubsumed(CompletedCheckpoint lastSubsumed) {\nfinal long lastSubsumedCheckpointId;\nif (lastSubsumed != null && lastSubsumed.getProperties().discardOnSubsumed()) {\nlastSubsumedCheckpointId = lastSubsumed.getCheckpointID();\n} else {\nlastSubsumedCheckpointId = CheckpointStoreUtil.INVALID_CHECKPOINT_ID;\n}\nreturn lastSubsumedCheckpointId;\n}\nprivate CompletedCheckpoint addCompletedCheckpointToStoreAndSubsumeOldest(\nlong checkpointId,\nCompletedCheckpoint completedCheckpoint,\nPendingCheckpoint pendingCheckpoint)\nthrows CheckpointException {\nList tasksToAbort =\npendingCheckpoint.getCheckpointPlan().getTasksToCommitTo();\ntry {\nfinal CompletedCheckpoint subsumedCheckpoint =\ncompletedCheckpointStore.addCheckpointAndSubsumeOldestOne(\ncompletedCheckpoint, checkpointsCleaner, this::scheduleTriggerRequest);\nthis.forceFullSnapshot = false;\nreturn subsumedCheckpoint;\n} catch (Exception exception) {\npendingCheckpoint.getCompletionFuture().completeExceptionally(exception);\nif (exception instanceof PossibleInconsistentStateException) {\nLOG.warn(\n\"An error occurred while writing checkpoint {} to the underlying metadata\"\n+ \" store. Flink was not able to determine whether the metadata was\"\n+ \" successfully persisted. The corresponding state located at '{}'\"\n+ \" won't be discarded and needs to be cleaned up manually.\",\ncompletedCheckpoint.getCheckpointID(),\ncompletedCheckpoint.getExternalPointer());\n} else {\ncheckpointsCleaner.cleanCheckpointOnFailedStoring(completedCheckpoint, executor);\n}\nfinal CheckpointException checkpointException =\nnew CheckpointException(\n\"Could not complete the pending checkpoint \" + checkpointId + '.',\nCheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE,\nexception);\nreportFailedCheckpoint(pendingCheckpoint, checkpointException);\nsendAbortedMessages(tasksToAbort, checkpointId, completedCheckpoint.getTimestamp());\nthrow checkpointException;\n}\n}\nprivate void reportFailedCheckpoint(\nPendingCheckpoint pendingCheckpoint, CheckpointException exception) {\nfailureManager.handleCheckpointException(\npendingCheckpoint,\npendingCheckpoint.getProps(),\nexception,\nnull,\njob,\ngetStatsCallback(pendingCheckpoint),\nstatsTracker);\n}\nvoid scheduleTriggerRequest() {\nsynchronized (lock) {\nif (isShutdown()) {\nLOG.debug(\n\"Skip scheduling trigger request because the CheckpointCoordinator is shut down\");\n} else {\ntimer.execute(this::executeQueuedRequest);\n}\n}\n}\n@VisibleForTesting\nvoid sendAcknowledgeMessages(\nList tasksToCommit,\nlong completedCheckpointId,\nlong completedTimestamp,\nlong lastSubsumedCheckpointId) {\nfor (ExecutionVertex ev : tasksToCommit) {\nExecution ee = ev.getCurrentExecutionAttempt();\nif (ee != null) {\nee.notifyCheckpointOnComplete(\ncompletedCheckpointId, completedTimestamp, lastSubsumedCheckpointId);\n}\n}\nfor (OperatorCoordinatorCheckpointContext coordinatorContext : coordinatorsToCheckpoint) {\ncoordinatorContext.notifyCheckpointComplete(completedCheckpointId);\n}\n}\nprivate void sendAbortedMessages(\nList tasksToAbort, long checkpointId, long timeStamp) {\nassert 
(Thread.holdsLock(lock));\nlong latestCompletedCheckpointId = completedCheckpointStore.getLatestCheckpointId();\nexecutor.execute(\n() -> {\nfor (ExecutionVertex ev : tasksToAbort) {\nExecution ee = ev.getCurrentExecutionAttempt();\nif (ee != null) {\nee.notifyCheckpointAborted(\ncheckpointId, latestCompletedCheckpointId, timeStamp);\n}\n}\n});\nfor (OperatorCoordinatorCheckpointContext coordinatorContext : coordinatorsToCheckpoint) {\ncoordinatorContext.notifyCheckpointAborted(checkpointId);\n}\n}\n/**\n* Fails all pending checkpoints which have not been acknowledged by the given execution attempt\n* id.\n*\n* @param executionAttemptId for which to discard unacknowledged pending checkpoints\n* @param cause of the failure\n*/\npublic void failUnacknowledgedPendingCheckpointsFor(\nExecutionAttemptID executionAttemptId, Throwable cause) {\nsynchronized (lock) {\nabortPendingCheckpoints(\ncheckpoint -> !checkpoint.isAcknowledgedBy(executionAttemptId),\nnew CheckpointException(CheckpointFailureReason.TASK_FAILURE, cause));\n}\n}\nprivate void rememberRecentCheckpointId(long id) {\nif (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) {\nrecentPendingCheckpoints.removeFirst();\n}\nrecentPendingCheckpoints.addLast(id);\n}\nprivate void dropSubsumedCheckpoints(long checkpointId) {\nabortPendingCheckpoints(\ncheckpoint ->\ncheckpoint.getCheckpointID() < checkpointId && checkpoint.canBeSubsumed(),\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_SUBSUMED));\n}\n/**\n* Restores the latest checkpointed state to a set of subtasks. This method represents a \"local\"\n* or \"regional\" failover and does restore states to coordinators. Note that a regional failover\n* might still include all tasks.\n*\n* @param tasks Set of job vertices to restore. State for these vertices is restored via {@link\n* Execution\n* @return An {@code OptionalLong} with the checkpoint ID, if state was restored, an empty\n* {@code OptionalLong} otherwise.\n* @throws IllegalStateException If the CheckpointCoordinator is shut down.\n* @throws IllegalStateException If no completed checkpoint is available and the \n* failIfNoCheckpoint flag has been set.\n* @throws IllegalStateException If the checkpoint contains state that cannot be mapped to any\n* job vertex in tasks and the allowNonRestoredState flag has not\n* been set.\n* @throws IllegalStateException If the max parallelism changed for an operator that restores\n* state from this checkpoint.\n* @throws IllegalStateException If the parallelism changed for an operator that restores\n* non-partitioned state from this checkpoint.\n*/\npublic OptionalLong restoreLatestCheckpointedStateToSubtasks(\nfinal Set tasks) throws Exception {\nreturn restoreLatestCheckpointedStateInternal(\ntasks,\nOperatorCoordinatorRestoreBehavior\n.SKIP,\nfalse,\ntrue,\nfalse);\n}\n/**\n* Restores the latest checkpointed state to all tasks and all coordinators. This method\n* represents a \"global restore\"-style operation where all stateful tasks and coordinators from\n* the given set of Job Vertices are restored. are restored to their latest checkpointed state.\n*\n* @param tasks Set of job vertices to restore. 
State for these vertices is restored via {@link\n* Execution\n* @param allowNonRestoredState Allow checkpoint state that cannot be mapped to any job vertex\n* in tasks.\n* @return true if state was restored, false otherwise.\n* @throws IllegalStateException If the CheckpointCoordinator is shut down.\n* @throws IllegalStateException If no completed checkpoint is available and the \n* failIfNoCheckpoint flag has been set.\n* @throws IllegalStateException If the checkpoint contains state that cannot be mapped to any\n* job vertex in tasks and the allowNonRestoredState flag has not\n* been set.\n* @throws IllegalStateException If the max parallelism changed for an operator that restores\n* state from this checkpoint.\n* @throws IllegalStateException If the parallelism changed for an operator that restores\n* non-partitioned state from this checkpoint.\n*/\npublic boolean restoreLatestCheckpointedStateToAll(\nfinal Set tasks, final boolean allowNonRestoredState)\nthrows Exception {\nfinal OptionalLong restoredCheckpointId =\nrestoreLatestCheckpointedStateInternal(\ntasks,\nOperatorCoordinatorRestoreBehavior\n.RESTORE_OR_RESET,\nfalse,\nallowNonRestoredState,\nfalse);\nreturn restoredCheckpointId.isPresent();\n}\n/**\n* Restores the latest checkpointed at the beginning of the job execution. If there is a\n* checkpoint, this method acts like a \"global restore\"-style operation where all stateful tasks\n* and coordinators from the given set of Job Vertices are restored.\n*\n* @param tasks Set of job vertices to restore. State for these vertices is restored via {@link\n* Execution\n* @return True, if a checkpoint was found and its state was restored, false otherwise.\n*/\npublic boolean restoreInitialCheckpointIfPresent(final Set tasks)\nthrows Exception {\nfinal OptionalLong restoredCheckpointId =\nrestoreLatestCheckpointedStateInternal(\ntasks,\nOperatorCoordinatorRestoreBehavior.RESTORE_IF_CHECKPOINT_PRESENT,\nfalse,\nfalse,\ntrue);\nreturn restoredCheckpointId.isPresent();\n}\n/**\n* Performs the actual restore operation to the given tasks.\n*\n*
This method returns the restored checkpoint ID (as an optional) or an empty optional, if\n* no checkpoint was restored.\n*/\nprivate OptionalLong restoreLatestCheckpointedStateInternal(\nfinal Set tasks,\nfinal OperatorCoordinatorRestoreBehavior operatorCoordinatorRestoreBehavior,\nfinal boolean errorIfNoCheckpoint,\nfinal boolean allowNonRestoredState,\nfinal boolean checkForPartiallyFinishedOperators)\nthrows Exception {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalStateException(\"CheckpointCoordinator is shut down\");\n}\nCompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint();\nif (latest == null) {\nLOG.info(\"No checkpoint found during restore.\");\nif (errorIfNoCheckpoint) {\nthrow new IllegalStateException(\"No completed checkpoint available\");\n}\nLOG.debug(\"Resetting the master hooks.\");\nMasterHooks.reset(masterHooks.values(), LOG);\nif (operatorCoordinatorRestoreBehavior\n== OperatorCoordinatorRestoreBehavior.RESTORE_OR_RESET) {\nLOG.info(\"Resetting the Operator Coordinators to an empty state.\");\nrestoreStateToCoordinators(\nOperatorCoordinator.NO_CHECKPOINT, Collections.emptyMap());\n}\nreturn OptionalLong.empty();\n}\nLOG.info(\"Restoring job {} from {}.\", job, latest);\nthis.forceFullSnapshot = latest.getProperties().isUnclaimed();\nfinal Map operatorStates = extractOperatorStates(latest);\nif (checkForPartiallyFinishedOperators) {\nVertexFinishedStateChecker vertexFinishedStateChecker =\nvertexFinishedStateCheckerFactory.apply(tasks, operatorStates);\nvertexFinishedStateChecker.validateOperatorsFinishedState();\n}\nStateAssignmentOperation stateAssignmentOperation =\nnew StateAssignmentOperation(\nlatest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState);\nstateAssignmentOperation.assignStates();\nMasterHooks.restoreMasterHooks(\nmasterHooks,\nlatest.getMasterHookStates(),\nlatest.getCheckpointID(),\nallowNonRestoredState,\nLOG);\nif (operatorCoordinatorRestoreBehavior != OperatorCoordinatorRestoreBehavior.SKIP) {\nrestoreStateToCoordinators(latest.getCheckpointID(), operatorStates);\n}\nlong restoreTimestamp = System.currentTimeMillis();\nRestoredCheckpointStats restored =\nnew RestoredCheckpointStats(\nlatest.getCheckpointID(),\nlatest.getProperties(),\nrestoreTimestamp,\nlatest.getExternalPointer());\nstatsTracker.reportRestoredCheckpoint(restored);\nreturn OptionalLong.of(latest.getCheckpointID());\n}\n}\nprivate Map extractOperatorStates(CompletedCheckpoint checkpoint) {\nMap originalOperatorStates = checkpoint.getOperatorStates();\nif (checkpoint.getCheckpointID() != checkpointIdOfIgnoredInFlightData) {\nreturn originalOperatorStates;\n}\nHashMap newStates = new HashMap<>();\nfor (OperatorState originalOperatorState : originalOperatorStates.values()) {\nnewStates.put(\noriginalOperatorState.getOperatorID(),\noriginalOperatorState.copyAndDiscardInFlightData());\n}\nreturn newStates;\n}\n/**\n* Restore the state with given savepoint.\n*\n* @param restoreSettings Settings for a snapshot to restore from. Includes the path and\n* parameters for the restore process.\n* @param tasks Map of job vertices to restore. 
State for these vertices is restored via {@link\n* Execution\n* @param userClassLoader The class loader to resolve serialized classes in legacy savepoint\n* versions.\n*/\npublic boolean restoreSavepoint(\nSavepointRestoreSettings restoreSettings,\nMap tasks,\nClassLoader userClassLoader)\nthrows Exception {\nfinal String savepointPointer = restoreSettings.getRestorePath();\nfinal boolean allowNonRestored = restoreSettings.allowNonRestoredState();\nPreconditions.checkNotNull(savepointPointer, \"The savepoint path cannot be null.\");\nLOG.info(\n\"Starting job {} from savepoint {} ({})\",\njob,\nsavepointPointer,\n(allowNonRestored ? \"allowing non restored state\" : \"\"));\nfinal CompletedCheckpointStorageLocation checkpointLocation =\ncheckpointStorageView.resolveCheckpoint(savepointPointer);\nfinal CheckpointProperties checkpointProperties;\nswitch (restoreSettings.getRestoreMode()) {\ncase CLAIM:\ncheckpointProperties = this.checkpointProperties;\nbreak;\ncase LEGACY:\ncheckpointProperties =\nCheckpointProperties.forSavepoint(\nfalse,\nSavepointFormatType.CANONICAL);\nbreak;\ncase NO_CLAIM:\ncheckpointProperties = CheckpointProperties.forUnclaimedSnapshot();\nbreak;\ndefault:\nthrow new IllegalArgumentException(\"Unknown snapshot restore mode\");\n}\nCompletedCheckpoint savepoint =\nCheckpoints.loadAndValidateCheckpoint(\njob,\ntasks,\ncheckpointLocation,\nuserClassLoader,\nallowNonRestored,\ncheckpointProperties);\nsavepoint.registerSharedStatesAfterRestored(\ncompletedCheckpointStore.getSharedStateRegistry(),\nrestoreSettings.getRestoreMode());\ncompletedCheckpointStore.addCheckpointAndSubsumeOldestOne(\nsavepoint, checkpointsCleaner, this::scheduleTriggerRequest);\nlong nextCheckpointId = savepoint.getCheckpointID() + 1;\ncheckpointIdCounter.setCount(nextCheckpointId);\nLOG.info(\"Reset the checkpoint ID of job {} to {}.\", job, nextCheckpointId);\nfinal OptionalLong restoredCheckpointId =\nrestoreLatestCheckpointedStateInternal(\nnew HashSet<>(tasks.values()),\nOperatorCoordinatorRestoreBehavior.RESTORE_IF_CHECKPOINT_PRESENT,\ntrue,\nallowNonRestored,\ntrue);\nreturn restoredCheckpointId.isPresent();\n}\npublic int getNumberOfPendingCheckpoints() {\nsynchronized (lock) {\nreturn this.pendingCheckpoints.size();\n}\n}\npublic int getNumberOfRetainedSuccessfulCheckpoints() {\nsynchronized (lock) {\nreturn completedCheckpointStore.getNumberOfRetainedCheckpoints();\n}\n}\npublic Map getPendingCheckpoints() {\nsynchronized (lock) {\nreturn new HashMap<>(this.pendingCheckpoints);\n}\n}\npublic List getSuccessfulCheckpoints() throws Exception {\nsynchronized (lock) {\nreturn completedCheckpointStore.getAllCheckpoints();\n}\n}\npublic CheckpointStorageCoordinatorView getCheckpointStorage() {\nreturn checkpointStorageView;\n}\npublic CompletedCheckpointStore getCheckpointStore() {\nreturn completedCheckpointStore;\n}\n/**\n* Gets the checkpoint interval. Its value might vary depending on whether there is processing\n* backlog.\n*/\nprivate long getCurrentCheckpointInterval() {\nreturn backlogOperators.isEmpty() ? 
baseInterval : baseIntervalDuringBacklog;\n}\npublic long getCheckpointTimeout() {\nreturn checkpointTimeout;\n}\n/** @deprecated use {@link\n@Deprecated\n@VisibleForTesting\nPriorityQueue getTriggerRequestQueue() {\nsynchronized (lock) {\nreturn requestDecider.getTriggerRequestQueue();\n}\n}\npublic boolean isTriggering() {\nreturn isTriggering;\n}\n@VisibleForTesting\nboolean isCurrentPeriodicTriggerAvailable() {\nreturn currentPeriodicTrigger != null;\n}\n/**\n* Returns whether periodic checkpointing has been configured.\n*\n* @return true if periodic checkpoints have been configured.\n*/\npublic boolean isPeriodicCheckpointingConfigured() {\nreturn baseInterval != Long.MAX_VALUE;\n}\npublic void startCheckpointScheduler() {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalArgumentException(\"Checkpoint coordinator is shut down\");\n}\nPreconditions.checkState(\nisPeriodicCheckpointingConfigured(),\n\"Can not start checkpoint scheduler, if no periodic checkpointing is configured\");\nstopCheckpointScheduler();\nperiodicScheduling = true;\nlong initDelay = getRandomInitDelay();\nnextCheckpointTriggeringRelativeTime = clock.relativeTimeMillis() + initDelay;\ncurrentPeriodicTrigger = scheduleTriggerWithDelay(initDelay);\n}\n}\npublic void stopCheckpointScheduler() {\nsynchronized (lock) {\nperiodicScheduling = false;\ncancelPeriodicTrigger();\nfinal CheckpointException reason =\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND);\nabortPendingAndQueuedCheckpoints(reason);\n}\n}\npublic boolean isPeriodicCheckpointingStarted() {\nreturn periodicScheduling;\n}\n/**\n* Aborts all the pending checkpoints due to en exception.\n*\n* @param exception The exception.\n*/\npublic void abortPendingCheckpoints(CheckpointException exception) {\nsynchronized (lock) {\nabortPendingCheckpoints(ignored -> true, exception);\n}\n}\nprivate void abortPendingCheckpoints(\nPredicate checkpointToFailPredicate, CheckpointException exception) {\nassert Thread.holdsLock(lock);\nfinal PendingCheckpoint[] pendingCheckpointsToFail =\npendingCheckpoints.values().stream()\n.filter(checkpointToFailPredicate)\n.toArray(PendingCheckpoint[]::new);\nfor (PendingCheckpoint pendingCheckpoint : pendingCheckpointsToFail) {\nabortPendingCheckpoint(pendingCheckpoint, exception);\n}\n}\nprivate void rescheduleTrigger(long tillNextMillis) {\ncancelPeriodicTrigger();\nnextCheckpointTriggeringRelativeTime = clock.relativeTimeMillis() + tillNextMillis;\ncurrentPeriodicTrigger = scheduleTriggerWithDelay(tillNextMillis);\n}\nprivate void cancelPeriodicTrigger() {\nif (currentPeriodicTrigger != null) {\nnextCheckpointTriggeringRelativeTime = Long.MAX_VALUE;\ncurrentPeriodicTrigger.cancel(false);\ncurrentPeriodicTrigger = null;\n}\n}\nprivate long getRandomInitDelay() {\nreturn ThreadLocalRandom.current()\n.nextLong(minPauseBetweenCheckpoints, getCurrentCheckpointInterval() + 1L);\n}\nprivate ScheduledFuture scheduleTriggerWithDelay(long initDelay) {\nreturn timer.schedule(new ScheduledTrigger(), initDelay, TimeUnit.MILLISECONDS);\n}\nprivate void restoreStateToCoordinators(\nfinal long checkpointId, final Map operatorStates)\nthrows Exception {\nfor (OperatorCoordinatorCheckpointContext coordContext : coordinatorsToCheckpoint) {\nfinal OperatorState state = operatorStates.get(coordContext.operatorId());\nfinal ByteStreamStateHandle coordinatorState =\nstate == null ? null : state.getCoordinatorState();\nfinal byte[] bytes = coordinatorState == null ? 
null : coordinatorState.getData();\ncoordContext.resetToCheckpoint(checkpointId, bytes);\n}\n}\npublic JobStatusListener createActivatorDeactivator() {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalArgumentException(\"Checkpoint coordinator is shut down\");\n}\nif (jobStatusListener == null) {\njobStatusListener = new CheckpointCoordinatorDeActivator(this);\n}\nreturn jobStatusListener;\n}\n}\nint getNumQueuedRequests() {\nsynchronized (lock) {\nreturn requestDecider.getNumQueuedRequests();\n}\n}\npublic void reportStats(long id, ExecutionAttemptID attemptId, CheckpointMetrics metrics) {\nstatsTracker.reportIncompleteStats(id, attemptId, metrics);\n}\nfinal class ScheduledTrigger implements Runnable {\n@Override\npublic void run() {\nlong checkpointInterval = getCurrentCheckpointInterval();\nif (checkpointInterval != Long.MAX_VALUE) {\nnextCheckpointTriggeringRelativeTime += checkpointInterval;\ncurrentPeriodicTrigger =\ntimer.schedule(this, checkpointInterval, TimeUnit.MILLISECONDS);\n} else {\nnextCheckpointTriggeringRelativeTime = Long.MAX_VALUE;\ncurrentPeriodicTrigger = null;\n}\ntry {\ntriggerCheckpoint(checkpointProperties, null, true);\n} catch (Exception e) {\nLOG.error(\"Exception while triggering checkpoint for job {}.\", job, e);\n}\n}\n}\n/**\n* Asynchronously discards the given state object belonging to the given job, execution attempt\n* id and checkpoint id.\n*\n* @param jobId identifying the job to which the state object belongs\n* @param executionAttemptID identifying the task to which the state object belongs\n* @param checkpointId of the state object\n* @param subtaskState to discard asynchronously\n*/\nprivate void discardSubtaskState(\nfinal JobID jobId,\nfinal ExecutionAttemptID executionAttemptID,\nfinal long checkpointId,\nfinal TaskStateSnapshot subtaskState) {\nif (subtaskState != null) {\nexecutor.execute(\nnew Runnable() {\n@Override\npublic void run() {\ntry {\nsubtaskState.discardState();\n} catch (Throwable t2) {\nLOG.warn(\n\"Could not properly discard state object of checkpoint {} \"\n+ \"belonging to task {} of job {}.\",\ncheckpointId,\nexecutionAttemptID,\njobId,\nt2);\n}\n}\n});\n}\n}\nprivate void abortPendingCheckpoint(\nPendingCheckpoint pendingCheckpoint, CheckpointException exception) {\nabortPendingCheckpoint(pendingCheckpoint, exception, null);\n}\nprivate void abortPendingCheckpoint(\nPendingCheckpoint pendingCheckpoint,\nCheckpointException exception,\n@Nullable final ExecutionAttemptID executionAttemptID) {\nassert (Thread.holdsLock(lock));\nif (!pendingCheckpoint.isDisposed()) {\ntry {\npendingCheckpoint.abort(\nexception.getCheckpointFailureReason(),\nexception.getCause(),\ncheckpointsCleaner,\nthis::scheduleTriggerRequest,\nexecutor,\nstatsTracker);\nfailureManager.handleCheckpointException(\npendingCheckpoint,\npendingCheckpoint.getProps(),\nexception,\nexecutionAttemptID,\njob,\ngetStatsCallback(pendingCheckpoint),\nstatsTracker);\n} finally {\nsendAbortedMessages(\npendingCheckpoint.getCheckpointPlan().getTasksToCommitTo(),\npendingCheckpoint.getCheckpointID(),\npendingCheckpoint.getCheckpointTimestamp());\npendingCheckpoints.remove(pendingCheckpoint.getCheckpointID());\nrememberRecentCheckpointId(pendingCheckpoint.getCheckpointID());\nscheduleTriggerRequest();\n}\n}\n}\nprivate void preCheckGlobalState(boolean isPeriodic) throws CheckpointException {\nif (shutdown) {\nthrow new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);\n}\nif (isPeriodic && !periodicScheduling) {\nthrow new 
CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN);\n}\n}\nprivate void abortPendingAndQueuedCheckpoints(CheckpointException exception) {\nassert (Thread.holdsLock(lock));\nrequestDecider.abortAll(exception);\nabortPendingCheckpoints(exception);\n}\n/**\n* The canceller of a checkpoint. The checkpoint is cancelled if it does not finish within the\n* configured period.\n*/\nclass CheckpointCanceller implements Runnable {\nprivate final PendingCheckpoint pendingCheckpoint;\nprivate CheckpointCanceller(PendingCheckpoint pendingCheckpoint) {\nthis.pendingCheckpoint = checkNotNull(pendingCheckpoint);\n}\n@Override\npublic void run() {\nsynchronized (lock) {\nif (!pendingCheckpoint.isDisposed()) {\nLOG.info(\n\"Checkpoint {} of job {} expired before completing.\",\npendingCheckpoint.getCheckpointID(),\njob);\nabortPendingCheckpoint(\npendingCheckpoint,\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_EXPIRED));\n}\n}\n}\n}\nprivate static CheckpointException getCheckpointException(\nCheckpointFailureReason defaultReason, Throwable throwable) {\nfinal Optional ioExceptionOptional =\nfindThrowable(throwable, IOException.class);\nif (ioExceptionOptional.isPresent()) {\nreturn new CheckpointException(CheckpointFailureReason.IO_EXCEPTION, throwable);\n} else {\nfinal Optional checkpointExceptionOptional =\nfindThrowable(throwable, CheckpointException.class);\nreturn checkpointExceptionOptional.orElseGet(\n() -> new CheckpointException(defaultReason, throwable));\n}\n}\nstatic class CheckpointTriggerRequest {\nfinal long timestamp;\nfinal CheckpointProperties props;\nfinal @Nullable String externalSavepointLocation;\nfinal boolean isPeriodic;\nprivate final CompletableFuture onCompletionPromise =\nnew CompletableFuture<>();\nCheckpointTriggerRequest(\nCheckpointProperties props,\n@Nullable String externalSavepointLocation,\nboolean isPeriodic) {\nthis.timestamp = System.currentTimeMillis();\nthis.props = checkNotNull(props);\nthis.externalSavepointLocation = externalSavepointLocation;\nthis.isPeriodic = isPeriodic;\n}\nCompletableFuture getOnCompletionFuture() {\nreturn onCompletionPromise;\n}\npublic void completeExceptionally(CheckpointException exception) {\nonCompletionPromise.completeExceptionally(exception);\n}\npublic boolean isForce() {\nreturn props.forceCheckpoint();\n}\n}\nprivate enum OperatorCoordinatorRestoreBehavior {\n/** Coordinators are always restored. If there is no checkpoint, they are restored empty. */\nRESTORE_OR_RESET,\n/** Coordinators are restored if there was a checkpoint. */\nRESTORE_IF_CHECKPOINT_PRESENT,\n/** Coordinators are not restored during this checkpoint restore. 
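Used, for example, by local/regional failover, which leaves coordinator state untouched. 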
*/\nSKIP;\n}\nprivate PendingCheckpointStats trackPendingCheckpointStats(\nlong checkpointId,\nCheckpointPlan checkpointPlan,\nCheckpointProperties props,\nlong checkpointTimestamp) {\nMap vertices =\nStream.concat(\ncheckpointPlan.getTasksToWaitFor().stream(),\ncheckpointPlan.getFinishedTasks().stream())\n.map(Execution::getVertex)\n.map(ExecutionVertex::getJobVertex)\n.distinct()\n.collect(\ntoMap(\nExecutionJobVertex::getJobVertexId,\nExecutionJobVertex::getParallelism));\nPendingCheckpointStats pendingCheckpointStats =\nstatsTracker.reportPendingCheckpoint(\ncheckpointId, checkpointTimestamp, props, vertices);\nreportFinishedTasks(pendingCheckpointStats, checkpointPlan.getFinishedTasks());\nreturn pendingCheckpointStats;\n}\nprivate void reportFinishedTasks(\nPendingCheckpointStats pendingCheckpointStats, List finishedTasks) {\nlong now = System.currentTimeMillis();\nfinishedTasks.forEach(\nexecution ->\npendingCheckpointStats.reportSubtaskStats(\nexecution.getVertex().getJobvertexId(),\nnew SubtaskStateStats(execution.getParallelSubtaskIndex(), now)));\n}\n@Nullable\nprivate PendingCheckpointStats getStatsCallback(PendingCheckpoint pendingCheckpoint) {\nreturn statsTracker.getPendingCheckpointStats(pendingCheckpoint.getCheckpointID());\n}\n}", + "context_after": "class CheckpointCoordinator {\nprivate static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class);\n/** The number of recent checkpoints whose IDs are remembered. */\nprivate static final int NUM_GHOST_CHECKPOINT_IDS = 16;\n/** Coordinator-wide lock to safeguard the checkpoint updates. */\nprivate final Object lock = new Object();\n/** The job whose checkpoint this coordinator coordinates. */\nprivate final JobID job;\n/** Default checkpoint properties. */\nprivate final CheckpointProperties checkpointProperties;\n/** The executor used for asynchronous calls, like potentially blocking I/O. */\nprivate final Executor executor;\nprivate final CheckpointsCleaner checkpointsCleaner;\n/** The operator coordinators that need to be checkpointed. */\nprivate final Collection coordinatorsToCheckpoint;\n/** Map from checkpoint ID to the pending checkpoint. */\n@GuardedBy(\"lock\")\nprivate final Map pendingCheckpoints;\n/**\n* Completed checkpoints. Implementations can be blocking. Make sure calls to methods accessing\n* this don't block the job manager actor and run asynchronously.\n*/\nprivate final CompletedCheckpointStore completedCheckpointStore;\n/**\n* The root checkpoint state backend, which is responsible for initializing the checkpoint,\n* storing the metadata, and cleaning up the checkpoint.\n*/\nprivate final CheckpointStorageCoordinatorView checkpointStorageView;\n/** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */\nprivate final ArrayDeque recentPendingCheckpoints;\n/**\n* Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these need to\n* be ascending across job managers.\n*/\nprivate final CheckpointIDCounter checkpointIdCounter;\n/**\n* The checkpoint interval when there is no source reporting isProcessingBacklog=true. Actual\n* trigger time may be affected by the max concurrent checkpoints, minimum-pause values and\n* checkpoint interval during backlog.\n*/\nprivate final long baseInterval;\n/**\n* The checkpoint interval when any source reports isProcessingBacklog=true. 
Actual trigger time\n* may be affected by the max concurrent checkpoints and minimum-pause values.\n*/\nprivate final long baseIntervalDuringBacklog;\n/** The max time (in ms) that a checkpoint may take. */\nprivate final long checkpointTimeout;\n/**\n* The min time (in ms) to delay after a checkpoint could be triggered. Allows enforcing a\n* minimum processing time between checkpoint attempts.\n*/\nprivate final long minPauseBetweenCheckpoints;\n/**\n* The timer that handles the checkpoint timeouts and triggers periodic checkpoints. It must be\n* single-threaded. Eventually it will be replaced by the main thread executor.\n*/\nprivate final ScheduledExecutor timer;\n/** The master checkpoint hooks executed by this checkpoint coordinator. */\nprivate final HashMap> masterHooks;\nprivate final boolean unalignedCheckpointsEnabled;\nprivate final long alignedCheckpointTimeout;\n/** Actor that receives status updates from the execution graph this coordinator works for. */\nprivate JobStatusListener jobStatusListener;\n/**\n* The current periodic trigger. Used to deduplicate concurrently scheduled checkpoints if any.\n*/\n@GuardedBy(\"lock\")\nprivate ScheduledTrigger currentPeriodicTrigger;\n/** A handle to the current periodic trigger, to cancel it when necessary. */\n@GuardedBy(\"lock\")\nprivate ScheduledFuture currentPeriodicTriggerFuture;\n/**\n* The timestamp (via {@link Clock\n* triggered.\n*\n*

If its value is {@link Long\n* scheduled.\n*/\n@GuardedBy(\"lock\")\nprivate long nextCheckpointTriggeringRelativeTime;\n/**\n* The timestamp (via {@link Clock\n*/\nprivate long lastCheckpointCompletionRelativeTime;\n/**\n* Flag whether a triggered checkpoint should immediately schedule the next checkpoint.\n* Non-volatile, because only accessed in synchronized scope.\n*/\nprivate boolean periodicScheduling;\n/** Flag marking the coordinator as shut down (not accepting any messages any more). */\nprivate volatile boolean shutdown;\n/** Optional tracker for checkpoint statistics. */\nprivate final CheckpointStatsTracker statsTracker;\nprivate final BiFunction<\nSet,\nMap,\nVertexFinishedStateChecker>\nvertexFinishedStateCheckerFactory;\n/** Id of checkpoint for which in-flight data should be ignored on recovery. */\nprivate final long checkpointIdOfIgnoredInFlightData;\nprivate final CheckpointFailureManager failureManager;\nprivate final Clock clock;\nprivate final boolean isExactlyOnceMode;\n/** Flag representing that there is an in-flight trigger request. */\nprivate boolean isTriggering = false;\nprivate final CheckpointRequestDecider requestDecider;\nprivate final CheckpointPlanCalculator checkpointPlanCalculator;\n/** IDs of the source operators that are currently processing backlog. */\n@GuardedBy(\"lock\")\nprivate final Set backlogOperators = new HashSet<>();\nprivate boolean baseLocationsForCheckpointInitialized = false;\nprivate boolean forceFullSnapshot;\npublic CheckpointCoordinator(\nJobID job,\nCheckpointCoordinatorConfiguration chkConfig,\nCollection coordinatorsToCheckpoint,\nCheckpointIDCounter checkpointIDCounter,\nCompletedCheckpointStore completedCheckpointStore,\nCheckpointStorage checkpointStorage,\nExecutor executor,\nCheckpointsCleaner checkpointsCleaner,\nScheduledExecutor timer,\nCheckpointFailureManager failureManager,\nCheckpointPlanCalculator checkpointPlanCalculator,\nCheckpointStatsTracker statsTracker) {\nthis(\njob,\nchkConfig,\ncoordinatorsToCheckpoint,\ncheckpointIDCounter,\ncompletedCheckpointStore,\ncheckpointStorage,\nexecutor,\ncheckpointsCleaner,\ntimer,\nfailureManager,\ncheckpointPlanCalculator,\nSystemClock.getInstance(),\nstatsTracker,\nVertexFinishedStateChecker::new);\n}\n@VisibleForTesting\npublic CheckpointCoordinator(\nJobID job,\nCheckpointCoordinatorConfiguration chkConfig,\nCollection coordinatorsToCheckpoint,\nCheckpointIDCounter checkpointIDCounter,\nCompletedCheckpointStore completedCheckpointStore,\nCheckpointStorage checkpointStorage,\nExecutor executor,\nCheckpointsCleaner checkpointsCleaner,\nScheduledExecutor timer,\nCheckpointFailureManager failureManager,\nCheckpointPlanCalculator checkpointPlanCalculator,\nClock clock,\nCheckpointStatsTracker statsTracker,\nBiFunction<\nSet,\nMap,\nVertexFinishedStateChecker>\nvertexFinishedStateCheckerFactory) {\ncheckNotNull(checkpointStorage);\nlong minPauseBetweenCheckpoints = chkConfig.getMinPauseBetweenCheckpoints();\nif (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) {\nminPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000;\n}\nlong baseInterval = chkConfig.getCheckpointInterval();\nif (baseInterval < minPauseBetweenCheckpoints) {\nbaseInterval = minPauseBetweenCheckpoints;\n}\nthis.job = checkNotNull(job);\nthis.baseInterval = baseInterval;\nthis.baseIntervalDuringBacklog = chkConfig.getCheckpointIntervalDuringBacklog();\nthis.nextCheckpointTriggeringRelativeTime = Long.MAX_VALUE;\nthis.checkpointTimeout = chkConfig.getCheckpointTimeout();\nthis.minPauseBetweenCheckpoints = 
minPauseBetweenCheckpoints;\nthis.coordinatorsToCheckpoint =\nCollections.unmodifiableCollection(coordinatorsToCheckpoint);\nthis.pendingCheckpoints = new LinkedHashMap<>();\nthis.checkpointIdCounter = checkNotNull(checkpointIDCounter);\nthis.completedCheckpointStore = checkNotNull(completedCheckpointStore);\nthis.executor = checkNotNull(executor);\nthis.checkpointsCleaner = checkNotNull(checkpointsCleaner);\nthis.failureManager = checkNotNull(failureManager);\nthis.checkpointPlanCalculator = checkNotNull(checkpointPlanCalculator);\nthis.clock = checkNotNull(clock);\nthis.isExactlyOnceMode = chkConfig.isExactlyOnce();\nthis.unalignedCheckpointsEnabled = chkConfig.isUnalignedCheckpointsEnabled();\nthis.alignedCheckpointTimeout = chkConfig.getAlignedCheckpointTimeout();\nthis.checkpointIdOfIgnoredInFlightData = chkConfig.getCheckpointIdOfIgnoredInFlightData();\nthis.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS);\nthis.masterHooks = new HashMap<>();\nthis.timer = timer;\nthis.checkpointProperties =\nCheckpointProperties.forCheckpoint(chkConfig.getCheckpointRetentionPolicy());\ntry {\nthis.checkpointStorageView = checkpointStorage.createCheckpointStorage(job);\nif (isPeriodicCheckpointingConfigured()) {\ncheckpointStorageView.initializeBaseLocationsForCheckpoint();\nbaseLocationsForCheckpointInitialized = true;\n}\n} catch (IOException e) {\nthrow new FlinkRuntimeException(\n\"Failed to create checkpoint storage at checkpoint coordinator side.\", e);\n}\ntry {\ncheckpointIDCounter.start();\n} catch (Throwable t) {\nthrow new RuntimeException(\n\"Failed to start checkpoint ID counter: \" + t.getMessage(), t);\n}\nthis.requestDecider =\nnew CheckpointRequestDecider(\nchkConfig.getMaxConcurrentCheckpoints(),\nthis::rescheduleTrigger,\nthis.clock,\nthis.minPauseBetweenCheckpoints,\nthis.pendingCheckpoints::size,\nthis.checkpointsCleaner::getNumberOfCheckpointsToClean);\nthis.statsTracker = checkNotNull(statsTracker, \"Statistic tracker can not be null\");\nthis.vertexFinishedStateCheckerFactory = checkNotNull(vertexFinishedStateCheckerFactory);\n}\n/**\n* Adds the given master hook to the checkpoint coordinator. This method does nothing, if the\n* checkpoint coordinator already contained a hook with the same ID (as defined via {@link\n* MasterTriggerRestoreHook\n*\n* @param hook The hook to add.\n* @return True, if the hook was added, false if the checkpoint coordinator already contained a\n* hook with the same ID.\n*/\npublic boolean addMasterHook(MasterTriggerRestoreHook hook) {\ncheckNotNull(hook);\nfinal String id = hook.getIdentifier();\ncheckArgument(!StringUtils.isNullOrWhitespaceOnly(id), \"The hook has a null or empty id\");\nsynchronized (lock) {\nif (!masterHooks.containsKey(id)) {\nmasterHooks.put(id, hook);\nreturn true;\n} else {\nreturn false;\n}\n}\n}\n/** Gets the number of currently registered master hooks. */\npublic int getNumberOfRegisteredMasterHooks() {\nsynchronized (lock) {\nreturn masterHooks.size();\n}\n}\n/**\n* Shuts down the checkpoint coordinator.\n*\n*

After this method has been called, the coordinator does not accept any further messages\n* and cannot trigger any further checkpoints.\n*/\npublic void shutdown() throws Exception {\nsynchronized (lock) {\nif (!shutdown) {\nshutdown = true;\nLOG.info(\"Stopping checkpoint coordinator for job {}.\", job);\nperiodicScheduling = false;\nMasterHooks.close(masterHooks.values(), LOG);\nmasterHooks.clear();\nfinal CheckpointException reason =\nnew CheckpointException(\nCheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);\nabortPendingAndQueuedCheckpoints(reason);\n}\n}\n}\npublic boolean isShutdown() {\nreturn shutdown;\n}\n/**\n* Reports whether a source operator is currently processing backlog.\n*\n*

If any source operator is processing backlog, the checkpoint interval would be decided by\n* {@code execution.checkpointing.interval-during-backlog} instead of {@code\n* execution.checkpointing.interval}.\n*\n*

If a source has not invoked this method, the source is considered to have\n* isProcessingBacklog=false. If a source operator has invoked this method multiple times, the\n* last reported value is used.\n*\n* @param operatorID the operator ID of the source operator.\n* @param isProcessingBacklog whether the source operator is processing backlog.\n*/\n/**\n* Triggers a savepoint with the given savepoint directory as a target.\n*\n* @param targetLocation Target location for the savepoint, optional. If null, the state\n* backend's configured default will be used.\n* @return A future to the completed checkpoint\n* @throws IllegalStateException If no savepoint directory has been specified and no default\n* savepoint directory has been configured\n*/\npublic CompletableFuture triggerSavepoint(\n@Nullable final String targetLocation, final SavepointFormatType formatType) {\nfinal CheckpointProperties properties =\nCheckpointProperties.forSavepoint(!unalignedCheckpointsEnabled, formatType);\nreturn triggerSavepointInternal(properties, targetLocation);\n}\n/**\n* Triggers a synchronous savepoint with the given savepoint directory as a target.\n*\n* @param terminate flag indicating if the job should terminate or just suspend\n* @param targetLocation Target location for the savepoint, optional. If null, the state\n* backend's configured default will be used.\n* @return A future to the completed checkpoint\n* @throws IllegalStateException If no savepoint directory has been specified and no default\n* savepoint directory has been configured\n*/\npublic CompletableFuture triggerSynchronousSavepoint(\nfinal boolean terminate,\n@Nullable final String targetLocation,\nSavepointFormatType formatType) {\nfinal CheckpointProperties properties =\nCheckpointProperties.forSyncSavepoint(\n!unalignedCheckpointsEnabled, terminate, formatType);\nreturn triggerSavepointInternal(properties, targetLocation);\n}\nprivate CompletableFuture triggerSavepointInternal(\nfinal CheckpointProperties checkpointProperties,\n@Nullable final String targetLocation) {\ncheckNotNull(checkpointProperties);\nreturn triggerCheckpointFromCheckpointThread(checkpointProperties, targetLocation, false);\n}\nprivate CompletableFuture triggerCheckpointFromCheckpointThread(\nCheckpointProperties checkpointProperties, String targetLocation, boolean isPeriodic) {\nfinal CompletableFuture resultFuture = new CompletableFuture<>();\ntimer.execute(\n() ->\ntriggerCheckpoint(checkpointProperties, targetLocation, isPeriodic)\n.whenComplete(\n(completedCheckpoint, throwable) -> {\nif (throwable == null) {\nresultFuture.complete(completedCheckpoint);\n} else {\nresultFuture.completeExceptionally(throwable);\n}\n}));\nreturn resultFuture;\n}\n/**\n* Triggers a new standard checkpoint and uses the given timestamp as the checkpoint timestamp.\n* The return value is a future. It completes when the triggered checkpoint finishes or an error\n* occurs.\n*\n* @param isPeriodic Flag indicating whether this triggered checkpoint is periodic.\n* @return a future to the completed checkpoint.\n*/\npublic CompletableFuture triggerCheckpoint(boolean isPeriodic) {\nreturn triggerCheckpointFromCheckpointThread(checkpointProperties, null, isPeriodic);\n}\n/**\n* Triggers one new checkpoint with the given checkpointType. 
The returned future completes when\n* the triggered checkpoint finishes or an error occurs.\n*\n* @param checkpointType specifies the backup type of the checkpoint to trigger.\n* @return a future to the completed checkpoint.\n*/\npublic CompletableFuture triggerCheckpoint(CheckpointType checkpointType) {\nif (checkpointType == null) {\nthrow new IllegalArgumentException(\"checkpointType cannot be null\");\n}\nfinal SnapshotType snapshotType;\nswitch (checkpointType) {\ncase CONFIGURED:\nsnapshotType = checkpointProperties.getCheckpointType();\nbreak;\ncase FULL:\nsnapshotType = FULL_CHECKPOINT;\nbreak;\ncase INCREMENTAL:\nsnapshotType = CHECKPOINT;\nbreak;\ndefault:\nthrow new IllegalArgumentException(\"unknown checkpointType: \" + checkpointType);\n}\nfinal CheckpointProperties properties =\nnew CheckpointProperties(\ncheckpointProperties.forceCheckpoint(),\nsnapshotType,\ncheckpointProperties.discardOnSubsumed(),\ncheckpointProperties.discardOnJobFinished(),\ncheckpointProperties.discardOnJobCancelled(),\ncheckpointProperties.discardOnJobFailed(),\ncheckpointProperties.discardOnJobSuspended(),\ncheckpointProperties.isUnclaimed());\nreturn triggerCheckpointFromCheckpointThread(properties, null, false);\n}\n@VisibleForTesting\nCompletableFuture triggerCheckpoint(\nCheckpointProperties props,\n@Nullable String externalSavepointLocation,\nboolean isPeriodic) {\nCheckpointTriggerRequest request =\nnew CheckpointTriggerRequest(props, externalSavepointLocation, isPeriodic);\nchooseRequestToExecute(request).ifPresent(this::startTriggeringCheckpoint);\nreturn request.onCompletionPromise;\n}\nprivate void startTriggeringCheckpoint(CheckpointTriggerRequest request) {\ntry {\nsynchronized (lock) {\npreCheckGlobalState(request.isPeriodic);\n}\nPreconditions.checkState(!isTriggering);\nisTriggering = true;\nfinal long timestamp = System.currentTimeMillis();\nCompletableFuture checkpointPlanFuture =\ncheckpointPlanCalculator.calculateCheckpointPlan();\nboolean initializeBaseLocations = !baseLocationsForCheckpointInitialized;\nbaseLocationsForCheckpointInitialized = true;\nCompletableFuture masterTriggerCompletionPromise = new CompletableFuture<>();\nfinal CompletableFuture pendingCheckpointCompletableFuture =\ncheckpointPlanFuture\n.thenApplyAsync(\nplan -> {\ntry {\nlong checkpointID =\ncheckpointIdCounter.getAndIncrement();\nreturn new Tuple2<>(plan, checkpointID);\n} catch (Throwable e) {\nthrow new CompletionException(e);\n}\n},\nexecutor)\n.thenApplyAsync(\n(checkpointInfo) ->\ncreatePendingCheckpoint(\ntimestamp,\nrequest.props,\ncheckpointInfo.f0,\nrequest.isPeriodic,\ncheckpointInfo.f1,\nrequest.getOnCompletionFuture(),\nmasterTriggerCompletionPromise),\ntimer);\nfinal CompletableFuture coordinatorCheckpointsComplete =\npendingCheckpointCompletableFuture\n.thenApplyAsync(\npendingCheckpoint -> {\ntry {\nCheckpointStorageLocation checkpointStorageLocation =\ninitializeCheckpointLocation(\npendingCheckpoint.getCheckpointID(),\nrequest.props,\nrequest.externalSavepointLocation,\ninitializeBaseLocations);\nreturn Tuple2.of(\npendingCheckpoint, checkpointStorageLocation);\n} catch (Throwable e) {\nthrow new CompletionException(e);\n}\n},\nexecutor)\n.thenComposeAsync(\n(checkpointInfo) -> {\nPendingCheckpoint pendingCheckpoint = checkpointInfo.f0;\nif (pendingCheckpoint.isDisposed()) {\nreturn null;\n}\nsynchronized (lock) {\npendingCheckpoint.setCheckpointTargetLocation(\ncheckpointInfo.f1);\n}\nreturn 
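// trigger all coordinator checkpoints; the future completes once they are acknowledged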
OperatorCoordinatorCheckpoints\n.triggerAndAcknowledgeAllCoordinatorCheckpointsWithCompletion(\ncoordinatorsToCheckpoint,\npendingCheckpoint,\ntimer);\n},\ntimer);\nfinal CompletableFuture masterStatesComplete =\ncoordinatorCheckpointsComplete.thenComposeAsync(\nignored -> {\nPendingCheckpoint checkpoint =\nFutureUtils.getWithoutException(\npendingCheckpointCompletableFuture);\nif (checkpoint == null || checkpoint.isDisposed()) {\nreturn null;\n}\nreturn snapshotMasterState(checkpoint);\n},\ntimer);\nFutureUtils.forward(\nCompletableFuture.allOf(masterStatesComplete, coordinatorCheckpointsComplete),\nmasterTriggerCompletionPromise);\nFutureUtils.assertNoException(\nmasterTriggerCompletionPromise\n.handleAsync(\n(ignored, throwable) -> {\nfinal PendingCheckpoint checkpoint =\nFutureUtils.getWithoutException(\npendingCheckpointCompletableFuture);\nPreconditions.checkState(\ncheckpoint != null || throwable != null,\n\"Either the pending checkpoint needs to be created or an error must have occurred.\");\nif (throwable != null) {\nif (checkpoint == null) {\nonTriggerFailure(request, throwable);\n} else {\nonTriggerFailure(checkpoint, throwable);\n}\n} else {\ntriggerCheckpointRequest(\nrequest, timestamp, checkpoint);\n}\nreturn null;\n},\ntimer)\n.exceptionally(\nerror -> {\nif (!isShutdown()) {\nthrow new CompletionException(error);\n} else if (findThrowable(\nerror, RejectedExecutionException.class)\n.isPresent()) {\nLOG.debug(\"Execution rejected during shutdown\");\n} else {\nLOG.warn(\"Error encountered during shutdown\", error);\n}\nreturn null;\n}));\n} catch (Throwable throwable) {\nonTriggerFailure(request, throwable);\n}\n}\nprivate void triggerCheckpointRequest(\nCheckpointTriggerRequest request, long timestamp, PendingCheckpoint checkpoint) {\nif (checkpoint.isDisposed()) {\nonTriggerFailure(\ncheckpoint,\nnew CheckpointException(\nCheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE,\ncheckpoint.getFailureCause()));\n} else {\ntriggerTasks(request, timestamp, checkpoint)\n.exceptionally(\nfailure -> {\nLOG.info(\n\"Triggering Checkpoint {} for job {} failed due to {}\",\ncheckpoint.getCheckpointID(),\njob,\nfailure);\nfinal CheckpointException cause;\nif (failure instanceof CheckpointException) {\ncause = (CheckpointException) failure;\n} else {\ncause =\nnew CheckpointException(\nCheckpointFailureReason\n.TRIGGER_CHECKPOINT_FAILURE,\nfailure);\n}\ntimer.execute(\n() -> {\nsynchronized (lock) {\nabortPendingCheckpoint(checkpoint, cause);\n}\n});\nreturn null;\n});\nif (maybeCompleteCheckpoint(checkpoint)) {\nonTriggerSuccess();\n}\n}\n}\nprivate CompletableFuture triggerTasks(\nCheckpointTriggerRequest request, long timestamp, PendingCheckpoint checkpoint) {\nfinal long checkpointId = checkpoint.getCheckpointID();\nfinal SnapshotType type;\nif (this.forceFullSnapshot && !request.props.isSavepoint()) {\ntype = FULL_CHECKPOINT;\n} else {\ntype = request.props.getCheckpointType();\n}\nfinal CheckpointOptions checkpointOptions =\nCheckpointOptions.forConfig(\ntype,\ncheckpoint.getCheckpointStorageLocation().getLocationReference(),\nisExactlyOnceMode,\nunalignedCheckpointsEnabled,\nalignedCheckpointTimeout);\nList> acks = new ArrayList<>();\nfor (Execution execution : checkpoint.getCheckpointPlan().getTasksToTrigger()) {\nif (request.props.isSynchronous()) {\nacks.add(\nexecution.triggerSynchronousSavepoint(\ncheckpointId, timestamp, checkpointOptions));\n} else {\nacks.add(execution.triggerCheckpoint(checkpointId, timestamp, checkpointOptions));\n}\n}\nreturn 
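// completes once all trigger RPCs have been acknowledged by the triggered tasks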
FutureUtils.waitForAll(acks);\n}\n/**\n* Initializes the checkpoint location asynchronously. It is expected to be executed in an I/O\n* thread because it might be time-consuming.\n*\n* @param checkpointID checkpoint id\n* @param props checkpoint properties\n* @param externalSavepointLocation the external savepoint location, it might be null\n* @return the checkpoint location\n*/\nprivate CheckpointStorageLocation initializeCheckpointLocation(\nlong checkpointID,\nCheckpointProperties props,\n@Nullable String externalSavepointLocation,\nboolean initializeBaseLocations)\nthrows Exception {\nfinal CheckpointStorageLocation checkpointStorageLocation;\nif (props.isSavepoint()) {\ncheckpointStorageLocation =\ncheckpointStorageView.initializeLocationForSavepoint(\ncheckpointID, externalSavepointLocation);\n} else {\nif (initializeBaseLocations) {\ncheckpointStorageView.initializeBaseLocationsForCheckpoint();\n}\ncheckpointStorageLocation =\ncheckpointStorageView.initializeLocationForCheckpoint(checkpointID);\n}\nreturn checkpointStorageLocation;\n}\nprivate PendingCheckpoint createPendingCheckpoint(\nlong timestamp,\nCheckpointProperties props,\nCheckpointPlan checkpointPlan,\nboolean isPeriodic,\nlong checkpointID,\nCompletableFuture onCompletionPromise,\nCompletableFuture masterTriggerCompletionPromise) {\nsynchronized (lock) {\ntry {\npreCheckGlobalState(isPeriodic);\n} catch (Throwable t) {\nthrow new CompletionException(t);\n}\n}\nPendingCheckpointStats pendingCheckpointStats =\ntrackPendingCheckpointStats(checkpointID, checkpointPlan, props, timestamp);\nfinal PendingCheckpoint checkpoint =\nnew PendingCheckpoint(\njob,\ncheckpointID,\ntimestamp,\ncheckpointPlan,\nOperatorInfo.getIds(coordinatorsToCheckpoint),\nmasterHooks.keySet(),\nprops,\nonCompletionPromise,\npendingCheckpointStats,\nmasterTriggerCompletionPromise);\nsynchronized (lock) {\npendingCheckpoints.put(checkpointID, checkpoint);\nScheduledFuture cancellerHandle =\ntimer.schedule(\nnew CheckpointCanceller(checkpoint),\ncheckpointTimeout,\nTimeUnit.MILLISECONDS);\nif (!checkpoint.setCancellerHandle(cancellerHandle)) {\ncancellerHandle.cancel(false);\n}\n}\nLOG.info(\n\"Triggering checkpoint {} (type={}) @ {} for job {}.\",\ncheckpointID,\ncheckpoint.getProps().getCheckpointType(),\ntimestamp,\njob);\nreturn checkpoint;\n}\n/**\n* Snapshot master hook states asynchronously.\n*\n* @param checkpoint the pending checkpoint\n* @return a future that completes once all master hook states are finished\n*/\nprivate CompletableFuture snapshotMasterState(PendingCheckpoint checkpoint) {\nif (masterHooks.isEmpty()) {\nreturn CompletableFuture.completedFuture(null);\n}\nfinal long checkpointID = checkpoint.getCheckpointID();\nfinal long timestamp = checkpoint.getCheckpointTimestamp();\nfinal CompletableFuture masterStateCompletableFuture = new CompletableFuture<>();\nfor (MasterTriggerRestoreHook masterHook : masterHooks.values()) {\nMasterHooks.triggerHook(masterHook, checkpointID, timestamp, executor)\n.whenCompleteAsync(\n(masterState, throwable) -> {\ntry {\nsynchronized (lock) {\nif (masterStateCompletableFuture.isDone()) {\nreturn;\n}\nif (checkpoint.isDisposed()) {\nthrow new IllegalStateException(\n\"Checkpoint \"\n+ checkpointID\n+ \" has been discarded\");\n}\nif (throwable == null) {\ncheckpoint.acknowledgeMasterState(\nmasterHook.getIdentifier(), masterState);\nif (checkpoint.areMasterStatesFullyAcknowledged()) {\nmasterStateCompletableFuture.complete(null);\n}\n} else 
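// the hook failed: fail the shared master-state future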
{\nmasterStateCompletableFuture.completeExceptionally(\nthrowable);\n}\n}\n} catch (Throwable t) {\nmasterStateCompletableFuture.completeExceptionally(t);\n}\n},\ntimer);\n}\nreturn masterStateCompletableFuture;\n}\n/** The trigger request was successful. NOTE: this must be invoked whenever a trigger request succeeds. */\nprivate void onTriggerSuccess() {\nisTriggering = false;\nexecuteQueuedRequest();\n}\n/**\n* The trigger request failed prematurely, without proper initialization. There is no\n* resource to release, but the completion promise needs to be failed manually here.\n*\n* @param onCompletionPromise the completion promise of the checkpoint/savepoint\n* @param throwable the reason for the trigger failure\n*/\nprivate void onTriggerFailure(\nCheckpointTriggerRequest onCompletionPromise, Throwable throwable) {\nfinal CheckpointException checkpointException =\ngetCheckpointException(\nCheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable);\nonCompletionPromise.completeExceptionally(checkpointException);\nonTriggerFailure((PendingCheckpoint) null, onCompletionPromise.props, checkpointException);\n}\nprivate void onTriggerFailure(PendingCheckpoint checkpoint, Throwable throwable) {\ncheckArgument(checkpoint != null, \"Pending checkpoint can not be null.\");\nonTriggerFailure(checkpoint, checkpoint.getProps(), throwable);\n}\n/**\n* The trigger request failed. NOTE: this must be invoked whenever a trigger request fails.\n*\n* @param checkpoint the pending checkpoint which failed. It can be null if it failed\n* prematurely, without proper initialization.\n* @param throwable the reason for the trigger failure\n*/\nprivate void onTriggerFailure(\n@Nullable PendingCheckpoint checkpoint,\nCheckpointProperties checkpointProperties,\nThrowable throwable) {\nthrowable = ExceptionUtils.stripCompletionException(throwable);\ntry {\ncoordinatorsToCheckpoint.forEach(\nOperatorCoordinatorCheckpointContext::abortCurrentTriggering);\nfinal CheckpointException cause =\ngetCheckpointException(\nCheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable);\nif (checkpoint != null && !checkpoint.isDisposed()) {\nsynchronized (lock) {\nabortPendingCheckpoint(checkpoint, cause);\n}\n} else {\nfailureManager.handleCheckpointException(\ncheckpoint, checkpointProperties, cause, null, job, null, statsTracker);\n}\n} finally {\nisTriggering = false;\nexecuteQueuedRequest();\n}\n}\nprivate void executeQueuedRequest() {\nchooseQueuedRequestToExecute().ifPresent(this::startTriggeringCheckpoint);\n}\nprivate Optional chooseQueuedRequestToExecute() {\nsynchronized (lock) {\nreturn requestDecider.chooseQueuedRequestToExecute(\nisTriggering, lastCheckpointCompletionRelativeTime);\n}\n}\nprivate Optional chooseRequestToExecute(\nCheckpointTriggerRequest request) {\nsynchronized (lock) {\nOptional checkpointTriggerRequest =\nrequestDecider.chooseRequestToExecute(\nrequest, isTriggering, lastCheckpointCompletionRelativeTime);\nreturn checkpointTriggerRequest;\n}\n}\nprivate boolean maybeCompleteCheckpoint(PendingCheckpoint checkpoint) {\nsynchronized (lock) {\nif (checkpoint.isFullyAcknowledged()) {\ntry {\nif (shutdown) {\nreturn false;\n}\ncompletePendingCheckpoint(checkpoint);\n} catch (CheckpointException ce) {\nonTriggerFailure(checkpoint, ce);\nreturn false;\n}\n}\n}\nreturn true;\n}\n/**\n* Receives a {@link DeclineCheckpoint} message for a pending checkpoint.\n*\n* @param message Checkpoint decline from the task manager\n* @param taskManagerLocationInfo The location info of the decline checkpoint message's 
sender\n*/\npublic void receiveDeclineMessage(DeclineCheckpoint message, String taskManagerLocationInfo) {\nif (shutdown || message == null) {\nreturn;\n}\nif (!job.equals(message.getJob())) {\nthrow new IllegalArgumentException(\n\"Received DeclineCheckpoint message for job \"\n+ message.getJob()\n+ \" from \"\n+ taskManagerLocationInfo\n+ \" while this coordinator handles job \"\n+ job);\n}\nfinal long checkpointId = message.getCheckpointId();\nfinal CheckpointException checkpointException =\nmessage.getSerializedCheckpointException().unwrap();\nfinal String reason = checkpointException.getMessage();\nPendingCheckpoint checkpoint;\nsynchronized (lock) {\nif (shutdown) {\nreturn;\n}\ncheckpoint = pendingCheckpoints.get(checkpointId);\nif (checkpoint != null) {\nPreconditions.checkState(\n!checkpoint.isDisposed(),\n\"Received message for discarded but non-removed checkpoint \"\n+ checkpointId);\nLOG.info(\n\"Decline checkpoint {} by task {} of job {} at {}.\",\ncheckpointId,\nmessage.getTaskExecutionId(),\njob,\ntaskManagerLocationInfo,\ncheckpointException.getCause());\nabortPendingCheckpoint(\ncheckpoint, checkpointException, message.getTaskExecutionId());\n} else if (LOG.isDebugEnabled()) {\nif (recentPendingCheckpoints.contains(checkpointId)) {\nLOG.debug(\n\"Received another decline message for now expired checkpoint attempt {} from task {} of job {} at {} : {}\",\ncheckpointId,\nmessage.getTaskExecutionId(),\njob,\ntaskManagerLocationInfo,\nreason);\n} else {\nLOG.debug(\n\"Received decline message for unknown (too old?) checkpoint attempt {} from task {} of job {} at {} : {}\",\ncheckpointId,\nmessage.getTaskExecutionId(),\njob,\ntaskManagerLocationInfo,\nreason);\n}\n}\n}\n}\n/**\n* Receives an AcknowledgeCheckpoint message and returns whether the message was associated with\n* a pending checkpoint.\n*\n* @param message Checkpoint ack from the task manager\n* @param taskManagerLocationInfo The location of the acknowledge checkpoint message's sender\n* @return Flag indicating whether the ack'd checkpoint was associated with a pending\n* checkpoint.\n* @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint\n* store.\n*/\npublic boolean receiveAcknowledgeMessage(\nAcknowledgeCheckpoint message, String taskManagerLocationInfo)\nthrows CheckpointException {\nif (shutdown || message == null) {\nreturn false;\n}\nif (!job.equals(message.getJob())) {\nLOG.error(\n\"Received wrong AcknowledgeCheckpoint message for job {} from {} : {}\",\njob,\ntaskManagerLocationInfo,\nmessage);\nreturn false;\n}\nfinal long checkpointId = message.getCheckpointId();\nsynchronized (lock) {\nif (shutdown) {\nreturn false;\n}\nfinal PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId);\nif (message.getSubtaskState() != null) {\nif (checkpoint == null || !checkpoint.getProps().isSavepoint()) {\nmessage.getSubtaskState()\n.registerSharedStates(\ncompletedCheckpointStore.getSharedStateRegistry(),\ncheckpointId);\n}\n}\nif (checkpoint != null && !checkpoint.isDisposed()) {\nswitch (checkpoint.acknowledgeTask(\nmessage.getTaskExecutionId(),\nmessage.getSubtaskState(),\nmessage.getCheckpointMetrics())) {\ncase SUCCESS:\nLOG.debug(\n\"Received acknowledge message for checkpoint {} from task {} of job {} at {}.\",\ncheckpointId,\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\nif (checkpoint.isFullyAcknowledged()) {\ncompletePendingCheckpoint(checkpoint);\n}\nbreak;\ncase DUPLICATE:\nLOG.debug(\n\"Received a duplicate acknowledge message 
for checkpoint {}, task {}, job {}, location {}.\",\nmessage.getCheckpointId(),\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\nbreak;\ncase UNKNOWN:\nLOG.warn(\n\"Could not acknowledge the checkpoint {} for task {} of job {} at {}, \"\n+ \"because the task's execution attempt id was unknown. Discarding \"\n+ \"the state handle to avoid lingering state.\",\nmessage.getCheckpointId(),\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\ndiscardSubtaskState(\nmessage.getJob(),\nmessage.getTaskExecutionId(),\nmessage.getCheckpointId(),\nmessage.getSubtaskState());\nbreak;\ncase DISCARDED:\nLOG.warn(\n\"Could not acknowledge the checkpoint {} for task {} of job {} at {}, \"\n+ \"because the pending checkpoint had been discarded. Discarding the \"\n+ \"state handle to avoid lingering state.\",\nmessage.getCheckpointId(),\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\ndiscardSubtaskState(\nmessage.getJob(),\nmessage.getTaskExecutionId(),\nmessage.getCheckpointId(),\nmessage.getSubtaskState());\n}\nreturn true;\n} else if (checkpoint != null) {\nthrow new IllegalStateException(\n\"Received message for discarded but non-removed checkpoint \"\n+ checkpointId);\n} else {\nreportStats(\nmessage.getCheckpointId(),\nmessage.getTaskExecutionId(),\nmessage.getCheckpointMetrics());\nboolean wasPendingCheckpoint;\nif (recentPendingCheckpoints.contains(checkpointId)) {\nwasPendingCheckpoint = true;\nLOG.warn(\n\"Received late message for now expired checkpoint attempt {} from task \"\n+ \"{} of job {} at {}.\",\ncheckpointId,\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\n} else {\nLOG.debug(\n\"Received message for an unknown checkpoint {} from task {} of job {} at {}.\",\ncheckpointId,\nmessage.getTaskExecutionId(),\nmessage.getJob(),\ntaskManagerLocationInfo);\nwasPendingCheckpoint = false;\n}\ndiscardSubtaskState(\nmessage.getJob(),\nmessage.getTaskExecutionId(),\nmessage.getCheckpointId(),\nmessage.getSubtaskState());\nreturn wasPendingCheckpoint;\n}\n}\n}\n/**\n* Tries to complete the given pending checkpoint.\n*\n*

Important: This method should only be called in the checkpoint lock scope.\n*\n* @param pendingCheckpoint to complete\n* @throws CheckpointException if the completion failed\n*/\nprivate void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint)\nthrows CheckpointException {\nfinal long checkpointId = pendingCheckpoint.getCheckpointID();\nfinal CompletedCheckpoint completedCheckpoint;\nfinal CompletedCheckpoint lastSubsumed;\nfinal CheckpointProperties props = pendingCheckpoint.getProps();\ncompletedCheckpointStore.getSharedStateRegistry().checkpointCompleted(checkpointId);\ntry {\ncompletedCheckpoint = finalizeCheckpoint(pendingCheckpoint);\nPreconditions.checkState(pendingCheckpoint.isDisposed() && completedCheckpoint != null);\nif (!props.isSavepoint()) {\nlastSubsumed =\naddCompletedCheckpointToStoreAndSubsumeOldest(\ncheckpointId, completedCheckpoint, pendingCheckpoint);\n} else {\nlastSubsumed = null;\n}\npendingCheckpoint.getCompletionFuture().complete(completedCheckpoint);\nreportCompletedCheckpoint(completedCheckpoint);\n} catch (Exception exception) {\npendingCheckpoint.getCompletionFuture().completeExceptionally(exception);\nthrow exception;\n} finally {\npendingCheckpoints.remove(checkpointId);\nscheduleTriggerRequest();\n}\ncleanupAfterCompletedCheckpoint(\npendingCheckpoint, checkpointId, completedCheckpoint, lastSubsumed, props);\n}\nprivate void reportCompletedCheckpoint(CompletedCheckpoint completedCheckpoint) {\nfailureManager.handleCheckpointSuccess(completedCheckpoint.getCheckpointID());\nCompletedCheckpointStats completedCheckpointStats = completedCheckpoint.getStatistic();\nif (completedCheckpointStats != null) {\nLOG.trace(\n\"Checkpoint {} size: {}Kb, duration: {}ms\",\ncompletedCheckpoint.getCheckpointID(),\ncompletedCheckpointStats.getStateSize() == 0\n? 
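// report 0 for empty state instead of dividing; otherwise convert bytes to KiB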
0\n: completedCheckpointStats.getStateSize() / 1024,\ncompletedCheckpointStats.getEndToEndDuration());\nstatsTracker.reportCompletedCheckpoint(completedCheckpointStats);\n}\n}\nprivate void cleanupAfterCompletedCheckpoint(\nPendingCheckpoint pendingCheckpoint,\nlong checkpointId,\nCompletedCheckpoint completedCheckpoint,\nCompletedCheckpoint lastSubsumed,\nCheckpointProperties props) {\nrememberRecentCheckpointId(checkpointId);\nlastCheckpointCompletionRelativeTime = clock.relativeTimeMillis();\nlogCheckpointInfo(completedCheckpoint);\nif (!props.isSavepoint() || props.isSynchronous()) {\ndropSubsumedCheckpoints(checkpointId);\nsendAcknowledgeMessages(\npendingCheckpoint.getCheckpointPlan().getTasksToCommitTo(),\ncheckpointId,\ncompletedCheckpoint.getTimestamp(),\nextractIdIfDiscardedOnSubsumed(lastSubsumed));\n}\n}\nprivate void logCheckpointInfo(CompletedCheckpoint completedCheckpoint) {\nLOG.info(\n\"Completed checkpoint {} for job {} ({} bytes, checkpointDuration={} ms, finalizationTime={} ms).\",\ncompletedCheckpoint.getCheckpointID(),\njob,\ncompletedCheckpoint.getStateSize(),\ncompletedCheckpoint.getCompletionTimestamp() - completedCheckpoint.getTimestamp(),\nSystem.currentTimeMillis() - completedCheckpoint.getCompletionTimestamp());\nif (LOG.isDebugEnabled()) {\nStringBuilder builder = new StringBuilder();\nbuilder.append(\"Checkpoint state: \");\nfor (OperatorState state : completedCheckpoint.getOperatorStates().values()) {\nbuilder.append(state);\nbuilder.append(\", \");\n}\nbuilder.setLength(builder.length() - 2);\nLOG.debug(builder.toString());\n}\n}\nprivate CompletedCheckpoint finalizeCheckpoint(PendingCheckpoint pendingCheckpoint)\nthrows CheckpointException {\ntry {\nfinal CompletedCheckpoint completedCheckpoint =\npendingCheckpoint.finalizeCheckpoint(\ncheckpointsCleaner, this::scheduleTriggerRequest, executor);\nreturn completedCheckpoint;\n} catch (Exception e1) {\nfinal CheckpointFailureReason failureReason =\ne1 instanceof PartialFinishingNotSupportedByStateException\n? 
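// a task started closing; surface this as a decline rather than a finalization failure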
CheckpointFailureReason.CHECKPOINT_DECLINED_TASK_CLOSING\n: CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE;\nif (!pendingCheckpoint.isDisposed()) {\nabortPendingCheckpoint(\npendingCheckpoint, new CheckpointException(failureReason, e1));\n}\nthrow new CheckpointException(\n\"Could not finalize the pending checkpoint \"\n+ pendingCheckpoint.getCheckpointID()\n+ '.',\nfailureReason,\ne1);\n}\n}\nprivate long extractIdIfDiscardedOnSubsumed(CompletedCheckpoint lastSubsumed) {\nfinal long lastSubsumedCheckpointId;\nif (lastSubsumed != null && lastSubsumed.getProperties().discardOnSubsumed()) {\nlastSubsumedCheckpointId = lastSubsumed.getCheckpointID();\n} else {\nlastSubsumedCheckpointId = CheckpointStoreUtil.INVALID_CHECKPOINT_ID;\n}\nreturn lastSubsumedCheckpointId;\n}\nprivate CompletedCheckpoint addCompletedCheckpointToStoreAndSubsumeOldest(\nlong checkpointId,\nCompletedCheckpoint completedCheckpoint,\nPendingCheckpoint pendingCheckpoint)\nthrows CheckpointException {\nList tasksToAbort =\npendingCheckpoint.getCheckpointPlan().getTasksToCommitTo();\ntry {\nfinal CompletedCheckpoint subsumedCheckpoint =\ncompletedCheckpointStore.addCheckpointAndSubsumeOldestOne(\ncompletedCheckpoint, checkpointsCleaner, this::scheduleTriggerRequest);\nthis.forceFullSnapshot = false;\nreturn subsumedCheckpoint;\n} catch (Exception exception) {\npendingCheckpoint.getCompletionFuture().completeExceptionally(exception);\nif (exception instanceof PossibleInconsistentStateException) {\nLOG.warn(\n\"An error occurred while writing checkpoint {} to the underlying metadata\"\n+ \" store. Flink was not able to determine whether the metadata was\"\n+ \" successfully persisted. The corresponding state located at '{}'\"\n+ \" won't be discarded and needs to be cleaned up manually.\",\ncompletedCheckpoint.getCheckpointID(),\ncompletedCheckpoint.getExternalPointer());\n} else {\ncheckpointsCleaner.cleanCheckpointOnFailedStoring(completedCheckpoint, executor);\n}\nfinal CheckpointException checkpointException =\nnew CheckpointException(\n\"Could not complete the pending checkpoint \" + checkpointId + '.',\nCheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE,\nexception);\nreportFailedCheckpoint(pendingCheckpoint, checkpointException);\nsendAbortedMessages(tasksToAbort, checkpointId, completedCheckpoint.getTimestamp());\nthrow checkpointException;\n}\n}\nprivate void reportFailedCheckpoint(\nPendingCheckpoint pendingCheckpoint, CheckpointException exception) {\nfailureManager.handleCheckpointException(\npendingCheckpoint,\npendingCheckpoint.getProps(),\nexception,\nnull,\njob,\ngetStatsCallback(pendingCheckpoint),\nstatsTracker);\n}\nvoid scheduleTriggerRequest() {\nsynchronized (lock) {\nif (isShutdown()) {\nLOG.debug(\n\"Skip scheduling trigger request because the CheckpointCoordinator is shut down\");\n} else {\ntimer.execute(this::executeQueuedRequest);\n}\n}\n}\n@VisibleForTesting\nvoid sendAcknowledgeMessages(\nList tasksToCommit,\nlong completedCheckpointId,\nlong completedTimestamp,\nlong lastSubsumedCheckpointId) {\nfor (ExecutionVertex ev : tasksToCommit) {\nExecution ee = ev.getCurrentExecutionAttempt();\nif (ee != null) {\nee.notifyCheckpointOnComplete(\ncompletedCheckpointId, completedTimestamp, lastSubsumedCheckpointId);\n}\n}\nfor (OperatorCoordinatorCheckpointContext coordinatorContext : coordinatorsToCheckpoint) {\ncoordinatorContext.notifyCheckpointComplete(completedCheckpointId);\n}\n}\nprivate void sendAbortedMessages(\nList tasksToAbort, long checkpointId, long timeStamp) {\nassert 
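// abort notifications must be sent while holding the coordinator-wide lock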
(Thread.holdsLock(lock));\nlong latestCompletedCheckpointId = completedCheckpointStore.getLatestCheckpointId();\nexecutor.execute(\n() -> {\nfor (ExecutionVertex ev : tasksToAbort) {\nExecution ee = ev.getCurrentExecutionAttempt();\nif (ee != null) {\nee.notifyCheckpointAborted(\ncheckpointId, latestCompletedCheckpointId, timeStamp);\n}\n}\n});\nfor (OperatorCoordinatorCheckpointContext coordinatorContext : coordinatorsToCheckpoint) {\ncoordinatorContext.notifyCheckpointAborted(checkpointId);\n}\n}\n/**\n* Fails all pending checkpoints which have not been acknowledged by the given execution attempt\n* id.\n*\n* @param executionAttemptId for which to discard unacknowledged pending checkpoints\n* @param cause of the failure\n*/\npublic void failUnacknowledgedPendingCheckpointsFor(\nExecutionAttemptID executionAttemptId, Throwable cause) {\nsynchronized (lock) {\nabortPendingCheckpoints(\ncheckpoint -> !checkpoint.isAcknowledgedBy(executionAttemptId),\nnew CheckpointException(CheckpointFailureReason.TASK_FAILURE, cause));\n}\n}\nprivate void rememberRecentCheckpointId(long id) {\nif (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) {\nrecentPendingCheckpoints.removeFirst();\n}\nrecentPendingCheckpoints.addLast(id);\n}\nprivate void dropSubsumedCheckpoints(long checkpointId) {\nabortPendingCheckpoints(\ncheckpoint ->\ncheckpoint.getCheckpointID() < checkpointId && checkpoint.canBeSubsumed(),\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_SUBSUMED));\n}\n/**\n* Restores the latest checkpointed state to a set of subtasks. This method represents a \"local\"\n* or \"regional\" failover and does not restore state to coordinators. Note that a regional\n* failover might still include all tasks.\n*\n* @param tasks Set of job vertices to restore. State for these vertices is restored via {@link\n* Execution\n* @return An {@code OptionalLong} with the checkpoint ID, if state was restored, an empty\n* {@code OptionalLong} otherwise.\n* @throws IllegalStateException If the CheckpointCoordinator is shut down.\n* @throws IllegalStateException If no completed checkpoint is available and the\n* failIfNoCheckpoint flag has been set.\n* @throws IllegalStateException If the checkpoint contains state that cannot be mapped to any\n* job vertex in tasks and the allowNonRestoredState flag has not\n* been set.\n* @throws IllegalStateException If the max parallelism changed for an operator that restores\n* state from this checkpoint.\n* @throws IllegalStateException If the parallelism changed for an operator that restores\n* non-partitioned state from this checkpoint.\n*/\npublic OptionalLong restoreLatestCheckpointedStateToSubtasks(\nfinal Set tasks) throws Exception {\nreturn restoreLatestCheckpointedStateInternal(\ntasks,\nOperatorCoordinatorRestoreBehavior\n.SKIP,\nfalse,\ntrue,\nfalse);\n}\n/**\n* Restores the latest checkpointed state to all tasks and all coordinators. This method\n* represents a \"global restore\"-style operation where all stateful tasks and coordinators from\n* the given set of Job Vertices are restored to their latest checkpointed state.\n*\n* @param tasks Set of job vertices to restore. 
State for these vertices is restored via {@link\n* Execution\n* @param allowNonRestoredState Allow checkpoint state that cannot be mapped to any job vertex\n* in tasks.\n* @return true if state was restored, false otherwise.\n* @throws IllegalStateException If the CheckpointCoordinator is shut down.\n* @throws IllegalStateException If no completed checkpoint is available and the\n* failIfNoCheckpoint flag has been set.\n* @throws IllegalStateException If the checkpoint contains state that cannot be mapped to any\n* job vertex in tasks and the allowNonRestoredState flag has not\n* been set.\n* @throws IllegalStateException If the max parallelism changed for an operator that restores\n* state from this checkpoint.\n* @throws IllegalStateException If the parallelism changed for an operator that restores\n* non-partitioned state from this checkpoint.\n*/\npublic boolean restoreLatestCheckpointedStateToAll(\nfinal Set tasks, final boolean allowNonRestoredState)\nthrows Exception {\nfinal OptionalLong restoredCheckpointId =\nrestoreLatestCheckpointedStateInternal(\ntasks,\nOperatorCoordinatorRestoreBehavior\n.RESTORE_OR_RESET,\nfalse,\nallowNonRestoredState,\nfalse);\nreturn restoredCheckpointId.isPresent();\n}\n/**\n* Restores the latest checkpointed state at the beginning of the job execution. If there is a\n* checkpoint, this method acts like a \"global restore\"-style operation where all stateful tasks\n* and coordinators from the given set of Job Vertices are restored.\n*\n* @param tasks Set of job vertices to restore. State for these vertices is restored via {@link\n* Execution\n* @return True, if a checkpoint was found and its state was restored, false otherwise.\n*/\npublic boolean restoreInitialCheckpointIfPresent(final Set tasks)\nthrows Exception {\nfinal OptionalLong restoredCheckpointId =\nrestoreLatestCheckpointedStateInternal(\ntasks,\nOperatorCoordinatorRestoreBehavior.RESTORE_IF_CHECKPOINT_PRESENT,\nfalse,\nfalse,\ntrue);\nreturn restoredCheckpointId.isPresent();\n}\n/**\n* Performs the actual restore operation to the given tasks.\n*\n*

This method returns the restored checkpoint ID (as an optional) or an empty optional, if\n* no checkpoint was restored.\n*/\nprivate OptionalLong restoreLatestCheckpointedStateInternal(\nfinal Set tasks,\nfinal OperatorCoordinatorRestoreBehavior operatorCoordinatorRestoreBehavior,\nfinal boolean errorIfNoCheckpoint,\nfinal boolean allowNonRestoredState,\nfinal boolean checkForPartiallyFinishedOperators)\nthrows Exception {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalStateException(\"CheckpointCoordinator is shut down\");\n}\nCompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint();\nif (latest == null) {\nLOG.info(\"No checkpoint found during restore.\");\nif (errorIfNoCheckpoint) {\nthrow new IllegalStateException(\"No completed checkpoint available\");\n}\nLOG.debug(\"Resetting the master hooks.\");\nMasterHooks.reset(masterHooks.values(), LOG);\nif (operatorCoordinatorRestoreBehavior\n== OperatorCoordinatorRestoreBehavior.RESTORE_OR_RESET) {\nLOG.info(\"Resetting the Operator Coordinators to an empty state.\");\nrestoreStateToCoordinators(\nOperatorCoordinator.NO_CHECKPOINT, Collections.emptyMap());\n}\nreturn OptionalLong.empty();\n}\nLOG.info(\"Restoring job {} from {}.\", job, latest);\nthis.forceFullSnapshot = latest.getProperties().isUnclaimed();\nfinal Map operatorStates = extractOperatorStates(latest);\nif (checkForPartiallyFinishedOperators) {\nVertexFinishedStateChecker vertexFinishedStateChecker =\nvertexFinishedStateCheckerFactory.apply(tasks, operatorStates);\nvertexFinishedStateChecker.validateOperatorsFinishedState();\n}\nStateAssignmentOperation stateAssignmentOperation =\nnew StateAssignmentOperation(\nlatest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState);\nstateAssignmentOperation.assignStates();\nMasterHooks.restoreMasterHooks(\nmasterHooks,\nlatest.getMasterHookStates(),\nlatest.getCheckpointID(),\nallowNonRestoredState,\nLOG);\nif (operatorCoordinatorRestoreBehavior != OperatorCoordinatorRestoreBehavior.SKIP) {\nrestoreStateToCoordinators(latest.getCheckpointID(), operatorStates);\n}\nlong restoreTimestamp = System.currentTimeMillis();\nRestoredCheckpointStats restored =\nnew RestoredCheckpointStats(\nlatest.getCheckpointID(),\nlatest.getProperties(),\nrestoreTimestamp,\nlatest.getExternalPointer());\nstatsTracker.reportRestoredCheckpoint(restored);\nreturn OptionalLong.of(latest.getCheckpointID());\n}\n}\nprivate Map extractOperatorStates(CompletedCheckpoint checkpoint) {\nMap originalOperatorStates = checkpoint.getOperatorStates();\nif (checkpoint.getCheckpointID() != checkpointIdOfIgnoredInFlightData) {\nreturn originalOperatorStates;\n}\nHashMap newStates = new HashMap<>();\nfor (OperatorState originalOperatorState : originalOperatorStates.values()) {\nnewStates.put(\noriginalOperatorState.getOperatorID(),\noriginalOperatorState.copyAndDiscardInFlightData());\n}\nreturn newStates;\n}\n/**\n* Restores the state with the given savepoint.\n*\n* @param restoreSettings Settings for a snapshot to restore from. Includes the path and\n* parameters for the restore process.\n* @param tasks Map of job vertices to restore. 
State for these vertices is restored via {@link\n* Execution\n* @param userClassLoader The class loader to resolve serialized classes in legacy savepoint\n* versions.\n*/\npublic boolean restoreSavepoint(\nSavepointRestoreSettings restoreSettings,\nMap tasks,\nClassLoader userClassLoader)\nthrows Exception {\nfinal String savepointPointer = restoreSettings.getRestorePath();\nfinal boolean allowNonRestored = restoreSettings.allowNonRestoredState();\nPreconditions.checkNotNull(savepointPointer, \"The savepoint path cannot be null.\");\nLOG.info(\n\"Starting job {} from savepoint {} ({})\",\njob,\nsavepointPointer,\n(allowNonRestored ? \"allowing non restored state\" : \"\"));\nfinal CompletedCheckpointStorageLocation checkpointLocation =\ncheckpointStorageView.resolveCheckpoint(savepointPointer);\nfinal CheckpointProperties checkpointProperties;\nswitch (restoreSettings.getRestoreMode()) {\ncase CLAIM:\ncheckpointProperties = this.checkpointProperties;\nbreak;\ncase LEGACY:\ncheckpointProperties =\nCheckpointProperties.forSavepoint(\nfalse,\nSavepointFormatType.CANONICAL);\nbreak;\ncase NO_CLAIM:\ncheckpointProperties = CheckpointProperties.forUnclaimedSnapshot();\nbreak;\ndefault:\nthrow new IllegalArgumentException(\"Unknown snapshot restore mode\");\n}\nCompletedCheckpoint savepoint =\nCheckpoints.loadAndValidateCheckpoint(\njob,\ntasks,\ncheckpointLocation,\nuserClassLoader,\nallowNonRestored,\ncheckpointProperties);\nsavepoint.registerSharedStatesAfterRestored(\ncompletedCheckpointStore.getSharedStateRegistry(),\nrestoreSettings.getRestoreMode());\ncompletedCheckpointStore.addCheckpointAndSubsumeOldestOne(\nsavepoint, checkpointsCleaner, this::scheduleTriggerRequest);\nlong nextCheckpointId = savepoint.getCheckpointID() + 1;\ncheckpointIdCounter.setCount(nextCheckpointId);\nLOG.info(\"Reset the checkpoint ID of job {} to {}.\", job, nextCheckpointId);\nfinal OptionalLong restoredCheckpointId =\nrestoreLatestCheckpointedStateInternal(\nnew HashSet<>(tasks.values()),\nOperatorCoordinatorRestoreBehavior.RESTORE_IF_CHECKPOINT_PRESENT,\ntrue,\nallowNonRestored,\ntrue);\nreturn restoredCheckpointId.isPresent();\n}\npublic int getNumberOfPendingCheckpoints() {\nsynchronized (lock) {\nreturn this.pendingCheckpoints.size();\n}\n}\npublic int getNumberOfRetainedSuccessfulCheckpoints() {\nsynchronized (lock) {\nreturn completedCheckpointStore.getNumberOfRetainedCheckpoints();\n}\n}\npublic Map getPendingCheckpoints() {\nsynchronized (lock) {\nreturn new HashMap<>(this.pendingCheckpoints);\n}\n}\npublic List getSuccessfulCheckpoints() throws Exception {\nsynchronized (lock) {\nreturn completedCheckpointStore.getAllCheckpoints();\n}\n}\npublic CheckpointStorageCoordinatorView getCheckpointStorage() {\nreturn checkpointStorageView;\n}\npublic CompletedCheckpointStore getCheckpointStore() {\nreturn completedCheckpointStore;\n}\n/**\n* Gets the checkpoint interval. Its value might vary depending on whether there is processing\n* backlog.\n*/\nprivate long getCurrentCheckpointInterval() {\nreturn backlogOperators.isEmpty() ? 
baseInterval : baseIntervalDuringBacklog;\n}\npublic long getCheckpointTimeout() {\nreturn checkpointTimeout;\n}\n/** @deprecated use {@link\n@Deprecated\n@VisibleForTesting\nPriorityQueue getTriggerRequestQueue() {\nsynchronized (lock) {\nreturn requestDecider.getTriggerRequestQueue();\n}\n}\npublic boolean isTriggering() {\nreturn isTriggering;\n}\n@VisibleForTesting\nboolean isCurrentPeriodicTriggerAvailable() {\nreturn currentPeriodicTrigger != null;\n}\n/**\n* Returns whether periodic checkpointing has been configured.\n*\n* @return true if periodic checkpoints have been configured.\n*/\npublic boolean isPeriodicCheckpointingConfigured() {\nreturn baseInterval != CheckpointCoordinatorConfiguration.DISABLED_CHECKPOINT_INTERVAL;\n}\npublic void startCheckpointScheduler() {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalArgumentException(\"Checkpoint coordinator is shut down\");\n}\nPreconditions.checkState(\nisPeriodicCheckpointingConfigured(),\n\"Can not start checkpoint scheduler, if no periodic checkpointing is configured\");\nstopCheckpointScheduler();\nperiodicScheduling = true;\nscheduleTriggerWithDelay(clock.relativeTimeMillis(), getRandomInitDelay());\n}\n}\npublic void stopCheckpointScheduler() {\nsynchronized (lock) {\nperiodicScheduling = false;\ncancelPeriodicTrigger();\nfinal CheckpointException reason =\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND);\nabortPendingAndQueuedCheckpoints(reason);\n}\n}\npublic boolean isPeriodicCheckpointingStarted() {\nreturn periodicScheduling;\n}\n/**\n* Aborts all the pending checkpoints due to an exception.\n*\n* @param exception The exception.\n*/\npublic void abortPendingCheckpoints(CheckpointException exception) {\nsynchronized (lock) {\nabortPendingCheckpoints(ignored -> true, exception);\n}\n}\nprivate void abortPendingCheckpoints(\nPredicate checkpointToFailPredicate, CheckpointException exception) {\nassert Thread.holdsLock(lock);\nfinal PendingCheckpoint[] pendingCheckpointsToFail =\npendingCheckpoints.values().stream()\n.filter(checkpointToFailPredicate)\n.toArray(PendingCheckpoint[]::new);\nfor (PendingCheckpoint pendingCheckpoint : pendingCheckpointsToFail) {\nabortPendingCheckpoint(pendingCheckpoint, exception);\n}\n}\nprivate void rescheduleTrigger(long currentTimeMillis, long tillNextMillis) {\ncancelPeriodicTrigger();\nscheduleTriggerWithDelay(currentTimeMillis, tillNextMillis);\n}\nprivate void cancelPeriodicTrigger() {\nif (currentPeriodicTrigger != null) {\nnextCheckpointTriggeringRelativeTime = Long.MAX_VALUE;\ncurrentPeriodicTriggerFuture.cancel(false);\ncurrentPeriodicTrigger = null;\ncurrentPeriodicTriggerFuture = null;\n}\n}\nprivate long getRandomInitDelay() {\nreturn ThreadLocalRandom.current().nextLong(minPauseBetweenCheckpoints, baseInterval + 1L);\n}\nprivate void scheduleTriggerWithDelay(long currentRelativeTime, long initDelay) {\nnextCheckpointTriggeringRelativeTime = currentRelativeTime + initDelay;\ncurrentPeriodicTrigger = new ScheduledTrigger();\ncurrentPeriodicTriggerFuture =\ntimer.schedule(currentPeriodicTrigger, initDelay, TimeUnit.MILLISECONDS);\n}\nprivate void restoreStateToCoordinators(\nfinal long checkpointId, final Map operatorStates)\nthrows Exception {\nfor (OperatorCoordinatorCheckpointContext coordContext : coordinatorsToCheckpoint) {\nfinal OperatorState state = operatorStates.get(coordContext.operatorId());\nfinal ByteStreamStateHandle coordinatorState =\nstate == null ? 
null : state.getCoordinatorState();\nfinal byte[] bytes = coordinatorState == null ? null : coordinatorState.getData();\ncoordContext.resetToCheckpoint(checkpointId, bytes);\n}\n}\npublic JobStatusListener createActivatorDeactivator() {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalArgumentException(\"Checkpoint coordinator is shut down\");\n}\nif (jobStatusListener == null) {\njobStatusListener = new CheckpointCoordinatorDeActivator(this);\n}\nreturn jobStatusListener;\n}\n}\nint getNumQueuedRequests() {\nsynchronized (lock) {\nreturn requestDecider.getNumQueuedRequests();\n}\n}\npublic void reportStats(long id, ExecutionAttemptID attemptId, CheckpointMetrics metrics) {\nstatsTracker.reportIncompleteStats(id, attemptId, metrics);\n}\nfinal class ScheduledTrigger implements Runnable {\n@Override\npublic void run() {\nsynchronized (lock) {\nif (currentPeriodicTrigger != this) {\nreturn;\n}\nlong checkpointInterval = getCurrentCheckpointInterval();\nif (checkpointInterval\n!= CheckpointCoordinatorConfiguration.DISABLED_CHECKPOINT_INTERVAL) {\nnextCheckpointTriggeringRelativeTime += checkpointInterval;\ncurrentPeriodicTriggerFuture =\ntimer.schedule(\nthis,\nMath.max(\n0,\nnextCheckpointTriggeringRelativeTime\n- clock.relativeTimeMillis()),\nTimeUnit.MILLISECONDS);\n} else {\nnextCheckpointTriggeringRelativeTime = Long.MAX_VALUE;\ncurrentPeriodicTrigger = null;\ncurrentPeriodicTriggerFuture = null;\n}\n}\ntry {\ntriggerCheckpoint(checkpointProperties, null, true);\n} catch (Exception e) {\nLOG.error(\"Exception while triggering checkpoint for job {}.\", job, e);\n}\n}\n}\n/**\n* Discards the given state object asynchronously belonging to the given job, execution attempt\n* id and checkpoint id.\n*\n* @param jobId identifying the job to which the state object belongs\n* @param executionAttemptID identifying the task to which the state object belongs\n* @param checkpointId of the state object\n* @param subtaskState to discard asynchronously\n*/\nprivate void discardSubtaskState(\nfinal JobID jobId,\nfinal ExecutionAttemptID executionAttemptID,\nfinal long checkpointId,\nfinal TaskStateSnapshot subtaskState) {\nif (subtaskState != null) {\nexecutor.execute(\nnew Runnable() {\n@Override\npublic void run() {\ntry {\nsubtaskState.discardState();\n} catch (Throwable t2) {\nLOG.warn(\n\"Could not properly discard state object of checkpoint {} \"\n+ \"belonging to task {} of job {}.\",\ncheckpointId,\nexecutionAttemptID,\njobId,\nt2);\n}\n}\n});\n}\n}\nprivate void abortPendingCheckpoint(\nPendingCheckpoint pendingCheckpoint, CheckpointException exception) {\nabortPendingCheckpoint(pendingCheckpoint, exception, null);\n}\nprivate void abortPendingCheckpoint(\nPendingCheckpoint pendingCheckpoint,\nCheckpointException exception,\n@Nullable final ExecutionAttemptID executionAttemptID) {\nassert (Thread.holdsLock(lock));\nif (!pendingCheckpoint.isDisposed()) {\ntry {\npendingCheckpoint.abort(\nexception.getCheckpointFailureReason(),\nexception.getCause(),\ncheckpointsCleaner,\nthis::scheduleTriggerRequest,\nexecutor,\nstatsTracker);\nfailureManager.handleCheckpointException(\npendingCheckpoint,\npendingCheckpoint.getProps(),\nexception,\nexecutionAttemptID,\njob,\ngetStatsCallback(pendingCheckpoint),\nstatsTracker);\n} finally 
{\nsendAbortedMessages(\npendingCheckpoint.getCheckpointPlan().getTasksToCommitTo(),\npendingCheckpoint.getCheckpointID(),\npendingCheckpoint.getCheckpointTimestamp());\npendingCheckpoints.remove(pendingCheckpoint.getCheckpointID());\nrememberRecentCheckpointId(pendingCheckpoint.getCheckpointID());\nscheduleTriggerRequest();\n}\n}\n}\nprivate void preCheckGlobalState(boolean isPeriodic) throws CheckpointException {\nif (shutdown) {\nthrow new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);\n}\nif (isPeriodic && !periodicScheduling) {\nthrow new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN);\n}\n}\nprivate void abortPendingAndQueuedCheckpoints(CheckpointException exception) {\nassert (Thread.holdsLock(lock));\nrequestDecider.abortAll(exception);\nabortPendingCheckpoints(exception);\n}\n/**\n* The canceller of checkpoint. The checkpoint might be cancelled if it doesn't finish in a\n* configured period.\n*/\nclass CheckpointCanceller implements Runnable {\nprivate final PendingCheckpoint pendingCheckpoint;\nprivate CheckpointCanceller(PendingCheckpoint pendingCheckpoint) {\nthis.pendingCheckpoint = checkNotNull(pendingCheckpoint);\n}\n@Override\npublic void run() {\nsynchronized (lock) {\nif (!pendingCheckpoint.isDisposed()) {\nLOG.info(\n\"Checkpoint {} of job {} expired before completing.\",\npendingCheckpoint.getCheckpointID(),\njob);\nabortPendingCheckpoint(\npendingCheckpoint,\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_EXPIRED));\n}\n}\n}\n}\nprivate static CheckpointException getCheckpointException(\nCheckpointFailureReason defaultReason, Throwable throwable) {\nfinal Optional ioExceptionOptional =\nfindThrowable(throwable, IOException.class);\nif (ioExceptionOptional.isPresent()) {\nreturn new CheckpointException(CheckpointFailureReason.IO_EXCEPTION, throwable);\n} else {\nfinal Optional checkpointExceptionOptional =\nfindThrowable(throwable, CheckpointException.class);\nreturn checkpointExceptionOptional.orElseGet(\n() -> new CheckpointException(defaultReason, throwable));\n}\n}\nstatic class CheckpointTriggerRequest {\nfinal long timestamp;\nfinal CheckpointProperties props;\nfinal @Nullable String externalSavepointLocation;\nfinal boolean isPeriodic;\nprivate final CompletableFuture onCompletionPromise =\nnew CompletableFuture<>();\nCheckpointTriggerRequest(\nCheckpointProperties props,\n@Nullable String externalSavepointLocation,\nboolean isPeriodic) {\nthis.timestamp = System.currentTimeMillis();\nthis.props = checkNotNull(props);\nthis.externalSavepointLocation = externalSavepointLocation;\nthis.isPeriodic = isPeriodic;\n}\nCompletableFuture getOnCompletionFuture() {\nreturn onCompletionPromise;\n}\npublic void completeExceptionally(CheckpointException exception) {\nonCompletionPromise.completeExceptionally(exception);\n}\npublic boolean isForce() {\nreturn props.forceCheckpoint();\n}\n}\nprivate enum OperatorCoordinatorRestoreBehavior {\n/** Coordinators are always restored. If there is no checkpoint, they are restored empty. */\nRESTORE_OR_RESET,\n/** Coordinators are restored if there was a checkpoint. */\nRESTORE_IF_CHECKPOINT_PRESENT,\n/** Coordinators are not restored during this checkpoint restore. 
*/\nSKIP;\n}\nprivate PendingCheckpointStats trackPendingCheckpointStats(\nlong checkpointId,\nCheckpointPlan checkpointPlan,\nCheckpointProperties props,\nlong checkpointTimestamp) {\nMap vertices =\nStream.concat(\ncheckpointPlan.getTasksToWaitFor().stream(),\ncheckpointPlan.getFinishedTasks().stream())\n.map(Execution::getVertex)\n.map(ExecutionVertex::getJobVertex)\n.distinct()\n.collect(\ntoMap(\nExecutionJobVertex::getJobVertexId,\nExecutionJobVertex::getParallelism));\nPendingCheckpointStats pendingCheckpointStats =\nstatsTracker.reportPendingCheckpoint(\ncheckpointId, checkpointTimestamp, props, vertices);\nreportFinishedTasks(pendingCheckpointStats, checkpointPlan.getFinishedTasks());\nreturn pendingCheckpointStats;\n}\nprivate void reportFinishedTasks(\nPendingCheckpointStats pendingCheckpointStats, List finishedTasks) {\nlong now = System.currentTimeMillis();\nfinishedTasks.forEach(\nexecution ->\npendingCheckpointStats.reportSubtaskStats(\nexecution.getVertex().getJobvertexId(),\nnew SubtaskStateStats(execution.getParallelSubtaskIndex(), now)));\n}\n@Nullable\nprivate PendingCheckpointStats getStatsCallback(PendingCheckpoint pendingCheckpoint) {\nreturn statsTracker.getPendingCheckpointStats(pendingCheckpoint.getCheckpointID());\n}\n}" + }, + { + "comment": "The `CompletionStage` will be computed eagerly when the method is invoked while `Uni` is supposed to be lazy. This can be fixed using a `Supplier`: (:warning: untested suggestion :warning:) ```suggestion return Uni.createFrom().completionStage(new Supplier<>() { @Override public CompletionStage get() { return cache.clearAsync(); } }); ```", + "method_body": "private Uni invalidateAll(CacheInvalidateAll binding) {\nRemoteCache cache = cacheManager.getCache(binding.cacheName());\nLOGGER.debugf(\"Invalidating all entries from cache [%s]\", binding.cacheName());\nreturn Uni.createFrom().completionStage(cache.clearAsync());\n}", + "target_code": "return Uni.createFrom().completionStage(cache.clearAsync());", + "method_body_after": "private Uni invalidateAll(CacheInvalidateAll binding) {\nRemoteCache cache = cacheManager.getCache(binding.cacheName());\nLOGGER.debugf(\"Invalidating all entries from cache [%s]\", binding.cacheName());\nreturn Uni.createFrom().completionStage(new Supplier<>() {\n@Override\npublic CompletionStage get() {\nreturn cache.clearAsync();\n}\n});\n}", + "context_before": "class CacheInvalidateAllInterceptor extends CacheInterceptor {\nprivate static final Logger LOGGER = Logger.getLogger(CacheInvalidateAllInterceptor.class);\nprivate static final String INTERCEPTOR_BINDINGS_ERROR_MSG = \"The Quarkus Infinispan Client extension is not working properly (CacheInvalidateAll interceptor bindings retrieval failed), please create a GitHub issue in the Quarkus repository to help the maintainers fix this bug\";\n@AroundInvoke\npublic Object intercept(InvocationContext invocationContext) throws Exception {\nCacheInterceptionContext interceptionContext = getInterceptionContext(invocationContext,\nCacheInvalidateAll.class);\nif (interceptionContext.getInterceptorBindings().isEmpty()) {\nLOGGER.warn(INTERCEPTOR_BINDINGS_ERROR_MSG);\nreturn invocationContext.proceed();\n}\nReturnType returnType = determineReturnType(invocationContext.getMethod().getReturnType());\nif (returnType == ReturnType.NonAsync) {\nreturn invalidateAllBlocking(invocationContext, interceptionContext);\n}\nreturn invalidateAllNonBlocking(invocationContext, interceptionContext, returnType);\n}\nprivate Object 
invalidateAllNonBlocking(InvocationContext invocationContext,\nCacheInterceptionContext interceptionContext, ReturnType returnType) {\nLOGGER.trace(\"Invalidating all cache entries in a non-blocking way\");\nvar uni = Multi.createFrom().iterable(interceptionContext.getInterceptorBindings())\n.onItem().transformToUniAndMerge(new Function>() {\n@Override\npublic Uni apply(CacheInvalidateAll binding) {\nreturn invalidateAll(binding);\n}\n})\n.onItem().ignoreAsUni()\n.onItem().transformToUni(new Function>() {\n@Override\npublic Uni apply(Object ignored) {\ntry {\nreturn asyncInvocationResultToUni(invocationContext.proceed(), returnType);\n} catch (Exception e) {\nthrow new CacheException(e);\n}\n}\n});\nreturn createAsyncResult(uni, returnType);\n}\nprivate Object invalidateAllBlocking(InvocationContext invocationContext,\nCacheInterceptionContext interceptionContext) throws Exception {\nLOGGER.trace(\"Invalidating all cache entries in a blocking way\");\nfor (CacheInvalidateAll binding : interceptionContext.getInterceptorBindings()) {\ninvalidateAll(binding).await().indefinitely();\n}\nreturn invocationContext.proceed();\n}\n}", + "context_after": "class CacheInvalidateAllInterceptor extends CacheInterceptor {\nprivate static final Logger LOGGER = Logger.getLogger(CacheInvalidateAllInterceptor.class);\nprivate static final String INTERCEPTOR_BINDINGS_ERROR_MSG = \"The Quarkus Infinispan Client extension is not working properly (CacheInvalidateAll interceptor bindings retrieval failed), please create a GitHub issue in the Quarkus repository to help the maintainers fix this bug\";\n@AroundInvoke\npublic Object intercept(InvocationContext invocationContext) throws Exception {\nCacheInterceptionContext interceptionContext = getInterceptionContext(invocationContext,\nCacheInvalidateAll.class);\nif (interceptionContext.getInterceptorBindings().isEmpty()) {\nLOGGER.warn(INTERCEPTOR_BINDINGS_ERROR_MSG);\nreturn invocationContext.proceed();\n}\nReturnType returnType = determineReturnType(invocationContext.getMethod().getReturnType());\nif (returnType == ReturnType.NonAsync) {\nreturn invalidateAllBlocking(invocationContext, interceptionContext);\n}\nreturn invalidateAllNonBlocking(invocationContext, interceptionContext, returnType);\n}\nprivate Object invalidateAllNonBlocking(InvocationContext invocationContext,\nCacheInterceptionContext interceptionContext, ReturnType returnType) {\nLOGGER.trace(\"Invalidating all cache entries in a non-blocking way\");\nvar uni = Multi.createFrom().iterable(interceptionContext.getInterceptorBindings())\n.onItem().transformToUniAndMerge(new Function>() {\n@Override\npublic Uni apply(CacheInvalidateAll binding) {\nreturn invalidateAll(binding);\n}\n})\n.onItem().ignoreAsUni()\n.onItem().transformToUni(new Function>() {\n@Override\npublic Uni apply(Object ignored) {\ntry {\nreturn asyncInvocationResultToUni(invocationContext.proceed(), returnType);\n} catch (Exception e) {\nthrow new CacheException(e);\n}\n}\n});\nreturn createAsyncResult(uni, returnType);\n}\nprivate Object invalidateAllBlocking(InvocationContext invocationContext,\nCacheInterceptionContext interceptionContext) throws Exception {\nLOGGER.trace(\"Invalidating all cache entries in a blocking way\");\nfor (CacheInvalidateAll binding : interceptionContext.getInterceptorBindings()) {\ninvalidateAll(binding).await().indefinitely();\n}\nreturn invocationContext.proceed();\n}\n}" + }, + { + "comment": "Can't we deal with the offsets here? 
Instead of the lines and columns?", + "method_body": "public static SignatureHelp getSignatureHelp(SignatureContext context) {\nfillTokenInfoAtCursor(context);\nint activeParamIndex = 0;\nOptional sNode = context.getNodeAtCursor();\nif (sNode.isEmpty()) {\nreturn null;\n}\nSyntaxKind sKind = sNode.get().kind();\nNonTerminalNode evalNode = sNode.get();\nwhile (evalNode != null &&\nsKind != SyntaxKind.FUNCTION_CALL &&\nsKind != SyntaxKind.METHOD_CALL &&\nsKind != SyntaxKind.REMOTE_METHOD_CALL_ACTION &&\nsKind != SyntaxKind.IMPLICIT_NEW_EXPRESSION &&\nsKind != SyntaxKind.EXPLICIT_NEW_EXPRESSION) {\nevalNode = evalNode.parent();\nsKind = (evalNode != null) ? evalNode.kind() : null;\n}\nif (evalNode == null) {\nreturn null;\n}\nChildNodeList childrenInParen = evalNode.children();\nswitch (sKind) {\ncase IMPLICIT_NEW_EXPRESSION:\nOptional implicitArgList =\n((ImplicitNewExpressionNode) evalNode).parenthesizedArgList();\nif (implicitArgList.isPresent()) {\nchildrenInParen = implicitArgList.get().children();\n}\nbreak;\ncase EXPLICIT_NEW_EXPRESSION:\nchildrenInParen = ((ExplicitNewExpressionNode) evalNode).parenthesizedArgList().children();\nbreak;\n}\nint cLine = context.getCursorPosition().getLine();\nint cCol = context.getCursorPosition().getCharacter();\nfor (Node child : childrenInParen) {\nint sLine = child.lineRange().startLine().line();\nint sCol = child.lineRange().startLine().offset();\nif ((cLine == sLine && cCol < sCol) || (cLine < sLine)) {\nbreak;\n}\nif (child.kind() == SyntaxKind.COMMA_TOKEN) {\nactiveParamIndex++;\n}\n}\nList signatures = new ArrayList<>();\nOptional signatureInfo = SignatureHelpUtil.getSignatureInformation(context);\nsignatureInfo.ifPresent(signatures::add);\nSignatureHelp signatureHelp = new SignatureHelp();\nsignatureHelp.setActiveParameter(activeParamIndex);\nsignatureHelp.setActiveSignature(0);\nsignatureHelp.setSignatures(signatures);\nreturn signatureHelp;\n}", + "target_code": "}", + "method_body_after": "public static SignatureHelp getSignatureHelp(SignatureContext context) {\nfillTokenInfoAtCursor(context);\nOptional sNode = context.getNodeAtCursor();\nif (sNode.isEmpty()) {\nreturn null;\n}\nSyntaxKind sKind = sNode.get().kind();\nNonTerminalNode evalNode = sNode.get();\nwhile (evalNode != null &&\nsKind != SyntaxKind.FUNCTION_CALL &&\nsKind != SyntaxKind.METHOD_CALL &&\nsKind != SyntaxKind.REMOTE_METHOD_CALL_ACTION &&\nsKind != SyntaxKind.IMPLICIT_NEW_EXPRESSION &&\nsKind != SyntaxKind.EXPLICIT_NEW_EXPRESSION) {\nevalNode = evalNode.parent();\nsKind = (evalNode != null) ? 
evalNode.kind() : null;\n}\nif (evalNode == null) {\nreturn null;\n}\nChildNodeList childrenInParen = evalNode.children();\nswitch (sKind) {\ncase IMPLICIT_NEW_EXPRESSION:\nOptional implicitArgList =\n((ImplicitNewExpressionNode) evalNode).parenthesizedArgList();\nif (implicitArgList.isPresent()) {\nchildrenInParen = implicitArgList.get().children();\n}\nbreak;\ncase EXPLICIT_NEW_EXPRESSION:\nchildrenInParen = ((ExplicitNewExpressionNode) evalNode).parenthesizedArgList().children();\nbreak;\ndefault:\nbreak;\n}\nint activeParamIndex = 0;\nint cursorPosition = context.getCursorPositionInTree();\nfor (Node child : childrenInParen) {\nint childPosition = child.textRange().endOffset();\nif (cursorPosition < childPosition) {\nbreak;\n}\nif (child.kind() == SyntaxKind.COMMA_TOKEN) {\nactiveParamIndex++;\n}\n}\nList signatures = new ArrayList<>();\nOptional signatureInfo = SignatureHelpUtil.getSignatureInformation(context);\nsignatureInfo.ifPresent(signatures::add);\nSignatureHelp signatureHelp = new SignatureHelp();\nsignatureHelp.setActiveParameter(activeParamIndex);\nsignatureHelp.setActiveSignature(0);\nsignatureHelp.setSignatures(signatures);\nreturn signatureHelp;\n}", "context_before": "class SignatureHelpUtil {\nprivate SignatureHelpUtil() {\n}\n/**\n* Get Signature Help for the invocation node in the given context.\n*\n* @param context Signature Help context.\n* @return {@link SignatureHelp} SignatureHelp for the invocation node.\n*/\n/**\n* Get the signature information for a given context.\n*\n* @param context Lang Server Signature Help Context\n* @return {@link SignatureInformation} Signature information for the invocation node.\n*/\nprivate static Optional getSignatureInformation(SignatureContext context) {\nOptional functionSymbol = getFunctionSymbol(context);\nif (functionSymbol.isEmpty()) {\nreturn Optional.empty();\n}\nList parameterInformationList = new ArrayList<>();\nSignatureInformation signatureInformation = new SignatureInformation();\nSignatureInfoModel signatureInfoModel = getSignatureInfoModel(functionSymbol.get(), context);\nOptional functionName = functionSymbol.get().getName();\nOptional nodeAtCursor = context.getNodeAtCursor();\nif (functionName.isEmpty() || nodeAtCursor.isEmpty()) {\nreturn Optional.empty();\n}\nStringBuilder labelBuilder = new StringBuilder();\nSyntaxKind syntaxKind = nodeAtCursor.get().kind();\nif (functionName.get().equals(Names.USER_DEFINED_INIT_SUFFIX.getValue())\n&& (syntaxKind == SyntaxKind.IMPLICIT_NEW_EXPRESSION\n|| syntaxKind == SyntaxKind.EXPLICIT_NEW_EXPRESSION)) {\nlabelBuilder.append(SyntaxKind.NEW_KEYWORD.stringValue());\n} else {\nlabelBuilder.append(functionName.get());\n}\nlabelBuilder.append("(");\nList parameterInfoModels = signatureInfoModel.getParameterInfoModels();\nfor (int i = 0; i < parameterInfoModels.size(); i++) {\nParameterInfoModel paramModel = parameterInfoModels.get(i);\nint labelOffset = labelBuilder.toString().length();\nlabelBuilder.append(paramModel.parameter.getType());\nParameterInformation paramInfo = new ParameterInformation();\nparamInfo.setDocumentation(getParameterDocumentation(paramModel));\nint paramStart = labelOffset;\nint paramEnd = labelOffset + paramModel.parameter.getType().length();\nif (paramModel.parameter.getName().isPresent()) {\nparamStart = paramEnd + 1;\nparamEnd += (paramModel.parameter.getName().get() + " ").length();\nlabelBuilder.append(" ").append(paramModel.parameter.getName().get());\n}\nif (i < parameterInfoModels.size() - 1) {\nlabelBuilder.append(", 
\");\n}\nparamInfo.setLabel(Tuple.two(paramStart, paramEnd));\nparameterInformationList.add(paramInfo);\n}\nlabelBuilder.append(\")\");\nsignatureInformation.setLabel(labelBuilder.toString());\nsignatureInformation.setParameters(parameterInformationList);\nsignatureInformation.setDocumentation(signatureInfoModel.signatureDescription);\nreturn Optional.of(signatureInformation);\n}\n/**\n* Get the required signature information filled model.\n*\n* @param functionSymbol Invokable symbol\n* @param context Lang Server Signature Help Context\n* @return {@link SignatureInfoModel} SignatureInfoModel containing signature information\n*/\nprivate static SignatureInfoModel getSignatureInfoModel(FunctionSymbol functionSymbol, SignatureContext context) {\nMap paramToDesc = new HashMap<>();\nSignatureInfoModel signatureInfoModel = new SignatureInfoModel();\nList paramModels = new ArrayList<>();\nOptional documentation = functionSymbol.documentation();\nList parameters = new ArrayList<>();\nif (documentation.isPresent()) {\nif (documentation.get().description().isPresent()) {\nsignatureInfoModel.setSignatureDescription(documentation.get().description().get().trim(), context);\n}\ndocumentation.get().parameterMap().forEach(paramToDesc::put);\n}\nparameters.addAll(functionSymbol.typeDescriptor().params().orElse(new ArrayList<>()).stream()\n.map(param -> new Parameter(param, false, false, context)).collect(Collectors.toList()));\nOptional restParam = functionSymbol.typeDescriptor().restParam();\nrestParam.ifPresent(parameter -> parameters.add(new Parameter(parameter, false, true, context)));\nboolean skipFirstParam = functionSymbol.kind() == METHOD\n&& CommonUtil.isLangLib(functionSymbol.getModule().get().id());\nfor (int i = 0; i < parameters.size(); i++) {\nif (i == 0 && skipFirstParam) {\ncontinue;\n}\nParameter param = parameters.get(i);\nString desc = \"\";\nif (param.getName().isPresent() && paramToDesc.containsKey(param.getName().get())) {\ndesc = paramToDesc.get(param.getName().get());\n}\nparamModels.add(new ParameterInfoModel(param, desc, context));\n}\nsignatureInfoModel.setParameterInfoModels(paramModels);\nreturn signatureInfoModel;\n}\nprivate static MarkupContent getParameterDocumentation(ParameterInfoModel paramInfo) {\nMarkupContent paramDocumentation = new MarkupContent();\nparamDocumentation.setKind(CommonUtil.MARKDOWN_MARKUP_KIND);\nString type = paramInfo.parameter.getType();\nStringBuilder markupContent = new StringBuilder();\nmarkupContent.append(\"**Parameter**\")\n.append(CommonUtil.MD_LINE_SEPARATOR)\n.append(\"**\")\n.append((!type.isEmpty()) ? 
\"`\" + type + \"`\" : \"\");\nif (paramInfo.parameter.getName().isPresent()) {\nmarkupContent.append(paramInfo.parameter.getName().get());\n}\nmarkupContent.append(\"**\");\nif (!paramInfo.description.isBlank()) {\nmarkupContent.append(\": \").append(paramInfo.description);\n}\nparamDocumentation.setValue(markupContent.toString());\nreturn paramDocumentation;\n}\n/**\n* Parameter model to hold the parameter information meta data.\n*/\nprivate static class Parameter {\nprivate final boolean isRestArg;\nprivate final boolean isOptional;\nprivate final ParameterSymbol parameterSymbol;\nprivate final SignatureContext signatureContext;\npublic Parameter(ParameterSymbol parameterSymbol,\nboolean isOptional,\nboolean isRestArg,\nSignatureContext signatureContext) {\nthis.parameterSymbol = parameterSymbol;\nthis.isOptional = isOptional;\nthis.isRestArg = isRestArg;\nthis.signatureContext = signatureContext;\n}\npublic Optional getName() {\nreturn (parameterSymbol.getName().isPresent() && this.isOptional)\n? Optional.of(parameterSymbol.getName().get() + \"?\") : parameterSymbol.getName();\n}\npublic String getType() {\nString type = CommonUtil.getModifiedTypeName(this.signatureContext, parameterSymbol.typeDescriptor());\nif (this.isRestArg && !\"\".equals(type)) {\nif (type.contains(\"[]\")) {\ntype = type.substring(0, type.length() - 2);\n}\ntype += \"...\";\n}\nreturn type;\n}\n}\n/**\n* Parameter information model to hold the parameter information meta data.\n*/\nprivate static class ParameterInfoModel {\nprivate final String description;\nprivate final Parameter parameter;\npublic ParameterInfoModel(Parameter parameter, String desc, SignatureContext signatureContext) {\nthis.parameter = parameter;\nthis.description = desc;\n}\n@Override\npublic String toString() {\nreturn this.parameter.getType()\n+ (parameter.getName().isPresent() ? (\" \" + parameter.getName().get()) : \"\");\n}\n}\n/**\n* Signature information model to collect the info required for the signature.\n*/\nprivate static class SignatureInfoModel {\nprivate List parameterInfoModels;\nprivate Either signatureDescription;\nList getParameterInfoModels() {\nreturn parameterInfoModels;\n}\nvoid setParameterInfoModels(List parameterInfoModels) {\nthis.parameterInfoModels = parameterInfoModels;\n}\nvoid setSignatureDescription(String signatureDescription, SignatureContext signatureContext) {\nSignatureInformationCapabilities capabilities = signatureContext.capabilities().getSignatureInformation();\nList documentationFormat = capabilities != null ? 
capabilities.getDocumentationFormat()\n: new ArrayList<>();\nif (documentationFormat != null\n&& !documentationFormat.isEmpty()\n&& documentationFormat.get(0).equals(CommonUtil.MARKDOWN_MARKUP_KIND)) {\nMarkupContent signatureMarkupContent = new MarkupContent();\nsignatureMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND);\nsignatureMarkupContent.setValue(\n\"**Description**\" + CommonUtil.MD_LINE_SEPARATOR + signatureDescription);\nthis.signatureDescription = Either.forRight(signatureMarkupContent);\n} else {\nthis.signatureDescription = Either.forLeft(\n\"Description\" + CommonUtil.LINE_SEPARATOR + signatureDescription);\n}\n}\n}\n/**\n* Find the token at cursor.\n*/\nprivate static void fillTokenInfoAtCursor(SignatureContext context) {\nOptional document = context.currentDocument();\nif (document.isEmpty()) {\nreturn;\n}\nTextDocument textDocument = document.get().textDocument();\nPosition position = context.getCursorPosition();\nint txtPos = textDocument.textPositionFrom(LinePosition.from(position.getLine(), position.getCharacter()));\ncontext.setCursorPositionInTree(txtPos);\nTextRange range = TextRange.from(txtPos, 0);\nNonTerminalNode nonTerminalNode = ((ModulePartNode) document.get().syntaxTree().rootNode()).findNode(range);\nwhile (true) {\nif (nonTerminalNode != null && (!withinTextRange(txtPos, nonTerminalNode)\n|| (nonTerminalNode.kind() != SyntaxKind.FUNCTION_CALL\n&& nonTerminalNode.kind() != SyntaxKind.METHOD_CALL)\n&& nonTerminalNode.kind() != SyntaxKind.REMOTE_METHOD_CALL_ACTION\n&& nonTerminalNode.kind() != SyntaxKind.IMPLICIT_NEW_EXPRESSION)\n&& nonTerminalNode.kind() != SyntaxKind.EXPLICIT_NEW_EXPRESSION) {\nnonTerminalNode = nonTerminalNode.parent();\ncontinue;\n}\nbreak;\n}\ncontext.setNodeAtCursor(nonTerminalNode);\n}\nprivate static boolean withinTextRange(int position, @Nonnull NonTerminalNode node) {\nTextRange rangeWithMinutiae = node.textRangeWithMinutiae();\nTextRange textRange = node.textRange();\nTextRange leadingMinutiaeRange = TextRange.from(rangeWithMinutiae.startOffset(),\ntextRange.startOffset() - rangeWithMinutiae.startOffset());\nreturn leadingMinutiaeRange.endOffset() <= position;\n}\npublic static Optional getFunctionSymbol(SignatureContext context) {\nif (context.getNodeAtCursor().isEmpty()) {\nreturn Optional.empty();\n}\nNonTerminalNode nodeAtCursor = context.getNodeAtCursor().get();\nif (nodeAtCursor.kind() == SyntaxKind.FUNCTION_CALL) {\nNameReferenceNode nameReferenceNode = ((FunctionCallExpressionNode) nodeAtCursor).functionName();\nString funcName;\nPredicate symbolPredicate = symbol -> symbol.kind() == FUNCTION;\nList filteredContent;\nif (nameReferenceNode.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nQualifiedNameReferenceNode qNameRef = (QualifiedNameReferenceNode) nameReferenceNode;\nfuncName = (qNameRef).identifier().text();\nfilteredContent = QNameReferenceUtil.getModuleContent(context, qNameRef,\nsymbolPredicate.and(symbol -> symbol.getName().orElse(\"\").equals(funcName)));\n} else {\nfuncName = ((SimpleNameReferenceNode) nameReferenceNode).name().text();\nList visibleSymbols = context.visibleSymbols(context.getCursorPosition());\nfilteredContent = visibleSymbols.stream()\n.filter(symbolPredicate.and(symbol -> symbol.getName().get().equals(funcName)))\n.collect(Collectors.toList());\n}\nreturn filteredContent.stream().map(symbol -> (FunctionSymbol) symbol).findAny();\n}\nOptional typeDesc;\nString methodName;\nif (nodeAtCursor.kind() == SyntaxKind.METHOD_CALL) {\nMethodCallExpressionNode methodCall = 
(MethodCallExpressionNode) nodeAtCursor;\ntypeDesc = getTypeDesc(context, methodCall.expression());\nmethodName = ((SimpleNameReferenceNode) methodCall.methodName()).name().text();\n} else if (nodeAtCursor.kind() == SyntaxKind.REMOTE_METHOD_CALL_ACTION) {\nRemoteMethodCallActionNode remoteMethodCall = (RemoteMethodCallActionNode) nodeAtCursor;\ntypeDesc = getTypeDesc(context, remoteMethodCall.expression());\nmethodName = remoteMethodCall.methodName().name().text();\n} else if (nodeAtCursor.kind() == SyntaxKind.IMPLICIT_NEW_EXPRESSION\n|| nodeAtCursor.kind() == SyntaxKind.EXPLICIT_NEW_EXPRESSION) {\nContextTypeResolver resolver = new ContextTypeResolver(context);\ntypeDesc = nodeAtCursor.apply(resolver);\nmethodName = Names.USER_DEFINED_INIT_SUFFIX.getValue();\n} else {\nreturn Optional.empty();\n}\nif (typeDesc.isEmpty()) {\nreturn Optional.empty();\n}\nreturn getFunctionSymbolsForTypeDesc(typeDesc.get()).stream()\n.filter(functionSymbol -> functionSymbol.getName().orElse(\"\").equals(methodName))\n.findAny();\n}\nprivate static Optional getTypeDesc(SignatureContext ctx, ExpressionNode expr) {\nswitch (expr.kind()) {\ncase SIMPLE_NAME_REFERENCE:\n/*\nCaptures the following\n(1) fieldName\n*/\nreturn getTypeDescForNameRef(ctx, (SimpleNameReferenceNode) expr);\ncase FUNCTION_CALL:\n/*\nCaptures the following\n(1) functionName()\n*/\nreturn getTypeDescForFunctionCall(ctx, (FunctionCallExpressionNode) expr);\ncase METHOD_CALL: {\n/*\nAddress the following\n(1) test.testMethod()\n*/\nreturn getTypeDescForMethodCall(ctx, (MethodCallExpressionNode) expr);\n}\ncase FIELD_ACCESS: {\n/*\nAddress the following\n(1) test1.test2\n*/\nreturn getTypeDescForFieldAccess(ctx, (FieldAccessExpressionNode) expr);\n}\ndefault:\nreturn Optional.empty();\n}\n}\nprivate static Optional getTypeDescForFieldAccess(\nSignatureContext context, FieldAccessExpressionNode node) {\nString fieldName = ((SimpleNameReferenceNode) node.fieldName()).name().text();\nExpressionNode expressionNode = node.expression();\nOptional typeDescriptor = getTypeDesc(context, expressionNode);\nif (typeDescriptor.isEmpty()) {\nreturn Optional.empty();\n}\nTypeSymbol rawType = CommonUtil.getRawType(typeDescriptor.get());\nswitch (rawType.typeKind()) {\ncase OBJECT:\nObjectFieldSymbol objField = ((ObjectTypeSymbol) rawType).fieldDescriptors().get(fieldName);\nreturn objField != null ? Optional.of(objField.typeDescriptor()) : Optional.empty();\ncase RECORD:\nRecordFieldSymbol recField = ((RecordTypeSymbol) rawType).fieldDescriptors().get(fieldName);\nreturn recField != null ? 
Optional.of(recField.typeDescriptor()) : Optional.empty();\ndefault:\nreturn Optional.empty();\n}\n}\nprivate static Optional getTypeDescForNameRef(SignatureContext context,\nNameReferenceNode referenceNode) {\nif (referenceNode.kind() != SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn Optional.empty();\n}\nString name = ((SimpleNameReferenceNode) referenceNode).name().text();\nList visibleSymbols = context.visibleSymbols(context.getCursorPosition());\nOptional symbolRef = visibleSymbols.stream()\n.filter(symbol -> Objects.equals(symbol.getName().orElse(null), name))\n.findFirst();\nif (symbolRef.isEmpty()) {\nreturn Optional.empty();\n}\nreturn SymbolUtil.getTypeDescriptor(symbolRef.get());\n}\nprivate static Optional getTypeDescForFunctionCall(\nSignatureContext context, FunctionCallExpressionNode expr) {\nString fName = ((SimpleNameReferenceNode) expr.functionName()).name().text();\nList visibleSymbols = context.visibleSymbols(context.getCursorPosition());\nOptional symbolRef = visibleSymbols.stream()\n.filter(symbol -> symbol.kind() == SymbolKind.FUNCTION && symbol.getName().get().equals(fName))\n.map(symbol -> (FunctionSymbol) symbol)\n.findFirst();\nif (symbolRef.isEmpty()) {\nreturn Optional.empty();\n}\nreturn symbolRef.get().typeDescriptor().returnTypeDescriptor();\n}\nprivate static Optional getTypeDescForMethodCall(\nSignatureContext context, MethodCallExpressionNode node) {\nString methodName = ((SimpleNameReferenceNode) node.methodName()).name().text();\nOptional fieldTypeDesc = getTypeDesc(context, node.expression());\nif (fieldTypeDesc.isEmpty()) {\nreturn Optional.empty();\n}\nList visibleMethods = fieldTypeDesc.get().langLibMethods();\nif (CommonUtil.getRawType(fieldTypeDesc.get()).typeKind() == TypeDescKind.OBJECT) {\nvisibleMethods.addAll(((ObjectTypeSymbol) CommonUtil.getRawType(fieldTypeDesc.get())).methods().values());\n}\nOptional filteredMethod = visibleMethods.stream()\n.filter(methodSymbol -> Objects.equals(methodSymbol.getName().orElse(null), methodName))\n.findFirst();\nif (filteredMethod.isEmpty()) {\nreturn Optional.empty();\n}\nreturn filteredMethod.get().typeDescriptor().returnTypeDescriptor();\n}\nprivate static List getFunctionSymbolsForTypeDesc(TypeSymbol typeDescriptor) {\nList functionSymbols = new ArrayList<>();\nTypeSymbol rawType = CommonUtil.getRawType(typeDescriptor);\nif (rawType.typeKind() == TypeDescKind.OBJECT) {\nObjectTypeSymbol objTypeDesc = (ObjectTypeSymbol) rawType;\nfunctionSymbols.addAll(objTypeDesc.methods().values());\n}\nif (rawType.kind() == CLASS && ((ClassSymbol) rawType).initMethod().isPresent()) {\nfunctionSymbols.add(((ClassSymbol) rawType).initMethod().get());\n}\nfunctionSymbols.addAll(typeDescriptor.langLibMethods());\nreturn functionSymbols;\n}\n}", "context_after": "class SignatureHelpUtil {\nprivate SignatureHelpUtil() {\n}\n/**\n* Get Signature Help for the invocation node in the given context.\n*\n* @param context Signature Help context.\n* @return {@link SignatureHelp} SignatureHelp for the invocation node.\n*/\n/**\n* Get the signature information for a given context.\n*\n* @param context Lang Server Signature Help Context\n* @return {@link SignatureInformation} Signature information for the invocation node.\n*/\nprivate static Optional getSignatureInformation(SignatureContext context) {\nOptional functionSymbol = getFunctionSymbol(context);\nif (functionSymbol.isEmpty()) {\nreturn Optional.empty();\n}\nList parameterInformationList = new ArrayList<>();\nSignatureInformation signatureInformation = new 
SignatureInformation();\nSignatureInfoModel signatureInfoModel = getSignatureInfoModel(functionSymbol.get(), context);\nOptional functionName = functionSymbol.get().getName();\nOptional nodeAtCursor = context.getNodeAtCursor();\nif (functionName.isEmpty() || nodeAtCursor.isEmpty()) {\nreturn Optional.empty();\n}\nStringBuilder labelBuilder = new StringBuilder();\nSyntaxKind syntaxKind = nodeAtCursor.get().kind();\nif (functionName.get().equals(Names.USER_DEFINED_INIT_SUFFIX.getValue())\n&& (syntaxKind == SyntaxKind.IMPLICIT_NEW_EXPRESSION\n|| syntaxKind == SyntaxKind.EXPLICIT_NEW_EXPRESSION)) {\nlabelBuilder.append(SyntaxKind.NEW_KEYWORD.stringValue());\n} else {\nlabelBuilder.append(functionName.get());\n}\nlabelBuilder.append(\"(\");\nList parameterInfoModels = signatureInfoModel.getParameterInfoModels();\nfor (int i = 0; i < parameterInfoModels.size(); i++) {\nParameterInfoModel paramModel = parameterInfoModels.get(i);\nint labelOffset = labelBuilder.toString().length();\nlabelBuilder.append(paramModel.parameter.getType());\nParameterInformation paramInfo = new ParameterInformation();\nparamInfo.setDocumentation(getParameterDocumentation(paramModel));\nint paramStart = labelOffset;\nint paramEnd = labelOffset + paramModel.parameter.getType().length();\nif (paramModel.parameter.getName().isPresent()) {\nparamStart = paramEnd + 1;\nparamEnd += (paramModel.parameter.getName().get() + \" \").length();\nlabelBuilder.append(\" \").append(paramModel.parameter.getName().get());\n}\nif (i < parameterInfoModels.size() - 1) {\nlabelBuilder.append(\", \");\n}\nparamInfo.setLabel(Tuple.two(paramStart, paramEnd));\nparameterInformationList.add(paramInfo);\n}\nlabelBuilder.append(\")\");\nsignatureInformation.setLabel(labelBuilder.toString());\nsignatureInformation.setParameters(parameterInformationList);\nsignatureInformation.setDocumentation(signatureInfoModel.signatureDescription);\nreturn Optional.of(signatureInformation);\n}\n/**\n* Get the required signature information filled model.\n*\n* @param functionSymbol Invokable symbol\n* @param context Lang Server Signature Help Context\n* @return {@link SignatureInfoModel} SignatureInfoModel containing signature information\n*/\nprivate static SignatureInfoModel getSignatureInfoModel(FunctionSymbol functionSymbol, SignatureContext context) {\nMap paramToDesc = new HashMap<>();\nSignatureInfoModel signatureInfoModel = new SignatureInfoModel();\nList paramModels = new ArrayList<>();\nOptional documentation = functionSymbol.documentation();\nList parameters = new ArrayList<>();\nif (documentation.isPresent()) {\nif (documentation.get().description().isPresent()) {\nsignatureInfoModel.setSignatureDescription(documentation.get().description().get().trim(), context);\n}\ndocumentation.get().parameterMap().forEach(paramToDesc::put);\n}\nList parameterSymbols = functionSymbol.typeDescriptor().params().orElse(new ArrayList<>());\nparameters.addAll(parameterSymbols.stream()\n.map(param -> new Parameter(param, false, false, context)).collect(Collectors.toList()));\nOptional restParam = functionSymbol.typeDescriptor().restParam();\nrestParam.ifPresent(parameter -> parameters.add(new Parameter(parameter, false, true, context)));\nboolean skipFirstParam = functionSymbol.kind() == METHOD\n&& CommonUtil.isLangLib(functionSymbol.getModule().get().id());\nfor (int i = 0; i < parameters.size(); i++) {\nif (i == 0 && skipFirstParam) {\ncontinue;\n}\nParameter param = parameters.get(i);\nString desc = \"\";\nif (param.getName().isPresent() && 
paramToDesc.containsKey(param.getName().get())) {\ndesc = paramToDesc.get(param.getName().get());\n}\nparamModels.add(new ParameterInfoModel(param, desc, context));\n}\nsignatureInfoModel.setParameterInfoModels(paramModels);\nreturn signatureInfoModel;\n}\nprivate static MarkupContent getParameterDocumentation(ParameterInfoModel paramInfo) {\nMarkupContent paramDocumentation = new MarkupContent();\nparamDocumentation.setKind(CommonUtil.MARKDOWN_MARKUP_KIND);\nString type = paramInfo.parameter.getType();\nStringBuilder markupContent = new StringBuilder();\nmarkupContent.append(\"**Parameter**\")\n.append(CommonUtil.MD_LINE_SEPARATOR)\n.append(\"**\")\n.append((!type.isEmpty()) ? \"`\" + type + \"`\" : \"\");\nif (paramInfo.parameter.getName().isPresent()) {\nmarkupContent.append(paramInfo.parameter.getName().get());\n}\nmarkupContent.append(\"**\");\nif (!paramInfo.description.isBlank()) {\nmarkupContent.append(\": \").append(paramInfo.description);\n}\nparamDocumentation.setValue(markupContent.toString());\nreturn paramDocumentation;\n}\n/**\n* Parameter model to hold the parameter information meta data.\n*/\nprivate static class Parameter {\nprivate final boolean isRestArg;\nprivate final boolean isOptional;\nprivate final ParameterSymbol parameterSymbol;\nprivate final SignatureContext signatureContext;\npublic Parameter(ParameterSymbol parameterSymbol,\nboolean isOptional,\nboolean isRestArg,\nSignatureContext signatureContext) {\nthis.parameterSymbol = parameterSymbol;\nthis.isOptional = isOptional;\nthis.isRestArg = isRestArg;\nthis.signatureContext = signatureContext;\n}\npublic Optional getName() {\nreturn (parameterSymbol.getName().isPresent() && this.isOptional)\n? Optional.of(parameterSymbol.getName().get() + \"?\") : parameterSymbol.getName();\n}\npublic String getType() {\nString type = CommonUtil.getModifiedTypeName(this.signatureContext, parameterSymbol.typeDescriptor());\nif (this.isRestArg && !\"\".equals(type)) {\nif (type.contains(\"[]\")) {\ntype = type.substring(0, type.length() - 2);\n}\ntype += \"...\";\n}\nreturn type;\n}\n}\n/**\n* Parameter information model to hold the parameter information meta data.\n*/\nprivate static class ParameterInfoModel {\nprivate final String description;\nprivate final Parameter parameter;\npublic ParameterInfoModel(Parameter parameter, String desc, SignatureContext signatureContext) {\nthis.parameter = parameter;\nthis.description = desc;\n}\n@Override\npublic String toString() {\nreturn this.parameter.getType()\n+ (parameter.getName().isPresent() ? (\" \" + parameter.getName().get()) : \"\");\n}\n}\n/**\n* Signature information model to collect the info required for the signature.\n*/\nprivate static class SignatureInfoModel {\nprivate List parameterInfoModels;\nprivate Either signatureDescription;\nList getParameterInfoModels() {\nreturn parameterInfoModels;\n}\nvoid setParameterInfoModels(List parameterInfoModels) {\nthis.parameterInfoModels = parameterInfoModels;\n}\nvoid setSignatureDescription(String signatureDescription, SignatureContext signatureContext) {\nSignatureInformationCapabilities capabilities = signatureContext.capabilities().getSignatureInformation();\nList documentationFormat = capabilities != null ? 
capabilities.getDocumentationFormat()\n: new ArrayList<>();\nif (documentationFormat != null\n&& !documentationFormat.isEmpty()\n&& documentationFormat.get(0).equals(CommonUtil.MARKDOWN_MARKUP_KIND)) {\nMarkupContent signatureMarkupContent = new MarkupContent();\nsignatureMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND);\nsignatureMarkupContent.setValue(\n\"**Description**\" + CommonUtil.MD_LINE_SEPARATOR + signatureDescription);\nthis.signatureDescription = Either.forRight(signatureMarkupContent);\n} else {\nthis.signatureDescription = Either.forLeft(\n\"Description\" + CommonUtil.LINE_SEPARATOR + signatureDescription);\n}\n}\n}\n/**\n* Find the token at cursor.\n*/\nprivate static void fillTokenInfoAtCursor(SignatureContext context) {\nOptional document = context.currentDocument();\nif (document.isEmpty()) {\nreturn;\n}\nTextDocument textDocument = document.get().textDocument();\nPosition position = context.getCursorPosition();\nint txtPos = textDocument.textPositionFrom(LinePosition.from(position.getLine(), position.getCharacter()));\ncontext.setCursorPositionInTree(txtPos);\nTextRange range = TextRange.from(txtPos, 0);\nNonTerminalNode nonTerminalNode = ((ModulePartNode) document.get().syntaxTree().rootNode()).findNode(range);\nwhile (true) {\nif (nonTerminalNode != null && (!withinTextRange(txtPos, nonTerminalNode)\n|| (nonTerminalNode.kind() != SyntaxKind.FUNCTION_CALL\n&& nonTerminalNode.kind() != SyntaxKind.METHOD_CALL)\n&& nonTerminalNode.kind() != SyntaxKind.REMOTE_METHOD_CALL_ACTION\n&& nonTerminalNode.kind() != SyntaxKind.IMPLICIT_NEW_EXPRESSION)\n&& nonTerminalNode.kind() != SyntaxKind.EXPLICIT_NEW_EXPRESSION) {\nnonTerminalNode = nonTerminalNode.parent();\ncontinue;\n}\nbreak;\n}\ncontext.setNodeAtCursor(nonTerminalNode);\n}\nprivate static boolean withinTextRange(int position, @Nonnull NonTerminalNode node) {\nTextRange rangeWithMinutiae = node.textRangeWithMinutiae();\nTextRange textRange = node.textRange();\nTextRange leadingMinutiaeRange = TextRange.from(rangeWithMinutiae.startOffset(),\ntextRange.startOffset() - rangeWithMinutiae.startOffset());\nreturn leadingMinutiaeRange.endOffset() <= position;\n}\npublic static Optional getFunctionSymbol(SignatureContext context) {\nif (context.getNodeAtCursor().isEmpty()) {\nreturn Optional.empty();\n}\nNonTerminalNode nodeAtCursor = context.getNodeAtCursor().get();\nif (nodeAtCursor.kind() == SyntaxKind.FUNCTION_CALL) {\nNameReferenceNode nameReferenceNode = ((FunctionCallExpressionNode) nodeAtCursor).functionName();\nString funcName;\nPredicate symbolPredicate = symbol -> symbol.kind() == FUNCTION;\nList filteredContent;\nif (nameReferenceNode.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nQualifiedNameReferenceNode qNameRef = (QualifiedNameReferenceNode) nameReferenceNode;\nfuncName = (qNameRef).identifier().text();\nfilteredContent = QNameReferenceUtil.getModuleContent(context, qNameRef,\nsymbolPredicate.and(symbol -> symbol.getName().orElse(\"\").equals(funcName)));\n} else {\nfuncName = ((SimpleNameReferenceNode) nameReferenceNode).name().text();\nList visibleSymbols = context.visibleSymbols(context.getCursorPosition());\nfilteredContent = visibleSymbols.stream()\n.filter(symbolPredicate.and(symbol -> symbol.getName().get().equals(funcName)))\n.collect(Collectors.toList());\n}\nreturn filteredContent.stream().map(symbol -> (FunctionSymbol) symbol).findAny();\n}\nOptional typeDesc;\nString methodName;\nif (nodeAtCursor.kind() == SyntaxKind.METHOD_CALL) {\nMethodCallExpressionNode methodCall = 
(MethodCallExpressionNode) nodeAtCursor;\ntypeDesc = getTypeDesc(context, methodCall.expression());\nmethodName = ((SimpleNameReferenceNode) methodCall.methodName()).name().text();\n} else if (nodeAtCursor.kind() == SyntaxKind.REMOTE_METHOD_CALL_ACTION) {\nRemoteMethodCallActionNode remoteMethodCall = (RemoteMethodCallActionNode) nodeAtCursor;\ntypeDesc = getTypeDesc(context, remoteMethodCall.expression());\nmethodName = remoteMethodCall.methodName().name().text();\n} else if (nodeAtCursor.kind() == SyntaxKind.IMPLICIT_NEW_EXPRESSION\n|| nodeAtCursor.kind() == SyntaxKind.EXPLICIT_NEW_EXPRESSION) {\nContextTypeResolver resolver = new ContextTypeResolver(context);\ntypeDesc = nodeAtCursor.apply(resolver);\nmethodName = Names.USER_DEFINED_INIT_SUFFIX.getValue();\n} else {\nreturn Optional.empty();\n}\nif (typeDesc.isEmpty()) {\nreturn Optional.empty();\n}\nreturn getFunctionSymbolsForTypeDesc(typeDesc.get()).stream()\n.filter(functionSymbol -> functionSymbol.getName().orElse(\"\").equals(methodName))\n.findAny();\n}\nprivate static Optional getTypeDesc(SignatureContext ctx, ExpressionNode expr) {\nswitch (expr.kind()) {\ncase SIMPLE_NAME_REFERENCE:\n/*\nCaptures the following\n(1) fieldName\n*/\nreturn getTypeDescForNameRef(ctx, (SimpleNameReferenceNode) expr);\ncase FUNCTION_CALL:\n/*\nCaptures the following\n(1) functionName()\n*/\nreturn getTypeDescForFunctionCall(ctx, (FunctionCallExpressionNode) expr);\ncase METHOD_CALL: {\n/*\nAddress the following\n(1) test.testMethod()\n*/\nreturn getTypeDescForMethodCall(ctx, (MethodCallExpressionNode) expr);\n}\ncase FIELD_ACCESS: {\n/*\nAddress the following\n(1) test1.test2\n*/\nreturn getTypeDescForFieldAccess(ctx, (FieldAccessExpressionNode) expr);\n}\ndefault:\nreturn Optional.empty();\n}\n}\nprivate static Optional getTypeDescForFieldAccess(\nSignatureContext context, FieldAccessExpressionNode node) {\nString fieldName = ((SimpleNameReferenceNode) node.fieldName()).name().text();\nExpressionNode expressionNode = node.expression();\nOptional typeDescriptor = getTypeDesc(context, expressionNode);\nif (typeDescriptor.isEmpty()) {\nreturn Optional.empty();\n}\nTypeSymbol rawType = CommonUtil.getRawType(typeDescriptor.get());\nswitch (rawType.typeKind()) {\ncase OBJECT:\nObjectFieldSymbol objField = ((ObjectTypeSymbol) rawType).fieldDescriptors().get(fieldName);\nreturn objField != null ? Optional.of(objField.typeDescriptor()) : Optional.empty();\ncase RECORD:\nRecordFieldSymbol recField = ((RecordTypeSymbol) rawType).fieldDescriptors().get(fieldName);\nreturn recField != null ? 
Optional.of(recField.typeDescriptor()) : Optional.empty();\ndefault:\nreturn Optional.empty();\n}\n}\nprivate static Optional getTypeDescForNameRef(SignatureContext context,\nNameReferenceNode referenceNode) {\nif (referenceNode.kind() != SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn Optional.empty();\n}\nString name = ((SimpleNameReferenceNode) referenceNode).name().text();\nList visibleSymbols = context.visibleSymbols(context.getCursorPosition());\nOptional symbolRef = visibleSymbols.stream()\n.filter(symbol -> Objects.equals(symbol.getName().orElse(null), name))\n.findFirst();\nif (symbolRef.isEmpty()) {\nreturn Optional.empty();\n}\nreturn SymbolUtil.getTypeDescriptor(symbolRef.get());\n}\nprivate static Optional getTypeDescForFunctionCall(\nSignatureContext context, FunctionCallExpressionNode expr) {\nString fName = ((SimpleNameReferenceNode) expr.functionName()).name().text();\nList visibleSymbols = context.visibleSymbols(context.getCursorPosition());\nOptional symbolRef = visibleSymbols.stream()\n.filter(symbol -> symbol.kind() == SymbolKind.FUNCTION && symbol.getName().get().equals(fName))\n.map(symbol -> (FunctionSymbol) symbol)\n.findFirst();\nif (symbolRef.isEmpty()) {\nreturn Optional.empty();\n}\nreturn symbolRef.get().typeDescriptor().returnTypeDescriptor();\n}\nprivate static Optional getTypeDescForMethodCall(\nSignatureContext context, MethodCallExpressionNode node) {\nString methodName = ((SimpleNameReferenceNode) node.methodName()).name().text();\nOptional fieldTypeDesc = getTypeDesc(context, node.expression());\nif (fieldTypeDesc.isEmpty()) {\nreturn Optional.empty();\n}\nList visibleMethods = fieldTypeDesc.get().langLibMethods();\nif (CommonUtil.getRawType(fieldTypeDesc.get()).typeKind() == TypeDescKind.OBJECT) {\nvisibleMethods.addAll(((ObjectTypeSymbol) CommonUtil.getRawType(fieldTypeDesc.get())).methods().values());\n}\nOptional filteredMethod = visibleMethods.stream()\n.filter(methodSymbol -> Objects.equals(methodSymbol.getName().orElse(null), methodName))\n.findFirst();\nif (filteredMethod.isEmpty()) {\nreturn Optional.empty();\n}\nreturn filteredMethod.get().typeDescriptor().returnTypeDescriptor();\n}\nprivate static List getFunctionSymbolsForTypeDesc(TypeSymbol typeDescriptor) {\nList functionSymbols = new ArrayList<>();\nTypeSymbol rawType = CommonUtil.getRawType(typeDescriptor);\nif (rawType.typeKind() == TypeDescKind.OBJECT) {\nObjectTypeSymbol objTypeDesc = (ObjectTypeSymbol) rawType;\nfunctionSymbols.addAll(objTypeDesc.methods().values());\n}\nif (rawType.kind() == CLASS && ((ClassSymbol) rawType).initMethod().isPresent()) {\nfunctionSymbols.add(((ClassSymbol) rawType).initMethod().get());\n}\nfunctionSymbols.addAll(typeDescriptor.langLibMethods());\nreturn functionSymbols;\n}\n}" }, { "comment": "I changed it to: > \"invalid Ballerina source path, it should be a name of a module in a Ballerina project or a file with a '.bal' extension or an executable .jar file.\"", "method_body": "public void execute() {\nif (this.helpFlag) {\nString commandUsageInfo = BLauncherCmd.getCommandUsageInfo(Constants.RUN_COMMAND);\nthis.errStream.println(commandUsageInfo);\nreturn;\n}\nif (this.argList == null || this.argList.size() == 0) {\nCommandUtil.printError(this.errStream,\n"no ballerina program given.",\n"ballerina run { | }",\ntrue);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nif (null != this.debugPort) {\nSystem.setProperty(SYSTEM_PROP_BAL_DEBUG, this.debugPort);\n}\nString[] programArgs = this.getProgramArgs(this.argList);\nPath sourceRootPath = 
"method_body": "public void execute() {\nif (this.helpFlag) {\nString commandUsageInfo = BLauncherCmd.getCommandUsageInfo(Constants.RUN_COMMAND);\nthis.errStream.println(commandUsageInfo);\nreturn;\n}\nif (this.argList == null || this.argList.size() == 0) {\nCommandUtil.printError(this.errStream,\n\"no ballerina program given.\",\n\"ballerina run {<bal-file> | <module-name>}\",\ntrue);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nif (null != this.debugPort) {\nSystem.setProperty(SYSTEM_PROP_BAL_DEBUG, this.debugPort);\n}\nString[] programArgs = this.getProgramArgs(this.argList);\nPath sourceRootPath = this.sourceRoot == null ? Paths.get(System.getProperty(\"user.dir\")) :\nPaths.get(this.sourceRoot);\nPath sourcePath;\nPath targetPath;\nif (this.argList.get(0).endsWith(BLangConstants.BLANG_SRC_FILE_SUFFIX)) {\nif (Paths.get(this.argList.get(0)).isAbsolute()) {\nsourcePath = Paths.get(this.argList.get(0));\nsourceRootPath = sourcePath.getParent();\n} else {\nsourcePath = sourceRootPath.resolve(this.argList.get(0));\n}\nif (Files.notExists(sourcePath)) {\nCommandUtil.printError(this.errStream,\n\"'\" + sourcePath + \"' Ballerina file does not exist.\",\nnull,\nfalse);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nif (!Files.isRegularFile(sourcePath)) {\nCommandUtil.printError(this.errStream,\n\"'\" + sourcePath + \"' is not a Ballerina file. check if it is a symlink or a shortcut.\",\nnull,\nfalse);\nRuntime.getRuntime().exit(1);\nreturn;\n}\ntry {\ntargetPath = Files.createTempDirectory(\"ballerina-run-\" + System.nanoTime());\n} catch (IOException e) {\nthrow LauncherUtils.createLauncherException(\"error occurred when creating executable.\");\n}\n} else if (Files.exists(\nsourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(this.argList.get(0))) &&\nFiles.isDirectory(\nsourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(this.argList.get(0)))) {\nif (!RepoUtils.isBallerinaProject(sourceRootPath)) {\nCommandUtil.printError(this.errStream,\n\"you are trying to run a module that is not inside a project.\",\nnull,\nfalse);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nif (Paths.get(argList.get(0)).isAbsolute()) {\nCommandUtil.printError(this.errStream,\n\"you are trying to run a module by giving the absolute path. you only need give \" +\n\"the name of the module.\",\n\"ballerina run <module-name>\",\ntrue);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nString moduleName = argList.get(0);\nif (moduleName.endsWith(\"/\")) {\nmoduleName = moduleName.substring(0, moduleName.length() - 1);\n}\nsourcePath = Paths.get(moduleName);\nif (Files.notExists(sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(sourcePath))) {\nCommandUtil.printError(this.errStream,\n\"'\" + sourcePath + \"' module does not exist.\",\n\"ballerina run <module-name>\",\ntrue);\nRuntime.getRuntime().exit(1);\nreturn;\n}\ntargetPath = sourceRootPath.resolve(ProjectDirConstants.TARGET_DIR_NAME);\n} else {\nCommandUtil.printError(this.errStream,\n\"invalid Ballerina source path, it should either be a module name in a Ballerina project, a \" +\n\"file with a \\'\" + BLangConstants.BLANG_SRC_FILE_SUFFIX + \"\\' extension or a .jar file.\",\n\"ballerina run {<bal-file> | <module-name>}\",\ntrue);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nsourceRootPath = sourceRootPath.normalize();\nsourcePath = sourcePath == null ? 
null : sourcePath.normalize();\ntargetPath = targetPath.normalize();\nCompilerContext compilerContext = new CompilerContext();\nCompilerOptions options = CompilerOptions.getInstance(compilerContext);\noptions.put(PROJECT_DIR, sourceRootPath.toString());\noptions.put(OFFLINE, Boolean.toString(this.offline));\noptions.put(COMPILER_PHASE, CompilerPhase.BIR_GEN.toString());\noptions.put(LOCK_ENABLED, Boolean.toString(true));\noptions.put(SKIP_TESTS, Boolean.toString(true));\noptions.put(TEST_ENABLED, \"true\");\noptions.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(this.experimentalFlag));\nBuildContext buildContext = new BuildContext(sourceRootPath, targetPath, sourcePath, compilerContext);\nbuildContext.setOut(this.outStream);\nbuildContext.setErr(this.errStream);\nboolean isSingleFileBuild = buildContext.getSourceType().equals(SINGLE_BAL_FILE);\nTaskExecutor taskExecutor = new TaskExecutor.TaskBuilder()\n.addTask(new CleanTargetDirTask(), isSingleFileBuild)\n.addTask(new CreateTargetDirTask())\n.addTask(new CompileTask())\n.addTask(new CreateBaloTask(), isSingleFileBuild)\n.addTask(new CreateBirTask())\n.addTask(new CopyNativeLibTask())\n.addTask(new CreateJarTask(false))\n.addTask(new CopyModuleJarTask())\n.addTask(new CreateExecutableTask())\n.addTask(new PrintExecutablePathTask(), isSingleFileBuild)\n.addTask(new PrintRunningExecutableTask(!isSingleFileBuild))\n.addTask(new RunExecutableTask(programArgs))\n.build();\ntaskExecutor.executeTasks(buildContext);\n}", "target_code": "\"file with a \\'\" + BLangConstants.BLANG_SRC_FILE_SUFFIX + \"\\' extension or a .jar file.\",", "method_body_after": "public void execute() {\nif (this.helpFlag) {\nString commandUsageInfo = BLauncherCmd.getCommandUsageInfo(Constants.RUN_COMMAND);\nthis.errStream.println(commandUsageInfo);\nreturn;\n}\nif (this.argList == null || this.argList.size() == 0) {\nCommandUtil.printError(this.errStream,\n\"no ballerina program given.\",\n\"ballerina run {<bal-file> | <module-name> | <executable-jar>}\",\ntrue);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nif (null != this.debugPort) {\nSystem.setProperty(SYSTEM_PROP_BAL_DEBUG, this.debugPort);\n}\nString[] programArgs = this.getProgramArgs(this.argList);\nPath sourceRootPath = this.sourceRoot == null ? Paths.get(System.getProperty(\"user.dir\")) :\nPaths.get(this.sourceRoot);\nPath sourcePath;\nPath targetPath;\nif (this.argList.get(0).endsWith(BLangConstants.BLANG_SRC_FILE_SUFFIX)) {\nif (Paths.get(this.argList.get(0)).isAbsolute()) {\nsourcePath = Paths.get(this.argList.get(0));\nsourceRootPath = sourcePath.getParent();\n} else {\nsourcePath = sourceRootPath.resolve(this.argList.get(0));\n}\nif (Files.notExists(sourcePath)) {\nCommandUtil.printError(this.errStream,\n\"'\" + sourcePath + \"' Ballerina file does not exist.\",\nnull,\nfalse);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nif (!Files.isRegularFile(sourcePath)) {\nCommandUtil.printError(this.errStream,\n\"'\" + sourcePath + \"' is not a Ballerina file. check if it is a symlink or a shortcut.\",\nnull,\nfalse);\nRuntime.getRuntime().exit(1);\nreturn;\n}\ntry {\ntargetPath = Files.createTempDirectory(\"ballerina-run-\" + System.nanoTime());\n} catch (IOException e) {\nthrow LauncherUtils.createLauncherException(\"error occurred when creating executable.\");\n}\n} else if (Files.exists(\nsourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(this.argList.get(0))) &&\nFiles.isDirectory(\nsourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(this.argList.get(0)))) {\nif (!RepoUtils.isBallerinaProject(sourceRootPath)) {\nCommandUtil.printError(this.errStream,\n\"you are trying to run a module that is not inside a project.\",\nnull,\nfalse);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nif (Paths.get(argList.get(0)).isAbsolute()) {\nCommandUtil.printError(this.errStream,\n\"you are trying to run a module by giving the absolute path. you only need give \" +\n\"the name of the module.\",\n\"ballerina run <module-name>\",\ntrue);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nString moduleName = argList.get(0);\nif (moduleName.endsWith(\"/\")) {\nmoduleName = moduleName.substring(0, moduleName.length() - 1);\n}\nsourcePath = Paths.get(moduleName);\nif (Files.notExists(sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(sourcePath))) {\nCommandUtil.printError(this.errStream,\n\"'\" + sourcePath + \"' module does not exist.\",\n\"ballerina run <module-name>\",\ntrue);\nRuntime.getRuntime().exit(1);\nreturn;\n}\ntargetPath = sourceRootPath.resolve(ProjectDirConstants.TARGET_DIR_NAME);\n} else {\nCommandUtil.printError(this.errStream,\n\"invalid Ballerina source path. It should either be a name of a module in a Ballerina project, \" +\n\"a file with a '\" + BLangConstants.BLANG_SRC_FILE_SUFFIX + \"' extension, or an executable '\" +\nBLANG_COMPILED_JAR_EXT + \"' file.\",\n\"ballerina run {<bal-file> | <module-name> | <executable-jar>}\",\ntrue);\nRuntime.getRuntime().exit(1);\nreturn;\n}\nsourceRootPath = sourceRootPath.normalize();\nsourcePath = sourcePath == null ? 
null : sourcePath.normalize();\ntargetPath = targetPath.normalize();\nCompilerContext compilerContext = new CompilerContext();\nCompilerOptions options = CompilerOptions.getInstance(compilerContext);\noptions.put(PROJECT_DIR, sourceRootPath.toString());\noptions.put(OFFLINE, Boolean.toString(this.offline));\noptions.put(COMPILER_PHASE, CompilerPhase.BIR_GEN.toString());\noptions.put(LOCK_ENABLED, Boolean.toString(true));\noptions.put(SKIP_TESTS, Boolean.toString(true));\noptions.put(TEST_ENABLED, \"true\");\noptions.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(this.experimentalFlag));\nBuildContext buildContext = new BuildContext(sourceRootPath, targetPath, sourcePath, compilerContext);\nbuildContext.setOut(this.outStream);\nbuildContext.setErr(this.errStream);\nboolean isSingleFileBuild = buildContext.getSourceType().equals(SINGLE_BAL_FILE);\nTaskExecutor taskExecutor = new TaskExecutor.TaskBuilder()\n.addTask(new CleanTargetDirTask(), isSingleFileBuild)\n.addTask(new CreateTargetDirTask())\n.addTask(new CompileTask())\n.addTask(new CreateBaloTask(), isSingleFileBuild)\n.addTask(new CreateBirTask())\n.addTask(new CopyNativeLibTask())\n.addTask(new CreateJarTask(false))\n.addTask(new CopyModuleJarTask())\n.addTask(new CreateExecutableTask())\n.addTask(new PrintExecutablePathTask(), isSingleFileBuild)\n.addTask(new PrintRunningExecutableTask(!isSingleFileBuild))\n.addTask(new RunExecutableTask(programArgs))\n.build();\ntaskExecutor.executeTasks(buildContext);\n}", "context_before": "class RunCommand implements BLauncherCmd {\nprivate final PrintStream outStream;\nprivate final PrintStream errStream;\n@CommandLine.Parameters(description = \"Program arguments\")\nprivate List<String> argList;\n@CommandLine.Option(names = {\"--sourceroot\"},\ndescription = \"Path to the directory containing source files and modules\")\nprivate String sourceRoot;\n@CommandLine.Option(names = {\"--help\", \"-h\", \"?\"}, hidden = true)\nprivate boolean helpFlag;\n@CommandLine.Option(names = {\"--offline\"}, description = \"Builds offline without downloading dependencies and \" +\n\"then run.\")\nprivate boolean offline;\n@CommandLine.Option(names = \"--debug\", hidden = true)\nprivate String debugPort;\n@CommandLine.Option(names = \"--experimental\", description = \"Enable experimental language features.\")\nprivate boolean experimentalFlag;\npublic RunCommand() {\nthis.outStream = System.err;\nthis.errStream = System.err;\n}\npublic RunCommand(PrintStream outStream, PrintStream errStream) {\nthis.outStream = outStream;\nthis.errStream = errStream;\n}\n/**\n* Get the program args from the passed argument list.\n*\n* @param argList The argument list.\n* @return An array of program args.\n*/\nprivate String[] getProgramArgs(List<String> argList) {\nString[] argsArray = argList.toArray(new String[0]);\nreturn Arrays.copyOfRange(argsArray, 1, argsArray.length);\n}\n@Override\npublic String getName() {\nreturn BallerinaCliCommands.RUN;\n}\n@Override\npublic void printLongDesc(StringBuilder out) {\nout.append(\"Run command runs a compiled Ballerina program. \\n\");\nout.append(\"\\n\");\nout.append(\"If a Ballerina source file or a module is given, \\n\");\nout.append(\"run command compiles and runs it. \\n\");\nout.append(\"\\n\");\nout.append(\"By default, 'ballerina run' executes the main function. \\n\");\nout.append(\"If the main function is not there, it executes services. 
\\n\");\nout.append(\"\\n\");\nout.append(\"If the -s flag is given, 'ballerina run' executes\\n\");\nout.append(\"services instead of the main function.\\n\");\n}\n@Override\npublic void printUsage(StringBuilder out) {\nout.append(\" ballerina run [--offline]\\n\" +\n\" [--sourceroot]\\n\" +\n\" { <bal-file> | module-name | executable-jar} [(--key=value)...] \"\n+ \"[--] [args...] \\n\");\n}\n@Override\npublic void setParentCmdParser(CommandLine parentCmdParser) {\n}\n}", "context_after": "class RunCommand implements BLauncherCmd {\nprivate final PrintStream outStream;\nprivate final PrintStream errStream;\n@CommandLine.Parameters(description = \"Program arguments\")\nprivate List<String> argList;\n@CommandLine.Option(names = {\"--sourceroot\"},\ndescription = \"Path to the directory containing source files and modules\")\nprivate String sourceRoot;\n@CommandLine.Option(names = {\"--help\", \"-h\", \"?\"}, hidden = true)\nprivate boolean helpFlag;\n@CommandLine.Option(names = {\"--offline\"}, description = \"Builds offline without downloading dependencies and \" +\n\"then run.\")\nprivate boolean offline;\n@CommandLine.Option(names = \"--debug\", hidden = true)\nprivate String debugPort;\n@CommandLine.Option(names = \"--experimental\", description = \"Enable experimental language features.\")\nprivate boolean experimentalFlag;\npublic RunCommand() {\nthis.outStream = System.err;\nthis.errStream = System.err;\n}\npublic RunCommand(PrintStream outStream, PrintStream errStream) {\nthis.outStream = outStream;\nthis.errStream = errStream;\n}\n/**\n* Get the program args from the passed argument list.\n*\n* @param argList The argument list.\n* @return An array of program args.\n*/\nprivate String[] getProgramArgs(List<String> argList) {\nString[] argsArray = argList.toArray(new String[0]);\nreturn Arrays.copyOfRange(argsArray, 1, argsArray.length);\n}\n@Override\npublic String getName() {\nreturn BallerinaCliCommands.RUN;\n}\n@Override\npublic void printLongDesc(StringBuilder out) {\nout.append(\"Run command runs a compiled Ballerina program. \\n\");\nout.append(\"\\n\");\nout.append(\"If a Ballerina source file or a module is given, \\n\");\nout.append(\"run command compiles and runs it. \\n\");\nout.append(\"\\n\");\nout.append(\"By default, 'ballerina run' executes the main function. \\n\");\nout.append(\"If the main function is not there, it executes services. \\n\");\nout.append(\"\\n\");\nout.append(\"If the -s flag is given, 'ballerina run' executes\\n\");\nout.append(\"services instead of the main function.\\n\");\n}\n@Override\npublic void printUsage(StringBuilder out) {\nout.append(\" ballerina run [--offline]\\n\" +\n\" [--sourceroot]\\n\" +\n\" { <bal-file> | module-name | executable-jar} [(--key=value)...] \"\n+ \"[--] [args...] \\n\");\n}\n@Override\npublic void setParentCmdParser(CommandLine parentCmdParser) {\n}\n}" }, { "comment": "line 637, 638 LOG.debug can be merged.",
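"comment_example": "// Editor's sketch (not part of the original review): the two adjacent LOG.debug calls collapse into one parameterized call that prints the whole request once, as method_body_after below does.\nLOG.debug(\"receive txn begin request: {}, backend: {}\", request, clientAddr);",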
"method_body": "public TLoadTxnBeginResult loadTxnBegin(TLoadTxnBeginRequest request) throws TException {\nString clientAddr = getClientAddrAsString();\nLOG.debug(\"receive txn begin request, db: {}, tbl: {}, label: {}, backend: {}\",\nrequest.getDb(), request.getTbl(), request.getLabel(), clientAddr);\nLOG.debug(\"txn begin request: {}\", request);\nTLoadTxnBeginResult result = new TLoadTxnBeginResult();\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\ntry {\nresult.setTxnId(loadTxnBeginImpl(request, clientAddr));\n} catch (DuplicatedRequestException e) {\nLOG.warn(\"duplicate request for stream load. request id: {}, txn: {}\", e.getDuplicatedRequestId(), e.getTxnId());\nresult.setTxnId(e.getTxnId());\n} catch (LabelAlreadyUsedException e) {\nstatus.setStatusCode(TStatusCode.LABEL_ALREADY_EXISTS);\nstatus.addToErrorMsgs(e.getMessage());\nresult.setJobStatus(e.getJobStatus());\n} catch (UserException e) {\nLOG.warn(\"failed to begin: {}\", e.getMessage());\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\nreturn result;\n}", "target_code": "LOG.debug(\"receive txn begin request, db: {}, tbl: {}, label: {}, backend: {}\",", "method_body_after": "public TLoadTxnBeginResult loadTxnBegin(TLoadTxnBeginRequest request) throws TException {\nString clientAddr = getClientAddrAsString();\nLOG.debug(\"receive txn begin request: {}, backend: {}\", request, clientAddr);\nTLoadTxnBeginResult result = new TLoadTxnBeginResult();\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\ntry {\nresult.setTxnId(loadTxnBeginImpl(request, clientAddr));\n} catch (DuplicatedRequestException e) {\nLOG.warn(\"duplicate request for stream load. request id: {}, txn: {}\", e.getDuplicatedRequestId(), e.getTxnId());\nresult.setTxnId(e.getTxnId());\n} catch (LabelAlreadyUsedException e) {\nstatus.setStatusCode(TStatusCode.LABEL_ALREADY_EXISTS);\nstatus.addToErrorMsgs(e.getMessage());\nresult.setJobStatus(e.getJobStatus());\n} catch (UserException e) {\nLOG.warn(\"failed to begin: {}\", e.getMessage());\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\nreturn result;\n}", "context_before": "class FrontendServiceImpl implements FrontendService.Iface {\nprivate static final Logger LOG = LogManager.getLogger(FrontendServiceImpl.class);\nprivate MasterImpl masterImpl;\nprivate ExecuteEnv exeEnv;\npublic FrontendServiceImpl(ExecuteEnv exeEnv) {\nmasterImpl = new MasterImpl();\nthis.exeEnv = exeEnv;\n}\n@Override\npublic TGetDbsResult getDbNames(TGetDbsParams params) throws TException {\nLOG.debug(\"get db request: {}\", params);\nTGetDbsResult result = new TGetDbsResult();\nList<String> dbs = Lists.newArrayList();\nPatternMatcher matcher = null;\nif (params.isSetPattern()) {\ntry {\nmatcher = PatternMatcher.createMysqlPattern(params.getPattern(),\nCaseSensibility.DATABASE.getCaseSensibility());\n} catch (AnalysisException e) {\nthrow new TException(\"Pattern is in bad format: \" + params.getPattern());\n}\n}\nCatalog catalog = Catalog.getCurrentCatalog();\nList<String> dbNames = catalog.getDbNames();\nLOG.debug(\"get db names: {}\", dbNames);\nUserIdentity currentUser = null;\nif (params.isSetCurrentUserIdent()) {\ncurrentUser = UserIdentity.fromThrift(params.current_user_ident);\n} else {\ncurrentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n}\nfor (String fullName : dbNames) {\nif (!catalog.getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) {\ncontinue;\n}\nfinal String db = ClusterNamespace.getNameFromFullName(fullName);\nif (matcher != null && !matcher.match(db)) {\ncontinue;\n}\ndbs.add(fullName);\n}\nresult.setDbs(dbs);\nreturn result;\n}\n@Override\npublic TGetTablesResult 
getTableNames(TGetTablesParams params) throws TException {\nLOG.debug(\"get table name request: {}\", params);\nTGetTablesResult result = new TGetTablesResult();\nList<String> tablesResult = Lists.newArrayList();\nresult.setTables(tablesResult);\nPatternMatcher matcher = null;\nif (params.isSetPattern()) {\ntry {\nmatcher = PatternMatcher.createMysqlPattern(params.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n} catch (AnalysisException e) {\nthrow new TException(\"Pattern is in bad format: \" + params.getPattern());\n}\n}\n
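/* Editor's illustrative note (the pattern value is hypothetical): a MySQL-style pattern such as \"user%\" yields a matcher that accepts \"users\" but rejects \"admin_users\"; case sensitivity comes from CaseSensibility.TABLE. */\n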
Database db = Catalog.getCurrentCatalog().getDb(params.db);\nUserIdentity currentUser = null;\nif (params.isSetCurrentUserIdent()) {\ncurrentUser = UserIdentity.fromThrift(params.current_user_ident);\n} else {\ncurrentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n}\nif (db != null) {\nfor (String tableName : db.getTableNamesWithLock()) {\nLOG.debug(\"get table: {}, wait to check\", tableName);\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(currentUser, params.db,\ntableName, PrivPredicate.SHOW)) {\ncontinue;\n}\nif (matcher != null && !matcher.match(tableName)) {\ncontinue;\n}\ntablesResult.add(tableName);\n}\n}\nreturn result;\n}\n@Override\npublic TListTableStatusResult listTableStatus(TGetTablesParams params) throws TException {\nLOG.debug(\"get list table request: {}\", params);\nTListTableStatusResult result = new TListTableStatusResult();\nList<TTableStatus> tablesResult = Lists.newArrayList();\nresult.setTables(tablesResult);\nPatternMatcher matcher = null;\nif (params.isSetPattern()) {\ntry {\nmatcher = PatternMatcher.createMysqlPattern(params.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n} catch (AnalysisException e) {\nthrow new TException(\"Pattern is in bad format \" + params.getPattern());\n}\n}\nDatabase db = Catalog.getCurrentCatalog().getDb(params.db);\nUserIdentity currentUser = null;\nif (params.isSetCurrentUserIdent()) {\ncurrentUser = UserIdentity.fromThrift(params.current_user_ident);\n} else {\ncurrentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n}\nif (db != null) {\ndb.readLock();\ntry {\nfor (Table table : db.getTables()) {\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(currentUser, params.db,\ntable.getName(), PrivPredicate.SHOW)) {\ncontinue;\n}\nif (matcher != null && !matcher.match(table.getName())) {\ncontinue;\n}\nTTableStatus status = new TTableStatus();\nstatus.setName(table.getName());\nstatus.setType(table.getMysqlType());\nstatus.setEngine(table.getEngine());\nstatus.setComment(table.getComment());\nstatus.setCreateTime(table.getCreateTime());\nstatus.setLastCheckTime(table.getLastCheckTime());\ntablesResult.add(status);\n}\n} finally {\ndb.readUnlock();\n}\n}\nreturn result;\n}\n@Override\npublic TFeResult updateExportTaskStatus(TUpdateExportTaskStatusRequest request) throws TException {\nTStatus status = new TStatus(TStatusCode.OK);\nTFeResult result = new TFeResult(FrontendServiceVersion.V1, status);\nreturn result;\n}\n@Override\npublic TDescribeTableResult describeTable(TDescribeTableParams params) throws TException {\nLOG.debug(\"get desc table request: {}\", params);\nTDescribeTableResult result = new TDescribeTableResult();\nList<TColumnDef> columns = Lists.newArrayList();\nresult.setColumns(columns);\nUserIdentity currentUser = null;\nif (params.isSetCurrentUserIdent()) {\ncurrentUser = UserIdentity.fromThrift(params.current_user_ident);\n} else {\ncurrentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n}\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(currentUser, params.db,\nparams.getTableName(), PrivPredicate.SHOW)) {\nreturn result;\n}\nDatabase db = Catalog.getCurrentCatalog().getDb(params.db);\nif (db != null) {\ndb.readLock();\ntry {\nTable table = db.getTable(params.getTableName());\nif (table != null) {\nfor (Column column : table.getBaseSchema(params.isShowHiddenColumns())) {\nfinal TColumnDesc desc = new TColumnDesc(column.getName(), column.getDataType().toThrift());\nfinal Integer precision = column.getOriginType().getPrecision();\nif (precision != null) {\ndesc.setColumnPrecision(precision);\n}\nfinal Integer columnLength = column.getOriginType().getColumnSize();\nif (columnLength != null) {\ndesc.setColumnLength(columnLength);\n}\nfinal Integer decimalDigits = column.getOriginType().getDecimalDigits();\nif (decimalDigits != null) {\ndesc.setColumnScale(decimalDigits);\n}\nfinal TColumnDef colDef = new TColumnDef(desc);\nfinal String comment = column.getComment();\nif(comment != null) {\ncolDef.setComment(comment);\n}\ncolumns.add(colDef);\n}\n}\n} finally {\ndb.readUnlock();\n}\n}\nreturn result;\n}\n@Override\npublic TShowVariableResult showVariables(TShowVariableRequest params) throws TException {\nTShowVariableResult result = new TShowVariableResult();\nMap<String, String> map = Maps.newHashMap();\nresult.setVariables(map);\nConnectContext ctx = exeEnv.getScheduler().getContext(params.getThreadId());\nif (ctx == null) {\nreturn result;\n}\nList<List<String>> rows = VariableMgr.dump(SetType.fromThrift(params.getVarType()), ctx.getSessionVariable(),\nnull);\nfor (List<String> row : rows) {\nmap.put(row.get(0), row.get(1));\n}\nreturn result;\n}\n@Override\npublic TReportExecStatusResult reportExecStatus(TReportExecStatusParams params) throws TException {\nreturn QeProcessorImpl.INSTANCE.reportExecStatus(params, getClientAddr());\n}\n@Override\npublic TMasterResult finishTask(TFinishTaskRequest request) throws TException {\nreturn masterImpl.finishTask(request);\n}\n@Override\npublic TMasterResult report(TReportRequest request) throws TException {\nreturn masterImpl.report(request);\n}\n@Override\npublic TFetchResourceResult fetchResource() throws TException {\nreturn masterImpl.fetchResource();\n}\n@Deprecated\n@Override\npublic TFeResult miniLoad(TMiniLoadRequest request) throws TException {\nLOG.debug(\"receive mini load request: label: {}, db: {}, tbl: {}, backend: {}\",\nrequest.getLabel(), request.getDb(), request.getTbl(), request.getBackend());\nConnectContext context = new ConnectContext(null);\nString cluster = SystemInfoService.DEFAULT_CLUSTER;\nif (request.isSetCluster()) {\ncluster = request.cluster;\n}\nfinal String fullDbName = ClusterNamespace.getFullName(cluster, request.db);\nrequest.setDb(fullDbName);\ncontext.setCluster(cluster);\ncontext.setDatabase(ClusterNamespace.getFullName(cluster, request.db));\ncontext.setQualifiedUser(ClusterNamespace.getFullName(cluster, request.user));\ncontext.setCatalog(Catalog.getCurrentCatalog());\ncontext.getState().reset();\ncontext.setThreadLocalInfo();\nTStatus status = new TStatus(TStatusCode.OK);\nTFeResult result = new TFeResult(FrontendServiceVersion.V1, status);\ntry {\nif (request.isSetSubLabel()) {\nExecuteEnv.getInstance().getMultiLoadMgr().load(request);\n} else {\nif (Catalog.getCurrentCatalog().getLoadManager().createLoadJobV1FromRequest(request)) {\ntry {\nlogMiniLoadStmt(request);\n} catch (Exception e) {\nLOG.warn(\"failed log mini load stmt\", e);\n}\n}\n}\n} catch (UserException e) {\nLOG.warn(\"add mini load error: {}\", e.getMessage());\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\n} catch (Throwable e) {\nLOG.warn(\"unexpected exception when adding mini load\", e);\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\n} finally {\nConnectContext.remove();\n}\nLOG.debug(\"mini load result: {}\", result);\nreturn result;\n}\nprivate void logMiniLoadStmt(TMiniLoadRequest request) throws UnknownHostException {\nString stmt = getMiniLoadStmt(request);\nAuditEvent auditEvent = new AuditEventBuilder().setEventType(EventType.AFTER_QUERY)\n.setClientIp(request.user_ip + \":0\")\n.setUser(request.user)\n.setDb(request.db)\n.setState(TStatusCode.OK.name())\n.setQueryTime(0)\n.setStmt(stmt).build();\nCatalog.getCurrentAuditEventProcessor().handleAuditEvent(auditEvent);\n}\nprivate String getMiniLoadStmt(TMiniLoadRequest request) throws UnknownHostException {\nStringBuilder stringBuilder = new StringBuilder();\nstringBuilder.append(\"curl --location-trusted -u user:passwd -T \");\nif (request.files.size() == 1) {\nstringBuilder.append(request.files.get(0));\n} else if (request.files.size() > 1) {\nstringBuilder.append(\"\\\"{\").append(Joiner.on(\",\").join(request.files)).append(\"}\\\"\");\n}\nInetAddress masterAddress = FrontendOptions.getLocalHost();\n
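/* Editor's illustrative note (host, port, db, table, and label are hypothetical): the assembled statement has the shape\ncurl --location-trusted -u user:passwd -T data.csv http://127.0.0.1:8030/api/example_db/example_tbl/_load?label=label1\n*/\n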
stringBuilder.append(\" http://\").append(masterAddress.getHostAddress()).append(\":\");\nstringBuilder.append(Config.http_port).append(\"/api/\").append(request.db).append(\"/\");\nstringBuilder.append(request.tbl).append(\"/_load?label=\").append(request.label);\nif (!request.properties.isEmpty()) {\nstringBuilder.append(\"&\");\nList<String> props = Lists.newArrayList();\nfor (Map.Entry<String, String> entry : request.properties.entrySet()) {\nString prop = entry.getKey() + \"=\" + entry.getValue();\nprops.add(prop);\n}\nstringBuilder.append(Joiner.on(\"&\").join(props));\n}\nreturn stringBuilder.toString();\n}\n@Override\npublic TFeResult updateMiniEtlTaskStatus(TUpdateMiniEtlTaskStatusRequest request) throws TException {\nTFeResult result = new TFeResult();\nresult.setProtocolVersion(FrontendServiceVersion.V1);\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\nTUniqueId etlTaskId = request.getEtlTaskId();\nlong jobId = etlTaskId.getHi();\nlong taskId = etlTaskId.getLo();\nLoadJob job = Catalog.getCurrentCatalog().getLoadInstance().getLoadJob(jobId);\nif (job == null) {\nString failMsg = \"job does not exist. id: \" + jobId;\nLOG.warn(failMsg);\nstatus.setStatusCode(TStatusCode.CANCELLED);\nstatus.addToErrorMsgs(failMsg);\nreturn result;\n}\nMiniEtlTaskInfo taskInfo = job.getMiniEtlTask(taskId);\nif (taskInfo == null) {\nString failMsg = \"task info does not exist. 
task id: \" + taskId + \", job id: \" + jobId;\nLOG.warn(failMsg);\nstatus.setStatusCode(TStatusCode.CANCELLED);\nstatus.addToErrorMsgs(failMsg);\nreturn result;\n}\nTMiniLoadEtlStatusResult statusResult = request.getEtlTaskStatus();\nLOG.debug(\"load job id: {}, etl task id: {}, status: {}\", jobId, taskId, statusResult);\nEtlStatus taskStatus = taskInfo.getTaskStatus();\nif (taskStatus.setState(statusResult.getEtlState())) {\nif (statusResult.isSetCounters()) {\ntaskStatus.setCounters(statusResult.getCounters());\n}\nif (statusResult.isSetTrackingUrl()) {\ntaskStatus.setTrackingUrl(statusResult.getTrackingUrl());\n}\nif (statusResult.isSetFileMap()) {\ntaskStatus.setFileMap(statusResult.getFileMap());\n}\n}\nreturn result;\n}\n@Override\npublic TMiniLoadBeginResult miniLoadBegin(TMiniLoadBeginRequest request) throws TException {\nLOG.debug(\"receive mini load begin request. label: {}, user: {}, ip: {}\",\nrequest.getLabel(), request.getUser(), request.getUserIp());\nTMiniLoadBeginResult result = new TMiniLoadBeginResult();\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\ntry {\nString cluster = SystemInfoService.DEFAULT_CLUSTER;\nif (request.isSetCluster()) {\ncluster = request.cluster;\n}\ncheckPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\nrequest.getTbl(), request.getUserIp(), PrivPredicate.LOAD);\nif (request.isSetSubLabel()) {\n} else {\nresult.setTxnId(Catalog.getCurrentCatalog().getLoadManager().createLoadJobFromMiniLoad(request));\n}\nreturn result;\n} catch (UserException e) {\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\nreturn result;\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\n}\n@Override\npublic TFeResult isMethodSupported(TIsMethodSupportedRequest request) throws TException {\nTStatus status = new TStatus(TStatusCode.OK);\nTFeResult result = new TFeResult(FrontendServiceVersion.V1, status);\nswitch (request.getFunctionName()){\ncase \"STREAMING_MINI_LOAD\":\nbreak;\ndefault:\nstatus.setStatusCode(NOT_IMPLEMENTED_ERROR);\nbreak;\n}\nreturn result;\n}\n@Override\npublic TMasterOpResult forward(TMasterOpRequest params) throws TException {\nTNetworkAddress clientAddr = getClientAddr();\nif (clientAddr != null) {\nFrontend fe = Catalog.getCurrentCatalog().getFeByHost(clientAddr.getHostname());\nif (fe == null) {\nLOG.warn(\"reject request from invalid host. 
client: {}\", clientAddr);\nthrow new TException(\"request from invalid host was rejected.\");\n}\n}\nLOG.debug(\"receive forwarded stmt {} from FE: {}\", params.getStmtId(), clientAddr.getHostname());\nConnectContext context = new ConnectContext(null);\nConnectProcessor processor = new ConnectProcessor(context);\nTMasterOpResult result = processor.proxyExecute(params);\nConnectContext.remove();\nreturn result;\n}\nprivate void checkPasswordAndPrivs(String cluster, String user, String passwd, String db, String tbl,\nString clientIp, PrivPredicate predicate) throws AuthenticationException {\nfinal String fullUserName = ClusterNamespace.getFullName(cluster, user);\nfinal String fullDbName = ClusterNamespace.getFullName(cluster, db);\nList currentUser = Lists.newArrayList();\nif (!Catalog.getCurrentCatalog().getAuth().checkPlainPassword(fullUserName, clientIp, passwd, currentUser)) {\nthrow new AuthenticationException(\"Access denied for \" + fullUserName + \"@\" + clientIp);\n}\nPreconditions.checkState(currentUser.size() == 1);\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(currentUser.get(0), fullDbName, tbl, predicate)) {\nthrow new AuthenticationException(\n\"Access denied; you need (at least one of) the LOAD privilege(s) for this operation\");\n}\n}\n@Override\npublic TFeResult loadCheck(TLoadCheckRequest request) throws TException {\nLOG.debug(\"receive load check request. label: {}, user: {}, ip: {}\",\nrequest.getLabel(), request.getUser(), request.getUserIp());\nTStatus status = new TStatus(TStatusCode.OK);\nTFeResult result = new TFeResult(FrontendServiceVersion.V1, status);\ntry {\nString cluster = SystemInfoService.DEFAULT_CLUSTER;\nif (request.isSetCluster()) {\ncluster = request.cluster;\n}\ncheckPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\nrequest.getTbl(), request.getUserIp(), PrivPredicate.LOAD);\n} catch (UserException e) {\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\nreturn result;\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\nreturn result;\n}\n@Override\nprivate long loadTxnBeginImpl(TLoadTxnBeginRequest request, String clientIp) throws UserException {\nString cluster = request.getCluster();\nif (Strings.isNullOrEmpty(cluster)) {\ncluster = SystemInfoService.DEFAULT_CLUSTER;\n}\ncheckPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\nrequest.getTbl(), request.getUserIp(), PrivPredicate.LOAD);\nif (Strings.isNullOrEmpty(request.getLabel())) {\nthrow new UserException(\"empty label in begin request\");\n}\nCatalog catalog = Catalog.getCurrentCatalog();\nString fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\nDatabase db = catalog.getDb(fullDbName);\nif (db == null) {\nString dbName = fullDbName;\nif (Strings.isNullOrEmpty(request.getCluster())) {\ndbName = request.getDb();\n}\nthrow new UserException(\"unknown database, database=\" + dbName);\n}\nTable table = null;\ndb.readLock();\ntry {\ntable = db.getTable(request.tbl);\nif (table == null || table.getType() != TableType.OLAP) {\nthrow new UserException(\"unknown table, table=\" + request.tbl);\n}\n} finally {\ndb.readUnlock();\n}\nlong timeoutSecond = request.isSetTimeout() ? 
request.getTimeout() : Config.stream_load_default_timeout_second;\nMetricRepo.COUNTER_LOAD_ADD.increase(1L);\nreturn Catalog.getCurrentGlobalTransactionMgr().beginTransaction(\ndb.getId(), Lists.newArrayList(table.getId()), request.getLabel(), request.getRequestId(),\nnew TxnCoordinator(TxnSourceType.BE, clientIp),\nTransactionState.LoadJobSourceType.BACKEND_STREAMING, -1, timeoutSecond);\n}\n@Override\npublic TLoadTxnCommitResult loadTxnCommit(TLoadTxnCommitRequest request) throws TException {\nString clientAddr = getClientAddrAsString();\nLOG.debug(\"receive txn commit request. db: {}, tbl: {}, txn id: {}, backend: {}\",\nrequest.getDb(), request.getTbl(), request.getTxnId(), clientAddr);\nLOG.debug(\"txn commit request: {}\", request);\nTLoadTxnCommitResult result = new TLoadTxnCommitResult();\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\ntry {\nif (!loadTxnCommitImpl(request)) {\nstatus.setStatusCode(TStatusCode.PUBLISH_TIMEOUT);\nstatus.addToErrorMsgs(\"transaction commit successfully, BUT data will be visible later\");\n}\n} catch (UserException e) {\nLOG.warn(\"failed to commit txn: {}: {}\", request.getTxnId(), e.getMessage());\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\nreturn result;\n}\nprivate boolean loadTxnCommitImpl(TLoadTxnCommitRequest request) throws UserException {\nString cluster = request.getCluster();\nif (Strings.isNullOrEmpty(cluster)) {\ncluster = SystemInfoService.DEFAULT_CLUSTER;\n}\nif (request.isSetAuthCode()) {\n} else {\ncheckPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\nrequest.getTbl(), request.getUserIp(), PrivPredicate.LOAD);\n}\nCatalog catalog = Catalog.getCurrentCatalog();\nString fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\nDatabase db = catalog.getDb(fullDbName);\nif (db == null) {\nString dbName = fullDbName;\nif (Strings.isNullOrEmpty(request.getCluster())) {\ndbName = request.getDb();\n}\nthrow new UserException(\"unknown database, database=\" + dbName);\n}\nlong timeoutMs = request.isSetThriftRpcTimeoutMs() ? request.getThriftRpcTimeoutMs() : 5000;\nboolean ret = Catalog.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(\ndb, request.getTxnId(),\nTabletCommitInfo.fromThrift(request.getCommitInfos()),\ntimeoutMs, TxnCommitAttachment.fromThrift(request.txnCommitAttachment));\nif (ret) {\nMetricRepo.COUNTER_LOAD_FINISHED.increase(1L);\n}\nreturn ret;\n}\n@Override\npublic TLoadTxnRollbackResult loadTxnRollback(TLoadTxnRollbackRequest request) throws TException {\nString clientAddr = getClientAddrAsString();\nLOG.debug(\"receive txn rollback request. 
db: {}, tbl: {}, txn id: {}, reason: {}, backend: {}\",\nrequest.getDb(), request.getTbl(), request.getTxnId(), request.getReason(), clientAddr);\nLOG.debug(\"txn rollback request: {}\", request);\nTLoadTxnRollbackResult result = new TLoadTxnRollbackResult();\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\ntry {\nloadTxnRollbackImpl(request);\n} catch (UserException e) {\nLOG.warn(\"failed to rollback txn {}: {}\", request.getTxnId(), e.getMessage());\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\nreturn result;\n}\nprivate void loadTxnRollbackImpl(TLoadTxnRollbackRequest request) throws UserException {\nString cluster = request.getCluster();\nif (Strings.isNullOrEmpty(cluster)) {\ncluster = SystemInfoService.DEFAULT_CLUSTER;\n}\nif (request.isSetAuthCode()) {\n} else {\ncheckPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\nrequest.getTbl(), request.getUserIp(), PrivPredicate.LOAD);\n}\nString dbName = ClusterNamespace.getFullName(cluster, request.getDb());\nDatabase db = Catalog.getCurrentCatalog().getDb(dbName);\nif (db == null) {\nthrow new MetaNotFoundException(\"db \" + request.getDb() + \" does not exist\");\n}\nlong dbId = db.getId();\nCatalog.getCurrentGlobalTransactionMgr().abortTransaction(dbId, request.getTxnId(),\nrequest.isSetReason() ? request.getReason() : \"system cancel\",\nTxnCommitAttachment.fromThrift(request.getTxnCommitAttachment()));\n}\n@Override\npublic TStreamLoadPutResult streamLoadPut(TStreamLoadPutRequest request) {\nString clientAddr = getClientAddrAsString();\nLOG.debug(\"receive stream load put request. db:{}, tbl: {}, txn id: {}, load id: {}, backend: {}\",\nrequest.getDb(), request.getTbl(), request.getTxnId(), DebugUtil.printId(request.getLoadId()),\nclientAddr);\nLOG.debug(\"stream load put request: {}\", request);\nTStreamLoadPutResult result = new TStreamLoadPutResult();\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\ntry {\nresult.setParams(streamLoadPutImpl(request));\n} catch (UserException e) {\nLOG.warn(\"failed to get stream load plan: {}\", e.getMessage());\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\nreturn result;\n}\nprivate TExecPlanFragmentParams streamLoadPutImpl(TStreamLoadPutRequest request) throws UserException {\nString cluster = request.getCluster();\nif (Strings.isNullOrEmpty(cluster)) {\ncluster = SystemInfoService.DEFAULT_CLUSTER;\n}\nCatalog catalog = Catalog.getCurrentCatalog();\nString fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\nDatabase db = catalog.getDb(fullDbName);\nif (db == null) {\nString dbName = fullDbName;\nif (Strings.isNullOrEmpty(request.getCluster())) {\ndbName = request.getDb();\n}\nthrow new UserException(\"unknown database, database=\" + dbName);\n}\nlong timeoutMs = request.isSetThriftRpcTimeoutMs() ? 
request.getThriftRpcTimeoutMs() : 5000;\nif (!db.tryReadLock(timeoutMs, TimeUnit.MILLISECONDS)) {\nthrow new UserException(\"get database read lock timeout, database=\" + fullDbName);\n}\ntry {\nTable table = db.getTable(request.getTbl());\nif (table == null) {\nthrow new UserException(\"unknown table, table=\" + request.getTbl());\n}\nif (!(table instanceof OlapTable)) {\nthrow new UserException(\"load table type is not OlapTable, type=\" + table.getClass());\n}\nStreamLoadTask streamLoadTask = StreamLoadTask.fromTStreamLoadPutRequest(request, db);\nStreamLoadPlanner planner = new StreamLoadPlanner(db, (OlapTable) table, streamLoadTask);\nTExecPlanFragmentParams plan = planner.plan(streamLoadTask.getId());\nTransactionState txnState = Catalog.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), request.getTxnId());\nif (txnState == null) {\nthrow new UserException(\"txn does not exist: \" + request.getTxnId());\n}\ntxnState.addTableIndexes((OlapTable) table);\nreturn plan;\n} finally {\ndb.readUnlock();\n}\n}\n@Override\npublic TStatus snapshotLoaderReport(TSnapshotLoaderReportRequest request) throws TException {\nif (Catalog.getCurrentCatalog().getBackupHandler().report(request.getTaskType(), request.getJobId(),\nrequest.getTaskId(), request.getFinishedNum(), request.getTotalNum())) {\nreturn new TStatus(TStatusCode.OK);\n}\nreturn new TStatus(TStatusCode.CANCELLED);\n}\nprivate TNetworkAddress getClientAddr() {\nThriftServerContext connectionContext = ThriftServerEventProcessor.getConnectionContext();\nif (connectionContext != null) {\nreturn connectionContext.getClient();\n}\nreturn null;\n}\nprivate String getClientAddrAsString() {\nTNetworkAddress addr = getClientAddr();\nreturn addr == null ? \"unknown\" : addr.hostname;\n}\n}", + "context_after": "class FrontendServiceImpl implements FrontendService.Iface {\nprivate static final Logger LOG = LogManager.getLogger(FrontendServiceImpl.class);\nprivate MasterImpl masterImpl;\nprivate ExecuteEnv exeEnv;\npublic FrontendServiceImpl(ExecuteEnv exeEnv) {\nmasterImpl = new MasterImpl();\nthis.exeEnv = exeEnv;\n}\n@Override\npublic TGetDbsResult getDbNames(TGetDbsParams params) throws TException {\nLOG.debug(\"get db request: {}\", params);\nTGetDbsResult result = new TGetDbsResult();\nList dbs = Lists.newArrayList();\nPatternMatcher matcher = null;\nif (params.isSetPattern()) {\ntry {\nmatcher = PatternMatcher.createMysqlPattern(params.getPattern(),\nCaseSensibility.DATABASE.getCaseSensibility());\n} catch (AnalysisException e) {\nthrow new TException(\"Pattern is in bad format: \" + params.getPattern());\n}\n}\nCatalog catalog = Catalog.getCurrentCatalog();\nList dbNames = catalog.getDbNames();\nLOG.debug(\"get db names: {}\", dbNames);\nUserIdentity currentUser = null;\nif (params.isSetCurrentUserIdent()) {\ncurrentUser = UserIdentity.fromThrift(params.current_user_ident);\n} else {\ncurrentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n}\nfor (String fullName : dbNames) {\nif (!catalog.getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) {\ncontinue;\n}\nfinal String db = ClusterNamespace.getNameFromFullName(fullName);\nif (matcher != null && !matcher.match(db)) {\ncontinue;\n}\ndbs.add(fullName);\n}\nresult.setDbs(dbs);\nreturn result;\n}\n@Override\npublic TGetTablesResult getTableNames(TGetTablesParams params) throws TException {\nLOG.debug(\"get table name request: {}\", params);\nTGetTablesResult result = new TGetTablesResult();\nList tablesResult = 
Lists.newArrayList();\nresult.setTables(tablesResult);\nPatternMatcher matcher = null;\nif (params.isSetPattern()) {\ntry {\nmatcher = PatternMatcher.createMysqlPattern(params.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n} catch (AnalysisException e) {\nthrow new TException(\"Pattern is in bad format: \" + params.getPattern());\n}\n}\nDatabase db = Catalog.getCurrentCatalog().getDb(params.db);\nUserIdentity currentUser = null;\nif (params.isSetCurrentUserIdent()) {\ncurrentUser = UserIdentity.fromThrift(params.current_user_ident);\n} else {\ncurrentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n}\nif (db != null) {\nfor (String tableName : db.getTableNamesWithLock()) {\nLOG.debug(\"get table: {}, wait to check\", tableName);\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(currentUser, params.db,\ntableName, PrivPredicate.SHOW)) {\ncontinue;\n}\nif (matcher != null && !matcher.match(tableName)) {\ncontinue;\n}\ntablesResult.add(tableName);\n}\n}\nreturn result;\n}\n@Override\npublic TListTableStatusResult listTableStatus(TGetTablesParams params) throws TException {\nLOG.debug(\"get list table request: {}\", params);\nTListTableStatusResult result = new TListTableStatusResult();\nList tablesResult = Lists.newArrayList();\nresult.setTables(tablesResult);\nPatternMatcher matcher = null;\nif (params.isSetPattern()) {\ntry {\nmatcher = PatternMatcher.createMysqlPattern(params.getPattern(),\nCaseSensibility.TABLE.getCaseSensibility());\n} catch (AnalysisException e) {\nthrow new TException(\"Pattern is in bad format \" + params.getPattern());\n}\n}\nDatabase db = Catalog.getCurrentCatalog().getDb(params.db);\nUserIdentity currentUser = null;\nif (params.isSetCurrentUserIdent()) {\ncurrentUser = UserIdentity.fromThrift(params.current_user_ident);\n} else {\ncurrentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n}\nif (db != null) {\ndb.readLock();\ntry {\nfor (Table table : db.getTables()) {\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(currentUser, params.db,\ntable.getName(), PrivPredicate.SHOW)) {\ncontinue;\n}\nif (matcher != null && !matcher.match(table.getName())) {\ncontinue;\n}\nTTableStatus status = new TTableStatus();\nstatus.setName(table.getName());\nstatus.setType(table.getMysqlType());\nstatus.setEngine(table.getEngine());\nstatus.setComment(table.getComment());\nstatus.setCreateTime(table.getCreateTime());\nstatus.setLastCheckTime(table.getLastCheckTime());\ntablesResult.add(status);\n}\n} finally {\ndb.readUnlock();\n}\n}\nreturn result;\n}\n@Override\npublic TFeResult updateExportTaskStatus(TUpdateExportTaskStatusRequest request) throws TException {\nTStatus status = new TStatus(TStatusCode.OK);\nTFeResult result = new TFeResult(FrontendServiceVersion.V1, status);\nreturn result;\n}\n@Override\npublic TDescribeTableResult describeTable(TDescribeTableParams params) throws TException {\nLOG.debug(\"get desc table request: {}\", params);\nTDescribeTableResult result = new TDescribeTableResult();\nList columns = Lists.newArrayList();\nresult.setColumns(columns);\nUserIdentity currentUser = null;\nif (params.isSetCurrentUserIdent()) {\ncurrentUser = UserIdentity.fromThrift(params.current_user_ident);\n} else {\ncurrentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n}\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(currentUser, params.db,\nparams.getTableName(), PrivPredicate.SHOW)) {\nreturn result;\n}\nDatabase db = 
Catalog.getCurrentCatalog().getDb(params.db);\nif (db != null) {\ndb.readLock();\ntry {\nTable table = db.getTable(params.getTableName());\nif (table != null) {\nfor (Column column : table.getBaseSchema(params.isShowHiddenColumns())) {\nfinal TColumnDesc desc = new TColumnDesc(column.getName(), column.getDataType().toThrift());\nfinal Integer precision = column.getOriginType().getPrecision();\nif (precision != null) {\ndesc.setColumnPrecision(precision);\n}\nfinal Integer columnLength = column.getOriginType().getColumnSize();\nif (columnLength != null) {\ndesc.setColumnLength(columnLength);\n}\nfinal Integer decimalDigits = column.getOriginType().getDecimalDigits();\nif (decimalDigits != null) {\ndesc.setColumnScale(decimalDigits);\n}\nfinal TColumnDef colDef = new TColumnDef(desc);\nfinal String comment = column.getComment();\nif(comment != null) {\ncolDef.setComment(comment);\n}\ncolumns.add(colDef);\n}\n}\n} finally {\ndb.readUnlock();\n}\n}\nreturn result;\n}\n@Override\npublic TShowVariableResult showVariables(TShowVariableRequest params) throws TException {\nTShowVariableResult result = new TShowVariableResult();\nMap map = Maps.newHashMap();\nresult.setVariables(map);\nConnectContext ctx = exeEnv.getScheduler().getContext(params.getThreadId());\nif (ctx == null) {\nreturn result;\n}\nList> rows = VariableMgr.dump(SetType.fromThrift(params.getVarType()), ctx.getSessionVariable(),\nnull);\nfor (List row : rows) {\nmap.put(row.get(0), row.get(1));\n}\nreturn result;\n}\n@Override\npublic TReportExecStatusResult reportExecStatus(TReportExecStatusParams params) throws TException {\nreturn QeProcessorImpl.INSTANCE.reportExecStatus(params, getClientAddr());\n}\n@Override\npublic TMasterResult finishTask(TFinishTaskRequest request) throws TException {\nreturn masterImpl.finishTask(request);\n}\n@Override\npublic TMasterResult report(TReportRequest request) throws TException {\nreturn masterImpl.report(request);\n}\n@Override\npublic TFetchResourceResult fetchResource() throws TException {\nreturn masterImpl.fetchResource();\n}\n@Deprecated\n@Override\npublic TFeResult miniLoad(TMiniLoadRequest request) throws TException {\nLOG.debug(\"receive mini load request: label: {}, db: {}, tbl: {}, backend: {}\",\nrequest.getLabel(), request.getDb(), request.getTbl(), request.getBackend());\nConnectContext context = new ConnectContext(null);\nString cluster = SystemInfoService.DEFAULT_CLUSTER;\nif (request.isSetCluster()) {\ncluster = request.cluster;\n}\nfinal String fullDbName = ClusterNamespace.getFullName(cluster, request.db);\nrequest.setDb(fullDbName);\ncontext.setCluster(cluster);\ncontext.setDatabase(ClusterNamespace.getFullName(cluster, request.db));\ncontext.setQualifiedUser(ClusterNamespace.getFullName(cluster, request.user));\ncontext.setCatalog(Catalog.getCurrentCatalog());\ncontext.getState().reset();\ncontext.setThreadLocalInfo();\nTStatus status = new TStatus(TStatusCode.OK);\nTFeResult result = new TFeResult(FrontendServiceVersion.V1, status);\ntry {\nif (request.isSetSubLabel()) {\nExecuteEnv.getInstance().getMultiLoadMgr().load(request);\n} else {\nif (Catalog.getCurrentCatalog().getLoadManager().createLoadJobV1FromRequest(request)) {\ntry {\nlogMiniLoadStmt(request);\n} catch (Exception e) {\nLOG.warn(\"failed log mini load stmt\", e);\n}\n}\n}\n} catch (UserException e) {\nLOG.warn(\"add mini load error: {}\", e.getMessage());\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\n} catch (Throwable e) {\nLOG.warn(\"unexpected exception when 
adding mini load\", e);\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\n} finally {\nConnectContext.remove();\n}\nLOG.debug(\"mini load result: {}\", result);\nreturn result;\n}\nprivate void logMiniLoadStmt(TMiniLoadRequest request) throws UnknownHostException {\nString stmt = getMiniLoadStmt(request);\nAuditEvent auditEvent = new AuditEventBuilder().setEventType(EventType.AFTER_QUERY)\n.setClientIp(request.user_ip + \":0\")\n.setUser(request.user)\n.setDb(request.db)\n.setState(TStatusCode.OK.name())\n.setQueryTime(0)\n.setStmt(stmt).build();\nCatalog.getCurrentAuditEventProcessor().handleAuditEvent(auditEvent);\n}\nprivate String getMiniLoadStmt(TMiniLoadRequest request) throws UnknownHostException {\nStringBuilder stringBuilder = new StringBuilder();\nstringBuilder.append(\"curl --location-trusted -u user:passwd -T \");\nif (request.files.size() == 1) {\nstringBuilder.append(request.files.get(0));\n} else if (request.files.size() > 1) {\nstringBuilder.append(\"\\\"{\").append(Joiner.on(\",\").join(request.files)).append(\"}\\\"\");\n}\nInetAddress masterAddress = FrontendOptions.getLocalHost();\nstringBuilder.append(\" http://\").append(masterAddress.getHostAddress()).append(\":\");\nstringBuilder.append(Config.http_port).append(\"/api/\").append(request.db).append(\"/\");\nstringBuilder.append(request.tbl).append(\"/_load?label=\").append(request.label);\nif (!request.properties.isEmpty()) {\nstringBuilder.append(\"&\");\nList<String> props = Lists.newArrayList();\nfor (Map.Entry<String, String> entry : request.properties.entrySet()) {\nString prop = entry.getKey() + \"=\" + entry.getValue();\nprops.add(prop);\n}\nstringBuilder.append(Joiner.on(\"&\").join(props));\n}\nreturn stringBuilder.toString();\n}\n@Override\npublic TFeResult updateMiniEtlTaskStatus(TUpdateMiniEtlTaskStatusRequest request) throws TException {\nTFeResult result = new TFeResult();\nresult.setProtocolVersion(FrontendServiceVersion.V1);\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\nTUniqueId etlTaskId = request.getEtlTaskId();\nlong jobId = etlTaskId.getHi();\nlong taskId = etlTaskId.getLo();\nLoadJob job = Catalog.getCurrentCatalog().getLoadInstance().getLoadJob(jobId);\nif (job == null) {\nString failMsg = \"job does not exist. id: \" + jobId;\nLOG.warn(failMsg);\nstatus.setStatusCode(TStatusCode.CANCELLED);\nstatus.addToErrorMsgs(failMsg);\nreturn result;\n}\nMiniEtlTaskInfo taskInfo = job.getMiniEtlTask(taskId);\nif (taskInfo == null) {\nString failMsg = \"task info does not exist. task id: \" + taskId + \", job id: \" + jobId;\nLOG.warn(failMsg);\nstatus.setStatusCode(TStatusCode.CANCELLED);\nstatus.addToErrorMsgs(failMsg);\nreturn result;\n}\nTMiniLoadEtlStatusResult statusResult = request.getEtlTaskStatus();\nLOG.debug(\"load job id: {}, etl task id: {}, status: {}\", jobId, taskId, statusResult);\nEtlStatus taskStatus = taskInfo.getTaskStatus();\nif (taskStatus.setState(statusResult.getEtlState())) {\nif (statusResult.isSetCounters()) {\ntaskStatus.setCounters(statusResult.getCounters());\n}\nif (statusResult.isSetTrackingUrl()) {\ntaskStatus.setTrackingUrl(statusResult.getTrackingUrl());\n}\nif (statusResult.isSetFileMap()) {\ntaskStatus.setFileMap(statusResult.getFileMap());\n}\n}\nreturn result;\n}\n@Override\npublic TMiniLoadBeginResult miniLoadBegin(TMiniLoadBeginRequest request) throws TException {\nLOG.debug(\"receive mini load begin request. 
label: {}, user: {}, ip: {}\",\nrequest.getLabel(), request.getUser(), request.getUserIp());\nTMiniLoadBeginResult result = new TMiniLoadBeginResult();\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\ntry {\nString cluster = SystemInfoService.DEFAULT_CLUSTER;\nif (request.isSetCluster()) {\ncluster = request.cluster;\n}\ncheckPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\nrequest.getTbl(), request.getUserIp(), PrivPredicate.LOAD);\nif (request.isSetSubLabel()) {\n} else {\nresult.setTxnId(Catalog.getCurrentCatalog().getLoadManager().createLoadJobFromMiniLoad(request));\n}\nreturn result;\n} catch (UserException e) {\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\nreturn result;\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\n}\n@Override\npublic TFeResult isMethodSupported(TIsMethodSupportedRequest request) throws TException {\nTStatus status = new TStatus(TStatusCode.OK);\nTFeResult result = new TFeResult(FrontendServiceVersion.V1, status);\nswitch (request.getFunctionName()){\ncase \"STREAMING_MINI_LOAD\":\nbreak;\ndefault:\nstatus.setStatusCode(NOT_IMPLEMENTED_ERROR);\nbreak;\n}\nreturn result;\n}\n@Override\npublic TMasterOpResult forward(TMasterOpRequest params) throws TException {\nTNetworkAddress clientAddr = getClientAddr();\nif (clientAddr != null) {\nFrontend fe = Catalog.getCurrentCatalog().getFeByHost(clientAddr.getHostname());\nif (fe == null) {\nLOG.warn(\"reject request from invalid host. client: {}\", clientAddr);\nthrow new TException(\"request from invalid host was rejected.\");\n}\n}\nLOG.debug(\"receive forwarded stmt {} from FE: {}\", params.getStmtId(), clientAddr.getHostname());\nConnectContext context = new ConnectContext(null);\nConnectProcessor processor = new ConnectProcessor(context);\nTMasterOpResult result = processor.proxyExecute(params);\nConnectContext.remove();\nreturn result;\n}\nprivate void checkPasswordAndPrivs(String cluster, String user, String passwd, String db, String tbl,\nString clientIp, PrivPredicate predicate) throws AuthenticationException {\nfinal String fullUserName = ClusterNamespace.getFullName(cluster, user);\nfinal String fullDbName = ClusterNamespace.getFullName(cluster, db);\nList currentUser = Lists.newArrayList();\nif (!Catalog.getCurrentCatalog().getAuth().checkPlainPassword(fullUserName, clientIp, passwd, currentUser)) {\nthrow new AuthenticationException(\"Access denied for \" + fullUserName + \"@\" + clientIp);\n}\nPreconditions.checkState(currentUser.size() == 1);\nif (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(currentUser.get(0), fullDbName, tbl, predicate)) {\nthrow new AuthenticationException(\n\"Access denied; you need (at least one of) the LOAD privilege(s) for this operation\");\n}\n}\n@Override\npublic TFeResult loadCheck(TLoadCheckRequest request) throws TException {\nLOG.debug(\"receive load check request. 
label: {}, user: {}, ip: {}\",\nrequest.getLabel(), request.getUser(), request.getUserIp());\nTStatus status = new TStatus(TStatusCode.OK);\nTFeResult result = new TFeResult(FrontendServiceVersion.V1, status);\ntry {\nString cluster = SystemInfoService.DEFAULT_CLUSTER;\nif (request.isSetCluster()) {\ncluster = request.cluster;\n}\ncheckPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\nrequest.getTbl(), request.getUserIp(), PrivPredicate.LOAD);\n} catch (UserException e) {\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\nreturn result;\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\nreturn result;\n}\n@Override\nprivate long loadTxnBeginImpl(TLoadTxnBeginRequest request, String clientIp) throws UserException {\nString cluster = request.getCluster();\nif (Strings.isNullOrEmpty(cluster)) {\ncluster = SystemInfoService.DEFAULT_CLUSTER;\n}\ncheckPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\nrequest.getTbl(), request.getUserIp(), PrivPredicate.LOAD);\nif (Strings.isNullOrEmpty(request.getLabel())) {\nthrow new UserException(\"empty label in begin request\");\n}\nCatalog catalog = Catalog.getCurrentCatalog();\nString fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\nDatabase db = catalog.getDb(fullDbName);\nif (db == null) {\nString dbName = fullDbName;\nif (Strings.isNullOrEmpty(request.getCluster())) {\ndbName = request.getDb();\n}\nthrow new UserException(\"unknown database, database=\" + dbName);\n}\nTable table = null;\ndb.readLock();\ntry {\ntable = db.getTable(request.tbl);\nif (table == null || table.getType() != TableType.OLAP) {\nthrow new UserException(\"unknown table, table=\" + request.tbl);\n}\n} finally {\ndb.readUnlock();\n}\nlong timeoutSecond = request.isSetTimeout() ? 
request.getTimeout() : Config.stream_load_default_timeout_second;\nMetricRepo.COUNTER_LOAD_ADD.increase(1L);\nreturn Catalog.getCurrentGlobalTransactionMgr().beginTransaction(\ndb.getId(), Lists.newArrayList(table.getId()), request.getLabel(), request.getRequestId(),\nnew TxnCoordinator(TxnSourceType.BE, clientIp),\nTransactionState.LoadJobSourceType.BACKEND_STREAMING, -1, timeoutSecond);\n}\n@Override\npublic TLoadTxnCommitResult loadTxnCommit(TLoadTxnCommitRequest request) throws TException {\nString clientAddr = getClientAddrAsString();\nLOG.debug(\"receive txn commit request: {}, backend: {}\", request, clientAddr);\nTLoadTxnCommitResult result = new TLoadTxnCommitResult();\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\ntry {\nif (!loadTxnCommitImpl(request)) {\nstatus.setStatusCode(TStatusCode.PUBLISH_TIMEOUT);\nstatus.addToErrorMsgs(\"transaction commit successfully, BUT data will be visible later\");\n}\n} catch (UserException e) {\nLOG.warn(\"failed to commit txn: {}: {}\", request.getTxnId(), e.getMessage());\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\nreturn result;\n}\nprivate boolean loadTxnCommitImpl(TLoadTxnCommitRequest request) throws UserException {\nString cluster = request.getCluster();\nif (Strings.isNullOrEmpty(cluster)) {\ncluster = SystemInfoService.DEFAULT_CLUSTER;\n}\nif (request.isSetAuthCode()) {\n} else {\ncheckPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\nrequest.getTbl(), request.getUserIp(), PrivPredicate.LOAD);\n}\nCatalog catalog = Catalog.getCurrentCatalog();\nString fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\nDatabase db = catalog.getDb(fullDbName);\nif (db == null) {\nString dbName = fullDbName;\nif (Strings.isNullOrEmpty(request.getCluster())) {\ndbName = request.getDb();\n}\nthrow new UserException(\"unknown database, database=\" + dbName);\n}\nlong timeoutMs = request.isSetThriftRpcTimeoutMs() ? 
request.getThriftRpcTimeoutMs() : 5000;\nboolean ret = Catalog.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(\ndb, request.getTxnId(),\nTabletCommitInfo.fromThrift(request.getCommitInfos()),\ntimeoutMs, TxnCommitAttachment.fromThrift(request.txnCommitAttachment));\nif (ret) {\nMetricRepo.COUNTER_LOAD_FINISHED.increase(1L);\n}\nreturn ret;\n}\n@Override\npublic TLoadTxnRollbackResult loadTxnRollback(TLoadTxnRollbackRequest request) throws TException {\nString clientAddr = getClientAddrAsString();\nLOG.debug(\"receive txn rollback request: {}, backend: {}\", request, clientAddr);\nTLoadTxnRollbackResult result = new TLoadTxnRollbackResult();\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\ntry {\nloadTxnRollbackImpl(request);\n} catch (UserException e) {\nLOG.warn(\"failed to rollback txn {}: {}\", request.getTxnId(), e.getMessage());\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\nreturn result;\n}\nprivate void loadTxnRollbackImpl(TLoadTxnRollbackRequest request) throws UserException {\nString cluster = request.getCluster();\nif (Strings.isNullOrEmpty(cluster)) {\ncluster = SystemInfoService.DEFAULT_CLUSTER;\n}\nif (request.isSetAuthCode()) {\n} else {\ncheckPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\nrequest.getTbl(), request.getUserIp(), PrivPredicate.LOAD);\n}\nString dbName = ClusterNamespace.getFullName(cluster, request.getDb());\nDatabase db = Catalog.getCurrentCatalog().getDb(dbName);\nif (db == null) {\nthrow new MetaNotFoundException(\"db \" + request.getDb() + \" does not exist\");\n}\nlong dbId = db.getId();\nCatalog.getCurrentGlobalTransactionMgr().abortTransaction(dbId, request.getTxnId(),\nrequest.isSetReason() ? request.getReason() : \"system cancel\",\nTxnCommitAttachment.fromThrift(request.getTxnCommitAttachment()));\n}\n@Override\npublic TStreamLoadPutResult streamLoadPut(TStreamLoadPutRequest request) {\nString clientAddr = getClientAddrAsString();\nLOG.debug(\"receive stream load put request: {}, backend: {}\", request, clientAddr);\nTStreamLoadPutResult result = new TStreamLoadPutResult();\nTStatus status = new TStatus(TStatusCode.OK);\nresult.setStatus(status);\ntry {\nresult.setParams(streamLoadPutImpl(request));\n} catch (UserException e) {\nLOG.warn(\"failed to get stream load plan: {}\", e.getMessage());\nstatus.setStatusCode(TStatusCode.ANALYSIS_ERROR);\nstatus.addToErrorMsgs(e.getMessage());\n} catch (Throwable e) {\nLOG.warn(\"catch unknown result.\", e);\nstatus.setStatusCode(TStatusCode.INTERNAL_ERROR);\nstatus.addToErrorMsgs(Strings.nullToEmpty(e.getMessage()));\nreturn result;\n}\nreturn result;\n}\nprivate TExecPlanFragmentParams streamLoadPutImpl(TStreamLoadPutRequest request) throws UserException {\nString cluster = request.getCluster();\nif (Strings.isNullOrEmpty(cluster)) {\ncluster = SystemInfoService.DEFAULT_CLUSTER;\n}\nCatalog catalog = Catalog.getCurrentCatalog();\nString fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\nDatabase db = catalog.getDb(fullDbName);\nif (db == null) {\nString dbName = fullDbName;\nif (Strings.isNullOrEmpty(request.getCluster())) {\ndbName = request.getDb();\n}\nthrow new UserException(\"unknown database, database=\" + dbName);\n}\nlong timeoutMs = request.isSetThriftRpcTimeoutMs() ? 
request.getThriftRpcTimeoutMs() : 5000;\nif (!db.tryReadLock(timeoutMs, TimeUnit.MILLISECONDS)) {\nthrow new UserException(\"get database read lock timeout, database=\" + fullDbName);\n}\ntry {\nTable table = db.getTable(request.getTbl());\nif (table == null) {\nthrow new UserException(\"unknown table, table=\" + request.getTbl());\n}\nif (!(table instanceof OlapTable)) {\nthrow new UserException(\"load table type is not OlapTable, type=\" + table.getClass());\n}\nStreamLoadTask streamLoadTask = StreamLoadTask.fromTStreamLoadPutRequest(request, db);\nStreamLoadPlanner planner = new StreamLoadPlanner(db, (OlapTable) table, streamLoadTask);\nTExecPlanFragmentParams plan = planner.plan(streamLoadTask.getId());\nTransactionState txnState = Catalog.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), request.getTxnId());\nif (txnState == null) {\nthrow new UserException(\"txn does not exist: \" + request.getTxnId());\n}\ntxnState.addTableIndexes((OlapTable) table);\nreturn plan;\n} finally {\ndb.readUnlock();\n}\n}\n@Override\npublic TStatus snapshotLoaderReport(TSnapshotLoaderReportRequest request) throws TException {\nif (Catalog.getCurrentCatalog().getBackupHandler().report(request.getTaskType(), request.getJobId(),\nrequest.getTaskId(), request.getFinishedNum(), request.getTotalNum())) {\nreturn new TStatus(TStatusCode.OK);\n}\nreturn new TStatus(TStatusCode.CANCELLED);\n}\nprivate TNetworkAddress getClientAddr() {\nThriftServerContext connectionContext = ThriftServerEventProcessor.getConnectionContext();\nif (connectionContext != null) {\nreturn connectionContext.getClient();\n}\nreturn null;\n}\nprivate String getClientAddrAsString() {\nTNetworkAddress addr = getClientAddr();\nreturn addr == null ? \"unknown\" : addr.hostname;\n}\n}" + }, + { + "comment": "The parameter is the estimated initial capacity. The implementation will automatically adjust the size of the table to be a mathematical product of the initial capacity and the load factor. IOW if you expect to store 100 elements, pass in 100 as the initial capacity.", + "method_body": "public HashSet apply(final int value) {\nreturn new HashSet<>(value);\n}", + "target_code": "return new HashSet<>(value);", + "method_body_after": "public HashSet apply(final int value) {\nreturn new HashSet<>(getInitialCapacityFromExpectedSize(value));\n}", + "context_before": "class HashSetFactory implements IntFunction> {\nprivate static final HashSetFactory INSTANCE = new HashSetFactory<>();\nprivate HashSetFactory() {\n}\n@SuppressWarnings(\"unchecked\")\npublic static HashSetFactory getInstance() {\nreturn (HashSetFactory) INSTANCE;\n}\n}", + "context_after": "class HashSetFactory implements IntFunction> {\nprivate static final HashSetFactory INSTANCE = new HashSetFactory<>();\nprivate HashSetFactory() {\n}\n/**\n* As the default loadFactor is of 0.75, we need to calculate the initial capacity from the expected size to avoid\n* resizing the collection when we populate the collection with all the initial elements. 
We use a calculation\n* similar to what is done in {@link java.util.HashMap\n*\n* @param expectedSize the expected size of the collection\n* @return the initial capacity of the collection\n*/\nprivate int getInitialCapacityFromExpectedSize(int expectedSize) {\nif (expectedSize < 3) {\nreturn expectedSize + 1;\n}\nreturn (int) ((float) expectedSize / 0.75f + 1.0f);\n}\n@SuppressWarnings(\"unchecked\")\npublic static HashSetFactory getInstance() {\nreturn (HashSetFactory) INSTANCE;\n}\n}" + }, + { + "comment": "`unusedAlgorithmsCollection` name could be `actual`", + "method_body": "void assertFind() {\nShardingRuleConfiguration ruleConfig = new ShardingRuleConfiguration();\nShardingTableRuleConfiguration shardingTableRuleConfiguration = getShardingTableRuleConfiguration();\nMap allAlgorithms = getAlgorithms();\nruleConfig.getTables().add(shardingTableRuleConfiguration);\nruleConfig.getShardingAlgorithms().putAll(allAlgorithms);\nruleConfig.setDefaultDatabaseShardingStrategy(new StandardShardingStrategyConfiguration(\"order_id\", USED_DATABASE_SHARDING_DEFAULT_ALGORITHM));\nruleConfig.setDefaultTableShardingStrategy(new StandardShardingStrategyConfiguration(\"order_id\", USED_TABLE_SHARDING_DEFAULT_ALGORITHM));\nCollection unusedAlgorithmsCollection = UnusedAlgorithmFinder.find(ruleConfig);\nassertNotNull(unusedAlgorithmsCollection);\nassertThat(unusedAlgorithmsCollection.size(), is(1));\nassertTrue(unusedAlgorithmsCollection.contains(UNUSED_ALGORITHM));\n}", + "target_code": "Collection unusedAlgorithmsCollection = UnusedAlgorithmFinder.find(ruleConfig);", + "method_body_after": "void assertFind() {\nShardingRuleConfiguration ruleConfig = new ShardingRuleConfiguration();\nShardingTableRuleConfiguration shardingTableRuleConfig = getShardingTableRuleConfiguration();\nruleConfig.getTables().add(shardingTableRuleConfig);\nruleConfig.getShardingAlgorithms().putAll(getAlgorithms());\nruleConfig.setDefaultDatabaseShardingStrategy(new StandardShardingStrategyConfiguration(\"order_id\", USED_DATABASE_SHARDING_DEFAULT_ALGORITHM));\nruleConfig.setDefaultTableShardingStrategy(new StandardShardingStrategyConfiguration(\"order_id\", USED_TABLE_SHARDING_DEFAULT_ALGORITHM));\nCollection actual = UnusedAlgorithmFinder.find(ruleConfig);\nassertNotNull(actual);\nassertThat(actual.size(), is(1));\nassertTrue(actual.contains(UNUSED_ALGORITHM));\n}", + "context_before": "class UnusedAlgorithmFinderTest {\nprivate static final String USED_TABLE_SHARDING_ALGORITHM = \"used_table_sharding_algorithm\";\nprivate static final String USED_TABLE_SHARDING_DEFAULT_ALGORITHM = \"used_table_sharding_default_algorithm\";\nprivate static final String USED_DATABASE_SHARDING_ALGORITHM = \"used_database_sharding_algorithm\";\nprivate static final String USED_DATABASE_SHARDING_DEFAULT_ALGORITHM = \"used_database_sharding_default_algorithm\";\nprivate static final String UNUSED_ALGORITHM = \"unused_algorithm\";\n@Test\nprivate ShardingTableRuleConfiguration getShardingTableRuleConfiguration() {\nShardingTableRuleConfiguration shardingTableRuleConfiguration = new ShardingTableRuleConfiguration(\"t_order\", null);\nshardingTableRuleConfiguration.setTableShardingStrategy(new StandardShardingStrategyConfiguration(\"order_id\", USED_TABLE_SHARDING_ALGORITHM));\nshardingTableRuleConfiguration.setDatabaseShardingStrategy(new StandardShardingStrategyConfiguration(\"order_id\", USED_DATABASE_SHARDING_ALGORITHM));\nreturn shardingTableRuleConfiguration;\n}\nprivate Map getAlgorithms() {\nreturn 
ImmutableMap.of(\nUSED_DATABASE_SHARDING_ALGORITHM, new AlgorithmConfiguration(\"INLINE\", new Properties()),\nUSED_DATABASE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration(\"INLINE\", new Properties()),\nUSED_TABLE_SHARDING_ALGORITHM, new AlgorithmConfiguration(\"INLINE\", new Properties()),\nUSED_TABLE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration(\"INLINE\", new Properties()),\nUNUSED_ALGORITHM, new AlgorithmConfiguration(\"INLINE\", new Properties()));\n}\n}", + "context_after": "class UnusedAlgorithmFinderTest {\nprivate static final String USED_TABLE_SHARDING_ALGORITHM = \"used_table_sharding_algorithm\";\nprivate static final String USED_TABLE_SHARDING_DEFAULT_ALGORITHM = \"used_table_sharding_default_algorithm\";\nprivate static final String USED_DATABASE_SHARDING_ALGORITHM = \"used_database_sharding_algorithm\";\nprivate static final String USED_DATABASE_SHARDING_DEFAULT_ALGORITHM = \"used_database_sharding_default_algorithm\";\nprivate static final String UNUSED_ALGORITHM = \"unused_algorithm\";\n@Test\nprivate ShardingTableRuleConfiguration getShardingTableRuleConfiguration() {\nShardingTableRuleConfiguration result = new ShardingTableRuleConfiguration(\"t_order\", null);\nresult.setTableShardingStrategy(new StandardShardingStrategyConfiguration(\"order_id\", USED_TABLE_SHARDING_ALGORITHM));\nresult.setDatabaseShardingStrategy(new StandardShardingStrategyConfiguration(\"order_id\", USED_DATABASE_SHARDING_ALGORITHM));\nreturn result;\n}\nprivate Map getAlgorithms() {\nreturn ImmutableMap.of(\nUSED_DATABASE_SHARDING_ALGORITHM, new AlgorithmConfiguration(\"INLINE\", new Properties()),\nUSED_DATABASE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration(\"INLINE\", new Properties()),\nUSED_TABLE_SHARDING_ALGORITHM, new AlgorithmConfiguration(\"INLINE\", new Properties()),\nUSED_TABLE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration(\"INLINE\", new Properties()),\nUNUSED_ALGORITHM, new AlgorithmConfiguration(\"INLINE\", new Properties()));\n}\n}" + }, + { + "comment": "I think I fixed it in b83b280ce2fb493eb647ffa589613c0b0362f39a which is part of #5774 ", + "method_body": "private ExpectedJobIdJobManagerRunnerFactory(JobID expectedJobId) {\nthis.expectedJobId = expectedJobId;\n}", + "target_code": "this.expectedJobId = expectedJobId;", + "method_body_after": "private ExpectedJobIdJobManagerRunnerFactory(JobID expectedJobId) {\nthis.expectedJobId = expectedJobId;\n}", + "context_before": "class ExpectedJobIdJobManagerRunnerFactory implements Dispatcher.JobManagerRunnerFactory {\nprivate final JobID expectedJobId;\n@Override\npublic JobManagerRunner createJobManagerRunner(\nResourceID resourceId,\nJobGraph jobGraph,\nConfiguration configuration,\nRpcService rpcService,\nHighAvailabilityServices highAvailabilityServices,\nHeartbeatServices heartbeatServices,\nBlobServer blobServer,\nJobManagerSharedServices jobManagerSharedServices,\nJobManagerJobMetricGroup jobManagerJobMetricGroup,\n@Nullable String metricQueryServicePath,\n@Nullable String restAddress) throws Exception {\nassertEquals(expectedJobId, jobGraph.getJobID());\nreturn Dispatcher.DefaultJobManagerRunnerFactory.INSTANCE.createJobManagerRunner(\nresourceId,\njobGraph,\nconfiguration,\nrpcService,\nhighAvailabilityServices,\nheartbeatServices,\nblobServer,\njobManagerSharedServices,\njobManagerJobMetricGroup,\nmetricQueryServicePath,\nrestAddress);\n}\n}", + "context_after": "class ExpectedJobIdJobManagerRunnerFactory implements Dispatcher.JobManagerRunnerFactory {\nprivate final JobID 
expectedJobId;\n@Override\npublic JobManagerRunner createJobManagerRunner(\nResourceID resourceId,\nJobGraph jobGraph,\nConfiguration configuration,\nRpcService rpcService,\nHighAvailabilityServices highAvailabilityServices,\nHeartbeatServices heartbeatServices,\nBlobServer blobServer,\nJobManagerSharedServices jobManagerSharedServices,\nJobManagerJobMetricGroupFactory jobManagerJobMetricGroupFactory) throws Exception {\nassertEquals(expectedJobId, jobGraph.getJobID());\nreturn Dispatcher.DefaultJobManagerRunnerFactory.INSTANCE.createJobManagerRunner(\nresourceId,\njobGraph,\nconfiguration,\nrpcService,\nhighAvailabilityServices,\nheartbeatServices,\nblobServer,\njobManagerSharedServices,\njobManagerJobMetricGroupFactory);\n}\n}" + }, + { + "comment": "Didn't touch these lines but I'll dedupe.", + "method_body": "public void setUp() throws IOException {\nthis.mockGcsUtil = mock(GcsUtil.class);\nwhen(mockGcsUtil.create(any(GcsPath.class), anyString()))\n.then(\nnew Answer() {\n@Override\npublic SeekableByteChannel answer(InvocationOnMock invocation) throws Throwable {\nreturn FileChannel.open(\nFiles.createTempFile(\"channel-\", \".tmp\"),\nStandardOpenOption.CREATE,\nStandardOpenOption.WRITE,\nStandardOpenOption.DELETE_ON_CLOSE);\n}\n});\nwhen(mockGcsUtil.create(any(GcsPath.class), anyString(), anyInt()))\n.then(\nnew Answer() {\n@Override\npublic SeekableByteChannel answer(InvocationOnMock invocation) throws Throwable {\nreturn FileChannel.open(\nFiles.createTempFile(\"channel-\", \".tmp\"),\nStandardOpenOption.CREATE,\nStandardOpenOption.WRITE,\nStandardOpenOption.DELETE_ON_CLOSE);\n}\n});\nwhen(mockGcsUtil.expand(any(GcsPath.class))).then(new Answer>() {\n@Override\npublic List answer(InvocationOnMock invocation) throws Throwable {\nreturn ImmutableList.of((GcsPath) invocation.getArguments()[0]);\n}\n});\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(VALID_STAGING_BUCKET))).thenReturn(true);\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(VALID_STAGING_BUCKET))).thenReturn(true);\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(VALID_TEMP_BUCKET))).thenReturn(true);\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(VALID_TEMP_BUCKET + \"/staging/\"))).\nthenReturn(true);\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(VALID_PROFILE_BUCKET))).thenReturn(true);\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(NON_EXISTENT_BUCKET))).thenReturn(false);\nwhen(mockGcsUtil.getObjects(anyListOf(GcsPath.class)))\n.thenAnswer(\nnew Answer>() {\n@Override\npublic List answer(\nInvocationOnMock invocationOnMock) throws Throwable {\nList gcsPaths = (List) invocationOnMock.getArguments()[0];\nList results = new ArrayList<>();\nfor (GcsPath gcsPath : gcsPaths) {\nif (gcsPath.getBucket().equals(VALID_BUCKET)) {\nStorageObject resultObject = new StorageObject();\nresultObject.setBucket(gcsPath.getBucket());\nresultObject.setName(gcsPath.getObject());\nresults.add(GcsUtil.StorageObjectOrIOException.create(resultObject));\n}\n}\nreturn results;\n}\n});\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(\"gs:\nmockJobs = mock(Dataflow.Projects.Locations.Jobs.class);\n}", + "target_code": "new Answer>() {", + "method_body_after": "public void setUp() throws IOException {\nthis.mockGcsUtil = mock(GcsUtil.class);\nwhen(mockGcsUtil.create(any(GcsPath.class), anyString()))\n.then(\nnew Answer() {\n@Override\npublic SeekableByteChannel answer(InvocationOnMock invocation) throws Throwable {\nreturn FileChannel.open(\nFiles.createTempFile(\"channel-\", 
\".tmp\"),\nStandardOpenOption.CREATE,\nStandardOpenOption.WRITE,\nStandardOpenOption.DELETE_ON_CLOSE);\n}\n});\nwhen(mockGcsUtil.create(any(GcsPath.class), anyString(), anyInt()))\n.then(\nnew Answer() {\n@Override\npublic SeekableByteChannel answer(InvocationOnMock invocation) throws Throwable {\nreturn FileChannel.open(\nFiles.createTempFile(\"channel-\", \".tmp\"),\nStandardOpenOption.CREATE,\nStandardOpenOption.WRITE,\nStandardOpenOption.DELETE_ON_CLOSE);\n}\n});\nwhen(mockGcsUtil.expand(any(GcsPath.class))).then(new Answer>() {\n@Override\npublic List answer(InvocationOnMock invocation) throws Throwable {\nreturn ImmutableList.of((GcsPath) invocation.getArguments()[0]);\n}\n});\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(VALID_STAGING_BUCKET))).thenReturn(true);\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(VALID_TEMP_BUCKET))).thenReturn(true);\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(VALID_TEMP_BUCKET + \"/staging/\"))).\nthenReturn(true);\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(VALID_PROFILE_BUCKET))).thenReturn(true);\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(NON_EXISTENT_BUCKET))).thenReturn(false);\nwhen(mockGcsUtil.getObjects(anyListOf(GcsPath.class)))\n.thenAnswer(\nnew Answer>() {\n@Override\npublic List answer(\nInvocationOnMock invocationOnMock) throws Throwable {\nList gcsPaths = (List) invocationOnMock.getArguments()[0];\nList results = new ArrayList<>();\nfor (GcsPath gcsPath : gcsPaths) {\nif (gcsPath.getBucket().equals(VALID_BUCKET)) {\nStorageObject resultObject = new StorageObject();\nresultObject.setBucket(gcsPath.getBucket());\nresultObject.setName(gcsPath.getObject());\nresults.add(GcsUtil.StorageObjectOrIOException.create(resultObject));\n}\n}\nreturn results;\n}\n});\nwhen(mockGcsUtil.bucketAccessible(GcsPath.fromUri(\"gs:\nmockJobs = mock(Dataflow.Projects.Locations.Jobs.class);\n}", + "context_before": "class DataflowRunnerTest implements Serializable {\nprivate static final String VALID_BUCKET = \"valid-bucket\";\nprivate static final String VALID_STAGING_BUCKET = \"gs:\nprivate static final String VALID_TEMP_BUCKET = \"gs:\nprivate static final String VALID_PROFILE_BUCKET = \"gs:\nprivate static final String NON_EXISTENT_BUCKET = \"gs:\nprivate static final String PROJECT_ID = \"some-project\";\nprivate static final String REGION_ID = \"some-region-1\";\n@Rule public transient TemporaryFolder tmpFolder = new TemporaryFolder();\n@Rule public transient ExpectedException thrown = ExpectedException.none();\n@Rule public transient ExpectedLogs expectedLogs = ExpectedLogs.none(DataflowRunner.class);\nprivate transient Dataflow.Projects.Locations.Jobs mockJobs;\nprivate transient GcsUtil mockGcsUtil;\nprivate static void assertValidJob(Job job) {\nassertNull(job.getId());\nassertNull(job.getCurrentState());\nassertTrue(Pattern.matches(\"[a-z]([-a-z0-9]*[a-z0-9])?\", job.getName()));\n}\n@Before\nprivate Pipeline buildDataflowPipeline(DataflowPipelineOptions options) {\noptions.setStableUniqueNames(CheckEnabled.ERROR);\noptions.setRunner(DataflowRunner.class);\nPipeline p = Pipeline.create(options);\np.apply(\"ReadMyFile\", TextIO.read().from(\"gs:\n.apply(\"WriteMyFile\", TextIO.write().to(\"gs:\nFileSystems.setDefaultPipelineOptions(options);\nreturn p;\n}\nprivate Dataflow buildMockDataflow() throws IOException {\nDataflow mockDataflowClient = mock(Dataflow.class);\nDataflow.Projects mockProjects = mock(Dataflow.Projects.class);\nDataflow.Projects.Locations mockLocations = 
mock(Dataflow.Projects.Locations.class);\nDataflow.Projects.Locations.Jobs.Create mockRequest =\nmock(Dataflow.Projects.Locations.Jobs.Create.class);\nDataflow.Projects.Locations.Jobs.List mockList = mock(\nDataflow.Projects.Locations.Jobs.List.class);\nwhen(mockDataflowClient.projects()).thenReturn(mockProjects);\nwhen(mockProjects.locations()).thenReturn(mockLocations);\nwhen(mockLocations.jobs()).thenReturn(mockJobs);\nwhen(mockJobs.create(eq(PROJECT_ID), eq(REGION_ID), isA(Job.class))).thenReturn(mockRequest);\nwhen(mockJobs.list(eq(PROJECT_ID), eq(REGION_ID))).thenReturn(mockList);\nwhen(mockList.setPageToken(anyString())).thenReturn(mockList);\nwhen(mockList.execute())\n.thenReturn(\nnew ListJobsResponse()\n.setJobs(\nArrays.asList(\nnew Job()\n.setName(\"oldjobname\")\n.setId(\"oldJobId\")\n.setCurrentState(\"JOB_STATE_RUNNING\"))));\nJob resultJob = new Job();\nresultJob.setId(\"newid\");\nwhen(mockRequest.execute()).thenReturn(resultJob);\nreturn mockDataflowClient;\n}\nprivate GcsUtil buildMockGcsUtil() throws IOException {\nGcsUtil mockGcsUtil = mock(GcsUtil.class);\nwhen(mockGcsUtil.create(any(GcsPath.class), anyString()))\n.then(new Answer() {\n@Override\npublic SeekableByteChannel answer(InvocationOnMock invocation) throws Throwable {\nreturn FileChannel.open(\nFiles.createTempFile(\"channel-\", \".tmp\"),\nStandardOpenOption.CREATE, StandardOpenOption.DELETE_ON_CLOSE);\n}\n});\nwhen(mockGcsUtil.expand(any(GcsPath.class))).then(new Answer>() {\n@Override\npublic List answer(InvocationOnMock invocation) throws Throwable {\nreturn ImmutableList.of((GcsPath) invocation.getArguments()[0]);\n}\n});\nreturn mockGcsUtil;\n}\nprivate DataflowPipelineOptions buildPipelineOptions() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(PROJECT_ID);\noptions.setTempLocation(VALID_TEMP_BUCKET);\noptions.setRegion(REGION_ID);\noptions.setFilesToStage(new LinkedList());\noptions.setDataflowClient(buildMockDataflow());\noptions.setGcsUtil(mockGcsUtil);\noptions.setGcpCredential(new TestCredential());\nFileSystems.setDefaultPipelineOptions(options);\nreturn options;\n}\n@Test\npublic void testPathValidation() {\nString[] args = new String[] {\n\"--runner=DataflowRunner\",\n\"--tempLocation=/tmp/not/a/gs/path\",\n\"--project=test-project\",\n\"--credentialFactoryClass=\" + NoopCredentialFactory.class.getName(),\n};\ntry {\nPipeline.create(PipelineOptionsFactory.fromArgs(args).create()).run();\nfail();\n} catch (RuntimeException e) {\nassertThat(\nThrowables.getStackTraceAsString(e),\ncontainsString(\"DataflowRunner requires gcpTempLocation\"));\n}\n}\n@Test\npublic void testPathExistsValidation() {\nString[] args = new String[] {\n\"--runner=DataflowRunner\",\n\"--tempLocation=gs:\n\"--project=test-project\",\n\"--credentialFactoryClass=\" + NoopCredentialFactory.class.getName(),\n};\ntry {\nPipeline.create(PipelineOptionsFactory.fromArgs(args).create()).run();\nfail();\n} catch (RuntimeException e) {\nassertThat(\nThrowables.getStackTraceAsString(e),\nboth(containsString(\"gs:\n.and(containsString(\"does not exist or is not writeable\")));\n}\n}\n@Test\npublic void testPathValidatorOverride() {\nString[] args = new String[] {\n\"--runner=DataflowRunner\",\n\"--tempLocation=/tmp/testing\",\n\"--project=test-project\",\n\"--credentialFactoryClass=\" + NoopCredentialFactory.class.getName(),\n\"--pathValidatorClass=\" + 
NoopPathValidator.class.getName(),\n};\nTestPipeline.fromOptions(PipelineOptionsFactory.fromArgs(args).create());\n}\n@Test\npublic void testFromOptionsWithUppercaseConvertsToLowercase() throws Exception {\nString mixedCase = \"ThisJobNameHasMixedCase\";\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setJobName(mixedCase);\nDataflowRunner.fromOptions(options);\nassertThat(options.getJobName(), equalTo(mixedCase.toLowerCase()));\n}\n@Test\npublic void testFromOptionsUserAgentFromPipelineInfo() throws Exception {\nDataflowPipelineOptions options = buildPipelineOptions();\nDataflowRunner.fromOptions(options);\nString expectedName = DataflowRunnerInfo.getDataflowRunnerInfo().getName().replace(\" \", \"_\");\nassertThat(options.getUserAgent(), containsString(expectedName));\nString expectedVersion = DataflowRunnerInfo.getDataflowRunnerInfo().getVersion();\nassertThat(options.getUserAgent(), containsString(expectedVersion));\n}\n@Test\npublic void testRun() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\nPipeline p = buildDataflowPipeline(options);\nDataflowPipelineJob job = (DataflowPipelineJob) p.run();\nassertEquals(\"newid\", job.getJobId());\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n/** Options for testing. */\npublic interface RuntimeTestOptions extends PipelineOptions {\nValueProvider getInput();\nvoid setInput(ValueProvider value);\nValueProvider getOutput();\nvoid setOutput(ValueProvider value);\n}\n@Test\npublic void testTextIOWithRuntimeParameters() throws IOException {\nDataflowPipelineOptions dataflowOptions = buildPipelineOptions();\nRuntimeTestOptions options = dataflowOptions.as(RuntimeTestOptions.class);\nPipeline p = buildDataflowPipeline(dataflowOptions);\np\n.apply(TextIO.read().from(options.getInput()))\n.apply(TextIO.write().to(options.getOutput()));\n}\n/**\n* Tests that all reads are consumed by at least one {@link PTransform}.\n*/\n@Test\npublic void testUnconsumedReads() throws IOException {\nDataflowPipelineOptions dataflowOptions = buildPipelineOptions();\nRuntimeTestOptions options = dataflowOptions.as(RuntimeTestOptions.class);\nPipeline p = buildDataflowPipeline(dataflowOptions);\nPCollection unconsumed = p.apply(TextIO.read().from(options.getInput()));\nDataflowRunner.fromOptions(dataflowOptions).replaceTransforms(p);\nfinal AtomicBoolean unconsumedSeenAsInput = new AtomicBoolean();\np.traverseTopologically(new PipelineVisitor.Defaults() {\n@Override\npublic void visitPrimitiveTransform(Node node) {\nunconsumedSeenAsInput.set(true);\n}\n});\nassertThat(unconsumedSeenAsInput.get(), is(true));\n}\n@Test\npublic void testRunReturnDifferentRequestId() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\nDataflow mockDataflowClient = options.getDataflowClient();\nDataflow.Projects.Locations.Jobs.Create mockRequest = mock(\nDataflow.Projects.Locations.Jobs.Create.class);\nwhen(mockDataflowClient.projects().locations().jobs()\n.create(eq(PROJECT_ID), eq(REGION_ID), any(Job.class)))\n.thenReturn(mockRequest);\nJob resultJob = new Job();\nresultJob.setId(\"newid\");\nresultJob.setClientRequestId(\"different_request_id\");\nwhen(mockRequest.execute()).thenReturn(resultJob);\nPipeline p = buildDataflowPipeline(options);\ntry {\np.run();\nfail(\"Expected DataflowJobAlreadyExistsException\");\n} catch (DataflowJobAlreadyExistsException expected) 
{\nassertThat(expected.getMessage(),\ncontainsString(\"If you want to submit a second job, try again by setting a \"\n+ \"different name using --jobName.\"));\nassertEquals(expected.getJob().getJobId(), resultJob.getId());\n}\n}\n@Test\npublic void testUpdate() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setUpdate(true);\noptions.setJobName(\"oldJobName\");\nPipeline p = buildDataflowPipeline(options);\nDataflowPipelineJob job = (DataflowPipelineJob) p.run();\nassertEquals(\"newid\", job.getJobId());\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testUpdateNonExistentPipeline() throws IOException {\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Could not find running job named badjobname\");\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setUpdate(true);\noptions.setJobName(\"badJobName\");\nPipeline p = buildDataflowPipeline(options);\np.run();\n}\n@Test\npublic void testUpdateAlreadyUpdatedPipeline() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setUpdate(true);\noptions.setJobName(\"oldJobName\");\nDataflow mockDataflowClient = options.getDataflowClient();\nDataflow.Projects.Locations.Jobs.Create mockRequest = mock(\nDataflow.Projects.Locations.Jobs.Create.class);\nwhen(mockDataflowClient.projects().locations().jobs()\n.create(eq(PROJECT_ID), eq(REGION_ID), any(Job.class)))\n.thenReturn(mockRequest);\nfinal Job resultJob = new Job();\nresultJob.setId(\"newid\");\nresultJob.setClientRequestId(\"different_request_id\");\nwhen(mockRequest.execute()).thenReturn(resultJob);\nPipeline p = buildDataflowPipeline(options);\nthrown.expect(DataflowJobAlreadyUpdatedException.class);\nthrown.expect(new TypeSafeMatcher() {\n@Override\npublic void describeTo(Description description) {\ndescription.appendText(\"Expected job ID: \" + resultJob.getId());\n}\n@Override\nprotected boolean matchesSafely(DataflowJobAlreadyUpdatedException item) {\nreturn resultJob.getId().equals(item.getJob().getJobId());\n}\n});\nthrown.expectMessage(\"The job named oldjobname with id: oldJobId has already been updated \"\n+ \"into job id: newid and cannot be updated again.\");\np.run();\n}\n@Test\npublic void testRunWithFiles() throws IOException {\nfinal String cloudDataflowDataset = \"somedataset\";\nFile temp1 = File.createTempFile(\"DataflowRunnerTest\", \"txt\");\ntemp1.deleteOnExit();\nFile temp2 = File.createTempFile(\"DataflowRunnerTest2\", \"txt\");\ntemp2.deleteOnExit();\nString overridePackageName = \"alias.txt\";\nwhen(mockGcsUtil.getObjects(anyListOf(GcsPath.class)))\n.thenReturn(ImmutableList.of(GcsUtil.StorageObjectOrIOException.create(\nnew FileNotFoundException(\"some/path\"))));\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setFilesToStage(ImmutableList.of(\ntemp1.getAbsolutePath(),\noverridePackageName + \"=\" + temp2.getAbsolutePath()));\noptions.setStagingLocation(VALID_STAGING_BUCKET);\noptions.setTempLocation(VALID_TEMP_BUCKET);\noptions.setTempDatasetId(cloudDataflowDataset);\noptions.setProject(PROJECT_ID);\noptions.setRegion(REGION_ID);\noptions.setJobName(\"job\");\noptions.setDataflowClient(buildMockDataflow());\noptions.setGcsUtil(mockGcsUtil);\noptions.setGcpCredential(new TestCredential());\nwhen(mockGcsUtil.create(any(GcsPath.class), 
anyString(), anyInt()))\n.then(\nnew Answer() {\n@Override\npublic SeekableByteChannel answer(InvocationOnMock invocation) throws Throwable {\nreturn FileChannel.open(\nFiles.createTempFile(\"channel-\", \".tmp\"),\nStandardOpenOption.CREATE,\nStandardOpenOption.WRITE,\nStandardOpenOption.DELETE_ON_CLOSE);\n}\n});\nPipeline p = buildDataflowPipeline(options);\nDataflowPipelineJob job = (DataflowPipelineJob) p.run();\nassertEquals(\"newid\", job.getJobId());\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nJob workflowJob = jobCaptor.getValue();\nassertValidJob(workflowJob);\nassertEquals(\n2,\nworkflowJob.getEnvironment().getWorkerPools().get(0).getPackages().size());\nDataflowPackage workflowPackage1 =\nworkflowJob.getEnvironment().getWorkerPools().get(0).getPackages().get(0);\nassertThat(workflowPackage1.getName(), startsWith(temp1.getName()));\nDataflowPackage workflowPackage2 =\nworkflowJob.getEnvironment().getWorkerPools().get(0).getPackages().get(1);\nassertEquals(overridePackageName, workflowPackage2.getName());\nassertEquals(\nGcsPath.fromUri(VALID_TEMP_BUCKET).toResourceName(),\nworkflowJob.getEnvironment().getTempStoragePrefix());\nassertEquals(\ncloudDataflowDataset,\nworkflowJob.getEnvironment().getDataset());\nassertEquals(\nDataflowRunnerInfo.getDataflowRunnerInfo().getName(),\nworkflowJob.getEnvironment().getUserAgent().get(\"name\"));\nassertEquals(\nDataflowRunnerInfo.getDataflowRunnerInfo().getVersion(),\nworkflowJob.getEnvironment().getUserAgent().get(\"version\"));\n}\n@Test\npublic void runWithDefaultFilesToStage() throws Exception {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setFilesToStage(null);\nDataflowRunner.fromOptions(options);\nassertTrue(!options.getFilesToStage().isEmpty());\n}\n@Test\npublic void detectClassPathResourceWithFileResources() throws Exception {\nFile file = tmpFolder.newFile(\"file\");\nFile file2 = tmpFolder.newFile(\"file2\");\nURLClassLoader classLoader = new URLClassLoader(new URL[] {\nfile.toURI().toURL(),\nfile2.toURI().toURL()\n});\nassertEquals(ImmutableList.of(file.getAbsolutePath(), file2.getAbsolutePath()),\nDataflowRunner.detectClassPathResourcesToStage(classLoader));\n}\n@Test\npublic void detectClassPathResourcesWithUnsupportedClassLoader() {\nClassLoader mockClassLoader = Mockito.mock(ClassLoader.class);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Unable to use ClassLoader to detect classpath elements.\");\nDataflowRunner.detectClassPathResourcesToStage(mockClassLoader);\n}\n@Test\npublic void detectClassPathResourceWithNonFileResources() throws Exception {\nString url = \"http:\nURLClassLoader classLoader = new URLClassLoader(new URL[] {\nnew URL(url)\n});\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Unable to convert url (\" + url + \") to file.\");\nDataflowRunner.detectClassPathResourcesToStage(classLoader);\n}\n@Test\npublic void testGcsStagingLocationInitialization() throws Exception {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setTempLocation(VALID_TEMP_BUCKET);\noptions.setProject(PROJECT_ID);\noptions.setGcpCredential(new TestCredential());\noptions.setGcsUtil(mockGcsUtil);\noptions.setRunner(DataflowRunner.class);\nDataflowRunner.fromOptions(options);\nassertNotNull(options.getStagingLocation());\n}\n@Test\npublic void testInvalidGcpTempLocation() throws IOException 
{\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setGcpTempLocation(\"file:\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(containsString(\"Expected a valid 'gs:\nDataflowRunner.fromOptions(options);\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testNonGcsTempLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setTempLocation(\"file:\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\n\"DataflowRunner requires gcpTempLocation, \"\n+ \"but failed to retrieve a value from PipelineOptions\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testInvalidStagingLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setStagingLocation(\"file:\ntry {\nDataflowRunner.fromOptions(options);\nfail(\"fromOptions should have failed\");\n} catch (IllegalArgumentException e) {\nassertThat(e.getMessage(), containsString(\"Expected a valid 'gs:\n}\noptions.setStagingLocation(\"my/staging/location\");\ntry {\nDataflowRunner.fromOptions(options);\nfail(\"fromOptions should have failed\");\n} catch (IllegalArgumentException e) {\nassertThat(e.getMessage(), containsString(\"Expected a valid 'gs:\n}\n}\n@Test\npublic void testInvalidProfileLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setSaveProfilesToGcs(\"file:\ntry {\nDataflowRunner.fromOptions(options);\nfail(\"fromOptions should have failed\");\n} catch (IllegalArgumentException e) {\nassertThat(e.getMessage(), containsString(\"Expected a valid 'gs:\n}\noptions.setSaveProfilesToGcs(\"my/staging/location\");\ntry {\nDataflowRunner.fromOptions(options);\nfail(\"fromOptions should have failed\");\n} catch (IllegalArgumentException e) {\nassertThat(e.getMessage(), containsString(\"Expected a valid 'gs:\n}\n}\n@Test\npublic void testNonExistentTempLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setGcpTempLocation(NON_EXISTENT_BUCKET);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(containsString(\n\"Output path does not exist or is not writeable: \" + NON_EXISTENT_BUCKET));\nDataflowRunner.fromOptions(options);\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testNonExistentStagingLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setStagingLocation(NON_EXISTENT_BUCKET);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(containsString(\n\"Output path does not exist or is not writeable: \" + NON_EXISTENT_BUCKET));\nDataflowRunner.fromOptions(options);\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testNonExistentProfileLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setSaveProfilesToGcs(NON_EXISTENT_BUCKET);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(containsString(\n\"Output path does not exist or is not writeable: \" + 
NON_EXISTENT_BUCKET));\nDataflowRunner.fromOptions(options);\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testNoProjectFails() {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(null);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Project id\");\nthrown.expectMessage(\"when running a Dataflow in the cloud\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testProjectId() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"foo-12345\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\noptions.setGcpCredential(new TestCredential());\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testProjectPrefix() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"google.com:some-project-12345\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\noptions.setGcpCredential(new TestCredential());\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testProjectNumber() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"12345\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Project ID\");\nthrown.expectMessage(\"project number\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testProjectDescription() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"some project\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Project ID\");\nthrown.expectMessage(\"project description\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testInvalidNumberOfWorkerHarnessThreads() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\nFileSystems.setDefaultPipelineOptions(options);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"foo-12345\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\noptions.as(DataflowPipelineDebugOptions.class).setNumberOfWorkerHarnessThreads(-1);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Number of worker harness threads\");\nthrown.expectMessage(\"Please make sure the value is non-negative.\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testNoStagingLocationAndNoTempLocationFails() {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"foo-project\");\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"DataflowRunner requires gcpTempLocation, \"\n+ \"but failed to retrieve a value from 
PipelineOption\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testGcpTempAndNoTempLocationSucceeds() throws Exception {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setGcpCredential(new TestCredential());\noptions.setProject(\"foo-project\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testTempLocationAndNoGcpTempLocationSucceeds() throws Exception {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setGcpCredential(new TestCredential());\noptions.setProject(\"foo-project\");\noptions.setTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testValidProfileLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setSaveProfilesToGcs(VALID_PROFILE_BUCKET);\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testInvalidJobName() throws IOException {\nList invalidNames = Arrays.asList(\n\"invalid_name\",\n\"0invalid\",\n\"invalid-\");\nList expectedReason = Arrays.asList(\n\"JobName invalid\",\n\"JobName invalid\",\n\"JobName invalid\");\nfor (int i = 0; i < invalidNames.size(); ++i) {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setJobName(invalidNames.get(i));\ntry {\nDataflowRunner.fromOptions(options);\nfail(\"Expected IllegalArgumentException for jobName \"\n+ options.getJobName());\n} catch (IllegalArgumentException e) {\nassertThat(e.getMessage(),\ncontainsString(expectedReason.get(i)));\n}\n}\n}\n@Test\npublic void testValidJobName() throws IOException {\nList names = Arrays.asList(\"ok\", \"Ok\", \"A-Ok\", \"ok-123\",\n\"this-one-is-fairly-long-01234567890123456789\");\nfor (String name : names) {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setJobName(name);\nDataflowRunner runner = DataflowRunner\n.fromOptions(options);\nassertNotNull(runner);\n}\n}\n@Test\npublic void testGcsUploadBufferSizeIsUnsetForBatchWhenDefault() throws IOException {\nDataflowPipelineOptions batchOptions = buildPipelineOptions();\nbatchOptions.setRunner(DataflowRunner.class);\nPipeline.create(batchOptions);\nassertNull(batchOptions.getGcsUploadBufferSizeBytes());\n}\n@Test\npublic void testGcsUploadBufferSizeIsSetForStreamingWhenDefault() throws IOException {\nDataflowPipelineOptions streamingOptions = buildPipelineOptions();\nstreamingOptions.setStreaming(true);\nstreamingOptions.setRunner(DataflowRunner.class);\nPipeline p = Pipeline.create(streamingOptions);\np.run();\nassertEquals(\nDataflowRunner.GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT,\nstreamingOptions.getGcsUploadBufferSizeBytes().intValue());\n}\n@Test\npublic void testGcsUploadBufferSizeUnchangedWhenNotDefault() throws IOException {\nint gcsUploadBufferSizeBytes = 12345678;\nDataflowPipelineOptions batchOptions = buildPipelineOptions();\nbatchOptions.setGcsUploadBufferSizeBytes(gcsUploadBufferSizeBytes);\nbatchOptions.setRunner(DataflowRunner.class);\nPipeline.create(batchOptions);\nassertEquals(gcsUploadBufferSizeBytes, batchOptions.getGcsUploadBufferSizeBytes().intValue());\nDataflowPipelineOptions streamingOptions = 
buildPipelineOptions();\nstreamingOptions.setStreaming(true);\nstreamingOptions.setGcsUploadBufferSizeBytes(gcsUploadBufferSizeBytes);\nstreamingOptions.setRunner(DataflowRunner.class);\nPipeline.create(streamingOptions);\nassertEquals(\ngcsUploadBufferSizeBytes, streamingOptions.getGcsUploadBufferSizeBytes().intValue());\n}\n/**\n* A fake PTransform for testing.\n*/\npublic static class TestTransform\nextends PTransform, PCollection> {\npublic boolean translated = false;\n@Override\npublic PCollection expand(PCollection input) {\nreturn PCollection.createPrimitiveOutputInternal(\ninput.getPipeline(),\nWindowingStrategy.globalDefault(),\ninput.isBounded(),\ninput.getCoder());\n}\n}\n@Test\npublic void testTransformTranslatorMissing() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\nPipeline p = Pipeline.create(options);\np.apply(Create.of(Arrays.asList(1, 2, 3)))\n.apply(new TestTransform());\nthrown.expect(IllegalStateException.class);\nthrown.expectMessage(Matchers.containsString(\"no translator registered\"));\nDataflowPipelineTranslator.fromOptions(options)\n.translate(\np, DataflowRunner.fromOptions(options), Collections.emptyList());\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testTransformTranslator() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\nPipeline p = Pipeline.create(options);\nTestTransform transform = new TestTransform();\np.apply(Create.of(Arrays.asList(1, 2, 3)).withCoder(BigEndianIntegerCoder.of()))\n.apply(transform);\nDataflowPipelineTranslator translator = DataflowRunner\n.fromOptions(options).getTranslator();\nDataflowPipelineTranslator.registerTransformTranslator(\nTestTransform.class,\nnew TransformTranslator() {\n@SuppressWarnings(\"unchecked\")\n@Override\npublic void translate(\nTestTransform transform,\nTranslationContext context) {\ntransform.translated = true;\nStepTranslationContext stepContext = context.addStep(transform, \"TestTranslate\");\nstepContext.addOutput(context.getOutput(transform));\n}\n});\ntranslator.translate(\np, DataflowRunner.fromOptions(options), Collections.emptyList());\nassertTrue(transform.translated);\n}\nprivate void verifyMapStateUnsupported(PipelineOptions options) throws Exception {\nPipeline p = Pipeline.create(options);\np.apply(Create.of(KV.of(13, 42)))\n.apply(\nParDo.of(\nnew DoFn, Void>() {\n@StateId(\"fizzle\")\nprivate final StateSpec> voidState = StateSpecs.map();\n@ProcessElement\npublic void process() {}\n}));\nthrown.expectMessage(\"MapState\");\nthrown.expect(UnsupportedOperationException.class);\np.run();\n}\n@Test\npublic void testMapStateUnsupportedInBatch() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(false);\nverifyMapStateUnsupported(options);\n}\n@Test\npublic void testMapStateUnsupportedInStreaming() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(true);\nverifyMapStateUnsupported(options);\n}\nprivate void verifySetStateUnsupported(PipelineOptions options) throws Exception {\nPipeline p = Pipeline.create(options);\np.apply(Create.of(KV.of(13, 42)))\n.apply(\nParDo.of(\nnew DoFn, Void>() {\n@StateId(\"fizzle\")\nprivate final StateSpec> voidState = StateSpecs.set();\n@ProcessElement\npublic void process() 
{}\n}));\nthrown.expectMessage(\"SetState\");\nthrown.expect(UnsupportedOperationException.class);\np.run();\n}\n@Test\npublic void testSetStateUnsupportedInBatch() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(false);\nPipeline p = Pipeline.create(options);\nverifySetStateUnsupported(options);\n}\n@Test\npublic void testSetStateUnsupportedInStreaming() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(true);\nverifySetStateUnsupported(options);\n}\n/** Records all the composite transforms visited within the Pipeline. */\nprivate static class CompositeTransformRecorder extends PipelineVisitor.Defaults {\nprivate List> transforms = new ArrayList<>();\n@Override\npublic CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {\nif (node.getTransform() != null) {\ntransforms.add(node.getTransform());\n}\nreturn CompositeBehavior.ENTER_TRANSFORM;\n}\npublic List> getCompositeTransforms() {\nreturn transforms;\n}\n}\n@Test\npublic void testApplyIsScopedToExactClass() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\nPipeline p = Pipeline.create(options);\nCreate.TimestampedValues transform =\nCreate.timestamped(Arrays.asList(TimestampedValue.of(\"TestString\", Instant.now())));\np.apply(transform);\nCompositeTransformRecorder recorder = new CompositeTransformRecorder();\np.traverseTopologically(recorder);\nassertThat(\n\"Expected to have seen CreateTimestamped composite transform.\",\nrecorder.getCompositeTransforms(),\nhasItem(transform));\nassertThat(\n\"Expected to have two composites, CreateTimestamped and Create.Values\",\nrecorder.getCompositeTransforms(),\nhasItem(Matchers.>isA((Class) Create.Values.class)));\n}\n@Test\npublic void testToString() {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setJobName(\"TestJobName\");\noptions.setProject(\"test-project\");\noptions.setTempLocation(\"gs:\noptions.setGcpCredential(new TestCredential());\noptions.setPathValidatorClass(NoopPathValidator.class);\noptions.setRunner(DataflowRunner.class);\nassertEquals(\n\"DataflowRunner\nDataflowRunner.fromOptions(options).toString());\n}\n/**\n* Tests that the {@link DataflowRunner} with {@code --templateLocation} returns normally when the\n* runner is successfully run.\n*/\n@Test\npublic void testTemplateRunnerFullCompletion() throws Exception {\nFile existingFile = tmpFolder.newFile();\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setJobName(\"TestJobName\");\noptions.setGcpCredential(new TestCredential());\noptions.setPathValidatorClass(NoopPathValidator.class);\noptions.setProject(\"test-project\");\noptions.setRunner(DataflowRunner.class);\noptions.setTemplateLocation(existingFile.getPath());\noptions.setTempLocation(tmpFolder.getRoot().getPath());\nPipeline p = Pipeline.create(options);\np.run();\nexpectedLogs.verifyInfo(\"Template successfully created\");\n}\n/**\n* Tests that the {@link DataflowRunner} with {@code --templateLocation} throws the appropriate\n* exception when an output file is not writable.\n*/\n@Test\npublic void testTemplateRunnerLoggedErrorForFile() throws Exception {\nDataflowPipelineOptions options = 
PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setJobName(\"TestJobName\");\noptions.setRunner(DataflowRunner.class);\noptions.setTemplateLocation(\"\noptions.setProject(\"test-project\");\noptions.setTempLocation(tmpFolder.getRoot().getPath());\noptions.setGcpCredential(new TestCredential());\noptions.setPathValidatorClass(NoopPathValidator.class);\nPipeline p = Pipeline.create(options);\nthrown.expectMessage(\"Cannot create output file at\");\nthrown.expect(RuntimeException.class);\np.run();\n}\n@Test\npublic void testHasExperiment() {\nDataflowPipelineDebugOptions options =\nPipelineOptionsFactory.as(DataflowPipelineDebugOptions.class);\noptions.setExperiments(null);\nassertFalse(DataflowRunner.hasExperiment(options, \"foo\"));\noptions.setExperiments(ImmutableList.of(\"foo\", \"bar\"));\nassertTrue(DataflowRunner.hasExperiment(options, \"foo\"));\nassertTrue(DataflowRunner.hasExperiment(options, \"bar\"));\nassertFalse(DataflowRunner.hasExperiment(options, \"baz\"));\nassertFalse(DataflowRunner.hasExperiment(options, \"ba\"));\nassertFalse(DataflowRunner.hasExperiment(options, \"BAR\"));\n}\n@Test\npublic void testWorkerHarnessContainerImage() {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setWorkerHarnessContainerImage(\"some-container\");\nassertThat(getContainerImageForJob(options), equalTo(\"some-container\"));\noptions.setWorkerHarnessContainerImage(\"gcr.io/IMAGE/foo\");\noptions.setExperiments(null);\noptions.setStreaming(false);\nassertThat(\ngetContainerImageForJob(options), equalTo(\"gcr.io/beam-java-batch/foo\"));\noptions.setStreaming(true);\nassertThat(\ngetContainerImageForJob(options), equalTo(\"gcr.io/beam-java-streaming/foo\"));\noptions.setExperiments(ImmutableList.of(\"experiment1\", \"beam_fn_api\"));\nassertThat(\ngetContainerImageForJob(options), equalTo(\"gcr.io/java/foo\"));\n}\n@Test\npublic void testStreamingWriteWithNoShardingReturnsNewTransform() {\nPipelineOptions options = TestPipeline.testingPipelineOptions();\noptions.as(DataflowPipelineWorkerPoolOptions.class).setMaxNumWorkers(10);\ntestStreamingWriteOverride(options, 20);\n}\n@Test\npublic void testStreamingWriteWithNoShardingReturnsNewTransformMaxWorkersUnset() {\nPipelineOptions options = TestPipeline.testingPipelineOptions();\ntestStreamingWriteOverride(options, StreamingShardedWriteFactory.DEFAULT_NUM_SHARDS);\n}\nprivate void verifyMergingStatefulParDoRejected(PipelineOptions options) throws Exception {\nPipeline p = Pipeline.create(options);\np.apply(Create.of(KV.of(13, 42)))\n.apply(Window.>into(Sessions.withGapDuration(Duration.millis(1))))\n.apply(ParDo.of(new DoFn, Void>() {\n@StateId(\"fizzle\")\nprivate final StateSpec> voidState = StateSpecs.value();\n@ProcessElement\npublic void process() {}\n}));\nthrown.expectMessage(\"merging\");\nthrown.expect(UnsupportedOperationException.class);\np.run();\n}\n@Test\npublic void testMergingStatefulRejectedInStreaming() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(true);\nverifyMergingStatefulParDoRejected(options);\n}\n@Test\npublic void testMergingStatefulRejectedInBatch() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(false);\nverifyMergingStatefulParDoRejected(options);\n}\nprivate void testStreamingWriteOverride(PipelineOptions options, int expectedNumShards) {\nTestPipeline p = 
TestPipeline.fromOptions(options);\nStreamingShardedWriteFactory factory =\nnew StreamingShardedWriteFactory<>(p.getOptions());\nWriteFiles original = WriteFiles.to(new TestSink(tmpFolder.toString()));\nPCollection objs = (PCollection) p.apply(Create.empty(VoidCoder.of()));\nAppliedPTransform, WriteFilesResult, WriteFiles>\noriginalApplication =\nAppliedPTransform.of(\n\"writefiles\",\nobjs.expand(),\nCollections., PValue>emptyMap(),\noriginal,\np);\nWriteFiles replacement =\n(WriteFiles)\nfactory.getReplacementTransform(originalApplication).getTransform();\nassertThat(replacement, not(equalTo((Object) original)));\nassertThat(replacement.getNumShards().get(), equalTo(expectedNumShards));\n}\nprivate static class TestSink extends FileBasedSink {\n@Override\npublic void validate(PipelineOptions options) {}\nTestSink(String tmpFolder) {\nsuper(\nStaticValueProvider.of(FileSystems.matchNewResource(tmpFolder, true)),\nDynamicFileDestinations.constant(\nnew FilenamePolicy() {\n@Override\npublic ResourceId windowedFilename(\nint shardNumber,\nint numShards,\nBoundedWindow window,\nPaneInfo paneInfo,\nOutputFileHints outputFileHints) {\nthrow new UnsupportedOperationException(\"should not be called\");\n}\n@Nullable\n@Override\npublic ResourceId unwindowedFilename(\nint shardNumber, int numShards, OutputFileHints outputFileHints) {\nthrow new UnsupportedOperationException(\"should not be called\");\n}\n},\nSerializableFunctions.identity()));\n}\n@Override\npublic WriteOperation createWriteOperation() {\nthrow new IllegalArgumentException(\"Should not be used\");\n}\n}\n}", + "context_after": "class DataflowRunnerTest implements Serializable {\nprivate static final String VALID_BUCKET = \"valid-bucket\";\nprivate static final String VALID_STAGING_BUCKET = \"gs:\nprivate static final String VALID_TEMP_BUCKET = \"gs:\nprivate static final String VALID_PROFILE_BUCKET = \"gs:\nprivate static final String NON_EXISTENT_BUCKET = \"gs:\nprivate static final String PROJECT_ID = \"some-project\";\nprivate static final String REGION_ID = \"some-region-1\";\n@Rule public transient TemporaryFolder tmpFolder = new TemporaryFolder();\n@Rule public transient ExpectedException thrown = ExpectedException.none();\n@Rule public transient ExpectedLogs expectedLogs = ExpectedLogs.none(DataflowRunner.class);\nprivate transient Dataflow.Projects.Locations.Jobs mockJobs;\nprivate transient GcsUtil mockGcsUtil;\nprivate static void assertValidJob(Job job) {\nassertNull(job.getId());\nassertNull(job.getCurrentState());\nassertTrue(Pattern.matches(\"[a-z]([-a-z0-9]*[a-z0-9])?\", job.getName()));\nfor (WorkerPool workerPool : job.getEnvironment().getWorkerPools()) {\nassertThat(workerPool.getMetadata(),\nhasKey(DataflowRunner.STAGED_PIPELINE_METADATA_PROPERTY));\n}\n}\n@Before\nprivate Pipeline buildDataflowPipeline(DataflowPipelineOptions options) {\noptions.setStableUniqueNames(CheckEnabled.ERROR);\noptions.setRunner(DataflowRunner.class);\nPipeline p = Pipeline.create(options);\np.apply(\"ReadMyFile\", TextIO.read().from(\"gs:\n.apply(\"WriteMyFile\", TextIO.write().to(\"gs:\nFileSystems.setDefaultPipelineOptions(options);\nreturn p;\n}\nprivate Dataflow buildMockDataflow() throws IOException {\nDataflow mockDataflowClient = mock(Dataflow.class);\nDataflow.Projects mockProjects = mock(Dataflow.Projects.class);\nDataflow.Projects.Locations mockLocations = mock(Dataflow.Projects.Locations.class);\nDataflow.Projects.Locations.Jobs.Create mockRequest 
=\nmock(Dataflow.Projects.Locations.Jobs.Create.class);\nDataflow.Projects.Locations.Jobs.List mockList = mock(\nDataflow.Projects.Locations.Jobs.List.class);\nwhen(mockDataflowClient.projects()).thenReturn(mockProjects);\nwhen(mockProjects.locations()).thenReturn(mockLocations);\nwhen(mockLocations.jobs()).thenReturn(mockJobs);\nwhen(mockJobs.create(eq(PROJECT_ID), eq(REGION_ID), isA(Job.class))).thenReturn(mockRequest);\nwhen(mockJobs.list(eq(PROJECT_ID), eq(REGION_ID))).thenReturn(mockList);\nwhen(mockList.setPageToken(anyString())).thenReturn(mockList);\nwhen(mockList.execute())\n.thenReturn(\nnew ListJobsResponse()\n.setJobs(\nArrays.asList(\nnew Job()\n.setName(\"oldjobname\")\n.setId(\"oldJobId\")\n.setCurrentState(\"JOB_STATE_RUNNING\"))));\nJob resultJob = new Job();\nresultJob.setId(\"newid\");\nwhen(mockRequest.execute()).thenReturn(resultJob);\nreturn mockDataflowClient;\n}\nprivate GcsUtil buildMockGcsUtil() throws IOException {\nGcsUtil mockGcsUtil = mock(GcsUtil.class);\nwhen(mockGcsUtil.create(any(GcsPath.class), anyString()))\n.then(new Answer() {\n@Override\npublic SeekableByteChannel answer(InvocationOnMock invocation) throws Throwable {\nreturn FileChannel.open(\nFiles.createTempFile(\"channel-\", \".tmp\"),\nStandardOpenOption.CREATE, StandardOpenOption.DELETE_ON_CLOSE);\n}\n});\nwhen(mockGcsUtil.expand(any(GcsPath.class))).then(new Answer>() {\n@Override\npublic List answer(InvocationOnMock invocation) throws Throwable {\nreturn ImmutableList.of((GcsPath) invocation.getArguments()[0]);\n}\n});\nreturn mockGcsUtil;\n}\nprivate DataflowPipelineOptions buildPipelineOptions() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(PROJECT_ID);\noptions.setTempLocation(VALID_TEMP_BUCKET);\noptions.setRegion(REGION_ID);\noptions.setFilesToStage(new LinkedList());\noptions.setDataflowClient(buildMockDataflow());\noptions.setGcsUtil(mockGcsUtil);\noptions.setGcpCredential(new TestCredential());\nFileSystems.setDefaultPipelineOptions(options);\nreturn options;\n}\n@Test\npublic void testPathValidation() {\nString[] args = new String[] {\n\"--runner=DataflowRunner\",\n\"--tempLocation=/tmp/not/a/gs/path\",\n\"--project=test-project\",\n\"--credentialFactoryClass=\" + NoopCredentialFactory.class.getName(),\n};\ntry {\nPipeline.create(PipelineOptionsFactory.fromArgs(args).create()).run();\nfail();\n} catch (RuntimeException e) {\nassertThat(\nThrowables.getStackTraceAsString(e),\ncontainsString(\"DataflowRunner requires gcpTempLocation\"));\n}\n}\n@Test\npublic void testPathExistsValidation() {\nString[] args = new String[] {\n\"--runner=DataflowRunner\",\n\"--tempLocation=gs:\n\"--project=test-project\",\n\"--credentialFactoryClass=\" + NoopCredentialFactory.class.getName(),\n};\ntry {\nPipeline.create(PipelineOptionsFactory.fromArgs(args).create()).run();\nfail();\n} catch (RuntimeException e) {\nassertThat(\nThrowables.getStackTraceAsString(e),\nboth(containsString(\"gs:\n.and(containsString(\"does not exist or is not writeable\")));\n}\n}\n@Test\npublic void testPathValidatorOverride() {\nString[] args = new String[] {\n\"--runner=DataflowRunner\",\n\"--tempLocation=/tmp/testing\",\n\"--project=test-project\",\n\"--credentialFactoryClass=\" + NoopCredentialFactory.class.getName(),\n\"--pathValidatorClass=\" + NoopPathValidator.class.getName(),\n};\nTestPipeline.fromOptions(PipelineOptionsFactory.fromArgs(args).create());\n}\n@Test\npublic void 
testFromOptionsWithUppercaseConvertsToLowercase() throws Exception {\nString mixedCase = \"ThisJobNameHasMixedCase\";\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setJobName(mixedCase);\nDataflowRunner.fromOptions(options);\nassertThat(options.getJobName(), equalTo(mixedCase.toLowerCase()));\n}\n@Test\npublic void testFromOptionsUserAgentFromPipelineInfo() throws Exception {\nDataflowPipelineOptions options = buildPipelineOptions();\nDataflowRunner.fromOptions(options);\nString expectedName = DataflowRunnerInfo.getDataflowRunnerInfo().getName().replace(\" \", \"_\");\nassertThat(options.getUserAgent(), containsString(expectedName));\nString expectedVersion = DataflowRunnerInfo.getDataflowRunnerInfo().getVersion();\nassertThat(options.getUserAgent(), containsString(expectedVersion));\n}\n@Test\npublic void testRun() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\nPipeline p = buildDataflowPipeline(options);\nDataflowPipelineJob job = (DataflowPipelineJob) p.run();\nassertEquals(\"newid\", job.getJobId());\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n/** Options for testing. */\npublic interface RuntimeTestOptions extends PipelineOptions {\nValueProvider getInput();\nvoid setInput(ValueProvider value);\nValueProvider getOutput();\nvoid setOutput(ValueProvider value);\n}\n@Test\npublic void testTextIOWithRuntimeParameters() throws IOException {\nDataflowPipelineOptions dataflowOptions = buildPipelineOptions();\nRuntimeTestOptions options = dataflowOptions.as(RuntimeTestOptions.class);\nPipeline p = buildDataflowPipeline(dataflowOptions);\np\n.apply(TextIO.read().from(options.getInput()))\n.apply(TextIO.write().to(options.getOutput()));\n}\n/**\n* Tests that all reads are consumed by at least one {@link PTransform}.\n*/\n@Test\npublic void testUnconsumedReads() throws IOException {\nDataflowPipelineOptions dataflowOptions = buildPipelineOptions();\nRuntimeTestOptions options = dataflowOptions.as(RuntimeTestOptions.class);\nPipeline p = buildDataflowPipeline(dataflowOptions);\nPCollection unconsumed = p.apply(TextIO.read().from(options.getInput()));\nDataflowRunner.fromOptions(dataflowOptions).replaceTransforms(p);\nfinal AtomicBoolean unconsumedSeenAsInput = new AtomicBoolean();\np.traverseTopologically(new PipelineVisitor.Defaults() {\n@Override\npublic void visitPrimitiveTransform(Node node) {\nunconsumedSeenAsInput.set(true);\n}\n});\nassertThat(unconsumedSeenAsInput.get(), is(true));\n}\n@Test\npublic void testRunReturnDifferentRequestId() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\nDataflow mockDataflowClient = options.getDataflowClient();\nDataflow.Projects.Locations.Jobs.Create mockRequest = mock(\nDataflow.Projects.Locations.Jobs.Create.class);\nwhen(mockDataflowClient.projects().locations().jobs()\n.create(eq(PROJECT_ID), eq(REGION_ID), any(Job.class)))\n.thenReturn(mockRequest);\nJob resultJob = new Job();\nresultJob.setId(\"newid\");\nresultJob.setClientRequestId(\"different_request_id\");\nwhen(mockRequest.execute()).thenReturn(resultJob);\nPipeline p = buildDataflowPipeline(options);\ntry {\np.run();\nfail(\"Expected DataflowJobAlreadyExistsException\");\n} catch (DataflowJobAlreadyExistsException expected) {\nassertThat(expected.getMessage(),\ncontainsString(\"If you want to submit a second job, try again by setting a \"\n+ \"different name 
using --jobName.\"));\nassertEquals(expected.getJob().getJobId(), resultJob.getId());\n}\n}\n@Test\npublic void testUpdate() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setUpdate(true);\noptions.setJobName(\"oldJobName\");\nPipeline p = buildDataflowPipeline(options);\nDataflowPipelineJob job = (DataflowPipelineJob) p.run();\nassertEquals(\"newid\", job.getJobId());\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testUpdateNonExistentPipeline() throws IOException {\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Could not find running job named badjobname\");\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setUpdate(true);\noptions.setJobName(\"badJobName\");\nPipeline p = buildDataflowPipeline(options);\np.run();\n}\n@Test\npublic void testUpdateAlreadyUpdatedPipeline() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setUpdate(true);\noptions.setJobName(\"oldJobName\");\nDataflow mockDataflowClient = options.getDataflowClient();\nDataflow.Projects.Locations.Jobs.Create mockRequest = mock(\nDataflow.Projects.Locations.Jobs.Create.class);\nwhen(mockDataflowClient.projects().locations().jobs()\n.create(eq(PROJECT_ID), eq(REGION_ID), any(Job.class)))\n.thenReturn(mockRequest);\nfinal Job resultJob = new Job();\nresultJob.setId(\"newid\");\nresultJob.setClientRequestId(\"different_request_id\");\nwhen(mockRequest.execute()).thenReturn(resultJob);\nPipeline p = buildDataflowPipeline(options);\nthrown.expect(DataflowJobAlreadyUpdatedException.class);\nthrown.expect(new TypeSafeMatcher() {\n@Override\npublic void describeTo(Description description) {\ndescription.appendText(\"Expected job ID: \" + resultJob.getId());\n}\n@Override\nprotected boolean matchesSafely(DataflowJobAlreadyUpdatedException item) {\nreturn resultJob.getId().equals(item.getJob().getJobId());\n}\n});\nthrown.expectMessage(\"The job named oldjobname with id: oldJobId has already been updated \"\n+ \"into job id: newid and cannot be updated again.\");\np.run();\n}\n@Test\npublic void testRunWithFiles() throws IOException {\nfinal String cloudDataflowDataset = \"somedataset\";\nFile temp1 = File.createTempFile(\"DataflowRunnerTest\", \"txt\");\ntemp1.deleteOnExit();\nFile temp2 = File.createTempFile(\"DataflowRunnerTest2\", \"txt\");\ntemp2.deleteOnExit();\nString overridePackageName = \"alias.txt\";\nwhen(mockGcsUtil.getObjects(anyListOf(GcsPath.class)))\n.thenReturn(ImmutableList.of(GcsUtil.StorageObjectOrIOException.create(\nnew FileNotFoundException(\"some/path\"))));\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setFilesToStage(ImmutableList.of(\ntemp1.getAbsolutePath(),\noverridePackageName + \"=\" + temp2.getAbsolutePath()));\noptions.setStagingLocation(VALID_STAGING_BUCKET);\noptions.setTempLocation(VALID_TEMP_BUCKET);\noptions.setTempDatasetId(cloudDataflowDataset);\noptions.setProject(PROJECT_ID);\noptions.setRegion(REGION_ID);\noptions.setJobName(\"job\");\noptions.setDataflowClient(buildMockDataflow());\noptions.setGcsUtil(mockGcsUtil);\noptions.setGcpCredential(new TestCredential());\nwhen(mockGcsUtil.create(any(GcsPath.class), anyString(), anyInt()))\n.then(\nnew Answer() {\n@Override\npublic SeekableByteChannel answer(InvocationOnMock invocation) throws Throwable 
{\nreturn FileChannel.open(\nFiles.createTempFile(\"channel-\", \".tmp\"),\nStandardOpenOption.CREATE,\nStandardOpenOption.WRITE,\nStandardOpenOption.DELETE_ON_CLOSE);\n}\n});\nPipeline p = buildDataflowPipeline(options);\nDataflowPipelineJob job = (DataflowPipelineJob) p.run();\nassertEquals(\"newid\", job.getJobId());\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nJob workflowJob = jobCaptor.getValue();\nassertValidJob(workflowJob);\nassertEquals(\n2,\nworkflowJob.getEnvironment().getWorkerPools().get(0).getPackages().size());\nDataflowPackage workflowPackage1 =\nworkflowJob.getEnvironment().getWorkerPools().get(0).getPackages().get(0);\nassertThat(workflowPackage1.getName(), startsWith(temp1.getName()));\nDataflowPackage workflowPackage2 =\nworkflowJob.getEnvironment().getWorkerPools().get(0).getPackages().get(1);\nassertEquals(overridePackageName, workflowPackage2.getName());\nassertEquals(\nGcsPath.fromUri(VALID_TEMP_BUCKET).toResourceName(),\nworkflowJob.getEnvironment().getTempStoragePrefix());\nassertEquals(\ncloudDataflowDataset,\nworkflowJob.getEnvironment().getDataset());\nassertEquals(\nDataflowRunnerInfo.getDataflowRunnerInfo().getName(),\nworkflowJob.getEnvironment().getUserAgent().get(\"name\"));\nassertEquals(\nDataflowRunnerInfo.getDataflowRunnerInfo().getVersion(),\nworkflowJob.getEnvironment().getUserAgent().get(\"version\"));\n}\n@Test\npublic void runWithDefaultFilesToStage() throws Exception {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setFilesToStage(null);\nDataflowRunner.fromOptions(options);\nassertTrue(!options.getFilesToStage().isEmpty());\n}\n@Test\npublic void detectClassPathResourceWithFileResources() throws Exception {\nFile file = tmpFolder.newFile(\"file\");\nFile file2 = tmpFolder.newFile(\"file2\");\nURLClassLoader classLoader = new URLClassLoader(new URL[] {\nfile.toURI().toURL(),\nfile2.toURI().toURL()\n});\nassertEquals(ImmutableList.of(file.getAbsolutePath(), file2.getAbsolutePath()),\nDataflowRunner.detectClassPathResourcesToStage(classLoader));\n}\n@Test\npublic void detectClassPathResourcesWithUnsupportedClassLoader() {\nClassLoader mockClassLoader = Mockito.mock(ClassLoader.class);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Unable to use ClassLoader to detect classpath elements.\");\nDataflowRunner.detectClassPathResourcesToStage(mockClassLoader);\n}\n@Test\npublic void detectClassPathResourceWithNonFileResources() throws Exception {\nString url = \"http:\nURLClassLoader classLoader = new URLClassLoader(new URL[] {\nnew URL(url)\n});\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Unable to convert url (\" + url + \") to file.\");\nDataflowRunner.detectClassPathResourcesToStage(classLoader);\n}\n@Test\npublic void testGcsStagingLocationInitialization() throws Exception {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setTempLocation(VALID_TEMP_BUCKET);\noptions.setProject(PROJECT_ID);\noptions.setGcpCredential(new TestCredential());\noptions.setGcsUtil(mockGcsUtil);\noptions.setRunner(DataflowRunner.class);\nDataflowRunner.fromOptions(options);\nassertNotNull(options.getStagingLocation());\n}\n@Test\npublic void testInvalidGcpTempLocation() throws IOException {\nDataflowPipelineOptions options = 
buildPipelineOptions();\noptions.setGcpTempLocation(\"file:\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(containsString(\"Expected a valid 'gs:\nDataflowRunner.fromOptions(options);\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testNonGcsTempLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setTempLocation(\"file:\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\n\"DataflowRunner requires gcpTempLocation, \"\n+ \"but failed to retrieve a value from PipelineOptions\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testInvalidStagingLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setStagingLocation(\"file:\ntry {\nDataflowRunner.fromOptions(options);\nfail(\"fromOptions should have failed\");\n} catch (IllegalArgumentException e) {\nassertThat(e.getMessage(), containsString(\"Expected a valid 'gs:\n}\noptions.setStagingLocation(\"my/staging/location\");\ntry {\nDataflowRunner.fromOptions(options);\nfail(\"fromOptions should have failed\");\n} catch (IllegalArgumentException e) {\nassertThat(e.getMessage(), containsString(\"Expected a valid 'gs:\n}\n}\n@Test\npublic void testInvalidProfileLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setSaveProfilesToGcs(\"file:\ntry {\nDataflowRunner.fromOptions(options);\nfail(\"fromOptions should have failed\");\n} catch (IllegalArgumentException e) {\nassertThat(e.getMessage(), containsString(\"Expected a valid 'gs:\n}\noptions.setSaveProfilesToGcs(\"my/staging/location\");\ntry {\nDataflowRunner.fromOptions(options);\nfail(\"fromOptions should have failed\");\n} catch (IllegalArgumentException e) {\nassertThat(e.getMessage(), containsString(\"Expected a valid 'gs:\n}\n}\n@Test\npublic void testNonExistentTempLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setGcpTempLocation(NON_EXISTENT_BUCKET);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(containsString(\n\"Output path does not exist or is not writeable: \" + NON_EXISTENT_BUCKET));\nDataflowRunner.fromOptions(options);\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testNonExistentStagingLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setStagingLocation(NON_EXISTENT_BUCKET);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(containsString(\n\"Output path does not exist or is not writeable: \" + NON_EXISTENT_BUCKET));\nDataflowRunner.fromOptions(options);\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testNonExistentProfileLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setSaveProfilesToGcs(NON_EXISTENT_BUCKET);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(containsString(\n\"Output path does not exist or is not writeable: \" + 
NON_EXISTENT_BUCKET));\nDataflowRunner.fromOptions(options);\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testNoProjectFails() {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(null);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Project id\");\nthrown.expectMessage(\"when running a Dataflow in the cloud\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testProjectId() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"foo-12345\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\noptions.setGcpCredential(new TestCredential());\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testProjectPrefix() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"google.com:some-project-12345\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\noptions.setGcpCredential(new TestCredential());\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testProjectNumber() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"12345\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Project ID\");\nthrown.expectMessage(\"project number\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testProjectDescription() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"some project\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Project ID\");\nthrown.expectMessage(\"project description\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testInvalidNumberOfWorkerHarnessThreads() throws IOException {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\nFileSystems.setDefaultPipelineOptions(options);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"foo-12345\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\noptions.as(DataflowPipelineDebugOptions.class).setNumberOfWorkerHarnessThreads(-1);\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"Number of worker harness threads\");\nthrown.expectMessage(\"Please make sure the value is non-negative.\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testNoStagingLocationAndNoTempLocationFails() {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setProject(\"foo-project\");\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"DataflowRunner requires gcpTempLocation, \"\n+ \"but failed to retrieve a value from 
PipelineOption\");\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testGcpTempAndNoTempLocationSucceeds() throws Exception {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setGcpCredential(new TestCredential());\noptions.setProject(\"foo-project\");\noptions.setGcpTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testTempLocationAndNoGcpTempLocationSucceeds() throws Exception {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setRunner(DataflowRunner.class);\noptions.setGcpCredential(new TestCredential());\noptions.setProject(\"foo-project\");\noptions.setTempLocation(VALID_TEMP_BUCKET);\noptions.setGcsUtil(mockGcsUtil);\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testValidProfileLocation() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setSaveProfilesToGcs(VALID_PROFILE_BUCKET);\nDataflowRunner.fromOptions(options);\n}\n@Test\npublic void testInvalidJobName() throws IOException {\nList invalidNames = Arrays.asList(\n\"invalid_name\",\n\"0invalid\",\n\"invalid-\");\nList expectedReason = Arrays.asList(\n\"JobName invalid\",\n\"JobName invalid\",\n\"JobName invalid\");\nfor (int i = 0; i < invalidNames.size(); ++i) {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setJobName(invalidNames.get(i));\ntry {\nDataflowRunner.fromOptions(options);\nfail(\"Expected IllegalArgumentException for jobName \"\n+ options.getJobName());\n} catch (IllegalArgumentException e) {\nassertThat(e.getMessage(),\ncontainsString(expectedReason.get(i)));\n}\n}\n}\n@Test\npublic void testValidJobName() throws IOException {\nList names = Arrays.asList(\"ok\", \"Ok\", \"A-Ok\", \"ok-123\",\n\"this-one-is-fairly-long-01234567890123456789\");\nfor (String name : names) {\nDataflowPipelineOptions options = buildPipelineOptions();\noptions.setJobName(name);\nDataflowRunner runner = DataflowRunner\n.fromOptions(options);\nassertNotNull(runner);\n}\n}\n@Test\npublic void testGcsUploadBufferSizeIsUnsetForBatchWhenDefault() throws IOException {\nDataflowPipelineOptions batchOptions = buildPipelineOptions();\nbatchOptions.setRunner(DataflowRunner.class);\nPipeline.create(batchOptions);\nassertNull(batchOptions.getGcsUploadBufferSizeBytes());\n}\n@Test\npublic void testGcsUploadBufferSizeIsSetForStreamingWhenDefault() throws IOException {\nDataflowPipelineOptions streamingOptions = buildPipelineOptions();\nstreamingOptions.setStreaming(true);\nstreamingOptions.setRunner(DataflowRunner.class);\nPipeline p = Pipeline.create(streamingOptions);\np.run();\nassertEquals(\nDataflowRunner.GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT,\nstreamingOptions.getGcsUploadBufferSizeBytes().intValue());\n}\n@Test\npublic void testGcsUploadBufferSizeUnchangedWhenNotDefault() throws IOException {\nint gcsUploadBufferSizeBytes = 12345678;\nDataflowPipelineOptions batchOptions = buildPipelineOptions();\nbatchOptions.setGcsUploadBufferSizeBytes(gcsUploadBufferSizeBytes);\nbatchOptions.setRunner(DataflowRunner.class);\nPipeline.create(batchOptions);\nassertEquals(gcsUploadBufferSizeBytes, batchOptions.getGcsUploadBufferSizeBytes().intValue());\nDataflowPipelineOptions streamingOptions = 
buildPipelineOptions();\nstreamingOptions.setStreaming(true);\nstreamingOptions.setGcsUploadBufferSizeBytes(gcsUploadBufferSizeBytes);\nstreamingOptions.setRunner(DataflowRunner.class);\nPipeline.create(streamingOptions);\nassertEquals(\ngcsUploadBufferSizeBytes, streamingOptions.getGcsUploadBufferSizeBytes().intValue());\n}\n/**\n* A fake PTransform for testing.\n*/\npublic static class TestTransform\nextends PTransform, PCollection> {\npublic boolean translated = false;\n@Override\npublic PCollection expand(PCollection input) {\nreturn PCollection.createPrimitiveOutputInternal(\ninput.getPipeline(),\nWindowingStrategy.globalDefault(),\ninput.isBounded(),\ninput.getCoder());\n}\n}\n@Test\npublic void testTransformTranslatorMissing() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\nPipeline p = Pipeline.create(options);\np.apply(Create.of(Arrays.asList(1, 2, 3)))\n.apply(new TestTransform());\nthrown.expect(IllegalStateException.class);\nthrown.expectMessage(Matchers.containsString(\"no translator registered\"));\nDataflowPipelineTranslator.fromOptions(options)\n.translate(\np, DataflowRunner.fromOptions(options), Collections.emptyList());\nArgumentCaptor jobCaptor = ArgumentCaptor.forClass(Job.class);\nMockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());\nassertValidJob(jobCaptor.getValue());\n}\n@Test\npublic void testTransformTranslator() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\nPipeline p = Pipeline.create(options);\nTestTransform transform = new TestTransform();\np.apply(Create.of(Arrays.asList(1, 2, 3)).withCoder(BigEndianIntegerCoder.of()))\n.apply(transform);\nDataflowPipelineTranslator translator = DataflowRunner\n.fromOptions(options).getTranslator();\nDataflowPipelineTranslator.registerTransformTranslator(\nTestTransform.class,\nnew TransformTranslator() {\n@SuppressWarnings(\"unchecked\")\n@Override\npublic void translate(\nTestTransform transform,\nTranslationContext context) {\ntransform.translated = true;\nStepTranslationContext stepContext = context.addStep(transform, \"TestTranslate\");\nstepContext.addOutput(context.getOutput(transform));\n}\n});\ntranslator.translate(\np, DataflowRunner.fromOptions(options), Collections.emptyList());\nassertTrue(transform.translated);\n}\nprivate void verifyMapStateUnsupported(PipelineOptions options) throws Exception {\nPipeline p = Pipeline.create(options);\np.apply(Create.of(KV.of(13, 42)))\n.apply(\nParDo.of(\nnew DoFn, Void>() {\n@StateId(\"fizzle\")\nprivate final StateSpec> voidState = StateSpecs.map();\n@ProcessElement\npublic void process() {}\n}));\nthrown.expectMessage(\"MapState\");\nthrown.expect(UnsupportedOperationException.class);\np.run();\n}\n@Test\npublic void testMapStateUnsupportedInBatch() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(false);\nverifyMapStateUnsupported(options);\n}\n@Test\npublic void testMapStateUnsupportedInStreaming() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(true);\nverifyMapStateUnsupported(options);\n}\nprivate void verifySetStateUnsupported(PipelineOptions options) throws Exception {\nPipeline p = Pipeline.create(options);\np.apply(Create.of(KV.of(13, 42)))\n.apply(\nParDo.of(\nnew DoFn, Void>() {\n@StateId(\"fizzle\")\nprivate final StateSpec> voidState = StateSpecs.set();\n@ProcessElement\npublic void process() 
{}\n}));\nthrown.expectMessage(\"SetState\");\nthrown.expect(UnsupportedOperationException.class);\np.run();\n}\n@Test\npublic void testSetStateUnsupportedInBatch() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(false);\nPipeline p = Pipeline.create(options);\nverifySetStateUnsupported(options);\n}\n@Test\npublic void testSetStateUnsupportedInStreaming() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(true);\nverifySetStateUnsupported(options);\n}\n/** Records all the composite transforms visited within the Pipeline. */\nprivate static class CompositeTransformRecorder extends PipelineVisitor.Defaults {\nprivate List> transforms = new ArrayList<>();\n@Override\npublic CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {\nif (node.getTransform() != null) {\ntransforms.add(node.getTransform());\n}\nreturn CompositeBehavior.ENTER_TRANSFORM;\n}\npublic List> getCompositeTransforms() {\nreturn transforms;\n}\n}\n@Test\npublic void testApplyIsScopedToExactClass() throws IOException {\nDataflowPipelineOptions options = buildPipelineOptions();\nPipeline p = Pipeline.create(options);\nCreate.TimestampedValues transform =\nCreate.timestamped(Arrays.asList(TimestampedValue.of(\"TestString\", Instant.now())));\np.apply(transform);\nCompositeTransformRecorder recorder = new CompositeTransformRecorder();\np.traverseTopologically(recorder);\nassertThat(\n\"Expected to have seen CreateTimestamped composite transform.\",\nrecorder.getCompositeTransforms(),\nhasItem(transform));\nassertThat(\n\"Expected to have two composites, CreateTimestamped and Create.Values\",\nrecorder.getCompositeTransforms(),\nhasItem(Matchers.>isA((Class) Create.Values.class)));\n}\n@Test\npublic void testToString() {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setJobName(\"TestJobName\");\noptions.setProject(\"test-project\");\noptions.setTempLocation(\"gs:\noptions.setGcpCredential(new TestCredential());\noptions.setPathValidatorClass(NoopPathValidator.class);\noptions.setRunner(DataflowRunner.class);\nassertEquals(\n\"DataflowRunner\nDataflowRunner.fromOptions(options).toString());\n}\n/**\n* Tests that the {@link DataflowRunner} with {@code --templateLocation} returns normally when the\n* runner is successfully run.\n*/\n@Test\npublic void testTemplateRunnerFullCompletion() throws Exception {\nFile existingFile = tmpFolder.newFile();\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setJobName(\"TestJobName\");\noptions.setGcpCredential(new TestCredential());\noptions.setPathValidatorClass(NoopPathValidator.class);\noptions.setProject(\"test-project\");\noptions.setRunner(DataflowRunner.class);\noptions.setTemplateLocation(existingFile.getPath());\noptions.setTempLocation(tmpFolder.getRoot().getPath());\nPipeline p = Pipeline.create(options);\np.run();\nexpectedLogs.verifyInfo(\"Template successfully created\");\n}\n/**\n* Tests that the {@link DataflowRunner} with {@code --templateLocation} throws the appropriate\n* exception when an output file is not writable.\n*/\n@Test\npublic void testTemplateRunnerLoggedErrorForFile() throws Exception {\nDataflowPipelineOptions options = 
PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setJobName(\"TestJobName\");\noptions.setRunner(DataflowRunner.class);\noptions.setTemplateLocation(\"\noptions.setProject(\"test-project\");\noptions.setTempLocation(tmpFolder.getRoot().getPath());\noptions.setGcpCredential(new TestCredential());\noptions.setPathValidatorClass(NoopPathValidator.class);\nPipeline p = Pipeline.create(options);\nthrown.expectMessage(\"Cannot create output file at\");\nthrown.expect(RuntimeException.class);\np.run();\n}\n@Test\npublic void testHasExperiment() {\nDataflowPipelineDebugOptions options =\nPipelineOptionsFactory.as(DataflowPipelineDebugOptions.class);\noptions.setExperiments(null);\nassertFalse(DataflowRunner.hasExperiment(options, \"foo\"));\noptions.setExperiments(ImmutableList.of(\"foo\", \"bar\"));\nassertTrue(DataflowRunner.hasExperiment(options, \"foo\"));\nassertTrue(DataflowRunner.hasExperiment(options, \"bar\"));\nassertFalse(DataflowRunner.hasExperiment(options, \"baz\"));\nassertFalse(DataflowRunner.hasExperiment(options, \"ba\"));\nassertFalse(DataflowRunner.hasExperiment(options, \"BAR\"));\n}\n@Test\npublic void testWorkerHarnessContainerImage() {\nDataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);\noptions.setWorkerHarnessContainerImage(\"some-container\");\nassertThat(getContainerImageForJob(options), equalTo(\"some-container\"));\noptions.setWorkerHarnessContainerImage(\"gcr.io/IMAGE/foo\");\noptions.setExperiments(null);\noptions.setStreaming(false);\nassertThat(\ngetContainerImageForJob(options), equalTo(\"gcr.io/beam-java-batch/foo\"));\noptions.setStreaming(true);\nassertThat(\ngetContainerImageForJob(options), equalTo(\"gcr.io/beam-java-streaming/foo\"));\noptions.setExperiments(ImmutableList.of(\"experiment1\", \"beam_fn_api\"));\nassertThat(\ngetContainerImageForJob(options), equalTo(\"gcr.io/java/foo\"));\n}\n@Test\npublic void testStreamingWriteWithNoShardingReturnsNewTransform() {\nPipelineOptions options = TestPipeline.testingPipelineOptions();\noptions.as(DataflowPipelineWorkerPoolOptions.class).setMaxNumWorkers(10);\ntestStreamingWriteOverride(options, 20);\n}\n@Test\npublic void testStreamingWriteWithNoShardingReturnsNewTransformMaxWorkersUnset() {\nPipelineOptions options = TestPipeline.testingPipelineOptions();\ntestStreamingWriteOverride(options, StreamingShardedWriteFactory.DEFAULT_NUM_SHARDS);\n}\nprivate void verifyMergingStatefulParDoRejected(PipelineOptions options) throws Exception {\nPipeline p = Pipeline.create(options);\np.apply(Create.of(KV.of(13, 42)))\n.apply(Window.>into(Sessions.withGapDuration(Duration.millis(1))))\n.apply(ParDo.of(new DoFn, Void>() {\n@StateId(\"fizzle\")\nprivate final StateSpec> voidState = StateSpecs.value();\n@ProcessElement\npublic void process() {}\n}));\nthrown.expectMessage(\"merging\");\nthrown.expect(UnsupportedOperationException.class);\np.run();\n}\n@Test\npublic void testMergingStatefulRejectedInStreaming() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(true);\nverifyMergingStatefulParDoRejected(options);\n}\n@Test\npublic void testMergingStatefulRejectedInBatch() throws Exception {\nPipelineOptions options = buildPipelineOptions();\noptions.as(StreamingOptions.class).setStreaming(false);\nverifyMergingStatefulParDoRejected(options);\n}\nprivate void testStreamingWriteOverride(PipelineOptions options, int expectedNumShards) {\nTestPipeline p = 
TestPipeline.fromOptions(options);\nStreamingShardedWriteFactory factory =\nnew StreamingShardedWriteFactory<>(p.getOptions());\nWriteFiles original = WriteFiles.to(new TestSink(tmpFolder.toString()));\nPCollection objs = (PCollection) p.apply(Create.empty(VoidCoder.of()));\nAppliedPTransform, WriteFilesResult, WriteFiles>\noriginalApplication =\nAppliedPTransform.of(\n\"writefiles\",\nobjs.expand(),\nCollections., PValue>emptyMap(),\noriginal,\np);\nWriteFiles replacement =\n(WriteFiles)\nfactory.getReplacementTransform(originalApplication).getTransform();\nassertThat(replacement, not(equalTo((Object) original)));\nassertThat(replacement.getNumShards().get(), equalTo(expectedNumShards));\n}\nprivate static class TestSink extends FileBasedSink {\n@Override\npublic void validate(PipelineOptions options) {}\nTestSink(String tmpFolder) {\nsuper(\nStaticValueProvider.of(FileSystems.matchNewResource(tmpFolder, true)),\nDynamicFileDestinations.constant(\nnew FilenamePolicy() {\n@Override\npublic ResourceId windowedFilename(\nint shardNumber,\nint numShards,\nBoundedWindow window,\nPaneInfo paneInfo,\nOutputFileHints outputFileHints) {\nthrow new UnsupportedOperationException(\"should not be called\");\n}\n@Nullable\n@Override\npublic ResourceId unwindowedFilename(\nint shardNumber, int numShards, OutputFileHints outputFileHints) {\nthrow new UnsupportedOperationException(\"should not be called\");\n}\n},\nSerializableFunctions.identity()));\n}\n@Override\npublic WriteOperation createWriteOperation() {\nthrow new IllegalArgumentException(\"Should not be used\");\n}\n}\n}" + }, + { + "comment": "Could we add a requestId generator? There is a lot of hard-coded code for now", "method_body": "public void userEventTriggered(final ChannelHandlerContext ctx, final Object evt) {\nif (evt instanceof CreateSubscriptionEvent) {\nBuilder builder = CDCRequest.newBuilder();\nbuilder.setCreateSubscription(buildCreateSubscriptionRequest());\nbuilder.setRequestId(UUID.randomUUID().toString());\nctx.writeAndFlush(builder.build());\n}\n}", "target_code": "builder.setRequestId(UUID.randomUUID().toString());", "method_body_after": "public void userEventTriggered(final ChannelHandlerContext ctx, final Object evt) {\nif (evt instanceof CreateSubscriptionEvent) {\nCDCRequest request = CDCRequest.newBuilder().setCreateSubscription(buildCreateSubscriptionRequest()).setRequestId(RequestIdUtil.generateRequestId()).build();\nctx.writeAndFlush(request);\n}\n}", "context_before": "class SubscriptionRequestHandler extends ChannelInboundHandlerAdapter {\n@Override\nprivate CreateSubscriptionRequest buildCreateSubscriptionRequest() {\nreturn CreateSubscriptionRequest.newBuilder().setSubscriptionMode(SubscriptionMode.INCREMENTAL).setSubscriptionName(\"sharding_db\").setDatabase(\"sharding_db\")\n.addAllTableNames(Arrays.asList(\"t_order\", \"t_order_item\")).build();\n}\n@Override\npublic void channelRead(final ChannelHandlerContext ctx, final Object msg) {\nCDCResponse response = (CDCResponse) msg;\nif (Status.SUCCEED == response.getStatus()) {\nprocessSucceed(ctx, response);\n} else {\nlog.error(\"subscription response error {}\", msg);\n}\n}\nprivate void processSucceed(final ChannelHandlerContext ctx, final CDCResponse response) {\nif (response.hasCreateSubscriptionResult()) {\nlog.info(\"create subscription succeed, subcrption name {}\", response.getCreateSubscriptionResult().getSubscriptionName());\nBuilder builder = 
CDCRequest.newBuilder();\nbuilder.setStartSubscription(buildStartSubscriptionRequest(response.getCreateSubscriptionResult().getSubscriptionName()));\nbuilder.setRequestId(UUID.randomUUID().toString());\nctx.writeAndFlush(builder.build());\n}\n}\nprivate StartSubscriptionRequest buildStartSubscriptionRequest(final String subscriptionName) {\nreturn StartSubscriptionRequest.newBuilder().setSubscriptionName(subscriptionName).build();\n}\n}", + "context_after": "class SubscriptionRequestHandler extends ChannelInboundHandlerAdapter {\n@Override\nprivate CreateSubscriptionRequest buildCreateSubscriptionRequest() {\nTableName tableName = TableName.newBuilder().build();\nreturn CreateSubscriptionRequest.newBuilder().setSubscriptionMode(SubscriptionMode.INCREMENTAL).setSubscriptionName(\"sharding_db\").setDatabase(\"sharding_db\")\n.addTableNames(tableName).build();\n}\n@Override\npublic void channelRead(final ChannelHandlerContext ctx, final Object msg) {\nCDCResponse response = (CDCResponse) msg;\nif (Status.SUCCEED == response.getStatus()) {\nprocessSucceed(ctx, response);\n} else {\nlog.error(\"subscription response error {}\", msg);\n}\n}\nprivate void processSucceed(final ChannelHandlerContext ctx, final CDCResponse response) {\nif (response.hasCreateSubscriptionResult()) {\nlog.info(\"create subscription succeed, subcrption name {}\", response.getCreateSubscriptionResult().getSubscriptionName());\nBuilder builder = CDCRequest.newBuilder();\nbuilder.setStartSubscription(buildStartSubscriptionRequest(response.getCreateSubscriptionResult().getSubscriptionName()));\nbuilder.setRequestId(RequestIdUtil.generateRequestId());\nctx.writeAndFlush(builder.build());\n}\n}\nprivate StartSubscriptionRequest buildStartSubscriptionRequest(final String subscriptionName) {\nreturn StartSubscriptionRequest.newBuilder().setSubscriptionName(subscriptionName).build();\n}\n@Override\npublic void exceptionCaught(final ChannelHandlerContext ctx, final Throwable cause) {\nlog.error(\"subscription handler error\", cause);\n}\n}" + }, + { + "comment": "Two catch branches are identical. 
So we don't need to explicitly catch UnsupportedEncodingException here", "method_body": "private String getReturnValue(HTTPCarbonMessage response) {\nReader reader;\nfinal int bufferSize = 1024;\nfinal char[] buffer = new char[bufferSize];\nfinal StringBuilder out = new StringBuilder();\ntry {\nreader = new InputStreamReader(new HttpMessageDataStreamer(response).getInputStream(), UTF_8);\nwhile (true) {\nint size = reader.read(buffer, 0, buffer.length);\nif (size < 0) {\nbreak;\n}\nout.append(buffer, 0, size);\n}\n} catch (UnsupportedEncodingException e) {\nLOG.error(\"Error occured while reading the response value in getReturnValue\", e.getMessage());\n} catch (IOException e) {\nLOG.error(\"Error occured while reading the response value in getReturnValue\", e.getMessage());\n}\nreturn out.toString();\n}", "target_code": "} catch (IOException e) {", "method_body_after": "private String getReturnValue(HTTPCarbonMessage response) {\nReader reader;\nfinal int bufferSize = 1024;\nfinal char[] buffer = new char[bufferSize];\nfinal StringBuilder out = new StringBuilder();\ntry {\nreader = new InputStreamReader(new HttpMessageDataStreamer(response).getInputStream(), UTF_8);\nwhile (true) {\nint size = reader.read(buffer, 0, buffer.length);\nif (size < 0) {\nbreak;\n}\nout.append(buffer, 0, size);\n}\n} catch (IOException e) {\nLOG.error(\"Error occured while reading the response value in getReturnValue\", e.getMessage());\n}\nreturn out.toString();\n}", "context_before": "class RequestNativeFunctionSuccessTest {\nprivate static final Logger LOG = LoggerFactory.getLogger(RequestNativeFunctionSuccessTest.class);\nprivate CompileResult result, serviceResult;\nprivate final String requestStruct = Constants.REQUEST;\nprivate final String headerStruct = HEADER_VALUE_STRUCT;\nprivate final String protocolPackageHttp = Constants.PROTOCOL_PACKAGE_HTTP;\nprivate final String protocolPackageMime = PROTOCOL_PACKAGE_MIME;\nprivate final String protocolPackageFile = PROTOCOL_PACKAGE_FILE;\nprivate final String entityStruct = Constants.ENTITY;\nprivate final String mediaTypeStruct = MEDIA_TYPE;\nprivate String sourceFilePath = \"test-src/statements/services/nativeimpl/request/request-native-function.bal\";\n@BeforeClass\npublic void setup() {\nresult = BCompileUtil.compile(sourceFilePath);\nserviceResult = BServiceUtil.setupProgramFile(this, sourceFilePath);\n}\n@Test\npublic void testAddHeader() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nString headerName = \"header1\";\nString headerValue = \"headerValue\";\nBString key = new BString(headerName);\nBString value = new BString(headerValue);\nBValue[] inputArg = { request, key, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testAddHeader\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entityStruct = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBMap map = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);\nBRefValueArray array = (BRefValueArray) map.get(headerName);\nAssert.assertEquals(((BStruct) array.get(0)).getStringField(0), headerValue);\n}\n@Test(description = \"Test addHeader function within a service\")\npublic void testServiceAddHeader() {\nString key = \"lang\";\nString value = 
\"ballerina\";\nString path = \"/hello/addheader/\" + key + \"/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(key).asText(), value);\n}\n@Test(description = \"Test req struct add Header function\")\npublic void testStructAddHeader() {\nString value = \"ballerina\";\nString path = \"/hello/addReqHeader\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"headerValue\").asText(), value);\nAssert.assertEquals(bJson.value().get(\"paramValue\").asText(), String.valueOf(6));\n}\n@Test(description = \"Test req struct add Header function without params\")\npublic void testStructAddHeaderWithNoParam() {\nString value = \"ballerina\";\nString path = \"/hello/addReqHeaderWithoutParam\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"headerValue\").asText(), value);\nAssert.assertEquals(bJson.value().get(\"paramValue\").asText(), \"param is null\");\n}\n@Test(description = \"Test req struct add Header function\")\npublic void testAddHeaderViaBalFunction() {\nString path = \"/hello/addReqHeaderFunc\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"headerValue\").asText(), \"chamil\");\nAssert.assertEquals(bJson.value().get(\"size\").asText(), String.valueOf(3));\n}\n@Test(description = \"Test getBinaryPayload method of the request\")\npublic void testGetBinaryPayloadMethod() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString payload = \"ballerina\";\nString contentType = OCTET_STREAM;\nMimeUtil.setContentType(mediaType, entity, contentType);\nentity.setBlobField(BYTE_DATA_INDEX, payload.getBytes());\nentity.setBooleanField(IS_IN_MEMORY_INDEX, 1);\nrequest.addNativeData(MESSAGE_ENTITY, entity);\nrequest.addNativeData(IS_ENTITY_BODY_PRESENT, true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetBinaryPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(returnVals[0].stringValue(), payload);\n}\n@Test\npublic void testGetContentLength() {\nBStruct request = 
BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(\"\", Constants.HTTP_METHOD_GET);\nString payload = \"ballerina\";\ncMsg.setHeader(Constants.HTTP_CONTENT_LENGTH, String.valueOf(payload.length()));\nHttpUtil.addCarbonMsg(request, cMsg);\nHttpUtil.setHeaderValueStructType(\nBCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, headerStruct));\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHttpUtil.populateInboundRequest(request, entity, mediaType, cMsg);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetContentLength\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(payload.length(), ((BInteger) returnVals[0]).intValue());\n}\n@Test(description = \"Test GetContentLength function within a service\")\npublic void testServiceGetContentLength() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/getContentLength\";\nString jsonString = \"{\\\"\" + key + \"\\\":\\\"\" + value + \"\\\"}\";\nint length = jsonString.length();\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_POST, jsonString);\ncMsg.setHeader(Constants.HTTP_CONTENT_LENGTH, String.valueOf(length));\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), String.valueOf(length));\n}\n@Test\npublic void testGetHeader() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(\"\", Constants.HTTP_METHOD_GET);\ncMsg.setHeader(CONTENT_TYPE, APPLICATION_FORM);\nHttpUtil.setHeaderValueStructType(\nBCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, headerStruct));\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHttpUtil.populateInboundRequest(request, entity, mediaType, cMsg);\nBString key = new BString(CONTENT_TYPE);\nBValue[] inputArg = { request, key };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetHeader\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(returnVals[0].stringValue(), APPLICATION_FORM);\n}\n@Test(description = \"Test GetHeader function within a service\")\npublic void testServiceGetHeader() {\nString path = \"/hello/getHeader\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(CONTENT_TYPE, APPLICATION_FORM);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), APPLICATION_FORM);\n}\n@Test(description = \"Test 
struct Get Header operation\")\npublic void testStructGetHeader() {\nString path = \"/hello/getReqHeader\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(\"test-header\", APPLICATION_FORM);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"value\").asText(), APPLICATION_FORM);\n}\n@Test(description = \"Test GetHeaders function within a function\")\npublic void testGetHeaders() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(\"\", Constants.HTTP_METHOD_GET);\ncMsg.setHeader(\"test-header\", APPLICATION_FORM + \",\" + TEXT_PLAIN + \";b=5\");\nHttpUtil.setHeaderValueStructType(\nBCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, headerStruct));\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHttpUtil.populateInboundRequest(request, entity, mediaType, cMsg);\nBString key = new BString(\"test-header\");\nBValue[] inputArg = { request, key };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetHeaders\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(returnVals[0].stringValue(), TEXT_PLAIN);\n}\n@Test(description = \"Test GetHeaders function within a service\")\npublic void testServiceGetHeaders() {\nString path = \"/hello/getHeaders\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(\"test-header\", APPLICATION_FORM + \",\" + TEXT_PLAIN + \";b=5\");\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"value\").asText(), TEXT_PLAIN);\nAssert.assertEquals(bJson.value().get(\"paramValue\").asText(), String.valueOf(5));\n}\n@Test(description = \"Test GetHeaders function with values of struct\")\npublic void testStructGetHeaders() {\nString path = \"/hello/getReqHeaders\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(\"test-header\", APPLICATION_FORM);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"value\").asText(), \"transport\");\n}\n@Test\npublic void testGetJsonPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString payload = \"{'code':'123'}\";\ncMsg.setHeader(CONTENT_TYPE, APPLICATION_JSON);\nHttpUtil.addCarbonMsg(request, cMsg);\nMimeUtil.setContentType(mediaType, entity, 
APPLICATION_JSON);\nentity.setRefField(JSON_DATA_INDEX, new BJSON(payload));\nentity.setBooleanField(IS_IN_MEMORY_INDEX, 1);\nrequest.addNativeData(MESSAGE_ENTITY, entity);\nrequest.addNativeData(IS_ENTITY_BODY_PRESENT, true);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetJsonPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(((BJSON) returnVals[0]).value().get(\"code\").asText(), \"123\");\n}\n@Test(description = \"Test GetJsonPayload function within a service\")\npublic void testServiceGetJsonPayload() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/getJsonPayload\";\nString jsonString = \"{\\\"\" + key + \"\\\":\\\"\" + value + \"\\\"}\";\nList
<Header> headers = new ArrayList<>
();\nheaders.add(new Header(\"Content-Type\", APPLICATION_JSON));\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_POST, headers, jsonString);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(new BJSON(getReturnValue(response)).value().stringValue(), value);\n}\n@Test\npublic void testGetProperty() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString propertyName = \"wso2\";\nString propertyValue = \"Ballerina\";\ncMsg.setProperty(propertyName, propertyValue);\nHttpUtil.addCarbonMsg(request, cMsg);\nBString name = new BString(propertyName);\nBValue[] inputArg = { request, name };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetProperty\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(returnVals[0].stringValue(), propertyValue);\n}\n@Test(description = \"Test GetProperty function within a service\")\npublic void testServiceGetProperty() {\nString propertyName = \"wso2\";\nString propertyValue = \"Ballerina\";\nString path = \"/hello/GetProperty\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setProperty(propertyName, propertyValue);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), propertyValue);\n}\n@Test\npublic void testGetStringPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString payload = \"ballerina\";\nString contentType = TEXT_PLAIN;\nMimeUtil.setContentType(mediaType, entity, contentType);\nentity.setStringField(TEXT_DATA_INDEX, payload);\nentity.setBooleanField(IS_IN_MEMORY_INDEX, 1);\nrequest.addNativeData(MESSAGE_ENTITY, entity);\nrequest.addNativeData(IS_ENTITY_BODY_PRESENT, true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetStringPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(returnVals[0].stringValue(), payload);\n}\n@Test(description = \"Test GetStringPayload function within a service\")\npublic void testServiceGetStringPayload() {\nString value = \"ballerina\";\nString path = \"/hello/GetStringPayload\";\nList
<Header> headers = new ArrayList<>
();\nheaders.add(new Header(\"Content-Type\", TEXT_PLAIN));\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_POST, headers, value);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(getReturnValue(response), value);\n}\n@Test\npublic void testGetXmlPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString payload = \"ballerina\";\nString contentType = APPLICATION_XML;\nMimeUtil.setContentType(mediaType, entity, contentType);\nentity.setRefField(XML_DATA_INDEX, new BXMLItem(payload));\nentity.setBooleanField(IS_IN_MEMORY_INDEX, 1);\nrequest.addNativeData(MESSAGE_ENTITY, entity);\nrequest.addNativeData(IS_ENTITY_BODY_PRESENT, true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetXmlPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(((BXMLItem) returnVals[0]).getTextValue().stringValue(), \"ballerina\");\n}\n@Test(description = \"Test GetXmlPayload function within a service\")\npublic void testServiceGetXmlPayload() {\nString value = \"ballerina\";\nString path = \"/hello/GetXmlPayload\";\nString bxmlItemString = \"ballerina\";\nList
<Header> headers = new ArrayList<>
();\nheaders.add(new Header(\"Content-Type\", APPLICATION_XML));\nHTTPTestRequest cMsg = MessageUtils\n.generateHTTPMessage(path, Constants.HTTP_METHOD_POST, headers, bxmlItemString);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(getReturnValue(response), value);\n}\n@Test\npublic void testRemoveHeader() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString expect = \"Expect\";\ncMsg.setHeader(expect, \"100-continue\");\nHttpUtil.addCarbonMsg(request, cMsg);\nBString key = new BString(expect);\nBValue[] inputArg = { request, key };\nBValue[] returnVals = BRunUtil.invoke(result, \"testRemoveHeader\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entityStruct = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBMap map = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);\nAssert.assertNull(map.get(\"100-continue\"));\n}\n@Test(description = \"Test RemoveHeader function within a service\")\npublic void testServiceRemoveHeader() {\nString path = \"/hello/RemoveHeader\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(CONTENT_TYPE, APPLICATION_FORM);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), \"value is null\");\n}\n@Test\npublic void testRemoveAllHeaders() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString expect = \"Expect\";\nString range = \"Range\";\ncMsg.setHeader(expect, \"100-continue\");\ncMsg.setHeader(range, \"bytes=500-999\");\nHttpUtil.addCarbonMsg(request, cMsg);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testRemoveAllHeaders\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entityStruct = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBMap map = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);\nAssert.assertNull(map.get(expect));\nAssert.assertNull(map.get(range));\n}\n@Test(description = \"Test RemoveAllHeaders function within a service\")\npublic void testServiceRemoveAllHeaders() {\nString expect = \"Expect\";\nString range = \"Range\";\nString path = \"/hello/RemoveAllHeaders\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(expect, \"100-continue\");\ncMsg.setHeader(range, \"bytes=500-999\");\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), \"value is null\");\n}\n@Test\npublic void testSetHeader() {\nBStruct request = 
BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nString range = \"Range\";\nString rangeValue = \"bytes=500-999\";\nBString key = new BString(range);\nBString value = new BString(rangeValue);\nBValue[] inputArg = { request, key, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetHeader\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entityStruct = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBMap map = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);\nBRefValueArray array = (BRefValueArray) map.get(range);\nAssert.assertEquals(((BStruct) array.get(0)).getStringField(0), rangeValue);\n}\n@Test\npublic void testSetHeaderStruct() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(\"\", Constants.HTTP_METHOD_GET);\nHttpUtil.addCarbonMsg(request, cMsg);\nHttpUtil.setHeaderValueStructType(\nBCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct));\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHttpUtil.populateInboundRequest(request, entity, mediaType, cMsg);\nString range = \"Range\";\nString rangeValue = \"bytes=500-999\";\nBString key = new BString(range);\nBString value = new BString(rangeValue);\nBValue[] inputArg = { request, key, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetHeaderStruct\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entityStruct = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBMap map = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);\nBRefValueArray array = (BRefValueArray) map.get(range);\nAssert.assertEquals(((BStruct) array.get(0)).getStringField(0), rangeValue);\n}\n@Test(description = \"Test SetHeader function within a service\")\npublic void testServiceSetHeader() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/setHeader/\" + key + \"/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), value);\n}\n@Test(description = \"Test Setting Header in struct within a service\")\npublic void testServiceSetHeaderStruct() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/setHeaderStruct/\" + key + \"/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new 
HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), value);\n}\n@Test\npublic void testSetJsonPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage requestMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, requestMsg);\nBJSON value = new BJSON(\"{'name':'wso2'}\");\nBValue[] inputArg = { request, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetJsonPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entity = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBJSON bJson = (BJSON) entity.getRefField(JSON_DATA_INDEX);\nAssert.assertEquals(bJson.value().get(\"name\").asText(), \"wso2\", \"Payload is not set properly\");\n}\n@Test(description = \"Test SetJsonPayload function within a service\")\npublic void testServiceSetJsonPayload() {\nString value = \"ballerina\";\nString path = \"/hello/SetJsonPayload/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"lang\").asText(), value);\n}\n@Test\npublic void testSetProperty() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nString propertyName = \"wso2\";\nString propertyValue = \"Ballerina\";\nBString name = new BString(propertyName);\nBString value = new BString(propertyValue);\nBValue[] inputArg = { request, name, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetProperty\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nHTTPCarbonMessage response = HttpUtil.getCarbonMsg((BStruct) returnVals[0], null);\nAssert.assertEquals(response.getProperty(propertyName), propertyValue);\n}\n@Test(description = \"Test SetProperty function within a service\")\npublic void testServiceSetProperty() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/SetProperty/\" + key + \"/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), value);\n}\n@Test\npublic void testSetStringPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBString value = new BString(\"Ballerina\");\nBValue[] inputArg = { request, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetStringPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || 
returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entity = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nString stringValue = entity.getStringField(TEXT_DATA_INDEX);\nAssert.assertEquals(stringValue, \"Ballerina\", \"Payload is not set properly\");\n}\n@Test(description = \"Test SetStringPayload function within a service\")\npublic void testServiceSetStringPayload() {\nString value = \"ballerina\";\nString path = \"/hello/SetStringPayload/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"lang\").asText(), value);\n}\n@Test\npublic void testSetXmlPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBXMLItem value = new BXMLItem(\"Ballerina\");\nBValue[] inputArg = { request, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetXmlPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entity = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBXMLItem xmlValue = (BXMLItem) entity.getRefField(XML_DATA_INDEX);\nAssert.assertEquals(xmlValue.getTextValue().stringValue(), \"Ballerina\", \"Payload is not set properly\");\n}\n@Test(description = \"Test SetXmlPayload function within a service\")\npublic void testServiceSetXmlPayload() {\nString value = \"Ballerina\";\nString path = \"/hello/SetXmlPayload/\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"lang\").asText(), value);\n}\n@Test\npublic void testGetMethod() {\nString path = \"/hello/11\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(\nStringUtils.getStringFromInputStream(new HttpMessageDataStreamer(response).getInputStream()),\nConstants.HTTP_METHOD_GET);\n}\n@Test\npublic void testGetRequestURL() {\nString path = \"/hello/12\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(\nStringUtils.getStringFromInputStream(new HttpMessageDataStreamer(response).getInputStream()), path);\n}\n@Test(description = \"Test setBinaryPayload() function within a service\")\npublic void testServiceSetBinaryPayload() {\nString value = \"Ballerina\";\nString path = \"/hello/SetBinaryPayload/\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = 
Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"lang\").asText(), value);\n}\n@Test(description = \"Test getBinaryPayload() function within a service\")\npublic void testServiceGetBinaryPayload() {\nString payload = \"ballerina\";\nString path = \"/hello/GetBinaryPayload\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_POST, payload);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(\nStringUtils.getStringFromInputStream(new HttpMessageDataStreamer(response).getInputStream()), payload);\n}\n@Test(description = \"Test setBinaryPayload() function\")\npublic void testSetBinaryPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBBlob value = new BBlob(\"Ballerina\".getBytes());\nBValue[] inputArg = { request, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetBinaryPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entity = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBlobDataSource blobDataSource = new BlobDataSource(entity.getBlobField(BYTE_DATA_INDEX));\nAssert.assertEquals(blobDataSource.getMessageAsString(), \"Ballerina\", \"Payload is not set properly\");\n}\n@Test (description = \"Test setEntityBody() function\")\npublic void testSetEntityBody() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage requestMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, requestMsg);\ntry {\nFile file = File.createTempFile(\"test\", \".json\");\nfile.deleteOnExit();\nBufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(file));\nbufferedWriter.write(\"{'name':'wso2'}\");\nbufferedWriter.close();\nBStruct fileStruct = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageFile, FILE);\nfileStruct.setStringField(0, file.getAbsolutePath());\nBValue[] inputArg = { request, fileStruct, new BString(APPLICATION_JSON) };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetEntityBody\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entity = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBStruct returnFileStruct = (BStruct) entity.getRefField(OVERFLOW_DATA_INDEX);\nString returnJsonValue = new String (Files.readAllBytes(Paths.get(returnFileStruct.getStringField(0))),\nUTF_8);\nBJSON bJson = new BJSON(returnJsonValue);\nAssert.assertEquals(bJson.value().get(\"name\").asText(), \"wso2\", \"Payload is not set properly\");\n} catch (IOException e) {\nLOG.error(\"Error occured while creating a temporary file in testSetEntityBody\", e.getMessage());\n}\n}\n/**\n* Get the response value from input stream.\n*\n* @param response carbon response\n* @return return value from input stream as a string\n*/\n}", + "context_after": "class 
RequestNativeFunctionSuccessTest {\nprivate static final Logger LOG = LoggerFactory.getLogger(RequestNativeFunctionSuccessTest.class);\nprivate CompileResult result, serviceResult;\nprivate final String requestStruct = Constants.REQUEST;\nprivate final String headerStruct = HEADER_VALUE_STRUCT;\nprivate final String protocolPackageHttp = Constants.PROTOCOL_PACKAGE_HTTP;\nprivate final String protocolPackageMime = PROTOCOL_PACKAGE_MIME;\nprivate final String protocolPackageFile = PROTOCOL_PACKAGE_FILE;\nprivate final String entityStruct = Constants.ENTITY;\nprivate final String mediaTypeStruct = MEDIA_TYPE;\nprivate String sourceFilePath = \"test-src/statements/services/nativeimpl/request/request-native-function.bal\";\n@BeforeClass\npublic void setup() {\nresult = BCompileUtil.compile(sourceFilePath);\nserviceResult = BServiceUtil.setupProgramFile(this, sourceFilePath);\n}\n@Test\npublic void testAddHeader() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nString headerName = \"header1\";\nString headerValue = \"headerValue\";\nBString key = new BString(headerName);\nBString value = new BString(headerValue);\nBValue[] inputArg = { request, key, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testAddHeader\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entityStruct = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBMap map = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);\nBRefValueArray array = (BRefValueArray) map.get(headerName);\nAssert.assertEquals(((BStruct) array.get(0)).getStringField(0), headerValue);\n}\n@Test(description = \"Test addHeader function within a service\")\npublic void testServiceAddHeader() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/addheader/\" + key + \"/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(key).asText(), value);\n}\n@Test(description = \"Test req struct add Header function\")\npublic void testStructAddHeader() {\nString value = \"ballerina\";\nString path = \"/hello/addReqHeader\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"headerValue\").asText(), value);\nAssert.assertEquals(bJson.value().get(\"paramValue\").asText(), String.valueOf(6));\n}\n@Test(description = \"Test req struct add Header function without params\")\npublic void testStructAddHeaderWithNoParam() {\nString value = \"ballerina\";\nString path = \"/hello/addReqHeaderWithoutParam\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new 
BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"headerValue\").asText(), value);\nAssert.assertEquals(bJson.value().get(\"paramValue\").asText(), \"param is null\");\n}\n@Test(description = \"Test req struct add Header function\")\npublic void testAddHeaderViaBalFunction() {\nString path = \"/hello/addReqHeaderFunc\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"headerValue\").asText(), \"chamil\");\nAssert.assertEquals(bJson.value().get(\"size\").asText(), String.valueOf(3));\n}\n@Test(description = \"Test getBinaryPayload method of the request\")\npublic void testGetBinaryPayloadMethod() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString payload = \"ballerina\";\nMimeUtil.setContentType(mediaType, entity, OCTET_STREAM);\nentity.setBlobField(BYTE_DATA_INDEX, payload.getBytes());\nentity.setBooleanField(IS_IN_MEMORY_INDEX, 1);\nrequest.addNativeData(MESSAGE_ENTITY, entity);\nrequest.addNativeData(IS_ENTITY_BODY_PRESENT, true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetBinaryPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(returnVals[0].stringValue(), payload);\n}\n@Test\npublic void testGetContentLength() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(\"\", Constants.HTTP_METHOD_GET);\nString payload = \"ballerina\";\ncMsg.setHeader(Constants.HTTP_CONTENT_LENGTH, String.valueOf(payload.length()));\nHttpUtil.addCarbonMsg(request, cMsg);\nHttpUtil.setHeaderValueStructType(\nBCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, headerStruct));\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHttpUtil.populateInboundRequest(request, entity, mediaType, cMsg);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetContentLength\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(payload.length(), ((BInteger) returnVals[0]).intValue());\n}\n@Test(description = \"Test GetContentLength function within a service\")\npublic void testServiceGetContentLength() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/getContentLength\";\nString jsonString = \"{\\\"\" + key + \"\\\":\\\"\" + value + \"\\\"}\";\nint length = jsonString.length();\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_POST, 
jsonString);\ncMsg.setHeader(Constants.HTTP_CONTENT_LENGTH, String.valueOf(length));\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), String.valueOf(length));\n}\n@Test\npublic void testGetHeader() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(\"\", Constants.HTTP_METHOD_GET);\ncMsg.setHeader(CONTENT_TYPE, APPLICATION_FORM);\nHttpUtil.setHeaderValueStructType(\nBCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, headerStruct));\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHttpUtil.populateInboundRequest(request, entity, mediaType, cMsg);\nBString key = new BString(CONTENT_TYPE);\nBValue[] inputArg = { request, key };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetHeader\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(returnVals[0].stringValue(), APPLICATION_FORM);\n}\n@Test(description = \"Test GetHeader function within a service\")\npublic void testServiceGetHeader() {\nString path = \"/hello/getHeader\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(CONTENT_TYPE, APPLICATION_FORM);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), APPLICATION_FORM);\n}\n@Test(description = \"Test struct Get Header operation\")\npublic void testStructGetHeader() {\nString path = \"/hello/getReqHeader\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(\"test-header\", APPLICATION_FORM);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"value\").asText(), APPLICATION_FORM);\n}\n@Test(description = \"Test GetHeaders function within a function\")\npublic void testGetHeaders() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(\"\", Constants.HTTP_METHOD_GET);\ncMsg.setHeader(\"test-header\", APPLICATION_FORM + \",\" + TEXT_PLAIN + \";b=5\");\nHttpUtil.setHeaderValueStructType(\nBCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, headerStruct));\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHttpUtil.populateInboundRequest(request, entity, mediaType, cMsg);\nBString key = new BString(\"test-header\");\nBValue[] inputArg = { request, key };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetHeaders\", 
inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(returnVals[0].stringValue(), TEXT_PLAIN);\n}\n@Test(description = \"Test GetHeaders function within a service\")\npublic void testServiceGetHeaders() {\nString path = \"/hello/getHeaders\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(\"test-header\", APPLICATION_FORM + \",\" + TEXT_PLAIN + \";b=5\");\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"value\").asText(), TEXT_PLAIN);\nAssert.assertEquals(bJson.value().get(\"paramValue\").asText(), String.valueOf(5));\n}\n@Test(description = \"Test GetHeaders function with values of struct\")\npublic void testStructGetHeaders() {\nString path = \"/hello/getReqHeaders\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(\"test-header\", APPLICATION_FORM);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(getReturnValue(response));\nAssert.assertEquals(bJson.value().get(\"value\").asText(), \"transport\");\n}\n@Test\npublic void testGetJsonPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString payload = \"{'code':'123'}\";\ncMsg.setHeader(CONTENT_TYPE, APPLICATION_JSON);\nHttpUtil.addCarbonMsg(request, cMsg);\nMimeUtil.setContentType(mediaType, entity, APPLICATION_JSON);\nentity.setRefField(JSON_DATA_INDEX, new BJSON(payload));\nentity.setBooleanField(IS_IN_MEMORY_INDEX, 1);\nrequest.addNativeData(MESSAGE_ENTITY, entity);\nrequest.addNativeData(IS_ENTITY_BODY_PRESENT, true);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetJsonPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(((BJSON) returnVals[0]).value().get(\"code\").asText(), \"123\");\n}\n@Test(description = \"Test GetJsonPayload function within a service\")\npublic void testServiceGetJsonPayload() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/getJsonPayload\";\nString jsonString = \"{\\\"\" + key + \"\\\":\\\"\" + value + \"\\\"}\";\nList
<Header> headers = new ArrayList<>
();\nheaders.add(new Header(\"Content-Type\", APPLICATION_JSON));\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_POST, headers, jsonString);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(new BJSON(getReturnValue(response)).value().stringValue(), value);\n}\n@Test\npublic void testGetProperty() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString propertyName = \"wso2\";\nString propertyValue = \"Ballerina\";\ncMsg.setProperty(propertyName, propertyValue);\nHttpUtil.addCarbonMsg(request, cMsg);\nBString name = new BString(propertyName);\nBValue[] inputArg = { request, name };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetProperty\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(returnVals[0].stringValue(), propertyValue);\n}\n@Test(description = \"Test GetProperty function within a service\")\npublic void testServiceGetProperty() {\nString propertyName = \"wso2\";\nString propertyValue = \"Ballerina\";\nString path = \"/hello/GetProperty\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setProperty(propertyName, propertyValue);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), propertyValue);\n}\n@Test\npublic void testGetStringPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString payload = \"ballerina\";\nMimeUtil.setContentType(mediaType, entity, TEXT_PLAIN);\nentity.setStringField(TEXT_DATA_INDEX, payload);\nentity.setBooleanField(IS_IN_MEMORY_INDEX, 1);\nrequest.addNativeData(MESSAGE_ENTITY, entity);\nrequest.addNativeData(IS_ENTITY_BODY_PRESENT, true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetStringPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(returnVals[0].stringValue(), payload);\n}\n@Test(description = \"Test GetStringPayload function within a service\")\npublic void testServiceGetStringPayload() {\nString value = \"ballerina\";\nString path = \"/hello/GetStringPayload\";\nList
<Header> headers = new ArrayList<>
();\nheaders.add(new Header(\"Content-Type\", TEXT_PLAIN));\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_POST, headers, value);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(getReturnValue(response), value);\n}\n@Test\npublic void testGetXmlPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString payload = \"ballerina\";\nMimeUtil.setContentType(mediaType, entity, APPLICATION_XML);\nentity.setRefField(XML_DATA_INDEX, new BXMLItem(payload));\nentity.setBooleanField(IS_IN_MEMORY_INDEX, 1);\nrequest.addNativeData(MESSAGE_ENTITY, entity);\nrequest.addNativeData(IS_ENTITY_BODY_PRESENT, true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testGetXmlPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertEquals(((BXMLItem) returnVals[0]).getTextValue().stringValue(), \"ballerina\");\n}\n@Test(description = \"Test GetXmlPayload function within a service\")\npublic void testServiceGetXmlPayload() {\nString value = \"ballerina\";\nString path = \"/hello/GetXmlPayload\";\nString bxmlItemString = \"ballerina\";\nList
<Header> headers = new ArrayList<>
();\nheaders.add(new Header(\"Content-Type\", APPLICATION_XML));\nHTTPTestRequest cMsg = MessageUtils\n.generateHTTPMessage(path, Constants.HTTP_METHOD_POST, headers, bxmlItemString);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(getReturnValue(response), value);\n}\n@Test\npublic void testRemoveHeader() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString expect = \"Expect\";\ncMsg.setHeader(expect, \"100-continue\");\nHttpUtil.addCarbonMsg(request, cMsg);\nBString key = new BString(expect);\nBValue[] inputArg = { request, key };\nBValue[] returnVals = BRunUtil.invoke(result, \"testRemoveHeader\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entityStruct = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBMap map = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);\nAssert.assertNull(map.get(\"100-continue\"));\n}\n@Test(description = \"Test RemoveHeader function within a service\")\npublic void testServiceRemoveHeader() {\nString path = \"/hello/RemoveHeader\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(CONTENT_TYPE, APPLICATION_FORM);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), \"value is null\");\n}\n@Test\npublic void testRemoveAllHeaders() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nString expect = \"Expect\";\nString range = \"Range\";\ncMsg.setHeader(expect, \"100-continue\");\ncMsg.setHeader(range, \"bytes=500-999\");\nHttpUtil.addCarbonMsg(request, cMsg);\nBValue[] inputArg = { request };\nBValue[] returnVals = BRunUtil.invoke(result, \"testRemoveAllHeaders\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entityStruct = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBMap map = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);\nAssert.assertNull(map.get(expect));\nAssert.assertNull(map.get(range));\n}\n@Test(description = \"Test RemoveAllHeaders function within a service\")\npublic void testServiceRemoveAllHeaders() {\nString expect = \"Expect\";\nString range = \"Range\";\nString path = \"/hello/RemoveAllHeaders\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\ncMsg.setHeader(expect, \"100-continue\");\ncMsg.setHeader(range, \"bytes=500-999\");\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), \"value is null\");\n}\n@Test\npublic void testSetHeader() {\nBStruct request = 
BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nString range = \"Range\";\nString rangeValue = \"bytes=500-999\";\nBString key = new BString(range);\nBString value = new BString(rangeValue);\nBValue[] inputArg = { request, key, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetHeader\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entityStruct = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBMap map = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);\nBRefValueArray array = (BRefValueArray) map.get(range);\nAssert.assertEquals(((BStruct) array.get(0)).getStringField(0), rangeValue);\n}\n@Test\npublic void testSetHeaderStruct() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(\"\", Constants.HTTP_METHOD_GET);\nHttpUtil.addCarbonMsg(request, cMsg);\nHttpUtil.setHeaderValueStructType(\nBCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct));\nBStruct entity = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, entityStruct);\nBStruct mediaType = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageMime, mediaTypeStruct);\nHttpUtil.populateInboundRequest(request, entity, mediaType, cMsg);\nString range = \"Range\";\nString rangeValue = \"bytes=500-999\";\nBString key = new BString(range);\nBString value = new BString(rangeValue);\nBValue[] inputArg = { request, key, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetHeaderStruct\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entityStruct = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBMap map = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);\nBRefValueArray array = (BRefValueArray) map.get(range);\nAssert.assertEquals(((BStruct) array.get(0)).getStringField(0), rangeValue);\n}\n@Test(description = \"Test SetHeader function within a service\")\npublic void testServiceSetHeader() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/setHeader/\" + key + \"/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), value);\n}\n@Test(description = \"Test Setting Header in struct within a service\")\npublic void testServiceSetHeaderStruct() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/setHeaderStruct/\" + key + \"/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new 
HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), value);\n}\n@Test\npublic void testSetJsonPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage requestMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, requestMsg);\nBJSON value = new BJSON(\"{'name':'wso2'}\");\nBValue[] inputArg = { request, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetJsonPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entity = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBJSON bJson = (BJSON) entity.getRefField(JSON_DATA_INDEX);\nAssert.assertEquals(bJson.value().get(\"name\").asText(), \"wso2\", \"Payload is not set properly\");\n}\n@Test(description = \"Test SetJsonPayload function within a service\")\npublic void testServiceSetJsonPayload() {\nString value = \"ballerina\";\nString path = \"/hello/SetJsonPayload/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"lang\").asText(), value);\n}\n@Test\npublic void testSetProperty() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nString propertyName = \"wso2\";\nString propertyValue = \"Ballerina\";\nBString name = new BString(propertyName);\nBString value = new BString(propertyValue);\nBValue[] inputArg = { request, name, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetProperty\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nHTTPCarbonMessage response = HttpUtil.getCarbonMsg((BStruct) returnVals[0], null);\nAssert.assertEquals(response.getProperty(propertyName), propertyValue);\n}\n@Test(description = \"Test SetProperty function within a service\")\npublic void testServiceSetProperty() {\nString key = \"lang\";\nString value = \"ballerina\";\nString path = \"/hello/SetProperty/\" + key + \"/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"value\").asText(), value);\n}\n@Test\npublic void testSetStringPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBString value = new BString(\"Ballerina\");\nBValue[] inputArg = { request, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetStringPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || 
returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entity = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nString stringValue = entity.getStringField(TEXT_DATA_INDEX);\nAssert.assertEquals(stringValue, \"Ballerina\", \"Payload is not set properly\");\n}\n@Test(description = \"Test SetStringPayload function within a service\")\npublic void testServiceSetStringPayload() {\nString value = \"ballerina\";\nString path = \"/hello/SetStringPayload/\" + value;\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"lang\").asText(), value);\n}\n@Test\npublic void testSetXmlPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBXMLItem value = new BXMLItem(\"Ballerina\");\nBValue[] inputArg = { request, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetXmlPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entity = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBXMLItem xmlValue = (BXMLItem) entity.getRefField(XML_DATA_INDEX);\nAssert.assertEquals(xmlValue.getTextValue().stringValue(), \"Ballerina\", \"Payload is not set properly\");\n}\n@Test(description = \"Test SetXmlPayload function within a service\")\npublic void testServiceSetXmlPayload() {\nString value = \"Ballerina\";\nString path = \"/hello/SetXmlPayload/\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"lang\").asText(), value);\n}\n@Test\npublic void testGetMethod() {\nString path = \"/hello/11\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(\nStringUtils.getStringFromInputStream(new HttpMessageDataStreamer(response).getInputStream()),\nConstants.HTTP_METHOD_GET);\n}\n@Test\npublic void testGetRequestURL() {\nString path = \"/hello/12\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(\nStringUtils.getStringFromInputStream(new HttpMessageDataStreamer(response).getInputStream()), path);\n}\n@Test(description = \"Test setBinaryPayload() function within a service\")\npublic void testServiceSetBinaryPayload() {\nString value = \"Ballerina\";\nString path = \"/hello/SetBinaryPayload/\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_GET);\nHTTPCarbonMessage response = 
Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nBJSON bJson = new BJSON(new HttpMessageDataStreamer(response).getInputStream());\nAssert.assertEquals(bJson.value().get(\"lang\").asText(), value);\n}\n@Test(description = \"Test getBinaryPayload() function within a service\")\npublic void testServiceGetBinaryPayload() {\nString payload = \"ballerina\";\nString path = \"/hello/GetBinaryPayload\";\nHTTPTestRequest cMsg = MessageUtils.generateHTTPMessage(path, Constants.HTTP_METHOD_POST, payload);\nHTTPCarbonMessage response = Services.invokeNew(serviceResult, cMsg);\nAssert.assertNotNull(response, \"Response message not found\");\nAssert.assertEquals(\nStringUtils.getStringFromInputStream(new HttpMessageDataStreamer(response).getInputStream()), payload);\n}\n@Test(description = \"Test setBinaryPayload() function\")\npublic void testSetBinaryPayload() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage cMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, cMsg);\nBBlob value = new BBlob(\"Ballerina\".getBytes());\nBValue[] inputArg = { request, value };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetBinaryPayload\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entity = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBlobDataSource blobDataSource = new BlobDataSource(entity.getBlobField(BYTE_DATA_INDEX));\nAssert.assertEquals(blobDataSource.getMessageAsString(), \"Ballerina\", \"Payload is not set properly\");\n}\n@Test (description = \"Test setEntityBody() function\")\npublic void testSetEntityBody() {\nBStruct request = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageHttp, requestStruct);\nHTTPCarbonMessage requestMsg = HttpUtil.createHttpCarbonMessage(true);\nHttpUtil.addCarbonMsg(request, requestMsg);\ntry {\nFile file = File.createTempFile(\"test\", \".json\");\nfile.deleteOnExit();\nBufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(file));\nbufferedWriter.write(\"{'name':'wso2'}\");\nbufferedWriter.close();\nBStruct fileStruct = BCompileUtil.createAndGetStruct(result.getProgFile(), protocolPackageFile, FILE);\nfileStruct.setStringField(0, file.getAbsolutePath());\nBValue[] inputArg = { request, fileStruct, new BString(APPLICATION_JSON) };\nBValue[] returnVals = BRunUtil.invoke(result, \"testSetEntityBody\", inputArg);\nAssert.assertFalse(returnVals == null || returnVals.length == 0 || returnVals[0] == null,\n\"Invalid Return Values.\");\nAssert.assertTrue(returnVals[0] instanceof BStruct);\nBStruct entity = (BStruct) ((BStruct) returnVals[0]).getNativeData(MESSAGE_ENTITY);\nBStruct returnFileStruct = (BStruct) entity.getRefField(OVERFLOW_DATA_INDEX);\nString returnJsonValue = new String (Files.readAllBytes(Paths.get(returnFileStruct.getStringField(0))),\nUTF_8);\nBJSON bJson = new BJSON(returnJsonValue);\nAssert.assertEquals(bJson.value().get(\"name\").asText(), \"wso2\", \"Payload is not set properly\");\n} catch (IOException e) {\nLOG.error(\"Error occured while creating a temporary file in testSetEntityBody\", e.getMessage());\n}\n}\n/**\n* Get the response value from input stream.\n*\n* @param response carbon response\n* @return return value from input stream as a string\n*/\n}" + }, + { + "comment": "Shall we 
have some tests for both values with same type, in order to cover these lines?", + "method_body": "public static boolean isReferenceEqual(Object lhsValue, Object rhsValue) {\nif (lhsValue == rhsValue) {\nreturn true;\n}\nif (lhsValue == null || rhsValue == null) {\nreturn false;\n}\nType lhsType = getType(lhsValue);\nType rhsType = getType(rhsValue);\nswitch(lhsType.getTag()) {\ncase TypeTags.INT_TAG:\nif (rhsType.getTag() != TypeTags.BYTE_TAG || rhsType.getTag() != TypeTags.INT_TAG) {\nreturn false;\n}\nreturn lhsValue.equals(((Number) rhsValue).longValue());\ncase TypeTags.BYTE_TAG:\nif (rhsType.getTag() != TypeTags.BYTE_TAG || rhsType.getTag() != TypeTags.INT_TAG) {\nreturn false;\n}\nreturn lhsValue.equals(((Number) rhsValue).byteValue());\ncase TypeTags.FLOAT_TAG:\nif (rhsType.getTag() != TypeTags.FLOAT_TAG) {\nreturn false;\n}\nreturn lhsValue.equals(((Number) rhsValue).doubleValue());\ncase TypeTags.DECIMAL_TAG:\nif (rhsType.getTag() != TypeTags.DECIMAL_TAG) {\nreturn false;\n}\nreturn checkDecimalExactEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);\ncase TypeTags.BOOLEAN_TAG:\ncase TypeTags.STRING_TAG:\nreturn lhsValue.equals(rhsValue);\n}\nif (TypeTags.isXMLTypeTag(lhsType.getTag()) && TypeTags.isXMLTypeTag(rhsType.getTag())) {\nreturn isXMLValueRefEqual((XmlValue) lhsValue, (XmlValue) rhsValue);\n}\nif (isHandleType(lhsType) && isHandleType(rhsType)) {\nreturn isHandleValueRefEqual(lhsValue, rhsValue);\n}\nreturn false;\n}", + "target_code": "return lhsValue.equals(((Number) rhsValue).longValue());", + "method_body_after": "public static boolean isReferenceEqual(Object lhsValue, Object rhsValue) {\nif (lhsValue == rhsValue) {\nreturn true;\n}\nif (lhsValue == null || rhsValue == null) {\nreturn false;\n}\nType lhsType = getType(lhsValue);\nType rhsType = getType(rhsValue);\nswitch (lhsType.getTag()) {\ncase TypeTags.FLOAT_TAG:\nif (rhsType.getTag() != TypeTags.FLOAT_TAG) {\nreturn false;\n}\nreturn lhsValue.equals(((Number) rhsValue).doubleValue());\ncase TypeTags.DECIMAL_TAG:\nif (rhsType.getTag() != TypeTags.DECIMAL_TAG) {\nreturn false;\n}\nreturn checkDecimalExactEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);\ncase TypeTags.INT_TAG:\ncase TypeTags.BYTE_TAG:\ncase TypeTags.BOOLEAN_TAG:\ncase TypeTags.STRING_TAG:\nreturn isEqual(lhsValue, rhsValue);\ncase TypeTags.XML_TAG:\ncase TypeTags.XML_COMMENT_TAG:\ncase TypeTags.XML_ELEMENT_TAG:\ncase TypeTags.XML_PI_TAG:\ncase TypeTags.XML_TEXT_TAG:\nif (!TypeTags.isXMLTypeTag(rhsType.getTag())) {\nreturn false;\n}\nreturn isXMLValueRefEqual((XmlValue) lhsValue, (XmlValue) rhsValue);\ncase TypeTags.HANDLE_TAG:\nif (rhsType.getTag() != TypeTags.HANDLE_TAG) {\nreturn false;\n}\nreturn isHandleValueRefEqual(lhsValue, rhsValue);\n}\nreturn false;\n}", + "context_before": "class TypeChecker {\npublic static Object checkCast(Object sourceVal, Type targetType) {\nif (checkIsType(sourceVal, targetType)) {\nreturn sourceVal;\n}\nType sourceType = getType(sourceVal);\nif (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() <= TypeTags.BOOLEAN_TAG) {\nreturn TypeConverter.castValues(targetType, sourceVal);\n}\nif (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() == TypeTags.UNION_TAG) {\nfor (Type memberType : ((BUnionType) targetType).getMemberTypes()) {\ntry {\nreturn TypeConverter.castValues(memberType, sourceVal);\n} catch (Exception e) {\n}\n}\n}\nthrow ErrorUtils.createTypeCastError(sourceVal, targetType);\n}\npublic static long anyToInt(Object sourceVal) {\nreturn 
TypeConverter.anyToIntCast(sourceVal,\n() -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT));\n}\npublic static long anyToSigned32(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_32,\n() -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_INT_SIGNED_32));\n}\npublic static long anyToSigned16(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_16,\n() -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_INT_SIGNED_16));\n}\npublic static long anyToSigned8(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_8,\n() -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_INT_SIGNED_8));\n}\npublic static long anyToUnsigned32(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_32,\n() -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_INT_UNSIGNED_32));\n}\npublic static long anyToUnsigned16(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_16,\n() -> ErrorUtils\n.createTypeCastError(sourceVal, TYPE_INT_UNSIGNED_16));\n}\npublic static long anyToUnsigned8(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_8,\n() -> ErrorUtils\n.createTypeCastError(sourceVal,\nTYPE_INT_UNSIGNED_8));\n}\npublic static double anyToFloat(Object sourceVal) {\nreturn TypeConverter.anyToFloatCast(sourceVal, () -> ErrorUtils\n.createTypeCastError(sourceVal, TYPE_FLOAT));\n}\npublic static boolean anyToBoolean(Object sourceVal) {\nreturn TypeConverter.anyToBooleanCast(sourceVal, () -> ErrorUtils\n.createTypeCastError(sourceVal, TYPE_BOOLEAN));\n}\npublic static int anyToByte(Object sourceVal) {\nreturn TypeConverter.anyToByteCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_BYTE));\n}\npublic static DecimalValue anyToDecimal(Object sourceVal) {\nreturn TypeConverter.anyToDecimalCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_DECIMAL));\n}\npublic static byte anyToJByte(Object sourceVal) {\nreturn TypeConverter.anyToJByteCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"byte\"));\n}\npublic static char anyToJChar(Object sourceVal) {\nreturn TypeConverter.anyToJCharCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"char\"));\n}\npublic static short anyToJShort(Object sourceVal) {\nreturn TypeConverter.anyToJShortCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"short\"));\n}\npublic static int anyToJInt(Object sourceVal) {\nreturn TypeConverter.anyToJIntCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"int\"));\n}\npublic static long anyToJLong(Object sourceVal) {\nreturn TypeConverter.anyToJLongCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"long\"));\n}\npublic static float anyToJFloat(Object sourceVal) {\nreturn TypeConverter.anyToJFloatCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"float\"));\n}\npublic static double anyToJDouble(Object sourceVal) {\nreturn TypeConverter.anyToJDoubleCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"double\"));\n}\npublic static boolean anyToJBoolean(Object sourceVal) {\nreturn TypeConverter.anyToJBooleanCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"boolean\"));\n}\n/**\n* Check whether a given value belongs to the given type.\n*\n* @param sourceVal value to check the type\n* @param targetType type to be tested against\n* @return true if the 
value belongs to the given type, false otherwise\n*/\npublic static boolean checkIsType(Object sourceVal, Type targetType) {\nreturn checkIsType(sourceVal, getType(sourceVal), targetType);\n}\n/**\n* Check whether a given value belongs to the given type.\n*\n* @param sourceVal value to check the type\n* @param sourceType type of the value\n* @param targetType type to be tested against\n* @return true if the value belongs to the given type, false otherwise\n*/\npublic static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType) {\nif (checkIsType(sourceVal, sourceType, targetType, null)) {\nreturn true;\n}\nif (sourceType.getTag() == TypeTags.XML_TAG) {\nXmlValue val = (XmlValue) sourceVal;\nif (val.getNodeType() == XmlNodeType.SEQUENCE) {\nreturn checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false);\n}\n}\nif (isMutable(sourceVal, sourceType)) {\nreturn false;\n}\nreturn checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false);\n}\n/**\n* Check whether a given value has the same shape as the given type.\n*\n* @param sourceValue value to check the shape\n* @param targetType type to check the shape against\n* @return true if the value has the same shape as the given type; false otherwise\n*/\npublic static boolean checkIsLikeType(Object sourceValue, Type targetType) {\nreturn checkIsLikeType(sourceValue, targetType, false);\n}\n/**\n* Check whether a given value has the same shape as the given type.\n*\n* @param sourceValue value to check the shape\n* @param targetType type to check the shape against\n* @param allowNumericConversion whether numeric conversion is allowed to change the shape to the target type\n* @return true if the value has the same shape as the given type; false otherwise\n*/\npublic static boolean checkIsLikeType(Object sourceValue, Type targetType, boolean allowNumericConversion) {\nreturn checkIsLikeType(sourceValue, targetType, new ArrayList<>(), allowNumericConversion);\n}\n/**\n* Check whether two types are the same.\n*\n* @param sourceType type to test\n* @param targetType type to test against\n* @return true if the two types are the same; false otherwise\n*/\npublic static boolean isSameType(Type sourceType, Type targetType) {\nint sourceTypeTag = sourceType.getTag();\nint targetTypeTag = targetType.getTag();\nif (sourceType == targetType) {\nreturn true;\n}\nif (sourceTypeTag == targetTypeTag) {\nif (sourceType.equals(targetType)) {\nreturn true;\n}\nswitch (sourceTypeTag) {\ncase TypeTags.ARRAY_TAG:\nreturn checkArrayEquivalent(sourceType, targetType);\ncase TypeTags.FINITE_TYPE_TAG:\nSet sourceValueSpace = ((BFiniteType) sourceType).valueSpace;\nSet targetValueSpace = ((BFiniteType) targetType).valueSpace;\nif (sourceValueSpace.size() != targetValueSpace.size()) {\nreturn false;\n}\nfor (Object sourceVal : sourceValueSpace) {\nif (!containsType(targetValueSpace, getType(sourceVal))) {\nreturn false;\n}\n}\nreturn true;\ndefault:\nbreak;\n}\n}\nif (sourceTypeTag == TypeTags.FINITE_TYPE_TAG) {\nfor (Object value : ((BFiniteType) sourceType).valueSpace) {\nif (!isSameType(getType(value), targetType)) {\nreturn false;\n}\n}\nreturn true;\n}\nif (targetTypeTag == TypeTags.FINITE_TYPE_TAG) {\nfor (Object value : ((BFiniteType) targetType).valueSpace) {\nif (!isSameType(getType(value), sourceType)) {\nreturn false;\n}\n}\nreturn true;\n}\nreturn false;\n}\npublic static Type getType(Object value) {\nif (value == null) {\nreturn TYPE_NULL;\n} else if (value instanceof Number) {\nif (value instanceof Long) 
{\nreturn TYPE_INT;\n} else if (value instanceof Double) {\nreturn TYPE_FLOAT;\n} else if (value instanceof Integer || value instanceof Byte) {\nreturn TYPE_BYTE;\n}\n} else if (value instanceof BString) {\nreturn TYPE_STRING;\n} else if (value instanceof Boolean) {\nreturn TYPE_BOOLEAN;\n}\nreturn ((BValue) value).getType();\n}\n/**\n* Deep value equality check for anydata.\n*\n* @param lhsValue The value on the left hand side\n* @param rhsValue The value on the right hand side\n* @return True if values are equal, else false.\n*/\npublic static boolean isEqual(Object lhsValue, Object rhsValue) {\nreturn isEqual(lhsValue, rhsValue, new ArrayList<>());\n}\n/**\n* Check if two decimal values are equal in value.\n*\n* @param lhsValue The value on the left hand side\n* @param rhsValue The value of the right hand side\n* @return True if values are equal, else false.\n*/\npublic static boolean checkDecimalEqual(DecimalValue lhsValue, DecimalValue rhsValue) {\nreturn isDecimalRealNumber(lhsValue) && isDecimalRealNumber(rhsValue) &&\nlhsValue.decimalValue().compareTo(rhsValue.decimalValue()) == 0;\n}\n/**\n* Check if two decimal values are exactly equal.\n*\n* @param lhsValue The value on the left-hand side\n* @param rhsValue The value of the right-hand side\n* @return True if values are exactly equal, else false.\n*/\npublic static boolean checkDecimalExactEqual(DecimalValue lhsValue, DecimalValue rhsValue) {\nreturn isDecimalRealNumber(lhsValue) && isDecimalRealNumber(rhsValue)\n&& lhsValue.decimalValue().equals(rhsValue.decimalValue());\n}\n/**\n* Checks if the given decimal number is a real number.\n*\n* @param decimalValue The decimal value being checked\n* @return True if the decimal value is a real number.\n*/\nprivate static boolean isDecimalRealNumber(DecimalValue decimalValue) {\nreturn decimalValue.valueKind == DecimalValueKind.ZERO || decimalValue.valueKind == DecimalValueKind.OTHER;\n}\n/**\n* Reference equality check for values. 
If both the values are simple basic types, returns the same\n* result as {@link\n*\n* @param lhsValue The value on the left hand side\n* @param rhsValue The value on the right hand side\n* @return True if values are reference equal or in the case of simple basic types if the values are equal,\n* else false.\n*/\nprivate static boolean isXMLValueRefEqual(XmlValue lhsValue, XmlValue rhsValue) {\nif (lhsValue.getNodeType() != rhsValue.getNodeType()) {\nreturn false;\n}\nif (lhsValue.getNodeType() == XmlNodeType.SEQUENCE && rhsValue.getNodeType() == XmlNodeType.SEQUENCE) {\nreturn isXMLSequenceRefEqual((XmlSequence) lhsValue, (XmlSequence) rhsValue);\n}\nif (lhsValue.getNodeType() == XmlNodeType.TEXT && rhsValue.getNodeType() == XmlNodeType.TEXT) {\nreturn isEqual(lhsValue, rhsValue);\n}\nreturn false;\n}\nprivate static boolean isXMLSequenceRefEqual(XmlSequence lhsValue, XmlSequence rhsValue) {\nIterator lhsIter = lhsValue.getChildrenList().iterator();\nIterator rhsIter = rhsValue.getChildrenList().iterator();\nwhile (lhsIter.hasNext() && rhsIter.hasNext()) {\nBXml l = lhsIter.next();\nBXml r = rhsIter.next();\nif (!(l == r || isXMLValueRefEqual((XmlValue) l, (XmlValue) r))) {\nreturn false;\n}\n}\nreturn lhsIter.hasNext() == rhsIter.hasNext();\n}\n/**\n* Get the typedesc of a value.\n*\n* @param value Value\n* @return type desc associated with the value\n*/\npublic static TypedescValue getTypedesc(Object value) {\nType type = TypeChecker.getType(value);\nif (type == null) {\nreturn null;\n}\nif (value instanceof MapValue) {\nTypedescValue typedesc = (TypedescValue) ((MapValue) value).getTypedesc();\nif (typedesc != null) {\nreturn typedesc;\n}\n}\nreturn new TypedescValueImpl(type);\n}\n/**\n* Get the annotation value if present.\n*\n* @param typedescValue The typedesc value\n* @param annotTag The annot-tag-reference\n* @return the annotation value if present, nil else\n*/\npublic static Object getAnnotValue(TypedescValue typedescValue, String annotTag) {\nType describingType = typedescValue.getDescribingType();\nif (!(describingType instanceof BAnnotatableType)) {\nreturn null;\n}\nreturn ((BAnnotatableType) describingType).getAnnotation(StringUtils.fromString(annotTag));\n}\npublic static Object getAnnotValue(TypedescValue typedescValue, BString annotTag) {\nType describingType = typedescValue.getDescribingType();\nif (!(describingType instanceof BAnnotatableType)) {\nreturn null;\n}\nreturn ((BAnnotatableType) describingType).getAnnotation(annotTag);\n}\n/**\n* Check whether a given type is equivalent to a target type.\n*\n* @param sourceType type to check\n* @param targetType type to compare with\n* @return flag indicating the equivalence of the two types\n*/\npublic static boolean checkIsType(Type sourceType, Type targetType) {\nreturn checkIsType(sourceType, targetType, (List) null);\n}\n@Deprecated\npublic static boolean checkIsType(Type sourceType, Type targetType, List unresolvedTypes) {\nif (sourceType == targetType || (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {\nreturn true;\n}\nif (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(sourceType)) {\nreturn true;\n}\nif (targetType.isReadOnly() && !sourceType.isReadOnly()) {\nreturn false;\n}\nint sourceTypeTag = sourceType.getTag();\nint targetTypeTag = targetType.getTag();\nif (sourceTypeTag == TypeTags.INTERSECTION_TAG) {\nreturn checkIsType(((BIntersectionType) sourceType).getEffectiveType(),\ntargetTypeTag != TypeTags.INTERSECTION_TAG ? 
targetType :\n((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes);\n}\nif (targetTypeTag == TypeTags.INTERSECTION_TAG) {\nreturn checkIsType(sourceType, ((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes);\n}\nif (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) {\nif (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) {\nreturn checkIsType(((BParameterizedType) sourceType).getParamValueType(), targetType, unresolvedTypes);\n}\nreturn checkIsType(((BParameterizedType) sourceType).getParamValueType(),\n((BParameterizedType) targetType).getParamValueType(), unresolvedTypes);\n}\nif (sourceTypeTag == TypeTags.READONLY_TAG) {\nreturn checkIsType(PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE,\ntargetType, unresolvedTypes);\n}\nif (targetTypeTag == TypeTags.READONLY_TAG) {\nreturn checkIsType(sourceType, PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE, unresolvedTypes);\n}\nif (sourceTypeTag == TypeTags.UNION_TAG) {\nreturn isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);\n}\nif (sourceTypeTag == TypeTags.FINITE_TYPE_TAG &&\n(targetTypeTag == TypeTags.FINITE_TYPE_TAG || targetTypeTag <= TypeTags.NULL_TAG ||\ntargetTypeTag == TypeTags.XML_TEXT_TAG)) {\nreturn isFiniteTypeMatch((BFiniteType) sourceType, targetType);\n}\nswitch (targetTypeTag) {\ncase TypeTags.BYTE_TAG:\ncase TypeTags.SIGNED8_INT_TAG:\ncase TypeTags.FLOAT_TAG:\ncase TypeTags.DECIMAL_TAG:\ncase TypeTags.CHAR_STRING_TAG:\ncase TypeTags.BOOLEAN_TAG:\ncase TypeTags.NULL_TAG:\nreturn sourceTypeTag == targetTypeTag;\ncase TypeTags.STRING_TAG:\nreturn TypeTags.isStringTypeTag(sourceTypeTag);\ncase TypeTags.XML_TEXT_TAG:\nif (sourceTypeTag == TypeTags.XML_TAG) {\nreturn ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG;\n}\nreturn sourceTypeTag == targetTypeTag;\ncase TypeTags.INT_TAG:\nreturn sourceTypeTag == TypeTags.INT_TAG || sourceTypeTag == TypeTags.BYTE_TAG ||\n(sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.UNSIGNED32_INT_TAG);\ncase TypeTags.SIGNED16_INT_TAG:\nreturn sourceTypeTag == TypeTags.BYTE_TAG ||\n(sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED16_INT_TAG);\ncase TypeTags.SIGNED32_INT_TAG:\nreturn sourceTypeTag == TypeTags.BYTE_TAG ||\n(sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED32_INT_TAG);\ncase TypeTags.UNSIGNED8_INT_TAG:\nreturn sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG;\ncase TypeTags.UNSIGNED16_INT_TAG:\nreturn sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG ||\nsourceTypeTag == TypeTags.UNSIGNED16_INT_TAG;\ncase TypeTags.UNSIGNED32_INT_TAG:\nreturn sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG ||\nsourceTypeTag == TypeTags.UNSIGNED16_INT_TAG || sourceTypeTag == TypeTags.UNSIGNED32_INT_TAG;\ncase TypeTags.ANY_TAG:\nreturn checkIsAnyType(sourceType);\ncase TypeTags.ANYDATA_TAG:\nreturn sourceType.isAnydata();\ncase TypeTags.SERVICE_TAG:\nreturn checkIsServiceType(sourceType, targetType,\nunresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);\ncase TypeTags.HANDLE_TAG:\nreturn sourceTypeTag == TypeTags.HANDLE_TAG;\ncase TypeTags.READONLY_TAG:\nreturn isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();\ncase TypeTags.XML_ELEMENT_TAG:\ncase TypeTags.XML_COMMENT_TAG:\ncase TypeTags.XML_PI_TAG:\nreturn targetTypeTag == sourceTypeTag;\ndefault:\nreturn checkIsRecursiveType(sourceType, targetType,\nunresolvedTypes == null ? 
new ArrayList<>() : unresolvedTypes);\n}\n}\nprivate static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType,\nList unresolvedTypes) {\nint sourceTypeTag = sourceType.getTag();\nint targetTypeTag = targetType.getTag();\nif (sourceTypeTag != TypeTags.RECORD_TYPE_TAG && sourceTypeTag != TypeTags.OBJECT_TYPE_TAG) {\nreturn checkIsType(sourceType, targetType);\n}\nif (targetTypeTag == TypeTags.INTERSECTION_TAG) {\ntargetType = ((BIntersectionType) targetType).getEffectiveType();\ntargetTypeTag = targetType.getTag();\n}\nif (sourceType == targetType || (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {\nreturn true;\n}\nif (targetType.isReadOnly() && !sourceType.isReadOnly()) {\nreturn false;\n}\nswitch (targetTypeTag) {\ncase TypeTags.ANY_TAG:\nreturn checkIsAnyType(sourceType);\ncase TypeTags.READONLY_TAG:\nreturn isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();\ndefault:\nreturn checkIsRecursiveTypeOnValue(sourceVal, sourceType, targetType, sourceTypeTag, targetTypeTag,\nunresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);\n}\n}\nprivate static boolean checkTypeDescType(Type sourceType, BTypedescType targetType,\nList unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.TYPEDESC_TAG) {\nreturn false;\n}\nBTypedescType sourceTypedesc = (BTypedescType) sourceType;\nreturn checkIsType(sourceTypedesc.getConstraint(), targetType.getConstraint(), unresolvedTypes);\n}\nprivate static boolean checkIsRecursiveType(Type sourceType, Type targetType, List unresolvedTypes) {\nswitch (targetType.getTag()) {\ncase TypeTags.MAP_TAG:\nreturn checkIsMapType(sourceType, (BMapType) targetType, unresolvedTypes);\ncase TypeTags.STREAM_TAG:\nreturn checkIsStreamType(sourceType, (BStreamType) targetType, unresolvedTypes);\ncase TypeTags.TABLE_TAG:\nreturn checkIsTableType(sourceType, (BTableType) targetType, unresolvedTypes);\ncase TypeTags.JSON_TAG:\nreturn checkIsJSONType(sourceType, unresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsRecordType(sourceType, (BRecordType) targetType, unresolvedTypes);\ncase TypeTags.FUNCTION_POINTER_TAG:\nreturn checkIsFunctionType(sourceType, (BFunctionType) targetType);\ncase TypeTags.ARRAY_TAG:\nreturn checkIsArrayType(sourceType, (BArrayType) targetType, unresolvedTypes);\ncase TypeTags.TUPLE_TAG:\nreturn checkIsTupleType(sourceType, (BTupleType) targetType, unresolvedTypes);\ncase TypeTags.UNION_TAG:\nreturn checkIsUnionType(sourceType, (BUnionType) targetType, unresolvedTypes);\ncase TypeTags.OBJECT_TYPE_TAG:\nreturn checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);\ncase TypeTags.FINITE_TYPE_TAG:\nreturn checkIsFiniteType(sourceType, (BFiniteType) targetType);\ncase TypeTags.FUTURE_TAG:\nreturn checkIsFutureType(sourceType, (BFutureType) targetType, unresolvedTypes);\ncase TypeTags.ERROR_TAG:\nreturn checkIsErrorType(sourceType, (BErrorType) targetType, unresolvedTypes);\ncase TypeTags.TYPEDESC_TAG:\nreturn checkTypeDescType(sourceType, (BTypedescType) targetType, unresolvedTypes);\ncase TypeTags.XML_TAG:\nreturn checkIsXMLType(sourceType, targetType, unresolvedTypes);\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkIsRecursiveTypeOnValue(Object sourceVal, Type sourceType, Type targetType,\nint sourceTypeTag, int targetTypeTag,\nList unresolvedTypes) {\nswitch (targetTypeTag) {\ncase TypeTags.ANYDATA_TAG:\nif (sourceTypeTag == TypeTags.OBJECT_TYPE_TAG) {\nreturn false;\n}\nreturn checkRecordBelongsToAnydataType((MapValue) sourceVal, 
(BRecordType) sourceType, unresolvedTypes);\ncase TypeTags.MAP_TAG:\nreturn checkIsMapType(sourceVal, sourceType, (BMapType) targetType, unresolvedTypes);\ncase TypeTags.JSON_TAG:\nreturn checkIsMapType(sourceVal, sourceType,\nnew BMapType(targetType.isReadOnly() ? TYPE_READONLY_JSON :\nTYPE_JSON), unresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsRecordType(sourceVal, sourceType, (BRecordType) targetType, unresolvedTypes);\ncase TypeTags.UNION_TAG:\nfor (Type type : ((BUnionType) targetType).getMemberTypes()) {\nif (checkIsType(sourceVal, sourceType, type, unresolvedTypes)) {\nreturn true;\n}\n}\nreturn false;\ncase TypeTags.OBJECT_TYPE_TAG:\nreturn checkObjectEquivalency(sourceVal, sourceType, (BObjectType) targetType, unresolvedTypes);\ndefault:\nreturn false;\n}\n}\nprivate static boolean isFiniteTypeMatch(BFiniteType sourceType, Type targetType) {\nfor (Object bValue : sourceType.valueSpace) {\nif (!checkIsType(bValue, targetType)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean isUnionTypeMatch(BUnionType sourceType, Type targetType, List unresolvedTypes) {\nfor (Type type : sourceType.getMemberTypes()) {\nif (!checkIsType(type, targetType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsUnionType(Type sourceType, BUnionType targetType, List unresolvedTypes) {\nTypePair pair = new TypePair(sourceType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nswitch (sourceType.getTag()) {\ncase TypeTags.UNION_TAG:\ncase TypeTags.JSON_TAG:\ncase TypeTags.ANYDATA_TAG:\nreturn isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);\ncase TypeTags.FINITE_TYPE_TAG:\nreturn isFiniteTypeMatch((BFiniteType) sourceType, targetType);\ndefault:\nfor (Type type : targetType.getMemberTypes()) {\nif (checkIsType(sourceType, type, unresolvedTypes)) {\nreturn true;\n}\n}\nreturn false;\n}\n}\nprivate static boolean checkIsMapType(Type sourceType, BMapType targetType, List unresolvedTypes) {\nType targetConstrainedType = targetType.getConstrainedType();\nswitch (sourceType.getTag()) {\ncase TypeTags.MAP_TAG:\nreturn checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,\nunresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nBRecordType recType = (BRecordType) sourceType;\nBUnionType wideTypeUnion = new BUnionType(getWideTypeComponents(recType));\nreturn checkConstraints(wideTypeUnion, targetConstrainedType, unresolvedTypes);\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkIsMapType(Object sourceVal, Type sourceType, BMapType targetType,\nList unresolvedTypes) {\nType targetConstrainedType = targetType.getConstrainedType();\nswitch (sourceType.getTag()) {\ncase TypeTags.MAP_TAG:\nreturn checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,\nunresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsMapType((MapValue) sourceVal, (BRecordType) sourceType, unresolvedTypes,\ntargetConstrainedType);\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkIsMapType(MapValue sourceVal, BRecordType sourceType, List unresolvedTypes,\nType targetConstrainedType) {\nfor (Field field : sourceType.getFields().values()) {\nif (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {\nif (!checkIsType(field.getFieldType(), targetConstrainedType, unresolvedTypes)) {\nreturn false;\n}\ncontinue;\n}\nBString name = StringUtils.fromString(field.getFieldName());\nif (SymbolFlags.isFlagOn(field.getFlags(), 
SymbolFlags.OPTIONAL) && !sourceVal.containsKey(name)) {\ncontinue;\n}\nif (!checkIsLikeType(sourceVal.get(name), targetConstrainedType)) {\nreturn false;\n}\n}\nif (sourceType.sealed) {\nreturn true;\n}\nreturn checkIsType(sourceType.restFieldType, targetConstrainedType, unresolvedTypes);\n}\nprivate static boolean checkIsXMLType(Type sourceType, Type targetType, List unresolvedTypes) {\nint sourceTag = sourceType.getTag();\nif (sourceTag == TypeTags.FINITE_TYPE_TAG) {\nreturn isFiniteTypeMatch((BFiniteType) sourceType, targetType);\n}\nBXmlType target = ((BXmlType) targetType);\nif (sourceTag == TypeTags.XML_TAG) {\nType targetConstraint = target.constraint;\nwhile (target.constraint.getTag() == TypeTags.XML_TAG) {\ntarget = (BXmlType) target.constraint;\ntargetConstraint = target.constraint;\n}\nBXmlType source = (BXmlType) sourceType;\nif (source.constraint.getTag() == TypeTags.NEVER_TAG) {\nif (targetConstraint.getTag() == TypeTags.UNION_TAG) {\nreturn checkIsUnionType(sourceType, (BUnionType) targetConstraint, unresolvedTypes);\n}\nreturn targetConstraint.getTag() == TypeTags.XML_TEXT_TAG ||\ntargetConstraint.getTag() == TypeTags.NEVER_TAG;\n}\nreturn checkIsType(source.constraint, targetConstraint, unresolvedTypes);\n}\nif (TypeTags.isXMLTypeTag(sourceTag)) {\nreturn checkIsType(sourceType, target.constraint, unresolvedTypes);\n}\nreturn false;\n}\nprivate static List getWideTypeComponents(BRecordType recType) {\nList types = new ArrayList<>();\nfor (Field f : recType.getFields().values()) {\ntypes.add(f.getFieldType());\n}\nif (!recType.sealed) {\ntypes.add(recType.restFieldType);\n}\nreturn types;\n}\nprivate static boolean checkIsStreamType(Type sourceType, BStreamType targetType, List unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.STREAM_TAG) {\nreturn false;\n}\nreturn checkConstraints(((BStreamType) sourceType).getConstrainedType(), targetType.getConstrainedType(),\nunresolvedTypes)\n&& checkConstraints(((BStreamType) sourceType).getCompletionType(), targetType.getCompletionType(),\nunresolvedTypes);\n}\nprivate static boolean checkIsTableType(Type sourceType, BTableType targetType, List unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.TABLE_TAG) {\nreturn false;\n}\nBTableType srcTableType = (BTableType) sourceType;\nif (!checkConstraints(srcTableType.getConstrainedType(), targetType.getConstrainedType(),\nunresolvedTypes)) {\nreturn false;\n}\nif (targetType.getKeyType() == null && targetType.getFieldNames() == null) {\nreturn true;\n}\nif (targetType.getKeyType() != null) {\nif (srcTableType.getKeyType() != null &&\n(checkConstraints(srcTableType.getKeyType(), targetType.getKeyType(), unresolvedTypes))) {\nreturn true;\n}\nif (srcTableType.getFieldNames() == null) {\nreturn false;\n}\nList fieldTypes = new ArrayList<>();\nArrays.stream(srcTableType.getFieldNames()).forEach(field -> fieldTypes\n.add(Objects.requireNonNull(getTableConstraintField(srcTableType.getConstrainedType(), field))\n.getFieldType()));\nif (fieldTypes.size() == 1) {\nreturn checkConstraints(fieldTypes.get(0), targetType.getKeyType(), unresolvedTypes);\n}\nBTupleType tupleType = new BTupleType(fieldTypes);\nreturn checkConstraints(tupleType, targetType.getKeyType(), unresolvedTypes);\n}\nreturn Arrays.equals(srcTableType.getFieldNames(), targetType.getFieldNames());\n}\nstatic BField getTableConstraintField(Type constraintType, String fieldName) {\nswitch (constraintType.getTag()) {\ncase TypeTags.RECORD_TYPE_TAG:\nMap fieldList = ((BRecordType) constraintType).getFields();\nreturn 
(BField) fieldList.get(fieldName);\ncase TypeTags.INTERSECTION_TAG:\nType effectiveType = ((BIntersectionType) constraintType).getEffectiveType();\nreturn getTableConstraintField(effectiveType, fieldName);\ncase TypeTags.UNION_TAG:\nBUnionType unionType = (BUnionType) constraintType;\nList memTypes = unionType.getMemberTypes();\nList fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))\n.filter(Objects::nonNull).collect(Collectors.toList());\nif (fields.size() != memTypes.size()) {\nreturn null;\n}\nif (fields.stream().allMatch(field -> isSameType(field.getFieldType(), fields.get(0).getFieldType()))) {\nreturn fields.get(0);\n}\n}\nreturn null;\n}\nprivate static boolean checkIsJSONType(Type sourceType, List unresolvedTypes) {\nBJsonType jsonType = (BJsonType) TYPE_JSON;\nTypePair pair = new TypePair(sourceType, jsonType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nswitch (sourceType.getTag()) {\ncase TypeTags.STRING_TAG:\ncase TypeTags.CHAR_STRING_TAG:\ncase TypeTags.INT_TAG:\ncase TypeTags.SIGNED32_INT_TAG:\ncase TypeTags.SIGNED16_INT_TAG:\ncase TypeTags.SIGNED8_INT_TAG:\ncase TypeTags.UNSIGNED32_INT_TAG:\ncase TypeTags.UNSIGNED16_INT_TAG:\ncase TypeTags.UNSIGNED8_INT_TAG:\ncase TypeTags.BYTE_TAG:\ncase TypeTags.FLOAT_TAG:\ncase TypeTags.DECIMAL_TAG:\ncase TypeTags.BOOLEAN_TAG:\ncase TypeTags.NULL_TAG:\ncase TypeTags.JSON_TAG:\nreturn true;\ncase TypeTags.ARRAY_TAG:\nreturn checkIsType(((BArrayType) sourceType).getElementType(), jsonType, unresolvedTypes);\ncase TypeTags.FINITE_TYPE_TAG:\nreturn isFiniteTypeMatch((BFiniteType) sourceType, jsonType);\ncase TypeTags.MAP_TAG:\nreturn checkIsType(((BMapType) sourceType).getConstrainedType(), jsonType, unresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nBRecordType recordType = (BRecordType) sourceType;\nfor (Field field : recordType.getFields().values()) {\nif (!checkIsJSONType(field.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\nif (!recordType.sealed) {\nreturn checkIsJSONType(recordType.restFieldType, unresolvedTypes);\n}\nreturn true;\ncase TypeTags.TUPLE_TAG:\nBTupleType sourceTupleType = (BTupleType) sourceType;\nfor (Type memberType : sourceTupleType.getTupleTypes()) {\nif (!checkIsJSONType(memberType, unresolvedTypes)) {\nreturn false;\n}\n}\nType tupleRestType = sourceTupleType.getRestType();\nif (tupleRestType != null) {\nreturn checkIsJSONType(tupleRestType, unresolvedTypes);\n}\nreturn true;\ncase TypeTags.UNION_TAG:\nfor (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {\nif (!checkIsJSONType(memberType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkIsRecordType(Type sourceType, BRecordType targetType, List unresolvedTypes) {\nswitch (sourceType.getTag()) {\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsRecordType((BRecordType) sourceType, targetType, unresolvedTypes);\ncase TypeTags.MAP_TAG:\nreturn checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);\n}\nreturn false;\n}\nprivate static boolean checkIsRecordType(BRecordType sourceRecordType, BRecordType targetType,\nList unresolvedTypes) {\nTypePair pair = new TypePair(sourceRecordType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nif (targetType.sealed && !sourceRecordType.sealed) {\nreturn false;\n}\nif (!sourceRecordType.sealed &&\n!checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {\nreturn false;\n}\nMap 
sourceFields = sourceRecordType.getFields();\nSet targetFieldNames = targetType.getFields().keySet();\nfor (Map.Entry targetFieldEntry : targetType.getFields().entrySet()) {\nField targetField = targetFieldEntry.getValue();\nField sourceField = sourceFields.get(targetFieldEntry.getKey());\nif (sourceField == null) {\nreturn false;\n}\nif (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {\nreturn false;\n}\nif (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)\n&& SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL)) {\nreturn false;\n}\nif (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\nif (targetType.sealed) {\nreturn targetFieldNames.containsAll(sourceFields.keySet());\n}\nfor (Map.Entry sourceFieldEntry : sourceFields.entrySet()) {\nif (targetFieldNames.contains(sourceFieldEntry.getKey())) {\ncontinue;\n}\nif (!checkIsType(sourceFieldEntry.getValue().getFieldType(), targetType.restFieldType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsRecordType(BMapType sourceType, BRecordType targetType,\nList unresolvedTypes) {\nTypePair pair = new TypePair(sourceType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nif (targetType.sealed) {\nreturn false;\n}\nType constraintType = sourceType.getConstrainedType();\nfor (Field field : targetType.getFields().values()) {\nvar flags = field.getFlags();\nif (!SymbolFlags.isFlagOn(flags, SymbolFlags.OPTIONAL)) {\nreturn false;\n}\nif (SymbolFlags.isFlagOn(flags, SymbolFlags.READONLY) && !sourceType.isReadOnly()) {\nreturn false;\n}\nif (!checkIsType(constraintType, field.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\nreturn checkIsType(constraintType, targetType.restFieldType, unresolvedTypes);\n}\nprivate static boolean checkRecordBelongsToAnydataType(MapValue sourceVal, BRecordType recordType,\nList unresolvedTypes) {\nType targetType = TYPE_ANYDATA;\nTypePair pair = new TypePair(recordType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nMap fields = recordType.getFields();\nfor (Map.Entry fieldEntry : fields.entrySet()) {\nString fieldName = fieldEntry.getKey();\nField field = fieldEntry.getValue();\nif (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {\nBString fieldNameBString = StringUtils.fromString(fieldName);\nif (SymbolFlags\n.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) && !sourceVal.containsKey(fieldNameBString)) {\ncontinue;\n}\nif (!checkIsLikeType(sourceVal.get(fieldNameBString), targetType)) {\nreturn false;\n}\n} else {\nif (!checkIsType(field.getFieldType(), targetType, unresolvedTypes)) {\nreturn false;\n}\n}\n}\nif (recordType.sealed) {\nreturn true;\n}\nreturn checkIsType(recordType.restFieldType, targetType, unresolvedTypes);\n}\nprivate static boolean checkIsRecordType(Object sourceVal, Type sourceType, BRecordType targetType,\nList unresolvedTypes) {\nswitch (sourceType.getTag()) {\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsRecordType((MapValue) sourceVal, (BRecordType) sourceType, targetType, unresolvedTypes);\ncase TypeTags.MAP_TAG:\nreturn checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);\n}\nreturn false;\n}\nprivate static boolean checkIsRecordType(MapValue sourceRecordValue, BRecordType sourceRecordType,\nBRecordType targetType, List unresolvedTypes) {\nTypePair pair = new TypePair(sourceRecordType, targetType);\nif 
(unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nif (targetType.sealed && !sourceRecordType.sealed) {\nreturn false;\n}\nif (!sourceRecordType.sealed &&\n!checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {\nreturn false;\n}\nMap sourceFields = sourceRecordType.getFields();\nSet targetFieldNames = targetType.getFields().keySet();\nfor (Map.Entry targetFieldEntry : targetType.getFields().entrySet()) {\nString fieldName = targetFieldEntry.getKey();\nField targetField = targetFieldEntry.getValue();\nField sourceField = sourceFields.get(fieldName);\nif (sourceField == null) {\nif (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)) {\nreturn false;\n}\ncontinue;\n}\nif (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {\nreturn false;\n}\nboolean optionalTargetField = SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL);\nboolean optionalSourceField = SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL);\nif (SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.READONLY)) {\nBString fieldNameBString = StringUtils.fromString(fieldName);\nif (optionalSourceField && !sourceRecordValue.containsKey(fieldNameBString)) {\nif (!optionalTargetField) {\nreturn false;\n}\ncontinue;\n}\nif (!checkIsLikeType(sourceRecordValue.get(fieldNameBString), targetField.getFieldType())) {\nreturn false;\n}\n} else {\nif (!optionalTargetField && optionalSourceField) {\nreturn false;\n}\nif (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\n}\nif (targetType.sealed) {\nfor (String sourceFieldName : sourceFields.keySet()) {\nif (targetFieldNames.contains(sourceFieldName)) {\ncontinue;\n}\nif (!checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(\nsourceFields.get(sourceFieldName).getFieldType())) {\nreturn false;\n}\n}\nreturn true;\n}\nfor (Map.Entry targetFieldEntry : sourceFields.entrySet()) {\nString fieldName = targetFieldEntry.getKey();\nField field = targetFieldEntry.getValue();\nif (targetFieldNames.contains(fieldName)) {\ncontinue;\n}\nif (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {\nif (!checkIsLikeType(sourceRecordValue.get(StringUtils.fromString(fieldName)),\ntargetType.restFieldType)) {\nreturn false;\n}\n} else if (!checkIsType(field.getFieldType(), targetType.restFieldType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean hasIncompatibleReadOnlyFlags(Field targetField, Field sourceField) {\nreturn SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.READONLY) && !SymbolFlags\n.isFlagOn(sourceField.getFlags(),\nSymbolFlags.READONLY);\n}\nprivate static boolean checkIsArrayType(BArrayType sourceType, BArrayType targetType,\nList unresolvedTypes) {\nswitch (sourceType.getState()) {\ncase OPEN:\nif (targetType.getState() != ArrayState.OPEN) {\nreturn false;\n}\nbreak;\ncase CLOSED:\nif (targetType.getState() == ArrayState.CLOSED &&\nsourceType.getSize() != targetType.getSize()) {\nreturn false;\n}\nbreak;\n}\nreturn checkIsType(sourceType.getElementType(), targetType.getElementType(), unresolvedTypes);\n}\nprivate static boolean checkIsArrayType(BTupleType sourceType, BArrayType targetType,\nList unresolvedTypes) {\nList tupleTypes = sourceType.getTupleTypes();\nType sourceRestType = sourceType.getRestType();\nType targetElementType = targetType.getElementType();\nif (targetType.getState() == ArrayState.OPEN) {\nfor (Type sourceElementType : tupleTypes) {\nif 
(!checkIsType(sourceElementType, targetElementType, unresolvedTypes)) {\nreturn false;\n}\n}\nif (sourceRestType != null) {\nreturn checkIsType(sourceRestType, targetElementType, unresolvedTypes);\n}\nreturn true;\n}\nif (sourceRestType != null) {\nreturn false;\n}\nif (tupleTypes.size() != targetType.getSize()) {\nreturn false;\n}\nfor (Type sourceElementType : tupleTypes) {\nif (!checkIsType(sourceElementType, targetElementType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsArrayType(Type sourceType, BArrayType targetType, List unresolvedTypes) {\nint sourceTypeTag = sourceType.getTag();\nif (sourceTypeTag == TypeTags.UNION_TAG) {\nfor (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {\nif (!checkIsArrayType(memberType, targetType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nif (sourceTypeTag != TypeTags.ARRAY_TAG && sourceTypeTag != TypeTags.TUPLE_TAG) {\nreturn false;\n}\nif (sourceTypeTag == TypeTags.ARRAY_TAG) {\nreturn checkIsArrayType((BArrayType) sourceType, targetType, unresolvedTypes);\n}\nreturn checkIsArrayType((BTupleType) sourceType, targetType, unresolvedTypes);\n}\nprivate static boolean checkIsTupleType(BArrayType sourceType, BTupleType targetType,\nList unresolvedTypes) {\nType sourceElementType = sourceType.getElementType();\nList targetTypes = targetType.getTupleTypes();\nType targetRestType = targetType.getRestType();\nswitch (sourceType.getState()) {\ncase OPEN:\nif (targetRestType == null) {\nreturn false;\n}\nif (targetTypes.isEmpty()) {\nreturn checkIsType(sourceElementType, targetRestType, unresolvedTypes);\n}\nreturn false;\ncase CLOSED:\nif (sourceType.getSize() < targetTypes.size()) {\nreturn false;\n}\nif (targetTypes.isEmpty()) {\nif (targetRestType != null) {\nreturn checkIsType(sourceElementType, targetRestType, unresolvedTypes);\n}\nreturn sourceType.getSize() == 0;\n}\nfor (Type targetElementType : targetTypes) {\nif (!(checkIsType(sourceElementType, targetElementType, unresolvedTypes))) {\nreturn false;\n}\n}\nif (sourceType.getSize() == targetTypes.size()) {\nreturn true;\n}\nif (targetRestType != null) {\nreturn checkIsType(sourceElementType, targetRestType, unresolvedTypes);\n}\nreturn false;\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkIsTupleType(BTupleType sourceType, BTupleType targetType,\nList unresolvedTypes) {\nList sourceTypes = sourceType.getTupleTypes();\nType sourceRestType = sourceType.getRestType();\nList targetTypes = targetType.getTupleTypes();\nType targetRestType = targetType.getRestType();\nif (sourceRestType != null && targetRestType == null) {\nreturn false;\n}\nint sourceTypeSize = sourceTypes.size();\nint targetTypeSize = targetTypes.size();\nif (sourceRestType == null && targetRestType == null && sourceTypeSize != targetTypeSize) {\nreturn false;\n}\nif (sourceTypeSize < targetTypeSize) {\nreturn false;\n}\nfor (int i = 0; i < targetTypeSize; i++) {\nif (!checkIsType(sourceTypes.get(i), targetTypes.get(i), unresolvedTypes)) {\nreturn false;\n}\n}\nif (sourceTypeSize == targetTypeSize) {\nif (sourceRestType != null) {\nreturn checkIsType(sourceRestType, targetRestType, unresolvedTypes);\n}\nreturn true;\n}\nfor (int i = targetTypeSize; i < sourceTypeSize; i++) {\nif (!checkIsType(sourceTypes.get(i), targetRestType, unresolvedTypes)) {\nreturn false;\n}\n}\nif (sourceRestType != null) {\nreturn checkIsType(sourceRestType, targetRestType, unresolvedTypes);\n}\nreturn true;\n}\nprivate static boolean checkIsTupleType(Type sourceType, BTupleType 
targetType, List unresolvedTypes) {\nint sourceTypeTag = sourceType.getTag();\nif (sourceTypeTag == TypeTags.UNION_TAG) {\nfor (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {\nif (!checkIsTupleType(memberType, targetType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nif (sourceTypeTag != TypeTags.ARRAY_TAG && sourceTypeTag != TypeTags.TUPLE_TAG) {\nreturn false;\n}\nif (sourceTypeTag == TypeTags.ARRAY_TAG) {\nreturn checkIsTupleType((BArrayType) sourceType, targetType, unresolvedTypes);\n}\nreturn checkIsTupleType((BTupleType) sourceType, targetType, unresolvedTypes);\n}\nprivate static boolean checkIsAnyType(Type sourceType) {\nswitch (sourceType.getTag()) {\ncase TypeTags.ERROR_TAG:\ncase TypeTags.READONLY_TAG:\nreturn false;\ncase TypeTags.UNION_TAG:\ncase TypeTags.ANYDATA_TAG:\ncase TypeTags.JSON_TAG:\nfor (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {\nif (!checkIsAnyType(memberType)) {\nreturn false;\n}\n}\nreturn true;\n}\nreturn true;\n}\nprivate static boolean checkIsFiniteType(Type sourceType, BFiniteType targetType) {\nif (sourceType.getTag() != TypeTags.FINITE_TYPE_TAG) {\nreturn false;\n}\nBFiniteType sourceFiniteType = (BFiniteType) sourceType;\nif (sourceFiniteType.valueSpace.size() != targetType.valueSpace.size()) {\nreturn false;\n}\nreturn targetType.valueSpace.containsAll(sourceFiniteType.valueSpace);\n}\nprivate static boolean checkIsFutureType(Type sourceType, BFutureType targetType, List unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.FUTURE_TAG) {\nreturn false;\n}\nreturn checkConstraints(((BFutureType) sourceType).getConstrainedType(), targetType.getConstrainedType(),\nunresolvedTypes);\n}\nprivate static boolean checkObjectEquivalency(Type sourceType, BObjectType targetType,\nList unresolvedTypes) {\nreturn checkObjectEquivalency(null, sourceType, targetType, unresolvedTypes);\n}\nprivate static boolean checkObjectEquivalency(Object sourceVal, Type sourceType, BObjectType targetType,\nList unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.OBJECT_TYPE_TAG && sourceType.getTag() != TypeTags.SERVICE_TAG) {\nreturn false;\n}\nTypePair pair = new TypePair(sourceType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nBObjectType sourceObjectType = (BObjectType) sourceType;\nif (SymbolFlags.isFlagOn(targetType.flags, SymbolFlags.ISOLATED) &&\n!SymbolFlags.isFlagOn(sourceObjectType.flags, SymbolFlags.ISOLATED)) {\nreturn false;\n}\nMap targetFields = targetType.getFields();\nMap sourceFields = sourceObjectType.getFields();\nMethodType[] targetFuncs = targetType.getMethods();\nMethodType[] sourceFuncs = sourceObjectType.getMethods();\nif (targetType.getFields().values().stream().anyMatch(field -> SymbolFlags\n.isFlagOn(field.getFlags(), SymbolFlags.PRIVATE))\n|| Stream.of(targetFuncs).anyMatch(func -> SymbolFlags.isFlagOn(func.getFlags(),\nSymbolFlags.PRIVATE))) {\nreturn false;\n}\nif (targetFields.size() > sourceFields.size() || targetFuncs.length > sourceFuncs.length) {\nreturn false;\n}\nString targetTypeModule = Optional.ofNullable(targetType.getPackage()).map(Module::toString).orElse(\"\");\nString sourceTypeModule = Optional.ofNullable(sourceObjectType.getPackage()).map(Module::toString).orElse(\"\");\nif (sourceVal == null) {\nif (!checkObjectSubTypeForFields(targetFields, sourceFields, targetTypeModule, sourceTypeModule,\nunresolvedTypes)) {\nreturn false;\n}\n} else if (!checkObjectSubTypeForFieldsByValue(targetFields, sourceFields, targetTypeModule, 
sourceTypeModule,\n(BObject) sourceVal, unresolvedTypes)) {\nreturn false;\n}\nreturn checkObjectSubTypeForMethods(unresolvedTypes, targetFuncs, sourceFuncs, targetTypeModule,\nsourceTypeModule, sourceObjectType, targetType);\n}\nprivate static boolean checkObjectSubTypeForFields(Map targetFields,\nMap sourceFields, String targetTypeModule,\nString sourceTypeModule, List unresolvedTypes) {\nfor (Field lhsField : targetFields.values()) {\nField rhsField = sourceFields.get(lhsField.getFieldName());\nif (rhsField == null ||\n!isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),\nrhsField.getFlags()) || hasIncompatibleReadOnlyFlags(lhsField,\nrhsField) ||\n!checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkObjectSubTypeForFieldsByValue(Map targetFields,\nMap sourceFields, String targetTypeModule,\nString sourceTypeModule, BObject sourceObjVal,\nList unresolvedTypes) {\nfor (Field lhsField : targetFields.values()) {\nString name = lhsField.getFieldName();\nField rhsField = sourceFields.get(name);\nif (rhsField == null ||\n!isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),\nrhsField.getFlags()) || hasIncompatibleReadOnlyFlags(lhsField,\nrhsField)) {\nreturn false;\n}\nif (SymbolFlags.isFlagOn(rhsField.getFlags(), SymbolFlags.FINAL)) {\nObject fieldValue = sourceObjVal.get(StringUtils.fromString(name));\nType fieldValueType = getType(fieldValue);\nif (fieldValueType.isReadOnly()) {\nif (!checkIsLikeType(fieldValue, lhsField.getFieldType())) {\nreturn false;\n}\ncontinue;\n}\nif (!checkIsType(fieldValueType, lhsField.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n} else if (!checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkObjectSubTypeForMethods(List unresolvedTypes,\nMethodType[] targetFuncs,\nMethodType[] sourceFuncs,\nString targetTypeModule, String sourceTypeModule,\nBObjectType sourceType, BObjectType targetType) {\nfor (MethodType lhsFunc : targetFuncs) {\nif (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.RESOURCE)) {\ncontinue;\n}\nMethodType rhsFunc = getMatchingInvokableType(sourceFuncs, lhsFunc, unresolvedTypes);\nif (rhsFunc == null ||\n!isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsFunc.getFlags(),\nrhsFunc.getFlags())) {\nreturn false;\n}\nif (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.REMOTE) != SymbolFlags\n.isFlagOn(rhsFunc.getFlags(), SymbolFlags.REMOTE)) {\nreturn false;\n}\n}\nBTypeIdSet targetTypeIdSet = targetType.typeIdSet;\nif (targetTypeIdSet == null) {\nreturn true;\n}\nBTypeIdSet sourceTypeIdSet = sourceType.typeIdSet;\nif (sourceTypeIdSet == null) {\nreturn false;\n}\nreturn sourceTypeIdSet.containsAll(targetTypeIdSet);\n}\nprivate static boolean isInSameVisibilityRegion(String lhsTypePkg, String rhsTypePkg, long lhsFlags,\nlong rhsFlags) {\nif (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PRIVATE)) {\nreturn lhsTypePkg.equals(rhsTypePkg);\n} else if (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PUBLIC)) {\nreturn SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PUBLIC);\n}\nreturn !SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PRIVATE) && !SymbolFlags\n.isFlagOn(rhsFlags, SymbolFlags.PUBLIC) &&\nlhsTypePkg.equals(rhsTypePkg);\n}\nprivate static MethodType getMatchingInvokableType(MethodType[] rhsFuncs,\nMethodType lhsFunc,\nList unresolvedTypes) {\nreturn 
Arrays.stream(rhsFuncs)\n.filter(rhsFunc -> lhsFunc.getName().equals(rhsFunc.getName()))\n.filter(rhsFunc -> checkFunctionTypeEqualityForObjectType(rhsFunc.getType(), lhsFunc.getType(),\nunresolvedTypes))\n.findFirst()\n.orElse(null);\n}\nprivate static boolean checkFunctionTypeEqualityForObjectType(FunctionType source, FunctionType target,\nList unresolvedTypes) {\nif (hasIncompatibleIsolatedFlags(target, source)) {\nreturn false;\n}\nif (source.getParameterTypes().length != target.getParameterTypes().length) {\nreturn false;\n}\nfor (int i = 0; i < source.getParameterTypes().length; i++) {\nif (!checkIsType(target.getParameterTypes()[i], source.getParameterTypes()[i], unresolvedTypes)) {\nreturn false;\n}\n}\nif (source.getReturnType() == null && target.getReturnType() == null) {\nreturn true;\n} else if (source.getReturnType() == null || target.getReturnType() == null) {\nreturn false;\n}\nreturn checkIsType(source.getReturnType(), target.getReturnType(), unresolvedTypes);\n}\nprivate static boolean checkIsFunctionType(Type sourceType, BFunctionType targetType) {\nif (sourceType.getTag() != TypeTags.FUNCTION_POINTER_TAG) {\nreturn false;\n}\nBFunctionType source = (BFunctionType) sourceType;\nif (hasIncompatibleIsolatedFlags(targetType, source) || hasIncompatibleTransactionalFlags(targetType, source)) {\nreturn false;\n}\nif (SymbolFlags.isFlagOn(targetType.getFlags(), SymbolFlags.ANY_FUNCTION)) {\nreturn true;\n}\nif (source.paramTypes.length != targetType.paramTypes.length) {\nreturn false;\n}\nfor (int i = 0; i < source.paramTypes.length; i++) {\nif (!checkIsType(targetType.paramTypes[i], source.paramTypes[i], new ArrayList<>())) {\nreturn false;\n}\n}\nreturn checkIsType(source.retType, targetType.retType, new ArrayList<>());\n}\nprivate static boolean hasIncompatibleIsolatedFlags(FunctionType target, FunctionType source) {\nreturn SymbolFlags.isFlagOn(target.getFlags(), SymbolFlags.ISOLATED) && !SymbolFlags\n.isFlagOn(source.getFlags(), SymbolFlags.ISOLATED);\n}\nprivate static boolean hasIncompatibleTransactionalFlags(FunctionType target, FunctionType source) {\nreturn SymbolFlags.isFlagOn(source.getFlags(), SymbolFlags.TRANSACTIONAL) && !SymbolFlags\n.isFlagOn(target.getFlags(), SymbolFlags.TRANSACTIONAL);\n}\nprivate static boolean checkIsServiceType(Type sourceType, Type targetType, List unresolvedTypes) {\nif (sourceType.getTag() == TypeTags.SERVICE_TAG) {\nreturn checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);\n}\nif (sourceType.getTag() == TypeTags.OBJECT_TYPE_TAG) {\nvar flags = ((BObjectType) sourceType).flags;\nreturn (flags & SymbolFlags.SERVICE) == SymbolFlags.SERVICE;\n}\nreturn false;\n}\npublic static boolean isInherentlyImmutableType(Type sourceType) {\nif (isSimpleBasicType(sourceType)) {\nreturn true;\n}\nswitch (sourceType.getTag()) {\ncase TypeTags.XML_TEXT_TAG:\ncase TypeTags.FINITE_TYPE_TAG:\ncase TypeTags.READONLY_TAG:\ncase TypeTags.NULL_TAG:\ncase TypeTags.ERROR_TAG:\ncase TypeTags.INVOKABLE_TAG:\ncase TypeTags.SERVICE_TAG:\ncase TypeTags.TYPEDESC_TAG:\ncase TypeTags.FUNCTION_POINTER_TAG:\ncase TypeTags.HANDLE_TAG:\nreturn true;\ncase TypeTags.XML_TAG:\nreturn ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG;\n}\nreturn false;\n}\npublic static boolean isSelectivelyImmutableType(Type type, Set unresolvedTypes) {\nif (!unresolvedTypes.add(type)) {\nreturn true;\n}\nswitch (type.getTag()) {\ncase TypeTags.ANY_TAG:\ncase TypeTags.ANYDATA_TAG:\ncase TypeTags.JSON_TAG:\ncase TypeTags.XML_TAG:\ncase 
TypeTags.XML_COMMENT_TAG:\ncase TypeTags.XML_ELEMENT_TAG:\ncase TypeTags.XML_PI_TAG:\nreturn true;\ncase TypeTags.ARRAY_TAG:\nType elementType = ((BArrayType) type).getElementType();\nreturn isInherentlyImmutableType(elementType) ||\nisSelectivelyImmutableType(elementType, unresolvedTypes);\ncase TypeTags.TUPLE_TAG:\nBTupleType tupleType = (BTupleType) type;\nfor (Type tupMemType : tupleType.getTupleTypes()) {\nif (!isInherentlyImmutableType(tupMemType) &&\n!isSelectivelyImmutableType(tupMemType, unresolvedTypes)) {\nreturn false;\n}\n}\nType tupRestType = tupleType.getRestType();\nif (tupRestType == null) {\nreturn true;\n}\nreturn isInherentlyImmutableType(tupRestType) ||\nisSelectivelyImmutableType(tupRestType, unresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nBRecordType recordType = (BRecordType) type;\nfor (Field field : recordType.getFields().values()) {\nType fieldType = field.getFieldType();\nif (!isInherentlyImmutableType(fieldType) &&\n!isSelectivelyImmutableType(fieldType, unresolvedTypes)) {\nreturn false;\n}\n}\nType recordRestType = recordType.restFieldType;\nif (recordRestType == null) {\nreturn true;\n}\nreturn isInherentlyImmutableType(recordRestType) ||\nisSelectivelyImmutableType(recordRestType, unresolvedTypes);\ncase TypeTags.OBJECT_TYPE_TAG:\nBObjectType objectType = (BObjectType) type;\nif (SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.CLASS) &&\n!SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.READONLY)) {\nreturn false;\n}\nfor (Field field : objectType.getFields().values()) {\nType fieldType = field.getFieldType();\nif (!isInherentlyImmutableType(fieldType) &&\n!isSelectivelyImmutableType(fieldType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\ncase TypeTags.MAP_TAG:\nType constraintType = ((BMapType) type).getConstrainedType();\nreturn isInherentlyImmutableType(constraintType) ||\nisSelectivelyImmutableType(constraintType, unresolvedTypes);\ncase TypeTags.TABLE_TAG:\nType tableConstraintType = ((BTableType) type).getConstrainedType();\nreturn isInherentlyImmutableType(tableConstraintType) ||\nisSelectivelyImmutableType(tableConstraintType, unresolvedTypes);\ncase TypeTags.UNION_TAG:\nboolean readonlyIntersectionExists = false;\nfor (Type memberType : ((BUnionType) type).getMemberTypes()) {\nif (isInherentlyImmutableType(memberType) ||\nisSelectivelyImmutableType(memberType, unresolvedTypes)) {\nreadonlyIntersectionExists = true;\nbreak;\n}\n}\nreturn readonlyIntersectionExists;\ncase TypeTags.INTERSECTION_TAG:\nreturn isSelectivelyImmutableType(((BIntersectionType) type).getEffectiveType(), unresolvedTypes);\n}\nreturn false;\n}\nprivate static boolean checkConstraints(Type sourceConstraint, Type targetConstraint,\nList unresolvedTypes) {\nif (sourceConstraint == null) {\nsourceConstraint = TYPE_ANY;\n}\nif (targetConstraint == null) {\ntargetConstraint = TYPE_ANY;\n}\nreturn checkIsType(sourceConstraint, targetConstraint, unresolvedTypes);\n}\nprivate static boolean isMutable(Object value, Type sourceType) {\nif (value == null || sourceType.getTag() < TypeTags.NULL_TAG ||\nsourceType.getTag() == TypeTags.FINITE_TYPE_TAG) {\nreturn false;\n}\nreturn !((RefValue) value).isFrozen();\n}\nprivate static boolean checkArrayEquivalent(Type actualType, Type expType) {\nif (expType.getTag() == TypeTags.ARRAY_TAG && actualType.getTag() == TypeTags.ARRAY_TAG) {\nBArrayType lhrArrayType = (BArrayType) expType;\nBArrayType rhsArrayType = (BArrayType) actualType;\nreturn checkIsArrayType(rhsArrayType, lhrArrayType, new ArrayList<>());\n}\nreturn expType == 
actualType;\n}\nprivate static boolean checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type) {\nSet visitedTypeSet = new HashSet<>();\nreturn checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(type, visitedTypeSet);\n}\nprivate static boolean checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type,\nSet visitedTypeSet) {\nswitch (type.getTag()) {\ncase TypeTags.NEVER_TAG:\nreturn true;\ncase TypeTags.RECORD_TYPE_TAG:\nBRecordType recordType = (BRecordType) type;\nvisitedTypeSet.add(recordType.getName());\nfor (Field field : recordType.getFields().values()) {\nif ((SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED) ||\n!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) &&\n!visitedTypeSet.contains(field.getFieldType()) &&\ncheckIsNeverTypeOrStructureTypeWithARequiredNeverMember(field.getFieldType(),\nvisitedTypeSet)) {\nreturn true;\n}\n}\nreturn false;\ncase TypeTags.TUPLE_TAG:\nBTupleType tupleType = (BTupleType) type;\nvisitedTypeSet.add(tupleType.getName());\nList tupleTypes = tupleType.getTupleTypes();\nfor (Type mem : tupleTypes) {\nif (!visitedTypeSet.add(mem.getName())) {\ncontinue;\n}\nif (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(mem, visitedTypeSet)) {\nreturn true;\n}\n}\nreturn false;\ncase TypeTags.ARRAY_TAG:\nBArrayType arrayType = (BArrayType) type;\nvisitedTypeSet.add(arrayType.getName());\nType elemType = arrayType.getElementType();\nvisitedTypeSet.add(elemType.getName());\nreturn arrayType.getState() != ArrayState.OPEN &&\ncheckIsNeverTypeOrStructureTypeWithARequiredNeverMember(elemType, visitedTypeSet);\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether a given value conforms to a given type. First it checks the type of the value, and\n* if that fails, falls back to checking the value.\n*\n* @param sourceValue Value to check\n* @param targetType Target type\n* @param unresolvedValues Values that are unresolved so far\n* @param allowNumericConversion Flag indicating whether to perform numeric conversions\n* @return True if the value conforms to the provided type. False, otherwise.\n*/\nprivate static boolean checkIsLikeType(Object sourceValue, Type targetType, List unresolvedValues,\nboolean allowNumericConversion) {\nType sourceType = getType(sourceValue);\nif (checkIsType(sourceType, targetType, new ArrayList<>())) {\nreturn true;\n}\nreturn checkIsLikeOnValue(sourceValue, sourceType, targetType, unresolvedValues, allowNumericConversion);\n}\n/**\n* Check whether a given value conforms to a given type. Strictly checks the value only, and does not take the\n* type of the value into consideration.\n*\n* @param sourceValue Value to check\n* @param sourceType Type of the value\n* @param targetType Target type\n* @param unresolvedValues Values that are unresolved so far\n* @param allowNumericConversion Flag indicating whether to perform numeric conversions\n* @return True if the value conforms to the provided type. False, otherwise.\n*/\nprivate static boolean checkIsLikeOnValue(Object sourceValue, Type sourceType, Type targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nint sourceTypeTag = sourceType.getTag();\nint targetTypeTag = targetType.getTag();\nif (sourceTypeTag == TypeTags.INTERSECTION_TAG) {\nreturn checkIsLikeOnValue(sourceValue, ((BIntersectionType) sourceType).getEffectiveType(),\ntargetTypeTag != TypeTags.INTERSECTION_TAG ? 
targetType :\n((BIntersectionType) targetType).getEffectiveType(),\nunresolvedValues, allowNumericConversion);\n}\nif (targetTypeTag == TypeTags.INTERSECTION_TAG) {\nreturn checkIsLikeOnValue(sourceValue, sourceType, ((BIntersectionType) targetType).getEffectiveType(),\nunresolvedValues, allowNumericConversion);\n}\nif (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) {\nif (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) {\nreturn checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(),\ntargetType, unresolvedValues, allowNumericConversion);\n}\nreturn checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(),\n((BParameterizedType) targetType).getParamValueType(), unresolvedValues,\nallowNumericConversion);\n}\nswitch (targetTypeTag) {\ncase TypeTags.READONLY_TAG:\nreturn true;\ncase TypeTags.BYTE_TAG:\nif (TypeTags.isIntegerTypeTag(sourceTypeTag)) {\nreturn isByteLiteral((Long) sourceValue);\n}\nreturn allowNumericConversion && TypeConverter.isConvertibleToByte(sourceValue);\ncase TypeTags.INT_TAG:\nreturn allowNumericConversion && TypeConverter.isConvertibleToInt(sourceValue);\ncase TypeTags.SIGNED32_INT_TAG:\ncase TypeTags.SIGNED16_INT_TAG:\ncase TypeTags.SIGNED8_INT_TAG:\ncase TypeTags.UNSIGNED32_INT_TAG:\ncase TypeTags.UNSIGNED16_INT_TAG:\ncase TypeTags.UNSIGNED8_INT_TAG:\nif (TypeTags.isIntegerTypeTag(sourceTypeTag) || targetTypeTag == TypeTags.BYTE_TAG) {\nreturn TypeConverter.isConvertibleToIntSubType(sourceValue, targetType);\n}\nreturn allowNumericConversion && TypeConverter.isConvertibleToIntSubType(sourceValue, targetType);\ncase TypeTags.FLOAT_TAG:\ncase TypeTags.DECIMAL_TAG:\nreturn allowNumericConversion && TypeConverter.isConvertibleToFloatingPointTypes(sourceValue);\ncase TypeTags.CHAR_STRING_TAG:\nreturn TypeConverter.isConvertibleToChar(sourceValue);\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsLikeRecordType(sourceValue, (BRecordType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.TABLE_TAG:\nreturn checkIsLikeTableType(sourceValue, (BTableType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.JSON_TAG:\nreturn checkIsLikeJSONType(sourceValue, sourceType, (BJsonType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.MAP_TAG:\nreturn checkIsLikeMapType(sourceValue, (BMapType) targetType, unresolvedValues, allowNumericConversion);\ncase TypeTags.STREAM_TAG:\nreturn checkIsLikeStreamType(sourceValue, (BStreamType) targetType);\ncase TypeTags.ARRAY_TAG:\nreturn checkIsLikeArrayType(sourceValue, (BArrayType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.TUPLE_TAG:\nreturn checkIsLikeTupleType(sourceValue, (BTupleType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.ERROR_TAG:\nreturn checkIsLikeErrorType(sourceValue, (BErrorType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.ANYDATA_TAG:\nreturn checkIsLikeAnydataType(sourceValue, sourceType, unresolvedValues, allowNumericConversion);\ncase TypeTags.FINITE_TYPE_TAG:\nreturn checkFiniteTypeAssignable(sourceValue, sourceType, (BFiniteType) targetType);\ncase TypeTags.XML_ELEMENT_TAG:\nif (sourceTypeTag == TypeTags.XML_TAG) {\nXmlValue xmlSource = (XmlValue) sourceValue;\nreturn xmlSource.isSingleton();\n}\nreturn false;\ncase TypeTags.XML_COMMENT_TAG:\ncase TypeTags.XML_PI_TAG:\ncase TypeTags.XML_TEXT_TAG:\nif (sourceTypeTag == TypeTags.XML_TAG) {\nreturn checkIsLikeNonElementSingleton((XmlValue) sourceValue, 
targetType);\n}\nreturn false;\ncase TypeTags.XML_TAG:\nif (sourceTypeTag == TypeTags.XML_TAG) {\nreturn checkIsLikeXMLSequenceType((XmlValue) sourceValue, targetType);\n}\nreturn false;\ncase TypeTags.UNION_TAG:\nif (allowNumericConversion) {\nList compatibleTypesWithNumConversion = new ArrayList<>();\nList compatibleTypesWithoutNumConversion = new ArrayList<>();\nfor (Type type : ((BUnionType) targetType).getMemberTypes()) {\nList tempList = new ArrayList<>(unresolvedValues.size());\ntempList.addAll(unresolvedValues);\nif (checkIsLikeType(sourceValue, type, tempList, false)) {\ncompatibleTypesWithoutNumConversion.add(type);\n}\nif (checkIsLikeType(sourceValue, type, unresolvedValues, true)) {\ncompatibleTypesWithNumConversion.add(type);\n}\n}\nreturn compatibleTypesWithNumConversion.size() != 0 &&\ncompatibleTypesWithNumConversion.size() - compatibleTypesWithoutNumConversion.size() <= 1;\n} else {\nfor (Type type : ((BUnionType) targetType).getMemberTypes()) {\nif (checkIsLikeType(sourceValue, type, unresolvedValues, false)) {\nreturn true;\n}\n}\n}\nreturn false;\ndefault:\nreturn false;\n}\n}\nprivate static XmlNodeType getXmlNodeType(Type type) {\nXmlNodeType nodeType = null;\nswitch (type.getTag()) {\ncase TypeTags.XML_ELEMENT_TAG:\nnodeType = XmlNodeType.ELEMENT;\nbreak;\ncase TypeTags.XML_COMMENT_TAG:\nnodeType = XmlNodeType.COMMENT;\nbreak;\ncase TypeTags.XML_PI_TAG:\nnodeType = XmlNodeType.PI;\nbreak;\ncase TypeTags.XML_TEXT_TAG:\nnodeType = XmlNodeType.TEXT;\nbreak;\ndefault:\nreturn null;\n}\nreturn nodeType;\n}\nprivate static boolean checkIsLikeNonElementSingleton(XmlValue xmlSource, Type targetType) {\nXmlNodeType nodeType = getXmlNodeType(targetType);\nif (nodeType == null) {\nreturn false;\n}\nif (xmlSource.getNodeType() == nodeType) {\nreturn true;\n}\nif (xmlSource.getNodeType() == XmlNodeType.SEQUENCE) {\nXmlSequence seq = (XmlSequence) xmlSource;\nreturn seq.size() == 1 && seq.getChildrenList().get(0).getNodeType() == nodeType ||\n(nodeType == XmlNodeType.TEXT && seq.isEmpty());\n}\nreturn false;\n}\nprivate static boolean checkIsLikeXMLSequenceType(XmlValue xmlSource, Type targetType) {\nif (xmlSource.getNodeType() != XmlNodeType.SEQUENCE) {\nreturn false;\n}\nSet acceptedNodes = new HashSet<>();\nBXmlType target = (BXmlType) targetType;\nif (target.constraint.getTag() == TypeTags.UNION_TAG) {\ngetXMLNodeOnUnion((BUnionType) target.constraint, acceptedNodes);\n} else {\nacceptedNodes.add(getXmlNodeType(((BXmlType) targetType).constraint));\n}\nXmlSequence seq = (XmlSequence) xmlSource;\nfor (BXml m : seq.getChildrenList()) {\nif (!acceptedNodes.contains(m.getNodeType())) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static void getXMLNodeOnUnion(BUnionType unionType, Set nodeTypes) {\nif (nodeTypes.size() == 4) {\nreturn;\n}\nfor (Type memberType : unionType.getMemberTypes()) {\nif (memberType.getTag() == TypeTags.UNION_TAG) {\ngetXMLNodeOnUnion((BUnionType) memberType, nodeTypes);\n} else {\nnodeTypes.add(getXmlNodeType(memberType));\n}\n}\n}\npublic static boolean isNumericType(Type type) {\nreturn type.getTag() < TypeTags.STRING_TAG || TypeTags.isIntegerTypeTag(type.getTag());\n}\nprivate static boolean checkIsLikeAnydataType(Object sourceValue, Type sourceType,\nList unresolvedValues,\nboolean allowNumericConversion) {\nswitch (sourceType.getTag()) {\ncase TypeTags.RECORD_TYPE_TAG:\ncase TypeTags.JSON_TAG:\ncase TypeTags.MAP_TAG:\nreturn isLikeType(((MapValueImpl) sourceValue).values().toArray(), TYPE_ANYDATA,\nunresolvedValues, allowNumericConversion);\ncase 
TypeTags.ARRAY_TAG:\nArrayValue arr = (ArrayValue) sourceValue;\nBArrayType arrayType = (BArrayType) arr.getType();\nswitch (arrayType.getElementType().getTag()) {\ncase TypeTags.INT_TAG:\ncase TypeTags.FLOAT_TAG:\ncase TypeTags.DECIMAL_TAG:\ncase TypeTags.STRING_TAG:\ncase TypeTags.BOOLEAN_TAG:\ncase TypeTags.BYTE_TAG:\nreturn true;\ndefault:\nreturn isLikeType(arr.getValues(), TYPE_ANYDATA, unresolvedValues,\nallowNumericConversion);\n}\ncase TypeTags.TUPLE_TAG:\nreturn isLikeType(((ArrayValue) sourceValue).getValues(), TYPE_ANYDATA, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.ANYDATA_TAG:\nreturn true;\ncase TypeTags.FINITE_TYPE_TAG:\ncase TypeTags.UNION_TAG:\nreturn checkIsLikeType(sourceValue, TYPE_ANYDATA, unresolvedValues, allowNumericConversion);\ndefault:\nreturn false;\n}\n}\nprivate static boolean isLikeType(Object[] objects, Type targetType, List unresolvedValues,\nboolean allowNumericConversion) {\nfor (Object value : objects) {\nif (!checkIsLikeType(value, targetType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsLikeTupleType(Object sourceValue, BTupleType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (!(sourceValue instanceof ArrayValue)) {\nreturn false;\n}\nArrayValue source = (ArrayValue) sourceValue;\nList targetTypes = targetType.getTupleTypes();\nint sourceTypeSize = source.size();\nint targetTypeSize = targetTypes.size();\nType targetRestType = targetType.getRestType();\nif (sourceTypeSize < targetTypeSize) {\nreturn false;\n}\nif (targetRestType == null && sourceTypeSize > targetTypeSize) {\nreturn false;\n}\nfor (int i = 0; i < targetTypeSize; i++) {\nif (!checkIsLikeType(source.getRefValue(i), targetTypes.get(i), unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nfor (int i = targetTypeSize; i < sourceTypeSize; i++) {\nif (!checkIsLikeType(source.getRefValue(i), targetRestType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nstatic boolean isByteLiteral(long longValue) {\nreturn (longValue >= BBYTE_MIN_VALUE && longValue <= BBYTE_MAX_VALUE);\n}\nstatic boolean isSigned32LiteralValue(Long longObject) {\nreturn (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE);\n}\nstatic boolean isSigned16LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= SIGNED16_MIN_VALUE && longObject.intValue() <= SIGNED16_MAX_VALUE);\n}\nstatic boolean isSigned8LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= SIGNED8_MIN_VALUE && longObject.intValue() <= SIGNED8_MAX_VALUE);\n}\nstatic boolean isUnsigned32LiteralValue(Long longObject) {\nreturn (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE);\n}\nstatic boolean isUnsigned16LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED16_MAX_VALUE);\n}\nstatic boolean isUnsigned8LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED8_MAX_VALUE);\n}\nstatic boolean isCharLiteralValue(Object object) {\nString value;\nif (object instanceof BString) {\nvalue = ((BString) object).getValue();\n} else if (object instanceof String) {\nvalue = (String) object;\n} else {\nreturn false;\n}\nreturn value.codePoints().count() == 1;\n}\nprivate static boolean checkIsLikeArrayType(Object sourceValue, BArrayType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (!(sourceValue instanceof ArrayValue)) {\nreturn 
false;\n}\nArrayValue source = (ArrayValue) sourceValue;\nType targetTypeElementType = targetType.getElementType();\nif (source.getType().getTag() == TypeTags.ARRAY_TAG) {\nType sourceElementType = ((BArrayType) source.getType()).getElementType();\nif (isValueType(sourceElementType)) {\nif (checkIsType(sourceElementType, targetTypeElementType, new ArrayList<>())) {\nreturn true;\n}\nif (allowNumericConversion && isNumericType(sourceElementType)) {\nif (isNumericType(targetTypeElementType)) {\nreturn true;\n}\nif (targetTypeElementType.getTag() != TypeTags.UNION_TAG) {\nreturn false;\n}\nList targetNumericTypes = new ArrayList<>();\nfor (Type memType : ((BUnionType) targetTypeElementType).getMemberTypes()) {\nif (isNumericType(memType) && !targetNumericTypes.contains(memType)) {\ntargetNumericTypes.add(memType);\n}\n}\nreturn targetNumericTypes.size() == 1;\n}\nif (targetTypeElementType.getTag() == TypeTags.FLOAT_TAG ||\ntargetTypeElementType.getTag() == TypeTags.DECIMAL_TAG) {\nreturn false;\n}\n}\n}\nfor (int i = 0; i < source.size(); i++) {\nif (!checkIsLikeType(source.get(i), targetTypeElementType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsLikeMapType(Object sourceValue, BMapType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (!(sourceValue instanceof MapValueImpl)) {\nreturn false;\n}\nfor (Object mapEntry : ((MapValueImpl) sourceValue).values()) {\nif (!checkIsLikeType(mapEntry, targetType.getConstrainedType(), unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsLikeStreamType(Object sourceValue, BStreamType targetType) {\nif (!(sourceValue instanceof StreamValue)) {\nreturn false;\n}\nBStreamType streamType = (BStreamType) ((StreamValue) sourceValue).getType();\nreturn streamType.getConstrainedType() == targetType.getConstrainedType();\n}\nprivate static boolean checkIsLikeJSONType(Object sourceValue, Type sourceType, BJsonType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (sourceType.getTag() == TypeTags.ARRAY_TAG) {\nArrayValue source = (ArrayValue) sourceValue;\nType elementType = ((BArrayType) source.getType()).getElementType();\nif (isValueType(elementType)) {\nreturn checkIsType(elementType, targetType, new ArrayList<>());\n}\nObject[] arrayValues = source.getValues();\nfor (int i = 0; i < ((ArrayValue) sourceValue).size(); i++) {\nif (!checkIsLikeType(arrayValues[i], targetType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n} else if (sourceType.getTag() == TypeTags.MAP_TAG) {\nfor (Object value : ((MapValueImpl) sourceValue).values()) {\nif (!checkIsLikeType(value, targetType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n} else if (sourceType.getTag() == TypeTags.RECORD_TYPE_TAG) {\nTypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);\nif (unresolvedValues.contains(typeValuePair)) {\nreturn true;\n}\nunresolvedValues.add(typeValuePair);\nfor (Object object : ((MapValueImpl) sourceValue).values()) {\nif (!checkIsLikeType(object, targetType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n} else if (sourceType.getTag() == TypeTags.TUPLE_TAG) {\nfor (Object obj : ((TupleValueImpl) sourceValue).getValues()) {\nif (!checkIsLikeType(obj, targetType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nreturn false;\n}\nprivate static boolean 
checkIsLikeRecordType(Object sourceValue, BRecordType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (!(sourceValue instanceof MapValueImpl)) {\nreturn false;\n}\nTypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);\nif (unresolvedValues.contains(typeValuePair)) {\nreturn true;\n}\nunresolvedValues.add(typeValuePair);\nMap targetTypeField = new HashMap<>();\nType restFieldType = targetType.restFieldType;\nfor (Field field : targetType.getFields().values()) {\ntargetTypeField.put(field.getFieldName(), field.getFieldType());\n}\nfor (Map.Entry targetTypeEntry : targetTypeField.entrySet()) {\nObject fieldName = StringUtils.fromString(targetTypeEntry.getKey().toString());\nif (!(((MapValueImpl) sourceValue).containsKey(fieldName)) &&\n!SymbolFlags.isFlagOn(targetType.getFields().get(fieldName.toString()).getFlags(),\nSymbolFlags.OPTIONAL)) {\nreturn false;\n}\n}\nfor (Object object : ((MapValueImpl) sourceValue).entrySet()) {\nMap.Entry valueEntry = (Map.Entry) object;\nString fieldName = valueEntry.getKey().toString();\nif (targetTypeField.containsKey(fieldName)) {\nif (!checkIsLikeType((valueEntry.getValue()), targetTypeField.get(fieldName),\nunresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n} else {\nif (!targetType.sealed) {\nif (!checkIsLikeType((valueEntry.getValue()), restFieldType, unresolvedValues,\nallowNumericConversion)) {\nreturn false;\n}\n} else {\nreturn false;\n}\n}\n}\nreturn true;\n}\nprivate static boolean checkIsLikeTableType(Object sourceValue, BTableType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (!(sourceValue instanceof TableValueImpl)) {\nreturn false;\n}\nTableValueImpl tableValue = (TableValueImpl) sourceValue;\nBTableType sourceType = (BTableType) tableValue.getType();\nif (targetType.getKeyType() != null && sourceType.getFieldNames() == null) {\nreturn false;\n}\nif (sourceType.getKeyType() != null && !checkIsType(tableValue.getKeyType(), targetType.getKeyType())) {\nreturn false;\n}\nTypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);\nif (unresolvedValues.contains(typeValuePair)) {\nreturn true;\n}\nObject[] objects = tableValue.values().toArray();\nfor (Object object : objects) {\nif (!checkIsLikeType(object, targetType.getConstrainedType(), allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkFiniteTypeAssignable(Object sourceValue, Type sourceType, BFiniteType targetType) {\nfor (Object valueSpaceItem : targetType.valueSpace) {\nif (isFiniteTypeValue(sourceValue, sourceType, valueSpaceItem)) {\nreturn true;\n}\n}\nreturn false;\n}\nprotected static boolean isFiniteTypeValue(Object sourceValue, Type sourceType, Object valueSpaceItem) {\nType valueSpaceItemType = getType(valueSpaceItem);\nif (valueSpaceItemType.getTag() > TypeTags.FLOAT_TAG) {\nreturn valueSpaceItemType.getTag() == sourceType.getTag() &&\n(valueSpaceItem == sourceValue || valueSpaceItem.equals(sourceValue));\n}\nswitch (sourceType.getTag()) {\ncase TypeTags.BYTE_TAG:\ncase TypeTags.INT_TAG:\nreturn ((Number) sourceValue).longValue() == ((Number) valueSpaceItem).longValue();\ncase TypeTags.FLOAT_TAG:\nif (sourceType.getTag() != valueSpaceItemType.getTag()) {\nreturn false;\n}\nreturn ((Number) sourceValue).doubleValue() == ((Number) valueSpaceItem).doubleValue();\ncase TypeTags.DECIMAL_TAG:\ndefault:\nif (sourceType.getTag() != valueSpaceItemType.getTag()) {\nreturn false;\n}\nreturn valueSpaceItem.equals(sourceValue);\n}\n}\nprivate static 
boolean checkIsErrorType(Type sourceType, BErrorType targetType, List unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.ERROR_TAG) {\nreturn false;\n}\nTypePair pair = new TypePair(sourceType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nBErrorType bErrorType = (BErrorType) sourceType;\nif (!checkIsType(bErrorType.detailType, targetType.detailType, unresolvedTypes)) {\nreturn false;\n}\nif (targetType.typeIdSet == null) {\nreturn true;\n}\nBTypeIdSet sourceTypeIdSet = bErrorType.typeIdSet;\nif (sourceTypeIdSet == null) {\nreturn false;\n}\nreturn sourceTypeIdSet.containsAll(targetType.typeIdSet);\n}\nprivate static boolean checkIsLikeErrorType(Object sourceValue, BErrorType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nType sourceType = getType(sourceValue);\nif (sourceValue == null || sourceType.getTag() != TypeTags.ERROR_TAG) {\nreturn false;\n}\nif (!checkIsLikeType(((ErrorValue) sourceValue).getDetails(), targetType.detailType, unresolvedValues,\nallowNumericConversion)) {\nreturn false;\n}\nif (targetType.typeIdSet == null) {\nreturn true;\n}\nBTypeIdSet sourceIdSet = ((BErrorType) sourceType).typeIdSet;\nif (sourceIdSet == null) {\nreturn false;\n}\nreturn sourceIdSet.containsAll(targetType.typeIdSet);\n}\nprivate static boolean isSimpleBasicType(Type type) {\nreturn type.getTag() < TypeTags.NULL_TAG;\n}\nprivate static boolean isHandleType(Type type) {\nreturn type.getTag() == TypeTags.HANDLE_TAG;\n}\n/**\n* Deep value equality check for anydata.\n*\n* @param lhsValue The value on the left hand side\n* @param rhsValue The value on the right hand side\n* @param checkedValues Structured value pairs already compared or being compared\n* @return True if values are equal, else false.\n*/\nprivate static boolean isEqual(Object lhsValue, Object rhsValue, List checkedValues) {\nif (lhsValue == rhsValue) {\nreturn true;\n}\nif (null == lhsValue || null == rhsValue) {\nreturn false;\n}\nint lhsValTypeTag = getType(lhsValue).getTag();\nint rhsValTypeTag = getType(rhsValue).getTag();\nswitch (lhsValTypeTag) {\ncase TypeTags.STRING_TAG:\ncase TypeTags.BOOLEAN_TAG:\nreturn lhsValue.equals(rhsValue);\ncase TypeTags.INT_TAG:\nif (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {\nreturn false;\n}\nreturn lhsValue.equals(((Number) rhsValue).longValue());\ncase TypeTags.BYTE_TAG:\nif (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {\nreturn false;\n}\nreturn ((Number) lhsValue).byteValue() == ((Number) rhsValue).byteValue();\ncase TypeTags.FLOAT_TAG:\nif (rhsValTypeTag != TypeTags.FLOAT_TAG) {\nreturn false;\n}\nif (Double.isNaN((Double) lhsValue) && Double.isNaN((Double) rhsValue)) {\nreturn true;\n}\nreturn ((Number) lhsValue).doubleValue() == ((Number) rhsValue).doubleValue();\ncase TypeTags.DECIMAL_TAG:\nif (rhsValTypeTag != TypeTags.DECIMAL_TAG) {\nreturn false;\n}\nreturn checkDecimalEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);\ncase TypeTags.XML_TAG:\nif (lhsValue instanceof XmlText) {\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue, (XmlValue) rhsValue);\n}\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlSequence) lhsValue, (XmlValue) rhsValue);\ncase TypeTags.XML_ELEMENT_TAG:\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlItem) lhsValue, (XmlValue) rhsValue);\ncase TypeTags.XML_COMMENT_TAG:\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlComment) lhsValue, (XmlValue) rhsValue);\ncase 
TypeTags.XML_TEXT_TAG:\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue, (XmlValue) rhsValue);\ncase TypeTags.XML_PI_TAG:\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlPi) lhsValue, (XmlValue) rhsValue);\ncase TypeTags.MAP_TAG:\ncase TypeTags.JSON_TAG:\ncase TypeTags.RECORD_TYPE_TAG:\nreturn isMappingType(rhsValTypeTag) && isEqual((MapValueImpl) lhsValue, (MapValueImpl) rhsValue,\ncheckedValues);\ncase TypeTags.TUPLE_TAG:\ncase TypeTags.ARRAY_TAG:\nreturn isListType(rhsValTypeTag) &&\nisEqual((ArrayValue) lhsValue, (ArrayValue) rhsValue, checkedValues);\ncase TypeTags.ERROR_TAG:\nreturn rhsValTypeTag == TypeTags.ERROR_TAG &&\nisEqual((ErrorValue) lhsValue, (ErrorValue) rhsValue, checkedValues);\ncase TypeTags.SERVICE_TAG:\nbreak;\ncase TypeTags.TABLE_TAG:\nreturn rhsValTypeTag == TypeTags.TABLE_TAG &&\nisEqual((TableValueImpl) lhsValue, (TableValueImpl) rhsValue, checkedValues);\n}\nreturn false;\n}\nprivate static boolean isListType(int typeTag) {\nreturn typeTag == TypeTags.ARRAY_TAG || typeTag == TypeTags.TUPLE_TAG;\n}\nprivate static boolean isMappingType(int typeTag) {\nreturn typeTag == TypeTags.MAP_TAG || typeTag == TypeTags.RECORD_TYPE_TAG || typeTag == TypeTags.JSON_TAG;\n}\n/**\n* Deep equality check for an array/tuple.\n*\n* @param lhsList The array/tuple on the left hand side\n* @param rhsList The array/tuple on the right hand side\n* @param checkedValues Structured value pairs already compared or being compared\n* @return True if the array/tuple values are equal, else false.\n*/\nprivate static boolean isEqual(ArrayValue lhsList, ArrayValue rhsList, List checkedValues) {\nValuePair compValuePair = new ValuePair(lhsList, rhsList);\nif (checkedValues.contains(compValuePair)) {\nreturn true;\n}\ncheckedValues.add(compValuePair);\nif (lhsList.size() != rhsList.size()) {\nreturn false;\n}\nfor (int i = 0; i < lhsList.size(); i++) {\nif (!isEqual(lhsList.get(i), rhsList.get(i), checkedValues)) {\nreturn false;\n}\n}\nreturn true;\n}\n/**\n* Deep equality check for a map.\n*\n* @param lhsMap Map on the left hand side\n* @param rhsMap Map on the right hand side\n* @param checkedValues Structured value pairs already compared or being compared\n* @return True if the map values are equal, else false.\n*/\nprivate static boolean isEqual(MapValueImpl lhsMap, MapValueImpl rhsMap, List checkedValues) {\nValuePair compValuePair = new ValuePair(lhsMap, rhsMap);\nif (checkedValues.contains(compValuePair)) {\nreturn true;\n}\ncheckedValues.add(compValuePair);\nif (lhsMap.size() != rhsMap.size()) {\nreturn false;\n}\nif (!lhsMap.keySet().containsAll(rhsMap.keySet())) {\nreturn false;\n}\nIterator mapIterator = lhsMap.entrySet().iterator();\nwhile (mapIterator.hasNext()) {\nMap.Entry lhsMapEntry = mapIterator.next();\nif (!isEqual(lhsMapEntry.getValue(), rhsMap.get(lhsMapEntry.getKey()), checkedValues)) {\nreturn false;\n}\n}\nreturn true;\n}\n/**\n* Deep equality check for a table.\n*\n* @param lhsTable Table on the left hand side\n* @param rhsTable Table on the right hand side\n* @param checkedValues Structured value pairs already compared or being compared\n* @return True if the table values are equal, else false.\n*/\nprivate static boolean isEqual(TableValueImpl lhsTable, TableValueImpl rhsTable, List checkedValues) {\nValuePair compValuePair = new ValuePair(lhsTable, rhsTable);\nif (checkedValues.contains(compValuePair)) {\nreturn true;\n}\ncheckedValues.add(compValuePair);\nif (lhsTable.size() != rhsTable.size()) {\nreturn false;\n}\nboolean 
isLhsKeyedTable = ((BTableType) lhsTable.getType()).getFieldNames() != null &&\n((BTableType) lhsTable.getType()).getFieldNames().length > 0;\nboolean isRhsKeyedTable = ((BTableType) rhsTable.getType()).getFieldNames() != null &&\n((BTableType) rhsTable.getType()).getFieldNames().length > 0;\nObject[] lhsTableValues = lhsTable.values().toArray();\nObject[] rhsTableValues = rhsTable.values().toArray();\nif (isLhsKeyedTable == isRhsKeyedTable) {\nfor (int i = 0; i < lhsTableValues.length; i++) {\nif (!isEqual(lhsTableValues[i], rhsTableValues[i], checkedValues)) {\nreturn false;\n}\n}\nreturn true;\n}\nreturn false;\n}\n/**\n* Deep equality check for error.\n*\n* @param lhsError The error on the left hand side\n* @param rhsError The error on the right hand side\n* @param checkedValues Errors already compared or being compared\n* @return True if the error values are equal, else false.\n*/\nprivate static boolean isEqual(ErrorValue lhsError, ErrorValue rhsError, List checkedValues) {\nValuePair compValuePair = new ValuePair(lhsError, rhsError);\nif (checkedValues.contains(compValuePair)) {\nreturn true;\n}\ncheckedValues.add(compValuePair);\nreturn isEqual(lhsError.getMessage(), rhsError.getMessage(), checkedValues) &&\nisEqual((MapValueImpl) lhsError.getDetails(), (MapValueImpl) rhsError.getDetails(), checkedValues) &&\nisEqual(lhsError.getCause(), rhsError.getCause(), checkedValues);\n}\n/**\n* Deep equality check for XML Sequence.\n*\n* @param lhsXMLSequence The XML sequence on the left hand side\n* @param rhsXml The XML on the right hand side\n* @return True if the XML values are equal, else false.\n*/\nprivate static boolean isEqual(XmlSequence lhsXMLSequence, XmlValue rhsXml) {\nif (rhsXml instanceof XmlSequence) {\nXmlSequence rhsXMLSequence = (XmlSequence) rhsXml;\nreturn isXMLSequenceChildrenEqual(lhsXMLSequence.getChildrenList(), rhsXMLSequence.getChildrenList());\n}\nif (rhsXml instanceof XmlItem) {\nreturn lhsXMLSequence.getChildrenList().size() == 1 &&\nisEqual(lhsXMLSequence.getChildrenList().get(0), rhsXml);\n}\nreturn lhsXMLSequence.getChildrenList().isEmpty() &&\nTypeUtils.getType(rhsXml) == PredefinedTypes.TYPE_XML_NEVER;\n}\n/**\n* Deep equality check for XML item.\n*\n* @param lhsXMLItem The XML item on the left hand side\n* @param rhsXml The XML on the right hand side\n* @return True if the XML values are equal, else false.\n*/\nprivate static boolean isEqual(XmlItem lhsXMLItem, XmlValue rhsXml) {\nif (rhsXml instanceof XmlItem) {\nXmlItem rhsXMLItem = (XmlItem) rhsXml;\nif (!(rhsXMLItem.getQName().equals(lhsXMLItem.getQName()))) {\nreturn false;\n}\nif (!(rhsXMLItem.getAttributesMap().entrySet().equals(lhsXMLItem.getAttributesMap().entrySet()))) {\nreturn false;\n}\nreturn isEqual(rhsXMLItem.getChildrenSeq(), lhsXMLItem.getChildrenSeq());\n}\nif (rhsXml instanceof XmlSequence) {\nXmlSequence rhsXMLSequence = (XmlSequence) rhsXml;\nreturn rhsXMLSequence.getChildrenList().size() == 1 &&\nisEqual(lhsXMLItem, rhsXMLSequence.getChildrenList().get(0));\n}\nreturn false;\n}\n/**\n* Deep equality check for XML Text.\n*\n* @param lhsXMLText The XML text on the left hand side\n* @param rhsXml The XML on the right hand side\n* @return True if the XML values are equal, else false.\n*/\nprivate static boolean isEqual(XmlText lhsXMLText, XmlValue rhsXml) {\nif (rhsXml instanceof XmlText) {\nXmlText rhsXMLText = (XmlText) rhsXml;\nreturn lhsXMLText.getTextValue().equals(rhsXMLText.getTextValue());\n}\nreturn lhsXMLText.getType() == PredefinedTypes.TYPE_XML_NEVER && rhsXml instanceof 
XmlSequence &&\n((XmlSequence) rhsXml).getChildrenList().isEmpty();\n}\n/**\n* Deep equality check for XML Comment.\n*\n* @param lhsXMLComment The XML comment on the left hand side\n* @param rhsXml The XML on the right hand side\n* @return True if the XML values are equal, else false.\n*/\nprivate static boolean isEqual(XmlComment lhsXMLComment, XmlValue rhsXml) {\nif (!(rhsXml instanceof XmlComment)) {\nreturn false;\n}\nXmlComment rhsXMLComment = (XmlComment) rhsXml;\nreturn lhsXMLComment.getTextValue().equals(rhsXMLComment.getTextValue());\n}\n/**\n* Deep equality check for XML Processing Instruction.\n*\n* @param lhsXMLPi The XML processing instruction on the left hand side\n* @param rhsXml The XML on the right hand side\n* @return True if the XML values are equal, else false.\n*/\nprivate static boolean isEqual(XmlPi lhsXMLPi, XmlValue rhsXml) {\nif (!(rhsXml instanceof XmlPi)) {\nreturn false;\n}\nXmlPi rhsXMLPi = (XmlPi) rhsXml;\nreturn lhsXMLPi.getData().equals(rhsXMLPi.getData()) && lhsXMLPi.getTarget().equals(rhsXMLPi.getTarget());\n}\nprivate static boolean isXMLSequenceChildrenEqual(List lhsList, List rhsList) {\nif (lhsList.size() != rhsList.size()) {\nreturn false;\n}\nfor (int i = 0; i < lhsList.size(); i++) {\nif (!isEqual(lhsList.get(i), rhsList.get(i))) {\nreturn false;\n}\n}\nreturn true;\n}\n/**\n* Type vector of size two, to hold the source and the target types.\n*\n* @since 0.995.0\n*/\nprivate static class TypePair {\nType sourceType;\nType targetType;\npublic TypePair(Type sourceType, Type targetType) {\nthis.sourceType = sourceType;\nthis.targetType = targetType;\n}\n@Override\npublic boolean equals(Object obj) {\nif (!(obj instanceof TypePair)) {\nreturn false;\n}\nTypePair other = (TypePair) obj;\nreturn this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);\n}\n}\n/**\n* Check the reference equality of handle values.\n*\n* @param lhsValue The value on the left hand side\n* @param rhsValue The value on the right hand side\n* @return True if values are equal, else false.\n*/\nprivate static boolean isHandleValueRefEqual(Object lhsValue, Object rhsValue) {\nHandleValue lhsHandle = (HandleValue) lhsValue;\nHandleValue rhsHandle = (HandleValue) rhsValue;\nreturn lhsHandle.getValue() == rhsHandle.getValue();\n}\n/**\n* Unordered value vector of size two, to hold two values being compared.\n*\n* @since 0.995.0\n*/\nprivate static class ValuePair {\nArrayList valueList = new ArrayList<>(2);\nValuePair(Object valueOne, Object valueTwo) {\nvalueList.add(valueOne);\nvalueList.add(valueTwo);\n}\n@Override\npublic boolean equals(Object otherPair) {\nif (!(otherPair instanceof ValuePair)) {\nreturn false;\n}\nArrayList otherList = ((ValuePair) otherPair).valueList;\nArrayList currentList = valueList;\nif (otherList.size() != currentList.size()) {\nreturn false;\n}\nfor (int i = 0; i < otherList.size(); i++) {\nif (!otherList.get(i).equals(currentList.get(i))) {\nreturn false;\n}\n}\nreturn true;\n}\n}\n/**\n* Checks whether a given {@link BType} has an implicit initial value or not.\n* @param type {@link BType} to be analyzed.\n* @return whether there's an implicit initial value or not.\n*/\npublic static boolean hasFillerValue(Type type) {\nreturn hasFillerValue(type, new ArrayList<>());\n}\nprivate static boolean hasFillerValue(Type type, List unanalyzedTypes) {\nif (type == null) {\nreturn true;\n}\nif (type.getTag() < TypeTags.RECORD_TYPE_TAG &&\n!(type.getTag() == TypeTags.CHAR_STRING_TAG || type.getTag() == TypeTags.NEVER_TAG)) {\nreturn 
true;\n}\nswitch (type.getTag()) {\ncase TypeTags.STREAM_TAG:\ncase TypeTags.MAP_TAG:\ncase TypeTags.ANY_TAG:\nreturn true;\ncase TypeTags.ARRAY_TAG:\nreturn checkFillerValue((BArrayType) type, unanalyzedTypes);\ncase TypeTags.FINITE_TYPE_TAG:\nreturn checkFillerValue((BFiniteType) type);\ncase TypeTags.OBJECT_TYPE_TAG:\nreturn checkFillerValue((BObjectType) type);\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkFillerValue((BRecordType) type, unanalyzedTypes);\ncase TypeTags.TUPLE_TAG:\nreturn checkFillerValue((BTupleType) type, unanalyzedTypes);\ncase TypeTags.UNION_TAG:\nreturn checkFillerValue((BUnionType) type, unanalyzedTypes);\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkFillerValue(BTupleType tupleType, List unAnalyzedTypes) {\nif (unAnalyzedTypes.contains(tupleType)) {\nreturn true;\n}\nunAnalyzedTypes.add(tupleType);\nfor (Type member : tupleType.getTupleTypes()) {\nif (!hasFillerValue(member, unAnalyzedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkFillerValue(BUnionType type, List unAnalyzedTypes) {\nif (unAnalyzedTypes.contains(type)) {\nreturn true;\n}\nunAnalyzedTypes.add(type);\nif (type.isNullable()) {\nreturn true;\n}\nIterator iterator = type.getMemberTypes().iterator();\nType firstMember;\nfor (firstMember = iterator.next(); iterator.hasNext(); ) {\nif (!isSameType(firstMember, iterator.next())) {\nreturn false;\n}\n}\nreturn isValueType(firstMember) && hasFillerValue(firstMember);\n}\nprivate static boolean checkFillerValue(BRecordType type, List unAnalyzedTypes) {\nif (unAnalyzedTypes.contains(type)) {\nreturn true;\n}\nunAnalyzedTypes.add(type);\nfor (Field field : type.getFields().values()) {\nif (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) {\ncontinue;\n}\nif (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED)) {\ncontinue;\n}\nreturn false;\n}\nreturn true;\n}\nprivate static boolean checkFillerValue(BArrayType type, List unAnalyzedTypes) {\nreturn type.getState() == ArrayState.OPEN || hasFillerValue(type.getElementType(), unAnalyzedTypes);\n}\nprivate static boolean checkFillerValue(BObjectType type) {\nif (type.getTag() == TypeTags.SERVICE_TAG) {\nreturn false;\n} else {\nMethodType generatedInitializer = type.generatedInitializer;\nif (generatedInitializer == null) {\nreturn false;\n}\nFunctionType initFuncType = generatedInitializer.getType();\nboolean noParams = initFuncType.getParameterTypes().length == 0;\nboolean nilReturn = initFuncType.getReturnType().getTag() == TypeTags.NULL_TAG;\nreturn noParams && nilReturn;\n}\n}\nprivate static boolean checkFillerValue(BFiniteType type) {\nfor (Object value: type.valueSpace) {\nif (value == null) {\nreturn true;\n}\n}\nif (type.valueSpace.size() == 1) {\nreturn true;\n}\nObject firstElement = type.valueSpace.iterator().next();\nfor (Object value : type.valueSpace) {\nif (value.getClass() != firstElement.getClass()) {\nreturn false;\n}\n}\nif (firstElement instanceof String) {\nreturn containsElement(type.valueSpace, \"\\\"\\\"\");\n} else if (firstElement instanceof Byte\n|| firstElement instanceof Integer\n|| firstElement instanceof Long) {\nreturn containsElement(type.valueSpace, \"0\");\n} else if (firstElement instanceof Float\n|| firstElement instanceof Double\n|| firstElement instanceof BigDecimal) {\nreturn containsElement(type.valueSpace, \"0.0\");\n} else if (firstElement instanceof Boolean) {\nreturn containsElement(type.valueSpace, \"false\");\n} else {\nreturn false;\n}\n}\nprivate static boolean containsElement(Set valueSpace, 
String e) {\nfor (Object value : valueSpace) {\nif (value != null && value.toString().equals(e)) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate static boolean containsType(Set valueSpace, Type type) {\nfor (Object value : valueSpace) {\nif (!isSameType(type, getType(value))) {\nreturn false;\n}\n}\nreturn true;\n}\npublic static Object handleAnydataValues(Object sourceVal, Type targetType) {\nif (sourceVal != null && !(sourceVal instanceof Number) && !(sourceVal instanceof BString) &&\n!(sourceVal instanceof Boolean) && !(sourceVal instanceof BValue)) {\nthrow ErrorUtils.createJToBTypeCastError(sourceVal.getClass(), targetType);\n}\nreturn sourceVal;\n}\nprivate TypeChecker() {\n}\n}", + "context_after": "class TypeChecker {\npublic static Object checkCast(Object sourceVal, Type targetType) {\nif (checkIsType(sourceVal, targetType)) {\nreturn sourceVal;\n}\nType sourceType = getType(sourceVal);\nif (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() <= TypeTags.BOOLEAN_TAG) {\nreturn TypeConverter.castValues(targetType, sourceVal);\n}\nif (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() == TypeTags.UNION_TAG) {\nfor (Type memberType : ((BUnionType) targetType).getMemberTypes()) {\ntry {\nreturn TypeConverter.castValues(memberType, sourceVal);\n} catch (Exception e) {\n}\n}\n}\nthrow ErrorUtils.createTypeCastError(sourceVal, targetType);\n}\npublic static long anyToInt(Object sourceVal) {\nreturn TypeConverter.anyToIntCast(sourceVal,\n() -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT));\n}\npublic static long anyToSigned32(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_32,\n() -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_INT_SIGNED_32));\n}\npublic static long anyToSigned16(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_16,\n() -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_INT_SIGNED_16));\n}\npublic static long anyToSigned8(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_8,\n() -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_INT_SIGNED_8));\n}\npublic static long anyToUnsigned32(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_32,\n() -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_INT_UNSIGNED_32));\n}\npublic static long anyToUnsigned16(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_16,\n() -> ErrorUtils\n.createTypeCastError(sourceVal, TYPE_INT_UNSIGNED_16));\n}\npublic static long anyToUnsigned8(Object sourceVal) {\nreturn TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_8,\n() -> ErrorUtils\n.createTypeCastError(sourceVal,\nTYPE_INT_UNSIGNED_8));\n}\npublic static double anyToFloat(Object sourceVal) {\nreturn TypeConverter.anyToFloatCast(sourceVal, () -> ErrorUtils\n.createTypeCastError(sourceVal, TYPE_FLOAT));\n}\npublic static boolean anyToBoolean(Object sourceVal) {\nreturn TypeConverter.anyToBooleanCast(sourceVal, () -> ErrorUtils\n.createTypeCastError(sourceVal, TYPE_BOOLEAN));\n}\npublic static int anyToByte(Object sourceVal) {\nreturn TypeConverter.anyToByteCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_BYTE));\n}\npublic static DecimalValue anyToDecimal(Object sourceVal) {\nreturn TypeConverter.anyToDecimalCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal,\nTYPE_DECIMAL));\n}\npublic static byte anyToJByte(Object sourceVal) {\nreturn 
TypeConverter.anyToJByteCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"byte\"));\n}\npublic static char anyToJChar(Object sourceVal) {\nreturn TypeConverter.anyToJCharCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"char\"));\n}\npublic static short anyToJShort(Object sourceVal) {\nreturn TypeConverter.anyToJShortCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"short\"));\n}\npublic static int anyToJInt(Object sourceVal) {\nreturn TypeConverter.anyToJIntCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"int\"));\n}\npublic static long anyToJLong(Object sourceVal) {\nreturn TypeConverter.anyToJLongCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"long\"));\n}\npublic static float anyToJFloat(Object sourceVal) {\nreturn TypeConverter.anyToJFloatCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"float\"));\n}\npublic static double anyToJDouble(Object sourceVal) {\nreturn TypeConverter.anyToJDoubleCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"double\"));\n}\npublic static boolean anyToJBoolean(Object sourceVal) {\nreturn TypeConverter.anyToJBooleanCast(sourceVal,\n() -> ErrorUtils.createBToJTypeCastError(sourceVal, \"boolean\"));\n}\n/**\n* Check whether a given value belongs to the given type.\n*\n* @param sourceVal value to check the type\n* @param targetType type to be tested against\n* @return true if the value belongs to the given type, false otherwise\n*/\npublic static boolean checkIsType(Object sourceVal, Type targetType) {\nreturn checkIsType(sourceVal, getType(sourceVal), targetType);\n}\n/**\n* Check whether a given value belongs to the given type.\n*\n* @param sourceVal value to check the type\n* @param sourceType type of the value\n* @param targetType type to be tested against\n* @return true if the value belongs to the given type, false otherwise\n*/\npublic static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType) {\nif (checkIsType(sourceVal, sourceType, targetType, null)) {\nreturn true;\n}\nif (sourceType.getTag() == TypeTags.XML_TAG) {\nXmlValue val = (XmlValue) sourceVal;\nif (val.getNodeType() == XmlNodeType.SEQUENCE) {\nreturn checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false);\n}\n}\nif (isMutable(sourceVal, sourceType)) {\nreturn false;\n}\nreturn checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false);\n}\n/**\n* Check whether a given value has the same shape as the given type.\n*\n* @param sourceValue value to check the shape\n* @param targetType type to check the shape against\n* @return true if the value has the same shape as the given type; false otherwise\n*/\npublic static boolean checkIsLikeType(Object sourceValue, Type targetType) {\nreturn checkIsLikeType(sourceValue, targetType, false);\n}\n/**\n* Check whether a given value has the same shape as the given type.\n*\n* @param sourceValue value to check the shape\n* @param targetType type to check the shape against\n* @param allowNumericConversion whether numeric conversion is allowed to change the shape to the target type\n* @return true if the value has the same shape as the given type; false otherwise\n*/\npublic static boolean checkIsLikeType(Object sourceValue, Type targetType, boolean allowNumericConversion) {\nreturn checkIsLikeType(sourceValue, targetType, new ArrayList<>(), allowNumericConversion);\n}\n/**\n* Check whether two types are the same.\n*\n* @param sourceType type to 
test\n* @param targetType type to test against\n* @return true if the two types are the same; false otherwise\n*/\npublic static boolean isSameType(Type sourceType, Type targetType) {\nint sourceTypeTag = sourceType.getTag();\nint targetTypeTag = targetType.getTag();\nif (sourceType == targetType) {\nreturn true;\n}\nif (sourceTypeTag == targetTypeTag) {\nif (sourceType.equals(targetType)) {\nreturn true;\n}\nswitch (sourceTypeTag) {\ncase TypeTags.ARRAY_TAG:\nreturn checkArrayEquivalent(sourceType, targetType);\ncase TypeTags.FINITE_TYPE_TAG:\nSet sourceValueSpace = ((BFiniteType) sourceType).valueSpace;\nSet targetValueSpace = ((BFiniteType) targetType).valueSpace;\nif (sourceValueSpace.size() != targetValueSpace.size()) {\nreturn false;\n}\nfor (Object sourceVal : sourceValueSpace) {\nif (!containsType(targetValueSpace, getType(sourceVal))) {\nreturn false;\n}\n}\nreturn true;\ndefault:\nbreak;\n}\n}\nif (sourceTypeTag == TypeTags.FINITE_TYPE_TAG) {\nfor (Object value : ((BFiniteType) sourceType).valueSpace) {\nif (!isSameType(getType(value), targetType)) {\nreturn false;\n}\n}\nreturn true;\n}\nif (targetTypeTag == TypeTags.FINITE_TYPE_TAG) {\nfor (Object value : ((BFiniteType) targetType).valueSpace) {\nif (!isSameType(getType(value), sourceType)) {\nreturn false;\n}\n}\nreturn true;\n}\nreturn false;\n}\npublic static Type getType(Object value) {\nif (value == null) {\nreturn TYPE_NULL;\n} else if (value instanceof Number) {\nif (value instanceof Long) {\nreturn TYPE_INT;\n} else if (value instanceof Double) {\nreturn TYPE_FLOAT;\n} else if (value instanceof Integer || value instanceof Byte) {\nreturn TYPE_BYTE;\n}\n} else if (value instanceof BString) {\nreturn TYPE_STRING;\n} else if (value instanceof Boolean) {\nreturn TYPE_BOOLEAN;\n}\nreturn ((BValue) value).getType();\n}\n/**\n* Deep value equality check for anydata.\n*\n* @param lhsValue The value on the left hand side\n* @param rhsValue The value on the right hand side\n* @return True if values are equal, else false.\n*/\npublic static boolean isEqual(Object lhsValue, Object rhsValue) {\nreturn isEqual(lhsValue, rhsValue, new ArrayList<>());\n}\n/**\n* Check if two decimal values are equal in value.\n*\n* @param lhsValue The value on the left hand side\n* @param rhsValue The value on the right hand side\n* @return True if values are equal, else false.\n*/\npublic static boolean checkDecimalEqual(DecimalValue lhsValue, DecimalValue rhsValue) {\nreturn isDecimalRealNumber(lhsValue) && isDecimalRealNumber(rhsValue) &&\nlhsValue.decimalValue().compareTo(rhsValue.decimalValue()) == 0;\n}\n/**\n* Check if two decimal values are exactly equal.\n*\n* @param lhsValue The value on the left-hand side\n* @param rhsValue The value on the right-hand side\n* @return True if values are exactly equal, else false.\n*/\npublic static boolean checkDecimalExactEqual(DecimalValue lhsValue, DecimalValue rhsValue) {\nreturn isDecimalRealNumber(lhsValue) && isDecimalRealNumber(rhsValue)\n&& lhsValue.decimalValue().equals(rhsValue.decimalValue());\n}\n/**\n* Checks if the given decimal number is a real number.\n*\n* @param decimalValue The decimal value being checked\n* @return True if the decimal value is a real number.\n*/\nprivate static boolean isDecimalRealNumber(DecimalValue decimalValue) {\nreturn decimalValue.valueKind == DecimalValueKind.ZERO || decimalValue.valueKind == DecimalValueKind.OTHER;\n}\n/**\n* Reference equality check for values. 
If both the values are simple basic types, returns the same\n* result as {@link\n*\n* @param lhsValue The value on the left hand side\n* @param rhsValue The value on the right hand side\n* @return True if the values are reference equal, or, for simple basic types, if the values are equal;\n* else false.\n*/\nprivate static boolean isXMLValueRefEqual(XmlValue lhsValue, XmlValue rhsValue) {\nif (lhsValue.getNodeType() != rhsValue.getNodeType()) {\nreturn false;\n}\nif (lhsValue.getNodeType() == XmlNodeType.SEQUENCE && rhsValue.getNodeType() == XmlNodeType.SEQUENCE) {\nreturn isXMLSequenceRefEqual((XmlSequence) lhsValue, (XmlSequence) rhsValue);\n}\nif (lhsValue.getNodeType() == XmlNodeType.TEXT && rhsValue.getNodeType() == XmlNodeType.TEXT) {\nreturn isEqual(lhsValue, rhsValue);\n}\nreturn false;\n}\nprivate static boolean isXMLSequenceRefEqual(XmlSequence lhsValue, XmlSequence rhsValue) {\nIterator lhsIter = lhsValue.getChildrenList().iterator();\nIterator rhsIter = rhsValue.getChildrenList().iterator();\nwhile (lhsIter.hasNext() && rhsIter.hasNext()) {\nBXml l = lhsIter.next();\nBXml r = rhsIter.next();\nif (!(l == r || isXMLValueRefEqual((XmlValue) l, (XmlValue) r))) {\nreturn false;\n}\n}\nreturn lhsIter.hasNext() == rhsIter.hasNext();\n}\n/**\n* Get the typedesc of a value.\n*\n* @param value Value\n* @return type desc associated with the value\n*/\npublic static TypedescValue getTypedesc(Object value) {\nType type = TypeChecker.getType(value);\nif (type == null) {\nreturn null;\n}\nif (isSimpleBasicType(type)) {\nreturn new TypedescValueImpl(new BFiniteType(value.toString(), Set.of(value), 0));\n}\nif (value instanceof RefValue) {\nreturn (TypedescValue) ((RefValue) value).getTypedesc();\n}\nreturn new TypedescValueImpl(type);\n}\n/**\n* Get the annotation value if present.\n*\n* @param typedescValue The typedesc value\n* @param annotTag The annot-tag-reference\n* @return the annotation value if present, else nil\n*/\npublic static Object getAnnotValue(TypedescValue typedescValue, String annotTag) {\nType describingType = typedescValue.getDescribingType();\nif (!(describingType instanceof BAnnotatableType)) {\nreturn null;\n}\nreturn ((BAnnotatableType) describingType).getAnnotation(StringUtils.fromString(annotTag));\n}\npublic static Object getAnnotValue(TypedescValue typedescValue, BString annotTag) {\nType describingType = typedescValue.getDescribingType();\nif (!(describingType instanceof BAnnotatableType)) {\nreturn null;\n}\nreturn ((BAnnotatableType) describingType).getAnnotation(annotTag);\n}\n/**\n* Check whether a given type is equivalent to a target type.\n*\n* @param sourceType type to check\n* @param targetType type to compare with\n* @return flag indicating the equivalence of the two types\n*/\npublic static boolean checkIsType(Type sourceType, Type targetType) {\nreturn checkIsType(sourceType, targetType, (List) null);\n}\n@Deprecated\npublic static boolean checkIsType(Type sourceType, Type targetType, List unresolvedTypes) {\nif (sourceType == targetType || (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {\nreturn true;\n}\nif (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(sourceType)) {\nreturn true;\n}\nif (targetType.isReadOnly() && !sourceType.isReadOnly()) {\nreturn false;\n}\nint sourceTypeTag = sourceType.getTag();\nint targetTypeTag = targetType.getTag();\nif (sourceTypeTag == TypeTags.INTERSECTION_TAG) {\nreturn checkIsType(((BIntersectionType) sourceType).getEffectiveType(),\ntargetTypeTag != 
TypeTags.INTERSECTION_TAG ? targetType :\n((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes);\n}\nif (targetTypeTag == TypeTags.INTERSECTION_TAG) {\nreturn checkIsType(sourceType, ((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes);\n}\nif (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) {\nif (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) {\nreturn checkIsType(((BParameterizedType) sourceType).getParamValueType(), targetType, unresolvedTypes);\n}\nreturn checkIsType(((BParameterizedType) sourceType).getParamValueType(),\n((BParameterizedType) targetType).getParamValueType(), unresolvedTypes);\n}\nif (sourceTypeTag == TypeTags.READONLY_TAG) {\nreturn checkIsType(PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE,\ntargetType, unresolvedTypes);\n}\nif (targetTypeTag == TypeTags.READONLY_TAG) {\nreturn checkIsType(sourceType, PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE, unresolvedTypes);\n}\nif (sourceTypeTag == TypeTags.UNION_TAG) {\nreturn isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);\n}\nif (sourceTypeTag == TypeTags.FINITE_TYPE_TAG &&\n(targetTypeTag == TypeTags.FINITE_TYPE_TAG || targetTypeTag <= TypeTags.NULL_TAG ||\ntargetTypeTag == TypeTags.XML_TEXT_TAG)) {\nreturn isFiniteTypeMatch((BFiniteType) sourceType, targetType);\n}\nswitch (targetTypeTag) {\ncase TypeTags.BYTE_TAG:\ncase TypeTags.SIGNED8_INT_TAG:\ncase TypeTags.FLOAT_TAG:\ncase TypeTags.DECIMAL_TAG:\ncase TypeTags.CHAR_STRING_TAG:\ncase TypeTags.BOOLEAN_TAG:\ncase TypeTags.NULL_TAG:\nreturn sourceTypeTag == targetTypeTag;\ncase TypeTags.STRING_TAG:\nreturn TypeTags.isStringTypeTag(sourceTypeTag);\ncase TypeTags.XML_TEXT_TAG:\nif (sourceTypeTag == TypeTags.XML_TAG) {\nreturn ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG;\n}\nreturn sourceTypeTag == targetTypeTag;\ncase TypeTags.INT_TAG:\nreturn sourceTypeTag == TypeTags.INT_TAG || sourceTypeTag == TypeTags.BYTE_TAG ||\n(sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.UNSIGNED32_INT_TAG);\ncase TypeTags.SIGNED16_INT_TAG:\nreturn sourceTypeTag == TypeTags.BYTE_TAG ||\n(sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED16_INT_TAG);\ncase TypeTags.SIGNED32_INT_TAG:\nreturn sourceTypeTag == TypeTags.BYTE_TAG ||\n(sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED32_INT_TAG);\ncase TypeTags.UNSIGNED8_INT_TAG:\nreturn sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG;\ncase TypeTags.UNSIGNED16_INT_TAG:\nreturn sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG ||\nsourceTypeTag == TypeTags.UNSIGNED16_INT_TAG;\ncase TypeTags.UNSIGNED32_INT_TAG:\nreturn sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG ||\nsourceTypeTag == TypeTags.UNSIGNED16_INT_TAG || sourceTypeTag == TypeTags.UNSIGNED32_INT_TAG;\ncase TypeTags.ANY_TAG:\nreturn checkIsAnyType(sourceType);\ncase TypeTags.ANYDATA_TAG:\nreturn sourceType.isAnydata();\ncase TypeTags.SERVICE_TAG:\nreturn checkIsServiceType(sourceType, targetType,\nunresolvedTypes == null ? 
new ArrayList<>() : unresolvedTypes);\ncase TypeTags.HANDLE_TAG:\nreturn sourceTypeTag == TypeTags.HANDLE_TAG;\ncase TypeTags.READONLY_TAG:\nreturn isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();\ncase TypeTags.XML_ELEMENT_TAG:\ncase TypeTags.XML_COMMENT_TAG:\ncase TypeTags.XML_PI_TAG:\nreturn targetTypeTag == sourceTypeTag;\ndefault:\nreturn checkIsRecursiveType(sourceType, targetType,\nunresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);\n}\n}\nprivate static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType,\nList unresolvedTypes) {\nint sourceTypeTag = sourceType.getTag();\nint targetTypeTag = targetType.getTag();\nif (sourceTypeTag != TypeTags.RECORD_TYPE_TAG && sourceTypeTag != TypeTags.OBJECT_TYPE_TAG) {\nreturn checkIsType(sourceType, targetType);\n}\nif (targetTypeTag == TypeTags.INTERSECTION_TAG) {\ntargetType = ((BIntersectionType) targetType).getEffectiveType();\ntargetTypeTag = targetType.getTag();\n}\nif (sourceType == targetType || (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {\nreturn true;\n}\nif (targetType.isReadOnly() && !sourceType.isReadOnly()) {\nreturn false;\n}\nswitch (targetTypeTag) {\ncase TypeTags.ANY_TAG:\nreturn checkIsAnyType(sourceType);\ncase TypeTags.READONLY_TAG:\nreturn isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();\ndefault:\nreturn checkIsRecursiveTypeOnValue(sourceVal, sourceType, targetType, sourceTypeTag, targetTypeTag,\nunresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);\n}\n}\nprivate static boolean checkTypeDescType(Type sourceType, BTypedescType targetType,\nList unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.TYPEDESC_TAG) {\nreturn false;\n}\nBTypedescType sourceTypedesc = (BTypedescType) sourceType;\nreturn checkIsType(sourceTypedesc.getConstraint(), targetType.getConstraint(), unresolvedTypes);\n}\nprivate static boolean checkIsRecursiveType(Type sourceType, Type targetType, List unresolvedTypes) {\nswitch (targetType.getTag()) {\ncase TypeTags.MAP_TAG:\nreturn checkIsMapType(sourceType, (BMapType) targetType, unresolvedTypes);\ncase TypeTags.STREAM_TAG:\nreturn checkIsStreamType(sourceType, (BStreamType) targetType, unresolvedTypes);\ncase TypeTags.TABLE_TAG:\nreturn checkIsTableType(sourceType, (BTableType) targetType, unresolvedTypes);\ncase TypeTags.JSON_TAG:\nreturn checkIsJSONType(sourceType, unresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsRecordType(sourceType, (BRecordType) targetType, unresolvedTypes);\ncase TypeTags.FUNCTION_POINTER_TAG:\nreturn checkIsFunctionType(sourceType, (BFunctionType) targetType);\ncase TypeTags.ARRAY_TAG:\nreturn checkIsArrayType(sourceType, (BArrayType) targetType, unresolvedTypes);\ncase TypeTags.TUPLE_TAG:\nreturn checkIsTupleType(sourceType, (BTupleType) targetType, unresolvedTypes);\ncase TypeTags.UNION_TAG:\nreturn checkIsUnionType(sourceType, (BUnionType) targetType, unresolvedTypes);\ncase TypeTags.OBJECT_TYPE_TAG:\nreturn checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);\ncase TypeTags.FINITE_TYPE_TAG:\nreturn checkIsFiniteType(sourceType, (BFiniteType) targetType);\ncase TypeTags.FUTURE_TAG:\nreturn checkIsFutureType(sourceType, (BFutureType) targetType, unresolvedTypes);\ncase TypeTags.ERROR_TAG:\nreturn checkIsErrorType(sourceType, (BErrorType) targetType, unresolvedTypes);\ncase TypeTags.TYPEDESC_TAG:\nreturn checkTypeDescType(sourceType, (BTypedescType) targetType, unresolvedTypes);\ncase TypeTags.XML_TAG:\nreturn 
checkIsXMLType(sourceType, targetType, unresolvedTypes);\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkIsRecursiveTypeOnValue(Object sourceVal, Type sourceType, Type targetType,\nint sourceTypeTag, int targetTypeTag,\nList unresolvedTypes) {\nswitch (targetTypeTag) {\ncase TypeTags.ANYDATA_TAG:\nif (sourceTypeTag == TypeTags.OBJECT_TYPE_TAG) {\nreturn false;\n}\nreturn checkRecordBelongsToAnydataType((MapValue) sourceVal, (BRecordType) sourceType, unresolvedTypes);\ncase TypeTags.MAP_TAG:\nreturn checkIsMapType(sourceVal, sourceType, (BMapType) targetType, unresolvedTypes);\ncase TypeTags.JSON_TAG:\nreturn checkIsMapType(sourceVal, sourceType,\nnew BMapType(targetType.isReadOnly() ? TYPE_READONLY_JSON :\nTYPE_JSON), unresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsRecordType(sourceVal, sourceType, (BRecordType) targetType, unresolvedTypes);\ncase TypeTags.UNION_TAG:\nfor (Type type : ((BUnionType) targetType).getMemberTypes()) {\nif (checkIsType(sourceVal, sourceType, type, unresolvedTypes)) {\nreturn true;\n}\n}\nreturn false;\ncase TypeTags.OBJECT_TYPE_TAG:\nreturn checkObjectEquivalency(sourceVal, sourceType, (BObjectType) targetType, unresolvedTypes);\ndefault:\nreturn false;\n}\n}\nprivate static boolean isFiniteTypeMatch(BFiniteType sourceType, Type targetType) {\nfor (Object bValue : sourceType.valueSpace) {\nif (!checkIsType(bValue, targetType)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean isUnionTypeMatch(BUnionType sourceType, Type targetType, List unresolvedTypes) {\nfor (Type type : sourceType.getMemberTypes()) {\nif (!checkIsType(type, targetType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsUnionType(Type sourceType, BUnionType targetType, List unresolvedTypes) {\nTypePair pair = new TypePair(sourceType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nswitch (sourceType.getTag()) {\ncase TypeTags.UNION_TAG:\ncase TypeTags.JSON_TAG:\ncase TypeTags.ANYDATA_TAG:\nreturn isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);\ncase TypeTags.FINITE_TYPE_TAG:\nreturn isFiniteTypeMatch((BFiniteType) sourceType, targetType);\ndefault:\nfor (Type type : targetType.getMemberTypes()) {\nif (checkIsType(sourceType, type, unresolvedTypes)) {\nreturn true;\n}\n}\nreturn false;\n}\n}\nprivate static boolean checkIsMapType(Type sourceType, BMapType targetType, List unresolvedTypes) {\nType targetConstrainedType = targetType.getConstrainedType();\nswitch (sourceType.getTag()) {\ncase TypeTags.MAP_TAG:\nreturn checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,\nunresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nBRecordType recType = (BRecordType) sourceType;\nBUnionType wideTypeUnion = new BUnionType(getWideTypeComponents(recType));\nreturn checkConstraints(wideTypeUnion, targetConstrainedType, unresolvedTypes);\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkIsMapType(Object sourceVal, Type sourceType, BMapType targetType,\nList unresolvedTypes) {\nType targetConstrainedType = targetType.getConstrainedType();\nswitch (sourceType.getTag()) {\ncase TypeTags.MAP_TAG:\nreturn checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,\nunresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsMapType((MapValue) sourceVal, (BRecordType) sourceType, unresolvedTypes,\ntargetConstrainedType);\ndefault:\nreturn false;\n}\n}\nprivate static boolean 
checkIsMapType(MapValue sourceVal, BRecordType sourceType, List<TypePair> unresolvedTypes,\nType targetConstrainedType) {\nfor (Field field : sourceType.getFields().values()) {\nif (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {\nif (!checkIsType(field.getFieldType(), targetConstrainedType, unresolvedTypes)) {\nreturn false;\n}\ncontinue;\n}\nBString name = StringUtils.fromString(field.getFieldName());\nif (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) && !sourceVal.containsKey(name)) {\ncontinue;\n}\nif (!checkIsLikeType(sourceVal.get(name), targetConstrainedType)) {\nreturn false;\n}\n}\nif (sourceType.sealed) {\nreturn true;\n}\nreturn checkIsType(sourceType.restFieldType, targetConstrainedType, unresolvedTypes);\n}\nprivate static boolean checkIsXMLType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {\nint sourceTag = sourceType.getTag();\nif (sourceTag == TypeTags.FINITE_TYPE_TAG) {\nreturn isFiniteTypeMatch((BFiniteType) sourceType, targetType);\n}\nBXmlType target = ((BXmlType) targetType);\nif (sourceTag == TypeTags.XML_TAG) {\nType targetConstraint = target.constraint;\nwhile (target.constraint.getTag() == TypeTags.XML_TAG) {\ntarget = (BXmlType) target.constraint;\ntargetConstraint = target.constraint;\n}\nBXmlType source = (BXmlType) sourceType;\nif (source.constraint.getTag() == TypeTags.NEVER_TAG) {\nif (targetConstraint.getTag() == TypeTags.UNION_TAG) {\nreturn checkIsUnionType(sourceType, (BUnionType) targetConstraint, unresolvedTypes);\n}\nreturn targetConstraint.getTag() == TypeTags.XML_TEXT_TAG ||\ntargetConstraint.getTag() == TypeTags.NEVER_TAG;\n}\nreturn checkIsType(source.constraint, targetConstraint, unresolvedTypes);\n}\nif (TypeTags.isXMLTypeTag(sourceTag)) {\nreturn checkIsType(sourceType, target.constraint, unresolvedTypes);\n}\nreturn false;\n}\nprivate static List<Type> getWideTypeComponents(BRecordType recType) {\nList<Type> types = new ArrayList<>();\nfor (Field f : recType.getFields().values()) {\ntypes.add(f.getFieldType());\n}\nif (!recType.sealed) {\ntypes.add(recType.restFieldType);\n}\nreturn types;\n}\nprivate static boolean checkIsStreamType(Type sourceType, BStreamType targetType, List<TypePair> unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.STREAM_TAG) {\nreturn false;\n}\nreturn checkConstraints(((BStreamType) sourceType).getConstrainedType(), targetType.getConstrainedType(),\nunresolvedTypes)\n&& checkConstraints(((BStreamType) sourceType).getCompletionType(), targetType.getCompletionType(),\nunresolvedTypes);\n}\nprivate static boolean checkIsTableType(Type sourceType, BTableType targetType, List<TypePair> unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.TABLE_TAG) {\nreturn false;\n}\nBTableType srcTableType = (BTableType) sourceType;\nif (!checkConstraints(srcTableType.getConstrainedType(), targetType.getConstrainedType(),\nunresolvedTypes)) {\nreturn false;\n}\nif (targetType.getKeyType() == null && targetType.getFieldNames() == null) {\nreturn true;\n}\nif (targetType.getKeyType() != null) {\nif (srcTableType.getKeyType() != null &&\n(checkConstraints(srcTableType.getKeyType(), targetType.getKeyType(), unresolvedTypes))) {\nreturn true;\n}\nif (srcTableType.getFieldNames() == null) {\nreturn false;\n}\nList<Type> fieldTypes = new ArrayList<>();\nArrays.stream(srcTableType.getFieldNames()).forEach(field -> fieldTypes\n.add(Objects.requireNonNull(getTableConstraintField(srcTableType.getConstrainedType(), field))\n.getFieldType()));\nif (fieldTypes.size() == 1) {\nreturn checkConstraints(fieldTypes.get(0), targetType.getKeyType(), 
unresolvedTypes);\n}\nBTupleType tupleType = new BTupleType(fieldTypes);\nreturn checkConstraints(tupleType, targetType.getKeyType(), unresolvedTypes);\n}\nreturn Arrays.equals(srcTableType.getFieldNames(), targetType.getFieldNames());\n}\nstatic BField getTableConstraintField(Type constraintType, String fieldName) {\nswitch (constraintType.getTag()) {\ncase TypeTags.RECORD_TYPE_TAG:\nMap<String, Field> fieldList = ((BRecordType) constraintType).getFields();\nreturn (BField) fieldList.get(fieldName);\ncase TypeTags.INTERSECTION_TAG:\nType effectiveType = ((BIntersectionType) constraintType).getEffectiveType();\nreturn getTableConstraintField(effectiveType, fieldName);\ncase TypeTags.UNION_TAG:\nBUnionType unionType = (BUnionType) constraintType;\nList<Type> memTypes = unionType.getMemberTypes();\nList<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))\n.filter(Objects::nonNull).collect(Collectors.toList());\nif (fields.size() != memTypes.size()) {\nreturn null;\n}\nif (fields.stream().allMatch(field -> isSameType(field.getFieldType(), fields.get(0).getFieldType()))) {\nreturn fields.get(0);\n}\n}\nreturn null;\n}\nprivate static boolean checkIsJSONType(Type sourceType, List<TypePair> unresolvedTypes) {\nBJsonType jsonType = (BJsonType) TYPE_JSON;\nTypePair pair = new TypePair(sourceType, jsonType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nswitch (sourceType.getTag()) {\ncase TypeTags.STRING_TAG:\ncase TypeTags.CHAR_STRING_TAG:\ncase TypeTags.INT_TAG:\ncase TypeTags.SIGNED32_INT_TAG:\ncase TypeTags.SIGNED16_INT_TAG:\ncase TypeTags.SIGNED8_INT_TAG:\ncase TypeTags.UNSIGNED32_INT_TAG:\ncase TypeTags.UNSIGNED16_INT_TAG:\ncase TypeTags.UNSIGNED8_INT_TAG:\ncase TypeTags.BYTE_TAG:\ncase TypeTags.FLOAT_TAG:\ncase TypeTags.DECIMAL_TAG:\ncase TypeTags.BOOLEAN_TAG:\ncase TypeTags.NULL_TAG:\ncase TypeTags.JSON_TAG:\nreturn true;\ncase TypeTags.ARRAY_TAG:\nreturn checkIsType(((BArrayType) sourceType).getElementType(), jsonType, unresolvedTypes);\ncase TypeTags.FINITE_TYPE_TAG:\nreturn isFiniteTypeMatch((BFiniteType) sourceType, jsonType);\ncase TypeTags.MAP_TAG:\nreturn checkIsType(((BMapType) sourceType).getConstrainedType(), jsonType, unresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nBRecordType recordType = (BRecordType) sourceType;\nfor (Field field : recordType.getFields().values()) {\nif (!checkIsJSONType(field.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\nif (!recordType.sealed) {\nreturn checkIsJSONType(recordType.restFieldType, unresolvedTypes);\n}\nreturn true;\ncase TypeTags.TUPLE_TAG:\nBTupleType sourceTupleType = (BTupleType) sourceType;\nfor (Type memberType : sourceTupleType.getTupleTypes()) {\nif (!checkIsJSONType(memberType, unresolvedTypes)) {\nreturn false;\n}\n}\nType tupleRestType = sourceTupleType.getRestType();\nif (tupleRestType != null) {\nreturn checkIsJSONType(tupleRestType, unresolvedTypes);\n}\nreturn true;\ncase TypeTags.UNION_TAG:\nfor (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {\nif (!checkIsJSONType(memberType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkIsRecordType(Type sourceType, BRecordType targetType, List<TypePair> unresolvedTypes) {\nswitch (sourceType.getTag()) {\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsRecordType((BRecordType) sourceType, targetType, unresolvedTypes);\ncase TypeTags.MAP_TAG:\nreturn checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);\n}\nreturn false;\n}\nprivate static boolean 
checkIsRecordType(BRecordType sourceRecordType, BRecordType targetType,\nList unresolvedTypes) {\nTypePair pair = new TypePair(sourceRecordType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nif (targetType.sealed && !sourceRecordType.sealed) {\nreturn false;\n}\nif (!sourceRecordType.sealed &&\n!checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {\nreturn false;\n}\nMap sourceFields = sourceRecordType.getFields();\nSet targetFieldNames = targetType.getFields().keySet();\nfor (Map.Entry targetFieldEntry : targetType.getFields().entrySet()) {\nField targetField = targetFieldEntry.getValue();\nField sourceField = sourceFields.get(targetFieldEntry.getKey());\nif (sourceField == null) {\nreturn false;\n}\nif (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {\nreturn false;\n}\nif (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)\n&& SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL)) {\nreturn false;\n}\nif (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\nif (targetType.sealed) {\nreturn targetFieldNames.containsAll(sourceFields.keySet());\n}\nfor (Map.Entry sourceFieldEntry : sourceFields.entrySet()) {\nif (targetFieldNames.contains(sourceFieldEntry.getKey())) {\ncontinue;\n}\nif (!checkIsType(sourceFieldEntry.getValue().getFieldType(), targetType.restFieldType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsRecordType(BMapType sourceType, BRecordType targetType,\nList unresolvedTypes) {\nTypePair pair = new TypePair(sourceType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nif (targetType.sealed) {\nreturn false;\n}\nType constraintType = sourceType.getConstrainedType();\nfor (Field field : targetType.getFields().values()) {\nvar flags = field.getFlags();\nif (!SymbolFlags.isFlagOn(flags, SymbolFlags.OPTIONAL)) {\nreturn false;\n}\nif (SymbolFlags.isFlagOn(flags, SymbolFlags.READONLY) && !sourceType.isReadOnly()) {\nreturn false;\n}\nif (!checkIsType(constraintType, field.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\nreturn checkIsType(constraintType, targetType.restFieldType, unresolvedTypes);\n}\nprivate static boolean checkRecordBelongsToAnydataType(MapValue sourceVal, BRecordType recordType,\nList unresolvedTypes) {\nType targetType = TYPE_ANYDATA;\nTypePair pair = new TypePair(recordType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nMap fields = recordType.getFields();\nfor (Map.Entry fieldEntry : fields.entrySet()) {\nString fieldName = fieldEntry.getKey();\nField field = fieldEntry.getValue();\nif (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {\nBString fieldNameBString = StringUtils.fromString(fieldName);\nif (SymbolFlags\n.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) && !sourceVal.containsKey(fieldNameBString)) {\ncontinue;\n}\nif (!checkIsLikeType(sourceVal.get(fieldNameBString), targetType)) {\nreturn false;\n}\n} else {\nif (!checkIsType(field.getFieldType(), targetType, unresolvedTypes)) {\nreturn false;\n}\n}\n}\nif (recordType.sealed) {\nreturn true;\n}\nreturn checkIsType(recordType.restFieldType, targetType, unresolvedTypes);\n}\nprivate static boolean checkIsRecordType(Object sourceVal, Type sourceType, BRecordType targetType,\nList unresolvedTypes) {\nswitch (sourceType.getTag()) {\ncase 
TypeTags.RECORD_TYPE_TAG:\nreturn checkIsRecordType((MapValue) sourceVal, (BRecordType) sourceType, targetType, unresolvedTypes);\ncase TypeTags.MAP_TAG:\nreturn checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);\n}\nreturn false;\n}\nprivate static boolean checkIsRecordType(MapValue sourceRecordValue, BRecordType sourceRecordType,\nBRecordType targetType, List unresolvedTypes) {\nTypePair pair = new TypePair(sourceRecordType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nif (targetType.sealed && !sourceRecordType.sealed) {\nreturn false;\n}\nif (!sourceRecordType.sealed &&\n!checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {\nreturn false;\n}\nMap sourceFields = sourceRecordType.getFields();\nSet targetFieldNames = targetType.getFields().keySet();\nfor (Map.Entry targetFieldEntry : targetType.getFields().entrySet()) {\nString fieldName = targetFieldEntry.getKey();\nField targetField = targetFieldEntry.getValue();\nField sourceField = sourceFields.get(fieldName);\nif (sourceField == null) {\nif (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)) {\nreturn false;\n}\ncontinue;\n}\nif (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {\nreturn false;\n}\nboolean optionalTargetField = SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL);\nboolean optionalSourceField = SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL);\nif (SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.READONLY)) {\nBString fieldNameBString = StringUtils.fromString(fieldName);\nif (optionalSourceField && !sourceRecordValue.containsKey(fieldNameBString)) {\nif (!optionalTargetField) {\nreturn false;\n}\ncontinue;\n}\nif (!checkIsLikeType(sourceRecordValue.get(fieldNameBString), targetField.getFieldType())) {\nreturn false;\n}\n} else {\nif (!optionalTargetField && optionalSourceField) {\nreturn false;\n}\nif (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\n}\nif (targetType.sealed) {\nfor (String sourceFieldName : sourceFields.keySet()) {\nif (targetFieldNames.contains(sourceFieldName)) {\ncontinue;\n}\nif (!checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(\nsourceFields.get(sourceFieldName).getFieldType())) {\nreturn false;\n}\n}\nreturn true;\n}\nfor (Map.Entry targetFieldEntry : sourceFields.entrySet()) {\nString fieldName = targetFieldEntry.getKey();\nField field = targetFieldEntry.getValue();\nif (targetFieldNames.contains(fieldName)) {\ncontinue;\n}\nif (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {\nif (!checkIsLikeType(sourceRecordValue.get(StringUtils.fromString(fieldName)),\ntargetType.restFieldType)) {\nreturn false;\n}\n} else if (!checkIsType(field.getFieldType(), targetType.restFieldType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean hasIncompatibleReadOnlyFlags(Field targetField, Field sourceField) {\nreturn SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.READONLY) && !SymbolFlags\n.isFlagOn(sourceField.getFlags(),\nSymbolFlags.READONLY);\n}\nprivate static boolean checkIsArrayType(BArrayType sourceType, BArrayType targetType,\nList unresolvedTypes) {\nswitch (sourceType.getState()) {\ncase OPEN:\nif (targetType.getState() != ArrayState.OPEN) {\nreturn false;\n}\nbreak;\ncase CLOSED:\nif (targetType.getState() == ArrayState.CLOSED &&\nsourceType.getSize() != targetType.getSize()) {\nreturn 
false;\n}\nbreak;\n}\nreturn checkIsType(sourceType.getElementType(), targetType.getElementType(), unresolvedTypes);\n}\nprivate static boolean checkIsArrayType(BTupleType sourceType, BArrayType targetType,\nList unresolvedTypes) {\nList tupleTypes = sourceType.getTupleTypes();\nType sourceRestType = sourceType.getRestType();\nType targetElementType = targetType.getElementType();\nif (targetType.getState() == ArrayState.OPEN) {\nfor (Type sourceElementType : tupleTypes) {\nif (!checkIsType(sourceElementType, targetElementType, unresolvedTypes)) {\nreturn false;\n}\n}\nif (sourceRestType != null) {\nreturn checkIsType(sourceRestType, targetElementType, unresolvedTypes);\n}\nreturn true;\n}\nif (sourceRestType != null) {\nreturn false;\n}\nif (tupleTypes.size() != targetType.getSize()) {\nreturn false;\n}\nfor (Type sourceElementType : tupleTypes) {\nif (!checkIsType(sourceElementType, targetElementType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsArrayType(Type sourceType, BArrayType targetType, List unresolvedTypes) {\nint sourceTypeTag = sourceType.getTag();\nif (sourceTypeTag == TypeTags.UNION_TAG) {\nfor (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {\nif (!checkIsArrayType(memberType, targetType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nif (sourceTypeTag != TypeTags.ARRAY_TAG && sourceTypeTag != TypeTags.TUPLE_TAG) {\nreturn false;\n}\nif (sourceTypeTag == TypeTags.ARRAY_TAG) {\nreturn checkIsArrayType((BArrayType) sourceType, targetType, unresolvedTypes);\n}\nreturn checkIsArrayType((BTupleType) sourceType, targetType, unresolvedTypes);\n}\nprivate static boolean checkIsTupleType(BArrayType sourceType, BTupleType targetType,\nList unresolvedTypes) {\nType sourceElementType = sourceType.getElementType();\nList targetTypes = targetType.getTupleTypes();\nType targetRestType = targetType.getRestType();\nswitch (sourceType.getState()) {\ncase OPEN:\nif (targetRestType == null) {\nreturn false;\n}\nif (targetTypes.isEmpty()) {\nreturn checkIsType(sourceElementType, targetRestType, unresolvedTypes);\n}\nreturn false;\ncase CLOSED:\nif (sourceType.getSize() < targetTypes.size()) {\nreturn false;\n}\nif (targetTypes.isEmpty()) {\nif (targetRestType != null) {\nreturn checkIsType(sourceElementType, targetRestType, unresolvedTypes);\n}\nreturn sourceType.getSize() == 0;\n}\nfor (Type targetElementType : targetTypes) {\nif (!(checkIsType(sourceElementType, targetElementType, unresolvedTypes))) {\nreturn false;\n}\n}\nif (sourceType.getSize() == targetTypes.size()) {\nreturn true;\n}\nif (targetRestType != null) {\nreturn checkIsType(sourceElementType, targetRestType, unresolvedTypes);\n}\nreturn false;\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkIsTupleType(BTupleType sourceType, BTupleType targetType,\nList unresolvedTypes) {\nList sourceTypes = sourceType.getTupleTypes();\nType sourceRestType = sourceType.getRestType();\nList targetTypes = targetType.getTupleTypes();\nType targetRestType = targetType.getRestType();\nif (sourceRestType != null && targetRestType == null) {\nreturn false;\n}\nint sourceTypeSize = sourceTypes.size();\nint targetTypeSize = targetTypes.size();\nif (sourceRestType == null && targetRestType == null && sourceTypeSize != targetTypeSize) {\nreturn false;\n}\nif (sourceTypeSize < targetTypeSize) {\nreturn false;\n}\nfor (int i = 0; i < targetTypeSize; i++) {\nif (!checkIsType(sourceTypes.get(i), targetTypes.get(i), unresolvedTypes)) {\nreturn false;\n}\n}\nif (sourceTypeSize == 
targetTypeSize) {\nif (sourceRestType != null) {\nreturn checkIsType(sourceRestType, targetRestType, unresolvedTypes);\n}\nreturn true;\n}\nfor (int i = targetTypeSize; i < sourceTypeSize; i++) {\nif (!checkIsType(sourceTypes.get(i), targetRestType, unresolvedTypes)) {\nreturn false;\n}\n}\nif (sourceRestType != null) {\nreturn checkIsType(sourceRestType, targetRestType, unresolvedTypes);\n}\nreturn true;\n}\nprivate static boolean checkIsTupleType(Type sourceType, BTupleType targetType, List unresolvedTypes) {\nint sourceTypeTag = sourceType.getTag();\nif (sourceTypeTag == TypeTags.UNION_TAG) {\nfor (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {\nif (!checkIsTupleType(memberType, targetType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nif (sourceTypeTag != TypeTags.ARRAY_TAG && sourceTypeTag != TypeTags.TUPLE_TAG) {\nreturn false;\n}\nif (sourceTypeTag == TypeTags.ARRAY_TAG) {\nreturn checkIsTupleType((BArrayType) sourceType, targetType, unresolvedTypes);\n}\nreturn checkIsTupleType((BTupleType) sourceType, targetType, unresolvedTypes);\n}\nprivate static boolean checkIsAnyType(Type sourceType) {\nswitch (sourceType.getTag()) {\ncase TypeTags.ERROR_TAG:\ncase TypeTags.READONLY_TAG:\nreturn false;\ncase TypeTags.UNION_TAG:\ncase TypeTags.ANYDATA_TAG:\ncase TypeTags.JSON_TAG:\nfor (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {\nif (!checkIsAnyType(memberType)) {\nreturn false;\n}\n}\nreturn true;\n}\nreturn true;\n}\nprivate static boolean checkIsFiniteType(Type sourceType, BFiniteType targetType) {\nif (sourceType.getTag() != TypeTags.FINITE_TYPE_TAG) {\nreturn false;\n}\nBFiniteType sourceFiniteType = (BFiniteType) sourceType;\nif (sourceFiniteType.valueSpace.size() != targetType.valueSpace.size()) {\nreturn false;\n}\nreturn targetType.valueSpace.containsAll(sourceFiniteType.valueSpace);\n}\nprivate static boolean checkIsFutureType(Type sourceType, BFutureType targetType, List unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.FUTURE_TAG) {\nreturn false;\n}\nreturn checkConstraints(((BFutureType) sourceType).getConstrainedType(), targetType.getConstrainedType(),\nunresolvedTypes);\n}\nprivate static boolean checkObjectEquivalency(Type sourceType, BObjectType targetType,\nList unresolvedTypes) {\nreturn checkObjectEquivalency(null, sourceType, targetType, unresolvedTypes);\n}\nprivate static boolean checkObjectEquivalency(Object sourceVal, Type sourceType, BObjectType targetType,\nList unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.OBJECT_TYPE_TAG && sourceType.getTag() != TypeTags.SERVICE_TAG) {\nreturn false;\n}\nTypePair pair = new TypePair(sourceType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nBObjectType sourceObjectType = (BObjectType) sourceType;\nif (SymbolFlags.isFlagOn(targetType.flags, SymbolFlags.ISOLATED) &&\n!SymbolFlags.isFlagOn(sourceObjectType.flags, SymbolFlags.ISOLATED)) {\nreturn false;\n}\nMap targetFields = targetType.getFields();\nMap sourceFields = sourceObjectType.getFields();\nMethodType[] targetFuncs = targetType.getMethods();\nMethodType[] sourceFuncs = sourceObjectType.getMethods();\nif (targetType.getFields().values().stream().anyMatch(field -> SymbolFlags\n.isFlagOn(field.getFlags(), SymbolFlags.PRIVATE))\n|| Stream.of(targetFuncs).anyMatch(func -> SymbolFlags.isFlagOn(func.getFlags(),\nSymbolFlags.PRIVATE))) {\nreturn false;\n}\nif (targetFields.size() > sourceFields.size() || targetFuncs.length > sourceFuncs.length) {\nreturn 
false;\n}\nString targetTypeModule = Optional.ofNullable(targetType.getPackage()).map(Module::toString).orElse(\"\");\nString sourceTypeModule = Optional.ofNullable(sourceObjectType.getPackage()).map(Module::toString).orElse(\"\");\nif (sourceVal == null) {\nif (!checkObjectSubTypeForFields(targetFields, sourceFields, targetTypeModule, sourceTypeModule,\nunresolvedTypes)) {\nreturn false;\n}\n} else if (!checkObjectSubTypeForFieldsByValue(targetFields, sourceFields, targetTypeModule, sourceTypeModule,\n(BObject) sourceVal, unresolvedTypes)) {\nreturn false;\n}\nreturn checkObjectSubTypeForMethods(unresolvedTypes, targetFuncs, sourceFuncs, targetTypeModule,\nsourceTypeModule, sourceObjectType, targetType);\n}\nprivate static boolean checkObjectSubTypeForFields(Map targetFields,\nMap sourceFields, String targetTypeModule,\nString sourceTypeModule, List unresolvedTypes) {\nfor (Field lhsField : targetFields.values()) {\nField rhsField = sourceFields.get(lhsField.getFieldName());\nif (rhsField == null ||\n!isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),\nrhsField.getFlags()) || hasIncompatibleReadOnlyFlags(lhsField,\nrhsField) ||\n!checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkObjectSubTypeForFieldsByValue(Map targetFields,\nMap sourceFields, String targetTypeModule,\nString sourceTypeModule, BObject sourceObjVal,\nList unresolvedTypes) {\nfor (Field lhsField : targetFields.values()) {\nString name = lhsField.getFieldName();\nField rhsField = sourceFields.get(name);\nif (rhsField == null ||\n!isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),\nrhsField.getFlags()) || hasIncompatibleReadOnlyFlags(lhsField,\nrhsField)) {\nreturn false;\n}\nif (SymbolFlags.isFlagOn(rhsField.getFlags(), SymbolFlags.FINAL)) {\nObject fieldValue = sourceObjVal.get(StringUtils.fromString(name));\nType fieldValueType = getType(fieldValue);\nif (fieldValueType.isReadOnly()) {\nif (!checkIsLikeType(fieldValue, lhsField.getFieldType())) {\nreturn false;\n}\ncontinue;\n}\nif (!checkIsType(fieldValueType, lhsField.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n} else if (!checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkObjectSubTypeForMethods(List unresolvedTypes,\nMethodType[] targetFuncs,\nMethodType[] sourceFuncs,\nString targetTypeModule, String sourceTypeModule,\nBObjectType sourceType, BObjectType targetType) {\nfor (MethodType lhsFunc : targetFuncs) {\nif (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.RESOURCE)) {\ncontinue;\n}\nMethodType rhsFunc = getMatchingInvokableType(sourceFuncs, lhsFunc, unresolvedTypes);\nif (rhsFunc == null ||\n!isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsFunc.getFlags(),\nrhsFunc.getFlags())) {\nreturn false;\n}\nif (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.REMOTE) != SymbolFlags\n.isFlagOn(rhsFunc.getFlags(), SymbolFlags.REMOTE)) {\nreturn false;\n}\n}\nBTypeIdSet targetTypeIdSet = targetType.typeIdSet;\nif (targetTypeIdSet == null) {\nreturn true;\n}\nBTypeIdSet sourceTypeIdSet = sourceType.typeIdSet;\nif (sourceTypeIdSet == null) {\nreturn false;\n}\nreturn sourceTypeIdSet.containsAll(targetTypeIdSet);\n}\nprivate static boolean isInSameVisibilityRegion(String lhsTypePkg, String rhsTypePkg, long lhsFlags,\nlong rhsFlags) {\nif (SymbolFlags.isFlagOn(lhsFlags, 
SymbolFlags.PRIVATE)) {\nreturn lhsTypePkg.equals(rhsTypePkg);\n} else if (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PUBLIC)) {\nreturn SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PUBLIC);\n}\nreturn !SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PRIVATE) && !SymbolFlags\n.isFlagOn(rhsFlags, SymbolFlags.PUBLIC) &&\nlhsTypePkg.equals(rhsTypePkg);\n}\nprivate static MethodType getMatchingInvokableType(MethodType[] rhsFuncs,\nMethodType lhsFunc,\nList unresolvedTypes) {\nreturn Arrays.stream(rhsFuncs)\n.filter(rhsFunc -> lhsFunc.getName().equals(rhsFunc.getName()))\n.filter(rhsFunc -> checkFunctionTypeEqualityForObjectType(rhsFunc.getType(), lhsFunc.getType(),\nunresolvedTypes))\n.findFirst()\n.orElse(null);\n}\nprivate static boolean checkFunctionTypeEqualityForObjectType(FunctionType source, FunctionType target,\nList unresolvedTypes) {\nif (hasIncompatibleIsolatedFlags(target, source)) {\nreturn false;\n}\nif (source.getParameters().length != target.getParameters().length) {\nreturn false;\n}\nfor (int i = 0; i < source.getParameters().length; i++) {\nif (!checkIsType(target.getParameters()[i].type, source.getParameters()[i].type, unresolvedTypes)) {\nreturn false;\n}\n}\nif (source.getReturnType() == null && target.getReturnType() == null) {\nreturn true;\n} else if (source.getReturnType() == null || target.getReturnType() == null) {\nreturn false;\n}\nreturn checkIsType(source.getReturnType(), target.getReturnType(), unresolvedTypes);\n}\nprivate static boolean checkIsFunctionType(Type sourceType, BFunctionType targetType) {\nif (sourceType.getTag() != TypeTags.FUNCTION_POINTER_TAG) {\nreturn false;\n}\nBFunctionType source = (BFunctionType) sourceType;\nif (hasIncompatibleIsolatedFlags(targetType, source) || hasIncompatibleTransactionalFlags(targetType, source)) {\nreturn false;\n}\nif (SymbolFlags.isFlagOn(targetType.getFlags(), SymbolFlags.ANY_FUNCTION)) {\nreturn true;\n}\nif (source.parameters.length != targetType.parameters.length) {\nreturn false;\n}\nfor (int i = 0; i < source.parameters.length; i++) {\nif (!checkIsType(targetType.parameters[i].type, source.parameters[i].type, new ArrayList<>())) {\nreturn false;\n}\n}\nreturn checkIsType(source.retType, targetType.retType, new ArrayList<>());\n}\nprivate static boolean hasIncompatibleIsolatedFlags(FunctionType target, FunctionType source) {\nreturn SymbolFlags.isFlagOn(target.getFlags(), SymbolFlags.ISOLATED) && !SymbolFlags\n.isFlagOn(source.getFlags(), SymbolFlags.ISOLATED);\n}\nprivate static boolean hasIncompatibleTransactionalFlags(FunctionType target, FunctionType source) {\nreturn SymbolFlags.isFlagOn(source.getFlags(), SymbolFlags.TRANSACTIONAL) && !SymbolFlags\n.isFlagOn(target.getFlags(), SymbolFlags.TRANSACTIONAL);\n}\nprivate static boolean checkIsServiceType(Type sourceType, Type targetType, List unresolvedTypes) {\nif (sourceType.getTag() == TypeTags.SERVICE_TAG) {\nreturn checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);\n}\nif (sourceType.getTag() == TypeTags.OBJECT_TYPE_TAG) {\nvar flags = ((BObjectType) sourceType).flags;\nreturn (flags & SymbolFlags.SERVICE) == SymbolFlags.SERVICE;\n}\nreturn false;\n}\npublic static boolean isInherentlyImmutableType(Type sourceType) {\nif (isSimpleBasicType(sourceType)) {\nreturn true;\n}\nswitch (sourceType.getTag()) {\ncase TypeTags.XML_TEXT_TAG:\ncase TypeTags.FINITE_TYPE_TAG:\ncase TypeTags.READONLY_TAG:\ncase TypeTags.NULL_TAG:\ncase TypeTags.ERROR_TAG:\ncase TypeTags.INVOKABLE_TAG:\ncase TypeTags.SERVICE_TAG:\ncase TypeTags.TYPEDESC_TAG:\ncase 
TypeTags.FUNCTION_POINTER_TAG:\ncase TypeTags.HANDLE_TAG:\nreturn true;\ncase TypeTags.XML_TAG:\nreturn ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG;\n}\nreturn false;\n}\npublic static boolean isSelectivelyImmutableType(Type type, Set unresolvedTypes) {\nif (!unresolvedTypes.add(type)) {\nreturn true;\n}\nswitch (type.getTag()) {\ncase TypeTags.ANY_TAG:\ncase TypeTags.ANYDATA_TAG:\ncase TypeTags.JSON_TAG:\ncase TypeTags.XML_TAG:\ncase TypeTags.XML_COMMENT_TAG:\ncase TypeTags.XML_ELEMENT_TAG:\ncase TypeTags.XML_PI_TAG:\nreturn true;\ncase TypeTags.ARRAY_TAG:\nType elementType = ((BArrayType) type).getElementType();\nreturn isInherentlyImmutableType(elementType) ||\nisSelectivelyImmutableType(elementType, unresolvedTypes);\ncase TypeTags.TUPLE_TAG:\nBTupleType tupleType = (BTupleType) type;\nfor (Type tupMemType : tupleType.getTupleTypes()) {\nif (!isInherentlyImmutableType(tupMemType) &&\n!isSelectivelyImmutableType(tupMemType, unresolvedTypes)) {\nreturn false;\n}\n}\nType tupRestType = tupleType.getRestType();\nif (tupRestType == null) {\nreturn true;\n}\nreturn isInherentlyImmutableType(tupRestType) ||\nisSelectivelyImmutableType(tupRestType, unresolvedTypes);\ncase TypeTags.RECORD_TYPE_TAG:\nBRecordType recordType = (BRecordType) type;\nfor (Field field : recordType.getFields().values()) {\nType fieldType = field.getFieldType();\nif (!isInherentlyImmutableType(fieldType) &&\n!isSelectivelyImmutableType(fieldType, unresolvedTypes)) {\nreturn false;\n}\n}\nType recordRestType = recordType.restFieldType;\nif (recordRestType == null) {\nreturn true;\n}\nreturn isInherentlyImmutableType(recordRestType) ||\nisSelectivelyImmutableType(recordRestType, unresolvedTypes);\ncase TypeTags.OBJECT_TYPE_TAG:\nBObjectType objectType = (BObjectType) type;\nif (SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.CLASS) &&\n!SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.READONLY)) {\nreturn false;\n}\nfor (Field field : objectType.getFields().values()) {\nType fieldType = field.getFieldType();\nif (!isInherentlyImmutableType(fieldType) &&\n!isSelectivelyImmutableType(fieldType, unresolvedTypes)) {\nreturn false;\n}\n}\nreturn true;\ncase TypeTags.MAP_TAG:\nType constraintType = ((BMapType) type).getConstrainedType();\nreturn isInherentlyImmutableType(constraintType) ||\nisSelectivelyImmutableType(constraintType, unresolvedTypes);\ncase TypeTags.TABLE_TAG:\nType tableConstraintType = ((BTableType) type).getConstrainedType();\nreturn isInherentlyImmutableType(tableConstraintType) ||\nisSelectivelyImmutableType(tableConstraintType, unresolvedTypes);\ncase TypeTags.UNION_TAG:\nboolean readonlyIntersectionExists = false;\nfor (Type memberType : ((BUnionType) type).getMemberTypes()) {\nif (isInherentlyImmutableType(memberType) ||\nisSelectivelyImmutableType(memberType, unresolvedTypes)) {\nreadonlyIntersectionExists = true;\nbreak;\n}\n}\nreturn readonlyIntersectionExists;\ncase TypeTags.INTERSECTION_TAG:\nreturn isSelectivelyImmutableType(((BIntersectionType) type).getEffectiveType(), unresolvedTypes);\n}\nreturn false;\n}\nprivate static boolean checkConstraints(Type sourceConstraint, Type targetConstraint,\nList unresolvedTypes) {\nif (sourceConstraint == null) {\nsourceConstraint = TYPE_ANY;\n}\nif (targetConstraint == null) {\ntargetConstraint = TYPE_ANY;\n}\nreturn checkIsType(sourceConstraint, targetConstraint, unresolvedTypes);\n}\nprivate static boolean isMutable(Object value, Type sourceType) {\nif (value == null || sourceType.getTag() < TypeTags.NULL_TAG 
||\nsourceType.getTag() == TypeTags.FINITE_TYPE_TAG) {\nreturn false;\n}\nreturn !((RefValue) value).isFrozen();\n}\nprivate static boolean checkArrayEquivalent(Type actualType, Type expType) {\nif (expType.getTag() == TypeTags.ARRAY_TAG && actualType.getTag() == TypeTags.ARRAY_TAG) {\nBArrayType lhsArrayType = (BArrayType) expType;\nBArrayType rhsArrayType = (BArrayType) actualType;\nreturn checkIsArrayType(rhsArrayType, lhsArrayType, new ArrayList<>());\n}\nreturn expType == actualType;\n}\nprivate static boolean checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type) {\nSet<String> visitedTypeSet = new HashSet<>();\nreturn checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(type, visitedTypeSet);\n}\nprivate static boolean checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type,\nSet<String> visitedTypeSet) {\nswitch (type.getTag()) {\ncase TypeTags.NEVER_TAG:\nreturn true;\ncase TypeTags.RECORD_TYPE_TAG:\nBRecordType recordType = (BRecordType) type;\nvisitedTypeSet.add(recordType.getName());\nfor (Field field : recordType.getFields().values()) {\nif ((SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED) ||\n!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) &&\n!visitedTypeSet.contains(field.getFieldType()) &&\ncheckIsNeverTypeOrStructureTypeWithARequiredNeverMember(field.getFieldType(),\nvisitedTypeSet)) {\nreturn true;\n}\n}\nreturn false;\ncase TypeTags.TUPLE_TAG:\nBTupleType tupleType = (BTupleType) type;\nvisitedTypeSet.add(tupleType.getName());\nList<Type> tupleTypes = tupleType.getTupleTypes();\nfor (Type mem : tupleTypes) {\nif (!visitedTypeSet.add(mem.getName())) {\ncontinue;\n}\nif (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(mem, visitedTypeSet)) {\nreturn true;\n}\n}\nreturn false;\ncase TypeTags.ARRAY_TAG:\nBArrayType arrayType = (BArrayType) type;\nvisitedTypeSet.add(arrayType.getName());\nType elemType = arrayType.getElementType();\nvisitedTypeSet.add(elemType.getName());\nreturn arrayType.getState() != ArrayState.OPEN &&\ncheckIsNeverTypeOrStructureTypeWithARequiredNeverMember(elemType, visitedTypeSet);\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether a given value conforms to a given type. First it checks the type of the value, and\n* if that fails, falls back to checking the value.\n*\n* @param sourceValue Value to check\n* @param targetType Target type\n* @param unresolvedValues Values that are unresolved so far\n* @param allowNumericConversion Flag indicating whether to perform numeric conversions\n* @return True if the value conforms to the provided type. False, otherwise.\n*/\nprivate static boolean checkIsLikeType(Object sourceValue, Type targetType, List<TypeValuePair> unresolvedValues,\nboolean allowNumericConversion) {\nType sourceType = getType(sourceValue);\nif (checkIsType(sourceType, targetType, new ArrayList<>())) {\nreturn true;\n}\nreturn checkIsLikeOnValue(sourceValue, sourceType, targetType, unresolvedValues, allowNumericConversion);\n}\n/**\n* Check whether a given value conforms to a given type. Strictly checks the value only, and does not take the\n* type of the value into consideration.\n*\n* @param sourceValue Value to check\n* @param sourceType Type of the value\n* @param targetType Target type\n* @param unresolvedValues Values that are unresolved so far\n* @param allowNumericConversion Flag indicating whether to perform numeric conversions\n* @return True if the value conforms to the provided type. 
False, otherwise.\n*/\nprivate static boolean checkIsLikeOnValue(Object sourceValue, Type sourceType, Type targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nint sourceTypeTag = sourceType.getTag();\nint targetTypeTag = targetType.getTag();\nif (sourceTypeTag == TypeTags.INTERSECTION_TAG) {\nreturn checkIsLikeOnValue(sourceValue, ((BIntersectionType) sourceType).getEffectiveType(),\ntargetTypeTag != TypeTags.INTERSECTION_TAG ? targetType :\n((BIntersectionType) targetType).getEffectiveType(),\nunresolvedValues, allowNumericConversion);\n}\nif (targetTypeTag == TypeTags.INTERSECTION_TAG) {\nreturn checkIsLikeOnValue(sourceValue, sourceType, ((BIntersectionType) targetType).getEffectiveType(),\nunresolvedValues, allowNumericConversion);\n}\nif (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) {\nif (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) {\nreturn checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(),\ntargetType, unresolvedValues, allowNumericConversion);\n}\nreturn checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(),\n((BParameterizedType) targetType).getParamValueType(), unresolvedValues,\nallowNumericConversion);\n}\nswitch (targetTypeTag) {\ncase TypeTags.READONLY_TAG:\nreturn true;\ncase TypeTags.BYTE_TAG:\nif (TypeTags.isIntegerTypeTag(sourceTypeTag)) {\nreturn isByteLiteral((Long) sourceValue);\n}\nreturn allowNumericConversion && TypeConverter.isConvertibleToByte(sourceValue);\ncase TypeTags.INT_TAG:\nreturn allowNumericConversion && TypeConverter.isConvertibleToInt(sourceValue);\ncase TypeTags.SIGNED32_INT_TAG:\ncase TypeTags.SIGNED16_INT_TAG:\ncase TypeTags.SIGNED8_INT_TAG:\ncase TypeTags.UNSIGNED32_INT_TAG:\ncase TypeTags.UNSIGNED16_INT_TAG:\ncase TypeTags.UNSIGNED8_INT_TAG:\nif (TypeTags.isIntegerTypeTag(sourceTypeTag) || targetTypeTag == TypeTags.BYTE_TAG) {\nreturn TypeConverter.isConvertibleToIntSubType(sourceValue, targetType);\n}\nreturn allowNumericConversion && TypeConverter.isConvertibleToIntSubType(sourceValue, targetType);\ncase TypeTags.FLOAT_TAG:\ncase TypeTags.DECIMAL_TAG:\nreturn allowNumericConversion && TypeConverter.isConvertibleToFloatingPointTypes(sourceValue);\ncase TypeTags.CHAR_STRING_TAG:\nreturn TypeConverter.isConvertibleToChar(sourceValue);\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkIsLikeRecordType(sourceValue, (BRecordType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.TABLE_TAG:\nreturn checkIsLikeTableType(sourceValue, (BTableType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.JSON_TAG:\nreturn checkIsLikeJSONType(sourceValue, sourceType, (BJsonType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.MAP_TAG:\nreturn checkIsLikeMapType(sourceValue, (BMapType) targetType, unresolvedValues, allowNumericConversion);\ncase TypeTags.STREAM_TAG:\nreturn checkIsLikeStreamType(sourceValue, (BStreamType) targetType);\ncase TypeTags.ARRAY_TAG:\nreturn checkIsLikeArrayType(sourceValue, (BArrayType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.TUPLE_TAG:\nreturn checkIsLikeTupleType(sourceValue, (BTupleType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.ERROR_TAG:\nreturn checkIsLikeErrorType(sourceValue, (BErrorType) targetType, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.ANYDATA_TAG:\nreturn checkIsLikeAnydataType(sourceValue, sourceType, unresolvedValues, allowNumericConversion);\ncase TypeTags.FINITE_TYPE_TAG:\nreturn 
checkFiniteTypeAssignable(sourceValue, sourceType, (BFiniteType) targetType);\ncase TypeTags.XML_ELEMENT_TAG:\nif (sourceTypeTag == TypeTags.XML_TAG) {\nXmlValue xmlSource = (XmlValue) sourceValue;\nreturn xmlSource.isSingleton();\n}\nreturn false;\ncase TypeTags.XML_COMMENT_TAG:\ncase TypeTags.XML_PI_TAG:\ncase TypeTags.XML_TEXT_TAG:\nif (sourceTypeTag == TypeTags.XML_TAG) {\nreturn checkIsLikeNonElementSingleton((XmlValue) sourceValue, targetType);\n}\nreturn false;\ncase TypeTags.XML_TAG:\nif (sourceTypeTag == TypeTags.XML_TAG) {\nreturn checkIsLikeXMLSequenceType((XmlValue) sourceValue, targetType);\n}\nreturn false;\ncase TypeTags.UNION_TAG:\nif (allowNumericConversion) {\nList compatibleTypesWithNumConversion = new ArrayList<>();\nList compatibleTypesWithoutNumConversion = new ArrayList<>();\nfor (Type type : ((BUnionType) targetType).getMemberTypes()) {\nList tempList = new ArrayList<>(unresolvedValues.size());\ntempList.addAll(unresolvedValues);\nif (checkIsLikeType(sourceValue, type, tempList, false)) {\ncompatibleTypesWithoutNumConversion.add(type);\n}\nif (checkIsLikeType(sourceValue, type, unresolvedValues, true)) {\ncompatibleTypesWithNumConversion.add(type);\n}\n}\nreturn compatibleTypesWithNumConversion.size() != 0 &&\ncompatibleTypesWithNumConversion.size() - compatibleTypesWithoutNumConversion.size() <= 1;\n} else {\nfor (Type type : ((BUnionType) targetType).getMemberTypes()) {\nif (checkIsLikeType(sourceValue, type, unresolvedValues, false)) {\nreturn true;\n}\n}\n}\nreturn false;\ndefault:\nreturn false;\n}\n}\nprivate static XmlNodeType getXmlNodeType(Type type) {\nXmlNodeType nodeType = null;\nswitch (type.getTag()) {\ncase TypeTags.XML_ELEMENT_TAG:\nnodeType = XmlNodeType.ELEMENT;\nbreak;\ncase TypeTags.XML_COMMENT_TAG:\nnodeType = XmlNodeType.COMMENT;\nbreak;\ncase TypeTags.XML_PI_TAG:\nnodeType = XmlNodeType.PI;\nbreak;\ncase TypeTags.XML_TEXT_TAG:\nnodeType = XmlNodeType.TEXT;\nbreak;\ndefault:\nreturn null;\n}\nreturn nodeType;\n}\nprivate static boolean checkIsLikeNonElementSingleton(XmlValue xmlSource, Type targetType) {\nXmlNodeType nodeType = getXmlNodeType(targetType);\nif (nodeType == null) {\nreturn false;\n}\nif (xmlSource.getNodeType() == nodeType) {\nreturn true;\n}\nif (xmlSource.getNodeType() == XmlNodeType.SEQUENCE) {\nXmlSequence seq = (XmlSequence) xmlSource;\nreturn seq.size() == 1 && seq.getChildrenList().get(0).getNodeType() == nodeType ||\n(nodeType == XmlNodeType.TEXT && seq.isEmpty());\n}\nreturn false;\n}\nprivate static boolean checkIsLikeXMLSequenceType(XmlValue xmlSource, Type targetType) {\nif (xmlSource.getNodeType() != XmlNodeType.SEQUENCE) {\nreturn false;\n}\nSet acceptedNodes = new HashSet<>();\nBXmlType target = (BXmlType) targetType;\nif (target.constraint.getTag() == TypeTags.UNION_TAG) {\ngetXMLNodeOnUnion((BUnionType) target.constraint, acceptedNodes);\n} else {\nacceptedNodes.add(getXmlNodeType(((BXmlType) targetType).constraint));\n}\nXmlSequence seq = (XmlSequence) xmlSource;\nfor (BXml m : seq.getChildrenList()) {\nif (!acceptedNodes.contains(m.getNodeType())) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static void getXMLNodeOnUnion(BUnionType unionType, Set nodeTypes) {\nif (nodeTypes.size() == 4) {\nreturn;\n}\nfor (Type memberType : unionType.getMemberTypes()) {\nif (memberType.getTag() == TypeTags.UNION_TAG) {\ngetXMLNodeOnUnion((BUnionType) memberType, nodeTypes);\n} else {\nnodeTypes.add(getXmlNodeType(memberType));\n}\n}\n}\npublic static boolean isNumericType(Type type) {\nreturn type.getTag() < 
TypeTags.STRING_TAG || TypeTags.isIntegerTypeTag(type.getTag());\n}\nprivate static boolean checkIsLikeAnydataType(Object sourceValue, Type sourceType,\nList unresolvedValues,\nboolean allowNumericConversion) {\nswitch (sourceType.getTag()) {\ncase TypeTags.RECORD_TYPE_TAG:\ncase TypeTags.JSON_TAG:\ncase TypeTags.MAP_TAG:\nreturn isLikeType(((MapValueImpl) sourceValue).values().toArray(), TYPE_ANYDATA,\nunresolvedValues, allowNumericConversion);\ncase TypeTags.ARRAY_TAG:\nArrayValue arr = (ArrayValue) sourceValue;\nBArrayType arrayType = (BArrayType) arr.getType();\nswitch (arrayType.getElementType().getTag()) {\ncase TypeTags.INT_TAG:\ncase TypeTags.FLOAT_TAG:\ncase TypeTags.DECIMAL_TAG:\ncase TypeTags.STRING_TAG:\ncase TypeTags.BOOLEAN_TAG:\ncase TypeTags.BYTE_TAG:\nreturn true;\ndefault:\nreturn isLikeType(arr.getValues(), TYPE_ANYDATA, unresolvedValues,\nallowNumericConversion);\n}\ncase TypeTags.TUPLE_TAG:\nreturn isLikeType(((ArrayValue) sourceValue).getValues(), TYPE_ANYDATA, unresolvedValues,\nallowNumericConversion);\ncase TypeTags.ANYDATA_TAG:\nreturn true;\ncase TypeTags.FINITE_TYPE_TAG:\ncase TypeTags.UNION_TAG:\nreturn checkIsLikeType(sourceValue, TYPE_ANYDATA, unresolvedValues, allowNumericConversion);\ndefault:\nreturn false;\n}\n}\nprivate static boolean isLikeType(Object[] objects, Type targetType, List unresolvedValues,\nboolean allowNumericConversion) {\nfor (Object value : objects) {\nif (!checkIsLikeType(value, targetType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsLikeTupleType(Object sourceValue, BTupleType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (!(sourceValue instanceof ArrayValue)) {\nreturn false;\n}\nArrayValue source = (ArrayValue) sourceValue;\nList targetTypes = targetType.getTupleTypes();\nint sourceTypeSize = source.size();\nint targetTypeSize = targetTypes.size();\nType targetRestType = targetType.getRestType();\nif (sourceTypeSize < targetTypeSize) {\nreturn false;\n}\nif (targetRestType == null && sourceTypeSize > targetTypeSize) {\nreturn false;\n}\nfor (int i = 0; i < targetTypeSize; i++) {\nif (!checkIsLikeType(source.getRefValue(i), targetTypes.get(i), unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nfor (int i = targetTypeSize; i < sourceTypeSize; i++) {\nif (!checkIsLikeType(source.getRefValue(i), targetRestType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nstatic boolean isByteLiteral(long longValue) {\nreturn (longValue >= BBYTE_MIN_VALUE && longValue <= BBYTE_MAX_VALUE);\n}\nstatic boolean isSigned32LiteralValue(Long longObject) {\nreturn (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE);\n}\nstatic boolean isSigned16LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= SIGNED16_MIN_VALUE && longObject.intValue() <= SIGNED16_MAX_VALUE);\n}\nstatic boolean isSigned8LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= SIGNED8_MIN_VALUE && longObject.intValue() <= SIGNED8_MAX_VALUE);\n}\nstatic boolean isUnsigned32LiteralValue(Long longObject) {\nreturn (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE);\n}\nstatic boolean isUnsigned16LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED16_MAX_VALUE);\n}\nstatic boolean isUnsigned8LiteralValue(Long longObject) {\nreturn (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED8_MAX_VALUE);\n}\nstatic boolean 
isCharLiteralValue(Object object) {\nString value;\nif (object instanceof BString) {\nvalue = ((BString) object).getValue();\n} else if (object instanceof String) {\nvalue = (String) object;\n} else {\nreturn false;\n}\nreturn value.codePoints().count() == 1;\n}\nprivate static boolean checkIsLikeArrayType(Object sourceValue, BArrayType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (!(sourceValue instanceof ArrayValue)) {\nreturn false;\n}\nArrayValue source = (ArrayValue) sourceValue;\nType targetTypeElementType = targetType.getElementType();\nif (source.getType().getTag() == TypeTags.ARRAY_TAG) {\nType sourceElementType = ((BArrayType) source.getType()).getElementType();\nif (isValueType(sourceElementType)) {\nif (checkIsType(sourceElementType, targetTypeElementType, new ArrayList<>())) {\nreturn true;\n}\nif (allowNumericConversion && isNumericType(sourceElementType)) {\nif (isNumericType(targetTypeElementType)) {\nreturn true;\n}\nif (targetTypeElementType.getTag() != TypeTags.UNION_TAG) {\nreturn false;\n}\nList targetNumericTypes = new ArrayList<>();\nfor (Type memType : ((BUnionType) targetTypeElementType).getMemberTypes()) {\nif (isNumericType(memType) && !targetNumericTypes.contains(memType)) {\ntargetNumericTypes.add(memType);\n}\n}\nreturn targetNumericTypes.size() == 1;\n}\nif (targetTypeElementType.getTag() == TypeTags.FLOAT_TAG ||\ntargetTypeElementType.getTag() == TypeTags.DECIMAL_TAG) {\nreturn false;\n}\n}\n}\nfor (int i = 0; i < source.size(); i++) {\nif (!checkIsLikeType(source.get(i), targetTypeElementType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsLikeMapType(Object sourceValue, BMapType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (!(sourceValue instanceof MapValueImpl)) {\nreturn false;\n}\nfor (Object mapEntry : ((MapValueImpl) sourceValue).values()) {\nif (!checkIsLikeType(mapEntry, targetType.getConstrainedType(), unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkIsLikeStreamType(Object sourceValue, BStreamType targetType) {\nif (!(sourceValue instanceof StreamValue)) {\nreturn false;\n}\nBStreamType streamType = (BStreamType) ((StreamValue) sourceValue).getType();\nreturn streamType.getConstrainedType() == targetType.getConstrainedType();\n}\nprivate static boolean checkIsLikeJSONType(Object sourceValue, Type sourceType, BJsonType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (sourceType.getTag() == TypeTags.ARRAY_TAG) {\nArrayValue source = (ArrayValue) sourceValue;\nType elementType = ((BArrayType) source.getType()).getElementType();\nif (isValueType(elementType)) {\nreturn checkIsType(elementType, targetType, new ArrayList<>());\n}\nObject[] arrayValues = source.getValues();\nfor (int i = 0; i < ((ArrayValue) sourceValue).size(); i++) {\nif (!checkIsLikeType(arrayValues[i], targetType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n} else if (sourceType.getTag() == TypeTags.MAP_TAG) {\nfor (Object value : ((MapValueImpl) sourceValue).values()) {\nif (!checkIsLikeType(value, targetType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n} else if (sourceType.getTag() == TypeTags.RECORD_TYPE_TAG) {\nTypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);\nif (unresolvedValues.contains(typeValuePair)) {\nreturn true;\n}\nunresolvedValues.add(typeValuePair);\nfor 
(Object object : ((MapValueImpl) sourceValue).values()) {\nif (!checkIsLikeType(object, targetType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n} else if (sourceType.getTag() == TypeTags.TUPLE_TAG) {\nfor (Object obj : ((TupleValueImpl) sourceValue).getValues()) {\nif (!checkIsLikeType(obj, targetType, unresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nreturn false;\n}\nprivate static boolean checkIsLikeRecordType(Object sourceValue, BRecordType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (!(sourceValue instanceof MapValueImpl)) {\nreturn false;\n}\nTypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);\nif (unresolvedValues.contains(typeValuePair)) {\nreturn true;\n}\nunresolvedValues.add(typeValuePair);\nMap targetTypeField = new HashMap<>();\nType restFieldType = targetType.restFieldType;\nfor (Field field : targetType.getFields().values()) {\ntargetTypeField.put(field.getFieldName(), field.getFieldType());\n}\nfor (Map.Entry targetTypeEntry : targetTypeField.entrySet()) {\nObject fieldName = StringUtils.fromString(targetTypeEntry.getKey().toString());\nif (!(((MapValueImpl) sourceValue).containsKey(fieldName)) &&\n!SymbolFlags.isFlagOn(targetType.getFields().get(fieldName.toString()).getFlags(),\nSymbolFlags.OPTIONAL)) {\nreturn false;\n}\n}\nfor (Object object : ((MapValueImpl) sourceValue).entrySet()) {\nMap.Entry valueEntry = (Map.Entry) object;\nString fieldName = valueEntry.getKey().toString();\nif (targetTypeField.containsKey(fieldName)) {\nif (!checkIsLikeType((valueEntry.getValue()), targetTypeField.get(fieldName),\nunresolvedValues, allowNumericConversion)) {\nreturn false;\n}\n} else {\nif (!targetType.sealed) {\nif (!checkIsLikeType((valueEntry.getValue()), restFieldType, unresolvedValues,\nallowNumericConversion)) {\nreturn false;\n}\n} else {\nreturn false;\n}\n}\n}\nreturn true;\n}\nprivate static boolean checkIsLikeTableType(Object sourceValue, BTableType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nif (!(sourceValue instanceof TableValueImpl)) {\nreturn false;\n}\nTableValueImpl tableValue = (TableValueImpl) sourceValue;\nBTableType sourceType = (BTableType) tableValue.getType();\nif (targetType.getKeyType() != null && sourceType.getFieldNames() == null) {\nreturn false;\n}\nif (sourceType.getKeyType() != null && !checkIsType(tableValue.getKeyType(), targetType.getKeyType())) {\nreturn false;\n}\nTypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);\nif (unresolvedValues.contains(typeValuePair)) {\nreturn true;\n}\nObject[] objects = tableValue.values().toArray();\nfor (Object object : objects) {\nif (!checkIsLikeType(object, targetType.getConstrainedType(), allowNumericConversion)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkFiniteTypeAssignable(Object sourceValue, Type sourceType, BFiniteType targetType) {\nfor (Object valueSpaceItem : targetType.valueSpace) {\nif (isFiniteTypeValue(sourceValue, sourceType, valueSpaceItem)) {\nreturn true;\n}\n}\nreturn false;\n}\nprotected static boolean isFiniteTypeValue(Object sourceValue, Type sourceType, Object valueSpaceItem) {\nType valueSpaceItemType = getType(valueSpaceItem);\nif (valueSpaceItemType.getTag() > TypeTags.FLOAT_TAG) {\nreturn valueSpaceItemType.getTag() == sourceType.getTag() &&\n(valueSpaceItem == sourceValue || valueSpaceItem.equals(sourceValue));\n}\nswitch (sourceType.getTag()) {\ncase TypeTags.BYTE_TAG:\ncase 
TypeTags.INT_TAG:\nreturn ((Number) sourceValue).longValue() == ((Number) valueSpaceItem).longValue();\ncase TypeTags.FLOAT_TAG:\nif (sourceType.getTag() != valueSpaceItemType.getTag()) {\nreturn false;\n}\nreturn ((Number) sourceValue).doubleValue() == ((Number) valueSpaceItem).doubleValue();\ncase TypeTags.DECIMAL_TAG:\ndefault:\nif (sourceType.getTag() != valueSpaceItemType.getTag()) {\nreturn false;\n}\nreturn valueSpaceItem.equals(sourceValue);\n}\n}\nprivate static boolean checkIsErrorType(Type sourceType, BErrorType targetType, List unresolvedTypes) {\nif (sourceType.getTag() != TypeTags.ERROR_TAG) {\nreturn false;\n}\nTypePair pair = new TypePair(sourceType, targetType);\nif (unresolvedTypes.contains(pair)) {\nreturn true;\n}\nunresolvedTypes.add(pair);\nBErrorType bErrorType = (BErrorType) sourceType;\nif (!checkIsType(bErrorType.detailType, targetType.detailType, unresolvedTypes)) {\nreturn false;\n}\nif (targetType.typeIdSet == null) {\nreturn true;\n}\nBTypeIdSet sourceTypeIdSet = bErrorType.typeIdSet;\nif (sourceTypeIdSet == null) {\nreturn false;\n}\nreturn sourceTypeIdSet.containsAll(targetType.typeIdSet);\n}\nprivate static boolean checkIsLikeErrorType(Object sourceValue, BErrorType targetType,\nList unresolvedValues, boolean allowNumericConversion) {\nType sourceType = getType(sourceValue);\nif (sourceValue == null || sourceType.getTag() != TypeTags.ERROR_TAG) {\nreturn false;\n}\nif (!checkIsLikeType(((ErrorValue) sourceValue).getDetails(), targetType.detailType, unresolvedValues,\nallowNumericConversion)) {\nreturn false;\n}\nif (targetType.typeIdSet == null) {\nreturn true;\n}\nBTypeIdSet sourceIdSet = ((BErrorType) sourceType).typeIdSet;\nif (sourceIdSet == null) {\nreturn false;\n}\nreturn sourceIdSet.containsAll(targetType.typeIdSet);\n}\nprivate static boolean isSimpleBasicType(Type type) {\nreturn type.getTag() < TypeTags.NULL_TAG;\n}\nprivate static boolean isHandleType(Type type) {\nreturn type.getTag() == TypeTags.HANDLE_TAG;\n}\n/**\n* Deep value equality check for anydata.\n*\n* @param lhsValue The value on the left hand side\n* @param rhsValue The value on the right hand side\n* @param checkedValues Structured value pairs already compared or being compared\n* @return True if values are equal, else false.\n*/\nprivate static boolean isEqual(Object lhsValue, Object rhsValue, List checkedValues) {\nif (lhsValue == rhsValue) {\nreturn true;\n}\nif (null == lhsValue || null == rhsValue) {\nreturn false;\n}\nint lhsValTypeTag = getType(lhsValue).getTag();\nint rhsValTypeTag = getType(rhsValue).getTag();\nswitch (lhsValTypeTag) {\ncase TypeTags.STRING_TAG:\ncase TypeTags.BOOLEAN_TAG:\nreturn lhsValue.equals(rhsValue);\ncase TypeTags.INT_TAG:\nif (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {\nreturn false;\n}\nreturn lhsValue.equals(((Number) rhsValue).longValue());\ncase TypeTags.BYTE_TAG:\nif (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {\nreturn false;\n}\nreturn ((Number) lhsValue).byteValue() == ((Number) rhsValue).byteValue();\ncase TypeTags.FLOAT_TAG:\nif (rhsValTypeTag != TypeTags.FLOAT_TAG) {\nreturn false;\n}\nif (Double.isNaN((Double) lhsValue) && Double.isNaN((Double) rhsValue)) {\nreturn true;\n}\nreturn ((Number) lhsValue).doubleValue() == ((Number) rhsValue).doubleValue();\ncase TypeTags.DECIMAL_TAG:\nif (rhsValTypeTag != TypeTags.DECIMAL_TAG) {\nreturn false;\n}\nreturn checkDecimalEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);\ncase TypeTags.XML_TAG:\nif (lhsValue instanceof 
XmlText) {\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue, (XmlValue) rhsValue);\n}\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlSequence) lhsValue, (XmlValue) rhsValue);\ncase TypeTags.XML_ELEMENT_TAG:\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlItem) lhsValue, (XmlValue) rhsValue);\ncase TypeTags.XML_COMMENT_TAG:\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlComment) lhsValue, (XmlValue) rhsValue);\ncase TypeTags.XML_TEXT_TAG:\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue, (XmlValue) rhsValue);\ncase TypeTags.XML_PI_TAG:\nreturn TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlPi) lhsValue, (XmlValue) rhsValue);\ncase TypeTags.MAP_TAG:\ncase TypeTags.JSON_TAG:\ncase TypeTags.RECORD_TYPE_TAG:\nreturn isMappingType(rhsValTypeTag) && isEqual((MapValueImpl) lhsValue, (MapValueImpl) rhsValue,\ncheckedValues);\ncase TypeTags.TUPLE_TAG:\ncase TypeTags.ARRAY_TAG:\nreturn isListType(rhsValTypeTag) &&\nisEqual((ArrayValue) lhsValue, (ArrayValue) rhsValue, checkedValues);\ncase TypeTags.ERROR_TAG:\nreturn rhsValTypeTag == TypeTags.ERROR_TAG &&\nisEqual((ErrorValue) lhsValue, (ErrorValue) rhsValue, checkedValues);\ncase TypeTags.SERVICE_TAG:\nbreak;\ncase TypeTags.TABLE_TAG:\nreturn rhsValTypeTag == TypeTags.TABLE_TAG &&\nisEqual((TableValueImpl) lhsValue, (TableValueImpl) rhsValue, checkedValues);\n}\nreturn false;\n}\nprivate static boolean isListType(int typeTag) {\nreturn typeTag == TypeTags.ARRAY_TAG || typeTag == TypeTags.TUPLE_TAG;\n}\nprivate static boolean isMappingType(int typeTag) {\nreturn typeTag == TypeTags.MAP_TAG || typeTag == TypeTags.RECORD_TYPE_TAG || typeTag == TypeTags.JSON_TAG;\n}\n/**\n* Deep equality check for an array/tuple.\n*\n* @param lhsList The array/tuple on the left hand side\n* @param rhsList The array/tuple on the right hand side\n* @param checkedValues Structured value pairs already compared or being compared\n* @return True if the array/tuple values are equal, else false.\n*/\nprivate static boolean isEqual(ArrayValue lhsList, ArrayValue rhsList, List checkedValues) {\nValuePair compValuePair = new ValuePair(lhsList, rhsList);\nif (checkedValues.contains(compValuePair)) {\nreturn true;\n}\ncheckedValues.add(compValuePair);\nif (lhsList.size() != rhsList.size()) {\nreturn false;\n}\nfor (int i = 0; i < lhsList.size(); i++) {\nif (!isEqual(lhsList.get(i), rhsList.get(i), checkedValues)) {\nreturn false;\n}\n}\nreturn true;\n}\n/**\n* Deep equality check for a map.\n*\n* @param lhsMap Map on the left hand side\n* @param rhsMap Map on the right hand side\n* @param checkedValues Structured value pairs already compared or being compared\n* @return True if the map values are equal, else false.\n*/\nprivate static boolean isEqual(MapValueImpl lhsMap, MapValueImpl rhsMap, List checkedValues) {\nValuePair compValuePair = new ValuePair(lhsMap, rhsMap);\nif (checkedValues.contains(compValuePair)) {\nreturn true;\n}\ncheckedValues.add(compValuePair);\nif (lhsMap.size() != rhsMap.size()) {\nreturn false;\n}\nif (!lhsMap.keySet().containsAll(rhsMap.keySet())) {\nreturn false;\n}\nIterator> mapIterator = lhsMap.entrySet().iterator();\nwhile (mapIterator.hasNext()) {\nMap.Entry lhsMapEntry = mapIterator.next();\nif (!isEqual(lhsMapEntry.getValue(), rhsMap.get(lhsMapEntry.getKey()), checkedValues)) {\nreturn false;\n}\n}\nreturn true;\n}\n/**\n* Deep equality check for a table.\n*\n* @param lhsTable Table on the left hand side\n* @param rhsTable Table on the right hand side\n* 
@param checkedValues Structured value pairs already compared or being compared\n* @return True if the table values are equal, else false.\n*/\nprivate static boolean isEqual(TableValueImpl lhsTable, TableValueImpl rhsTable, List checkedValues) {\nValuePair compValuePair = new ValuePair(lhsTable, rhsTable);\nif (checkedValues.contains(compValuePair)) {\nreturn true;\n}\ncheckedValues.add(compValuePair);\nif (lhsTable.size() != rhsTable.size()) {\nreturn false;\n}\nboolean isLhsKeyedTable = ((BTableType) lhsTable.getType()).getFieldNames() != null &&\n((BTableType) lhsTable.getType()).getFieldNames().length > 0;\nboolean isRhsKeyedTable = ((BTableType) rhsTable.getType()).getFieldNames() != null &&\n((BTableType) rhsTable.getType()).getFieldNames().length > 0;\nObject[] lhsTableValues = lhsTable.values().toArray();\nObject[] rhsTableValues = rhsTable.values().toArray();\nif (isLhsKeyedTable == isRhsKeyedTable) {\nfor (int i = 0; i < lhsTableValues.length; i++) {\nif (!isEqual(lhsTableValues[i], rhsTableValues[i], checkedValues)) {\nreturn false;\n}\n}\nreturn true;\n}\nreturn false;\n}\n/**\n* Deep equality check for error.\n*\n* @param lhsError The error on the left hand side\n* @param rhsError The error on the right hand side\n* @param checkedValues Errors already compared or being compared\n* @return True if the error values are equal, else false.\n*/\nprivate static boolean isEqual(ErrorValue lhsError, ErrorValue rhsError, List checkedValues) {\nValuePair compValuePair = new ValuePair(lhsError, rhsError);\nif (checkedValues.contains(compValuePair)) {\nreturn true;\n}\ncheckedValues.add(compValuePair);\nreturn isEqual(lhsError.getMessage(), rhsError.getMessage(), checkedValues) &&\nisEqual((MapValueImpl) lhsError.getDetails(), (MapValueImpl) rhsError.getDetails(), checkedValues) &&\nisEqual(lhsError.getCause(), rhsError.getCause(), checkedValues);\n}\n/**\n* Deep equality check for XML Sequence.\n*\n* @param lhsXMLSequence The XML sequence on the left hand side\n* @param rhsXml The XML on the right hand side\n* @return True if the XML values are equal, else false.\n*/\nprivate static boolean isEqual(XmlSequence lhsXMLSequence, XmlValue rhsXml) {\nif (rhsXml instanceof XmlSequence) {\nXmlSequence rhsXMLSequence = (XmlSequence) rhsXml;\nreturn isXMLSequenceChildrenEqual(lhsXMLSequence.getChildrenList(), rhsXMLSequence.getChildrenList());\n}\nif (rhsXml instanceof XmlItem) {\nreturn lhsXMLSequence.getChildrenList().size() == 1 &&\nisEqual(lhsXMLSequence.getChildrenList().get(0), rhsXml);\n}\nreturn lhsXMLSequence.getChildrenList().isEmpty() &&\nTypeUtils.getType(rhsXml) == PredefinedTypes.TYPE_XML_NEVER;\n}\n/**\n* Deep equality check for XML item.\n*\n* @param lhsXMLItem The XML item on the left hand side\n* @param rhsXml The XML on the right hand side\n* @return True if the XML values are equal, else false.\n*/\nprivate static boolean isEqual(XmlItem lhsXMLItem, XmlValue rhsXml) {\nif (rhsXml instanceof XmlItem) {\nXmlItem rhsXMLItem = (XmlItem) rhsXml;\nif (!(rhsXMLItem.getQName().equals(lhsXMLItem.getQName()))) {\nreturn false;\n}\nif (!(rhsXMLItem.getAttributesMap().entrySet().equals(lhsXMLItem.getAttributesMap().entrySet()))) {\nreturn false;\n}\nreturn isEqual(rhsXMLItem.getChildrenSeq(), lhsXMLItem.getChildrenSeq());\n}\nif (rhsXml instanceof XmlSequence) {\nXmlSequence rhsXMLSequence = (XmlSequence) rhsXml;\nreturn rhsXMLSequence.getChildrenList().size() == 1 &&\nisEqual(lhsXMLItem, rhsXMLSequence.getChildrenList().get(0));\n}\nreturn false;\n}\n/**\n* Deep equality check for XML 
Text.\n*\n* @param lhsXMLText The XML text on the left hand side\n* @param rhsXml The XML on the right hand side\n* @return True if the XML values are equal, else false.\n*/\nprivate static boolean isEqual(XmlText lhsXMLText, XmlValue rhsXml) {\nif (rhsXml instanceof XmlText) {\nXmlText rhsXMLText = (XmlText) rhsXml;\nreturn lhsXMLText.getTextValue().equals(rhsXMLText.getTextValue());\n}\nreturn lhsXMLText.getType() == PredefinedTypes.TYPE_XML_NEVER && rhsXml instanceof XmlSequence &&\n((XmlSequence) rhsXml).getChildrenList().isEmpty();\n}\n/**\n* Deep equality check for XML Comment.\n*\n* @param lhsXMLComment The XML comment on the left hand side\n* @param rhsXml The XML on the right hand side\n* @return True if the XML values are equal, else false.\n*/\nprivate static boolean isEqual(XmlComment lhsXMLComment, XmlValue rhsXml) {\nif (!(rhsXml instanceof XmlComment)) {\nreturn false;\n}\nXmlComment rhXMLComment = (XmlComment) rhsXml;\nreturn lhsXMLComment.getTextValue().equals(rhXMLComment.getTextValue());\n}\n/**\n* Deep equality check for XML Processing Instruction.\n*\n* @param lhsXMLPi The XML processing instruction on the left hand side\n* @param rhsXml The XML on the right hand side\n* @return True if the XML values are equal, else false.\n*/\nprivate static boolean isEqual(XmlPi lhsXMLPi, XmlValue rhsXml) {\nif (!(rhsXml instanceof XmlPi)) {\nreturn false;\n}\nXmlPi rhsXMLPi = (XmlPi) rhsXml;\nreturn lhsXMLPi.getData().equals(rhsXMLPi.getData()) && lhsXMLPi.getTarget().equals(rhsXMLPi.getTarget());\n}\nprivate static boolean isXMLSequenceChildrenEqual(List lhsList, List rhsList) {\nif (lhsList.size() != rhsList.size()) {\nreturn false;\n}\nfor (int i = 0; i < lhsList.size(); i++) {\nif (!isEqual(lhsList.get(i), rhsList.get(i))) {\nreturn false;\n}\n}\nreturn true;\n}\n/**\n* Type vector of size two, to hold the source and the target types.\n*\n* @since 0.995.0\n*/\nprivate static class TypePair {\nType sourceType;\nType targetType;\npublic TypePair(Type sourceType, Type targetType) {\nthis.sourceType = sourceType;\nthis.targetType = targetType;\n}\n@Override\npublic boolean equals(Object obj) {\nif (!(obj instanceof TypePair)) {\nreturn false;\n}\nTypePair other = (TypePair) obj;\nreturn this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);\n}\n}\n/**\n* Check the reference equality of handle values.\n*\n* @param lhsValue The value on the left hand side\n* @param rhsValue The value on the right hand side\n* @return True if values are equal, else false.\n*/\nprivate static boolean isHandleValueRefEqual(Object lhsValue, Object rhsValue) {\nHandleValue lhsHandle = (HandleValue) lhsValue;\nHandleValue rhsHandle = (HandleValue) rhsValue;\nreturn lhsHandle.getValue() == rhsHandle.getValue();\n}\n/**\n* Unordered value vector of size two, to hold two values being compared.\n*\n* @since 0.995.0\n*/\nprivate static class ValuePair {\nArrayList valueList = new ArrayList<>(2);\nValuePair(Object valueOne, Object valueTwo) {\nvalueList.add(valueOne);\nvalueList.add(valueTwo);\n}\n@Override\npublic boolean equals(Object otherPair) {\nif (!(otherPair instanceof ValuePair)) {\nreturn false;\n}\nArrayList otherList = ((ValuePair) otherPair).valueList;\nArrayList currentList = valueList;\nif (otherList.size() != currentList.size()) {\nreturn false;\n}\nfor (int i = 0; i < otherList.size(); i++) {\nif (!otherList.get(i).equals(currentList.get(i))) {\nreturn false;\n}\n}\nreturn true;\n}\n}\n/**\n* Checks whether a given {@link BType} has an implicit initial value or 
not.\n* @param type {@link BType} to be analyzed.\n* @return whether there's an implicit initial value or not.\n*/\npublic static boolean hasFillerValue(Type type) {\nreturn hasFillerValue(type, new ArrayList<>());\n}\nprivate static boolean hasFillerValue(Type type, List unanalyzedTypes) {\nif (type == null) {\nreturn true;\n}\nif (type.getTag() < TypeTags.RECORD_TYPE_TAG &&\n!(type.getTag() == TypeTags.CHAR_STRING_TAG || type.getTag() == TypeTags.NEVER_TAG)) {\nreturn true;\n}\nswitch (type.getTag()) {\ncase TypeTags.STREAM_TAG:\ncase TypeTags.MAP_TAG:\ncase TypeTags.ANY_TAG:\nreturn true;\ncase TypeTags.ARRAY_TAG:\nreturn checkFillerValue((BArrayType) type, unanalyzedTypes);\ncase TypeTags.FINITE_TYPE_TAG:\nreturn checkFillerValue((BFiniteType) type);\ncase TypeTags.OBJECT_TYPE_TAG:\nreturn checkFillerValue((BObjectType) type);\ncase TypeTags.RECORD_TYPE_TAG:\nreturn checkFillerValue((BRecordType) type, unanalyzedTypes);\ncase TypeTags.TUPLE_TAG:\nreturn checkFillerValue((BTupleType) type, unanalyzedTypes);\ncase TypeTags.UNION_TAG:\nreturn checkFillerValue((BUnionType) type, unanalyzedTypes);\ndefault:\nreturn false;\n}\n}\nprivate static boolean checkFillerValue(BTupleType tupleType, List unAnalyzedTypes) {\nif (unAnalyzedTypes.contains(tupleType)) {\nreturn true;\n}\nunAnalyzedTypes.add(tupleType);\nfor (Type member : tupleType.getTupleTypes()) {\nif (!hasFillerValue(member, unAnalyzedTypes)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate static boolean checkFillerValue(BUnionType type, List unAnalyzedTypes) {\nif (unAnalyzedTypes.contains(type)) {\nreturn true;\n}\nunAnalyzedTypes.add(type);\nif (type.isNullable()) {\nreturn true;\n}\nIterator iterator = type.getMemberTypes().iterator();\nType firstMember;\nfor (firstMember = iterator.next(); iterator.hasNext(); ) {\nif (!isSameType(firstMember, iterator.next())) {\nreturn false;\n}\n}\nreturn isValueType(firstMember) && hasFillerValue(firstMember);\n}\nprivate static boolean checkFillerValue(BRecordType type, List unAnalyzedTypes) {\nif (unAnalyzedTypes.contains(type)) {\nreturn true;\n}\nunAnalyzedTypes.add(type);\nfor (Field field : type.getFields().values()) {\nif (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) {\ncontinue;\n}\nif (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED)) {\ncontinue;\n}\nreturn false;\n}\nreturn true;\n}\nprivate static boolean checkFillerValue(BArrayType type, List unAnalyzedTypes) {\nreturn type.getState() == ArrayState.OPEN || hasFillerValue(type.getElementType(), unAnalyzedTypes);\n}\nprivate static boolean checkFillerValue(BObjectType type) {\nif (type.getTag() == TypeTags.SERVICE_TAG) {\nreturn false;\n} else {\nMethodType generatedInitializer = type.generatedInitializer;\nif (generatedInitializer == null) {\nreturn false;\n}\nFunctionType initFuncType = generatedInitializer.getType();\nboolean noParams = initFuncType.getParameters().length == 0;\nboolean nilReturn = initFuncType.getReturnType().getTag() == TypeTags.NULL_TAG;\nreturn noParams && nilReturn;\n}\n}\nprivate static boolean checkFillerValue(BFiniteType type) {\nfor (Object value: type.valueSpace) {\nif (value == null) {\nreturn true;\n}\n}\nif (type.valueSpace.size() == 1) {\nreturn true;\n}\nObject firstElement = type.valueSpace.iterator().next();\nfor (Object value : type.valueSpace) {\nif (value.getClass() != firstElement.getClass()) {\nreturn false;\n}\n}\nif (firstElement instanceof String) {\nreturn containsElement(type.valueSpace, \"\\\"\\\"\");\n} else if (firstElement instanceof Byte\n|| 
firstElement instanceof Integer\n|| firstElement instanceof Long) {\nreturn containsElement(type.valueSpace, \"0\");\n} else if (firstElement instanceof Float\n|| firstElement instanceof Double\n|| firstElement instanceof BigDecimal) {\nreturn containsElement(type.valueSpace, \"0.0\");\n} else if (firstElement instanceof Boolean) {\nreturn containsElement(type.valueSpace, \"false\");\n} else {\nreturn false;\n}\n}\nprivate static boolean containsElement(Set valueSpace, String e) {\nfor (Object value : valueSpace) {\nif (value != null && value.toString().equals(e)) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate static boolean containsType(Set valueSpace, Type type) {\nfor (Object value : valueSpace) {\nif (!isSameType(type, getType(value))) {\nreturn false;\n}\n}\nreturn true;\n}\npublic static Object handleAnydataValues(Object sourceVal, Type targetType) {\nif (sourceVal != null && !(sourceVal instanceof Number) && !(sourceVal instanceof BString) &&\n!(sourceVal instanceof Boolean) && !(sourceVal instanceof BValue)) {\nthrow ErrorUtils.createJToBTypeCastError(sourceVal.getClass(), targetType);\n}\nreturn sourceVal;\n}\nprivate TypeChecker() {\n}\n}" + }, + { + "comment": "You mean that we should enforce it when it's 307?", + "method_body": "void shouldNotRedirectOnPostMethodsByDefault() {\nRedirectingResourceClient302 client302 = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.build(RedirectingResourceClient302.class);\nassertThat(client302.post().getStatus()).isEqualTo(302);\nRedirectingResourceClient307 client307 = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.build(RedirectingResourceClient307.class);\nassertThat(client307.post(1).getStatus()).isEqualTo(307);\n}", + "target_code": ".followRedirects(true)", + "method_body_after": "void shouldNotRedirectOnPostMethodsByDefault() {\nRedirectingResourceClient302 client302 = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.build(RedirectingResourceClient302.class);\nassertThat(client302.post().getStatus()).isEqualTo(302);\nRedirectingResourceClient307 client307 = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.build(RedirectingResourceClient307.class);\nassertThat(client307.post(1).getStatus()).isEqualTo(307);\n}", + "context_before": "class RedirectTest {\n@RegisterExtension\nstatic final QuarkusUnitTest TEST = new QuarkusUnitTest()\n.withApplicationRoot((jar) -> jar\n.addClasses(RedirectingResourceClient302.class, RedirectingResourceClient307.class,\nRedirectingResourceWithRegisterProviderRedirectHandlerClient.class,\nRedirectingResourceWithRedirectHandlerAnnotationClient.class,\nRedirectingResourceWithSeveralRedirectHandlerAnnotationsClient.class,\nEnablePostRedirectHandler.class,\nRedirectingResource.class));\n@TestHTTPResource\nURI uri;\n@Test\nvoid shouldRedirect3Times_whenMax4() {\nRedirectingResourceClient302 client = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.property(QuarkusRestClientProperties.MAX_REDIRECTS, 4)\n.build(RedirectingResourceClient302.class);\nResponse call = client.call(3);\nassertThat(call.getStatus()).isEqualTo(200);\n}\n@Test\nvoid shouldNotRedirect3Times_whenMax2() {\nRedirectingResourceClient302 client = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.property(QuarkusRestClientProperties.MAX_REDIRECTS, 2)\n.build(RedirectingResourceClient302.class);\nassertThat(client.call(3).getStatus()).isEqualTo(302);\n}\n@Test\n@Test\nvoid 
shouldRedirectWhenUsingCustomRedirectHandlerOnPostMethods() {\nRedirectingResourceClient302 client302 = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.register(EnablePostRedirectHandler.class)\n.build(RedirectingResourceClient302.class);\nassertThat(client302.post().getStatus()).isEqualTo(200);\nRedirectingResourceClient307 client307 = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.register(EnablePostRedirectHandler.class)\n.build(RedirectingResourceClient307.class);\nassertThat(client307.post(1).getStatus()).isEqualTo(200);\n}\n@Test\nvoid shouldRedirectWhenRegisterProviderUsingCustomRedirectHandlerOnPostMethods() {\nRedirectingResourceClient302 client = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.build(RedirectingResourceWithRegisterProviderRedirectHandlerClient.class);\nassertThat(client.post().getStatus()).isEqualTo(200);\n}\n@Test\nvoid shouldRedirectWhenAnnotatedClientRedirectHandlerOnPostMethods() {\nRedirectingResourceClient302 client = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.build(RedirectingResourceWithRedirectHandlerAnnotationClient.class);\nassertThat(client.post().getStatus()).isEqualTo(200);\n}\n@Test\nvoid shouldNotRedirectWhenARedirectHandlerWithMorePriorityIsUsed() {\nRedirectingResourceClient302 client = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.build(RedirectingResourceWithSeveralRedirectHandlerAnnotationsClient.class);\nassertThat(client.post().getStatus()).isEqualTo(302);\n}\n}", + "context_after": "class RedirectTest {\n@RegisterExtension\nstatic final QuarkusUnitTest TEST = new QuarkusUnitTest()\n.withApplicationRoot((jar) -> jar\n.addClasses(RedirectingResourceClient302.class, RedirectingResourceClient307.class,\nRedirectingResourceWithRegisterProviderRedirectHandlerClient.class,\nRedirectingResourceWithRedirectHandlerAnnotationClient.class,\nRedirectingResourceWithSeveralRedirectHandlerAnnotationsClient.class,\nEnablePostRedirectHandler.class,\nRedirectingResource.class));\n@TestHTTPResource\nURI uri;\n@Test\nvoid shouldRedirect3Times_whenMax4() {\nRedirectingResourceClient302 client = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.property(QuarkusRestClientProperties.MAX_REDIRECTS, 4)\n.build(RedirectingResourceClient302.class);\nResponse call = client.call(3);\nassertThat(call.getStatus()).isEqualTo(200);\n}\n@Test\nvoid shouldNotRedirect3Times_whenMax2() {\nRedirectingResourceClient302 client = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.property(QuarkusRestClientProperties.MAX_REDIRECTS, 2)\n.build(RedirectingResourceClient302.class);\nassertThat(client.call(3).getStatus()).isEqualTo(302);\n}\n@Test\n@Test\nvoid shouldRedirectWhenUsingCustomRedirectHandlerOnPostMethods() {\nRedirectingResourceClient302 client302 = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.register(EnablePostRedirectHandler.class)\n.build(RedirectingResourceClient302.class);\nassertThat(client302.post().getStatus()).isEqualTo(200);\nRedirectingResourceClient307 client307 = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.register(EnablePostRedirectHandler.class)\n.build(RedirectingResourceClient307.class);\nassertThat(client307.post(1).getStatus()).isEqualTo(200);\n}\n@Test\nvoid shouldRedirectWhenRegisterProviderUsingCustomRedirectHandlerOnPostMethods() {\nRedirectingResourceClient302 client = 
RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.build(RedirectingResourceWithRegisterProviderRedirectHandlerClient.class);\nassertThat(client.post().getStatus()).isEqualTo(200);\n}\n@Test\nvoid shouldRedirectWhenAnnotatedClientRedirectHandlerOnPostMethods() {\nRedirectingResourceClient302 client = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.build(RedirectingResourceWithRedirectHandlerAnnotationClient.class);\nassertThat(client.post().getStatus()).isEqualTo(200);\n}\n@Test\nvoid shouldNotRedirectWhenARedirectHandlerWithMorePriorityIsUsed() {\nRedirectingResourceClient302 client = RestClientBuilder.newBuilder()\n.baseUri(uri)\n.followRedirects(true)\n.build(RedirectingResourceWithSeveralRedirectHandlerAnnotationsClient.class);\nassertThat(client.post().getStatus()).isEqualTo(302);\n}\n}" + }, + { + "comment": "Most of the operations has the same operands `new String[] { stmt };` can we merge the handling of them ?", + "method_body": "private static Optional parseBySqlParser(Parser sqlParser, String stmt) {\nList operations;\ntry {\noperations = sqlParser.parse(stmt);\n} catch (Throwable e) {\nif (e instanceof ValidationException) {\nthrow new SqlExecutionException(\"Invalidate SQL statement.\", e);\n}\nreturn Optional.empty();\n}\nif (operations.size() != 1) {\nthrow new SqlExecutionException(\"Only single statement is supported now.\");\n}\nfinal SqlCommand cmd;\nString[] operands = new String[0];\nOperation operation = operations.get(0);\nif (operation instanceof CatalogSinkModifyOperation) {\nboolean overwrite = ((CatalogSinkModifyOperation) operation).isOverwrite();\ncmd = overwrite ? SqlCommand.INSERT_OVERWRITE : SqlCommand.INSERT_INTO;\noperands = new String[] { stmt };\n} else if (operation instanceof CreateTableOperation) {\ncmd = SqlCommand.CREATE_TABLE;\noperands = new String[] { stmt };\n} else if (operation instanceof DropTableOperation) {\ncmd = SqlCommand.DROP_TABLE;\noperands = new String[] { stmt };\n} else if (operation instanceof AlterTableOperation) {\ncmd = SqlCommand.ALTER_TABLE;\noperands = new String[] { stmt };\n} else if (operation instanceof CreateViewOperation) {\ncmd = SqlCommand.CREATE_VIEW;\nCreateViewOperation op = (CreateViewOperation) operation;\noperands = new String[] { op.getViewIdentifier().asSerializableString(),\nop.getCatalogView().getOriginalQuery() };\n} else if (operation instanceof DropViewOperation) {\ncmd = SqlCommand.DROP_VIEW;\noperands = new String[] { ((DropViewOperation) operation).getViewIdentifier().asSerializableString() };\n} else if (operation instanceof CreateDatabaseOperation) {\ncmd = SqlCommand.CREATE_DATABASE;\noperands = new String[] { stmt };\n} else if (operation instanceof DropDatabaseOperation) {\ncmd = SqlCommand.DROP_DATABASE;\noperands = new String[] { stmt };\n} else if (operation instanceof AlterDatabaseOperation) {\ncmd = SqlCommand.ALTER_DATABASE;\noperands = new String[] { stmt };\n} else if (operation instanceof CreateCatalogOperation) {\ncmd = SqlCommand.CREATE_CATALOG;\noperands = new String[] { stmt };\n} else if (operation instanceof UseCatalogOperation) {\ncmd = SqlCommand.USE_CATALOG;\noperands = new String[] { String.format(\"`%s`\", ((UseCatalogOperation) operation).getCatalogName()) };\n} else if (operation instanceof UseDatabaseOperation) {\ncmd = SqlCommand.USE;\nUseDatabaseOperation op = ((UseDatabaseOperation) operation);\noperands = new String[] { String.format(\"`%s`.`%s`\", op.getCatalogName(), op.getDatabaseName()) };\n} else if (operation instanceof 
ShowCatalogsOperation) {\ncmd = SqlCommand.SHOW_CATALOGS;\n} else if (operation instanceof ShowDatabasesOperation) {\ncmd = SqlCommand.SHOW_DATABASES;\n} else if (operation instanceof ShowTablesOperation) {\ncmd = SqlCommand.SHOW_TABLES;\n} else if (operation instanceof ShowFunctionsOperation) {\ncmd = SqlCommand.SHOW_FUNCTIONS;\n} else if (operation instanceof ExplainOperation) {\ncmd = SqlCommand.EXPLAIN;\noperands = new String[] { stmt };\n} else if (operation instanceof DescribeTableOperation) {\ncmd = SqlCommand.DESCRIBE;\noperands = new String[] { ((DescribeTableOperation) operation).getSqlIdentifier().asSerializableString() };\n} else if (operation instanceof QueryOperation) {\ncmd = SqlCommand.SELECT;\noperands = new String[] { stmt };\n} else {\ncmd = null;\n}\nif (cmd == null) {\nreturn Optional.empty();\n} else {\nreturn Optional.of(new SqlCommandCall(cmd, operands));\n}\n}", + "target_code": "} else {", + "method_body_after": "private static Optional parseBySqlParser(Parser sqlParser, String stmt) {\nList operations;\ntry {\noperations = sqlParser.parse(stmt);\n} catch (Throwable e) {\nif (e instanceof ValidationException) {\nthrow new SqlExecutionException(\"Invalidate SQL statement.\", e);\n}\nreturn Optional.empty();\n}\nif (operations.size() != 1) {\nthrow new SqlExecutionException(\"Only single statement is supported now.\");\n}\nfinal SqlCommand cmd;\nString[] operands = new String[] { stmt };\nOperation operation = operations.get(0);\nif (operation instanceof CatalogSinkModifyOperation) {\nboolean overwrite = ((CatalogSinkModifyOperation) operation).isOverwrite();\ncmd = overwrite ? SqlCommand.INSERT_OVERWRITE : SqlCommand.INSERT_INTO;\n} else if (operation instanceof CreateTableOperation) {\ncmd = SqlCommand.CREATE_TABLE;\n} else if (operation instanceof DropTableOperation) {\ncmd = SqlCommand.DROP_TABLE;\n} else if (operation instanceof AlterTableOperation) {\ncmd = SqlCommand.ALTER_TABLE;\n} else if (operation instanceof CreateViewOperation) {\ncmd = SqlCommand.CREATE_VIEW;\nCreateViewOperation op = (CreateViewOperation) operation;\noperands = new String[] { op.getViewIdentifier().asSerializableString(),\nop.getCatalogView().getOriginalQuery() };\n} else if (operation instanceof DropViewOperation) {\ncmd = SqlCommand.DROP_VIEW;\noperands = new String[] { ((DropViewOperation) operation).getViewIdentifier().asSerializableString() };\n} else if (operation instanceof CreateDatabaseOperation) {\ncmd = SqlCommand.CREATE_DATABASE;\n} else if (operation instanceof DropDatabaseOperation) {\ncmd = SqlCommand.DROP_DATABASE;\n} else if (operation instanceof AlterDatabaseOperation) {\ncmd = SqlCommand.ALTER_DATABASE;\n} else if (operation instanceof CreateCatalogOperation) {\ncmd = SqlCommand.CREATE_CATALOG;\n} else if (operation instanceof DropCatalogOperation) {\ncmd = SqlCommand.DROP_CATALOG;\n} else if (operation instanceof UseCatalogOperation) {\ncmd = SqlCommand.USE_CATALOG;\noperands = new String[] { String.format(\"`%s`\", ((UseCatalogOperation) operation).getCatalogName()) };\n} else if (operation instanceof UseDatabaseOperation) {\ncmd = SqlCommand.USE;\nUseDatabaseOperation op = ((UseDatabaseOperation) operation);\noperands = new String[] { String.format(\"`%s`.`%s`\", op.getCatalogName(), op.getDatabaseName()) };\n} else if (operation instanceof ShowCatalogsOperation) {\ncmd = SqlCommand.SHOW_CATALOGS;\noperands = new String[0];\n} else if (operation instanceof ShowDatabasesOperation) {\ncmd = SqlCommand.SHOW_DATABASES;\noperands = new String[0];\n} else if (operation 
instanceof ShowTablesOperation) {\ncmd = SqlCommand.SHOW_TABLES;\noperands = new String[0];\n} else if (operation instanceof ShowFunctionsOperation) {\ncmd = SqlCommand.SHOW_FUNCTIONS;\noperands = new String[0];\n} else if (operation instanceof CreateCatalogFunctionOperation ||\noperation instanceof CreateTempSystemFunctionOperation) {\ncmd = SqlCommand.CREATE_FUNCTION;\n} else if (operation instanceof DropCatalogFunctionOperation ||\noperation instanceof DropTempSystemFunctionOperation) {\ncmd = SqlCommand.DROP_FUNCTION;\n} else if (operation instanceof AlterCatalogFunctionOperation) {\ncmd = SqlCommand.ALTER_FUNCTION;\n} else if (operation instanceof ExplainOperation) {\ncmd = SqlCommand.EXPLAIN;\n} else if (operation instanceof DescribeTableOperation) {\ncmd = SqlCommand.DESCRIBE;\noperands = new String[] { ((DescribeTableOperation) operation).getSqlIdentifier().asSerializableString() };\n} else if (operation instanceof QueryOperation) {\ncmd = SqlCommand.SELECT;\n} else {\ncmd = null;\n}\nreturn cmd == null ? Optional.empty() : Optional.of(new SqlCommandCall(cmd, operands));\n}", + "context_before": "class SqlCommandParser {\nprivate SqlCommandParser() {\n}\npublic static Optional parse(Parser sqlParser, String stmt) {\nstmt = stmt.trim();\nif (stmt.endsWith(\";\")) {\nstmt = stmt.substring(0, stmt.length() - 1).trim();\n}\nOptional callOpt = parseBySqlParser(sqlParser, stmt);\nif (callOpt.isPresent()) {\nreturn callOpt;\n} else {\nreturn parseByRegexMatching(stmt);\n}\n}\nprivate static Optional parseByRegexMatching(String stmt) {\nfor (SqlCommand cmd : SqlCommand.values()) {\nif (cmd.hasRegexPattern()) {\nfinal Matcher matcher = cmd.pattern.matcher(stmt);\nif (matcher.matches()) {\nfinal String[] groups = new String[matcher.groupCount()];\nfor (int i = 0; i < groups.length; i++) {\ngroups[i] = matcher.group(i + 1);\n}\nreturn cmd.operandConverter.apply(groups)\n.map((operands) -> {\nString[] newOperands = operands;\nif (cmd == SqlCommand.EXPLAIN) {\nnewOperands = new String[] { \"EXPLAIN PLAN FOR \" + operands[0] };\n}\nreturn new SqlCommandCall(cmd, newOperands);\n});\n}\n}\n}\nreturn Optional.empty();\n}\nprivate static final Function> NO_OPERANDS =\n(operands) -> Optional.of(new String[0]);\nprivate static final Function> SINGLE_OPERAND =\n(operands) -> Optional.of(new String[]{operands[0]});\nprivate static final int DEFAULT_PATTERN_FLAGS = Pattern.CASE_INSENSITIVE | Pattern.DOTALL;\n/**\n* Supported SQL commands.\n*/\nenum SqlCommand {\nQUIT(\n\"(QUIT|EXIT)\",\nNO_OPERANDS),\nCLEAR(\n\"CLEAR\",\nNO_OPERANDS),\nHELP(\n\"HELP\",\nNO_OPERANDS),\nSHOW_CATALOGS,\nSHOW_DATABASES,\nSHOW_TABLES,\nSHOW_FUNCTIONS,\nSHOW_MODULES(\n\"SHOW\\\\s+MODULES\",\nNO_OPERANDS),\nUSE_CATALOG,\nUSE,\nCREATE_CATALOG,\nDESC(\n\"DESC\\\\s+(.*)\",\nSINGLE_OPERAND),\nDESCRIBE,\nEXPLAIN(\n\"EXPLAIN\\\\s+(.*)\",\nSINGLE_OPERAND),\nSELECT,\nINSERT_INTO,\nINSERT_OVERWRITE,\nCREATE_TABLE,\nDROP_TABLE,\nCREATE_VIEW,\nCREATE_DATABASE,\nDROP_DATABASE,\nDROP_VIEW,\nALTER_DATABASE,\nALTER_TABLE,\nSET(\n\"SET(\\\\s+(\\\\S+)\\\\s*=(.*))?\",\n(operands) -> {\nif (operands.length < 3) {\nreturn Optional.empty();\n} else if (operands[0] == null) {\nreturn Optional.of(new String[0]);\n}\nreturn Optional.of(new String[]{operands[1], operands[2]});\n}),\nRESET(\n\"RESET\",\nNO_OPERANDS),\nSOURCE(\n\"SOURCE\\\\s+(.*)\",\nSINGLE_OPERAND);\npublic final @Nullable Pattern pattern;\npublic final @Nullable Function> operandConverter;\nSqlCommand() {\nthis.pattern = null;\nthis.operandConverter = null;\n}\nSqlCommand(String 
matchingRegex, Function> operandConverter) {\nthis.pattern = Pattern.compile(matchingRegex, DEFAULT_PATTERN_FLAGS);\nthis.operandConverter = operandConverter;\n}\n@Override\npublic String toString() {\nreturn super.toString().replace('_', ' ');\n}\npublic boolean hasOperands() {\nreturn operandConverter != NO_OPERANDS;\n}\npublic boolean hasRegexPattern() {\nreturn pattern != null;\n}\n}\n/**\n* Call of SQL command with operands and command type.\n*/\npublic static class SqlCommandCall {\npublic final SqlCommand command;\npublic final String[] operands;\npublic SqlCommandCall(SqlCommand command, String[] operands) {\nthis.command = command;\nthis.operands = operands;\n}\n@Override\npublic boolean equals(Object o) {\nif (this == o) {\nreturn true;\n}\nif (o == null || getClass() != o.getClass()) {\nreturn false;\n}\nSqlCommandCall that = (SqlCommandCall) o;\nreturn command == that.command && Arrays.equals(operands, that.operands);\n}\n@Override\npublic int hashCode() {\nint result = Objects.hash(command);\nresult = 31 * result + Arrays.hashCode(operands);\nreturn result;\n}\n@Override\npublic String toString() {\nreturn command + \"(\" + Arrays.toString(operands) + \")\";\n}\n}\n}", + "context_after": "class SqlCommandParser {\nprivate SqlCommandParser() {\n}\npublic static Optional parse(Parser sqlParser, String stmt) {\nstmt = stmt.trim();\nif (stmt.endsWith(\";\")) {\nstmt = stmt.substring(0, stmt.length() - 1).trim();\n}\nOptional callOpt = parseBySqlParser(sqlParser, stmt);\nif (callOpt.isPresent()) {\nreturn callOpt;\n} else {\nreturn parseByRegexMatching(stmt);\n}\n}\nprivate static Optional parseByRegexMatching(String stmt) {\nfor (SqlCommand cmd : SqlCommand.values()) {\nif (cmd.hasRegexPattern()) {\nfinal Matcher matcher = cmd.pattern.matcher(stmt);\nif (matcher.matches()) {\nfinal String[] groups = new String[matcher.groupCount()];\nfor (int i = 0; i < groups.length; i++) {\ngroups[i] = matcher.group(i + 1);\n}\nreturn cmd.operandConverter.apply(groups)\n.map((operands) -> {\nString[] newOperands = operands;\nif (cmd == SqlCommand.EXPLAIN) {\nnewOperands = new String[] { \"EXPLAIN PLAN FOR \" + operands[0] };\n}\nreturn new SqlCommandCall(cmd, newOperands);\n});\n}\n}\n}\nreturn Optional.empty();\n}\nprivate static final Function> NO_OPERANDS =\n(operands) -> Optional.of(new String[0]);\nprivate static final Function> SINGLE_OPERAND =\n(operands) -> Optional.of(new String[]{operands[0]});\nprivate static final int DEFAULT_PATTERN_FLAGS = Pattern.CASE_INSENSITIVE | Pattern.DOTALL;\n/**\n* Supported SQL commands.\n*/\nenum SqlCommand {\nQUIT(\n\"(QUIT|EXIT)\",\nNO_OPERANDS),\nCLEAR(\n\"CLEAR\",\nNO_OPERANDS),\nHELP(\n\"HELP\",\nNO_OPERANDS),\nSHOW_CATALOGS,\nSHOW_DATABASES,\nSHOW_TABLES,\nSHOW_FUNCTIONS,\nSHOW_MODULES(\n\"SHOW\\\\s+MODULES\",\nNO_OPERANDS),\nUSE_CATALOG,\nUSE,\nCREATE_CATALOG,\nDROP_CATALOG,\nDESC(\n\"DESC\\\\s+(.*)\",\nSINGLE_OPERAND),\nDESCRIBE,\nEXPLAIN(\n\"EXPLAIN\\\\s+(.*)\",\nSINGLE_OPERAND),\nCREATE_DATABASE,\nDROP_DATABASE,\nALTER_DATABASE,\nCREATE_TABLE,\nDROP_TABLE,\nALTER_TABLE,\nCREATE_VIEW,\nDROP_VIEW,\nCREATE_FUNCTION,\nDROP_FUNCTION,\nALTER_FUNCTION,\nSELECT,\nINSERT_INTO,\nINSERT_OVERWRITE,\nSET(\n\"SET(\\\\s+(\\\\S+)\\\\s*=(.*))?\",\n(operands) -> {\nif (operands.length < 3) {\nreturn Optional.empty();\n} else if (operands[0] == null) {\nreturn Optional.of(new String[0]);\n}\nreturn Optional.of(new String[]{operands[1], operands[2]});\n}),\nRESET(\n\"RESET\",\nNO_OPERANDS),\nSOURCE(\n\"SOURCE\\\\s+(.*)\",\nSINGLE_OPERAND);\npublic final @Nullable 
Pattern pattern;\npublic final @Nullable Function> operandConverter;\nSqlCommand() {\nthis.pattern = null;\nthis.operandConverter = null;\n}\nSqlCommand(String matchingRegex, Function> operandConverter) {\nthis.pattern = Pattern.compile(matchingRegex, DEFAULT_PATTERN_FLAGS);\nthis.operandConverter = operandConverter;\n}\n@Override\npublic String toString() {\nreturn super.toString().replace('_', ' ');\n}\npublic boolean hasOperands() {\nreturn operandConverter != NO_OPERANDS;\n}\npublic boolean hasRegexPattern() {\nreturn pattern != null;\n}\n}\n/**\n* Call of SQL command with operands and command type.\n*/\npublic static class SqlCommandCall {\npublic final SqlCommand command;\npublic final String[] operands;\npublic SqlCommandCall(SqlCommand command, String[] operands) {\nthis.command = command;\nthis.operands = operands;\n}\n@Override\npublic boolean equals(Object o) {\nif (this == o) {\nreturn true;\n}\nif (o == null || getClass() != o.getClass()) {\nreturn false;\n}\nSqlCommandCall that = (SqlCommandCall) o;\nreturn command == that.command && Arrays.equals(operands, that.operands);\n}\n@Override\npublic int hashCode() {\nint result = Objects.hash(command);\nresult = 31 * result + Arrays.hashCode(operands);\nreturn result;\n}\n@Override\npublic String toString() {\nreturn command + \"(\" + Arrays.toString(operands) + \")\";\n}\n}\n}" + }, + { + "comment": "I now see how I wrote this very inefficiently, these fields could easily have been `AtomicBoolean`s or even plain `boolean`s :-) But that's just an observation, the test works just fine.", + "method_body": "public void test() {\nassertEquals(0, MyDependentBean.createdCounter.get());\nassertEquals(0, MyDependentBean.destroyedCounter.get());\nassertEquals(0, MySingletonBean.createdCounter.get());\nassertEquals(0, MySingletonBean.destroyedCounter.get());\nInstanceHandle dependentBean = Arc.container().select(MyDependentBean.class).getHandle();\nInstanceHandle singletonBean = Arc.container().select(MySingletonBean.class).getHandle();\ndependentBean.get();\nsingletonBean.get();\nassertEquals(1, MyDependentBean.createdCounter.get());\nassertEquals(0, MyDependentBean.destroyedCounter.get());\nassertEquals(1, MySingletonBean.createdCounter.get());\nassertEquals(0, MySingletonBean.destroyedCounter.get());\ndependentBean.destroy();\nsingletonBean.destroy();\nassertEquals(1, MyDependentBean.createdCounter.get());\nassertEquals(1, MyDependentBean.destroyedCounter.get());\nassertEquals(1, MySingletonBean.createdCounter.get());\nassertEquals(1, MySingletonBean.destroyedCounter.get());\n}", + "target_code": "assertEquals(1, MySingletonBean.destroyedCounter.get());", + "method_body_after": "public void test() {\nassertEquals(0, MyDependentBean.createdCounter.get());\nassertEquals(0, MyDependentBean.destroyedCounter.get());\nassertEquals(0, MySingletonBean.createdCounter.get());\nassertEquals(0, MySingletonBean.destroyedCounter.get());\nInstanceHandle dependentBean = Arc.container().select(MyDependentBean.class).getHandle();\nInstanceHandle singletonBean = Arc.container().select(MySingletonBean.class).getHandle();\ndependentBean.get();\nsingletonBean.get();\nassertEquals(1, MyDependentBean.createdCounter.get());\nassertEquals(0, MyDependentBean.destroyedCounter.get());\nassertEquals(1, MySingletonBean.createdCounter.get());\nassertEquals(0, MySingletonBean.destroyedCounter.get());\ndependentBean.destroy();\nsingletonBean.destroy();\nassertEquals(1, MyDependentBean.createdCounter.get());\nassertEquals(1, 
MyDependentBean.destroyedCounter.get());\nassertEquals(1, MySingletonBean.createdCounter.get());\nassertEquals(1, MySingletonBean.destroyedCounter.get());\n}", + "context_before": "class SingletonDestructionTest {\n@RegisterExtension\npublic ArcTestContainer container = ArcTestContainer.builder()\n.beanClasses(MyDependentBean.class, MySingletonBean.class)\n.build();\n@Test\n@Dependent\nstatic class MyDependentBean {\nstatic final AtomicInteger createdCounter = new AtomicInteger(0);\nstatic final AtomicInteger destroyedCounter = new AtomicInteger(0);\n@PostConstruct\nvoid postConstruct() {\ncreatedCounter.incrementAndGet();\n}\n@PreDestroy\nvoid preDestroy() {\ndestroyedCounter.incrementAndGet();\n}\n}\n@Singleton\nstatic class MySingletonBean {\nstatic final AtomicInteger createdCounter = new AtomicInteger(0);\nstatic final AtomicInteger destroyedCounter = new AtomicInteger(0);\n@PostConstruct\nvoid postConstruct() {\ncreatedCounter.incrementAndGet();\n}\n@PreDestroy\nvoid preDestroy() {\ndestroyedCounter.incrementAndGet();\n}\n}\n}", + "context_after": "class SingletonDestructionTest {\n@RegisterExtension\npublic ArcTestContainer container = ArcTestContainer.builder()\n.beanClasses(MyDependentBean.class, MySingletonBean.class)\n.build();\n@Test\n@Dependent\nstatic class MyDependentBean {\nstatic final AtomicInteger createdCounter = new AtomicInteger(0);\nstatic final AtomicInteger destroyedCounter = new AtomicInteger(0);\n@PostConstruct\nvoid postConstruct() {\ncreatedCounter.incrementAndGet();\n}\n@PreDestroy\nvoid preDestroy() {\ndestroyedCounter.incrementAndGet();\n}\n}\n@Singleton\nstatic class MySingletonBean {\nstatic final AtomicInteger createdCounter = new AtomicInteger(0);\nstatic final AtomicInteger destroyedCounter = new AtomicInteger(0);\n@PostConstruct\nvoid postConstruct() {\ncreatedCounter.incrementAndGet();\n}\n@PreDestroy\nvoid preDestroy() {\ndestroyedCounter.incrementAndGet();\n}\n}\n}" + }, + { + "comment": "`FileMergingManager` is created for each TM, so `initFileSystem()` is only called once right? So, 1. in what case initFileSystem will be called several times? 2. 
what if a TM contains different jobs (Base CP directory maybe different)", + "method_body": "public void registerSubtaskForSharedStates(SubtaskKey subtaskKey) {\nString managedDirName = subtaskKey.getManagedDirName();\nPath managedPath = new Path(sharedStateDir, managedDirName);\nif (!managedSharedStateDir.containsKey(subtaskKey)) {\ncreateManagedDirectory(managedPath);\nmanagedSharedStateDir.put(subtaskKey, managedPath);\n}\n}", + "target_code": "}", + "method_body_after": "public void registerSubtaskForSharedStates(SubtaskKey subtaskKey) {\nString managedDirName = subtaskKey.getManagedDirName();\nPath managedPath = new Path(sharedStateDir, managedDirName);\nif (!managedSharedStateDir.containsKey(subtaskKey)) {\ncreateManagedDirectory(managedPath);\nmanagedSharedStateDir.put(subtaskKey, managedPath);\n}\n}", + "context_before": "class FileMergingSnapshotManagerBase implements FileMergingSnapshotManager {\nprivate static final Logger LOG = LoggerFactory.getLogger(FileMergingSnapshotManager.class);\nprivate final String id;\nprotected final Executor ioExecutor;\nprotected FileSystem fs;\nprotected Path checkpointDir;\nprotected Path sharedStateDir;\nprotected Path taskOwnedStateDir;\nprotected int writeBufferSize;\nprivate boolean fileSystemInitiated = false;\nprotected boolean syncAfterClosingLogicalFile;\nprotected PhysicalFile.PhysicalFileDeleter physicalFileDeleter = this::deletePhysicalFile;\nprivate final Map managedSharedStateDir = new ConcurrentHashMap<>();\nprotected Path managedExclusiveStateDir;\npublic FileMergingSnapshotManagerBase(String id, Executor ioExecutor) {\nthis.id = id;\nthis.ioExecutor = ioExecutor;\n}\n@Override\npublic void initFileSystem(\nFileSystem fileSystem,\nPath checkpointBaseDir,\nPath sharedStateDir,\nPath taskOwnedStateDir) {\nif (fileSystemInitiated) {\nPreconditions.checkArgument(\ncheckpointBaseDir.equals(this.checkpointDir),\n\"The checkpoint base dir is not deterministic across subtasks.\");\nPreconditions.checkArgument(\nsharedStateDir.equals(this.sharedStateDir),\n\"The shared checkpoint dir is not deterministic across subtasks.\");\nPreconditions.checkArgument(\ntaskOwnedStateDir.equals(this.taskOwnedStateDir),\n\"The task-owned checkpoint dir is not deterministic across subtasks.\");\nreturn;\n}\nthis.fs = fileSystem;\nthis.checkpointDir = Preconditions.checkNotNull(checkpointBaseDir);\nthis.sharedStateDir = Preconditions.checkNotNull(sharedStateDir);\nthis.taskOwnedStateDir = Preconditions.checkNotNull(taskOwnedStateDir);\nthis.fileSystemInitiated = true;\nthis.syncAfterClosingLogicalFile = shouldSyncAfterClosingLogicalFile(checkpointBaseDir);\nPath managedExclusivePath = new Path(taskOwnedStateDir, id);\ncreateManagedDirectory(managedExclusivePath);\nthis.managedExclusiveStateDir = managedExclusivePath;\n}\n@Override\nprotected LogicalFile createLogicalFile(\n@Nonnull PhysicalFile physicalFile, @Nonnull SubtaskKey subtaskKey) {\nLogicalFileId fileID = LogicalFileId.generateRandomId();\nreturn new LogicalFile(fileID, physicalFile, subtaskKey);\n}\n@Nonnull\nprotected PhysicalFile createPhysicalFile(SubtaskKey subtaskKey, CheckpointedStateScope scope)\nthrows IOException {\nPhysicalFile result;\nException latestException = null;\nPath dirPath = getManagedDir(subtaskKey, scope);\nif (dirPath == null) {\nthrow new IOException(\n\"Could not get \"\n+ scope\n+ \" path for subtask \"\n+ subtaskKey\n+ \", the directory may have not been created.\");\n}\nfor (int attempt = 0; attempt < 10; attempt++) {\ntry {\nOutputStreamAndPath streamAndPath 
=\nEntropyInjector.createEntropyAware(\nfs,\ncreatePhysicalFilePath(dirPath),\nFileSystem.WriteMode.NO_OVERWRITE);\nFSDataOutputStream outputStream = streamAndPath.stream();\nPath filePath = streamAndPath.path();\nresult = new PhysicalFile(outputStream, filePath, this.physicalFileDeleter, scope);\nupdateFileCreationMetrics(filePath);\nreturn result;\n} catch (Exception e) {\nlatestException = e;\n}\n}\nthrow new IOException(\n\"Could not open output stream for state file merging.\", latestException);\n}\nprivate void updateFileCreationMetrics(Path path) {\nLOG.debug(\"Create a new physical file {} for checkpoint file merging.\", path);\n}\nprotected Path createPhysicalFilePath(Path dirPath) {\nfinal String fileName = UUID.randomUUID().toString();\nreturn new Path(dirPath, fileName);\n}\nprotected final void deletePhysicalFile(FSDataOutputStream outputStream, Path filePath) {\nif (outputStream != null) {\ntry {\noutputStream.close();\n} catch (IOException e) {\nLOG.warn(\"Fail to close output stream when deleting file: {}\", filePath);\n}\n}\nioExecutor.execute(\n() -> {\ntry {\nfs.delete(filePath, false);\nLOG.debug(\"Physical file deleted: {}.\", filePath);\n} catch (IOException e) {\nLOG.warn(\"Fail to delete file: {}\", filePath);\n}\n});\n}\n/**\n* Get a reused physical file or create one. This will be called in stream creation logic.\n*\n*
Basic logic of file reusing: whenever a physical is needed, this method is called with\n* necessary information provided for acquiring a file. The file will not be reused until it is\n* written and returned to the reused pool by calling {@link\n*\n* @param subtaskKey the subtask key for the caller\n* @param checkpointId the checkpoint id\n* @param scope checkpoint scope\n* @return the requested physical file.\n* @throws IOException thrown if anything goes wrong with file system.\n*/\n@Nonnull\nprotected abstract PhysicalFile getOrCreatePhysicalFileForCheckpoint(\nSubtaskKey subtaskKey, long checkpointId, CheckpointedStateScope scope)\nthrows IOException;\n/**\n* Try to return an existing physical file to the manager for next reuse. Delete if needed.\n*\n*
Basic logic of file reusing, see {@link\n*\n* @param subtaskKey the subtask key for the caller\n* @param checkpointId in which checkpoint this physical is requested.\n* @param physicalFile the returning checkpoint\n* @throws IOException thrown if anything goes wrong with file system.\n* @see\n*/\nprotected abstract void returnPhysicalFileForNextReuse(\nSubtaskKey subtaskKey, long checkpointId, PhysicalFile physicalFile) throws IOException;\n@Override\npublic Path getManagedDir(SubtaskKey subtaskKey, CheckpointedStateScope scope) {\nif (scope.equals(CheckpointedStateScope.SHARED)) {\nreturn managedSharedStateDir.get(subtaskKey);\n} else {\nreturn managedExclusiveStateDir;\n}\n}\nstatic boolean shouldSyncAfterClosingLogicalFile(Path checkpointDir) {\nreturn true;\n}\nprivate void createManagedDirectory(Path managedPath) {\ntry {\nFileStatus fileStatus = null;\ntry {\nfileStatus = fs.getFileStatus(managedPath);\n} catch (FileNotFoundException e) {\n}\nif (fileStatus == null) {\nfs.mkdirs(managedPath);\nLOG.info(\"Created a directory {} for checkpoint file-merging.\", managedPath);\n} else if (fileStatus.isDir()) {\nLOG.info(\"Reusing previous directory {} for checkpoint file-merging.\", managedPath);\n} else {\nthrow new FlinkRuntimeException(\n\"The managed path \"\n+ managedPath\n+ \" for file-merging is occupied by another file. Cannot create directory.\");\n}\n} catch (IOException e) {\nthrow new FlinkRuntimeException(\n\"Cannot create directory \" + managedPath + \" for file-merging \", e);\n}\n}\n@Override\npublic void close() throws IOException {}\n}", + "context_after": "class FileMergingSnapshotManagerBase implements FileMergingSnapshotManager {\nprivate static final Logger LOG = LoggerFactory.getLogger(FileMergingSnapshotManager.class);\n/** The identifier of this manager. */\nprivate final String id;\n/** The executor for I/O operations in this manager. */\nprotected final Executor ioExecutor;\n/** The {@link FileSystem} that this manager works on. */\nprotected FileSystem fs;\nprotected Path checkpointDir;\nprotected Path sharedStateDir;\nprotected Path taskOwnedStateDir;\n/**\n* The file system should only be initialized once.\n*\n* @see FileMergingSnapshotManager\n*/\nprivate boolean fileSystemInitiated = false;\n/**\n* File-system dependent value. Mark whether the file system this manager running on need sync\n* for visibility. 
If true, DO a file sync after writing each segment .\n*/\nprotected boolean shouldSyncAfterClosingLogicalFile;\nprotected PhysicalFileDeleter physicalFileDeleter = this::deletePhysicalFile;\n/**\n* Currently the shared state files are merged within each subtask, files are split by different\n* directories.\n*/\nprivate final Map managedSharedStateDir = new ConcurrentHashMap<>();\n/**\n* The private state files are merged across subtasks, there is only one directory for\n* merged-files within one TM per job.\n*/\nprotected Path managedExclusiveStateDir;\npublic FileMergingSnapshotManagerBase(String id, Executor ioExecutor) {\nthis.id = id;\nthis.ioExecutor = ioExecutor;\n}\n@Override\npublic void initFileSystem(\nFileSystem fileSystem,\nPath checkpointBaseDir,\nPath sharedStateDir,\nPath taskOwnedStateDir)\nthrows IllegalArgumentException {\nif (fileSystemInitiated) {\nPreconditions.checkArgument(\ncheckpointBaseDir.equals(this.checkpointDir),\n\"The checkpoint base dir is not deterministic across subtasks.\");\nPreconditions.checkArgument(\nsharedStateDir.equals(this.sharedStateDir),\n\"The shared checkpoint dir is not deterministic across subtasks.\");\nPreconditions.checkArgument(\ntaskOwnedStateDir.equals(this.taskOwnedStateDir),\n\"The task-owned checkpoint dir is not deterministic across subtasks.\");\nreturn;\n}\nthis.fs = fileSystem;\nthis.checkpointDir = Preconditions.checkNotNull(checkpointBaseDir);\nthis.sharedStateDir = Preconditions.checkNotNull(sharedStateDir);\nthis.taskOwnedStateDir = Preconditions.checkNotNull(taskOwnedStateDir);\nthis.fileSystemInitiated = true;\nthis.shouldSyncAfterClosingLogicalFile = shouldSyncAfterClosingLogicalFile(fileSystem);\nPath managedExclusivePath = new Path(taskOwnedStateDir, id);\ncreateManagedDirectory(managedExclusivePath);\nthis.managedExclusiveStateDir = managedExclusivePath;\n}\n@Override\n/**\n* Create a logical file on a physical file.\n*\n* @param physicalFile the underlying physical file.\n* @param startOffset the offset of the physical file that the logical file start from.\n* @param length the length of the logical file.\n* @param subtaskKey the id of the subtask that the logical file belongs to.\n* @return the created logical file.\n*/\nprotected LogicalFile createLogicalFile(\n@Nonnull PhysicalFile physicalFile,\nint startOffset,\nint length,\n@Nonnull SubtaskKey subtaskKey) {\nLogicalFileId fileID = LogicalFileId.generateRandomId();\nreturn new LogicalFile(fileID, physicalFile, startOffset, length, subtaskKey);\n}\n/**\n* Create a physical file in right location (managed directory), which is specified by scope of\n* this checkpoint and current subtask.\n*\n* @param subtaskKey the {@link SubtaskKey} of current subtask.\n* @param scope the scope of the checkpoint.\n* @return the created physical file.\n* @throws IOException if anything goes wrong with file system.\n*/\n@Nonnull\nprotected PhysicalFile createPhysicalFile(SubtaskKey subtaskKey, CheckpointedStateScope scope)\nthrows IOException {\nPhysicalFile result;\nException latestException = null;\nPath dirPath = getManagedDir(subtaskKey, scope);\nif (dirPath == null) {\nthrow new IOException(\n\"Could not get \"\n+ scope\n+ \" path for subtask \"\n+ subtaskKey\n+ \", the directory may have not been created.\");\n}\nfor (int attempt = 0; attempt < 10; attempt++) {\ntry {\nOutputStreamAndPath streamAndPath =\nEntropyInjector.createEntropyAware(\nfs,\ngeneratePhysicalFilePath(dirPath),\nFileSystem.WriteMode.NO_OVERWRITE);\nFSDataOutputStream outputStream = 
streamAndPath.stream();\nPath filePath = streamAndPath.path();\nresult = new PhysicalFile(outputStream, filePath, this.physicalFileDeleter, scope);\nupdateFileCreationMetrics(filePath);\nreturn result;\n} catch (Exception e) {\nlatestException = e;\n}\n}\nthrow new IOException(\n\"Could not open output stream for state file merging.\", latestException);\n}\nprivate void updateFileCreationMetrics(Path path) {\nLOG.debug(\"Create a new physical file {} for checkpoint file merging.\", path);\n}\n/**\n* Generate a file path for a physical file.\n*\n* @param dirPath the parent directory path for the physical file.\n* @return the generated file path for a physical file.\n*/\nprotected Path generatePhysicalFilePath(Path dirPath) {\nfinal String fileName = UUID.randomUUID().toString();\nreturn new Path(dirPath, fileName);\n}\n/**\n* Delete a physical file by given file path. Use the io executor to do the deletion.\n*\n* @param filePath the given file path to delete.\n*/\nprotected final void deletePhysicalFile(Path filePath) {\nioExecutor.execute(\n() -> {\ntry {\nfs.delete(filePath, false);\nLOG.debug(\"Physical file deleted: {}.\", filePath);\n} catch (IOException e) {\nLOG.warn(\"Fail to delete file: {}\", filePath);\n}\n});\n}\n/**\n* Get a reused physical file or create one. This will be called in checkpoint output stream\n* creation logic.\n*\n*
TODO (FLINK-32073): Implement a CheckpointStreamFactory for file-merging that uses this\n* method to create or reuse physical files.\n*\n*
Basic logic of file reusing: whenever a physical file is needed, this method is called\n* with necessary information provided for acquiring a file. The file will not be reused until\n* it is written and returned to the reused pool by calling {@link\n*\n*\n* @param subtaskKey the subtask key for the caller\n* @param checkpointId the checkpoint id\n* @param scope checkpoint scope\n* @return the requested physical file.\n* @throws IOException thrown if anything goes wrong with file system.\n*/\n@Nonnull\nprotected abstract PhysicalFile getOrCreatePhysicalFileForCheckpoint(\nSubtaskKey subtaskKey, long checkpointId, CheckpointedStateScope scope)\nthrows IOException;\n/**\n* Try to return an existing physical file to the manager for next reuse. If this physical file\n* is no longer needed (for reusing), it will be closed.\n*\n*
Basic logic of file reusing, see {@link\n*\n* @param subtaskKey the subtask key for the caller\n* @param checkpointId in which checkpoint this physical file is requested.\n* @param physicalFile the returning checkpoint\n* @throws IOException thrown if anything goes wrong with file system.\n* @see\n*/\nprotected abstract void returnPhysicalFileForNextReuse(\nSubtaskKey subtaskKey, long checkpointId, PhysicalFile physicalFile) throws IOException;\n@Override\npublic Path getManagedDir(SubtaskKey subtaskKey, CheckpointedStateScope scope) {\nif (scope.equals(CheckpointedStateScope.SHARED)) {\nreturn managedSharedStateDir.get(subtaskKey);\n} else {\nreturn managedExclusiveStateDir;\n}\n}\nstatic boolean shouldSyncAfterClosingLogicalFile(FileSystem fileSystem) {\nreturn true;\n}\nprivate void createManagedDirectory(Path managedPath) {\ntry {\nFileStatus fileStatus = null;\ntry {\nfileStatus = fs.getFileStatus(managedPath);\n} catch (FileNotFoundException e) {\n}\nif (fileStatus == null) {\nfs.mkdirs(managedPath);\nLOG.info(\"Created a directory {} for checkpoint file-merging.\", managedPath);\n} else if (fileStatus.isDir()) {\nLOG.info(\"Reusing previous directory {} for checkpoint file-merging.\", managedPath);\n} else {\nthrow new FlinkRuntimeException(\n\"The managed path \"\n+ managedPath\n+ \" for file-merging is occupied by another file. Cannot create directory.\");\n}\n} catch (IOException e) {\nthrow new FlinkRuntimeException(\n\"Cannot create directory \" + managedPath + \" for file-merging \", e);\n}\n}\n@Override\npublic void close() throws IOException {}\n}" + }, + { + "comment": "`checkpointStore.claimOwnership()` method takes a list of partition ownership instances.", + "method_body": "private Mono loadBalance(final Tuple2, List> tuple) {\nreturn Mono.fromRunnable(() -> {\nlogger.info(\"Starting load balancer\");\nMap partitionOwnershipMap = tuple.getT1();\nList partitionIds = tuple.getT2();\nif (CoreUtils.isNullOrEmpty(partitionIds)) {\nthrow logger.logExceptionAsError(Exceptions.propagate(\nnew IllegalStateException(\"There are no partitions in Event Hub \" + eventHubName)));\n}\nint numberOfPartitions = partitionIds.size();\nlogger.info(\"Partition manager returned {} ownership records\", partitionOwnershipMap.size());\nlogger.info(\"EventHubAsyncClient returned {} partitions\", numberOfPartitions);\nif (!isValid(partitionOwnershipMap)) {\nthrow logger.logExceptionAsError(Exceptions.propagate(\nnew IllegalStateException(\"Invalid partitionOwnership data from CheckpointStore\")));\n}\n/*\n* Remove all partitions' ownership that have not been modified for a configuration period of time. 
This\n* means that the previous EventProcessor that owned the partition is probably down and the partition is now\n* eligible to be claimed by other EventProcessors.\n*/\nMap activePartitionOwnershipMap = removeInactivePartitionOwnerships(\npartitionOwnershipMap);\nlogger.info(\"Number of active ownership records {}\", activePartitionOwnershipMap.size());\n/*\n* Create a map of owner id and a list of partitions it owns\n*/\nMap> ownerPartitionMap = activePartitionOwnershipMap.values()\n.stream()\n.collect(\nCollectors.groupingBy(PartitionOwnership::getOwnerId, mapping(Function.identity(), toList())));\nownerPartitionMap.putIfAbsent(this.ownerId, new ArrayList<>());\nif (CoreUtils.isNullOrEmpty(activePartitionOwnershipMap)) {\n/*\n* If the active partition ownership map is empty, this is the first time an event processor is\n* running or all Event Processors are down for this Event Hub, consumer group combination. All\n* partitions in this Event Hub are available to claim. Choose a random partition to claim ownership.\n*/\nclaimOwnership(partitionOwnershipMap, ownerPartitionMap,\npartitionIds.get(RANDOM.nextInt(numberOfPartitions)));\nreturn;\n}\n/*\n* Find the minimum number of partitions every event processor should own when the load is\n* evenly distributed.\n*/\nint numberOfActiveEventProcessors = ownerPartitionMap.size();\nlogger.info(\"Number of active event processors {}\", ownerPartitionMap.size());\nint minPartitionsPerEventProcessor = numberOfPartitions / numberOfActiveEventProcessors;\n/*\n* If the number of partitions in Event Hub is not evenly divisible by number of active event processors,\n* a few Event Processors may own 1 additional partition than the minimum when the load is balanced.\n* Calculate the number of event processors that can own additional partition.\n*/\nint numberOfEventProcessorsWithAdditionalPartition = numberOfPartitions % numberOfActiveEventProcessors;\nlogger.info(\"Expected min partitions per event processor = {}, expected number of event \"\n+ \"processors with additional partition = {}\", minPartitionsPerEventProcessor,\nnumberOfEventProcessorsWithAdditionalPartition);\nif (isLoadBalanced(minPartitionsPerEventProcessor, numberOfEventProcessorsWithAdditionalPartition,\nownerPartitionMap)) {\nlogger.info(\"Load is balanced\");\ncheckpointStore.claimOwnership(partitionPumpManager.getPartitionPumps().keySet()\n.stream()\n.map(partitionId -> createPartitionOwnershipRequest(partitionOwnershipMap, partitionId))\n.collect(Collectors.toList()))\n.subscribe();\nreturn;\n}\nif (!shouldOwnMorePartitions(minPartitionsPerEventProcessor, ownerPartitionMap)) {\nlogger.info(\"This event processor owns {} partitions and shouldn't own more\",\nownerPartitionMap.get(ownerId).size());\ncheckpointStore.claimOwnership(partitionPumpManager.getPartitionPumps().keySet()\n.stream()\n.map(partitionId -> createPartitionOwnershipRequest(partitionOwnershipMap, partitionId))\n.collect(Collectors.toList()))\n.subscribe();\nreturn;\n}\nlogger.info(\n\"Load is unbalanced and this event processor should own more partitions\");\n/*\n* If some partitions are unclaimed, this could be because an event processor is down and\n* it's partitions are now available for others to own or because event processors are just\n* starting up and gradually claiming partitions to own or new partitions were added to Event Hub.\n* Find any partition that is not actively owned and claim it.\n*\n* OR\n*\n* Find a partition to steal from another event processor. 
Pick the event processor that has owns the\n* highest number of partitions.\n*/\nString partitionToClaim = partitionIds.parallelStream()\n.filter(partitionId -> !activePartitionOwnershipMap.containsKey(partitionId))\n.findAny()\n.orElseGet(() -> {\nlogger.info(\"No unclaimed partitions, stealing from another event processor\");\nreturn findPartitionToSteal(ownerPartitionMap);\n});\nclaimOwnership(partitionOwnershipMap, ownerPartitionMap, partitionToClaim);\n});\n}", + "target_code": ".collect(Collectors.toList()))", + "method_body_after": "private Mono loadBalance(final Tuple2, List> tuple) {\nreturn Mono.fromRunnable(() -> {\nlogger.info(\"Starting load balancer\");\nMap partitionOwnershipMap = tuple.getT1();\nList partitionIds = tuple.getT2();\nif (CoreUtils.isNullOrEmpty(partitionIds)) {\nthrow logger.logExceptionAsError(Exceptions.propagate(\nnew IllegalStateException(\"There are no partitions in Event Hub \" + eventHubName)));\n}\nint numberOfPartitions = partitionIds.size();\nlogger.info(\"Partition manager returned {} ownership records\", partitionOwnershipMap.size());\nlogger.info(\"EventHubAsyncClient returned {} partitions\", numberOfPartitions);\nif (!isValid(partitionOwnershipMap)) {\nthrow logger.logExceptionAsError(Exceptions.propagate(\nnew IllegalStateException(\"Invalid partitionOwnership data from CheckpointStore\")));\n}\n/*\n* Remove all partitions' ownership that have not been modified for a configuration period of time. This\n* means that the previous EventProcessor that owned the partition is probably down and the partition is now\n* eligible to be claimed by other EventProcessors.\n*/\nMap activePartitionOwnershipMap = removeInactivePartitionOwnerships(\npartitionOwnershipMap);\nlogger.info(\"Number of active ownership records {}\", activePartitionOwnershipMap.size());\n/*\n* Create a map of owner id and a list of partitions it owns\n*/\nMap> ownerPartitionMap = activePartitionOwnershipMap.values()\n.stream()\n.collect(\nCollectors.groupingBy(PartitionOwnership::getOwnerId, mapping(Function.identity(), toList())));\nownerPartitionMap.putIfAbsent(this.ownerId, new ArrayList<>());\nif (CoreUtils.isNullOrEmpty(activePartitionOwnershipMap)) {\n/*\n* If the active partition ownership map is empty, this is the first time an event processor is\n* running or all Event Processors are down for this Event Hub, consumer group combination. All\n* partitions in this Event Hub are available to claim. 
Choose a random partition to claim ownership.\n*/\nclaimOwnership(partitionOwnershipMap, ownerPartitionMap,\npartitionIds.get(RANDOM.nextInt(numberOfPartitions)));\nreturn;\n}\n/*\n* Find the minimum number of partitions every event processor should own when the load is\n* evenly distributed.\n*/\nint numberOfActiveEventProcessors = ownerPartitionMap.size();\nlogger.info(\"Number of active event processors {}\", ownerPartitionMap.size());\nint minPartitionsPerEventProcessor = numberOfPartitions / numberOfActiveEventProcessors;\n/*\n* If the number of partitions in Event Hub is not evenly divisible by number of active event processors,\n* a few Event Processors may own 1 additional partition than the minimum when the load is balanced.\n* Calculate the number of event processors that can own additional partition.\n*/\nint numberOfEventProcessorsWithAdditionalPartition = numberOfPartitions % numberOfActiveEventProcessors;\nlogger.info(\"Expected min partitions per event processor = {}, expected number of event \"\n+ \"processors with additional partition = {}\", minPartitionsPerEventProcessor,\nnumberOfEventProcessorsWithAdditionalPartition);\nif (isLoadBalanced(minPartitionsPerEventProcessor, numberOfEventProcessorsWithAdditionalPartition,\nownerPartitionMap)) {\nlogger.info(\"Load is balanced\");\ncheckpointStore.claimOwnership(partitionPumpManager.getPartitionPumps().keySet()\n.stream()\n.map(partitionId -> createPartitionOwnershipRequest(partitionOwnershipMap, partitionId))\n.collect(Collectors.toList()))\n.subscribe();\nreturn;\n}\nif (!shouldOwnMorePartitions(minPartitionsPerEventProcessor, ownerPartitionMap)) {\nlogger.info(\"This event processor owns {} partitions and shouldn't own more\",\nownerPartitionMap.get(ownerId).size());\ncheckpointStore.claimOwnership(partitionPumpManager.getPartitionPumps().keySet()\n.stream()\n.map(partitionId -> createPartitionOwnershipRequest(partitionOwnershipMap, partitionId))\n.collect(Collectors.toList()))\n.subscribe();\nreturn;\n}\nlogger.info(\n\"Load is unbalanced and this event processor should own more partitions\");\n/*\n* If some partitions are unclaimed, this could be because an event processor is down and\n* it's partitions are now available for others to own or because event processors are just\n* starting up and gradually claiming partitions to own or new partitions were added to Event Hub.\n* Find any partition that is not actively owned and claim it.\n*\n* OR\n*\n* Find a partition to steal from another event processor. 
Pick the event processor that has owns the\n* highest number of partitions.\n*/\nString partitionToClaim = partitionIds.parallelStream()\n.filter(partitionId -> !activePartitionOwnershipMap.containsKey(partitionId))\n.findAny()\n.orElseGet(() -> {\nlogger.info(\"No unclaimed partitions, stealing from another event processor\");\nreturn findPartitionToSteal(ownerPartitionMap);\n});\nclaimOwnership(partitionOwnershipMap, ownerPartitionMap, partitionToClaim);\n});\n}", + "context_before": "class PartitionBasedLoadBalancer {\nprivate static final Random RANDOM = new Random();\nprivate final ClientLogger logger = new ClientLogger(PartitionBasedLoadBalancer.class);\nprivate final String eventHubName;\nprivate final String consumerGroupName;\nprivate final CheckpointStore checkpointStore;\nprivate final EventHubAsyncClient eventHubAsyncClient;\nprivate final String ownerId;\nprivate final long inactiveTimeLimitInSeconds;\nprivate final PartitionPumpManager partitionPumpManager;\nprivate final String fullyQualifiedNamespace;\nprivate final Consumer processError;\nprivate final PartitionContext partitionAgnosticContext;\n/**\n* Creates an instance of PartitionBasedLoadBalancer for the given Event Hub name and consumer group.\n*\n* @param checkpointStore The partition manager that this load balancer will use to read/update ownership details.\n* @param eventHubAsyncClient The asynchronous Event Hub client used to consume events.\n* @param eventHubName The Event Hub name the {@link EventProcessorClient} is associated with.\n* @param consumerGroupName The consumer group name the {@link EventProcessorClient} is associated with.\n* @param ownerId The identifier of the {@link EventProcessorClient} that owns this load balancer.\n* @param inactiveTimeLimitInSeconds The time in seconds to wait for an update on an ownership record before\n* assuming the owner of the partition is inactive.\n* @param partitionPumpManager The partition pump manager that keeps track of all EventHubConsumers and partitions\n* that this {@link EventProcessorClient} is processing.\n* @param processError The callback that will be called when an error occurs while running the load balancer.\n*/\nPartitionBasedLoadBalancer(final CheckpointStore checkpointStore,\nfinal EventHubAsyncClient eventHubAsyncClient, final String fullyQualifiedNamespace,\nfinal String eventHubName, final String consumerGroupName, final String ownerId,\nfinal long inactiveTimeLimitInSeconds, final PartitionPumpManager partitionPumpManager,\nfinal Consumer processError) {\nthis.checkpointStore = checkpointStore;\nthis.eventHubAsyncClient = eventHubAsyncClient;\nthis.fullyQualifiedNamespace = fullyQualifiedNamespace;\nthis.eventHubName = eventHubName;\nthis.consumerGroupName = consumerGroupName;\nthis.ownerId = ownerId;\nthis.inactiveTimeLimitInSeconds = inactiveTimeLimitInSeconds;\nthis.partitionPumpManager = partitionPumpManager;\nthis.processError = processError;\nthis.partitionAgnosticContext = new PartitionContext(fullyQualifiedNamespace, eventHubName,\nconsumerGroupName, \"NONE\");\n}\n/**\n* This is the main method responsible for load balancing. This method is expected to be invoked by the {@link\n* EventProcessorClient} periodically. Every call to this method will result in this {@link EventProcessorClient}\n* owning at most one new partition.\n*

\n* The load is considered balanced when no active EventProcessor owns 2 partitions more than any other active\n* EventProcessor. Given that each invocation to this method results in ownership claim of at most one partition,\n* this algorithm converges gradually towards a steady state.\n*

\n* When a new partition is claimed, this method is also responsible for starting a partition pump that creates an\n* {@link EventHubConsumerAsyncClient} for processing events from that partition.\n*/\nvoid loadBalance() {\n/*\n* Retrieve current partition ownership details from the datastore.\n*/\nfinal Mono> partitionOwnershipMono = checkpointStore\n.listOwnership(fullyQualifiedNamespace, eventHubName, consumerGroupName)\n.timeout(Duration.ofMinutes(1))\n.collectMap(PartitionOwnership::getPartitionId, Function.identity());\n/*\n* Retrieve the list of partition ids from the Event Hub.\n*/\nfinal Mono> partitionsMono = eventHubAsyncClient\n.getPartitionIds()\n.timeout(Duration.ofMinutes(1))\n.collectList();\nMono.zip(partitionOwnershipMono, partitionsMono)\n.flatMap(this::loadBalance)\n.subscribe(ignored -> { },\nex -> {\nlogger.warning(Messages.LOAD_BALANCING_FAILED, ex.getMessage());\nErrorContext errorContext = new ErrorContext(partitionAgnosticContext, ex);\nprocessError.accept(errorContext);\n}, () -> logger.info(\"Load balancing completed successfully\"));\n}\n/*\n* This method works with the given partition ownership details and Event Hub partitions to evaluate whether the\n* current Event Processor should take on the responsibility of processing more partitions.\n*/\n/*\n* Check if partition ownership data is valid before proceeding with load balancing.\n*/\nprivate boolean isValid(final Map partitionOwnershipMap) {\nreturn partitionOwnershipMap.values()\n.stream()\n.noneMatch(partitionOwnership -> {\nreturn partitionOwnership.getEventHubName() == null\n|| !partitionOwnership.getEventHubName().equals(this.eventHubName)\n|| partitionOwnership.getConsumerGroup() == null\n|| !partitionOwnership.getConsumerGroup().equals(this.consumerGroupName)\n|| partitionOwnership.getPartitionId() == null\n|| partitionOwnership.getLastModifiedTime() == null\n|| partitionOwnership.getETag() == null;\n});\n}\n/*\n* Find the event processor that owns the maximum number of partitions and steal a random partition\n* from it.\n*/\nprivate String findPartitionToSteal(final Map> ownerPartitionMap) {\nMap.Entry> ownerWithMaxPartitions = ownerPartitionMap.entrySet()\n.stream()\n.max(Comparator.comparingInt(entry -> entry.getValue().size()))\n.get();\nint numberOfPartitions = ownerWithMaxPartitions.getValue().size();\nlogger.info(\"Owner id {} owns {} partitions, stealing a partition from it\", ownerWithMaxPartitions.getKey(),\nnumberOfPartitions);\nreturn ownerWithMaxPartitions.getValue().get(RANDOM.nextInt(numberOfPartitions)).getPartitionId();\n}\n/*\n* When the load is balanced, all active event processors own at least {@code minPartitionsPerEventProcessor}\n* and only {@code numberOfEventProcessorsWithAdditionalPartition} event processors will own 1 additional\n* partition.\n*/\nprivate boolean isLoadBalanced(final int minPartitionsPerEventProcessor,\nfinal int numberOfEventProcessorsWithAdditionalPartition,\nfinal Map> ownerPartitionMap) {\nint count = 0;\nfor (List partitionOwnership : ownerPartitionMap.values()) {\nint numberOfPartitions = partitionOwnership.size();\nif (numberOfPartitions < minPartitionsPerEventProcessor\n|| numberOfPartitions > minPartitionsPerEventProcessor + 1) {\nreturn false;\n}\nif (numberOfPartitions == minPartitionsPerEventProcessor + 1) {\ncount++;\n}\n}\nreturn count == numberOfEventProcessorsWithAdditionalPartition;\n}\n/*\n* This method is called after determining that the load is not balanced. 
This method will evaluate\n* if the current event processor should own more partitions. Specifically, this method returns true if the\n* current event processor owns less than the minimum number of partitions or if it owns the minimum number\n* and no other event processor owns lesser number of partitions than this event processor.\n*/\nprivate boolean shouldOwnMorePartitions(final int minPartitionsPerEventProcessor,\nfinal Map> ownerPartitionMap) {\nint numberOfPartitionsOwned = ownerPartitionMap.get(this.ownerId).size();\nint leastPartitionsOwnedByAnyEventProcessor =\nownerPartitionMap.values().stream().min(Comparator.comparingInt(List::size)).get().size();\nreturn numberOfPartitionsOwned < minPartitionsPerEventProcessor\n|| numberOfPartitionsOwned == leastPartitionsOwnedByAnyEventProcessor;\n}\n/*\n* This method will create a new map of partition id and PartitionOwnership containing only those partitions\n* that are actively owned. All entries in the original map returned by CheckpointStore that haven't been\n* modified for a duration of time greater than the allowed inactivity time limit are assumed to be owned by\n* dead event processors. These will not be included in the map returned by this method.\n*/\nprivate Map removeInactivePartitionOwnerships(\nfinal Map partitionOwnershipMap) {\nreturn partitionOwnershipMap\n.entrySet()\n.stream()\n.filter(entry -> {\nreturn (System.currentTimeMillis() - entry.getValue().getLastModifiedTime() < TimeUnit.SECONDS\n.toMillis(inactiveTimeLimitInSeconds))\n&& !CoreUtils.isNullOrEmpty(entry.getValue().getOwnerId());\n}).collect(Collectors.toMap(Entry::getKey, Entry::getValue));\n}\nprivate void claimOwnership(final Map partitionOwnershipMap, Map> ownerPartitionsMap, final String partitionIdToClaim) {\nlogger.info(\"Attempting to claim ownership of partition {}\", partitionIdToClaim);\nPartitionOwnership ownershipRequest = createPartitionOwnershipRequest(partitionOwnershipMap,\npartitionIdToClaim);\nList partitionsToClaim = new ArrayList<>();\npartitionsToClaim.add(ownershipRequest);\npartitionsToClaim.addAll(partitionPumpManager.getPartitionPumps()\n.keySet()\n.stream()\n.map(partitionId -> createPartitionOwnershipRequest(partitionOwnershipMap, partitionId))\n.collect(Collectors.toList()));\ncheckpointStore\n.claimOwnership(partitionsToClaim)\n.timeout(Duration.ofMinutes(1))\n.doOnNext(partitionOwnership -> logger.info(\"Successfully claimed ownership of partition {}\",\npartitionOwnership.getPartitionId()))\n.doOnError(ex -> logger\n.warning(Messages.FAILED_TO_CLAIM_OWNERSHIP, ownershipRequest.getPartitionId(),\nex.getMessage(), ex))\n.collectList()\n.zipWith(checkpointStore.listCheckpoints(fullyQualifiedNamespace, eventHubName, consumerGroupName)\n.collectMap(checkpoint -> checkpoint.getPartitionId(), Function.identity()))\n.subscribe(ownedPartitionCheckpointsTuple -> {\nownedPartitionCheckpointsTuple.getT1()\n.stream()\n.forEach(po -> partitionPumpManager.startPartitionPump(po,\nownedPartitionCheckpointsTuple.getT2().get(po.getPartitionId())));\n},\nex -> {\nthrow logger.logExceptionAsError(new RuntimeException(\"Error while listing checkpoints\", ex));\n});\n}\nprivate PartitionOwnership createPartitionOwnershipRequest(\nfinal Map partitionOwnershipMap,\nfinal String partitionIdToClaim) {\nPartitionOwnership previousPartitionOwnership = partitionOwnershipMap.get(partitionIdToClaim);\nPartitionOwnership partitionOwnershipRequest = new 
PartitionOwnership()\n.setFullyQualifiedNamespace(this.fullyQualifiedNamespace)\n.setOwnerId(this.ownerId)\n.setPartitionId(partitionIdToClaim)\n.setConsumerGroup(this.consumerGroupName)\n.setEventHubName(this.eventHubName)\n.setETag(previousPartitionOwnership == null ? null : previousPartitionOwnership.getETag());\nreturn partitionOwnershipRequest;\n}\n}", + "context_after": "class PartitionBasedLoadBalancer {\nprivate static final Random RANDOM = new Random();\nprivate final ClientLogger logger = new ClientLogger(PartitionBasedLoadBalancer.class);\nprivate final String eventHubName;\nprivate final String consumerGroupName;\nprivate final CheckpointStore checkpointStore;\nprivate final EventHubAsyncClient eventHubAsyncClient;\nprivate final String ownerId;\nprivate final long inactiveTimeLimitInSeconds;\nprivate final PartitionPumpManager partitionPumpManager;\nprivate final String fullyQualifiedNamespace;\nprivate final Consumer processError;\nprivate final PartitionContext partitionAgnosticContext;\n/**\n* Creates an instance of PartitionBasedLoadBalancer for the given Event Hub name and consumer group.\n*\n* @param checkpointStore The partition manager that this load balancer will use to read/update ownership details.\n* @param eventHubAsyncClient The asynchronous Event Hub client used to consume events.\n* @param eventHubName The Event Hub name the {@link EventProcessorClient} is associated with.\n* @param consumerGroupName The consumer group name the {@link EventProcessorClient} is associated with.\n* @param ownerId The identifier of the {@link EventProcessorClient} that owns this load balancer.\n* @param inactiveTimeLimitInSeconds The time in seconds to wait for an update on an ownership record before\n* assuming the owner of the partition is inactive.\n* @param partitionPumpManager The partition pump manager that keeps track of all EventHubConsumers and partitions\n* that this {@link EventProcessorClient} is processing.\n* @param processError The callback that will be called when an error occurs while running the load balancer.\n*/\nPartitionBasedLoadBalancer(final CheckpointStore checkpointStore,\nfinal EventHubAsyncClient eventHubAsyncClient, final String fullyQualifiedNamespace,\nfinal String eventHubName, final String consumerGroupName, final String ownerId,\nfinal long inactiveTimeLimitInSeconds, final PartitionPumpManager partitionPumpManager,\nfinal Consumer processError) {\nthis.checkpointStore = checkpointStore;\nthis.eventHubAsyncClient = eventHubAsyncClient;\nthis.fullyQualifiedNamespace = fullyQualifiedNamespace;\nthis.eventHubName = eventHubName;\nthis.consumerGroupName = consumerGroupName;\nthis.ownerId = ownerId;\nthis.inactiveTimeLimitInSeconds = inactiveTimeLimitInSeconds;\nthis.partitionPumpManager = partitionPumpManager;\nthis.processError = processError;\nthis.partitionAgnosticContext = new PartitionContext(fullyQualifiedNamespace, eventHubName,\nconsumerGroupName, \"NONE\");\n}\n/**\n* This is the main method responsible for load balancing. This method is expected to be invoked by the {@link\n* EventProcessorClient} periodically. Every call to this method will result in this {@link EventProcessorClient}\n* owning at most one new partition.\n*

\n* The load is considered balanced when no active EventProcessor owns 2 partitions more than any other active\n* EventProcessor. Given that each invocation to this method results in ownership claim of at most one partition,\n* this algorithm converges gradually towards a steady state.\n*

\n* When a new partition is claimed, this method is also responsible for starting a partition pump that creates an\n* {@link EventHubConsumerAsyncClient} for processing events from that partition.\n*/\nvoid loadBalance() {\n/*\n* Retrieve current partition ownership details from the datastore.\n*/\nfinal Mono> partitionOwnershipMono = checkpointStore\n.listOwnership(fullyQualifiedNamespace, eventHubName, consumerGroupName)\n.timeout(Duration.ofMinutes(1))\n.collectMap(PartitionOwnership::getPartitionId, Function.identity());\n/*\n* Retrieve the list of partition ids from the Event Hub.\n*/\nfinal Mono> partitionsMono = eventHubAsyncClient\n.getPartitionIds()\n.timeout(Duration.ofMinutes(1))\n.collectList();\nMono.zip(partitionOwnershipMono, partitionsMono)\n.flatMap(this::loadBalance)\n.subscribe(ignored -> { },\nex -> {\nlogger.warning(Messages.LOAD_BALANCING_FAILED, ex.getMessage());\nErrorContext errorContext = new ErrorContext(partitionAgnosticContext, ex);\nprocessError.accept(errorContext);\n}, () -> logger.info(\"Load balancing completed successfully\"));\n}\n/*\n* This method works with the given partition ownership details and Event Hub partitions to evaluate whether the\n* current Event Processor should take on the responsibility of processing more partitions.\n*/\n/*\n* Check if partition ownership data is valid before proceeding with load balancing.\n*/\nprivate boolean isValid(final Map partitionOwnershipMap) {\nreturn partitionOwnershipMap.values()\n.stream()\n.noneMatch(partitionOwnership -> {\nreturn partitionOwnership.getEventHubName() == null\n|| !partitionOwnership.getEventHubName().equals(this.eventHubName)\n|| partitionOwnership.getConsumerGroup() == null\n|| !partitionOwnership.getConsumerGroup().equals(this.consumerGroupName)\n|| partitionOwnership.getPartitionId() == null\n|| partitionOwnership.getLastModifiedTime() == null\n|| partitionOwnership.getETag() == null;\n});\n}\n/*\n* Find the event processor that owns the maximum number of partitions and steal a random partition\n* from it.\n*/\nprivate String findPartitionToSteal(final Map> ownerPartitionMap) {\nMap.Entry> ownerWithMaxPartitions = ownerPartitionMap.entrySet()\n.stream()\n.max(Comparator.comparingInt(entry -> entry.getValue().size()))\n.get();\nint numberOfPartitions = ownerWithMaxPartitions.getValue().size();\nlogger.info(\"Owner id {} owns {} partitions, stealing a partition from it\", ownerWithMaxPartitions.getKey(),\nnumberOfPartitions);\nreturn ownerWithMaxPartitions.getValue().get(RANDOM.nextInt(numberOfPartitions)).getPartitionId();\n}\n/*\n* When the load is balanced, all active event processors own at least {@code minPartitionsPerEventProcessor}\n* and only {@code numberOfEventProcessorsWithAdditionalPartition} event processors will own 1 additional\n* partition.\n*/\nprivate boolean isLoadBalanced(final int minPartitionsPerEventProcessor,\nfinal int numberOfEventProcessorsWithAdditionalPartition,\nfinal Map> ownerPartitionMap) {\nint count = 0;\nfor (List partitionOwnership : ownerPartitionMap.values()) {\nint numberOfPartitions = partitionOwnership.size();\nif (numberOfPartitions < minPartitionsPerEventProcessor\n|| numberOfPartitions > minPartitionsPerEventProcessor + 1) {\nreturn false;\n}\nif (numberOfPartitions == minPartitionsPerEventProcessor + 1) {\ncount++;\n}\n}\nreturn count == numberOfEventProcessorsWithAdditionalPartition;\n}\n/*\n* This method is called after determining that the load is not balanced. 
This method will evaluate\n* if the current event processor should own more partitions. Specifically, this method returns true if the\n* current event processor owns less than the minimum number of partitions or if it owns the minimum number\n* and no other event processor owns lesser number of partitions than this event processor.\n*/\nprivate boolean shouldOwnMorePartitions(final int minPartitionsPerEventProcessor,\nfinal Map> ownerPartitionMap) {\nint numberOfPartitionsOwned = ownerPartitionMap.get(this.ownerId).size();\nint leastPartitionsOwnedByAnyEventProcessor =\nownerPartitionMap.values().stream().min(Comparator.comparingInt(List::size)).get().size();\nreturn numberOfPartitionsOwned < minPartitionsPerEventProcessor\n|| numberOfPartitionsOwned == leastPartitionsOwnedByAnyEventProcessor;\n}\n/*\n* This method will create a new map of partition id and PartitionOwnership containing only those partitions\n* that are actively owned. All entries in the original map returned by CheckpointStore that haven't been\n* modified for a duration of time greater than the allowed inactivity time limit are assumed to be owned by\n* dead event processors. These will not be included in the map returned by this method.\n*/\nprivate Map removeInactivePartitionOwnerships(\nfinal Map partitionOwnershipMap) {\nreturn partitionOwnershipMap\n.entrySet()\n.stream()\n.filter(entry -> {\nreturn (System.currentTimeMillis() - entry.getValue().getLastModifiedTime() < TimeUnit.SECONDS\n.toMillis(inactiveTimeLimitInSeconds))\n&& !CoreUtils.isNullOrEmpty(entry.getValue().getOwnerId());\n}).collect(Collectors.toMap(Entry::getKey, Entry::getValue));\n}\nprivate void claimOwnership(final Map partitionOwnershipMap, Map> ownerPartitionsMap, final String partitionIdToClaim) {\nlogger.info(\"Attempting to claim ownership of partition {}\", partitionIdToClaim);\nPartitionOwnership ownershipRequest = createPartitionOwnershipRequest(partitionOwnershipMap,\npartitionIdToClaim);\nList partitionsToClaim = new ArrayList<>();\npartitionsToClaim.add(ownershipRequest);\npartitionsToClaim.addAll(partitionPumpManager.getPartitionPumps()\n.keySet()\n.stream()\n.map(partitionId -> createPartitionOwnershipRequest(partitionOwnershipMap, partitionId))\n.collect(Collectors.toList()));\ncheckpointStore\n.claimOwnership(partitionsToClaim)\n.timeout(Duration.ofMinutes(1))\n.doOnNext(partitionOwnership -> logger.info(\"Successfully claimed ownership of partition {}\",\npartitionOwnership.getPartitionId()))\n.doOnError(ex -> logger\n.warning(Messages.FAILED_TO_CLAIM_OWNERSHIP, ownershipRequest.getPartitionId(),\nex.getMessage(), ex))\n.collectList()\n.zipWith(checkpointStore.listCheckpoints(fullyQualifiedNamespace, eventHubName, consumerGroupName)\n.collectMap(checkpoint -> checkpoint.getPartitionId(), Function.identity()))\n.subscribe(ownedPartitionCheckpointsTuple -> {\nownedPartitionCheckpointsTuple.getT1()\n.stream()\n.forEach(po -> partitionPumpManager.startPartitionPump(po,\nownedPartitionCheckpointsTuple.getT2().get(po.getPartitionId())));\n},\nex -> {\nthrow logger.logExceptionAsError(new RuntimeException(\"Error while listing checkpoints\", ex));\n});\n}\nprivate PartitionOwnership createPartitionOwnershipRequest(\nfinal Map partitionOwnershipMap,\nfinal String partitionIdToClaim) {\nPartitionOwnership previousPartitionOwnership = partitionOwnershipMap.get(partitionIdToClaim);\nPartitionOwnership partitionOwnershipRequest = new 
PartitionOwnership()\n.setFullyQualifiedNamespace(this.fullyQualifiedNamespace)\n.setOwnerId(this.ownerId)\n.setPartitionId(partitionIdToClaim)\n.setConsumerGroup(this.consumerGroupName)\n.setEventHubName(this.eventHubName)\n.setETag(previousPartitionOwnership == null ? null : previousPartitionOwnership.getETag());\nreturn partitionOwnershipRequest;\n}\n}" + }, + { + "comment": "is this conventional to use `Context.NONE` as a value even though it's a different type from what `SPAN_CONTEXT_KEY` normally holds?", + "method_body": "private EventData traceMessageSpan(EventData eventData) {\nOptional eventContextData = eventData.getContext().getData(SPAN_CONTEXT_KEY);\nif (eventContextData.isPresent()) {\nreturn eventData;\n} else {\nContext eventContext = eventData.getContext()\n.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE)\n.addData(ENTITY_PATH_KEY, this.entityPath)\n.addData(HOST_NAME_KEY, this.hostname);\neventContext = tracerProvider.startSpan(AZ_TRACING_SERVICE_NAME, eventContext,\nProcessKind.MESSAGE);\nOptional eventDiagnosticIdOptional = eventContext.getData(DIAGNOSTIC_ID_KEY);\nif (eventDiagnosticIdOptional.isPresent()) {\neventData.getProperties().put(DIAGNOSTIC_ID_KEY, eventDiagnosticIdOptional.get().toString());\ntracerProvider.endSpan(eventContext, Signal.complete());\neventData.addContext(SPAN_CONTEXT_KEY, eventContext.getData(SPAN_CONTEXT_KEY).orElse(Context.NONE));\n}\n}\nreturn eventData;\n}", + "target_code": "eventData.addContext(SPAN_CONTEXT_KEY, eventContext.getData(SPAN_CONTEXT_KEY).orElse(Context.NONE));", + "method_body_after": "private EventData traceMessageSpan(EventData eventData) {\nOptional eventContextData = eventData.getContext().getData(SPAN_CONTEXT_KEY);\nif (eventContextData.isPresent()) {\nreturn eventData;\n} else {\nContext eventContext = eventData.getContext()\n.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE)\n.addData(ENTITY_PATH_KEY, this.entityPath)\n.addData(HOST_NAME_KEY, this.hostname);\neventContext = tracerProvider.startSpan(AZ_TRACING_SERVICE_NAME, eventContext,\nProcessKind.MESSAGE);\nOptional eventDiagnosticIdOptional = eventContext.getData(DIAGNOSTIC_ID_KEY);\nif (eventDiagnosticIdOptional.isPresent()) {\neventData.getProperties().put(DIAGNOSTIC_ID_KEY, eventDiagnosticIdOptional.get().toString());\ntracerProvider.endSpan(eventContext, Signal.complete());\nObject spanContext = eventContext.getData(SPAN_CONTEXT_KEY).orElse(null);\nif (spanContext != null) {\neventData.addContext(SPAN_CONTEXT_KEY, spanContext);\n}\n}\n}\nreturn eventData;\n}", + "context_before": "class EventDataBatch {\nprivate static final ClientLogger LOGGER = new ClientLogger(EventDataBatch.class);\nprivate final int maxMessageSize;\nprivate final String partitionKey;\nprivate final ErrorContextProvider contextProvider;\nprivate final List events;\nprivate final byte[] eventBytes;\nprivate final String partitionId;\nprivate int sizeInBytes;\nprivate final TracerProvider tracerProvider;\nprivate final String entityPath;\nprivate final String hostname;\nEventDataBatch(int maxMessageSize, String partitionId, String partitionKey, ErrorContextProvider contextProvider,\nTracerProvider tracerProvider, String entityPath, String hostname) {\nthis.maxMessageSize = maxMessageSize;\nthis.partitionKey = partitionKey;\nthis.partitionId = partitionId;\nthis.contextProvider = contextProvider;\nthis.events = new LinkedList<>();\nthis.sizeInBytes = (maxMessageSize / 65536) * 1024;\nthis.eventBytes = new byte[maxMessageSize];\nthis.tracerProvider = tracerProvider;\nthis.entityPath 
= entityPath;\nthis.hostname = hostname;\n}\n/**\n* Gets the number of {@link EventData events} in the batch.\n*\n* @return The number of {@link EventData events} in the batch.\n*/\npublic int getCount() {\nreturn events.size();\n}\n/**\n* Gets the maximum size, in bytes, of the {@link EventDataBatch}.\n*\n* @return The maximum size, in bytes, of the {@link EventDataBatch}.\n*/\npublic int getMaxSizeInBytes() {\nreturn maxMessageSize;\n}\n/**\n* Gets the size of the {@link EventDataBatch} in bytes.\n*\n* @return the size of the {@link EventDataBatch} in bytes.\n*/\npublic int getSizeInBytes() {\nreturn this.sizeInBytes;\n}\n/**\n* Tries to add an {@link EventData event} to the batch.\n*\n*

This method is not thread-safe; make sure to synchronize the method access when using multiple threads\n* to add events.

\n*\n* @param eventData The {@link EventData} to add to the batch.\n* @return {@code true} if the event could be added to the batch; {@code false} if the event was too large to fit in\n* the batch.\n* @throws IllegalArgumentException if {@code eventData} is {@code null}.\n* @throws AmqpException if {@code eventData} is larger than the maximum size of the {@link EventDataBatch}.\n*/\npublic boolean tryAdd(final EventData eventData) {\nif (eventData == null) {\nthrow LOGGER.logExceptionAsWarning(new NullPointerException(\"eventData cannot be null\"));\n}\nEventData event = tracerProvider.isEnabled() ? traceMessageSpan(eventData) : eventData;\nfinal int size;\ntry {\nsize = getSize(event, events.isEmpty());\n} catch (BufferOverflowException exception) {\nthrow LOGGER.logExceptionAsWarning(new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,\nString.format(Locale.US, \"Size of the payload exceeded maximum message size: %s kb\",\nmaxMessageSize / 1024),\ncontextProvider.getErrorContext()));\n}\nif (this.sizeInBytes + size > this.maxMessageSize) {\nreturn false;\n}\nthis.sizeInBytes += size;\nthis.events.add(event);\nreturn true;\n}\n/**\n* Method to start and end a \"Azure.EventHubs.message\" span and add the \"DiagnosticId\" as a property of the message.\n*\n* @param eventData The Event to add tracing span for.\n* @return the updated event data object.\n*/\nList getEvents() {\nreturn events;\n}\nString getPartitionKey() {\nreturn partitionKey;\n}\nString getPartitionId() {\nreturn partitionId;\n}\nprivate int getSize(final EventData eventData, final boolean isFirst) {\nObjects.requireNonNull(eventData, \"'eventData' cannot be null.\");\nfinal Message amqpMessage = createAmqpMessage(eventData, partitionKey);\nint eventSize = amqpMessage.encode(this.eventBytes, 0, maxMessageSize);\neventSize += 16;\nif (isFirst) {\namqpMessage.setBody(null);\namqpMessage.setApplicationProperties(null);\namqpMessage.setProperties(null);\namqpMessage.setDeliveryAnnotations(null);\neventSize += amqpMessage.encode(this.eventBytes, 0, maxMessageSize);\n}\nreturn eventSize;\n}\n/*\n* Creates the AMQP message represented by the event data\n*/\nprivate static Message createAmqpMessage(EventData event, String partitionKey) {\nfinal AmqpAnnotatedMessage amqpAnnotatedMessage = event.getRawAmqpMessage();\nfinal Message protonJ = MessageUtils.toProtonJMessage(amqpAnnotatedMessage);\nif (partitionKey == null) {\nreturn protonJ;\n}\nif (protonJ.getMessageAnnotations() == null) {\nprotonJ.setMessageAnnotations(new MessageAnnotations(new HashMap<>()));\n}\nfinal MessageAnnotations messageAnnotations = protonJ.getMessageAnnotations();\nmessageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);\nreturn protonJ;\n}\n}", + "context_after": "class EventDataBatch {\nprivate static final ClientLogger LOGGER = new ClientLogger(EventDataBatch.class);\nprivate final int maxMessageSize;\nprivate final String partitionKey;\nprivate final ErrorContextProvider contextProvider;\nprivate final List events;\nprivate final byte[] eventBytes;\nprivate final String partitionId;\nprivate int sizeInBytes;\nprivate final TracerProvider tracerProvider;\nprivate final String entityPath;\nprivate final String hostname;\nEventDataBatch(int maxMessageSize, String partitionId, String partitionKey, ErrorContextProvider contextProvider,\nTracerProvider tracerProvider, String entityPath, String hostname) {\nthis.maxMessageSize = maxMessageSize;\nthis.partitionKey = partitionKey;\nthis.partitionId = 
partitionId;\nthis.contextProvider = contextProvider;\nthis.events = new LinkedList<>();\nthis.sizeInBytes = (maxMessageSize / 65536) * 1024;\nthis.eventBytes = new byte[maxMessageSize];\nthis.tracerProvider = tracerProvider;\nthis.entityPath = entityPath;\nthis.hostname = hostname;\n}\n/**\n* Gets the number of {@link EventData events} in the batch.\n*\n* @return The number of {@link EventData events} in the batch.\n*/\npublic int getCount() {\nreturn events.size();\n}\n/**\n* Gets the maximum size, in bytes, of the {@link EventDataBatch}.\n*\n* @return The maximum size, in bytes, of the {@link EventDataBatch}.\n*/\npublic int getMaxSizeInBytes() {\nreturn maxMessageSize;\n}\n/**\n* Gets the size of the {@link EventDataBatch} in bytes.\n*\n* @return the size of the {@link EventDataBatch} in bytes.\n*/\npublic int getSizeInBytes() {\nreturn this.sizeInBytes;\n}\n/**\n* Tries to add an {@link EventData event} to the batch.\n*\n*

This method is not thread-safe; make sure to synchronize the method access when using multiple threads\n* to add events.

\n*\n* @param eventData The {@link EventData} to add to the batch.\n* @return {@code true} if the event could be added to the batch; {@code false} if the event was too large to fit in\n* the batch.\n* @throws IllegalArgumentException if {@code eventData} is {@code null}.\n* @throws AmqpException if {@code eventData} is larger than the maximum size of the {@link EventDataBatch}.\n*/\npublic boolean tryAdd(final EventData eventData) {\nif (eventData == null) {\nthrow LOGGER.logExceptionAsWarning(new NullPointerException(\"eventData cannot be null\"));\n}\nEventData event = tracerProvider.isEnabled() ? traceMessageSpan(eventData) : eventData;\nfinal int size;\ntry {\nsize = getSize(event, events.isEmpty());\n} catch (BufferOverflowException exception) {\nthrow LOGGER.logExceptionAsWarning(new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,\nString.format(Locale.US, \"Size of the payload exceeded maximum message size: %s kb\",\nmaxMessageSize / 1024),\ncontextProvider.getErrorContext()));\n}\nif (this.sizeInBytes + size > this.maxMessageSize) {\nreturn false;\n}\nthis.sizeInBytes += size;\nthis.events.add(event);\nreturn true;\n}\n/**\n* Method to start and end a \"Azure.EventHubs.message\" span and add the \"DiagnosticId\" as a property of the message.\n*\n* @param eventData The Event to add tracing span for.\n* @return the updated event data object.\n*/\nList getEvents() {\nreturn events;\n}\nString getPartitionKey() {\nreturn partitionKey;\n}\nString getPartitionId() {\nreturn partitionId;\n}\nprivate int getSize(final EventData eventData, final boolean isFirst) {\nObjects.requireNonNull(eventData, \"'eventData' cannot be null.\");\nfinal Message amqpMessage = createAmqpMessage(eventData, partitionKey);\nint eventSize = amqpMessage.encode(this.eventBytes, 0, maxMessageSize);\neventSize += 16;\nif (isFirst) {\namqpMessage.setBody(null);\namqpMessage.setApplicationProperties(null);\namqpMessage.setProperties(null);\namqpMessage.setDeliveryAnnotations(null);\neventSize += amqpMessage.encode(this.eventBytes, 0, maxMessageSize);\n}\nreturn eventSize;\n}\n/*\n* Creates the AMQP message represented by the event data\n*/\nprivate static Message createAmqpMessage(EventData event, String partitionKey) {\nfinal AmqpAnnotatedMessage amqpAnnotatedMessage = event.getRawAmqpMessage();\nfinal Message protonJ = MessageUtils.toProtonJMessage(amqpAnnotatedMessage);\nif (partitionKey == null) {\nreturn protonJ;\n}\nif (protonJ.getMessageAnnotations() == null) {\nprotonJ.setMessageAnnotations(new MessageAnnotations(new HashMap<>()));\n}\nfinal MessageAnnotations messageAnnotations = protonJ.getMessageAnnotations();\nmessageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);\nreturn protonJ;\n}\n}" + }, + { + "comment": "Better to delete code directly.", + "method_body": "public void createTable(CreateTableStmt stmt) throws DdlException {\nString engineName = stmt.getEngineName();\nString dbName = stmt.getDbName();\nString tableName = stmt.getTableName();\nDatabase db = getDb(stmt.getDbName());\nif (db == null) {\nErrorReport.reportDdlException(ErrorCode.ERR_BAD_DB_ERROR, dbName);\n}\nif (!stmt.isExternal()) {\nCatalog.getCurrentSystemInfo().checkClusterCapacity(stmt.getClusterName());\ndb.checkQuota();\n}\ndb.readLock();\ntry {\nif (db.getTable(tableName) != null) {\nif (stmt.isSetIfNotExists()) {\nLOG.info(\"create table[{}] which already exists\", tableName);\nreturn;\n} else {\nErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);\n}\n}\n} finally 
{\ndb.readUnlock();\n}\nif (engineName.equals(\"olap\")) {\ncreateOlapTable(db, stmt);\nreturn;\n} else if (engineName.equals(\"mysql\")) {\ncreateMysqlTable(db, stmt);\nreturn;\n} else if (engineName.equals(\"broker\")) {\nthrow new DdlException(\"Broker table is deprecated.\");\n} else if (engineName.equalsIgnoreCase(\"elasticsearch\") || engineName.equalsIgnoreCase(\"es\")) {\ncreateEsTable(db, stmt);\nreturn;\n} else if (engineName.equalsIgnoreCase(\"hive\")) {\ncreateHiveTable(db, stmt);\nreturn;\n} else {\nErrorReport.reportDdlException(ErrorCode.ERR_UNKNOWN_STORAGE_ENGINE, engineName);\n}\nPreconditions.checkState(false);\n}", + "target_code": "throw new DdlException(\"Broker table is deprecated.\");", + "method_body_after": "public void createTable(CreateTableStmt stmt) throws DdlException {\nString engineName = stmt.getEngineName();\nString dbName = stmt.getDbName();\nString tableName = stmt.getTableName();\nDatabase db = getDb(stmt.getDbName());\nif (db == null) {\nErrorReport.reportDdlException(ErrorCode.ERR_BAD_DB_ERROR, dbName);\n}\nif (!stmt.isExternal()) {\nCatalog.getCurrentSystemInfo().checkClusterCapacity(stmt.getClusterName());\ndb.checkQuota();\n}\ndb.readLock();\ntry {\nif (db.getTable(tableName) != null) {\nif (stmt.isSetIfNotExists()) {\nLOG.info(\"create table[{}] which already exists\", tableName);\nreturn;\n} else {\nErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);\n}\n}\n} finally {\ndb.readUnlock();\n}\nif (engineName.equals(\"olap\")) {\ncreateOlapTable(db, stmt);\nreturn;\n} else if (engineName.equals(\"mysql\")) {\ncreateMysqlTable(db, stmt);\nreturn;\n} else if (engineName.equalsIgnoreCase(\"elasticsearch\") || engineName.equalsIgnoreCase(\"es\")) {\ncreateEsTable(db, stmt);\nreturn;\n} else if (engineName.equalsIgnoreCase(\"hive\")) {\ncreateHiveTable(db, stmt);\nreturn;\n} else {\nErrorReport.reportDdlException(ErrorCode.ERR_UNKNOWN_STORAGE_ENGINE, engineName);\n}\nPreconditions.checkState(false);\n}", + "context_before": "class SingletonHolder {\nprivate static final Catalog INSTANCE = new Catalog();\n}", + "context_after": "class SingletonHolder {\nprivate static final Catalog INSTANCE = new Catalog();\n}" + }, + { + "comment": "This probably belongs to `CalciteUtils`, we have similar casting logic there", + "method_body": "public void processElement(ProcessContext c) {\nassert se != null;\ntry {\nse.evaluate(new Object[] {outputSchema, c, CONTEXT_INSTANCE});\n} catch (InvocationTargetException e) {\nthrow new RuntimeException(\n\"CalcFn failed to evaluate: \" + processElementBlock, e.getCause());\n}\n}", + "target_code": "throw new RuntimeException(", + "method_body_after": "public void processElement(ProcessContext c) {\nassert se != null;\ntry {\nse.evaluate(new Object[] {outputSchema, c, CONTEXT_INSTANCE});\n} catch (InvocationTargetException e) {\nthrow new RuntimeException(\n\"CalcFn failed to evaluate: \" + processElementBlock, e.getCause());\n}\n}", + "context_before": "class CalcFn extends DoFn {\nprivate final String processElementBlock;\nprivate final Schema outputSchema;\nprivate transient @Nullable ScriptEvaluator se = null;\npublic CalcFn(String processElementBlock, Schema outputSchema) {\nthis.processElementBlock = processElementBlock;\nthis.outputSchema = outputSchema;\n}\nScriptEvaluator compile() {\nScriptEvaluator se = new ScriptEvaluator();\nse.setParameters(\nnew String[] {outputSchemaParam.name, processContextParam.name, DataContext.ROOT.name},\nnew Class[] {\n(Class) outputSchemaParam.getType(),\n(Class) 
processContextParam.getType(),\n(Class) DataContext.ROOT.getType()\n});\ntry {\nse.cook(processElementBlock);\n} catch (CompileException e) {\nthrow new RuntimeException(\"Could not compile CalcFn: \" + processElementBlock, e);\n}\nreturn se;\n}\n@Setup\npublic void setup() {\nthis.se = compile();\n}\n@ProcessElement\n}", + "context_after": "class CalcFn extends DoFn {\nprivate final String processElementBlock;\nprivate final Schema outputSchema;\nprivate transient @Nullable ScriptEvaluator se = null;\npublic CalcFn(String processElementBlock, Schema outputSchema) {\nthis.processElementBlock = processElementBlock;\nthis.outputSchema = outputSchema;\n}\nScriptEvaluator compile() {\nScriptEvaluator se = new ScriptEvaluator();\nse.setParameters(\nnew String[] {outputSchemaParam.name, processContextParam.name, DataContext.ROOT.name},\nnew Class[] {\n(Class) outputSchemaParam.getType(),\n(Class) processContextParam.getType(),\n(Class) DataContext.ROOT.getType()\n});\ntry {\nse.cook(processElementBlock);\n} catch (CompileException e) {\nthrow new RuntimeException(\"Could not compile CalcFn: \" + processElementBlock, e);\n}\nreturn se;\n}\n@Setup\npublic void setup() {\nthis.se = compile();\n}\n@ProcessElement\n}" + }, + { + "comment": "Done", + "method_body": "public boolean onEmitFailure(SignalType signalType, Sinks.EmitResult emitResult) {\nlogger.debug(\"SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}\", signalType, emitResult);\nif (emitResult.equals(Sinks.EmitResult.FAIL_NON_SERIALIZED)) {\nreturn true;\n}\nreturn false;\n}", + "target_code": "return false;", + "method_body_after": "public boolean onEmitFailure(SignalType signalType, Sinks.EmitResult emitResult) {\nif (emitResult.equals(Sinks.EmitResult.FAIL_NON_SERIALIZED)) {\nlogger.debug(\"SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}\", signalType, emitResult);\nreturn true;\n}\nlogger.error(\"SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}\", signalType, emitResult);\nreturn false;\n}", + "context_before": "class SerializedEmitFailureHandler implements Sinks.EmitFailureHandler {\n@Override\n}", + "context_after": "class SerializedEmitFailureHandler implements Sinks.EmitFailureHandler {\n@Override\n}" + }, + { + "comment": "Great, I didn't notice this.", + "method_body": "Set getHostSpecs() {\nreturn getHosts().stream()\n.map(host -> new HostSpec(host.getHostName(), Collections.emptyList(),\nhost.getFlavor(), host.primaryClusterMembership()))\n.collect(Collectors.toCollection(LinkedHashSet::new));\n}", + "target_code": "return getHosts().stream()", + "method_body_after": "Set getHostSpecs() {\nreturn getHosts().stream()\n.map(host -> new HostSpec(host.getHostName(), Collections.emptyList(),\nhost.getFlavor(), host.primaryClusterMembership()))\n.collect(Collectors.toCollection(LinkedHashSet::new));\n}", + "context_before": "class HostSystem extends AbstractConfigProducer {\nprivate static Logger log = Logger.getLogger(HostSystem.class.getName());\nprivate Map ipAddresses = new LinkedHashMap<>();\nprivate Map hostnames = new LinkedHashMap<>();\nprivate final Map hostname2host = new LinkedHashMap<>();\nprivate final HostProvisioner provisioner;\npublic HostSystem(AbstractConfigProducer parent, String name, HostProvisioner provisioner) {\nsuper(parent, name);\nthis.provisioner = provisioner;\n}\n/**\n* Returns the host with the given hostname.\n*\n* @param name the hostname of the host.\n* @return the host with the given hostname.\n*/\npublic HostResource getHostByHostname(String 
name) {\nif (\"localhost.fortestingpurposesonly\".equals(name)) {\nString localhost = \"localhost\";\nif ( ! getChildren().containsKey(localhost)) {\nnew Host(this, localhost);\n}\nreturn new HostResource(getChildren().get(localhost));\n}\nreturn hostname2host.get(name);\n}\n/**\n* Returns the canonical name of a given host. This will cache names for faster lookup.\n*\n* @param hostname the hostname to retrieve the canonical hostname for.\n* @return The canonical hostname, or null if unable to resolve.\n* @throws UnknownHostException if the hostname cannot be resolved\n*/\npublic String getCanonicalHostname(String hostname) throws UnknownHostException {\nif ( ! hostnames.containsKey(hostname)) {\nhostnames.put(hostname, lookupCanonicalHostname(hostname));\n}\nreturn hostnames.get(hostname);\n}\n/**\n* Static helper method that looks up the canonical name of a given host.\n*\n* @param hostname the hostname to retrieve the canonical hostname for.\n* @return The canonical hostname, or null if unable to resolve.\n* @throws UnknownHostException if the hostname cannot be resolved\n*/\npublic static String lookupCanonicalHostname(String hostname) throws UnknownHostException {\nreturn java.net.InetAddress.getByName(hostname).getCanonicalHostName();\n}\n/**\n* Returns the if address of a host.\n*\n* @param hostname the hostname to retrieve the ip address for.\n* @return The string representation of the ip-address.\n*/\npublic String getIp(String hostname) {\nif (ipAddresses.containsKey(hostname)) return ipAddresses.get(hostname);\nString ipAddress;\nif (hostname.startsWith(MockRoot.MOCKHOST)) {\nipAddress = \"0.0.0.0\";\n} else {\ntry {\nInetAddress address = InetAddress.getByName(hostname);\nipAddress = address.getHostAddress();\n} catch (java.net.UnknownHostException e) {\nlog.warning(\"Unable to find valid IP address of host: \" + hostname);\nipAddress = \"0.0.0.0\";\n}\n}\nipAddresses.put(hostname, ipAddress);\nreturn ipAddress;\n}\n@Override\npublic String toString() {\nStringBuilder sb = new StringBuilder();\nfor (HostResource host : hostname2host.values()) {\nsb.append(host).append(\",\");\n}\nif (sb.length() > 0) sb.deleteCharAt(sb.length() - 1);\nreturn sb.toString();\n}\npublic HostResource getHost(String hostAlias) {\nHostSpec hostSpec = provisioner.allocateHost(hostAlias);\nfor (HostResource resource : hostname2host.values()) {\nif (resource.getHostName().equals(hostSpec.hostname())) {\nhostSpec.membership().ifPresent(resource::addClusterMembership);\nreturn resource;\n}\n}\nreturn addNewHost(hostSpec);\n}\nprivate HostResource addNewHost(HostSpec hostSpec) {\nHost host = new Host(this, hostSpec.hostname());\nHostResource hostResource = new HostResource(host);\nhostResource.setFlavor(hostSpec.flavor());\nhostSpec.membership().ifPresent(hostResource::addClusterMembership);\nhostname2host.put(host.getHostName(), hostResource);\nlog.log(DEBUG, () -> \"Added new host resource for \" + host.getHostName() + \" with flavor \" + hostResource.getFlavor());\nreturn hostResource;\n}\n/** Returns the hosts owned by the application having this system - i.e all hosts except config servers */\npublic List getHosts() {\nreturn hostname2host.values().stream()\n.filter(host -> !host.getHost().runsConfigServer())\n.collect(Collectors.toList());\n}\npublic Map allocateHosts(ClusterSpec cluster, Capacity capacity, int groups, DeployLogger logger) {\nList allocatedHosts = provisioner.prepare(cluster, capacity, groups, new ProvisionDeployLogger(logger));\nMap retAllocatedHosts = new LinkedHashMap<>();\nfor 
(HostSpec spec : allocatedHosts) {\nHostResource host = getExistingHost(spec).orElseGet(() -> addNewHost(spec));\nretAllocatedHosts.put(host, spec.membership().orElse(null));\nif (! host.getFlavor().isPresent()) {\nhost.setFlavor(spec.flavor());\nlog.log(DEBUG, () -> \"Host resource \" + host.getHostName() + \" had no flavor, setting to \" + spec.flavor());\n}\n}\nretAllocatedHosts.keySet().forEach(host -> log.log(DEBUG, () -> \"Allocated host \" + host.getHostName() + \" with flavor \" + host.getFlavor()));\nreturn retAllocatedHosts;\n}\nprivate Optional getExistingHost(HostSpec key) {\nList hosts = hostname2host.values().stream()\n.filter(resource -> resource.getHostName().equals(key.hostname()))\n.collect(Collectors.toList());\nif (hosts.isEmpty()) {\nreturn Optional.empty();\n} else {\nlog.log(DEBUG, () -> \"Found existing host resource for \" + key.hostname() + \" with flavor \" + hosts.get(0).getFlavor());\nreturn Optional.of(hosts.get(0));\n}\n}\npublic void addBoundHost(HostResource host) {\nhostname2host.put(host.getHostName(), host);\n}\n/** A provision logger which forwards to a deploy logger */\nprivate static class ProvisionDeployLogger implements ProvisionLogger {\nprivate final DeployLogger deployLogger;\npublic ProvisionDeployLogger(DeployLogger deployLogger) {\nthis.deployLogger = deployLogger;\n}\n@Override\npublic void log(Level level, String message) {\ndeployLogger.log(level, message);\n}\n}\n}", + "context_after": "class HostSystem extends AbstractConfigProducer {\nprivate static Logger log = Logger.getLogger(HostSystem.class.getName());\nprivate Map ipAddresses = new LinkedHashMap<>();\nprivate Map hostnames = new LinkedHashMap<>();\nprivate final Map hostname2host = new LinkedHashMap<>();\nprivate final HostProvisioner provisioner;\npublic HostSystem(AbstractConfigProducer parent, String name, HostProvisioner provisioner) {\nsuper(parent, name);\nthis.provisioner = provisioner;\n}\n/**\n* Returns the host with the given hostname.\n*\n* @param name the hostname of the host.\n* @return the host with the given hostname.\n*/\npublic HostResource getHostByHostname(String name) {\nif (\"localhost.fortestingpurposesonly\".equals(name)) {\nString localhost = \"localhost\";\nif ( ! getChildren().containsKey(localhost)) {\nnew Host(this, localhost);\n}\nreturn new HostResource(getChildren().get(localhost));\n}\nreturn hostname2host.get(name);\n}\n/**\n* Returns the canonical name of a given host. This will cache names for faster lookup.\n*\n* @param hostname the hostname to retrieve the canonical hostname for.\n* @return The canonical hostname, or null if unable to resolve.\n* @throws UnknownHostException if the hostname cannot be resolved\n*/\npublic String getCanonicalHostname(String hostname) throws UnknownHostException {\nif ( ! 
hostnames.containsKey(hostname)) {\nhostnames.put(hostname, lookupCanonicalHostname(hostname));\n}\nreturn hostnames.get(hostname);\n}\n/**\n* Static helper method that looks up the canonical name of a given host.\n*\n* @param hostname the hostname to retrieve the canonical hostname for.\n* @return The canonical hostname, or null if unable to resolve.\n* @throws UnknownHostException if the hostname cannot be resolved\n*/\npublic static String lookupCanonicalHostname(String hostname) throws UnknownHostException {\nreturn java.net.InetAddress.getByName(hostname).getCanonicalHostName();\n}\n/**\n* Returns the if address of a host.\n*\n* @param hostname the hostname to retrieve the ip address for.\n* @return The string representation of the ip-address.\n*/\npublic String getIp(String hostname) {\nif (ipAddresses.containsKey(hostname)) return ipAddresses.get(hostname);\nString ipAddress;\nif (hostname.startsWith(MockRoot.MOCKHOST)) {\nipAddress = \"0.0.0.0\";\n} else {\ntry {\nInetAddress address = InetAddress.getByName(hostname);\nipAddress = address.getHostAddress();\n} catch (java.net.UnknownHostException e) {\nlog.warning(\"Unable to find valid IP address of host: \" + hostname);\nipAddress = \"0.0.0.0\";\n}\n}\nipAddresses.put(hostname, ipAddress);\nreturn ipAddress;\n}\n@Override\npublic String toString() {\nStringBuilder sb = new StringBuilder();\nfor (HostResource host : hostname2host.values()) {\nsb.append(host).append(\",\");\n}\nif (sb.length() > 0) sb.deleteCharAt(sb.length() - 1);\nreturn sb.toString();\n}\npublic HostResource getHost(String hostAlias) {\nHostSpec hostSpec = provisioner.allocateHost(hostAlias);\nfor (HostResource resource : hostname2host.values()) {\nif (resource.getHostName().equals(hostSpec.hostname())) {\nhostSpec.membership().ifPresent(resource::addClusterMembership);\nreturn resource;\n}\n}\nreturn addNewHost(hostSpec);\n}\nprivate HostResource addNewHost(HostSpec hostSpec) {\nHost host = new Host(this, hostSpec.hostname());\nHostResource hostResource = new HostResource(host);\nhostResource.setFlavor(hostSpec.flavor());\nhostSpec.membership().ifPresent(hostResource::addClusterMembership);\nhostname2host.put(host.getHostName(), hostResource);\nlog.log(DEBUG, () -> \"Added new host resource for \" + host.getHostName() + \" with flavor \" + hostResource.getFlavor());\nreturn hostResource;\n}\n/** Returns the hosts owned by the application having this system - i.e all hosts except config servers */\npublic List getHosts() {\nreturn hostname2host.values().stream()\n.filter(host -> !host.getHost().runsConfigServer())\n.collect(Collectors.toList());\n}\npublic Map allocateHosts(ClusterSpec cluster, Capacity capacity, int groups, DeployLogger logger) {\nList allocatedHosts = provisioner.prepare(cluster, capacity, groups, new ProvisionDeployLogger(logger));\nMap retAllocatedHosts = new LinkedHashMap<>();\nfor (HostSpec spec : allocatedHosts) {\nHostResource host = getExistingHost(spec).orElseGet(() -> addNewHost(spec));\nretAllocatedHosts.put(host, spec.membership().orElse(null));\nif (! 
host.getFlavor().isPresent()) {\nhost.setFlavor(spec.flavor());\nlog.log(DEBUG, () -> \"Host resource \" + host.getHostName() + \" had no flavor, setting to \" + spec.flavor());\n}\n}\nretAllocatedHosts.keySet().forEach(host -> log.log(DEBUG, () -> \"Allocated host \" + host.getHostName() + \" with flavor \" + host.getFlavor()));\nreturn retAllocatedHosts;\n}\nprivate Optional getExistingHost(HostSpec key) {\nList hosts = hostname2host.values().stream()\n.filter(resource -> resource.getHostName().equals(key.hostname()))\n.collect(Collectors.toList());\nif (hosts.isEmpty()) {\nreturn Optional.empty();\n} else {\nlog.log(DEBUG, () -> \"Found existing host resource for \" + key.hostname() + \" with flavor \" + hosts.get(0).getFlavor());\nreturn Optional.of(hosts.get(0));\n}\n}\npublic void addBoundHost(HostResource host) {\nhostname2host.put(host.getHostName(), host);\n}\n/** A provision logger which forwards to a deploy logger */\nprivate static class ProvisionDeployLogger implements ProvisionLogger {\nprivate final DeployLogger deployLogger;\npublic ProvisionDeployLogger(DeployLogger deployLogger) {\nthis.deployLogger = deployLogger;\n}\n@Override\npublic void log(Level level, String message) {\ndeployLogger.log(level, message);\n}\n}\n}" + }, + { + "comment": "Save the error msg somewhere for user to get?", + "method_body": "private void clearJob() {\nPreconditions.checkState(state == JobState.FINISHED || state == JobState.CANCELLED);\nLOG.debug(\"kill etl job and delete etl files. id: {}, state: {}\", id, state);\nif (state == JobState.CANCELLED) {\nif ((!Strings.isNullOrEmpty(appId) && sparkResource.isYarnMaster()) || sparkAppHandle != null) {\ntry {\n} catch (Exception e) {\nLOG.warn(\"kill etl job failed. id: {}, state: {}\", id, state, e);\n}\n}\n}\nif (!Strings.isNullOrEmpty(etlOutputPath)) {\ntry {\nString outputPath = etlOutputPath.substring(0, etlOutputPath.lastIndexOf(\"/\"));\n} catch (Exception e) {\nLOG.warn(\"delete etl files failed. id: {}, state: {}\", id, state, e);\n}\n}\nLOG.debug(\"clear push tasks and infos that not persist. id: {}, state: {}\", id, state);\nwriteLock();\ntry {\nfor (Map sentReplicaPushTask : tabletToSentReplicaPushTask.values()) {\nfor (PushTask pushTask : sentReplicaPushTask.values()) {\nif (pushTask == null) {\ncontinue;\n}\nAgentTaskQueue.removeTask(pushTask.getBackendId(), pushTask.getTaskType(), pushTask.getSignature());\n}\n}\nhiveTableName = \"\";\nsparkAppHandle = null;\nresourceDesc = null;\ntableToLoadPartitions.clear();\nindexToSchemaHash.clear();\ntabletToSentReplicaPushTask.clear();\nfinishedReplicas.clear();\nquorumTablets.clear();\nfullTablets.clear();\n} finally {\nwriteUnlock();\n}\n}", + "target_code": "} catch (Exception e) {", + "method_body_after": "private void clearJob() {\nPreconditions.checkState(state == JobState.FINISHED || state == JobState.CANCELLED);\nLOG.debug(\"kill etl job and delete etl files. id: {}, state: {}\", id, state);\nif (state == JobState.CANCELLED) {\nif ((!Strings.isNullOrEmpty(appId) && sparkResource.isYarnMaster()) || sparkAppHandle != null) {\ntry {\n} catch (Exception e) {\nLOG.warn(\"kill etl job failed. id: {}, state: {}\", id, state, e);\n}\n}\n}\nif (!Strings.isNullOrEmpty(etlOutputPath)) {\ntry {\nString outputPath = etlOutputPath.substring(0, etlOutputPath.lastIndexOf(\"/\"));\n} catch (Exception e) {\nLOG.warn(\"delete etl files failed. id: {}, state: {}\", id, state, e);\n}\n}\nLOG.debug(\"clear push tasks and infos that not persist. 
id: {}, state: {}\", id, state);\nwriteLock();\ntry {\nfor (Map sentReplicaPushTask : tabletToSentReplicaPushTask.values()) {\nfor (PushTask pushTask : sentReplicaPushTask.values()) {\nif (pushTask == null) {\ncontinue;\n}\nAgentTaskQueue.removeTask(pushTask.getBackendId(), pushTask.getTaskType(), pushTask.getSignature());\n}\n}\nhiveTableName = \"\";\nsparkAppHandle = null;\nresourceDesc = null;\ntableToLoadPartitions.clear();\nindexToSchemaHash.clear();\ntabletToSentReplicaPushTask.clear();\nfinishedReplicas.clear();\nquorumTablets.clear();\nfullTablets.clear();\n} finally {\nwriteUnlock();\n}\n}", + "context_before": "class SparkLoadJob extends BulkLoadJob {\nprivate static final Logger LOG = LogManager.getLogger(SparkLoadJob.class);\npublic static final String BITMAP_DATA_PROPERTY = \"bitmap_data\";\nprivate SparkResource sparkResource;\nprivate long etlStartTimestamp = -1;\nprivate String appId = \"\";\nprivate String etlOutputPath = \"\";\nprivate Map> tabletMetaToFileInfo = Maps.newHashMap();\nprivate String hiveTableName = \"\";\nprivate ResourceDesc resourceDesc;\nprivate SparkAppHandle sparkAppHandle;\nprivate long quorumFinishTimestamp = -1;\nprivate Map> tableToLoadPartitions = Maps.newHashMap();\nprivate Map indexToSchemaHash = Maps.newHashMap();\nprivate Map> tabletToSentReplicaPushTask = Maps.newHashMap();\nprivate Set finishedReplicas = Sets.newHashSet();\nprivate Set quorumTablets = Sets.newHashSet();\nprivate Set fullTablets = Sets.newHashSet();\npublic SparkLoadJob() {\nsuper();\njobType = EtlJobType.SPARK;\n}\npublic SparkLoadJob(long dbId, String label, ResourceDesc resourceDesc, OriginStatement originStmt)\nthrows MetaNotFoundException {\nsuper(dbId, label, originStmt);\nthis.resourceDesc = resourceDesc;\ntimeoutSecond = Config.spark_load_default_timeout_second;\njobType = EtlJobType.SPARK;\n}\npublic String getHiveTableName() {\nreturn hiveTableName;\n}\n@Override\nprotected void setJobProperties(Map properties) throws DdlException {\nsuper.setJobProperties(properties);\nsetResourceInfo();\nif (properties != null) {\nif (properties.containsKey(BITMAP_DATA_PROPERTY)) {\nhiveTableName = properties.get(BITMAP_DATA_PROPERTY);\n}\n}\n}\n/**\n* merge system conf with load stmt\n* @throws DdlException\n*/\nprivate void setResourceInfo() throws DdlException {\nString resourceName = resourceDesc.getName();\nResource oriResource = Catalog.getCurrentCatalog().getResourceMgr().getResource(resourceName);\nif (oriResource == null) {\nthrow new DdlException(\"Resource does not exist. name: \" + resourceName);\n}\nPreconditions.checkState(oriResource instanceof SparkResource);\nsparkResource = ((SparkResource) oriResource).getCopiedResource();\nsparkResource.update(resourceDesc);\nMap brokerProperties = sparkResource.getBrokerPropertiesWithoutPrefix();\nbrokerDesc = new BrokerDesc(sparkResource.getBroker(), brokerProperties);\n}\n/**\n* load job already cancelled or finished, clear job below:\n* 1. kill etl job and delete etl files\n* 2. 
clear push tasks and infos that not persist\n*/\n@Override\npublic void cancelJobWithoutCheck(FailMsg failMsg, boolean abortTxn, boolean needLog) {\nsuper.cancelJobWithoutCheck(failMsg, abortTxn, needLog);\nclearJob();\n}\n@Override\npublic void cancelJob(FailMsg failMsg) throws DdlException {\nsuper.cancelJob(failMsg);\nclearJob();\n}\n@Override\npublic void write(DataOutput out) throws IOException {\nsuper.write(out);\nsparkResource.write(out);\nout.writeLong(etlStartTimestamp);\nText.writeString(out, appId);\nText.writeString(out, etlOutputPath);\nout.writeInt(tabletMetaToFileInfo.size());\nfor (Map.Entry> entry : tabletMetaToFileInfo.entrySet()) {\nText.writeString(out, entry.getKey());\nText.writeString(out, entry.getValue().first);\nout.writeLong(entry.getValue().second);\n}\n}\npublic void readFields(DataInput in) throws IOException {\nsuper.readFields(in);\nsparkResource = (SparkResource) Resource.read(in);\netlStartTimestamp = in.readLong();\nappId = Text.readString(in);\netlOutputPath = Text.readString(in);\nint size = in.readInt();\nfor (int i = 0; i < size; i++) {\nString tabletMetaStr = Text.readString(in);\nPair fileInfo = Pair.create(Text.readString(in), in.readLong());\ntabletMetaToFileInfo.put(tabletMetaStr, fileInfo);\n}\n}\n}", + "context_after": "class SparkLoadJob extends BulkLoadJob {\nprivate static final Logger LOG = LogManager.getLogger(SparkLoadJob.class);\npublic static final String BITMAP_DATA_PROPERTY = \"bitmap_data\";\nprivate SparkResource sparkResource;\nprivate long etlStartTimestamp = -1;\nprivate String appId = \"\";\nprivate String etlOutputPath = \"\";\nprivate Map> tabletMetaToFileInfo = Maps.newHashMap();\nprivate String hiveTableName = \"\";\nprivate ResourceDesc resourceDesc;\nprivate SparkAppHandle sparkAppHandle;\nprivate long quorumFinishTimestamp = -1;\nprivate Map> tableToLoadPartitions = Maps.newHashMap();\nprivate Map indexToSchemaHash = Maps.newHashMap();\nprivate Map> tabletToSentReplicaPushTask = Maps.newHashMap();\nprivate Set finishedReplicas = Sets.newHashSet();\nprivate Set quorumTablets = Sets.newHashSet();\nprivate Set fullTablets = Sets.newHashSet();\npublic SparkLoadJob() {\nsuper();\njobType = EtlJobType.SPARK;\n}\npublic SparkLoadJob(long dbId, String label, ResourceDesc resourceDesc, OriginStatement originStmt)\nthrows MetaNotFoundException {\nsuper(dbId, label, originStmt);\nthis.resourceDesc = resourceDesc;\ntimeoutSecond = Config.spark_load_default_timeout_second;\njobType = EtlJobType.SPARK;\n}\npublic String getHiveTableName() {\nreturn hiveTableName;\n}\n@Override\nprotected void setJobProperties(Map properties) throws DdlException {\nsuper.setJobProperties(properties);\nsetResourceInfo();\nif (properties != null) {\nif (properties.containsKey(BITMAP_DATA_PROPERTY)) {\nhiveTableName = properties.get(BITMAP_DATA_PROPERTY);\n}\n}\n}\n/**\n* merge system conf with load stmt\n* @throws DdlException\n*/\nprivate void setResourceInfo() throws DdlException {\nString resourceName = resourceDesc.getName();\nResource oriResource = Catalog.getCurrentCatalog().getResourceMgr().getResource(resourceName);\nif (oriResource == null) {\nthrow new DdlException(\"Resource does not exist. name: \" + resourceName);\n}\nsparkResource = ((SparkResource) oriResource).getCopiedResource();\nsparkResource.update(resourceDesc);\nMap brokerProperties = sparkResource.getBrokerPropertiesWithoutPrefix();\nbrokerDesc = new BrokerDesc(sparkResource.getBroker(), brokerProperties);\n}\n/**\n* load job already cancelled or finished, clear job below:\n* 1. 
kill etl job and delete etl files\n* 2. clear push tasks and infos that not persist\n*/\n@Override\npublic void cancelJobWithoutCheck(FailMsg failMsg, boolean abortTxn, boolean needLog) {\nsuper.cancelJobWithoutCheck(failMsg, abortTxn, needLog);\nclearJob();\n}\n@Override\npublic void cancelJob(FailMsg failMsg) throws DdlException {\nsuper.cancelJob(failMsg);\nclearJob();\n}\n@Override\npublic void write(DataOutput out) throws IOException {\nsuper.write(out);\nsparkResource.write(out);\nout.writeLong(etlStartTimestamp);\nText.writeString(out, appId);\nText.writeString(out, etlOutputPath);\nout.writeInt(tabletMetaToFileInfo.size());\nfor (Map.Entry> entry : tabletMetaToFileInfo.entrySet()) {\nText.writeString(out, entry.getKey());\nText.writeString(out, entry.getValue().first);\nout.writeLong(entry.getValue().second);\n}\n}\npublic void readFields(DataInput in) throws IOException {\nsuper.readFields(in);\nsparkResource = (SparkResource) Resource.read(in);\netlStartTimestamp = in.readLong();\nappId = Text.readString(in);\netlOutputPath = Text.readString(in);\nint size = in.readInt();\nfor (int i = 0; i < size; i++) {\nString tabletMetaStr = Text.readString(in);\nPair fileInfo = Pair.create(Text.readString(in), in.readLong());\ntabletMetaToFileInfo.put(tabletMetaStr, fileInfo);\n}\n}\n}" + }, + { + "comment": "Currently, we don't have `analyzeBatchSentiment(inputs, requestOptions)` API. So we will add one if needed", + "method_body": "public static void main(String[] args) {\nTextAnalyticsAsyncClient client = new TextAnalyticsClientBuilder()\n.subscriptionKey(\"\")\n.endpoint(\"\")\n.buildAsyncClient();\nList inputs = Arrays.asList(\nnew TextDocumentInput(\"1\", \"The hotel was dark and unclean. The restaurant had amazing gnocchi.\", \"en\"),\nnew TextDocumentInput(\"2\", \"The restaurant had amazing gnocchi. The hotel was dark and unclean.\", \"en\")\n);\nfinal TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true);\nclient.analyzeBatchSentimentWithResponse(inputs, requestOptions).subscribe(\nresult -> {\nDocumentResultCollection analyzedBatchResult = result.getValue();\nSystem.out.printf(\"Model version: %s%n\", analyzedBatchResult.getModelVersion());\nfinal TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();\nSystem.out.printf(\"A batch of document statistics, document count: %s, erroneous document count: %s, transaction count: %s, valid document count: %s.%n\",\nbatchStatistics.getDocumentCount(),\nbatchStatistics.getErroneousDocumentCount(),\nbatchStatistics.getTransactionCount(),\nbatchStatistics.getValidDocumentCount());\nfor (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {\nSystem.out.printf(\"Document ID: %s%n\", analyzeSentimentResult.getId());\nfinal TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();\nif (documentSentiment == null) {\nSystem.out.printf(\"Cannot analyze sentiment. 
Error: %s%n\", analyzeSentimentResult.getError().getMessage());\ncontinue;\n}\nSystem.out.printf(\"Analyzed document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s, length of sentence: %s, offset of sentence: %s.%n\",\ndocumentSentiment.getTextSentimentClass(),\ndocumentSentiment.getPositiveScore(),\ndocumentSentiment.getNeutralScore(),\ndocumentSentiment.getNegativeScore(),\ndocumentSentiment.getLength(),\ndocumentSentiment.getOffset());\nfor (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {\nSystem.out.printf(\"Analyzed sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s, length of sentence: %s, offset of sentence: %s.%n\",\nsentenceSentiment.getTextSentimentClass(),\nsentenceSentiment.getPositiveScore(),\nsentenceSentiment.getNeutralScore(),\nsentenceSentiment.getNegativeScore(),\nsentenceSentiment.getLength(),\nsentenceSentiment.getOffset());\n}\n}\n},\nerror -> System.err.println(\"There was an error analyzing sentiment of the text inputs.\" + error),\n() -> System.out.println(\"Batch of sentiment analyzed.\"));\ntry {\nTimeUnit.SECONDS.sleep(5);\n} catch (InterruptedException ignored) {\n}\n}", + "target_code": "client.analyzeBatchSentimentWithResponse(inputs, requestOptions).subscribe(", + "method_body_after": "public static void main(String[] args) {\nTextAnalyticsAsyncClient client = new TextAnalyticsClientBuilder()\n.subscriptionKey(\"{subscription_key}\")\n.endpoint(\"https:\n.buildAsyncClient();\nList inputs = Arrays.asList(\nnew TextDocumentInput(\"1\", \"The hotel was dark and unclean. The restaurant had amazing gnocchi.\", \"en\"),\nnew TextDocumentInput(\"2\", \"The restaurant had amazing gnocchi. The hotel was dark and unclean.\", \"en\")\n);\nfinal TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true);\nclient.analyzeBatchSentimentWithResponse(inputs, requestOptions).subscribe(\nresult -> {\nDocumentResultCollection analyzedBatchResult = result.getValue();\nSystem.out.printf(\"Model version: %s%n\", analyzedBatchResult.getModelVersion());\nfinal TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();\nSystem.out.printf(\"A batch of document statistics, document count: %s, erroneous document count: %s, transaction count: %s, valid document count: %s.%n\",\nbatchStatistics.getDocumentCount(),\nbatchStatistics.getErroneousDocumentCount(),\nbatchStatistics.getTransactionCount(),\nbatchStatistics.getValidDocumentCount());\nfor (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {\nSystem.out.printf(\"Document ID: %s%n\", analyzeSentimentResult.getId());\nif (analyzeSentimentResult.isError()) {\nSystem.out.printf(\"Cannot analyze sentiment. 
Error: %s%n\", analyzeSentimentResult.getError().getMessage());\ncontinue;\n}\nfinal TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();\nSystem.out.printf(\"Analyzed document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s, length of sentence: %s, offset of sentence: %s.%n\",\ndocumentSentiment.getTextSentimentClass(),\ndocumentSentiment.getPositiveScore(),\ndocumentSentiment.getNeutralScore(),\ndocumentSentiment.getNegativeScore(),\ndocumentSentiment.getLength(),\ndocumentSentiment.getOffset());\nfor (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {\nSystem.out.printf(\"Analyzed sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s, length of sentence: %s, offset of sentence: %s.%n\",\nsentenceSentiment.getTextSentimentClass(),\nsentenceSentiment.getPositiveScore(),\nsentenceSentiment.getNeutralScore(),\nsentenceSentiment.getNegativeScore(),\nsentenceSentiment.getLength(),\nsentenceSentiment.getOffset());\n}\n}\n},\nerror -> System.err.println(\"There was an error analyzing sentiment of the text inputs.\" + error),\n() -> System.out.println(\"Batch of sentiment analyzed.\"));\ntry {\nTimeUnit.SECONDS.sleep(5);\n} catch (InterruptedException ignored) {\n}\n}", + "context_before": "class AnalyzeSentimentBatchDocumentsAsync {\n/**\n* Main method to invoke this demo about how to analyze sentiment of a batch of text inputs.\n*\n* @param args Unused arguments to the program.\n*/\n}", + "context_after": "class AnalyzeSentimentBatchDocumentsAsync {\n/**\n* Main method to invoke this demo about how to analyze the sentiments of a batch input text.\n*\n* @param args Unused arguments to the program.\n*/\n}" + }, + { + "comment": "Changed code to return an empty List instead of throwing the exception. 
", + "method_body": "public List getAsArray(String key) {\nif (key == null) {\nreturn null;\n}\nObject value = configEntries.get(key);\nif (value instanceof List) {\nreturn (List) value;\n}\nthrow new IllegalArgumentException(\"config key '\" + key + \"' does not map to a valid 'array'\");\n}", + "target_code": "throw new IllegalArgumentException(\"config key '\" + key + \"' does not map to a valid 'array'\");", + "method_body_after": "public List getAsArray(String key) {\nif (key == null) {\nreturn null;\n}\nObject value = configEntries.get(key);\nif (value instanceof List) {\nreturn (List) value;\n}\nreturn new ArrayList();\n}", + "context_before": "class ConfigRegistry {\nprivate static final Logger log = LoggerFactory.getLogger(ConfigRegistry.class);\nprivate static final ConfigRegistry configRegistry = new ConfigRegistry();\nprivate static final Pattern encryptedFieldPattern = Pattern.compile(\"@encrypted:\\\\{(.*)\\\\}\");\nprivate static final String ENV_VAR_FORMAT = \"[a-zA-Z_]+[a-zA-Z0-9_]*\";\nprivate Map configEntries = new HashMap<>();\nprivate AESCipherTool cipherTool;\nprivate PrintStream stderr = System.err;\nprivate boolean isInitialized;\nprivate ConfigRegistry() {\n}\npublic static ConfigRegistry getInstance() {\nreturn configRegistry;\n}\n/**\n* Prepares for parsing and loading the configurations by initializing the config processor.\n* @param runtimeParams The Ballerina runtime parameters (-B params)\n* @param configFilePath The path to the user provided Ballerina config file\n* @param ballerinaConfPath Path to the default ballerina.conf file\n* @throws IOException Thrown if there was an error while attempting to process the config file\n*/\npublic void initRegistry(Map runtimeParams, String configFilePath, Path ballerinaConfPath)\nthrows IOException {\nBConfig resolvedConfigs = ConfigProcessor.processConfiguration(runtimeParams, configFilePath,\nballerinaConfPath);\nconfigEntries = resolvedConfigs.getConfigurations();\nif (resolvedConfigs.hasEncryptedValues()) {\nString customSecretFilePath = runtimeParams != null ? runtimeParams.get(\"b7a.config.secret\") : null;\nPath userSecretFile = getUserSecretFile(customSecretFilePath);\ntry {\nif (Files.exists(userSecretFile)) {\ncipherTool = new AESCipherTool(userSecretFile);\n} else {\nstderr.println(\"ballerina: enter secret for config value decryption:\");\ncipherTool = new AESCipherTool(new String(System.console().readPassword()));\n}\n} catch (AESCipherToolException e) {\nString msg = \"failed to initialize the cipher tool: \" + e.getMessage();\nthrow new RuntimeException(msg, e);\n}\n}\naddConfiguration(\"ballerina.source.root\", System.getProperty(\"ballerina.source.root\"));\nisInitialized = true;\n}\n/**\n* Add the specified key/value pair as a configuration entry.\n*\n* @param key The configuration key\n* @param value The configuration value\n*/\npublic void addConfiguration(String key, Object value) {\nconfigEntries.put(key, value);\n}\n/**\n* Add the specified key/value pair as a configuration entry. 
Here, the key will be derived using the tableHeader\n* and tableField parameters.\n*\n* @param tableHeader The name of the TOML table to which the config will be added\n* @param tableField The config key under which the config value will be mapped in the table\n* @param value The configuration value\n*/\npublic void addConfiguration(String tableHeader, String tableField, Object value) {\naddConfiguration(getConfigKey(tableHeader, tableField), value);\n}\n/**\n* Encrypts the value before adding the specified key/value pair as a configuration entry.\n*\n* @param key The configuration key\n* @param value The configuration value\n*/\npublic void addEncryptedConfiguration(String key, String value) {\nif (cipherTool == null) {\nthrow new RuntimeException(\"cipher tool is not initialized.\");\n}\naddConfiguration(key, String.format(\"@encrypted:{%s}\", value));\n}\n/**\n* Encrypts the given value before adding the specified key/value pair as a configuration entry. Here, the key will\n* be derived using the tableHeader and tableField parameters.\n*\n* @param tableHeader The name of the TOML table to which the config will be added\n* @param tableField The config key under which the config value will be mapped in the table\n* @param value The configuration value\n*/\npublic void addEncryptedConfiguration(String tableHeader, String tableField, String value) {\naddEncryptedConfiguration(getConfigKey(tableHeader, tableField), value);\n}\n/**\n* Checks whether the given key is present in the Config Registry.\n*\n* @param key The key to look-up\n* @return Returns true if the key is in the registry; returns false otherwise\n*/\npublic boolean contains(String key) {\nreturn configEntries.containsKey(key);\n}\n/**\n* Checks whether the given header/field combination is present in the Config Registry.\n*\n* @param tableHeader The name of the TOML table to look-up\n* @param tableField The config key under which the config value will be mapped in the table\n* @return Returns true if the header/field pair is in the registry; returns false otherwise\n*/\npublic boolean contains(String tableHeader, String tableField) {\nreturn contains(getConfigKey(tableHeader, tableField));\n}\n/**\n* Retrieves the config value specified by the key.\n*\n* @param key The key of the config entry\n* @return The associated config value if it exists\n*/\npublic Object getConfiguration(String key) {\nif (contains(key)) {\nObject value = configEntries.get(key);\nif (value instanceof String) {\nreturn resolveStringValue((String) value);\n}\nreturn value;\n}\nreturn null;\n}\n/**\n* Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic Object getConfiguration(String sectionHeader, String field) {\nreturn getConfiguration(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieves the config value specified by the key as a boolean.\n*\n* @param key The key of the config entry\n* @return The associated config value if it exists\n*/\npublic boolean getAsBoolean(String key) {\nif (contains(key)) {\nObject value;\ntry {\nvalue = configEntries.get(key);\nif (value instanceof String) {\nreturn Boolean.parseBoolean(resolveStringValue((String) value));\n}\nreturn (Boolean) value;\n} catch (ClassCastException e) {\nthrow new IllegalArgumentException(\"config key '\" + key + \"' does not map to a valid 'boolean'\");\n}\n}\nreturn Boolean.parseBoolean(lookupEnvVars(key));\n}\n/**\n* 
Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic boolean getAsBoolean(String sectionHeader, String field) {\nreturn getAsBoolean(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieves the config value specified by the key as an int.\n*\n* @param key The key of the config entry\n* @return The associated config val\n*/\npublic long getAsInt(String key) {\nif (contains(key)) {\nObject value;\ntry {\nvalue = configEntries.get(key);\nif (value instanceof String) {\nreturn Long.parseLong(resolveStringValue((String) value));\n}\nreturn (Long) value;\n} catch (ClassCastException | NumberFormatException e) {\nthrow new IllegalArgumentException(\"config key '\" + key + \"' does not map to a valid 'int'\");\n}\n}\nreturn Long.parseLong(lookupEnvVars(key));\n}\n/**\n* Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic long getAsInt(String sectionHeader, String field) {\nreturn getAsInt(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieves the config value specified by the key as a float.\n*\n* @param key The key of the config entry\n* @return The associated config val\n*/\npublic double getAsFloat(String key) {\nif (contains(key)) {\nObject value;\ntry {\nvalue = configEntries.get(key);\nif (value instanceof String) {\nreturn Double.parseDouble(resolveStringValue((String) value));\n} else if (value instanceof Long) {\nreturn (Long) value;\n}\nreturn (Double) value;\n} catch (ClassCastException | NumberFormatException e) {\nthrow new IllegalArgumentException(\"config key '\" + key + \"' does not map to a valid 'float'\");\n}\n}\nreturn Double.parseDouble(lookupEnvVars(key));\n}\n/**\n* Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic double getAsFloat(String sectionHeader, String field) {\nreturn getAsFloat(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieves the config value specified by the key as a Map.\n*\n* @param key The key of the config entry\n* @return The associated config val\n*/\npublic Map getAsMap(String key) {\nMap section = new HashMap<>();\nint subStringIndex = key.length() + 1;\nconfigEntries.entrySet().forEach(entry -> {\nif (entry.getKey().startsWith(key)) {\nsection.put(entry.getKey().substring(subStringIndex), entry.getValue());\n}\n});\nreturn section;\n}\n/**\n* Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic Map getAsMap(String sectionHeader, String field) {\nreturn getAsMap(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieves the config value specified by the key as a List.\n*\n* @param key The key of the config entry\n* @return The associated config value\n*/\n/**\n* Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic List getAsArray(String sectionHeader, String field) {\nreturn 
getAsArray(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieve the configuration value mapped by the specified key.\n*\n* @param key The key of the configuration value\n* @return The configuration value as a string\n*/\npublic String getAsString(String key) {\nif (key == null) {\nreturn null;\n}\nif (contains(key)) {\nString value = String.valueOf(configEntries.get(key));\nreturn resolveStringValue(value);\n}\nreturn lookupEnvVars(key);\n}\n/**\n* Retrieve the configuration value mapped by the specified table header and table field.\n*\n* @param tableHeader The name of the TOML table which contains the configuration\n* @param tableField The config key under which the config value is mapped in the table\n* @return The configuration value as a string\n*/\npublic String getAsString(String tableHeader, String tableField) {\nreturn getAsString(getConfigKey(tableHeader, tableField));\n}\n/**\n* Retrieve the configuration value mapped by the specified key.\n*\n* @param key The key of the configuration value\n* @param defaultValue The value to be used if the key is not in the registry\n* @return The configuration value as a string\n*/\npublic String getConfigOrDefault(String key, String defaultValue) {\nString value;\nreturn ((value = getAsString(key)) != null) ? value : defaultValue;\n}\n/**\n* Returns an iterator for the key set of the config registry.\n*\n* @return An iterator for the key set\n*/\npublic Iterator keySetIterator() {\nreturn configEntries.keySet().iterator();\n}\n/**\n* Removes the specified key from the Config Registry.\n*\n* @param key The key for the configuration value to be removed\n* @return The removed configuration value\n*/\npublic Object removeConfiguration(String key) {\nreturn configEntries.remove(key);\n}\n/**\n* Removes all the entries in the Config Registry.\n*/\npublic void resetRegistry() {\nconfigEntries.clear();\n}\nprivate String getConfigKey(String tableHeader, String tableField) {\nreturn tableHeader + \".\" + tableField;\n}\nprivate Path getUserSecretFile(String path) {\nif (path == null) {\nreturn Paths.get(System.getProperty(\"ballerina.source.root\"), \"secret.txt\");\n}\nPath userConfiguredPath = Paths.get(path);\nif (Files.notExists(userConfiguredPath)) {\nthrow new RuntimeException(\"file not found: \" + path);\n}\nreturn userConfiguredPath;\n}\nprivate String resolveStringValue(String value) {\nMatcher base64Matcher = null;\ntry {\nif (value != null) {\nbase64Matcher = encryptedFieldPattern.matcher(value);\nif (base64Matcher.find()) {\nreturn cipherTool.decrypt(base64Matcher.group(1));\n}\n}\n} catch (IllegalArgumentException e) {\nthrow new RuntimeException(\"invalid base 64 value: \" + base64Matcher.group(1));\n} catch (AESCipherToolException e) {\nthrow new RuntimeException(\"failed to retrieve encrypted value: \" + e.getMessage(), e);\n}\nreturn value;\n}\nprivate String lookupEnvVars(String key) {\nString convertedKey = getEnvVarKey(key);\nreturn convertedKey.matches(ENV_VAR_FORMAT) ? 
System.getenv(convertedKey) : null;\n}\nprivate String getEnvVarKey(String configKey) {\nreturn configKey.replace('.', '_');\n}\npublic boolean isInitialized() {\nreturn isInitialized;\n}\npublic void setInitialized(boolean initialized) {\nisInitialized = initialized;\n}\n}", + "context_after": "class ConfigRegistry {\nprivate static final Logger log = LoggerFactory.getLogger(ConfigRegistry.class);\nprivate static final ConfigRegistry configRegistry = new ConfigRegistry();\nprivate static final Pattern encryptedFieldPattern = Pattern.compile(\"@encrypted:\\\\{(.*)\\\\}\");\nprivate static final String ENV_VAR_FORMAT = \"[a-zA-Z_]+[a-zA-Z0-9_]*\";\nprivate Map configEntries = new HashMap<>();\nprivate AESCipherTool cipherTool;\nprivate PrintStream stderr = System.err;\nprivate boolean isInitialized;\nprivate ConfigRegistry() {\n}\npublic static ConfigRegistry getInstance() {\nreturn configRegistry;\n}\n/**\n* Prepares for parsing and loading the configurations by initializing the config processor.\n* @param runtimeParams The Ballerina runtime parameters (-B params)\n* @param configFilePath The path to the user provided Ballerina config file\n* @param ballerinaConfPath Path to the default ballerina.conf file\n* @throws IOException Thrown if there was an error while attempting to process the config file\n*/\npublic void initRegistry(Map runtimeParams, String configFilePath, Path ballerinaConfPath)\nthrows IOException {\nBConfig resolvedConfigs = ConfigProcessor.processConfiguration(runtimeParams, configFilePath,\nballerinaConfPath);\nconfigEntries = resolvedConfigs.getConfigurations();\nif (resolvedConfigs.hasEncryptedValues()) {\nString customSecretFilePath = runtimeParams != null ? runtimeParams.get(\"b7a.config.secret\") : null;\nPath userSecretFile = getUserSecretFile(customSecretFilePath);\ntry {\nif (Files.exists(userSecretFile)) {\ncipherTool = new AESCipherTool(userSecretFile);\n} else {\nstderr.println(\"ballerina: enter secret for config value decryption:\");\ncipherTool = new AESCipherTool(new String(System.console().readPassword()));\n}\n} catch (AESCipherToolException e) {\nString msg = \"failed to initialize the cipher tool: \" + e.getMessage();\nthrow new RuntimeException(msg, e);\n}\n}\naddConfiguration(\"ballerina.source.root\", System.getProperty(\"ballerina.source.root\"));\nisInitialized = true;\n}\n/**\n* Add the specified key/value pair as a configuration entry.\n*\n* @param key The configuration key\n* @param value The configuration value\n*/\npublic void addConfiguration(String key, Object value) {\nconfigEntries.put(key, value);\n}\n/**\n* Add the specified key/value pair as a configuration entry. 
Here, the key will be derived using the tableHeader\n* and tableField parameters.\n*\n* @param tableHeader The name of the TOML table to which the config will be added\n* @param tableField The config key under which the config value will be mapped in the table\n* @param value The configuration value\n*/\npublic void addConfiguration(String tableHeader, String tableField, Object value) {\naddConfiguration(getConfigKey(tableHeader, tableField), value);\n}\n/**\n* Encrypts the value before adding the specified key/value pair as a configuration entry.\n*\n* @param key The configuration key\n* @param value The configuration value\n*/\npublic void addEncryptedConfiguration(String key, String value) {\nif (cipherTool == null) {\nthrow new RuntimeException(\"cipher tool is not initialized.\");\n}\naddConfiguration(key, String.format(\"@encrypted:{%s}\", value));\n}\n/**\n* Encrypts the given value before adding the specified key/value pair as a configuration entry. Here, the key will\n* be derived using the tableHeader and tableField parameters.\n*\n* @param tableHeader The name of the TOML table to which the config will be added\n* @param tableField The config key under which the config value will be mapped in the table\n* @param value The configuration value\n*/\npublic void addEncryptedConfiguration(String tableHeader, String tableField, String value) {\naddEncryptedConfiguration(getConfigKey(tableHeader, tableField), value);\n}\n/**\n* Checks whether the given key is present in the Config Registry.\n*\n* @param key The key to look-up\n* @return Returns true if the key is in the registry; returns false otherwise\n*/\npublic boolean contains(String key) {\nreturn configEntries.containsKey(key);\n}\n/**\n* Checks whether the given header/field combination is present in the Config Registry.\n*\n* @param tableHeader The name of the TOML table to look-up\n* @param tableField The config key under which the config value will be mapped in the table\n* @return Returns true if the header/field pair is in the registry; returns false otherwise\n*/\npublic boolean contains(String tableHeader, String tableField) {\nreturn contains(getConfigKey(tableHeader, tableField));\n}\n/**\n* Retrieves the config value specified by the key.\n*\n* @param key The key of the config entry\n* @return The associated config value if it exists\n*/\npublic Object getConfiguration(String key) {\nif (contains(key)) {\nObject value = configEntries.get(key);\nif (value instanceof String) {\nreturn resolveStringValue((String) value);\n}\nreturn value;\n}\nreturn null;\n}\n/**\n* Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic Object getConfiguration(String sectionHeader, String field) {\nreturn getConfiguration(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieves the config value specified by the key as a boolean.\n*\n* @param key The key of the config entry\n* @return The associated config value if it exists\n*/\npublic boolean getAsBoolean(String key) {\nif (contains(key)) {\nObject value;\ntry {\nvalue = configEntries.get(key);\nif (value instanceof String) {\nreturn Boolean.parseBoolean(resolveStringValue((String) value));\n}\nreturn (Boolean) value;\n} catch (ClassCastException e) {\nthrow new IllegalArgumentException(\"config key '\" + key + \"' does not map to a valid 'boolean'\");\n}\n}\nreturn Boolean.parseBoolean(lookupEnvVars(key));\n}\n/**\n* 
Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic boolean getAsBoolean(String sectionHeader, String field) {\nreturn getAsBoolean(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieves the config value specified by the key as an int.\n*\n* @param key The key of the config entry\n* @return The associated config val\n*/\npublic long getAsInt(String key) {\nif (contains(key)) {\nObject value;\ntry {\nvalue = configEntries.get(key);\nif (value instanceof String) {\nreturn Long.parseLong(resolveStringValue((String) value));\n}\nreturn (Long) value;\n} catch (ClassCastException | NumberFormatException e) {\nthrow new IllegalArgumentException(\"config key '\" + key + \"' does not map to a valid 'int'\");\n}\n}\nreturn Long.parseLong(lookupEnvVars(key));\n}\n/**\n* Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic long getAsInt(String sectionHeader, String field) {\nreturn getAsInt(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieves the config value specified by the key as a float.\n*\n* @param key The key of the config entry\n* @return The associated config val\n*/\npublic double getAsFloat(String key) {\nif (contains(key)) {\nObject value;\ntry {\nvalue = configEntries.get(key);\nif (value instanceof String) {\nreturn Double.parseDouble(resolveStringValue((String) value));\n} else if (value instanceof Long) {\nreturn (Long) value;\n}\nreturn (Double) value;\n} catch (ClassCastException | NumberFormatException e) {\nthrow new IllegalArgumentException(\"config key '\" + key + \"' does not map to a valid 'float'\");\n}\n}\nreturn Double.parseDouble(lookupEnvVars(key));\n}\n/**\n* Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic double getAsFloat(String sectionHeader, String field) {\nreturn getAsFloat(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieves the config value specified by the key as a Map.\n*\n* @param key The key of the config entry\n* @return The associated config val\n*/\npublic Map getAsMap(String key) {\nMap section = new HashMap<>();\nint subStringIndex = key.length() + 1;\nconfigEntries.entrySet().forEach(entry -> {\nif (entry.getKey().startsWith(key)) {\nsection.put(entry.getKey().substring(subStringIndex), entry.getValue());\n}\n});\nreturn section;\n}\n/**\n* Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic Map getAsMap(String sectionHeader, String field) {\nreturn getAsMap(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieves the config value specified by the key as a List.\n*\n* @param key The key of the config entry\n* @return The associated config value\n*/\n/**\n* Retrieves the config value specified by the section header and the field.\n*\n* @param sectionHeader The header name\n* @param field The field in the section\n* @return The associated config value if it exists\n*/\npublic List getAsArray(String sectionHeader, String field) {\nreturn 
getAsArray(getConfigKey(sectionHeader, field));\n}\n/**\n* Retrieve the configuration value mapped by the specified key.\n*\n* @param key The key of the configuration value\n* @return The configuration value as a string\n*/\npublic String getAsString(String key) {\nif (key == null) {\nreturn null;\n}\nif (contains(key)) {\nString value = String.valueOf(configEntries.get(key));\nreturn resolveStringValue(value);\n}\nreturn lookupEnvVars(key);\n}\n/**\n* Retrieve the configuration value mapped by the specified table header and table field.\n*\n* @param tableHeader The name of the TOML table which contains the configuration\n* @param tableField The config key under which the config value is mapped in the table\n* @return The configuration value as a string\n*/\npublic String getAsString(String tableHeader, String tableField) {\nreturn getAsString(getConfigKey(tableHeader, tableField));\n}\n/**\n* Retrieve the configuration value mapped by the specified key.\n*\n* @param key The key of the configuration value\n* @param defaultValue The value to be used if the key is not in the registry\n* @return The configuration value as a string\n*/\npublic String getConfigOrDefault(String key, String defaultValue) {\nString value;\nreturn ((value = getAsString(key)) != null) ? value : defaultValue;\n}\n/**\n* Returns an iterator for the key set of the config registry.\n*\n* @return An iterator for the key set\n*/\npublic Iterator keySetIterator() {\nreturn configEntries.keySet().iterator();\n}\n/**\n* Removes the specified key from the Config Registry.\n*\n* @param key The key for the configuration value to be removed\n* @return The removed configuration value\n*/\npublic Object removeConfiguration(String key) {\nreturn configEntries.remove(key);\n}\n/**\n* Removes all the entries in the Config Registry.\n*/\npublic void resetRegistry() {\nconfigEntries.clear();\n}\nprivate String getConfigKey(String tableHeader, String tableField) {\nreturn tableHeader + \".\" + tableField;\n}\nprivate Path getUserSecretFile(String path) {\nif (path == null) {\nreturn Paths.get(System.getProperty(\"ballerina.source.root\"), \"secret.txt\");\n}\nPath userConfiguredPath = Paths.get(path);\nif (Files.notExists(userConfiguredPath)) {\nthrow new RuntimeException(\"file not found: \" + path);\n}\nreturn userConfiguredPath;\n}\nprivate String resolveStringValue(String value) {\nMatcher base64Matcher = null;\ntry {\nif (value != null) {\nbase64Matcher = encryptedFieldPattern.matcher(value);\nif (base64Matcher.find()) {\nreturn cipherTool.decrypt(base64Matcher.group(1));\n}\n}\n} catch (IllegalArgumentException e) {\nthrow new RuntimeException(\"invalid base 64 value: \" + base64Matcher.group(1));\n} catch (AESCipherToolException e) {\nthrow new RuntimeException(\"failed to retrieve encrypted value: \" + e.getMessage(), e);\n}\nreturn value;\n}\nprivate String lookupEnvVars(String key) {\nString convertedKey = getEnvVarKey(key);\nreturn convertedKey.matches(ENV_VAR_FORMAT) ? 
System.getenv(convertedKey) : null;\n}\nprivate String getEnvVarKey(String configKey) {\nreturn configKey.replace('.', '_');\n}\npublic boolean isInitialized() {\nreturn isInitialized;\n}\npublic void setInitialized(boolean initialized) {\nisInitialized = initialized;\n}\n}" + }, + { + "comment": "That was the creeping feeling I had.", + "method_body": "protected void destroy() {\nlaterExecutor.shutdown();\ndocprocServiceRegistry.allComponents().forEach(docprocService -> docprocService.deconstruct());\n}", + "target_code": "docprocServiceRegistry.allComponents().forEach(docprocService -> docprocService.deconstruct());", + "method_body_after": "protected void destroy() {\nlaterExecutor.shutdown();\ndocprocServiceRegistry.allComponents().forEach(docprocService -> docprocService.deconstruct());\n}", + "context_before": "class DocumentProcessingHandler extends AbstractRequestHandler {\nprivate static Logger log = Logger.getLogger(DocumentProcessingHandler.class.getName());\nprivate final ComponentRegistry docprocServiceRegistry;\nprivate final ComponentRegistry docFactoryRegistry;\nprivate final ChainRegistry chainRegistry = new ChainRegistry<>();\nprivate final ScheduledThreadPoolExecutor laterExecutor =\nnew ScheduledThreadPoolExecutor(2, new DaemonThreadFactory(\"docproc-later-\"));\nprivate ContainerDocumentConfig containerDocConfig;\nprivate final DocumentTypeManager documentTypeManager;\npublic DocumentProcessingHandler(ComponentRegistry docprocServiceRegistry,\nComponentRegistry documentProcessorComponentRegistry,\nComponentRegistry docFactoryRegistry,\nint numThreads,\nDocumentTypeManager documentTypeManager,\nChainsModel chainsModel, SchemaMap schemaMap, Statistics statistics,\nMetric metric,\nContainerDocumentConfig containerDocConfig) {\nthis.docprocServiceRegistry = docprocServiceRegistry;\nthis.docFactoryRegistry = docFactoryRegistry;\nthis.containerDocConfig = containerDocConfig;\nthis.documentTypeManager = documentTypeManager;\nDocprocService.schemaMap = schemaMap;\nlaterExecutor.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);\nlaterExecutor.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);\nif (chainsModel != null) {\nprepareChainRegistry(chainRegistry, chainsModel, documentProcessorComponentRegistry);\nfor (Chain chain : chainRegistry.allComponents()) {\nlog.config(\"Setting up call stack for chain \" + chain.getId());\nDocprocService service = new DocprocService(chain.getId(), convertToCallStack(chain, statistics, metric), documentTypeManager, computeNumThreads(numThreads));\nservice.setInService(true);\ndocprocServiceRegistry.register(service.getId(), service);\n}\n}\n}\nprivate static int computeNumThreads(int maxThreads) {\nreturn (maxThreads > 0) ? 
maxThreads : Runtime.getRuntime().availableProcessors();\n}\npublic DocumentProcessingHandler(ComponentRegistry docprocServiceRegistry,\nComponentRegistry documentProcessorComponentRegistry,\nComponentRegistry docFactoryRegistry,\nDocumentProcessingHandlerParameters params) {\nthis(docprocServiceRegistry, documentProcessorComponentRegistry, docFactoryRegistry,\nparams.getMaxNumThreads(),\nparams.getDocumentTypeManager(), params.getChainsModel(), params.getSchemaMap(),\nparams.getStatisticsManager(),\nparams.getMetric(),\nparams.getContainerDocConfig());\n}\n@Inject\npublic DocumentProcessingHandler(ComponentRegistry documentProcessorComponentRegistry,\nComponentRegistry docFactoryRegistry,\nChainsConfig chainsConfig,\nSchemamappingConfig mappingConfig,\nDocumentmanagerConfig docManConfig,\nDocprocConfig docprocConfig,\nContainerMbusConfig containerMbusConfig,\nContainerDocumentConfig containerDocConfig,\nStatistics manager,\nMetric metric) {\nthis(new ComponentRegistry<>(),\ndocumentProcessorComponentRegistry, docFactoryRegistry, new DocumentProcessingHandlerParameters().setMaxNumThreads\n(docprocConfig.numthreads())\n.setDocumentTypeManager(new DocumentTypeManager(docManConfig))\n.setChainsModel(buildFromConfig(chainsConfig)).setSchemaMap(configureMapping(mappingConfig))\n.setStatisticsManager(manager)\n.setMetric(metric)\n.setContainerDocumentConfig(containerDocConfig));\n}\n@Override\npublic ComponentRegistry getDocprocServiceRegistry() {\nreturn docprocServiceRegistry;\n}\npublic ChainRegistry getChains() {\nreturn chainRegistry;\n}\nprivate static SchemaMap configureMapping(SchemamappingConfig mappingConfig) {\nSchemaMap map = new SchemaMap();\nmap.configure(mappingConfig);\nreturn map;\n}\nprivate static CallStack convertToCallStack(Chain chain, Statistics statistics, Metric metric) {\nCallStack stack = new CallStack(chain.getId().stringValue(), statistics, metric);\nfor (DocumentProcessor processor : chain.components()) {\nprocessor.getFieldMap().putAll(DocprocService.schemaMap.chainMap(chain.getId().stringValue(), processor.getId().stringValue()));\nstack.addLast(processor);\n}\nreturn stack;\n}\n@Override\npublic ContentChannel handleRequest(Request request, ResponseHandler handler) {\nRequestContext requestContext;\nif (request instanceof MbusRequest) {\nrequestContext = new MbusRequestContext((MbusRequest) request, handler, docprocServiceRegistry, docFactoryRegistry, containerDocConfig);\n} else {\nthrow new IllegalArgumentException(\"Request type not supported: \" + request);\n}\nif (!requestContext.isProcessable()) {\nrequestContext.skip();\nreturn null;\n}\nString serviceName = requestContext.getServiceName();\nDocprocService service = docprocServiceRegistry.getComponent(serviceName);\nif (service == null) {\nlog.log(Level.SEVERE, \"DocprocService for session '\" + serviceName +\n\"' not found, returning request '\" + requestContext + \"'.\");\nrequestContext.processingFailed(RequestContext.ErrorCode.ERROR_PROCESSING_FAILURE,\n\"DocprocService \" + serviceName + \" not found.\");\nreturn null;\n} else if (service.getExecutor().getCallStack().size() == 0) {\nrequestContext.skip();\nreturn null;\n}\nDocumentProcessingTask task = new DocumentProcessingTask(requestContext, this, service, service.getThreadPoolExecutor());\ntask.submit();\nreturn null;\n}\nvoid submit(DocumentProcessingTask task, long delay) {\nLaterTimerTask timerTask = new LaterTimerTask(task, delay);\nlaterExecutor.schedule(timerTask, delay, TimeUnit.MILLISECONDS);\n}\nprivate class LaterTimerTask extends 
TimerTask {\nprivate DocumentProcessingTask processingTask;\nprivate long delay;\nprivate LaterTimerTask(DocumentProcessingTask processingTask, long delay) {\nthis.delay = delay;\nlog.log(Level.FINE, \"Enqueueing in \" + delay + \" ms due to Progress.LATER: \" + processingTask);\nthis.processingTask = processingTask;\n}\n@Override\npublic void run() {\nlog.log(Level.FINE, \"Submitting after having waited \" + delay + \" ms in LATER queue: \" + processingTask);\nprocessingTask.submit();\n}\n}\npublic DocumentTypeManager getDocumentTypeManager() {\nreturn documentTypeManager;\n}\n}", + "context_after": "class DocumentProcessingHandler extends AbstractRequestHandler {\nprivate static Logger log = Logger.getLogger(DocumentProcessingHandler.class.getName());\nprivate final ComponentRegistry docprocServiceRegistry;\nprivate final ComponentRegistry docFactoryRegistry;\nprivate final ChainRegistry chainRegistry = new ChainRegistry<>();\nprivate final ScheduledThreadPoolExecutor laterExecutor =\nnew ScheduledThreadPoolExecutor(2, new DaemonThreadFactory(\"docproc-later-\"));\nprivate ContainerDocumentConfig containerDocConfig;\nprivate final DocumentTypeManager documentTypeManager;\npublic DocumentProcessingHandler(ComponentRegistry docprocServiceRegistry,\nComponentRegistry documentProcessorComponentRegistry,\nComponentRegistry docFactoryRegistry,\nint numThreads,\nDocumentTypeManager documentTypeManager,\nChainsModel chainsModel, SchemaMap schemaMap, Statistics statistics,\nMetric metric,\nContainerDocumentConfig containerDocConfig) {\nthis.docprocServiceRegistry = docprocServiceRegistry;\nthis.docFactoryRegistry = docFactoryRegistry;\nthis.containerDocConfig = containerDocConfig;\nthis.documentTypeManager = documentTypeManager;\nDocprocService.schemaMap = schemaMap;\nlaterExecutor.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);\nlaterExecutor.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);\nif (chainsModel != null) {\nprepareChainRegistry(chainRegistry, chainsModel, documentProcessorComponentRegistry);\nfor (Chain chain : chainRegistry.allComponents()) {\nlog.config(\"Setting up call stack for chain \" + chain.getId());\nDocprocService service = new DocprocService(chain.getId(), convertToCallStack(chain, statistics, metric), documentTypeManager, computeNumThreads(numThreads));\nservice.setInService(true);\ndocprocServiceRegistry.register(service.getId(), service);\n}\n}\n}\nprivate static int computeNumThreads(int maxThreads) {\nreturn (maxThreads > 0) ? 
maxThreads : Runtime.getRuntime().availableProcessors();\n}\npublic DocumentProcessingHandler(ComponentRegistry docprocServiceRegistry,\nComponentRegistry documentProcessorComponentRegistry,\nComponentRegistry docFactoryRegistry,\nDocumentProcessingHandlerParameters params) {\nthis(docprocServiceRegistry, documentProcessorComponentRegistry, docFactoryRegistry,\nparams.getMaxNumThreads(),\nparams.getDocumentTypeManager(), params.getChainsModel(), params.getSchemaMap(),\nparams.getStatisticsManager(),\nparams.getMetric(),\nparams.getContainerDocConfig());\n}\n@Inject\npublic DocumentProcessingHandler(ComponentRegistry documentProcessorComponentRegistry,\nComponentRegistry docFactoryRegistry,\nChainsConfig chainsConfig,\nSchemamappingConfig mappingConfig,\nDocumentmanagerConfig docManConfig,\nDocprocConfig docprocConfig,\nContainerMbusConfig containerMbusConfig,\nContainerDocumentConfig containerDocConfig,\nStatistics manager,\nMetric metric) {\nthis(new ComponentRegistry<>(),\ndocumentProcessorComponentRegistry, docFactoryRegistry, new DocumentProcessingHandlerParameters().setMaxNumThreads\n(docprocConfig.numthreads())\n.setDocumentTypeManager(new DocumentTypeManager(docManConfig))\n.setChainsModel(buildFromConfig(chainsConfig)).setSchemaMap(configureMapping(mappingConfig))\n.setStatisticsManager(manager)\n.setMetric(metric)\n.setContainerDocumentConfig(containerDocConfig));\n}\n@Override\npublic ComponentRegistry getDocprocServiceRegistry() {\nreturn docprocServiceRegistry;\n}\npublic ChainRegistry getChains() {\nreturn chainRegistry;\n}\nprivate static SchemaMap configureMapping(SchemamappingConfig mappingConfig) {\nSchemaMap map = new SchemaMap();\nmap.configure(mappingConfig);\nreturn map;\n}\nprivate static CallStack convertToCallStack(Chain chain, Statistics statistics, Metric metric) {\nCallStack stack = new CallStack(chain.getId().stringValue(), statistics, metric);\nfor (DocumentProcessor processor : chain.components()) {\nprocessor.getFieldMap().putAll(DocprocService.schemaMap.chainMap(chain.getId().stringValue(), processor.getId().stringValue()));\nstack.addLast(processor);\n}\nreturn stack;\n}\n@Override\npublic ContentChannel handleRequest(Request request, ResponseHandler handler) {\nRequestContext requestContext;\nif (request instanceof MbusRequest) {\nrequestContext = new MbusRequestContext((MbusRequest) request, handler, docprocServiceRegistry, docFactoryRegistry, containerDocConfig);\n} else {\nthrow new IllegalArgumentException(\"Request type not supported: \" + request);\n}\nif (!requestContext.isProcessable()) {\nrequestContext.skip();\nreturn null;\n}\nString serviceName = requestContext.getServiceName();\nDocprocService service = docprocServiceRegistry.getComponent(serviceName);\nif (service == null) {\nlog.log(Level.SEVERE, \"DocprocService for session '\" + serviceName +\n\"' not found, returning request '\" + requestContext + \"'.\");\nrequestContext.processingFailed(RequestContext.ErrorCode.ERROR_PROCESSING_FAILURE,\n\"DocprocService \" + serviceName + \" not found.\");\nreturn null;\n} else if (service.getExecutor().getCallStack().size() == 0) {\nrequestContext.skip();\nreturn null;\n}\nDocumentProcessingTask task = new DocumentProcessingTask(requestContext, this, service, service.getThreadPoolExecutor());\ntask.submit();\nreturn null;\n}\nvoid submit(DocumentProcessingTask task, long delay) {\nLaterTimerTask timerTask = new LaterTimerTask(task, delay);\nlaterExecutor.schedule(timerTask, delay, TimeUnit.MILLISECONDS);\n}\nprivate class LaterTimerTask extends 
TimerTask {\nprivate DocumentProcessingTask processingTask;\nprivate long delay;\nprivate LaterTimerTask(DocumentProcessingTask processingTask, long delay) {\nthis.delay = delay;\nlog.log(Level.FINE, \"Enqueueing in \" + delay + \" ms due to Progress.LATER: \" + processingTask);\nthis.processingTask = processingTask;\n}\n@Override\npublic void run() {\nlog.log(Level.FINE, \"Submitting after having waited \" + delay + \" ms in LATER queue: \" + processingTask);\nprocessingTask.submit();\n}\n}\npublic DocumentTypeManager getDocumentTypeManager() {\nreturn documentTypeManager;\n}\n}" + }, + { + "comment": "A switch-case would better suite for this. We may have to go for error-recovery if the next token is neither `ascending`,`descending` nor a valid-token that is possible to come after the order-key", + "method_body": "private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {\nswitch (peek(lookahead + 1).kind) {\ncase IDENTIFIER_TOKEN:\nSyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;\nswitch (tokenAfterIdentifier) {\ncase ON_KEYWORD:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\ncase QUESTION_MARK_TOKEN:\nreturn false;\ndefault:\nreturn false;\n}\ncase ON_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse listener declaration, given the qualifier.\n*
\n* \n* listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;\n* \n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @return Parsed node\n*/\nprivate STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.LISTENER_DECL);\nSTNode listenerKeyword = parseListenerKeyword();\nif (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode listenerDecl =\nparseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);\nendContext();\nreturn listenerDecl;\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse listener keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseListenerKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LISTENER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse constant declaration, given the qualifier.\n*
\n* module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @return Parsed node\n*/\nprivate STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.CONSTANT_DECL);\nSTNode constKeyword = parseConstantKeyword();\nSTNode constDecl = parseConstDecl(metadata, qualifier, constKeyword);\nendContext();\nreturn constDecl;\n}\n/**\n* Parse the components that follows after the const keyword of a constant declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {\nSTToken nextToken = peek();\nreturn parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword);\n}\nprivate STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode keyword) {\nswitch (nextTokenKind) {\ncase ANNOTATION_KEYWORD:\nswitchContext(ParserRuleContext.ANNOTATION_DECL);\nreturn parseAnnotationDeclaration(metadata, qualifier, keyword);\ncase IDENTIFIER_TOKEN:\nreturn parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, keyword, false);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, keyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseConstDeclFromType(solution.tokenKind, metadata, qualifier, keyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\nprivate STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nboolean isListener) {\nSTNode varNameOrTypeName = parseStatementStartIdentifier();\nSTNode constDecl =\nparseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);\nreturn constDecl;\n}\n/**\n* Parse the component that follows the first identifier in a const decl. 
The identifier\n* can be either the type-name (a user defined type) or the var-name where the type-name\n* is not present.\n*\n* @param qualifier Qualifier that precedes the constant decl\n* @param keyword Keyword\n* @param typeOrVarName Identifier that follows the const-keyword\n* @return Parsed node\n*/\nprivate STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,\nSTNode typeOrVarName, boolean isListener) {\nif (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode type = typeOrVarName;\nSTNode variableName = parseVariableName();\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nSTToken token = peek();\nreturn parseConstantOrListenerDeclRhs(token.kind, metadata, qualifier, keyword, typeOrVarName, isListener);\n}\nprivate STNode parseConstantOrListenerDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode keyword, STNode typeOrVarName, boolean isListener) {\nSTNode type;\nSTNode variableName;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ntype = typeOrVarName;\nvariableName = parseVariableName();\nbreak;\ncase EQUAL_TOKEN:\nvariableName = ((STSimpleNameReferenceNode) typeOrVarName).name;\ntype = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword,\ntypeOrVarName, isListener);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseConstantOrListenerDeclRhs(solution.tokenKind, metadata, qualifier, keyword, typeOrVarName,\nisListener);\n}\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nprivate STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,\nSTNode type, STNode variableName) {\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nif (isListener) {\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse const keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseConstantKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONST_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONST_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse nil type descriptor.\n*
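To make the optional-type branch above concrete: the parser decides what the first identifier was by looking at the single token that follows it. A simplified standalone illustration (string tokens instead of `SyntaxKind`; the real code loops back through recovery in the default case):

```java
// After `const X ...`: the token following X tells us whether X was a type.
class ConstDeclLookaheadSketch {
    static String classify(String tokenAfterFirstIdentifier) {
        switch (tokenAfterFirstIdentifier) {
            case "identifier": return "X is the type, next token is the variable"; // const T x = ...
            case "=":          return "X is the variable, type omitted";           // const x = ...
            default:           return "error recovery";
        }
    }
}
```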
\n* nil-type-descriptor := ( ) \n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseNilTypeDescriptor() {\nstartContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken);\n}\n/**\n* Parse typeof expression.\n*
\n* \n* typeof-expr := typeof expression\n* \n*\n* @param isRhsExpr\n* @return Typeof expression node\n*/\nprivate STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode typeofKeyword = parseTypeofKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);\n}\n/**\n* Parse typeof-keyword.\n*\n* @return Typeof-keyword node\n*/\nprivate STNode parseTypeofKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPEOF_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse optional type descriptor.\n*
\n* optional-type-descriptor := type-descriptor ? \n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {\nstartContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);\nSTNode questionMarkToken = parseQuestionMark();\nendContext();\nreturn STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken);\n}\n/**\n* Parse unary expression.\n*
\n* \n* unary-expr := + expression | - expression | ~ expression | ! expression\n* \n*\n* @param isRhsExpr\n* @return Unary expression node\n*/\nprivate STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode unaryOperator = parseUnaryOperator();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);\n}\n/**\n* Parse unary operator.\n* UnaryOperator := + | - | ~ | !\n*\n* @return Parsed node\n*/\nprivate STNode parseUnaryOperator() {\nSTToken token = peek();\nif (isUnaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.UNARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a unary operator.\n*\n* @param kind STToken kind\n* @return true if the token kind refers to a unary operator. false otherwise\n*/\nprivate boolean isUnaryOperator(SyntaxKind kind) {\nswitch (kind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse array type descriptor.\n*
\n* \n* array-type-descriptor := member-type-descriptor [ [ array-length ] ]\n* member-type-descriptor := type-descriptor\n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* inferred-array-length := *\n* \n*
\n*\n* @param memberTypeDesc\n*\n* @return Parsed Node\n*/\nprivate STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {\nstartContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);\nSTNode openBracketToken = parseOpenBracket();\nSTNode arrayLengthNode = parseArrayLength();\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode,\ncloseBracketToken);\n}\n/**\n* Parse array length.\n*
\n* \n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* constant-reference-expr := variable-reference-expr\n* \n*
\n*\n* @return Parsed array length\n*/\nprivate STNode parseArrayLength() {\nSTToken token = peek();\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);\ndefault:\nSolution sol = recover(token, ParserRuleContext.ARRAY_LENGTH);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse annotations.\n*
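A compact restatement of the array-length alternatives accepted above; the string kinds are illustrative stand-ins for the parser's token kinds, not its API.

```java
// T[4], T[0x10], T[*], and T[LEN] are valid; T[] (empty length) is handled by
// the CLOSE_BRACKET case before this check would run.
class ArrayLengthSketch {
    static boolean isValidLengthToken(String kind) {
        switch (kind) {
            case "DECIMAL_INTEGER_LITERAL":
            case "HEX_INTEGER_LITERAL":
            case "ASTERISK_TOKEN":   // inferred-array-length
            case "IDENTIFIER_TOKEN": // constant-reference-expr
                return true;
            default:
                return false;        // anything else goes to recovery
        }
    }
}
```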
\n* Note: In the Ballerina spec ({@link https:\n* annotations-list is specified as one-or-more annotations, and its usage is marked as\n* an optional annotations-list. However, for the consistency of the tree, here we make the\n* annotation-list zero-or-more annotations and the usage non-optional.\n*
\n* annots := annotation*\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotations() {\nSTToken nextToken = peek();\nreturn parseAnnotations(nextToken.kind);\n}\nprivate STNode parseAnnotations(SyntaxKind nextTokenKind) {\nstartContext(ParserRuleContext.ANNOTATIONS);\nList annotList = new ArrayList<>();\nwhile (nextTokenKind == SyntaxKind.AT_TOKEN) {\nannotList.add(parseAnnotation());\nnextTokenKind = peek().kind;\n}\nendContext();\nreturn STNodeFactory.createNodeList(annotList);\n}\n/**\n* Parse annotation attachment.\n*
\n* annotation := @ annot-tag-reference annot-value\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotation() {\nSTNode atToken = parseAtToken();\nSTNode annotReference;\nif (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {\nannotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n} else {\nannotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);\n}\nSTNode annotValue;\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nannotValue = parseMappingConstructorExpr();\n} else {\nannotValue = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);\n}\n/**\n* Parse '@' token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAtToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.AT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.AT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse metadata. Metadata consists of an optional doc string and\n* an annotations list.\n*
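Example inputs for the annotation grammar above; the Ballerina snippets are held in Java strings purely for illustration.

```java
// The three shapes the annotation grammar above accepts: a bare reference, a
// reference with a mapping-constructor value, and a qualified reference.
class AnnotationExamples {
    static final String BARE       = "@deprecated";
    static final String WITH_VALUE = "@display {label: \"sample\"}";
    static final String QUALIFIED  = "@docs:sample {id: 1}";
}
```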
\n* metadata := [DocumentationString] annots\n*\n* @return Parse node\n*/\nprivate STNode parseMetaData(SyntaxKind nextTokenKind) {\nSTNode docString;\nSTNode annotations;\nswitch (nextTokenKind) {\ncase DOCUMENTATION_STRING:\ndocString = parseMarkdownDocumentation();\nannotations = parseAnnotations();\nbreak;\ncase AT_TOKEN:\ndocString = STNodeFactory.createEmptyNode();\nannotations = parseAnnotations(nextTokenKind);\nbreak;\ndefault:\nreturn createEmptyMetadata();\n}\nreturn STNodeFactory.createMetadataNode(docString, annotations);\n}\n/**\n* Create empty metadata node.\n*\n* @return A metadata node with no doc string and no annotations\n*/\nprivate STNode createEmptyMetadata() {\nreturn STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createEmptyNodeList());\n}\n/**\n* Parse is expression.\n* \n* is-expr := expression is type-descriptor\n* \n*\n* @param lhsExpr Preceding expression of the is expression\n* @return Is expression node\n*/\nprivate STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode isKeyword = parseIsKeyword();\nSTNode typeDescriptor =\nparseTypeDescriptorInExpression(ParserRuleContext.TYPE_DESC_IN_EXPRESSION, isInConditionalExpr);\nreturn STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor);\n}\n/**\n* Parse is-keyword.\n*\n* @return Is-keyword node\n*/\nprivate STNode parseIsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse local type definition statement statement.\n* ocal-type-defn-stmt := [annots] type identifier type-descriptor ;\n*\n* @return local type definition statement statement\n*/\nprivate STNode parseLocalTypeDefinitionStatement(STNode annots) {\nstartContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);\nSTNode typeKeyword = parseTypeKeyword();\nSTNode typeName = parseTypeName();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,\nsemicolon);\n}\n/**\n* Parse statement which is only consists of an action or expression.\n*\n* @param annots Annotations\n* @param nextTokenKind Next token kind\n* @return Statement node\n*/\nprivate STNode parseExpressionStatement(SyntaxKind nextTokenKind, STNode annots) {\nstartContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode expression = parseActionOrExpressionInLhs(nextTokenKind, annots);\nreturn getExpressionAsStatement(expression);\n}\n/**\n* Parse statements that starts with an expression.\n*\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExpr(STNode annots) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode expr = parseActionOrExpressionInLhs(peek().kind, annots);\nreturn parseStatementStartWithExprRhs(expr);\n}\n/**\n* Parse rhs of statements that starts with an expression.\n*\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExprRhs(STNode expression) {\nSTToken nextToken = peek();\nreturn parseStatementStartWithExprRhs(nextToken.kind, expression);\n}\n/**\n* Parse the component followed by the expression, at the beginning of a statement.\n*\n* @param nextTokenKind Kind of the next token\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExprRhs(SyntaxKind nextTokenKind, STNode expression) 
{\nswitch (nextTokenKind) {\ncase EQUAL_TOKEN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(expression);\ncase SEMICOLON_TOKEN:\nreturn getExpressionAsStatement(expression);\ncase IDENTIFIER_TOKEN:\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn parseCompoundAssignmentStmtRhs(expression);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, expression);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatementStartWithExprRhs(solution.tokenKind, expression);\n}\n}\nprivate STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {\nSTNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);\nSTNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;\nif (lengthExprs.isEmpty()) {\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, indexedExpr.openBracket,\nSTNodeFactory.createEmptyNode(), indexedExpr.closeBracket);\n}\nSTNode lengthExpr = lengthExprs.get(0);\nswitch (lengthExpr.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ndefault:\nSTNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(\nindexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\nindexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);\nlengthExpr = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, indexedExpr.openBracket, lengthExpr,\nindexedExpr.closeBracket);\n}\nprivate STNode getExpressionAsStatement(STNode expression) {\nswitch (expression.kind) {\ncase METHOD_CALL:\ncase FUNCTION_CALL:\ncase CHECK_EXPRESSION:\ncase FAIL_EXPRESSION:\nreturn parseCallStatement(expression);\ncase REMOTE_METHOD_CALL_ACTION:\ncase CHECK_ACTION:\ncase FAIL_ACTION:\ncase BRACED_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\nreturn parseActionStatement(expression);\ndefault:\nSTNode semicolon = parseSemicolon();\nendContext();\nSTNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,\nexpression, semicolon);\nexprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);\nreturn exprStmt;\n}\n}\n/**\n*
\n* Parse call statement, given the call expression.\n*
\n* \n* call-stmt := call-expr ;\n*
\n* call-expr := function-call-expr | method-call-expr | checking-keyword call-expr\n*
\n*\n* @param expression Call expression associated with the call statement\n* @return Call statement node\n*/\nprivate STNode parseCallStatement(STNode expression) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);\n}\n/**\n* Check whether a node is a missing node.\n*\n* @param node Node to check\n* @return true if the node is a missing node. false otherwise\n*/\nprivate boolean isMissingNode(STNode node) {\nif (node.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn isMissingNode(((STSimpleNameReferenceNode) node).name);\n}\nreturn node instanceof STMissingToken;\n}\nprivate STNode parseActionStatement(STNode action) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);\n}\n/**\n* Parse remote method call action, given the starting expression.\n*
\n* \n* remote-method-call-action := expression -> method-name ( arg-list )\n*
\n* async-send-action := expression -> peer-worker ;\n*
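A standalone sketch of the post-`->` disambiguation described above, reduced to single characters; the parser does the same dispatch on token kinds, with recovery in the default branch.

```java
// After `expression -> name`: '(' begins a remote method call, ';' ends an
// async send; anything else is handed to error recovery.
class RightArrowDispatchSketch {
    static String classify(char next) {
        switch (next) {
            case '(': return "remote-method-call-action";
            case ';': return "async-send-action";
            default:  return "error recovery";
        }
    }
}
```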
\n*\n* @param isRhsExpr Is this an RHS action\n* @param expression LHS expression\n* @return\n*/\nprivate STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {\nSTNode rightArrow = parseRightArrow();\nreturn parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n}\nprivate STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {\nreturn parseRemoteCallOrAsyncSendActionRhs(peek().kind, expression, isRhsExpr, rightArrow);\n}\nprivate STNode parseRemoteCallOrAsyncSendActionRhs(SyntaxKind nextTokenKind, STNode expression, boolean isRhsExpr,\nSTNode rightArrow) {\nSTNode name;\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\nname = parseDefaultKeyword();\nreturn parseAsyncSendAction(expression, rightArrow, name);\ncase IDENTIFIER_TOKEN:\nname = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());\nbreak;\ncase CONTINUE_KEYWORD:\ncase COMMIT_KEYWORD:\nname = getKeywordAsSimpleNameRef();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression,\nisRhsExpr, rightArrow);\nif (solution.action == Action.REMOVE) {\nname = solution.recoveredNode;\nbreak;\n}\nreturn parseRemoteCallOrAsyncSendActionRhs(solution.tokenKind, expression, isRhsExpr, rightArrow);\n}\nreturn parseRemoteCallOrAsyncSendEnd(peek().kind, expression, rightArrow, name);\n}\nprivate STNode parseRemoteCallOrAsyncSendEnd(SyntaxKind nextTokenKind, STNode expression, STNode rightArrow,\nSTNode name) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseRemoteMethodCallAction(expression, rightArrow, name);\ncase SEMICOLON_TOKEN:\nreturn parseAsyncSendAction(expression, rightArrow, name);\ndefault:\nSTToken token = peek();\nSolution solution =\nrecover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRemoteCallOrAsyncSendEnd(solution.tokenKind, expression, rightArrow, name);\n}\n}\n/**\n* Parse default keyword.\n*\n* @return default keyword node\n*/\nprivate STNode parseDefaultKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DEFAULT_KEYWORD) {\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\n} else {\nSolution sol = recover(token, ParserRuleContext.DEFAULT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {\nreturn STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);\n}\nprivate STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {\nSTNode openParenToken = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode arguments = parseArgsList();\nSTNode closeParenToken = parseCloseParenthesis();\nreturn STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,\ncloseParenToken);\n}\n/**\n* Parse right arrow (->) token.\n*\n* @return Parsed node\n*/\nprivate STNode parseRightArrow() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse parameterized type descriptor.\n* parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter\n*\n* @return Parsed node\n*/\nprivate STNode 
parseParameterizedTypeDescriptor() {\nSTNode parameterizedTypeKeyword = parseParameterizedTypeKeyword();\nSTNode ltToken = parseLTToken();\nSTNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, ltToken, typeNode,\ngtToken);\n}\n/**\n* Parse map or future keyword token.\n*\n* @return Parsed node\n*/\nprivate STNode parseParameterizedTypeKeyword() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\nreturn consume();\ndefault:\nSolution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse < token.\n*\n* @return Parsed node\n*/\nprivate STNode parseGTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.GT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.GT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse > token.\n*\n* @return Parsed node\n*/\nprivate STNode parseLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.LT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse nil literal. Here nil literal is only referred to ( ).\n*\n* @return Parsed node\n*/\nprivate STNode parseNilLiteral() {\nstartContext(ParserRuleContext.NIL_LITERAL);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);\n}\n/**\n* Parse annotation declaration, given the qualifier.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {\nstartContext(ParserRuleContext.ANNOTATION_DECL);\nSTNode annotationKeyword = parseAnnotationKeyword();\nSTNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\nendContext();\nreturn annotDecl;\n}\n/**\n* Parse annotation keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ANNOTATION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse the components that follows after the annotation keyword of a annotation declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @param annotationKeyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTToken nextToken = peek();\nreturn parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword);\n}\nprivate STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode constKeyword, STNode annotationKeyword) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nbreak;\n}\nSTToken token = 
peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier,\nconstKeyword, annotationKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword,\nannotationKeyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\n/**\n* Parse annotation tag.\n*
\n* annot-tag := identifier\n*\n* @return\n*/\nprivate STNode parseAnnotationTag() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.ANNOTATION_TAG);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);\nif (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag, annotTag);\n}\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {\nSTNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,\nParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nSTNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;\nreturn parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);\n}\n/**\n* Parse the component that follows the first identifier in an annotation decl. The identifier\n* can be either the type-name (a user defined type) or the annot-tag, where the type-name\n* is not present.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation decl\n* @param constKeyword Const keyword\n* @param annotationKeyword Annotation keyword\n* @param typeDescOrAnnotTag Identifier that follows the annotation-keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDescOrAnnotTag) {\nSTToken token = peek();\nreturn parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag);\n}\nprivate STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) {\nSTNode typeDesc;\nSTNode annotTag;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ntypeDesc = typeDescOrAnnotTag;\nannotTag = parseAnnotationTag();\nbreak;\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ntypeDesc = STNodeFactory.createEmptyNode();\nannotTag = typeDescOrAnnotTag;\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword,\nannotationKeyword, typeDescOrAnnotTag);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag);\n}\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nprivate STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDesc, STNode annotTag) {\nSTToken nextToken = peek();\nreturn parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDesc, annotTag);\n}\nprivate STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode 
constKeyword, STNode annotationKeyword, STNode typeDesc,\nSTNode annotTag) {\nSTNode onKeyword;\nSTNode attachPoints;\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nonKeyword = STNodeFactory.createEmptyNode();\nattachPoints = STNodeFactory.createEmptyNodeList();\nbreak;\ncase ON_KEYWORD:\nonKeyword = parseOnKeyword();\nattachPoints = parseAnnotationAttachPoints();\nonKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier,\nconstKeyword, annotationKeyword, typeDesc, annotTag);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword,\nannotationKeyword, typeDesc, annotTag);\n}\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDesc, annotTag, onKeyword, attachPoints, semicolonToken);\n}\n/**\n* Parse annotation attach points.\n*
\n* \n* annot-attach-points := annot-attach-point (, annot-attach-point)*\n*
\n* annot-attach-point := dual-attach-point | source-only-attach-point\n*
\n* dual-attach-point := [source] dual-attach-point-ident\n*
\n* dual-attach-point-ident :=\n* [object] type\n* | [object|resource] function\n* | parameter\n* | return\n* | service\n* | [object|record] field\n*
\n* source-only-attach-point := source source-only-attach-point-ident\n*
\n* source-only-attach-point-ident :=\n* annotation\n* | external\n* | var\n* | const\n* | listener\n* | worker\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoints() {\nstartContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);\nList attachPoints = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndAnnotAttachPointList(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode attachPoint = parseAnnotationAttachPoint();\nattachPoints.add(attachPoint);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndAnnotAttachPointList(nextToken.kind)) {\nleadingComma = parseAttachPointEnd();\nif (leadingComma == null) {\nbreak;\n}\nattachPoints.add(leadingComma);\nattachPoint = parseAnnotationAttachPoint();\nif (attachPoint == null) {\nattachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nattachPoints.add(attachPoint);\nbreak;\n}\nattachPoints.add(attachPoint);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(attachPoints);\n}\n/**\n* Parse annotation attach point end.\n*\n* @return Parsed node\n*/\nprivate STNode parseAttachPointEnd() {\nSTToken nextToken = peek();\nreturn parseAttachPointEnd(nextToken.kind);\n}\nprivate STNode parseAttachPointEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nreturn null;\ncase COMMA_TOKEN:\nreturn consume();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null;\n}\n}\nprivate boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse annotation attach point.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoint() {\nreturn parseAnnotationAttachPoint(peek().kind);\n}\nprivate STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\nreturn null;\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\ncase SOURCE_KEYWORD:\nSTNode sourceKeyword = parseSourceKeyword();\nreturn parseAttachPointIdent(sourceKeyword);\ncase OBJECT_KEYWORD:\ncase TYPE_KEYWORD:\ncase RESOURCE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ncase RECORD_KEYWORD:\nsourceKeyword = STNodeFactory.createEmptyNode();\nSTNode firstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ATTACH_POINT);\nreturn solution.recoveredNode;\n}\n}\n/**\n* Parse source keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseSourceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SOURCE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SOURCE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse attach point ident gievn.\n*
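The attach-point list above is one instance of the parser's recurring `item (, item)*` loop. A self-contained, simplified version of that shape (tokens pre-split into strings, with ";" playing the role of the list terminator):

```java
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Simplified `item (, item)*` loop; ";" terminates the list.
class SeparatedListSketch {
    static List<String> parse(Deque<String> tokens) {
        List<String> items = new ArrayList<>();
        if (";".equals(tokens.peek())) return items;   // empty list
        items.add(tokens.poll());                      // first item
        while (",".equals(tokens.peek())) {
            tokens.poll();                             // separator
            items.add(tokens.poll());                  // next item
        }
        return items;                                  // stops at ";" or end of input
    }

    public static void main(String[] args) {
        Deque<String> toks = new ArrayDeque<>(List.of("function", ",", "field", ";"));
        System.out.println(parse(toks));               // prints [function, field]
    }
}
```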
\n* \n* source-only-attach-point-ident := annotation | external | var | const | listener | worker\n*
\n* dual-attach-point-ident := [object] type | [object|resource] function | parameter\n* | return | service | [object|record] field\n*
\n*\n* @param sourceKeyword Source keyword\n* @return Parsed node\n*/\nprivate STNode parseAttachPointIdent(STNode sourceKeyword) {\nreturn parseAttachPointIdent(peek().kind, sourceKeyword);\n}\nprivate STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) {\nswitch (nextTokenKind) {\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\nSTNode firstIdent = consume();\nSTNode secondIdent = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);\ncase OBJECT_KEYWORD:\ncase RESOURCE_KEYWORD:\ncase RECORD_KEYWORD:\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\nfirstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nfirstIdent = solution.recoveredNode;\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\n}\n}\n/**\n* Parse dual-attach-point ident.\n*\n* @param sourceKeyword Source keyword\n* @param firstIdent first part of the dual attach-point\n* @return Parsed node\n*/\nprivate STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {\nSTNode secondIdent;\nswitch (firstIdent.kind) {\ncase OBJECT_KEYWORD:\nsecondIdent = parseIdentAfterObjectIdent();\nbreak;\ncase RESOURCE_KEYWORD:\nsecondIdent = parseFunctionIdent();\nbreak;\ncase RECORD_KEYWORD:\nsecondIdent = parseFieldIdent();\nbreak;\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ndefault:\nsecondIdent = STNodeFactory.createEmptyNode();\nbreak;\n}\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);\n}\n/**\n* Parse the idents that are supported after object-ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseIdentAfterObjectIdent() {\nSTToken token = peek();\nswitch (token.kind) {\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase FIELD_KEYWORD:\nreturn consume();\ndefault:\nSolution sol = recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse function ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNCTION_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FIELD_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FIELD_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse XML namespace declaration.\n*
\n* xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;\n*
\n* xml-namespace-uri := simple-const-expr\n*
\n* xml-namespace-prefix := identifier\n*
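Example declarations matching the xmlns grammar above (Ballerina source kept in Java strings, for illustration only).

```java
class XmlnsExamples {
    static final String WITH_PREFIX    = "xmlns \"http://example.com/ns\" as ex;"; // [as xml-namespace-prefix]
    static final String WITHOUT_PREFIX = "xmlns \"http://example.com/ns\";";       // prefix omitted
}
```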
\n*\n* @return\n*/\nprivate STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {\nstartContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);\nSTNode xmlnsKeyword = parseXMLNSKeyword();\nSTNode namespaceUri = parseXMLNamespaceUri();\nSTNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\nendContext();\nreturn xmlnsDecl;\n}\n/**\n* Parse xmlns keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNSKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XMLNS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.XMLNS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse namespace uri.\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamespaceUri() {\nSTNode expr = parseSimpleConstExpr();\nswitch (expr.kind) {\ncase STRING_LITERAL:\ncase IDENTIFIER_TOKEN:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ndefault:\nexpr = SyntaxErrors.addDiagnostic(expr, DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);\n}\nreturn expr;\n}\nprivate STNode parseSimpleConstExpr() {\nstartContext(ParserRuleContext.CONSTANT_EXPRESSION);\nSTNode expr = parseSimpleConstExprInternal();\nendContext();\nreturn expr;\n}\nprivate STNode parseSimpleConstExprInternal() {\nSTToken nextToken = peek();\nreturn parseConstExprInternal(nextToken.kind);\n}\n/**\n* Parse constants expr.\n*\n* @return Parsed node\n*/\nprivate STNode parseConstExprInternal(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase STRING_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nreturn parseBasicLiteral();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn parseSignedIntOrFloat();\ncase OPEN_PAREN_TOKEN:\nreturn parseNilLiteral();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);\nreturn solution.recoveredNode;\n}\n}\n/**\n* Parse the portion after the namsepsace-uri of an XML declaration.\n*\n* @param xmlnsKeyword XMLNS keyword\n* @param namespaceUri Namespace URI\n* @return Parsed node\n*/\nprivate STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {\nreturn parseXMLDeclRhs(peek().kind, xmlnsKeyword, namespaceUri, isModuleVar);\n}\nprivate STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri,\nboolean isModuleVar) {\nSTNode asKeyword = STNodeFactory.createEmptyNode();\nSTNode namespacePrefix = STNodeFactory.createEmptyNode();\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\nasKeyword = parseAsKeyword();\nnamespacePrefix = parseNamespacePrefix();\nbreak;\ncase SEMICOLON_TOKEN:\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword,\nnamespaceUri, isModuleVar);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri, isModuleVar);\n}\nSTNode semicolon = parseSemicolon();\nif (isModuleVar) {\nreturn STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,\nnamespacePrefix, semicolon);\n}\nreturn STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,\nsemicolon);\n}\n/**\n* Parse import prefix.\n*\n* @return Parsed node\n*/\nprivate STNode 
parseNamespacePrefix() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse named worker declaration.\n*
\n* named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }\n*\n* @param annots Annotations attached to the worker decl\n* @return Parsed node\n*/\nprivate STNode parseNamedWorkerDeclaration(STNode annots) {\nstartContext(ParserRuleContext.NAMED_WORKER_DECL);\nSTNode workerKeyword = parseWorkerKeyword();\nSTNode workerName = parseWorkerName();\nSTNode returnTypeDesc = parseReturnTypeDescriptor();\nSTNode workerBody = parseBlockNode();\nendContext();\nreturn STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc,\nworkerBody);\n}\nprivate STNode parseReturnTypeDescriptor() {\nSTToken token = peek();\nif (token.kind != SyntaxKind.RETURNS_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode returnsKeyword = consume();\nSTNode annot = parseAnnotations();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\nreturn STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n}\n/**\n* Parse worker keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse worker name.\n*
\n* worker-name := identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerName() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.WORKER_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse lock statement.\n* lock-stmt := lock block-stmt ;\n*\n* @return Lock statement\n*/\nprivate STNode parseLockStatement() {\nstartContext(ParserRuleContext.LOCK_STMT);\nSTNode lockKeyword = parseLockKeyword();\nSTNode blockStatement = parseBlockNode();\nendContext();\nreturn STNodeFactory.createLockStatementNode(lockKeyword, blockStatement);\n}\n/**\n* Parse lock-keyword.\n*\n* @return lock-keyword node\n*/\nprivate STNode parseLockKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LOCK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LOCK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse union type descriptor.\n* union-type-descriptor := type-descriptor | type-descriptor\n*\n* @param leftTypeDesc Type desc in the LHS os the union type desc.\n* @param context Current context.\n* @return parsed union type desc node\n*/\nprivate STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeToken = parsePipeToken();\nSTNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);\nreturn STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);\n}\n/**\n* Parse pipe token.\n*\n* @return parsed pipe token node\n*/\nprivate STNode parsePipeToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PIPE);\nreturn sol.recoveredNode;\n}\n}\nprivate boolean isTypeStartingToken(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase IDENTIFIER_TOKEN:\ncase SERVICE_KEYWORD:\ncase RECORD_KEYWORD:\ncase OBJECT_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CLIENT_KEYWORD:\ncase OPEN_PAREN_TOKEN:\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TABLE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase DISTINCT_KEYWORD:\nreturn true;\ndefault:\nif (isSingletonTypeDescStart(nodeKind, true)) {\nreturn true;\n}\nreturn isSimpleType(nodeKind);\n}\n}\nstatic boolean isSimpleType(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase INT_KEYWORD:\ncase FLOAT_KEYWORD:\ncase DECIMAL_KEYWORD:\ncase BOOLEAN_KEYWORD:\ncase STRING_KEYWORD:\ncase BYTE_KEYWORD:\ncase XML_KEYWORD:\ncase JSON_KEYWORD:\ncase HANDLE_KEYWORD:\ncase ANY_KEYWORD:\ncase ANYDATA_KEYWORD:\ncase NEVER_KEYWORD:\ncase SERVICE_KEYWORD:\ncase VAR_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase READONLY_KEYWORD:\ncase DISTINCT_KEYWORD:\nreturn true;\ncase TYPE_DESC:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {\nswitch (typeKeyword) {\ncase INT_KEYWORD:\nreturn SyntaxKind.INT_TYPE_DESC;\ncase FLOAT_KEYWORD:\nreturn SyntaxKind.FLOAT_TYPE_DESC;\ncase DECIMAL_KEYWORD:\nreturn SyntaxKind.DECIMAL_TYPE_DESC;\ncase BOOLEAN_KEYWORD:\nreturn SyntaxKind.BOOLEAN_TYPE_DESC;\ncase STRING_KEYWORD:\nreturn SyntaxKind.STRING_TYPE_DESC;\ncase BYTE_KEYWORD:\nreturn SyntaxKind.BYTE_TYPE_DESC;\ncase XML_KEYWORD:\nreturn SyntaxKind.XML_TYPE_DESC;\ncase JSON_KEYWORD:\nreturn SyntaxKind.JSON_TYPE_DESC;\ncase HANDLE_KEYWORD:\nreturn 
SyntaxKind.HANDLE_TYPE_DESC;\ncase ANY_KEYWORD:\nreturn SyntaxKind.ANY_TYPE_DESC;\ncase ANYDATA_KEYWORD:\nreturn SyntaxKind.ANYDATA_TYPE_DESC;\ncase READONLY_KEYWORD:\nreturn SyntaxKind.READONLY_TYPE_DESC;\ncase NEVER_KEYWORD:\nreturn SyntaxKind.NEVER_TYPE_DESC;\ncase SERVICE_KEYWORD:\nreturn SyntaxKind.SERVICE_TYPE_DESC;\ncase VAR_KEYWORD:\nreturn SyntaxKind.VAR_TYPE_DESC;\ndefault:\nreturn SyntaxKind.TYPE_DESC;\n}\n}\n/**\n* Parse fork-keyword.\n*\n* @return Fork-keyword node\n*/\nprivate STNode parseForkKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FORK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FORK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse fork statement.\n* fork-stmt := fork { named-worker-decl+ }\n*\n* @return Fork statement\n*/\nprivate STNode parseForkStatement() {\nstartContext(ParserRuleContext.FORK_STMT);\nSTNode forkKeyword = parseForkKeyword();\nSTNode openBrace = parseOpenBrace();\nArrayList workers = new ArrayList<>();\nwhile (!isEndOfStatements()) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nswitch (stmt.kind) {\ncase NAMED_WORKER_DECLARATION:\nworkers.add(stmt);\nbreak;\ndefault:\nif (workers.isEmpty()) {\nopenBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n} else {\nupdateLastNodeInListWithInvalidNode(workers, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n}\n}\n}\nSTNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);\nSTNode closeBrace = parseCloseBrace();\nendContext();\nSTNode forkStmt =\nSTNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);\nif (isNodeListEmpty(namedWorkerDeclarations)) {\nreturn SyntaxErrors.addDiagnostic(forkStmt,\nDiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);\n}\nreturn forkStmt;\n}\n/**\n* Parse trap expression.\n*
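A standalone restatement of the fork-body rule enforced by the loop above: every statement inside `fork { ... }` must be a named worker declaration, and at least one must be present. Names here are illustrative, not the parser's diagnostics API.

```java
import java.util.ArrayList;
import java.util.List;

// Mirrors the two diagnostics above: non-worker statements are invalid inside
// `fork { ... }`, and an empty fork body is itself an error.
class ForkBodyRuleSketch {
    static List<String> validate(List<String> stmtKinds) {
        List<String> errors = new ArrayList<>();
        for (String kind : stmtKinds) {
            if (!"NAMED_WORKER_DECLARATION".equals(kind)) {
                errors.add("only named workers allowed here: " + kind);
            }
        }
        if (stmtKinds.isEmpty()) {
            errors.add("missing named worker declaration in fork stmt");
        }
        return errors;
    }
}
```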
\n* \n* trap-expr := trap expression\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Whether this is a RHS expression or not\n* @return Trap expression node\n*/\nprivate STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode trapKeyword = parseTrapKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);\n}\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);\n}\n/**\n* Parse trap-keyword.\n*\n* @return Trap-keyword node\n*/\nprivate STNode parseTrapKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRAP_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TRAP_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse list constructor expression.\n*
\n* \n* list-constructor-expr := [ [ expr-list ] ]\n*
\n* expr-list := expression (, expression)*\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseListConstructorExpr() {\nstartContext(ParserRuleContext.LIST_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode expressions = parseOptionalExpressionsList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);\n}\n/**\n* Parse optional expression list.\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalExpressionsList() {\nList expressions = new ArrayList<>();\nif (isEndOfListConstructor(peek().kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode expr = parseExpression();\nexpressions.add(expr);\nreturn parseOptionalExpressionsList(expressions);\n}\nprivate STNode parseOptionalExpressionsList(List expressions) {\nSTNode listConstructorMemberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nlistConstructorMemberEnd = parseListConstructorMemberEnd();\nif (listConstructorMemberEnd == null) {\nbreak;\n}\nexpressions.add(listConstructorMemberEnd);\nSTNode expr = parseExpression();\nexpressions.add(expr);\n}\nreturn STNodeFactory.createNodeList(expressions);\n}\nprivate boolean isEndOfListConstructor(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseListConstructorMemberEnd() {\nreturn parseListConstructorMemberEnd(peek().kind);\n}\nprivate STNode parseListConstructorMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListConstructorMemberEnd(solution.tokenKind);\n}\n}\n/**\n* Parse foreach statement.\n* foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt\n*\n* @return foreach statement\n*/\nprivate STNode parseForEachStatement() {\nstartContext(ParserRuleContext.FOREACH_STMT);\nSTNode forEachKeyword = parseForEachKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);\nSTNode inKeyword = parseInKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nSTNode blockStatement = parseBlockNode();\nendContext();\nreturn STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,\nblockStatement);\n}\n/**\n* Parse foreach-keyword.\n*\n* @return ForEach-keyword node\n*/\nprivate STNode parseForEachKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FOREACH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FOREACH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse in-keyword.\n*\n* @return In-keyword node\n*/\nprivate STNode parseInKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse type cast expression.\n*
\n* \n* type-cast-expr := < type-cast-param > expression\n*
\n* type-cast-param := [annots] type-descriptor | annots\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nstartContext(ParserRuleContext.TYPE_CAST);\nSTNode ltToken = parseLTToken();\nSTNode typeCastParam = parseTypeCastParam();\nSTNode gtToken = parseGTToken();\nendContext();\nSTNode expression =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nreturn STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);\n}\nprivate STNode parseTypeCastParam() {\nSTNode annot;\nSTNode type;\nSTToken token = peek();\nswitch (token.kind) {\ncase AT_TOKEN:\nannot = parseAnnotations();\ntoken = peek();\nif (isTypeStartingToken(token.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n} else {\ntype = STNodeFactory.createEmptyNode();\n}\nbreak;\ndefault:\nannot = STNodeFactory.createEmptyNode();\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nbreak;\n}\nreturn STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);\n}\n/**\n* Parse table constructor expression.\n*
\n* \n* table-constructor-expr-rhs := [ [row-list] ]\n* \n*\n* @param tableKeyword tableKeyword that precedes this rhs\n* @param keySpecifier keySpecifier that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {\nswitchContext(ParserRuleContext.TABLE_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode rowList = parseRowList();\nSTNode closeBracket = parseCloseBracket();\nreturn STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,\ncloseBracket);\n}\n/**\n* Parse table-keyword.\n*\n* @return Table-keyword node\n*/\nprivate STNode parseTableKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TABLE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TABLE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse table rows.\n*
\n* row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseRowList() {\nSTToken nextToken = peek();\nif (isEndOfTableRowList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList mappings = new ArrayList<>();\nSTNode mapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfTableRowList(nextToken.kind)) {\nleadingComma = parseComma();\nmappings.add(leadingComma);\nmapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(mappings);\n}\nprivate boolean isEndOfTableRowList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ncase COMMA_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn false;\ndefault:\nreturn isEndOfMappingConstructor(tokenKind);\n}\n}\n/**\n* Parse key specifier.\n*
\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier() {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode keyKeyword = parseKeyKeyword();\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode fieldNames = parseFieldNames();\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);\n}\n/**\n* Parse key-keyword.\n*\n* @return Key-keyword node\n*/\nprivate STNode parseKeyKeyword() {\nSTToken token = peek();\nif (isKeyKeyword(token)) {\nreturn getKeyKeyword(consume());\n} else {\nSolution sol = recover(token, ParserRuleContext.KEY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nstatic boolean isKeyKeyword(STToken token) {\nreturn token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());\n}\nprivate STNode getKeyKeyword(STToken token) {\nreturn STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),\ntoken.diagnostics());\n}\n/**\n* Parse field names.\n*
\n* field-name-list := [ field-name (, field-name)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldNames() {\nSTToken nextToken = peek();\nif (isEndOfFieldNamesList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList fieldNames = new ArrayList<>();\nSTNode fieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfFieldNamesList(nextToken.kind)) {\nleadingComma = parseComma();\nfieldNames.add(leadingComma);\nfieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(fieldNames);\n}\nprivate boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase IDENTIFIER_TOKEN:\nreturn false;\ndefault:\nreturn true;\n}\n}\n/**\n* Parse error type descriptor.\n*
\n* error-type-descriptor := error [error-type-param]\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*
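\n* Illustrative example (assumed syntax): {@code error<map<string>>} uses a detail-type-descriptor, while {@code error<*>} uses the inferred-type-descriptor.\n*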
\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeDescriptor() {\nSTNode errorKeywordToken = parseErrorKeyword();\nSTNode errorTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nerrorTypeParamsNode = parseErrorTypeParamsNode();\n} else {\nerrorTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);\n}\n/**\n* Parse error type param node.\n*
\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nSTNode parameter;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nparameter = consume();\n} else {\nparameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n}\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken);\n}\n/**\n* Parse error-keyword.\n*\n* @return Parsed error-keyword node\n*/\nprivate STNode parseErrorKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ERROR_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ERROR_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse typedesc type descriptor.\n* typedesc-type-descriptor := typedesc type-parameter\n*\n* @return Parsed typedesc type node\n*/\nprivate STNode parseTypedescTypeDescriptor() {\nSTNode typedescKeywordToken = parseTypedescKeyword();\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse typedesc-keyword.\n*\n* @return Parsed typedesc-keyword node\n*/\nprivate STNode parseTypedescKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPEDESC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPEDESC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse stream type descriptor.\n*
\n* stream-type-descriptor := stream [stream-type-parameters]\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*
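\n* Illustrative examples (assumed syntax): {@code stream<int>} and {@code stream<Person, error?>}, where {@code Person} is a hypothetical type.\n*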
\n*\n* @return Parsed stream type descriptor node\n*/\nprivate STNode parseStreamTypeDescriptor() {\nSTNode streamKeywordToken = parseStreamKeyword();\nSTNode streamTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nstreamTypeParamsNode = parseStreamTypeParamsNode();\n} else {\nstreamTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);\n}\n/**\n* Parse xml type descriptor.\n* xml-type-descriptor := xml type-parameter\n*\n* @return Parsed typedesc type node\n*/\nprivate STNode parseXmlTypeDescriptor() {\nSTNode xmlKeywordToken = parseXMLKeyword();\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse stream type params node.\n*
\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*
\n*\n* @return Parsed stream type params node\n*/\nprivate STNode parseStreamTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nstartContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\nSTNode leftTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false);\nSTNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\nendContext();\nreturn streamTypedesc;\n}\nprivate STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {\nreturn parseStreamTypeParamsNode(peek().kind, ltToken, leftTypeDescNode);\n}\nprivate STNode parseStreamTypeParamsNode(SyntaxKind nextTokenKind, STNode ltToken, STNode leftTypeDescNode) {\nSTNode commaToken, rightTypeDescNode, gtToken;\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\ncommaToken = parseComma();\nrightTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false);\nbreak;\ncase GT_TOKEN:\ncommaToken = STNodeFactory.createEmptyNode();\nrightTypeDescNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution =\nrecover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStreamTypeParamsNode(solution.tokenKind, ltToken, leftTypeDescNode);\n}\ngtToken = parseGTToken();\nreturn STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,\ngtToken);\n}\n/**\n* Parse stream-keyword.\n*\n* @return Parsed stream-keyword node\n*/\nprivate STNode parseStreamKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STREAM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STREAM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let expression.\n*
\n* \n* let-expr := let let-var-decl [, let-var-decl]* in expression\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetExpression(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);\nSTNode inKeyword = parseInKeyword();\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);\n}\n/**\n* Parse let-keyword.\n*\n* @return Let-keyword node\n*/\nprivate STNode parseLetKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LET_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LET_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let variable declarations.\n*
\n* let-var-decl-list := let-var-decl [, let-var-decl]*\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {\nstartContext(context);\nList varDecls = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfLetVarDeclarations(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode varDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfLetVarDeclarations(nextToken.kind)) {\nleadingComma = parseComma();\nvarDecls.add(leadingComma);\nvarDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(varDecls);\n}\nprivate boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase AT_TOKEN:\nreturn false;\ncase IN_KEYWORD:\nreturn true;\ndefault:\nreturn !isTypeStartingToken(tokenKind);\n}\n}\n/**\n* Parse let variable declaration.\n*
\n* let-var-decl := [annots] typed-binding-pattern = expression\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDecl(boolean isRhsExpr) {\nSTNode annot = parseAnnotations();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);\nSTNode assign = parseAssignOp();\nSTNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);\nreturn STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);\n}\n/**\n* Parse raw backtick string template expression.\n*
\n* BacktickString := `expression`\n*\n* @return Template expression node\n*/\nprivate STNode parseTemplateExpression() {\nSTNode type = STNodeFactory.createEmptyNode();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\nprivate STNode parseTemplateContent() {\nList items = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nitems.add(contentItem);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\nprivate boolean isEndOfBacktickContent(SyntaxKind kind) {\nswitch (kind) {\ncase EOF_TOKEN:\ncase BACKTICK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTemplateItem() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn parseInterpolation();\n}\nreturn consume();\n}\n/**\n* Parse string template expression.\n*
\n* string-template-expr := string ` expression `\n*\n* @return String template expression node\n*/\nprivate STNode parseStringTemplateExpression() {\nSTNode type = parseStringKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\n/**\n* Parse string keyword.\n*\n* @return string keyword node\n*/\nprivate STNode parseStringKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STRING_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STRING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse XML template expression.\n*
\n* xml-template-expr := xml BacktickString\n*\n* @return XML template expression\n*/\nprivate STNode parseXMLTemplateExpression() {\nSTNode xmlKeyword = parseXMLKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContentAsXML();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,\nstartingBackTick, content, endingBackTick);\n}\n/**\n* Parse xml keyword.\n*\n* @return xml keyword node\n*/\nprivate STNode parseXMLKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XML_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.XML_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse the content of the template string as XML. This method first read the\n* input in the same way as the raw-backtick-template (BacktickString). Then\n* it parses the content as XML.\n*\n* @return XML node\n*/\nprivate STNode parseTemplateContentAsXML() {\nArrayDeque expressions = new ArrayDeque<>();\nStringBuilder xmlStringBuilder = new StringBuilder();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nif (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {\nxmlStringBuilder.append(((STToken) contentItem).text());\n} else {\nxmlStringBuilder.append(\"${}\");\nexpressions.add(contentItem);\n}\nnextToken = peek();\n}\nTextDocument textDocument = TextDocuments.from(xmlStringBuilder.toString());\nAbstractTokenReader tokenReader = new TokenReader(new XMLLexer(textDocument.getCharacterReader()));\nXMLParser xmlParser = new XMLParser(tokenReader, expressions);\nreturn xmlParser.parse();\n}\n/**\n* Parse interpolation of a back-tick string.\n*
\n* \n* interpolation := ${ expression }\n* \n*\n* @return Interpolation node\n*/\nprivate STNode parseInterpolation() {\nstartContext(ParserRuleContext.INTERPOLATION);\nSTNode interpolStart = parseInterpolationStart();\nSTNode expr = parseExpression();\nwhile (true) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.EOF_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nbreak;\n} else {\nnextToken = consume();\nexpr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,\nDiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());\n}\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);\n}\n/**\n* Parse interpolation start token.\n*
\n* interpolation-start := ${\n*\n* @return Interpolation start token\n*/\nprivate STNode parseInterpolationStart() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse back-tick token.\n*\n* @return Back-tick token\n*/\nprivate STNode parseBacktickToken(ParserRuleContext ctx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ctx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse table type descriptor.\n*
\n* table-type-descriptor := table row-type-parameter [key-constraint]\n* row-type-parameter := type-parameter\n* key-constraint := key-specifier | key-type-constraint\n* key-specifier := key ( [ field-name (, field-name)* ] )\n* key-type-constraint := key type-parameter\n*
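\n* Illustrative example (assumed syntax): {@code table<Employee> key(id)}, where {@code Employee} and {@code id} are hypothetical names.\n*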
\n*\n* @return Parsed table type desc node.\n*/\nprivate STNode parseTableTypeDescriptor() {\nSTNode tableKeywordToken = parseTableKeyword();\nSTNode rowTypeParameterNode = parseRowTypeParameter();\nSTNode keyConstraintNode;\nSTToken nextToken = peek();\nif (isKeyKeyword(nextToken)) {\nSTNode keyKeywordToken = getKeyKeyword(consume());\nkeyConstraintNode = parseKeyConstraint(keyKeywordToken);\n} else {\nkeyConstraintNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);\n}\n/**\n* Parse row type parameter node.\n*
\n* row-type-parameter := type-parameter\n*
\n*\n* @return Parsed node.\n*/\nprivate STNode parseRowTypeParameter() {\nstartContext(ParserRuleContext.ROW_TYPE_PARAM);\nSTNode rowTypeParameterNode = parseTypeParameter();\nendContext();\nreturn rowTypeParameterNode;\n}\n/**\n* Parse type parameter node.\n*
\n* type-parameter := < type-descriptor >\n*
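\n* Illustrative example (assumed syntax): the {@code <string>} in {@code map<string>}.\n*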
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeParameter() {\nSTNode ltToken = parseLTToken();\nSTNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);\n}\n/**\n* Parse key constraint.\n*
\n* key-constraint := key-specifier | key-type-constraint\n*
\n*\n* @return Parsed node.\n*/\nprivate STNode parseKeyConstraint(STNode keyKeywordToken) {\nreturn parseKeyConstraint(peek().kind, keyKeywordToken);\n}\nprivate STNode parseKeyConstraint(SyntaxKind nextTokenKind, STNode keyKeywordToken) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseKeySpecifier(keyKeywordToken);\ncase LT_TOKEN:\nreturn parseKeyTypeConstraint(keyKeywordToken);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseKeyConstraint(solution.tokenKind, keyKeywordToken);\n}\n}\n/**\n* Parse key specifier given parsed key keyword token.\n*
\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier(STNode keyKeywordToken) {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode openParenToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode fieldNamesNode = parseFieldNames();\nSTNode closeParenToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);\n}\n/**\n* Parse key type constraint.\n*
\n* key-type-constraint := key type-parameter\n*
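\n* Illustrative example (assumed syntax): the {@code key<int>} in {@code table<Row> key<int>}, where {@code Row} is a hypothetical type.\n*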
\n*\n* @return Parsed node\n*/\nprivate STNode parseKeyTypeConstraint(STNode keyKeywordToken) {\nSTNode typeParameterNode = parseTypeParameter();\nreturn STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);\n}\n/**\n* Parse function type descriptor.\n*
\n* function-type-descriptor := function function-signature\n*\n* @return Function type descriptor node\n*/\nprivate STNode parseFunctionTypeDesc() {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode signature = parseFuncSignature(true);\nendContext();\nreturn STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature);\n}\n/**\n* Parse explicit anonymous function expression.\n*
\n* explicit-anonymous-function-expr := [annots] function function-signature anon-func-body\n*\n* @param annots Annotations.\n* @param isRhsExpr Is expression in rhs context\n* @return Anonymous function expression node\n*/\nprivate STNode parseExplicitFunctionExpression(STNode annots, boolean isRhsExpr) {\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nSTNode funcKeyword = parseFunctionKeyword();\nSTNode funcSignature = parseFuncSignature(false);\nSTNode funcBody = parseAnonFuncBody(isRhsExpr);\nreturn STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature,\nfuncBody);\n}\n/**\n* Parse anonymous function body.\n*
\n* anon-func-body := block-function-body | expr-function-body\n*\n* @param isRhsExpr Is expression in rhs context\n* @return Anon function body node\n*/\nprivate STNode parseAnonFuncBody(boolean isRhsExpr) {\nreturn parseAnonFuncBody(peek().kind, isRhsExpr);\n}\nprivate STNode parseAnonFuncBody(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\ncase EOF_TOKEN:\nSTNode body = parseFunctionBodyBlock(true);\nendContext();\nreturn body;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nendContext();\nreturn parseExpressionFuncBody(true, isRhsExpr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnonFuncBody(solution.tokenKind, isRhsExpr);\n}\n}\n/**\n* Parse expression function body.\n*
\n* expr-function-body := => expression\n*\n* @param isAnon Is anonymous function.\n* @param isRhsExpr Is expression in rhs context\n* @return Expression function body node\n*/\nprivate STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTNode semiColon;\nif (isAnon) {\nsemiColon = STNodeFactory.createEmptyNode();\n} else {\nsemiColon = parseSemicolon();\n}\nreturn STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);\n}\n/**\n* Parse '=>' token.\n*\n* @return Double right arrow token\n*/\nprivate STNode parseDoubleRightArrow() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {\nswitch (params.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase INFER_PARAM_LIST:\nbreak;\ncase BRACED_EXPRESSION:\nparams = getAnonFuncParam((STBracedExpressionNode) params);\nbreak;\ndefault:\nparams = SyntaxErrors.addDiagnostic(params,\nDiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);\n}\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);\n}\n/**\n* Create a new anon-func-param node from a braced expression.\n*\n* @param params Braced expression\n* @return Anon-func param node\n*/\nprivate STNode getAnonFuncParam(STBracedExpressionNode params) {\nList paramList = new ArrayList<>();\nparamList.add(params.expression);\nreturn STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen,\nSTNodeFactory.createNodeList(paramList), params.closeParen);\n}\n/**\n* Parse implicit anon function expression.\n*\n* @param openParen Open parenthesis token\n* @param firstParam First parameter\n* @param isRhsExpr Is expression in rhs context\n* @return Implicit anon function expression node\n*/\nprivate STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {\nList paramList = new ArrayList<>();\nparamList.add(firstParam);\nSTToken nextToken = peek();\nSTNode paramEnd;\nSTNode param;\nwhile (!isEndOfAnonFuncParametersList(nextToken.kind)) {\nparamEnd = parseImplicitAnonFuncParamEnd(nextToken.kind);\nif (paramEnd == null) {\nbreak;\n}\nparamList.add(paramEnd);\nparam = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);\nparam = STNodeFactory.createSimpleNameReferenceNode(param);\nparamList.add(param);\nnextToken = peek();\n}\nSTNode params = STNodeFactory.createNodeList(paramList);\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nSTNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nreturn parseImplicitAnonFunc(inferedParams, isRhsExpr);\n}\nprivate STNode parseImplicitAnonFuncParamEnd() {\nreturn parseImplicitAnonFuncParamEnd(peek().kind);\n}\nprivate STNode parseImplicitAnonFuncParamEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);\nif (solution.action == Action.REMOVE) {\nreturn 
solution.recoveredNode;\n}\nreturn parseImplicitAnonFuncParamEnd(solution.tokenKind);\n}\n}\nprivate boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse tuple type descriptor.\n*
\n* tuple-type-descriptor := [ tuple-member-type-descriptors ]\n*
\n* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]\n* | [ tuple-rest-descriptor ]\n*
\n* tuple-rest-descriptor := type-descriptor ...\n*
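\n* Illustrative examples (assumed syntax): {@code [int, string]}, and {@code [int, string...]} with a rest descriptor.\n*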
\n*\n* @return\n*/\nprivate STNode parseTupleTypeDesc() {\nSTNode openBracket = parseOpenBracket();\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode memberTypeDesc = parseTupleMemberTypeDescList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nopenBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,\nDiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);\n}\n/**\n* Parse tuple member type descriptors.\n*\n* @return Parsed node\n*/\nprivate STNode parseTupleMemberTypeDescList() {\nList typeDescList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfTypeList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode typeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nreturn parseTupleTypeMembers(typeDesc, typeDescList);\n}\nprivate STNode parseTupleTypeMembers(STNode typeDesc, List typeDescList) {\nSTToken nextToken;\nnextToken = peek();\nSTNode tupleMemberRhs;\nwhile (!isEndOfTypeList(nextToken.kind)) {\ntupleMemberRhs = parseTupleMemberRhs(nextToken.kind);\nif (tupleMemberRhs == null) {\nbreak;\n}\nif (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) {\ntypeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);\nbreak;\n}\ntypeDescList.add(typeDesc);\ntypeDescList.add(tupleMemberRhs);\ntypeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nnextToken = peek();\n}\ntypeDescList.add(typeDesc);\nreturn STNodeFactory.createNodeList(typeDescList);\n}\nprivate STNode parseTupleMemberRhs() {\nreturn parseTupleMemberRhs(peek().kind);\n}\nprivate STNode parseTupleMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ncase ELLIPSIS_TOKEN:\nreturn parseEllipsis();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTupleMemberRhs(solution.tokenKind);\n}\n}\nprivate boolean isEndOfTypeList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse table constructor or query expression.\n*
\n* \n* table-constructor-or-query-expr := table-constructor-expr | query-expr\n*
\n* table-constructor-expr := table [key-specifier] [ [row-list] ]\n*
\n* query-expr := [query-construct-type] query-pipeline select-clause\n* [query-construct-type] query-pipeline select-clause on-conflict-clause? limit-clause?\n*
\n* query-construct-type := table key-specifier | stream\n*
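\n* Illustrative examples (assumed syntax): {@code table key(id) [{id: 1}]} is a table constructor, while {@code from var x in xs select x} is a query expression ({@code xs} being a hypothetical variable).\n*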
\n*\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorOrQuery(boolean isRhsExpr) {\nstartContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);\nSTNode tableOrQueryExpr = parseTableConstructorOrQuery(peek().kind, isRhsExpr);\nendContext();\nreturn tableOrQueryExpr;\n}\nprivate STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nSTNode queryConstructType;\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nqueryConstructType = STNodeFactory.createEmptyNode();\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase STREAM_KEYWORD:\nqueryConstructType = parseQueryConstructType(parseStreamKeyword(), null);\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase TABLE_KEYWORD:\nSTNode tableKeyword = parseTableKeyword();\nreturn parseTableConstructorOrQuery(tableKeyword, isRhsExpr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQuery(solution.tokenKind, isRhsExpr);\n}\n}\nprivate STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseTableConstructorOrQuery(nextToken.kind, nextToken, tableKeyword, isRhsExpr);\n}\nprivate STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, STToken nextToken, STNode tableKeyword,\nboolean isRhsExpr) {\nSTNode keySpecifier;\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nkeySpecifier = STNodeFactory.createEmptyNode();\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ncase KEY_KEYWORD:\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\ncase IDENTIFIER_TOKEN:\nif (isKeyKeyword(nextToken)) {\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n}\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQuery(solution.tokenKind, null, tableKeyword, isRhsExpr);\n}\n}\nprivate STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {\nreturn parseTableConstructorOrQueryRhs(peek().kind, tableKeyword, keySpecifier, isRhsExpr);\n}\nprivate STNode parseTableConstructorOrQueryRhs(SyntaxKind nextTokenKind, STNode tableKeyword, STNode keySpecifier,\nboolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nreturn parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword,\nkeySpecifier, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQueryRhs(solution.tokenKind, tableKeyword, keySpecifier, isRhsExpr);\n}\n}\n/**\n* Parse query construct type.\n*
\n* query-construct-type := table key-specifier | stream\n*\n* @return Parsed node\n*/\nprivate STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {\nreturn STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);\n}\n/**\n* Parse query expression.\n*
\n* \n* query-expr-rhs := query-pipeline select-clause\n* query-pipeline select-clause on-conflict-clause? limit-clause?\n*
\n* query-pipeline := from-clause intermediate-clause*\n*
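\n* Illustrative example (assumed syntax): {@code from var s in students where s.gpa >= 3.5 select s.name}, where {@code students} is a hypothetical list.\n*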
\n*\n* @param queryConstructType queryConstructType that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {\nswitchContext(ParserRuleContext.QUERY_EXPRESSION);\nSTNode fromClause = parseFromClause(isRhsExpr);\nList clauses = new ArrayList<>();\nSTNode intermediateClause;\nSTNode selectClause = null;\nwhile (!isEndOfIntermediateClause(peek().kind, SyntaxKind.NONE)) {\nintermediateClause = parseIntermediateClause(isRhsExpr);\nif (intermediateClause == null) {\nbreak;\n}\nif (selectClause != null) {\nselectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,\nDiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);\ncontinue;\n}\nif (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {\nselectClause = intermediateClause;\n} else {\nclauses.add(intermediateClause);\n}\n}\nif (peek().kind == SyntaxKind.DO_KEYWORD) {\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nreturn parseQueryAction(queryPipeline, selectClause, isRhsExpr);\n}\nif (selectClause == null) {\nSTNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);\nSTNode expr = STNodeFactory\n.createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\nselectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);\nif (clauses.isEmpty()) {\nfromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\n} else {\nint lastIndex = clauses.size() - 1;\nSTNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),\nDiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\nclauses.set(lastIndex, intClauseWithDiagnostic);\n}\n}\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nSTNode onConflictClause = parseOnConflictClause(isRhsExpr);\nSTNode limitClause = parseLimitClause(isRhsExpr);\nreturn STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,\nonConflictClause, limitClause);\n}\n/**\n* Parse limit keyword.\n*\n* @return Limit keyword node\n*/\nprivate STNode parseLimitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LIMIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LIMIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse an intermediate clause.\n*
\n* \n* intermediate-clause := from-clause | where-clause | let-clause\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseIntermediateClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseIntermediateClause(nextToken.kind, isRhsExpr);\n}\nprivate STNode parseIntermediateClause(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nreturn parseFromClause(isRhsExpr);\ncase WHERE_KEYWORD:\nreturn parseWhereClause(isRhsExpr);\ncase LET_KEYWORD:\nreturn parseLetClause(isRhsExpr);\ncase SELECT_KEYWORD:\nreturn parseSelectClause(isRhsExpr);\ncase JOIN_KEYWORD:\ncase OUTER_KEYWORD:\nreturn parseJoinClause(isRhsExpr);\ncase ORDER_KEYWORD:\ncase BY_KEYWORD:\ncase ASCENDING_KEYWORD:\ncase DESCENDING_KEYWORD:\nreturn parseOrderByClause(isRhsExpr);\ncase DO_KEYWORD:\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ncase CONFLICT_KEYWORD:\ncase LIMIT_KEYWORD:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseIntermediateClause(solution.tokenKind, isRhsExpr);\n}\n}\n/**\n* Parse select-keyword.\n*\n* @return Select-keyword node\n*/\nprivate STNode parseJoinKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.JOIN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.JOIN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse select-keyword.\n*\n* @return Select-keyword node\n*/\nprivate STNode parseOuterKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OUTER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OUTER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse order-keyword.\n*\n* @return Order-keyword node\n*/\nprivate STNode parseOrderKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ORDER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ORDER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse by-keyword.\n*\n* @return By-keyword node\n*/\nprivate STNode parseByKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BY_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse ascending-keyword.\n*\n* @return Ascending-keyword node\n*/\nprivate STNode parseAscendingKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ASCENDING_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ASCENDING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse descending-keyword.\n*\n* @return Descending-keyword node\n*/\nprivate STNode parseDescendingKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DESCENDING_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DESCENDING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate boolean isEndOfIntermediateClause(SyntaxKind tokenKind, SyntaxKind precedingNodeKind) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase EOF_TOKEN:\ncase RESOURCE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase DOCUMENTATION_STRING:\ncase PRIVATE_KEYWORD:\ncase RETURNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase TYPE_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase DO_KEYWORD:\nreturn 
true;\ndefault:\nreturn isValidExprRhsStart(tokenKind, precedingNodeKind);\n}\n}\n/**\n* Parse from clause.\n*
\n* from-clause := from typed-binding-pattern in expression\n*\n* @return Parsed node\n*/\nprivate STNode parseFromClause(boolean isRhsExpr) {\nSTNode fromKeyword = parseFromKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);\n}\n/**\n* Parse from-keyword.\n*\n* @return From-keyword node\n*/\nprivate STNode parseFromKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FROM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FROM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse where clause.\n*
\n* where-clause := where expression\n*\n* @return Parsed node\n*/\nprivate STNode parseWhereClause(boolean isRhsExpr) {\nSTNode whereKeyword = parseWhereKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createWhereClauseNode(whereKeyword, expression);\n}\n/**\n* Parse where-keyword.\n*\n* @return Where-keyword node\n*/\nprivate STNode parseWhereKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WHERE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WHERE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let clause.\n*
\n* let-clause := let let-var-decl [, let-var-decl]* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetClause(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nreturn STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);\n}\n/**\n* Parse order by clause.\n*
\n* order-by-clause := order by expression (ascending | descending)? (expression (ascending | descending)?)*\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseOrderByClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ORDER_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode orderKeyword = parseOrderKeyword();\nSTNode byKeyword = parseByKeyword();\nSTNode orderKeys = parseOrderKeyList(ParserRuleContext.ORDER_KEY, isRhsExpr);\nbyKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword,\nDiagnosticErrorCode.ERROR_MISSING_EXPRESSION);\nreturn STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);\n}\n/**\n* Parse order key.\n*
\n* order-key-list := order-key [, order-key]*\n*\n* @return Parsed node\n*/\nprivate STNode parseOrderKeyList(ParserRuleContext context, boolean isRhsExpr) {\nstartContext(context);\nList orderKeys = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfOrderKeys(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode orderKey = parseOrderKey(isRhsExpr);\norderKeys.add(orderKey);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfOrderKeys(nextToken.kind)) {\nleadingComma = parseComma();\norderKeys.add(leadingComma);\norderKey = parseOrderKey(isRhsExpr);\norderKeys.add(orderKey);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(orderKeys);\n}\nprivate boolean isEndOfOrderKeys(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\nreturn false;\ncase IN_KEYWORD:\nreturn true;\ndefault:\nreturn !isTypeStartingToken(tokenKind);\n}\n}\n/**\n* Parse order key.\n*
\n* order-key := expression (ascending | descending)?\n*\n* @return Parsed node\n*/\nprivate STNode parseOrderKey(boolean isRhsExpr) {\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ASCENDING_KEYWORD) {\nSTNode ascendingKeyword = parseAscendingKeyword();\nreturn STNodeFactory.createOrderKeyNode(expression, ascendingKeyword, null);\n} else if (nextToken.kind == SyntaxKind.DESCENDING_KEYWORD) {\nSTNode descendingKeyword = parseDescendingKeyword();\nreturn STNodeFactory.createOrderKeyNode(expression, null, descendingKeyword);\n} else {\nreturn STNodeFactory.createOrderKeyNode(expression, null, null);\n}\n}\n/**\n* Parse select clause.\n*
\n* select-clause := select expression\n*\n* @return Parsed node\n*/\nprivate STNode parseSelectClause(boolean isRhsExpr) {\nSTNode selectKeyword = parseSelectKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createSelectClauseNode(selectKeyword, expression);\n}\n/**\n* Parse select-keyword.\n*\n* @return Select-keyword node\n*/\nprivate STNode parseSelectKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SELECT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SELECT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse on-conflict clause.\n*
\n* \n* onConflictClause := on conflict expression\n* \n*\n* @return On conflict clause node\n*/\nprivate STNode parseOnConflictClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ON_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode onKeyword = parseOnKeyword();\nSTNode conflictKeyword = parseConflictKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);\n}\n/**\n* Parse conflict keyword.\n*\n* @return Conflict keyword node\n*/\nprivate STNode parseConflictKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONFLICT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONFLICT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse limit clause.\n*
\n* limit-clause := limit expression\n*\n* @return Limit expression node\n*/\nprivate STNode parseLimitClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.LIMIT_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode limitKeyword = parseLimitKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLimitClauseNode(limitKeyword, expr);\n}\n/**\n* Parse join clause.\n*
\n* \n* join-clause := (join-var-decl | outer-join-var-decl) in expression\n*
\n* join-var-decl := join (type-name | var) binding-pattern\n*
\n* outer-join-var-decl := outer join var binding-pattern\n*
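\n* Illustrative example (assumed syntax): {@code from var e in emps join var d in depts select e.name}, with hypothetical lists {@code emps} and {@code depts}.\n*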
\n*\n* @return Join clause\n*/\nprivate STNode parseJoinClause(boolean isRhsExpr) {\nSTNode outerKeyword;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {\nouterKeyword = parseOuterKeyword();\n} else {\nouterKeyword = STNodeFactory.createEmptyNode();\n}\nSTNode joinKeyword = parseJoinKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode onCondition;\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nnextToken = peek();\nif (nextToken.kind == SyntaxKind.ON_KEYWORD) {\nonCondition = parseOnClause(isRhsExpr);\n} else {\nonCondition = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression,\nonCondition);\n}\n/**\n* Parse on clause.\n*
\n* on-clause := on expression\n*\n* @return On clause node\n*/\nprivate STNode parseOnClause(boolean isRhsExpr) {\nSTNode onKeyword = parseOnKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnClauseNode(onKeyword, expression);\n}\n/**\n* Parse start action.\n*
\n* start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)\n*\n* @return Start action node\n*/\nprivate STNode parseStartAction(STNode annots) {\nSTNode startKeyword = parseStartKeyword();\nSTNode expr = parseActionOrExpression();\nswitch (expr.kind) {\ncase FUNCTION_CALL:\ncase METHOD_CALL:\ncase REMOTE_METHOD_CALL_ACTION:\nbreak;\ndefault:\nif (!isMissingNode(expr)) {\nexpr = SyntaxErrors.addDiagnostic(expr,\nDiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);\n}\n}\nreturn STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);\n}\n/**\n* Parse start keyword.\n*\n* @return Start keyword node\n*/\nprivate STNode parseStartKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.START_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.START_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse flush action.\n*
\n* flush-action := flush [peer-worker]\n*\n* @return flush action node\n*/\nprivate STNode parseFlushAction() {\nSTNode flushKeyword = parseFlushKeyword();\nSTNode peerWorker = parseOptionalPeerWorkerName();\nreturn STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);\n}\n/**\n* Parse flush keyword.\n*\n* @return flush keyword node\n*/\nprivate STNode parseFlushKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FLUSH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FLUSH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse peer worker.\n*
\n* peer-worker := worker-name | default\n*\n* @return peer worker name node\n*/\nprivate STNode parseOptionalPeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase DEFAULT_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nreturn STNodeFactory.createEmptyNode();\n}\n}\n/**\n* Parse intersection type descriptor.\n*
\n* intersection-type-descriptor := type-descriptor & type-descriptor\n*
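\n* Illustrative example (assumed syntax): {@code readonly & int[]}.\n*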
\n*\n* @return Parsed node\n*/\nprivate STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode bitwiseAndToken = consume();\nSTNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n}\n/**\n* Parse singleton type descriptor.\n*
\n* singleton-type-descriptor := simple-const-expr\n* simple-const-expr :=\n* nil-literal\n* | boolean-literal\n* | [Sign] int-literal\n* | [Sign] floating-point-literal\n* | string-literal\n* | constant-reference-expr\n*
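\n* Illustrative example (assumed syntax): the {@code "GET"} in {@code type Method "GET";}.\n*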
\n*/\nprivate STNode parseSingletonTypeDesc() {\nSTNode simpleContExpr = parseSimpleConstExpr();\nreturn STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);\n}\nprivate STNode parseSignedIntOrFloat() {\nSTNode operator = parseUnaryOperator();\nSTNode literal;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nliteral = parseBasicLiteral();\nbreak;\ndefault:\nliteral = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL);\nliteral = STNodeFactory.createBasicLiteralNode(literal.kind, literal);\n}\nreturn STNodeFactory.createUnaryExpressionNode(operator, literal);\n}\nprivate boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {\nSTToken nextNextToken = getNextNextToken(tokenKind);\nswitch (tokenKind) {\ncase STRING_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nif (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {\nreturn true;\n}\nreturn false;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isIntOrFloat(nextNextToken);\ndefault:\nreturn false;\n}\n}\nstatic boolean isIntOrFloat(STToken token) {\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the parser reached to a valid expression start.\n*\n* @param nextTokenKind Kind of the next immediate token.\n* @param nextTokenIndex Index to the next token.\n* @return true if this is a start of a valid expression. 
false otherwise\n*/\nprivate boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {\nnextTokenIndex++;\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;\nreturn nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||\nnextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||\nisValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase IDENTIFIER_TOKEN:\nreturn isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase OPEN_PAREN_TOKEN:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase TYPEOF_KEYWORD:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\ncase TRAP_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase LT_TOKEN:\ncase FROM_KEYWORD:\ncase LET_KEYWORD:\ncase BACKTICK_TOKEN:\ncase NEW_KEYWORD:\ncase LEFT_ARROW_TOKEN:\nreturn true;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);\ncase FUNCTION_KEYWORD:\ncase TABLE_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;\ncase STREAM_KEYWORD:\nSTToken nextNextToken = peek(nextTokenIndex);\nreturn nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||\nnextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||\nnextNextToken.kind == SyntaxKind.FROM_KEYWORD;\ncase ERROR_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;\ncase SERVICE_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.OPEN_BRACE_TOKEN;\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase WAIT_KEYWORD:\ndefault:\nreturn false;\n}\n}\n/**\n* Parse sync send action.\n*
\n* sync-send-action := expression ->> peer-worker\n*\n* @param expression LHS expression of the sync send action\n* @return Sync send action node\n*/\nprivate STNode parseSyncSendAction(STNode expression) {\nSTNode syncSendToken = parseSyncSendToken();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);\n}\n/**\n* Parse peer worker.\n*
\n* peer-worker := worker-name | default\n*\n* @return peer worker name node\n*/\nprivate STNode parsePeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase DEFAULT_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nSolution sol = recover(token, ParserRuleContext.PEER_WORKER_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse sync send token.\n*
\n* sync-send-token := ->> \n*\n* @return sync send token\n*/\nprivate STNode parseSyncSendToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SYNC_SEND_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse receive action.\n*
\n* receive-action := single-receive-action | multiple-receive-action\n*\n* @return Receive action\n*/\nprivate STNode parseReceiveAction() {\nSTNode leftArrow = parseLeftArrowToken();\nSTNode receiveWorkers = parseReceiveWorkers();\nreturn STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);\n}\nprivate STNode parseReceiveWorkers() {\nreturn parseReceiveWorkers(peek().kind);\n}\nprivate STNode parseReceiveWorkers(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\ncase IDENTIFIER_TOKEN:\nreturn parsePeerWorkerName();\ncase OPEN_BRACE_TOKEN:\nreturn parseMultipleReceiveWorkers();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_WORKERS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseReceiveWorkers(solution.tokenKind);\n}\n}\n/**\n* Parse multiple worker receivers.\n*
\n* { receive-field (, receive-field)* }\n*\n* @return Multiple worker receiver node\n*/\nprivate STNode parseMultipleReceiveWorkers() {\nstartContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);\nSTNode openBrace = parseOpenBrace();\nSTNode receiveFields = parseReceiveFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);\nreturn STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);\n}\nprivate STNode parseReceiveFields() {\nList receiveFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfReceiveFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode receiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\nSTNode recieveFieldEnd;\nwhile (!isEndOfReceiveFields(nextToken.kind)) {\nrecieveFieldEnd = parseReceiveFieldEnd(nextToken.kind);\nif (recieveFieldEnd == null) {\nbreak;\n}\nreceiveFields.add(recieveFieldEnd);\nreceiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(receiveFields);\n}\nprivate boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseReceiveFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseReceiveFieldEnd(solution.tokenKind);\n}\n}\nprivate STNode parseReceiveField() {\nreturn parseReceiveField(peek().kind);\n}\n/**\n* Parse receive field.\n*

\n* receive-field := peer-worker | field-name : peer-worker\n*\n* @param nextTokenKind Kind of the next token\n* @return Receiver field node\n*/\nprivate STNode parseReceiveField(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\nreturn parseDefaultKeyword();\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);\nreturn createQualifiedReceiveField(identifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.tokenKind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn createQualifiedReceiveField(solution.recoveredNode);\n}\nreturn solution.recoveredNode;\n}\n}\nprivate STNode createQualifiedReceiveField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);\n}\n/**\n*\n* Parse left arrow (<-) token.\n*\n* @return left arrow token\n*/\nprivate STNode parseLeftArrowToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse signed right shift token (>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseSignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTToken endLGToken = consume();\nSTNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),\nendLGToken.trailingMinutiae());\nif (!validateRightShiftOperatorWS(openGTToken)) {\ndoubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);\n}\nreturn doubleGTToken;\n}\n/**\n* Parse unsigned right shift token (>>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseUnsignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTNode middleGTToken = consume();\nSTNode endLGToken = consume();\nSTNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,\nopenGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());\nboolean validOpenGTToken = validateRightShiftOperatorWS(openGTToken);\nboolean validMiddleGTToken = validateRightShiftOperatorWS(middleGTToken);\nif (validOpenGTToken && validMiddleGTToken) {\nreturn unsignedRightShiftToken;\n}\nunsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);\nreturn unsignedRightShiftToken;\n}\n/**\n* Validate the whitespace between '>' tokens of right shift operators.\n*\n* @param node Preceding node\n* @return the validated node\n*/\nprivate boolean validateRightShiftOperatorWS(STNode node) {\nint diff = node.widthWithTrailingMinutiae() - node.width();\nreturn diff == 0;\n}\n/**\n* Parse wait action.\n*
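\n* Illustrative usage (assumed Ballerina syntax; f1 and f2 are hypothetical futures):\n* int r1 = wait f1;              // single wait\n* int r2 = wait f1|f2;           // alternate wait\n* var rs = wait {a: f1, b: f2};  // multiple wait\n*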

\n* wait-action := single-wait-action | multiple-wait-action | alternate-wait-action \n*\n* @return Wait action node\n*/\nprivate STNode parseWaitAction() {\nSTNode waitKeyword = parseWaitKeyword();\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nreturn parseMultiWaitAction(waitKeyword);\n}\nreturn parseSingleOrAlternateWaitAction(waitKeyword);\n}\n/**\n* Parse wait keyword.\n*\n* @return wait keyword\n*/\nprivate STNode parseWaitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WAIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WAIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse single or alternate wait actions.\n*

\n* \n* alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+\n*
\n* wait-future-expr := expression but not mapping-constructor-expr\n*
\n*\n* @param waitKeyword wait keyword\n* @return Single or alternate wait action node\n*/\nprivate STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);\nSTToken nextToken = peek();\nif (isEndOfWaitFutureExprList(nextToken.kind)) {\nendContext();\nSTNode waitFutureExprs = STNodeFactory.createEmptyNodeList();\nwaitKeyword = cloneWithDiagnosticIfListEmpty(waitFutureExprs, waitKeyword,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);\n}\nList waitFutureExprList = new ArrayList<>();\nSTNode waitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\nSTNode waitFutureExprEnd;\nwhile (!isEndOfWaitFutureExprList(nextToken.kind)) {\nwaitFutureExprEnd = parseWaitFutureExprEnd(nextToken.kind, 1);\nif (waitFutureExprEnd == null) {\nbreak;\n}\nwaitFutureExprList.add(waitFutureExprEnd);\nwaitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));\n}\nprivate boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFutureExpr() {\nSTNode waitFutureExpr = parseActionOrExpression();\nif (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {\nwaitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,\nDiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);\n} else if (isAction(waitFutureExpr)) {\nwaitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,\nDiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);\n}\nreturn waitFutureExpr;\n}\nprivate STNode parseWaitFutureExprEnd(int nextTokenIndex) {\nreturn parseWaitFutureExprEnd(peek().kind, 1);\n}\nprivate STNode parseWaitFutureExprEnd(SyntaxKind nextTokenKind, int nextTokenIndex) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ndefault:\nif (isEndOfWaitFutureExprList(nextTokenKind) ||\n!isValidExpressionStart(nextTokenKind, nextTokenIndex)) {\nreturn null;\n}\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END, nextTokenIndex);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitFutureExprEnd(solution.tokenKind, 0);\n}\n}\n/**\n* Parse multiple wait action.\n*

\n* multiple-wait-action := wait { wait-field (, wait-field)* }\n*\n* @param waitKeyword Wait keyword\n* @return Multiple wait action node\n*/\nprivate STNode parseMultiWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.MULTI_WAIT_FIELDS);\nSTNode openBrace = parseOpenBrace();\nSTNode waitFields = parseWaitFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);\nSTNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);\n}\nprivate STNode parseWaitFields() {\nList waitFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfWaitFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode waitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\nSTNode waitFieldEnd;\nwhile (!isEndOfWaitFields(nextToken.kind)) {\nwaitFieldEnd = parseWaitFieldEnd(nextToken.kind);\nif (waitFieldEnd == null) {\nbreak;\n}\nwaitFields.add(waitFieldEnd);\nwaitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(waitFields);\n}\nprivate boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFieldEnd() {\nreturn parseWaitFieldEnd(peek().kind);\n}\nprivate STNode parseWaitFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitFieldEnd(solution.tokenKind);\n}\n}\nprivate STNode parseWaitField() {\nreturn parseWaitField(peek().kind);\n}\n/**\n* Parse wait field.\n*

\n* wait-field := variable-name | field-name : wait-future-expr\n*\n* @param nextTokenKind Kind of the next token\n* @return Wait field node\n*/\nprivate STNode parseWaitField(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);\nidentifier = STNodeFactory.createSimpleNameReferenceNode(identifier);\nreturn createQualifiedWaitField(identifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitField(solution.tokenKind);\n}\n}\nprivate STNode createQualifiedWaitField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode waitFutureExpr = parseWaitFutureExpr();\nreturn STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);\n}\n/**\n* Parse annot access expression.\n*
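\n* Illustrative usage (assumed syntax; t is a hypothetical typedesc value and v1 a hypothetical annotation tag):\n* var annotValue = t.@v1;\n*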

\n* \n* annot-access-expr := expression .@ annot-tag-reference\n*
\n* annot-tag-reference := qualified-identifier | identifier\n*
\n*\n* @param lhsExpr Preceding expression of the annot access\n* @return Parsed node\n*/\nprivate STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode annotAccessToken = parseAnnotChainingToken();\nSTNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);\n}\n/**\n* Parse annot-chaining-token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field access identifier.\n*

\n* field-access-identifier := qualified-identifier | identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {\nreturn parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);\n}\n/**\n* Parse query action.\n*
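\n* Illustrative usage (assumed syntax; total is a hypothetical variable in scope):\n* from int i in [1, 2, 3]\n* do {\n*     total += i;\n* };\n*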

\n* query-action := query-pipeline do-clause\n*
\n* do-clause := do block-stmt\n*
\n*\n* @param queryPipeline Query pipeline\n* @param selectClause Select clause, if any. This is only for validation.\n* @return Query action node\n*/\nprivate STNode parseQueryAction(STNode queryPipeline, STNode selectClause, boolean isRhsExpr) {\nif (selectClause != null) {\nqueryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,\nDiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);\n}\nstartContext(ParserRuleContext.DO_CLAUSE);\nSTNode doKeyword = parseDoKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nSTNode limitClause = parseLimitClause(isRhsExpr);\nreturn STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt, limitClause);\n}\n/**\n* Parse 'do' keyword.\n*\n* @return do keyword node\n*/\nprivate STNode parseDoKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DO_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DO_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse optional field access or xml optional attribute access expression.\n*
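\n* Illustrative usage (assumed syntax; person and x are hypothetical values):\n* string? name = person?.name; // optional field access\n* var id = x?.id;              // xml optional attribute access\n*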

\n* \n* optional-field-access-expr := expression ?. field-name\n*
\n* xml-optional-attribute-access-expr := expression ?. xml-attribute-name\n*
\n* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n*
\n* xml-qualified-name := xml-namespace-prefix : identifier\n*
\n* xml-namespace-prefix := identifier\n*
\n*\n* @param lhsExpr Preceding expression of the optional access\n* @return Parsed node\n*/\nprivate STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode optionalFieldAccessToken = parseOptionalChainingToken();\nSTNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);\n}\n/**\n* Parse optional chaining token.\n*\n* @return parsed node\n*/\nprivate STNode parseOptionalChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse conditional expression.\n*
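\n* Illustrative usage (assumed syntax): int max = a > b ? a : b;\n* When the middle and end expressions arrive as a single qualified name, e.g. a:b in\n* flag ? a : b, the implementation below splits that qualified name reference back into\n* the two expressions around the colon.\n*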

\n* conditional-expr := expression ? expression : expression\n*\n* @param lhsExpr Preceding expression of the question mark\n* @return Parsed node\n*/\nprivate STNode parseConditionalExpression(STNode lhsExpr) {\nstartContext(ParserRuleContext.CONDITIONAL_EXPRESSION);\nSTNode questionMark = parseQuestionMark();\nSTNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);\nSTNode nextToken = peek();\nSTNode endExpr;\nSTNode colon;\nif (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;\nmiddleExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.modulePrefix);\ncolon = qualifiedNameRef.colon;\nendContext();\nendExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);\n} else {\ncolon = parseColon();\nendContext();\nendExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);\n}\nreturn STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);\n}\n/**\n* Parse enum declaration.\n*
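\n* Illustrative usage (assumed syntax):\n* public enum Language {\n*     ENG = \"English\",\n*     TA = \"Tamil\",\n*     SI\n* }\n*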

\n* module-enum-decl :=\n* metadata\n* [public] enum identifier { enum-member (, enum-member)* }\n* enum-member := metadata identifier [= const-expr]\n*

\n*\n* @param metadata\n* @param qualifier\n*\n* @return Parsed enum node.\n*/\nprivate STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.MODULE_ENUM_DECLARATION);\nSTNode enumKeywordToken = parseEnumKeyword();\nSTNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);\nSTNode openBraceToken = parseOpenBrace();\nSTNode enumMemberList = parseEnumMemberList();\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nopenBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,\nDiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);\nreturn STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,\nopenBraceToken, enumMemberList, closeBraceToken);\n}\n/**\n* Parse 'enum' keyword.\n*\n* @return enum keyword node\n*/\nprivate STNode parseEnumKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ENUM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ENUM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse enum member list.\n*

\n* enum-member := metadata identifier [= const-expr]\n*

\n*\n* @return enum member list node.\n*/\nprivate STNode parseEnumMemberList() {\nstartContext(ParserRuleContext.ENUM_MEMBER_LIST);\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList enumMemberList = new ArrayList<>();\nSTNode enumMember = parseEnumMember();\nnextToken = peek();\nSTNode enumMemberRhs;\nwhile (nextToken.kind != SyntaxKind.CLOSE_BRACE_TOKEN) {\nenumMemberRhs = parseEnumMemberEnd(nextToken.kind);\nif (enumMemberRhs == null) {\nbreak;\n}\nenumMemberList.add(enumMember);\nenumMemberList.add(enumMemberRhs);\nenumMember = parseEnumMember();\nnextToken = peek();\n}\nenumMemberList.add(enumMember);\nendContext();\nreturn STNodeFactory.createNodeList(enumMemberList);\n}\n/**\n* Parse enum member.\n*

\n* enum-member := metadata identifier [= const-expr]\n*

\n*\n* @return Parsed enum member node.\n*/\nprivate STNode parseEnumMember() {\nSTToken nextToken = peek();\nSTNode metadata;\nswitch (nextToken.kind) {\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextToken.kind);\nbreak;\ndefault:\nmetadata = STNodeFactory.createEmptyNode();\n}\nSTNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);\nreturn parseEnumMemberRhs(metadata, identifierNode);\n}\nprivate STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {\nreturn parseEnumMemberRhs(peek().kind, metadata, identifierNode);\n}\nprivate STNode parseEnumMemberRhs(SyntaxKind nextToken, STNode metadata, STNode identifierNode) {\nSTNode equalToken, constExprNode;\nswitch (nextToken) {\ncase EQUAL_TOKEN:\nequalToken = parseAssignOp();\nconstExprNode = parseExpression();\nbreak;\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nequalToken = STNodeFactory.createEmptyNode();\nconstExprNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseEnumMemberRhs(solution.tokenKind, metadata, identifierNode);\n}\nreturn STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);\n}\nprivate STNode parseEnumMemberEnd() {\nreturn parseEnumMemberEnd(peek().kind);\n}\nprivate STNode parseEnumMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseEnumMemberEnd(solution.tokenKind);\n}\n}\n/**\n* Parse transaction statement.\n*

\n* transaction-stmt := \"transaction\" block-stmt ;\n*\n* @return Transaction statement node\n*/\nprivate STNode parseTransactionStatement() {\nstartContext(ParserRuleContext.TRANSACTION_STMT);\nSTNode transactionKeyword = parseTransactionKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nreturn STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt);\n}\n/**\n* Parse transaction keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseTransactionKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TRANSACTION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse commit action.\n*

\n* commit-action := \"commit\"\n*\n* @return Commit action node\n*/\nprivate STNode parseCommitAction() {\nSTNode commitKeyword = parseCommitKeyword();\nreturn STNodeFactory.createCommitActionNode(commitKeyword);\n}\n/**\n* Parse commit keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseCommitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COMMIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMMIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse retry statement.\n*

\n* \n* retry-stmt := \"retry\" retry-spec block-stmt\n*
\n* retry-spec := [type-parameter] [ \"(\" arg-list \")\" ]\n*
\n*\n* @return Retry statement node\n*/\nprivate STNode parseRetryStatement() {\nstartContext(ParserRuleContext.RETRY_STMT);\nSTNode retryKeyword = parseRetryKeyword();\nSTNode retryStmt = parseRetryKeywordRhs(retryKeyword);\nendContext();\nreturn retryStmt;\n}\nprivate STNode parseRetryKeywordRhs(STNode retryKeyword) {\nreturn parseRetryKeywordRhs(peek().kind, retryKeyword);\n}\nprivate STNode parseRetryKeywordRhs(SyntaxKind nextTokenKind, STNode retryKeyword) {\nswitch (nextTokenKind) {\ncase LT_TOKEN:\nSTNode typeParam = parseTypeParameter();\nreturn parseRetryTypeParamRhs(retryKeyword, typeParam);\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\ntypeParam = STNodeFactory.createEmptyNode();\nreturn parseRetryTypeParamRhs(nextTokenKind, retryKeyword, typeParam);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRetryKeywordRhs(solution.tokenKind, retryKeyword);\n}\n}\nprivate STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {\nreturn parseRetryTypeParamRhs(peek().kind, retryKeyword, typeParam);\n}\nprivate STNode parseRetryTypeParamRhs(SyntaxKind nextTokenKind, STNode retryKeyword, STNode typeParam) {\nSTNode args;\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nargs = parseParenthesizedArgList();\nbreak;\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\nargs = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);\nreturn parseRetryTypeParamRhs(solution.tokenKind, retryKeyword, typeParam);\n}\nSTNode blockStmt = parseRetryBody();\nreturn STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt);\n}\nprivate STNode parseRetryBody() {\nreturn parseRetryBody(peek().kind);\n}\nprivate STNode parseRetryBody(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\nreturn parseBlockNode();\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionStatement();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_BODY);\nreturn parseRetryBody(solution.tokenKind);\n}\n}\n/**\n* Parse retry keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseRetryKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETRY_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETRY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse rollback statement.\n*

\n* rollback-stmt := \"rollback\" [expression] \";\"\n*\n* @return Rollback statement node\n*/\nprivate STNode parseRollbackStatement() {\nstartContext(ParserRuleContext.ROLLBACK_STMT);\nSTNode rollbackKeyword = parseRollbackKeyword();\nSTNode expression;\nif (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {\nexpression = STNodeFactory.createEmptyNode();\n} else {\nexpression = parseExpression();\n}\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);\n}\n/**\n* Parse rollback keyword.\n*\n* @return Rollback keyword node\n*/\nprivate STNode parseRollbackKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ROLLBACK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse transactional expression.\n*

\n* transactional-expr := \"transactional\"\n*\n* @return Transactional expression node\n*/\nprivate STNode parseTransactionalExpression() {\nSTNode transactionalKeyword = parseTransactionalKeyword();\nreturn STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);\n}\n/**\n* Parse transactional keyword.\n*\n* @return Transactional keyword node\n*/\nprivate STNode parseTransactionalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ROLLBACK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse service-constructor-expr.\n*

\n* \n* service-constructor-expr := [annots] service service-body-block\n*
\n* service-body-block := { service-method-defn* }\n*
\n* service-method-defn := metadata [resource] function identifier function-signature method-defn-body\n*
\n*\n* @param annots Annotations\n* @return Service constructor expression node\n*/\nprivate STNode parseServiceConstructorExpression(STNode annots) {\nstartContext(ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION);\nSTNode serviceKeyword = parseServiceKeyword();\nSTNode serviceBody = parseServiceBody();\nendContext();\nreturn STNodeFactory.createServiceConstructorExpressionNode(annots, serviceKeyword, serviceBody);\n}\n/**\n* Parse byte array literal.\n*
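\n* Illustrative usage (assumed syntax):\n* byte[] b16 = base16 `AA BB 0F`;\n* byte[] b64 = base64 `SGVsbG8gd29ybGQ=`;\n*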

\n* \n* byte-array-literal := Base16Literal | Base64Literal\n*
\n* Base16Literal := base16 WS ` HexGroup* WS `\n*
\n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*
\n*\n* @param kind byte array literal kind\n* @return parsed node\n*/\nprivate STNode parseByteArrayLiteral(SyntaxKind kind) {\nSTNode type;\nif (kind == SyntaxKind.BASE16_KEYWORD) {\ntype = parseBase16Keyword();\n} else {\ntype = parseBase64Keyword();\n}\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseByteArrayContent(kind);\nreturn parseByteArrayLiteral(kind, type, startingBackTick, content);\n}\n/**\n* Parse byte array literal.\n*\n* @param baseKind indicates the SyntaxKind base16 or base64\n* @param typeKeyword keyword token, possible values are `base16` and `base64`\n* @param startingBackTick starting backtick token\n* @param byteArrayContent byte array literal content to be validated\n* @return parsed byte array literal node\n*/\nprivate STNode parseByteArrayLiteral(SyntaxKind baseKind, STNode typeKeyword, STNode startingBackTick,\nSTNode byteArrayContent) {\nSTNode content = STNodeFactory.createEmptyNode();\nSTNode newStartingBackTick = startingBackTick;\nSTNodeList items = (STNodeList) byteArrayContent;\nif (items.size() == 1) {\nSTNode item = items.get(0);\nif (baseKind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (baseKind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (item.kind != SyntaxKind.TEMPLATE_STRING) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else {\ncontent = item;\n}\n} else if (items.size() > 1) {\nSTNode clonedStartingBackTick = startingBackTick;\nfor (int index = 0; index < items.size(); index++) {\nSTNode item = items.get(index);\nclonedStartingBackTick =\nSyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);\n}\nnewStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n}\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);\n}\n/**\n* Parse base16 keyword.\n*\n* @return base16 keyword node\n*/\nprivate STNode parseBase16Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE16_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BASE16_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse base64 keyword.\n*\n* @return base64 keyword node\n*/\nprivate STNode parseBase64Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE64_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BASE64_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Validate and parse byte array literal content.\n* An error is reported, if the content is invalid.\n*\n* @param kind byte array literal kind\n* @return parsed node\n*/\nprivate STNode parseByteArrayContent(SyntaxKind kind) {\nSTToken nextToken = peek();\nList items = new ArrayList<>();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode content = 
parseTemplateItem();\nitems.add(content);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\n/**\n* Validate base16 literal content.\n*

\n* \n* Base16Literal := base16 WS ` HexGroup* WS `\n*
\n* HexGroup := WS HexDigit WS HexDigit\n*
\n* WS := WhiteSpaceChar*\n*
\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase16LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint hexDigitCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ndefault:\nif (isHexDigit(c)) {\nhexDigitCount++;\n} else {\nreturn false;\n}\nbreak;\n}\n}\nreturn hexDigitCount % 2 == 0;\n}\n/**\n* Validate base64 literal content.\n*
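\n* Worked example (illustrative): the content \"SGVsbG8=\" has 7 base64 chars and 1 padding\n* char; it is accepted because 7 % 4 == 3 == 4 - 1, matching the check below.\n*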

\n* \n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*
\n* Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char\n*
\n* PaddedBase64Group :=\n* WS Base64Char WS Base64Char WS Base64Char WS PaddingChar\n* | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar\n*
\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*
\n* PaddingChar := =\n*
\n* WS := WhiteSpaceChar*\n*
\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase64LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint base64CharCount = 0;\nint paddingCharCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ncase LexerTerminals.EQUAL:\npaddingCharCount++;\nbreak;\ndefault:\nif (isBase64Char(c)) {\nif (paddingCharCount == 0) {\nbase64CharCount++;\n} else {\nreturn false;\n}\n} else {\nreturn false;\n}\nbreak;\n}\n}\nif (paddingCharCount > 2) {\nreturn false;\n} else if (paddingCharCount == 0) {\nreturn base64CharCount % 4 == 0;\n} else {\nreturn base64CharCount % 4 == 4 - paddingCharCount;\n}\n}\n/**\n*

\n* Check whether a given char is a base64 char.\n*

\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*\n* @param c character to check\n* @return true, if the character represents a base64 char. false otherwise.\n*/\nstatic boolean isBase64Char(int c) {\nif ('a' <= c && c <= 'z') {\nreturn true;\n}\nif ('A' <= c && c <= 'Z') {\nreturn true;\n}\nif (c == '+' || c == '/') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isHexDigit(int c) {\nif ('a' <= c && c <= 'f') {\nreturn true;\n}\nif ('A' <= c && c <= 'F') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isDigit(int c) {\nreturn ('0' <= c && c <= '9');\n}\n/**\n* Parse xml filter expression.\n*
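\n* Illustrative usage (assumed syntax; x is a hypothetical xml value):\n* x.<item>          // elements named item\n* x.<ns:item|other> // elements named ns:item or other\n*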

\n* xml-filter-expr := expression .< xml-name-pattern >\n*\n* @param lhsExpr Preceding expression of .< token\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpression(STNode lhsExpr) {\nSTNode xmlNamePatternChain = parseXMLFilterExpressionRhs();\nreturn STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);\n}\n/**\n* Parse xml filter expression rhs.\n*

\n* filter-expression-rhs := .< xml-name-pattern >\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpressionRhs() {\nSTNode dotLTToken = parseDotLTToken();\nreturn parseXMLNamePatternChain(dotLTToken);\n}\n/**\n* Parse xml name pattern chain.\n*

\n* \n* xml-name-pattern-chain := filter-expression-rhs | xml-element-children-step | xml-element-descendants-step\n*
\n* filter-expression-rhs := .< xml-name-pattern >\n*
\n* xml-element-children-step := /< xml-name-pattern >\n*
\n* xml-element-descendants-step := /**\\/\n*
\n*\n* @param startToken Preceding token of xml name pattern\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePatternChain(STNode startToken) {\nstartContext(ParserRuleContext.XML_NAME_PATTERN);\nSTNode xmlNamePattern = parseXMLNamePattern();\nSTNode gtToken = parseGTToken();\nendContext();\nstartToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,\nDiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);\nreturn STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);\n}\n/**\n* Parse .< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDotLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse xml name pattern.\n*

\n* xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePattern() {\nList xmlAtomicNamePatternList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfXMLNamePattern(nextToken.kind)) {\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nSTNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\nSTNode separator;\nwhile (!isEndOfXMLNamePattern(peek().kind)) {\nseparator = parseXMLNamePatternSeparator();\nif (separator == null) {\nbreak;\n}\nxmlAtomicNamePatternList.add(separator);\nxmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\n}\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nprivate boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ncase IDENTIFIER_TOKEN:\ncase ASTERISK_TOKEN:\ncase COLON_TOKEN:\ndefault:\nreturn false;\n}\n}\nprivate STNode parseXMLNamePatternSeparator() {\nSTToken token = peek();\nswitch (token.kind) {\ncase PIPE_TOKEN:\nreturn consume();\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution sol = recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);\nif (sol.tokenKind == SyntaxKind.GT_TOKEN) {\nreturn null;\n}\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse xml atomic name pattern.\n*

\n* \n* xml-atomic-name-pattern :=\n* *\n* | identifier\n* | xml-namespace-prefix : identifier\n* | xml-namespace-prefix : *\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseXMLAtomicNamePattern() {\nstartContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);\nSTNode atomicNamePattern = parseXMLAtomicNamePatternBody();\nendContext();\nreturn atomicNamePattern;\n}\nprivate STNode parseXMLAtomicNamePatternBody() {\nSTToken token = peek();\nSTNode identifier;\nswitch (token.kind) {\ncase ASTERISK_TOKEN:\nreturn consume();\ncase IDENTIFIER_TOKEN:\nidentifier = consume();\nbreak;\ndefault:\nSolution sol = recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nif (sol.recoveredNode.kind == SyntaxKind.ASTERISK_TOKEN) {\nreturn sol.recoveredNode;\n}\nidentifier = sol.recoveredNode;\nbreak;\n}\nreturn parseXMLAtomicNameIdentifier(identifier);\n}\nprivate STNode parseXMLAtomicNameIdentifier(STNode identifier) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COLON_TOKEN) {\nSTNode colon = consume();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nSTToken endToken = consume();\nreturn STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);\n}\n}\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\n/**\n* Parse xml step expression.\n*

\n* xml-step-expr := expression xml-step-start\n*\n* @param lhsExpr Preceding expression of /*, /<, or /**\\/< token\n* @return Parsed node\n*/\nprivate STNode parseXMLStepExpression(STNode lhsExpr) {\nSTNode xmlStepStart = parseXMLStepStart();\nreturn STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);\n}\n/**\n* Parse xml step start.\n*
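\n* Illustrative usage (assumed syntax; x is a hypothetical xml value):\n* x/*           // all children\n* x/<item>      // element children named item\n* x/**\\/<item>  // descendants named item\n*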

\n* \n* xml-step-start :=\n* xml-all-children-step\n* | xml-element-children-step\n* | xml-element-descendants-step\n*
\n* xml-all-children-step := /*\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLStepStart() {\nSTToken token = peek();\nSTNode startToken;\nswitch (token.kind) {\ncase SLASH_ASTERISK_TOKEN:\nreturn consume();\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\nstartToken = parseDoubleSlashDoubleAsteriskLTToken();\nbreak;\ncase SLASH_LT_TOKEN:\ndefault:\nstartToken = parseSlashLTToken();\nbreak;\n}\nreturn parseXMLNamePatternChain(startToken);\n}\n/**\n* Parse /< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseSlashLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse /**\\/< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDoubleSlashDoubleAsteriskLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse match statement.\n*
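\n* Illustrative usage (assumed syntax; v and handle are hypothetical):\n* match v {\n*     1|2 => { io:println(\"small\"); }\n*     var [a, b] if a > 0 => { handle(a, b); }\n*     _ => {}\n* }\n*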

\n* match-stmt := match action-or-expr { match-clause+ }\n*\n* @return Match statement\n*/\nprivate STNode parseMatchStatement() {\nstartContext(ParserRuleContext.MATCH_STMT);\nSTNode matchKeyword = parseMatchKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nstartContext(ParserRuleContext.MATCH_BODY);\nSTNode openBrace = parseOpenBrace();\nSTNode matchClauses = parseMatchClauses();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nendContext();\nreturn STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace);\n}\n/**\n* Parse match keyword.\n*\n* @return Match keyword node\n*/\nprivate STNode parseMatchKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.MATCH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse match clauses list.\n*\n* @return Match clauses list\n*/\nprivate STNode parseMatchClauses() {\nList<STNode> matchClauses = new ArrayList<>();\nwhile (!isEndOfMatchClauses(peek().kind)) {\nSTNode clause = parseMatchClause();\nmatchClauses.add(clause);\n}\nreturn STNodeFactory.createNodeList(matchClauses);\n}\nprivate boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse a single match clause.\n*

\n* \n* match-clause := match-pattern-list [match-guard] => block-stmt\n*
\n* match-guard := if expression\n*
\n*\n* @return A match clause\n*/\nprivate STNode parseMatchClause() {\nSTNode matchPatterns = parseMatchPatternList();\nSTNode matchGuard = parseMatchGuard();\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode blockStmt = parseBlockNode();\nreturn STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);\n}\n/**\n* Parse match guard.\n*

\n* match-guard := if expression\n*\n* @return Match guard\n*/\nprivate STNode parseMatchGuard() {\nSTToken nextToken = peek();\nreturn parseMatchGuard(nextToken.kind);\n}\nprivate STNode parseMatchGuard(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IF_KEYWORD:\nSTNode ifKeyword = parseIfKeyword();\nSTNode expr = parseExpression(peek().kind, DEFAULT_OP_PRECEDENCE, true, false, true, false);\nreturn STNodeFactory.createMatchGuardNode(ifKeyword, expr);\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchGuard(solution.tokenKind);\n}\n}\n/**\n* Parse match patterns list.\n*

\n* match-pattern-list := match-pattern (| match-pattern)*\n*\n* @return Match patterns list\n*/\nprivate STNode parseMatchPatternList() {\nstartContext(ParserRuleContext.MATCH_PATTERN);\nList<STNode> matchClauses = new ArrayList<>();\nwhile (!isEndOfMatchPattern(peek().kind)) {\nSTNode clause = parseMatchPattern();\nif (clause == null) {\nbreak;\n}\nmatchClauses.add(clause);\nSTNode separator = parseMatchPatternEnd();\nif (separator == null) {\nbreak;\n}\nmatchClauses.add(separator);\n}\nendContext();\nreturn STNodeFactory.createNodeList(matchClauses);\n}\nprivate boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse match pattern.\n*
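\n* Illustrative pattern examples (assumed syntax):\n* 1 | \"one\"         // const patterns\n* var [a, b]        // var binding pattern over a list\n* {code: var c}     // mapping match pattern\n* error(var msg)    // functional match pattern\n*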

\n* \n* match-pattern := var binding-pattern\n* | wildcard-match-pattern\n* | const-pattern\n* | list-match-pattern\n* | mapping-match-pattern\n* | functional-match-pattern\n* \n*\n* @return Match pattern\n*/\nprivate STNode parseMatchPattern() {\nSTToken nextToken = peek();\nreturn parseMatchPattern(nextToken.kind);\n}\nprivate STNode parseMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_LITERAL:\nreturn parseSimpleConstExpr();\ncase IDENTIFIER_TOKEN:\nSTNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);\nreturn parseFunctionalMatchPatternOrConsPattern(typeRefOrConstExpr);\ncase VAR_KEYWORD:\nreturn parseVarTypedBindingPattern();\ncase OPEN_BRACKET_TOKEN:\nreturn parseListMatchPattern();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingMatchPattern();\ncase ERROR_KEYWORD:\nreturn parseFunctionalMatchPattern(consume());\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MATCH_PATTERN_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchPattern(solution.tokenKind);\n}\n}\nprivate STNode parseMatchPatternEnd() {\nSTToken nextToken = peek();\nreturn parseMatchPatternEnd(nextToken.kind);\n}\nprivate STNode parseMatchPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ncase IF_KEYWORD:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MATCH_PATTERN_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchPatternEnd(solution.tokenKind);\n}\n}\n/**\n* Parse var typed binding pattern.\n*

\n* var binding-pattern\n*

\n*\n* @return Parsed typed binding pattern node\n*/\nprivate STNode parseVarTypedBindingPattern() {\nSTNode varKeyword = parseVarKeyword();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createTypedBindingPatternNode(varKeyword, bindingPattern);\n}\n/**\n* Parse var keyword.\n*\n* @return Var keyword node\n*/\nprivate STNode parseVarKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.VAR_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.VAR_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse list match pattern.\n*

\n* \n* list-match-pattern := [ list-member-match-patterns ]\n* list-member-match-patterns :=\n* match-pattern (, match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n* \n*

\n*\n* @return Parsed list match pattern node\n*/\nprivate STNode parseListMatchPattern() {\nstartContext(ParserRuleContext.LIST_MATCH_PATTERN);\nSTNode openBracketToken = parseOpenBracket();\nList matchPatternList = new ArrayList<>();\nSTNode restMatchPattern = null;\nwhile (!isEndOfListMatchPattern()) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {\nrestMatchPattern = parseRestMatchPattern();\nbreak;\n}\nSTNode matchPatternListMember = parseMatchPattern();\nmatchPatternList.add(matchPatternListMember);\nSTNode matchPatternMemberRhs = parseListMatchPatternMemberRhs();\nif (matchPatternMemberRhs != null) {\nmatchPatternList.add(matchPatternMemberRhs);\n} else {\nbreak;\n}\n}\nif (restMatchPattern == null) {\nrestMatchPattern = STNodeFactory.createEmptyNode();\n}\nSTNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, restMatchPattern,\ncloseBracketToken);\n}\npublic boolean isEndOfListMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse rest match pattern.\n*

\n* \n* rest-match-pattern := ... var variable-name\n* \n*

\n*\n* @return Parsed rest match pattern node\n*/\nprivate STNode parseRestMatchPattern() {\nstartContext(ParserRuleContext.REST_MATCH_PATTERN);\nSTNode ellipsisToken = parseEllipsis();\nSTNode varKeywordToken = parseVarKeyword();\nSTNode variableName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);\nreturn STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);\n}\nprivate STNode parseListMatchPatternMemberRhs() {\nreturn parseListMatchPatternMemberRhs(peek().kind);\n}\nprivate STNode parseListMatchPatternMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListMatchPatternMemberRhs(solution.tokenKind);\n}\n}\n/**\n* Parse mapping match pattern.\n*

\n* mapping-match-pattern := { field-match-patterns }\n*
\n* field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n*
\n* field-match-pattern := field-name : match-pattern\n*
\n* rest-match-pattern := ... var variable-name\n*

\n*\n* @return Parsed node.\n*/\nprivate STNode parseMappingMatchPattern() {\nstartContext(ParserRuleContext.MAPPING_MATCH_PATTERN);\nSTNode openBraceToken = parseOpenBrace();\nList<STNode> fieldMatchPatternList = new ArrayList<>();\nSTNode restMatchPattern = null;\nboolean isEndOfFields = false;\nwhile (!isEndOfMappingMatchPattern()) {\nSTNode fieldMatchPatternMember = parseFieldMatchPatternMember();\nif (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {\nrestMatchPattern = fieldMatchPatternMember;\nisEndOfFields = true;\nbreak;\n}\nfieldMatchPatternList.add(fieldMatchPatternMember);\nSTNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();\nif (fieldMatchPatternRhs != null) {\nfieldMatchPatternList.add(fieldMatchPatternRhs);\n} else {\nbreak;\n}\n}\nSTNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();\nwhile (isEndOfFields && fieldMatchPatternRhs != null) {\nSTNode invalidField = parseFieldMatchPatternMember();\nrestMatchPattern =\nSyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, fieldMatchPatternRhs);\nrestMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, invalidField);\nrestMatchPattern = SyntaxErrors.addDiagnostic(restMatchPattern,\nDiagnosticErrorCode.ERROR_MORE_FIELD_MATCH_PATTERNS_AFTER_REST_FIELD);\nfieldMatchPatternRhs = parseFieldMatchPatternRhs();\n}\nif (restMatchPattern == null) {\nrestMatchPattern = STNodeFactory.createEmptyNode();\n}\nSTNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, restMatchPattern,\ncloseBraceToken);\n}\nprivate STNode parseFieldMatchPatternMember() {\nreturn parseFieldMatchPatternMember(peek().kind);\n}\nprivate STNode parseFieldMatchPatternMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseFieldMatchPattern();\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldMatchPatternMember(solution.tokenKind);\n}\n}\n/**\n* Parse field match pattern.\n*

\n* field-match-pattern := field-name : match-pattern\n*

\n*\n* @return Parsed field match pattern node\n*/\npublic STNode parseFieldMatchPattern() {\nSTNode fieldNameNode = parseVariableName();\nSTNode colonToken = parseColon();\nSTNode matchPattern = parseMatchPattern();\nreturn STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);\n}\npublic boolean isEndOfMappingMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseFieldMatchPatternRhs() {\nreturn parseFieldMatchPatternRhs(peek().kind);\n}\nprivate STNode parseFieldMatchPatternRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldMatchPatternRhs(solution.tokenKind);\n}\n}\nprivate STNode parseFunctionalMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {\nreturn parseFunctionalMatchPatternOrConsPattern(peek().kind, typeRefOrConstExpr);\n}\nprivate STNode parseFunctionalMatchPatternOrConsPattern(SyntaxKind nextToken, STNode typeRefOrConstExpr) {\nswitch (nextToken) {\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalMatchPattern(typeRefOrConstExpr);\ndefault:\nif (isMatchPatternEnd(peek().kind)) {\nreturn typeRefOrConstExpr;\n}\nSolution solution = recover(peek(), ParserRuleContext.FUNC_MATCH_PATTERN_OR_CONST_PATTERN,\ntypeRefOrConstExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFunctionalMatchPatternOrConsPattern(solution.tokenKind, typeRefOrConstExpr);\n}\n}\nprivate boolean isMatchPatternEnd(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse functional match pattern.\n*

\n* functional-match-pattern := functionally-constructible-type-reference ( arg-list-match-pattern )\n*
\n* functionally-constructible-type-reference := error | type-reference\n*
\n* type-reference := identifier | qualified-identifier\n*
\n* arg-list-match-pattern := positional-arg-match-patterns [, other-arg-match-patterns]\n* | other-arg-match-patterns\n*

\n*\n* @return Parsed functional match pattern node.\n*/\nprivate STNode parseFunctionalMatchPattern(STNode typeRef) {\nstartContext(ParserRuleContext.FUNCTIONAL_MATCH_PATTERN);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode argListMatchPatternNode = parseArgListMatchPatterns();\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createFunctionalMatchPatternNode(typeRef, openParenthesisToken, argListMatchPatternNode,\ncloseParenthesisToken);\n}\nprivate STNode parseArgListMatchPatterns() {\nList argListMatchPatterns = new ArrayList<>();\nSyntaxKind lastValidArgKind = SyntaxKind.IDENTIFIER_TOKEN;\nwhile (!isEndOfFunctionalMatchPattern()) {\nSTNode currentArg = parseArgMatchPattern();\nDiagnosticErrorCode errorCode = validateArgMatchPatternOrder(lastValidArgKind, currentArg.kind);\nif (errorCode == null) {\nargListMatchPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);\n}\nSTNode argRhs = parseArgMatchPatternRhs();\nif (argRhs == null) {\nbreak;\n}\nif (errorCode == null) {\nargListMatchPatterns.add(argRhs);\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, argRhs, null);\n}\n}\nreturn STNodeFactory.createNodeList(argListMatchPatterns);\n}\nprivate boolean isEndOfFunctionalMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse arg match patterns.\n* \n* arg-match-pattern := match-pattern | named-arg-match-pattern | rest-match-pattern\n* \n*

\n*\n* @return parsed arg match pattern node.\n*/\nprivate STNode parseArgMatchPattern() {\nreturn parseArgMatchPattern(peek().kind);\n}\nprivate STNode parseArgMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseNamedOrPositionalArgMatchPattern();\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_LITERAL:\ncase VAR_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseMatchPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgMatchPattern(solution.tokenKind);\n}\n}\nprivate STNode parseNamedOrPositionalArgMatchPattern() {\nSTNode identifier = parseIdentifier(ParserRuleContext.MATCH_PATTERN_START);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nreturn parseNamedArgMatchPattern(identifier);\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalMatchPattern(identifier);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn identifier;\n}\n}\n/**\n* Parses the next named arg match pattern.\n*
\n* named-arg-match-pattern := arg-name = match-pattern\n*

\n*\n* @return arg match pattern list node added the new arg match pattern\n*/\nprivate STNode parseNamedArgMatchPattern(STNode identifier) {\nstartContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);\nSTNode equalToken = parseAssignOp();\nSTNode matchPattern = parseMatchPattern();\nendContext();\nreturn STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);\n}\nprivate STNode parseArgMatchPatternRhs() {\nreturn parseArgMatchPatternRhs(peek().kind);\n}\nprivate STNode parseArgMatchPatternRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgMatchPatternRhs(solution.tokenKind);\n}\n}\nprivate DiagnosticErrorCode validateArgMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase NAMED_ARG_MATCH_PATTERN:\nif (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&\ncurrentArgKind != SyntaxKind.REST_MATCH_PATTERN) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_MATCH_PATTERN:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nbreak;\n}\nreturn errorCode;\n}\n/**\n* Parse markdown documentation.\n*\n* @return markdown documentation node\n*/\nprivate STNode parseMarkdownDocumentation() {\nList markdownDocLineList = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {\nSTToken documentationString = consume();\nSTNode markdownDocLines = parseDocumentationString(documentationString);\nmarkdownDocLineList.add(markdownDocLines);\nnextToken = peek();\n}\nSTNode arrangedMarkdownDocLines = rearrangeMarkdownDocumentationLines(markdownDocLineList);\nreturn STNodeFactory.createMarkdownDocumentationNode(arrangedMarkdownDocLines);\n}\n/**\n* Parse documentation string.\n*\n* @return markdown documentation line list node\n*/\nprivate STNode parseDocumentationString(STToken documentationStringToken) {\nList leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());\nTextDocument textDocument = TextDocuments.from(documentationStringToken.text());\nDocumentationLexer documentationLexer = new DocumentationLexer(textDocument.getCharacterReader(),\nleadingTriviaList);\nAbstractTokenReader tokenReader = new TokenReader(documentationLexer);\nDocumentationParser documentationParser = new DocumentationParser(tokenReader);\nreturn documentationParser.parse();\n}\nprivate List getLeadingTriviaList(STNode leadingMinutiaeNode) {\nList leadingTriviaList = new ArrayList<>();\nint bucketCount = leadingMinutiaeNode.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nleadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));\n}\nreturn leadingTriviaList;\n}\nprivate STNode rearrangeMarkdownDocumentationLines(List markdownDocLineList) {\nList arrangedDocLines = new ArrayList<>();\nfor (STNode markdownDocLines : markdownDocLineList) {\nint bucketCount = markdownDocLines.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nSTNode markdownDocLine = markdownDocLines.childInBucket(i);\narrangedDocLines.add(markdownDocLine);\n}\n}\nreturn STNodeFactory.createNodeList(arrangedDocLines);\n}\n/**\n* Parse any statement that starts with a token that has ambiguity between 
being\n* a type-desc or an expression.\n*\n* @param annots Annotations\n* @return Statement node\n*/\nprivate STNode parseStmtStartsWithTypeOrExpr(SyntaxKind nextTokenKind, STNode annots) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode typeOrExpr = parseTypedBindingPatternOrExpr(nextTokenKind, true);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);\n}\nprivate STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {\nif (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, finalKeyword, typedBindingPatternOrExpr, false);\n}\nSTNode expr = getExpression(typedBindingPatternOrExpr);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrExpr(nextToken.kind, allowAssignment);\n}\nprivate STNode parseTypedBindingPatternOrExpr(SyntaxKind nextTokenKind, boolean allowAssignment) {\nSTNode typeOrExpr;\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseTypedBPOrExprStartsWithOpenParenthesis();\ncase FUNCTION_KEYWORD:\nreturn parseAnonFuncExprOrTypedBPWithFuncType();\ncase IDENTIFIER_TOKEN:\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase OPEN_BRACKET_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseActionOrExpressionInLhs(nextTokenKind, null);\n}\nreturn parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);\n}\n}\n/**\n* Parse the component after the ambiguous starting node. Ambiguous node could be either an expr\n* or a type-desc. The component followed by this ambiguous node could be the binding-pattern or\n* the expression-rhs.\n*\n* @param typeOrExpr Type desc or the expression\n* @param allowAssignment Flag indicating whether to allow assignment. 
i.e.: whether this is a\n* valid lvalue expression\n* @return Typed-binding-pattern node or an expression node\n*/\nprivate STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrExprRhs(nextToken.kind, typeOrExpr, allowAssignment);\n}\nprivate STNode parseTypedBindingPatternOrExprRhs(SyntaxKind nextTokenKind, STNode typeOrExpr,\nboolean allowAssignment) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc =\nSTNodeFactory.createUnionTypeDescriptorNode(typeOrExpr, pipe, typedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypedBPOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(typeOrExpr, ampersand,\ntypedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypedBPOrExpr);\ncase SEMICOLON_TOKEN:\nif (isDefiniteExpr(typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\nif (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase EQUAL_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,\nParserRuleContext.AMBIGUOUS_STMT);\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextTokenKind, typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nSTToken token = peek();\nSolution solution =\nrecover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternOrExprRhs(solution.tokenKind, typeOrExpr, allowAssignment);\n}\n}\nprivate STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) 
{\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\ntypeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);\nendContext();\nreturn parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\nprivate STNode parseTypedBPOrExprStartsWithOpenParenthesis() {\nSTNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();\nif (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);\n}\nreturn parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);\n}\nprivate boolean isDefiniteTypeDesc(SyntaxKind kind) {\nreturn kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;\n}\nprivate boolean isDefiniteExpr(SyntaxKind kind) {\nif (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn false;\n}\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n}\n/**\n* Parse type or expression that starts with open parenthesis. Possible options are:\n* 1) () - nil type-desc or nil-literal\n* 2) (T) - Parenthesized type-desc\n* 3) (expr) - Parenthesized expression\n* 4) (param, param, ..) - Anon function params\n*\n* @return Type-desc or expression node\n*/\nprivate STNode parseTypedDescOrExprStartsWithOpenParenthesis() {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nSTNode closeParen = parseCloseParenthesis();\nreturn parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);\n}\nSTNode typeOrExpr = parseTypeDescOrExpr();\nif (isAction(typeOrExpr)) {\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,\ncloseParen);\n}\nif (isExpression(typeOrExpr.kind)) {\nstartContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\nreturn parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, typeOrExpr, false);\n}\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeOrExpr, closeParen);\n}\n/**\n* Parse type-desc or expression. 
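For example (illustrative): given input a|b, the result is a union type-desc when the\n* right-hand side turns out to be a type, and a bitwise-or binary expression otherwise.\n* 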
This method does not handle binding patterns.\n*\n* @return Type-desc node or expression node\n*/\nprivate STNode parseTypeDescOrExpr() {\nSTToken nextToken = peek();\nSTNode typeOrExpr;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();\nbreak;\ncase FUNCTION_KEYWORD:\ntypeOrExpr = parseAnonFuncExprOrFuncTypeDesc();\nbreak;\ncase IDENTIFIER_TOKEN:\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypeDescOrExprRhs(typeOrExpr);\ncase OPEN_BRACKET_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();\nbreak;\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypeDescOrExprRhs(basicLiteral);\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreturn parseActionOrExpressionInLhs(nextToken.kind, null);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind)) {\nreturn parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n}\nreturn parseTypeDescOrExprRhs(typeOrExpr);\n}\nprivate boolean isExpression(SyntaxKind kind) {\nswitch (kind) {\ncase BASIC_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n}\n}\n/**\n* Parse statement that starts with an empty parenthesis. 
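For instance (illustrative Ballerina): in int? n = (); the () is the nil literal, while in\n* var f = () => 5; the () begins anon-function params. 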
Empty parenthesis can be\n* 1) Nil literal\n* 2) Nil type-desc\n* 3) Anon-function params\n*\n* @param openParen Open parenthesis\n* @param closeParen Close parenthesis\n* @return Parsed node\n*/\nprivate STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nSTNode params = STNodeFactory.createNodeList();\nSTNode anonFuncParam =\nSTNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nendContext();\nreturn anonFuncParam;\ndefault:\nreturn STNodeFactory.createNilLiteralNode(openParen, closeParen);\n}\n}\nprivate STNode parseAnonFuncExprOrTypedBPWithFuncType() {\nSTNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc();\nif (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {\nreturn exprOrTypeDesc;\n}\nreturn parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\n/**\n* Parse anon-func-expr or function-type-desc, by resolving the ambiguity.\n*\n* @return Anon-func-expr or function-type-desc\n*/\nprivate STNode parseAnonFuncExprOrFuncTypeDesc() {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcSignature = parseFuncSignature(true);\nendContext();\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nfuncSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\nSTNode funcBody = parseAnonFuncBody(false);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nSTNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, functionKeyword,\nfuncSignature, funcBody);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);\ncase IDENTIFIER_TOKEN:\ndefault:\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\ntrue);\n}\n}\nprivate STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {\nSyntaxKind nextTokenKind = peek().kind;\nreturn parseTypeDescOrExprRhs(nextTokenKind, typeOrExpr);\n}\nprivate STNode parseTypeDescOrExprRhs(SyntaxKind nextTokenKind, STNode typeOrExpr) {\nSTNode typeDesc;\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn STNodeFactory.createUnionTypeDescriptorNode(typeDesc, pipe, rhsTypeDescOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind != SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = 
getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(typeDesc, ampersand, rhsTypeDescOrExpr);\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\ntypeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\nfalse);\nendContext();\nreturn typeDesc;\ncase SEMICOLON_TOKEN:\nreturn getTypeDescFromExpr(typeOrExpr);\ncase EQUAL_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\ncase COMMA_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,\nParserRuleContext.AMBIGUOUS_STMT);\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nreturn STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextTokenKind, typeOrExpr.kind)) {\nreturn parseExpressionRhs(nextTokenKind, DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false,\nfalse);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypeDescOrExprRhs(solution.tokenKind, typeOrExpr);\n}\n}\nprivate boolean isAmbiguous(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase BRACKETED_LIST:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\nbinaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase INDEXED_EXPRESSION:\nSTIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;\nif (!isAmbiguous(indexExpr.containerExpression)) {\nreturn false;\n}\nSTNode keys = indexExpr.keyExpression;\nfor (int i = 0; i < keys.bucketCount(); i++) {\nSTNode item = keys.childInBucket(i);\nif (item.kind == SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAmbiguous(item)) {\nreturn false;\n}\n}\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isAllBasicLiterals(STNode node) {\nswitch (node.kind) {\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\nbinaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase BRACKETED_LIST:\nSTAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;\nfor (STNode member : list.members) {\nif (member.kind == 
SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAllBasicLiterals(member)) {\nreturn false;\n}\n}\nreturn true;\ncase UNARY_EXPRESSION:\nSTUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;\nif (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&\nunaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {\nreturn false;\n}\nreturn isNumericLiteral(unaryExpr.expression);\ndefault:\nreturn false;\n}\n}\nprivate boolean isNumericLiteral(STNode node) {\nswitch (node.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTypedDescOrExprStartsWithOpenBracket() {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList members = new ArrayList<>();\nSTNode memberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nSTNode expr = parseTypeDescOrExpr();\nmembers.add(expr);\nmemberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmembers.add(memberEnd);\n}\nSTNode memberNodes = STNodeFactory.createNodeList(members);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);\n}\n/**\n* Parse binding-patterns.\n*
\n* \n* binding-pattern := capture-binding-pattern\n* | wildcard-binding-pattern\n* | list-binding-pattern\n* | mapping-binding-pattern\n* | functional-binding-pattern\n*\n*\n* capture-binding-pattern := variable-name\n* variable-name := identifier\n*\n*\n* wildcard-binding-pattern := _\n* list-binding-pattern := [ list-member-binding-patterns ]\n*\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*\n*\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*\n* field-binding-pattern := field-name : binding-pattern | variable-name\n*\n* rest-binding-pattern := ... variable-name\n*\n*\n* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )\n*\n* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]\n* | other-arg-binding-patterns\n*\n* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*\n*\n* positional-arg-binding-pattern := binding-pattern\n*\n* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]\n* | [rest-binding-pattern]\n*\n* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*\n*\n* named-arg-binding-pattern := arg-name = binding-pattern\n*
\n*\n* @return binding-pattern node\n*/\nprivate STNode parseBindingPattern() {\nSTToken token = peek();\nreturn parseBindingPattern(token.kind);\n}\nprivate STNode parseBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseListBindingPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseBindingPatternStartsWithIdentifier();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPattern();\ncase ERROR_KEYWORD:\nreturn parseErrorBindingPattern();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.BINDING_PATTERN);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn parseBindingPattern(sol.tokenKind);\n}\n}\nprivate STNode parseBindingPatternStartsWithIdentifier() {\nSTNode argNameOrBindingPattern =\nparseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\nSTToken secondToken = peek();\nif (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nstartContext(ParserRuleContext.FUNCTIONAL_BINDING_PATTERN);\nreturn parseFunctionalBindingPattern(argNameOrBindingPattern);\n}\nif (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {\nSTNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN);\nidentifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);\nreturn createCaptureOrWildcardBP(identifier);\n}\nreturn createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);\n}\nprivate STNode createCaptureOrWildcardBP(STNode varName) {\nSTNode bindingPattern;\nif (isWildcardBP(varName)) {\nbindingPattern = getWildcardBindingPattern(varName);\n} else {\nbindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);\n}\nreturn bindingPattern;\n}\n/**\n* Parse list-binding-patterns.\n*
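For example (illustrative Ballerina): var [a, b] = pair; and var [first, ...others] = items;\n* both use list binding patterns.\n*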
\n* \n* list-binding-pattern := [ list-member-binding-patterns ]\n*\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
\n*\n* @return list-binding-pattern node\n*/\nprivate STNode parseListBindingPattern() {\nstartContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode openBracket = parseOpenBracket();\nList bindingPatternsList = new ArrayList<>();\nSTNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);\nendContext();\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, List bindingPatternsList) {\nSTNode listBindingPatternMember = parseListBindingPatternMember();\nbindingPatternsList.add(listBindingPatternMember);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List bindingPatterns) {\nSTNode member = firstMember;\nSTToken token = peek();\nSTNode listBindingPatternRhs = null;\nwhile (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\nlistBindingPatternRhs = parseListBindingPatternMemberRhs(token.kind);\nif (listBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(listBindingPatternRhs);\nmember = parseListBindingPatternMember();\nbindingPatterns.add(member);\ntoken = peek();\n}\nSTNode restBindingPattern;\nif (member.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = bindingPatterns.remove(bindingPatterns.size() - 1);\n} else {\nrestBindingPattern = STNodeFactory.createEmptyNode();\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,\ncloseBracket);\n}\nprivate STNode parseListBindingPatternMemberRhs() {\nreturn parseListBindingPatternMemberRhs(peek().kind);\n}\nprivate STNode parseListBindingPatternMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternMemberRhs(solution.tokenKind);\n}\n}\nprivate boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse list-binding-pattern member.\n*
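A member may itself be any binding pattern, e.g. (illustrative) a capture pattern a,\n* a wildcard _, a nested list [x, y], or a rest pattern ...r.\n*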
\n* \n* list-binding-pattern := [ list-member-binding-patterns ]\n*\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
\n*\n* @return List binding pattern member\n*/\nprivate STNode parseListBindingPatternMember() {\nSTToken token = peek();\nreturn parseListBindingPatternMember(token.kind);\n}\nprivate STNode parseListBindingPatternMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase OPEN_BRACKET_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn parseListBindingPatternMember(sol.tokenKind);\n}\n}\n/**\n* Parse rest binding pattern.\n*
\n* \n* rest-binding-pattern := ... variable-name\n* \n*\n* @return Rest binding pattern node\n*/\nprivate STNode parseRestBindingPattern() {\nstartContext(ParserRuleContext.REST_BINDING_PATTERN);\nSTNode ellipsis = parseEllipsis();\nSTNode varName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);\nreturn STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);\n}\n/**\n* Parse Typed-binding-pattern.\n*
\n* \n* typed-binding-pattern := inferable-type-descriptor binding-pattern\n*\n* inferable-type-descriptor := type-descriptor | var\n*
\n*\n* @return Typed binding pattern node\n*/\nprivate STNode parseTypedBindingPattern(ParserRuleContext context) {\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);\nSTNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);\nreturn typeBindingPattern;\n}\n/**\n* Parse mapping-binding-patterns.\n*
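For example (illustrative Ballerina): var {name: n, age: a} = person; and\n* var {id, ...rest} = entry; both use mapping binding patterns.\n*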
\n* \n* mapping-binding-pattern := { field-binding-patterns }\n*\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*\n* field-binding-pattern := field-name : binding-pattern\n*
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPattern() {\nstartContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nSTNode openBrace = parseOpenBrace();\nSTToken token = peek();\nif (isEndOfMappingBindingPattern(token.kind)) {\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,\ncloseBrace);\n}\nList bindingPatterns = new ArrayList<>();\nSTNode prevMember = parseMappingBindingPatternMember();\nbindingPatterns.add(prevMember);\nreturn parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);\n}\nprivate STNode parseMappingBindingPattern(STNode openBrace, List bindingPatterns, STNode member) {\nSTToken token = peek();\nSTNode mappingBindingPatternRhs = null;\nwhile (!isEndOfMappingBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\nmappingBindingPatternRhs = parseMappingBindingPatternEnd(token.kind);\nif (mappingBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(mappingBindingPatternRhs);\nmember = parseMappingBindingPatternMember();\nif (member.kind == SyntaxKind.REST_BINDING_PATTERN) {\nbreak;\n}\nbindingPatterns.add(member);\ntoken = peek();\n}\nSTNode restBindingPattern;\nif (member.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = member;\n} else {\nrestBindingPattern = STNodeFactory.createEmptyNode();\n}\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,\ncloseBrace);\n}\n/**\n* Parse mapping-binding-pattern entry.\n*
\n* \n* mapping-binding-pattern := { field-binding-patterns }\n*\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*\n* field-binding-pattern := field-name : binding-pattern\n* | variable-name\n*
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPatternMember() {\nSTToken token = peek();\nswitch (token.kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ndefault:\nreturn parseFieldBindingPattern();\n}\n}\nprivate STNode parseMappingBindingPatternEnd() {\nreturn parseMappingBindingPatternEnd(peek().kind);\n}\nprivate STNode parseMappingBindingPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingBindingPatternEnd(solution.tokenKind);\n}\n}\nprivate STNode parseFieldBindingPattern() {\nreturn parseFieldBindingPattern(peek().kind);\n}\n/**\n* Parse field-binding-pattern.\n* field-binding-pattern := field-name : binding-pattern | varname\n*\n* @return field-binding-pattern node\n*/\nprivate STNode parseFieldBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nSTNode fieldBindingPattern = parseFieldBindingPattern(identifier);\nreturn fieldBindingPattern;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldBindingPattern(solution.tokenKind);\n}\n}\nprivate STNode parseFieldBindingPattern(STNode identifier) {\nSTNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);\n}\nSTNode colon = parseColon();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);\n}\nprivate boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse error binding pattern node.\n*
\n* functional-binding-pattern := error ( arg-list-binding-pattern )\n*\n* @return Error binding pattern node.\n*/\nprivate STNode parseErrorBindingPattern() {\nstartContext(ParserRuleContext.FUNCTIONAL_BINDING_PATTERN);\nSTNode typeDesc = parseErrorKeyword();\nreturn parseFunctionalBindingPattern(typeDesc);\n}\n/**\n* Parse functional binding pattern.\n*
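For example (illustrative Ballerina): var error(msg) = err; destructures an error\n* value with a functional binding pattern.\n*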
\n* \n* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )\n*\n* functionally-constructible-type-reference := error | type-reference\n*
\n*\n* @param typeDesc Functionally constructible type reference\n* @return Functional binding pattern node.\n*/\nprivate STNode parseFunctionalBindingPattern(STNode typeDesc) {\nSTNode openParenthesis = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode argListBindingPatterns = parseArgListBindingPatterns();\nSTNode closeParenthesis = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createFunctionalBindingPatternNode(typeDesc, openParenthesis, argListBindingPatterns,\ncloseParenthesis);\n}\nprivate STNode parseArgListBindingPatterns() {\nList argListBindingPatterns = new ArrayList<>();\nSyntaxKind lastValidArgKind = SyntaxKind.CAPTURE_BINDING_PATTERN;\nSTToken nextToken = peek();\nwhile (!isEndOfParametersList(nextToken.kind)) {\nSTNode currentArg = parseArgBindingPattern(nextToken.kind);\nDiagnosticErrorCode errorCode = validateArgBindingPatternOrder(lastValidArgKind, currentArg.kind);\nif (errorCode == null) {\nargListBindingPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);\n}\nnextToken = peek();\nSTNode argEnd = parseArgsBindingPatternEnd(nextToken.kind);\nif (argEnd == null) {\nbreak;\n}\nif (errorCode == null) {\nargListBindingPatterns.add(argEnd);\n} else {\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);\n}\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nprivate STNode parseArgsBindingPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_BINDING_PATTERN_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgsBindingPatternEnd(solution.tokenKind);\n}\n}\n/**\n* Parse arg binding pattern.\n*
\n* \n* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]\n* | other-arg-binding-patterns\n*\n* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*\n*\n* positional-arg-binding-pattern := binding-pattern\n*\n* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern] | [rest-binding-pattern]\n*\n* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*\n*\n* named-arg-binding-pattern := arg-name = binding-pattern\n*
\n*\n* @return Arg binding pattern\n*/\nprivate STNode parseArgBindingPattern() {\nSTToken nextToken = peek();\nreturn parseArgBindingPattern(nextToken.kind);\n}\nprivate STNode parseArgBindingPattern(SyntaxKind kind) {\nswitch (kind) {\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseNamedOrPositionalArgBindingPattern(kind);\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_BINDING_PATTERN);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgBindingPattern(solution.tokenKind);\n}\n}\nprivate STNode parseNamedOrPositionalArgBindingPattern(SyntaxKind nextTokenKind) {\nSTNode argNameOrBindingPattern = parseQualifiedIdentifier(ParserRuleContext.ARG_BINDING_PATTERN_START_IDENT);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nSTNode equal = parseAssignOp();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createNamedArgBindingPatternNode(argNameOrBindingPattern, equal, bindingPattern);\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalBindingPattern(argNameOrBindingPattern);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn createCaptureOrWildcardBP(argNameOrBindingPattern);\n}\n}\nprivate DiagnosticErrorCode validateArgBindingPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\nbreak;\ncase NAMED_ARG_BINDING_PATTERN:\nif (currentArgKind != SyntaxKind.NAMED_ARG_BINDING_PATTERN &&\ncurrentArgKind != SyntaxKind.REST_BINDING_PATTERN) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_BINDING_PATTERN:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nthrow new IllegalStateException(\"Invalid SyntaxKind in an argument\");\n}\nreturn errorCode;\n}\n/*\n* This parses Typed binding patterns and deals with ambiguity between types,\n* and binding patterns. 
An example is 'T[a]'.\n* The ambiguity lies in between:\n* 1) Array Type\n* 2) List binding pattern\n* 3) Member access expression.\n*/\n/**\n* Parse the component after the type-desc, of a typed-binding-pattern.\n*\n* @param typeDesc Starting type-desc of the typed-binding-pattern\n* @return Typed-binding pattern\n*/\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternTypeRhs(nextToken.kind, typeDesc, context, true);\n}\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternTypeRhs(nextToken.kind, typeDesc, context, isRoot);\n}\nprivate STNode parseTypedBindingPatternTypeRhs(SyntaxKind nextTokenKind, STNode typeDesc, ParserRuleContext context,\nboolean isRoot) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode bindingPattern = parseBindingPattern(nextTokenKind);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase OPEN_BRACKET_TOKEN:\nSTNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);\nassert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;\nreturn typedBindingPattern;\ncase CLOSE_PAREN_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nif (!isRoot) {\nreturn typeDesc;\n}\ndefault:\nSolution solution =\nrecover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternTypeRhs(solution.tokenKind, typeDesc, context, isRoot);\n}\n}\n/**\n* Parse typed-binding pattern with list, array-type-desc, or member-access-expr.\n*\n* @param typeDescOrExpr Type desc or the expression at the start\n* @param isTypedBindingPattern Is this is a typed-binding-pattern.\n* @return Parsed node\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nif (isBracketedListEnd(peek().kind)) {\nreturn parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);\n}\nSTNode member = parseBracketedListMember(isTypedBindingPattern);\nSyntaxKind currentNodeType = getBracketedListNodeType(member);\nswitch (currentNodeType) {\ncase ARRAY_TYPE_DESC:\nSTNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);\nreturn typedBindingPattern;\ncase LIST_BINDING_PATTERN:\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase INDEXED_EXPRESSION:\nreturn parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);\ncase NONE:\ndefault:\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd != null) {\nList memberList = new ArrayList<>();\nmemberList.add(member);\nmemberList.add(memberEnd);\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, memberList);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\nSTNode closeBracket = 
parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,\nisTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {\nmember = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode keyExpr = STNodeFactory.createNodeList(member);\nSTNode memberAccessExpr =\nSTNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);\n}\nprivate boolean isBracketedListEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseBracketedListMember(boolean isTypedBindingPattern) {\nreturn parseBracketedListMember(peek().kind, isTypedBindingPattern);\n}\n/**\n* Parse a member of an ambiguous bracketed list. This member could be:\n* 1) Array length\n* 2) Key expression of a member-access-expr\n* 3) A member-binding pattern of a list-binding-pattern.\n*\n* @param nextTokenKind Kind of the next token\n* @param isTypedBindingPattern Is this in a definite typed-binding pattern\n* @return Parsed member node\n*/\nprivate STNode parseBracketedListMember(SyntaxKind nextTokenKind, boolean isTypedBindingPattern) {\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\ncase STRING_LITERAL:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\ncase ELLIPSIS_TOKEN:\ncase OPEN_BRACKET_TOKEN:\nreturn parseStatementStartBracketedListMember();\ncase IDENTIFIER_TOKEN:\nif (isTypedBindingPattern) {\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseListBindingPatternMember();\n}\nreturn identifier;\n}\nbreak;\ndefault:\nif (!isTypedBindingPattern && isValidExpressionStart(nextTokenKind, 1)) {\nbreak;\n}\nParserRuleContext recoverContext =\nisTypedBindingPattern ? 
ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH\n: ParserRuleContext.BRACKETED_LIST_MEMBER;\nSolution solution = recover(peek(), recoverContext, isTypedBindingPattern);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseBracketedListMember(solution.tokenKind, isTypedBindingPattern);\n}\nSTNode expr = parseExpression();\nif (isWildcardBP(expr)) {\nreturn getWildcardBindingPattern(expr);\n}\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE || expr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseListBindingPatternMember();\n}\n}\nreturn expr;\n}\n/**\n* Treat the current node as an array, and parse the remainder of the binding pattern.\n*\n* @param typeDesc Type-desc\n* @param openBracket Open bracket\n* @param member Member\n* @return Parsed node\n*/\nprivate STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {\ntypeDesc = getTypeDescFromExpr(typeDesc);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,\ncontext);\n}\nprivate STNode parseBracketedListMemberEnd() {\nreturn parseBracketedListMemberEnd(peek().kind);\n}\nprivate STNode parseBracketedListMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseBracketedListMemberEnd(solution.tokenKind);\n}\n}\n/**\n* We reach here to break ambiguity of T[a]. 
This could be:\n* 1) Array Type Desc\n* 2) Member access on LHS\n* 3) Typed-binding-pattern\n*\n* @param typeDescOrExpr Type name or the expr that precede the open-bracket.\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Open bracket\n* @param isTypedBindingPattern Is this is a typed-binding-pattern.\n* @return Specific node that matches to T[a], after solving ambiguity.\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrMemberAccessRhs(nextToken.kind, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode parseTypedBindingPatternOrMemberAccessRhs(SyntaxKind nextTokenKind, STNode typeDescOrExpr,\nSTNode openBracket, STNode member, STNode closeBracket,\nboolean isTypedBindingPattern, boolean allowAssignment,\nParserRuleContext context) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nSTNode arrayTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\ncase OPEN_BRACKET_TOKEN:\nif (isTypedBindingPattern) {\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc =\nSTNodeFactory.createArrayTypeDescriptorNode(typeDesc, openBracket, member, closeBracket);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\n}\nSTNode keyExpr = STNodeFactory.createNodeList(member);\nSTNode expr =\nSTNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);\ncase QUESTION_MARK_TOKEN:\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\ntypeDesc = parseComplexTypeDescriptor(arrayTypeDesc,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nreturn parseTypedBindingPatternTypeRhs(typeDesc, context);\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\nreturn parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket,\ncontext, isTypedBindingPattern);\ncase IN_KEYWORD:\nif (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase EQUAL_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nif (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\n}\nkeyExpr = STNodeFactory.createNodeList(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\ncase SEMICOLON_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase CLOSE_BRACE_TOKEN:\ncase COMMA_TOKEN:\nif (context == ParserRuleContext.AMBIGUOUS_STMT) {\nkeyExpr = STNodeFactory.createNodeList(member);\nreturn 
STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(nextTokenKind, closeBracket.kind)) {\nkeyExpr = STNodeFactory.createNodeList(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternOrMemberAccessRhs(solution.tokenKind, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket) {\nSTNode bindingPatterns;\nif (isEmpty(member)) {\nbindingPatterns = STNodeFactory.createEmptyNodeList();\n} else {\nSTNode bindingPattern = getBindingPattern(member);\nbindingPatterns = STNodeFactory.createNodeList(bindingPattern);\n}\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns,\nrestBindingPattern, closeBracket);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\n/**\n* Parse a union or intersection type-desc/binary-expression that involves ambiguous\n* bracketed list in lhs.\n*
\n* e.g. (T[a] & R..) or (T[a] | R.. )\n*
\n* Complexity occurs in scenarios such as T[a] |/& R[b]. If the token after this\n* is another binding-pattern, then (T[a] |/& R[b]) becomes the type-desc. However,\n* if the token follows this is an equal or semicolon, then (T[a] |/& R) becomes\n* the type-desc, and [b] becomes the binding pattern.\n*\n* @param typeDescOrExpr Type desc or the expression\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Close bracket\n* @param context COntext in which the typed binding pattern occurs\n* @return Parsed node\n*/\nprivate STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeOrAndToken = parseUnionOrIntersectionToken();\nSTNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);\nif (isTypedBindingPattern || typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);\nlhsTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);\nSTTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;\nSTNode newTypeDesc;\nif (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {\nnewTypeDesc = STNodeFactory.createUnionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken,\nrhsTypedBindingPattern.typeDescriptor);\n} else {\nnewTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken,\nrhsTypedBindingPattern.typeDescriptor);\n}\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);\n} else {\nSTNode keyExpr = getExpression(member);\nSTNode containerExpr = getExpression(typeDescOrExpr);\nSTNode lhsExpr =\nSTNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,\ntypedBindingPatternOrExpr);\n}\n}\nprivate STNode createArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {\nif (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\nSTUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);\nlhsTypeDesc = STNodeFactory.createUnionTypeDescriptorNode(unionTypeDesc.leftTypeDesc,\nunionTypeDesc.pipeToken, middleTypeDesc);\n} else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\nSTIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc =\ncreateArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);\nlhsTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(intersectionTypeDesc.leftTypeDesc,\nintersectionTypeDesc.bitwiseAndToken, middleTypeDesc);\n} else {\nlhsTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(lhsTypeDesc, openBracket, member, closeBracket);\n}\nreturn lhsTypeDesc;\n}\n/**\n* Parse union (|) or intersection (&) type operator.\n*\n* @return pipe or bitwise and token\n*/\nprivate STNode parseUnionOrIntersectionToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* 
Infer the type of the ambiguous bracketed list, based on the type of the member.\n*\n* @param memberNode Member node\n* @return Inferred type of the bracketed list\n*/\nprivate SyntaxKind getBracketedListNodeType(STNode memberNode) {\nif (isEmpty(memberNode)) {\nreturn SyntaxKind.NONE;\n}\nif (isDefiniteTypeDesc(memberNode.kind)) {\nreturn SyntaxKind.TUPLE_TYPE_DESC;\n}\nswitch (memberNode.kind) {\ncase ASTERISK_TOKEN:\nreturn SyntaxKind.ARRAY_TYPE_DESC;\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase QUALIFIED_NAME_REFERENCE:\ncase REST_TYPE:\nreturn SyntaxKind.TUPLE_TYPE_DESC;\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase SIMPLE_NAME_REFERENCE:\ncase BRACKETED_LIST:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.NONE;\ndefault:\nreturn SyntaxKind.INDEXED_EXPRESSION;\n}\n}\n/*\n* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.\n* The ambiguity lies in between:\n* 1) Assignment that starts with list binding pattern\n* 2) Var-decl statement that starts with tuple type\n* 3) Statement that starts with list constructor, such as sync-send, etc.\n*/\n/**\n* Parse any statement that starts with an open-bracket.\n*\n* @param annots Annotations attached to the statement.\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {\nstartContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);\nreturn parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);\n}\nprivate STNode parseMemberBracketedList(boolean possibleMappingField) {\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStatementStartsWithOpenBracket(annots, false, possibleMappingField);\n}\n/**\n* The bracketed list at the start of a statement can be one of the following.\n* 1) List binding pattern\n* 2) Tuple type\n* 3) List constructor\n*\n* @param isRoot Is this the root of the list\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {\nstartContext(ParserRuleContext.STMT_START_BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nwhile (!isBracketedListEnd(peek().kind)) {\nSTNode member = parseStatementStartBracketedListMember();\nSyntaxKind currentNodeType = getStmtStartBracketedListType(member);\nswitch (currentNodeType) {\ncase TUPLE_TYPE_DESC:\nreturn parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\ncase LIST_BINDING_PATTERN:\nreturn parseAsListBindingPattern(openBracket, memberList, member, isRoot);\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nreturn parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);\ncase NONE:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bracketedList = parseStatementStartBracketedList(annots, openBracket, memberList, closeBracket, isRoot,\npossibleMappingField);\nreturn bracketedList;\n}\nprivate STNode parseStatementStartBracketedListMember() {\nSTToken nextToken = peek();\nreturn parseStatementStartBracketedListMember(nextToken.kind);\n}\n/**\n* 
Parse a member of a list-binding-pattern, tuple-type-desc, or\n* list-constructor-expr, when the parent is ambiguous.\n*\n* @param nextTokenKind Kind of the next token.\n* @return Parsed node\n*/\nprivate STNode parseStatementStartBracketedListMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseMemberBracketedList(false);\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (isWildcardBP(identifier)) {\nSTNode varName = ((STSimpleNameReferenceNode) identifier).name;\nreturn getWildcardBindingPattern(varName);\n}\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) {\nSTNode ellipsis = parseEllipsis();\nreturn STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPatterOrMappingConstructor();\ncase ERROR_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseErrorConstructorExpr();\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase ELLIPSIS_TOKEN:\nreturn parseListBindingPatternMember();\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseExpression(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.LT_TOKEN) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nreturn parseExpression(false);\ncase OPEN_PAREN_TOKEN:\nreturn parseTypeDescOrExpr();\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseExpression(false);\n}\nif (isTypeStartingToken(nextTokenKind)) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nSolution solution = recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatementStartBracketedListMember(solution.tokenKind);\n}\n}\nprivate STNode parseAsTupleTypeDesc(STNode annots, STNode openBracket, List memberList, STNode member,\nboolean isRoot) {\nmemberList = getTypeDescList(memberList);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode tupleTypeMembers = parseTupleTypeMembers(member, memberList);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode tupleType = STNodeFactory.createTupleTypeDescriptorNode(openBracket, tupleTypeMembers, closeBracket);\nSTNode typeDesc =\nparseComplexTypeDescriptor(tupleType, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nendContext();\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT, isRoot);\nif (!isRoot) {\nreturn typedBindingPattern;\n}\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, STNodeFactory.createEmptyNode(), typedBindingPattern, false);\n}\nprivate STNode parseAsListBindingPattern(STNode openBracket, List memberList, STNode member,\nboolean isRoot) {\nmemberList = getBindingPatternsList(memberList);\nmemberList.add(member);\nswitchContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, member, memberList);\nendContext();\nif (!isRoot) {\nreturn listBindingPattern;\n}\nreturn parseAssignmentStmtRhs(listBindingPattern);\n}\nprivate STNode 
parseAsListBindingPattern(STNode openBracket, List memberList) {\nmemberList = getBindingPatternsList(memberList);\nswitchContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, memberList);\nendContext();\nreturn listBindingPattern;\n}\nprivate STNode parseAsListBindingPatternOrListConstructor(STNode openBracket, List memberList,\nSTNode member, boolean isRoot) {\nmemberList.add(member);\nSTNode memberEnd = parseBracketedListMemberEnd();\nSTNode listBindingPatternOrListCons;\nif (memberEnd == null) {\nSTNode closeBracket = parseCloseBracket();\nlistBindingPatternOrListCons =\nparseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot);\n} else {\nmemberList.add(memberEnd);\nlistBindingPatternOrListCons = parseListBindingPatternOrListConstructor(openBracket, memberList, isRoot);\n}\nreturn listBindingPatternOrListCons;\n}\nprivate SyntaxKind getStmtStartBracketedListType(STNode memberNode) {\nif (memberNode.kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 &&\nmemberNode.kind.compareTo(SyntaxKind.TYPEDESC_TYPE_DESC) <= 0) {\nreturn SyntaxKind.TUPLE_TYPE_DESC;\n}\nswitch (memberNode.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\nreturn SyntaxKind.ARRAY_TYPE_DESC;\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase QUALIFIED_NAME_REFERENCE:\ncase REST_TYPE:\nreturn SyntaxKind.TUPLE_TYPE_DESC;\ncase LIST_CONSTRUCTOR:\ncase MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_CONSTRUCTOR;\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR;\ncase SIMPLE_NAME_REFERENCE:\ncase BRACKETED_LIST:\nreturn SyntaxKind.NONE;\ncase FUNCTION_CALL:\nif (isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) memberNode)) {\nreturn SyntaxKind.NONE;\n}\nreturn SyntaxKind.LIST_CONSTRUCTOR;\ndefault:\nif (isExpression(memberNode.kind) && !isAllBasicLiterals(memberNode) && !isAmbiguous(memberNode)) {\nreturn SyntaxKind.LIST_CONSTRUCTOR;\n}\nreturn SyntaxKind.NONE;\n}\n}\nprivate boolean isPosibleFunctionalBindingPattern(STFunctionCallExpressionNode funcCall) {\nSTNode args = funcCall.arguments;\nint size = args.bucketCount();\nfor (int i = 0; i < size; i++) {\nSTNode arg = args.childInBucket(i);\nif (arg.kind != SyntaxKind.NAMED_ARG && arg.kind != SyntaxKind.POSITIONAL_ARG &&\narg.kind != SyntaxKind.REST_ARG) {\ncontinue;\n}\nif (!isPosibleArgBindingPattern((STFunctionArgumentNode) arg)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean isPosibleArgBindingPattern(STFunctionArgumentNode arg) {\nswitch (arg.kind) {\ncase POSITIONAL_ARG:\nSTNode expr = ((STPositionalArgumentNode) arg).expression;\nreturn isPosibleBindingPattern(expr);\ncase NAMED_ARG:\nexpr = ((STNamedArgumentNode) arg).expression;\nreturn isPosibleBindingPattern(expr);\ncase REST_ARG:\nexpr = ((STRestArgumentNode) arg).expression;\nreturn expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE;\ndefault:\nreturn false;\n}\n}\nprivate boolean isPosibleBindingPattern(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn true;\ncase LIST_CONSTRUCTOR:\nSTListConstructorExpressionNode listConstructor = (STListConstructorExpressionNode) node;\nfor (int i = 0; i < listConstructor.bucketCount(); i++) {\nSTNode expr = listConstructor.childInBucket(i);\nif (!isPosibleBindingPattern(expr)) {\nreturn false;\n}\n}\nreturn true;\ncase 
MAPPING_CONSTRUCTOR:\nSTMappingConstructorExpressionNode mappingConstructor = (STMappingConstructorExpressionNode) node;\nfor (int i = 0; i < mappingConstructor.bucketCount(); i++) {\nSTNode expr = mappingConstructor.childInBucket(i);\nif (!isPosibleBindingPattern(expr)) {\nreturn false;\n}\n}\nreturn true;\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode specificField = (STSpecificFieldNode) node;\nif (specificField.readonlyKeyword != null) {\nreturn false;\n}\nif (specificField.valueExpr == null) {\nreturn true;\n}\nreturn isPosibleBindingPattern(specificField.valueExpr);\ncase FUNCTION_CALL:\nreturn isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) node);\ndefault:\nreturn false;\n}\n}\nprivate STNode parseStatementStartBracketedList(STNode annots, STNode openBracket, List members,\nSTNode closeBracket, boolean isRoot, boolean possibleMappingField) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase EQUAL_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nSTNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(members));\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode listBindingPattern = STNodeFactory.createListBindingPatternNode(openBracket,\nmemberBindingPatterns, restBindingPattern, closeBracket);\nendContext();\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(listBindingPattern);\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nif (members.isEmpty()) {\nopenBracket =\nSyntaxErrors.addDiagnostic(openBracket, DiagnosticErrorCode.ERROR_MISSING_TUPLE_MEMBER);\n}\nswitchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\nSTNode tupleTypeDesc =\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\nendContext();\nSTNode typeDesc = parseComplexTypeDescriptor(tupleTypeDesc,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nendContext();\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, typedBindingPattern);\ncase OPEN_BRACKET_TOKEN:\nif (!isRoot) {\nmemberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\ntupleTypeDesc =\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\nendContext();\ntypeDesc = parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nreturn typeDesc;\n}\nSTAmbiguousCollectionNode list =\nnew STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\nendContext();\nSTNode tpbOrExpr = parseTypedBindingPatternOrExprRhs(list, true);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, tpbOrExpr);\ncase COLON_TOKEN:\nif (possibleMappingField && members.size() == 1) {\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode colon = parseColon();\nSTNode fieldNameExpr = getExpression(members.get(0));\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon,\nvalueExpr);\n}\ndefault:\nendContext();\nif (!isRoot) {\nreturn new 
STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nlist = new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\nSTNode exprOrTPB = parseTypedBindingPatternOrExprRhs(list, false);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, exprOrTPB);\n}\n}\nprivate boolean isWildcardBP(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTToken nameToken = (STToken) ((STSimpleNameReferenceNode) node).name;\nreturn isUnderscoreToken(nameToken);\ncase IDENTIFIER_TOKEN:\nreturn isUnderscoreToken((STToken) node);\ndefault:\nreturn false;\n}\n}\nprivate boolean isUnderscoreToken(STToken token) {\nreturn \"_\".equals(token.text());\n}\nprivate STNode getWildcardBindingPattern(STNode identifier) {\nswitch (identifier.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTNode varName = ((STSimpleNameReferenceNode) identifier).name;\nreturn STNodeFactory.createWildcardBindingPatternNode(varName);\ncase IDENTIFIER_TOKEN:\nreturn STNodeFactory.createWildcardBindingPatternNode(identifier);\ndefault:\nthrow new IllegalStateException();\n}\n}\n/*\n* This section tries to break the ambiguity in parsing a statement that starts with a open-brace.\n*/\n/**\n* Parse statements that starts with open-brace. It could be a:\n* 1) Block statement\n* 2) Var-decl with mapping binding pattern.\n* 3) Statement that starts with mapping constructor expression.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBrace() {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode openBrace = parseOpenBrace();\nif (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nSTNode closeBrace = parseCloseBrace();\nswitch (peek().kind) {\ncase EQUAL_TOKEN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nSTNode fields = STNodeFactory.createEmptyNodeList();\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode bindingPattern = STNodeFactory.createMappingBindingPatternNode(openBrace, fields,\nrestBindingPattern, closeBrace);\nreturn parseAssignmentStmtRhs(bindingPattern);\ncase RIGHT_ARROW_TOKEN:\ncase SYNC_SEND_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nfields = STNodeFactory.createEmptyNodeList();\nSTNode expr = STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ndefault:\nSTNode statements = STNodeFactory.createEmptyNodeList();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace);\n}\n}\nSTNode member = parseStatementStartingBracedListFirstMember();\nSyntaxKind nodeType = getBracedListType(member);\nSTNode stmt;\nswitch (nodeType) {\ncase MAPPING_BINDING_PATTERN:\nreturn parseStmtAsMappingBindingPatternStart(openBrace, member);\ncase MAPPING_CONSTRUCTOR:\nreturn parseStmtAsMappingConstructorStart(openBrace, member);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn parseStmtAsMappingBPOrMappingConsStart(openBrace, member);\ncase BLOCK_STATEMENT:\nSTNode closeBrace = parseCloseBrace();\nstmt = STNodeFactory.createBlockStatementNode(openBrace, member, closeBrace);\nendContext();\nreturn stmt;\ndefault:\nArrayList stmts = new ArrayList<>();\nstmts.add(member);\nSTNode statements = parseStatements(stmts);\ncloseBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace);\n}\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping 
binding pattern.\n*\n* @param openBrace Open brace\n* @param firstMappingField First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingBindingPatternStart(STNode openBrace, STNode firstMappingField) {\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nstartContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nList bindingPatterns = new ArrayList<>();\nif (firstMappingField.kind != SyntaxKind.REST_BINDING_PATTERN) {\nbindingPatterns.add(getBindingPattern(firstMappingField));\n}\nSTNode mappingBP = parseMappingBindingPattern(openBrace, bindingPatterns, firstMappingField);\nreturn parseAssignmentStmtRhs(mappingBP);\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param firstMember First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingConstructorStart(STNode openBrace, STNode firstMember) {\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nList members = new ArrayList<>();\nSTNode mappingCons = parseAsMappingConstructor(openBrace, members, firstMember);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, mappingCons, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\n/**\n* Parse the braced-list as a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param members members list\n* @param member Most recently parsed member\n* @return Parsed node\n*/\nprivate STNode parseAsMappingConstructor(STNode openBrace, List members, STNode member) {\nmembers.add(member);\nmembers = getExpressionList(members);\nswitchContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode fields = parseMappingConstructorFields(members);\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping binding pattern\n* or a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param member First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingBPOrMappingConsStart(STNode openBrace, STNode member) {\nstartContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);\nList members = new ArrayList<>();\nmembers.add(member);\nSTNode bpOrConstructor;\nSTNode memberEnd = parseMappingFieldEnd();\nif (memberEnd == null) {\nSTNode closeBrace = parseCloseBrace();\nbpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members, closeBrace);\n} else {\nmembers.add(memberEnd);\nbpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members);;\n}\nswitch (bpOrConstructor.kind) {\ncase MAPPING_CONSTRUCTOR:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, bpOrConstructor, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ncase MAPPING_BINDING_PATTERN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nSTNode bindingPattern = getBindingPattern(bpOrConstructor);\nreturn parseAssignmentStmtRhs(bindingPattern);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nif (peek().kind == SyntaxKind.EQUAL_TOKEN) {\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nbindingPattern = getBindingPattern(bpOrConstructor);\nreturn parseAssignmentStmtRhs(bindingPattern);\n}\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nexpr = getExpression(bpOrConstructor);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, 
true);\nreturn parseStatementStartWithExprRhs(expr);\n}\n}\n/**\n* Parse a member of a braced-list that occurs at the start of a statement.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartingBracedListFirstMember() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase READONLY_KEYWORD:\nSTNode readonlyKeyword = parseReadonlyKeyword();\nreturn bracedListMemberStartsWithReadonly(readonlyKeyword);\ncase IDENTIFIER_TOKEN:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseIdentifierRhsInStmtStartingBrace(readonlyKeyword);\ncase STRING_LITERAL:\nSTNode key = parseStringLiteral();\nif (peek().kind == SyntaxKind.COLON_TOKEN) {\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, key, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ncase OPEN_BRACKET_TOKEN:\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStatementStartsWithOpenBracket(annots, true);\ncase OPEN_BRACE_TOKEN:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nreturn parseStatementStartsWithOpenBrace();\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nreturn parseStatements();\n}\n}\nprivate STNode bracedListMemberStartsWithReadonly(STNode readonlyKeyword) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseIdentifierRhsInStmtStartingBrace(readonlyKeyword);\ncase STRING_LITERAL:\nif (peek(2).kind == SyntaxKind.COLON_TOKEN) {\nSTNode key = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\nSTNode typeDesc = parseComplexTypeDescriptor(readonlyKeyword,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nendContext();\nSTNode metadata = STNodeFactory.createEmptyNode();\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode typedBP = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(metadata, finalKeyword, typedBP, false);\n}\n}\n/**\n* Parse the rhs components of an identifier that follows an open brace,\n* at the start of a statement. 
i.e: \"{foo\".\n*\n* @param readonlyKeyword Readonly keyword\n* @return Parsed node\n*/\nprivate STNode parseIdentifierRhsInStmtStartingBrace(STNode readonlyKeyword) {\nSTNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nSTNode colon = STNodeFactory.createEmptyNode();\nSTNode value = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value);\ncase COLON_TOKEN:\ncolon = parseColon();\nif (!isEmpty(readonlyKeyword)) {\nvalue = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value);\n}\nSyntaxKind nextTokenKind = peek().kind;\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nSTNode bindingPatternOrExpr = parseListBindingPatternOrListConstructor();\nreturn getMappingField(identifier, colon, bindingPatternOrExpr);\ncase OPEN_BRACE_TOKEN:\nbindingPatternOrExpr = parseMappingBindingPatterOrMappingConstructor();\nreturn getMappingField(identifier, colon, bindingPatternOrExpr);\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifierRhsInStmtStartBrace(identifier, colon);\ndefault:\nSTNode expr = parseExpression();\nreturn getMappingField(identifier, colon, expr);\n}\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nif (!isEmpty(readonlyKeyword)) {\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode bindingPattern = STNodeFactory.createCaptureBindingPatternNode(identifier);\nSTNode typedBindingPattern =\nSTNodeFactory.createTypedBindingPatternNode(readonlyKeyword, bindingPattern);\nSTNode metadata = STNodeFactory.createEmptyNode();\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVarDeclRhs(metadata, finalKeyword, typedBindingPattern, false);\n}\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode qualifiedIdentifier = parseQualifiedIdentifier(identifier, false);\nSTNode expr = parseTypedBindingPatternOrExprRhs(qualifiedIdentifier, true);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, expr);\n}\n}\n/**\n* Parse the rhs components of \"{ identifier : identifier\",\n* at the start of a statement. 
i.e: \"{foo:bar\".\n*\n* @return Parsed node\n*/\nprivate STNode parseQualifiedIdentifierRhsInStmtStartBrace(STNode identifier, STNode colon) {\nSTNode secondIdentifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);\nSTNode secondNameRef = STNodeFactory.createSimpleNameReferenceNode(secondIdentifier);\nif (isWildcardBP(secondIdentifier)) {\nreturn getWildcardBindingPattern(secondIdentifier);\n}\nSyntaxKind nextTokenKind = peek().kind;\nSTNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondNameRef);\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn qualifiedNameRef;\ncase OPEN_BRACE_TOKEN:\ncase IDENTIFIER_TOKEN:\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode typeBindingPattern =\nparseTypedBindingPatternTypeRhs(qualifiedNameRef, ParserRuleContext.VAR_DECL_STMT);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false);\ncase OPEN_BRACKET_TOKEN:\nreturn parseMemberRhsInStmtStartWithBrace(identifier, colon, secondNameRef);\ncase QUESTION_MARK_TOKEN:\nSTNode typeDesc = parseComplexTypeDescriptor(qualifiedNameRef,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nfinalKeyword = STNodeFactory.createEmptyNode();\ntypeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nannots = STNodeFactory.createEmptyNodeList();\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false);\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn parseStatementStartWithExprRhs(qualifiedNameRef);\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ndefault:\nreturn parseMemberWithExprInRhs(identifier, colon, secondNameRef, secondNameRef);\n}\n}\nprivate SyntaxKind getBracedListType(STNode member) {\nswitch (member.kind) {\ncase FIELD_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BINDING_PATTERN;\ncase SPECIFIC_FIELD:\nSTNode expr = ((STSpecificFieldNode) member).valueExpr;\nif (expr == null) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nswitch (expr.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\ncase FUNCTION_CALL:\nif (isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) expr)) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ndefault:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\n}\ncase SPREAD_FIELD:\ncase COMPUTED_NAME_FIELD:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ncase REST_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\ncase LIST:\nreturn SyntaxKind.BLOCK_STATEMENT;\ndefault:\nreturn SyntaxKind.NONE;\n}\n}\n/**\n* Parse mapping binding pattern or mapping constructor.\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingBindingPatterOrMappingConstructor() {\nstartContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);\nSTNode openBrace = parseOpenBrace();\nList memberList = new ArrayList<>();\nreturn parseMappingBindingPatternOrMappingConstructor(openBrace, memberList);\n}\nprivate boolean isBracedListEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn 
false;\n}\n}\nprivate STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List memberList) {\nSTToken nextToken = peek();\nwhile (!isBracedListEnd(nextToken.kind)) {\nSTNode member = parseMappingBindingPatterOrMappingConstructorMember(nextToken.kind);\nSyntaxKind currentNodeType = getTypeOfMappingBPOrMappingCons(member);\nswitch (currentNodeType) {\ncase MAPPING_CONSTRUCTOR:\nreturn parseAsMappingConstructor(openBrace, memberList, member);\ncase MAPPING_BINDING_PATTERN:\nreturn parseAsMappingBindingPattern(openBrace, memberList, member);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseMappingFieldEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBrace = parseCloseBrace();\nreturn parseMappingBindingPatternOrMappingConstructor(openBrace, memberList, closeBrace);\n}\nprivate STNode parseMappingBindingPatterOrMappingConstructorMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);\nreturn parseMappingFieldRhs(key);\ncase STRING_LITERAL:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nkey = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseComputedField();\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode expr = parseExpression();\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn STNodeFactory.createRestBindingPatternNode(ellipsis, expr);\n}\nreturn STNodeFactory.createSpreadFieldNode(ellipsis, expr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternOrListConstructorMember(solution.tokenKind);\n}\n}\nprivate STNode parseMappingFieldRhs(STNode key) {\nSTToken nextToken = peek();\nreturn parseMappingFieldRhs(nextToken.kind, key);\n}\nprivate STNode parseMappingFieldRhs(SyntaxKind tokenKind, STNode key) {\nSTNode colon;\nSTNode valueExpr;\nswitch (tokenKind) {\ncase COLON_TOKEN:\ncolon = parseColon();\nreturn parseMappingFieldValue(key, colon);\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, key);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseSpecificFieldRhs(solution.tokenKind, readonlyKeyword, key);\n}\n}\nprivate STNode parseMappingFieldValue(STNode key, STNode colon) {\nSTNode expr;\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\nexpr = parseExpression();\nbreak;\ncase OPEN_BRACKET_TOKEN:\nexpr = parseListBindingPatternOrListConstructor();\nbreak;\ncase OPEN_BRACE_TOKEN:\nexpr = parseMappingBindingPatterOrMappingConstructor();\nbreak;\ndefault:\nexpr = parseExpression();\nbreak;\n}\nif (isBindingPattern(expr.kind)) {\nreturn STNodeFactory.createFieldBindingPatternFullNode(key, colon, expr);\n}\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn 
STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, expr);\n}\nprivate boolean isBindingPattern(SyntaxKind kind) {\nswitch (kind) {\ncase FIELD_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate SyntaxKind getTypeOfMappingBPOrMappingCons(STNode memberNode) {\nswitch (memberNode.kind) {\ncase FIELD_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BINDING_PATTERN;\ncase SPECIFIC_FIELD:\nSTNode expr = ((STSpecificFieldNode) memberNode).valueExpr;\nif (expr == null || expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||\nexpr.kind == SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR ||\nexpr.kind == SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase SPREAD_FIELD:\ncase COMPUTED_NAME_FIELD:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase REST_BINDING_PATTERN:\ndefault:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\n}\nprivate STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List members,\nSTNode closeBrace) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR, openBrace, members,\ncloseBrace);\n}\nprivate STNode parseAsMappingBindingPattern(STNode openBrace, List members, STNode member) {\nmembers.add(member);\nmembers = getBindingPatternsList(members);\nswitchContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nreturn parseMappingBindingPattern(openBrace, members, member);\n}\n/**\n* Parse list binding pattern or list constructor.\n*\n* @return Parsed node\n*/\nprivate STNode parseListBindingPatternOrListConstructor() {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nreturn parseListBindingPatternOrListConstructor(openBracket, memberList, false);\n}\nprivate STNode parseListBindingPatternOrListConstructor(STNode openBracket, List memberList,\nboolean isRoot) {\nSTToken nextToken = peek();\nwhile (!isBracketedListEnd(nextToken.kind)) {\nSTNode member = parseListBindingPatternOrListConstructorMember(nextToken.kind);\nSyntaxKind currentNodeType = getParsingNodeTypeOfListBPOrListCons(member);\nswitch (currentNodeType) {\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase LIST_BINDING_PATTERN:\nreturn parseAsListBindingPattern(openBracket, memberList, member, isRoot);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBracket = parseCloseBracket();\nreturn parseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot);\n}\nprivate STNode parseListBindingPatternOrListConstructorMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseListBindingPatternOrListConstructor();\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (isWildcardBP(identifier)) {\nreturn 
getWildcardBindingPattern(identifier);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPatterOrMappingConstructor();\ncase ELLIPSIS_TOKEN:\nreturn parseListBindingPatternMember();\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseExpression();\n}\nSolution solution = recover(peek(), ParserRuleContext.LIST_BP_OR_LIST_CONSTRUCTOR_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternOrListConstructorMember(solution.tokenKind);\n}\n}\nprivate SyntaxKind getParsingNodeTypeOfListBPOrListCons(STNode memberNode) {\nswitch (memberNode.kind) {\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase SIMPLE_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR;\ndefault:\nreturn SyntaxKind.LIST_CONSTRUCTOR;\n}\n}\nprivate STNode parseAsListConstructor(STNode openBracket, List memberList, STNode member, boolean isRoot) {\nmemberList.add(member);\nmemberList = getExpressionList(memberList);\nswitchContext(ParserRuleContext.LIST_CONSTRUCTOR);\nSTNode expressions = parseOptionalExpressionsList(memberList);\nSTNode closeBracket = parseCloseBracket();\nSTNode listConstructor =\nSTNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);\nendContext();\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, listConstructor, false, false);\nif (!isRoot) {\nreturn expr;\n}\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseListBindingPatternOrListConstructor(STNode openBracket, List members,\nSTNode closeBracket, boolean isRoot) {\nSTNode lbpOrListCons;\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR, openBracket, members,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(peek().kind, closeBracket.kind)) {\nmembers = getExpressionList(members);\nSTNode memberExpressions = STNodeFactory.createNodeList(members);\nlbpOrListCons = STNodeFactory.createListConstructorExpressionNode(openBracket, memberExpressions,\ncloseBracket);\nbreak;\n}\nmembers = getBindingPatternsList(members);\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(members);\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nlbpOrListCons = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode,\nrestBindingPattern, closeBracket);\nbreak;\n}\nendContext();\nif (!isRoot) {\nreturn lbpOrListCons;\n}\nreturn parseStmtStartsWithTypedBPOrExprRhs(null, lbpOrListCons);\n}\nprivate STNode parseMemberRhsInStmtStartWithBrace(STNode identifier, STNode colon, STNode secondIdentifier) {\nSTNode typedBPOrExpr =\nparseTypedBindingPatternOrMemberAccess(secondIdentifier, false, true, ParserRuleContext.AMBIGUOUS_STMT);\nif (isExpression(typedBPOrExpr.kind)) {\nreturn parseMemberWithExprInRhs(identifier, colon, secondIdentifier, typedBPOrExpr);\n}\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode annots = STNodeFactory.createEmptyNode();\nSTNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, 
secondIdentifier);\nSTNode typeDesc = mergeQualifiedNameWithTypeDesc(qualifiedNameRef,\n((STTypedBindingPatternNode) typedBPOrExpr).typeDescriptor);\nreturn parseVarDeclRhs(annots, finalKeyword, typeDesc, false);\n}\n/**\n* Parse a member that starts with \"foo:bar[\", in a statement starting with a brace.\n*\n* @param identifier First identifier of the statement\n* @param colon Colon that follows the first identifier\n* @param secondIdentifier Identifier that follows the colon\n* @param memberAccessExpr Member access expression\n* @return Parsed node\n*/\nprivate STNode parseMemberWithExprInRhs(STNode identifier, STNode colon, STNode secondIdentifier,\nSTNode memberAccessExpr) {\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, true);\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, expr);\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode qualifiedName =\nSTNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier);\nSTNode updatedExpr = mergeQualifiedNameWithExpr(qualifiedName, expr);\nreturn parseStatementStartWithExprRhs(updatedExpr);\n}\n}\n/**\n* Replace the first identifier of an expression, with a given qualified-identifier.\n* Only expressions that can start with \"bar[..]\" can reach here.\n*\n* @param qualifiedName Qualified identifier to replace simple identifier\n* @param exprOrAction Expression or action\n* @return Updated expression\n*/\nprivate STNode mergeQualifiedNameWithExpr(STNode qualifiedName, STNode exprOrAction) {\nswitch (exprOrAction.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn qualifiedName;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) exprOrAction;\nSTNode newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, binaryExpr.lhsExpr);\nreturn STNodeFactory.createBinaryExpressionNode(binaryExpr.kind, newLhsExpr, binaryExpr.operator,\nbinaryExpr.rhsExpr);\ncase FIELD_ACCESS:\nSTFieldAccessExpressionNode fieldAccess = (STFieldAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, fieldAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, fieldAccess.dotToken,\nfieldAccess.fieldName);\ncase INDEXED_EXPRESSION:\nSTIndexedExpressionNode memberAccess = (STIndexedExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, memberAccess.containerExpression);\nreturn STNodeFactory.createIndexedExpressionNode(newLhsExpr, memberAccess.openBracket,\nmemberAccess.keyExpression, memberAccess.closeBracket);\ncase TYPE_TEST_EXPRESSION:\nSTTypeTestExpressionNode typeTest = (STTypeTestExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, typeTest.expression);\nreturn STNodeFactory.createTypeTestExpressionNode(newLhsExpr, typeTest.isKeyword,\ntypeTest.typeDescriptor);\ncase ANNOT_ACCESS:\nSTAnnotAccessExpressionNode annotAccess = (STAnnotAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, annotAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, annotAccess.annotChainingToken,\nannotAccess.annotTagReference);\ncase 
OPTIONAL_FIELD_ACCESS:\nSTOptionalFieldAccessExpressionNode optionalFieldAccess =\n(STOptionalFieldAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, optionalFieldAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr,\noptionalFieldAccess.optionalChainingToken, optionalFieldAccess.fieldName);\ncase CONDITIONAL_EXPRESSION:\nSTConditionalExpressionNode conditionalExpr = (STConditionalExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, conditionalExpr.lhsExpression);\nreturn STNodeFactory.createConditionalExpressionNode(newLhsExpr, conditionalExpr.questionMarkToken,\nconditionalExpr.middleExpression, conditionalExpr.colonToken, conditionalExpr.endExpression);\ncase REMOTE_METHOD_CALL_ACTION:\nSTRemoteMethodCallActionNode remoteCall = (STRemoteMethodCallActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, remoteCall.expression);\nreturn STNodeFactory.createRemoteMethodCallActionNode(newLhsExpr, remoteCall.rightArrowToken,\nremoteCall.methodName, remoteCall.openParenToken, remoteCall.arguments,\nremoteCall.closeParenToken);\ncase ASYNC_SEND_ACTION:\nSTAsyncSendActionNode asyncSend = (STAsyncSendActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, asyncSend.expression);\nreturn STNodeFactory.createAsyncSendActionNode(newLhsExpr, asyncSend.rightArrowToken,\nasyncSend.peerWorker);\ncase SYNC_SEND_ACTION:\nSTSyncSendActionNode syncSend = (STSyncSendActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, syncSend.expression);\nreturn STNodeFactory.createAsyncSendActionNode(newLhsExpr, syncSend.syncSendToken, syncSend.peerWorker);\ndefault:\nreturn exprOrAction;\n}\n}\nprivate STNode mergeQualifiedNameWithTypeDesc(STNode qualifiedName, STNode typeDesc) {\nswitch (typeDesc.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn qualifiedName;\ncase ARRAY_TYPE_DESC:\nSTArrayTypeDescriptorNode arrayTypeDesc = (STArrayTypeDescriptorNode) typeDesc;\nSTNode newMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, arrayTypeDesc.memberTypeDesc);\nreturn STNodeFactory.createArrayTypeDescriptorNode(newMemberType, arrayTypeDesc.openBracket,\narrayTypeDesc.arrayLength, arrayTypeDesc.closeBracket);\ncase UNION_TYPE_DESC:\nSTUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDesc;\nSTNode newlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, unionTypeDesc.leftTypeDesc);\nreturn STNodeFactory.createUnionTypeDescriptorNode(newlhsType, unionTypeDesc.pipeToken,\nunionTypeDesc.rightTypeDesc);\ncase INTERSECTION_TYPE_DESC:\nSTIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDesc;\nnewlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, intersectionTypeDesc.leftTypeDesc);\nreturn STNodeFactory.createUnionTypeDescriptorNode(newlhsType, intersectionTypeDesc.bitwiseAndToken,\nintersectionTypeDesc.rightTypeDesc);\ncase OPTIONAL_TYPE_DESC:\nSTOptionalTypeDescriptorNode optionalType = (STOptionalTypeDescriptorNode) typeDesc;\nnewMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, optionalType.typeDescriptor);\nreturn STNodeFactory.createOptionalTypeDescriptorNode(newMemberType, optionalType.questionMarkToken);\ndefault:\nreturn typeDesc;\n}\n}\nprivate List getTypeDescList(List ambiguousList) {\nList typeDescList = new ArrayList<>();\nfor (STNode item : ambiguousList) {\ntypeDescList.add(getTypeDescFromExpr(item));\n}\nreturn typeDescList;\n}\n/**\n* Create a 
type-desc out of an expression.\n*\n* @param expression Expression\n* @return Type descriptor\n*/\nprivate STNode getTypeDescFromExpr(STNode expression) {\nswitch (expression.kind) {\ncase INDEXED_EXPRESSION:\nreturn parseArrayTypeDescriptorNode((STIndexedExpressionNode) expression);\ncase BASIC_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn STNodeFactory.createSingletonTypeDescriptorNode(expression);\ncase TYPE_REFERENCE_TYPE_DESC:\nreturn ((STTypeReferenceTypeDescNode) expression).typeRef;\ncase BRACED_EXPRESSION:\nSTBracedExpressionNode bracedExpr = (STBracedExpressionNode) expression;\nSTNode typeDesc = getTypeDescFromExpr(bracedExpr.expression);\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(bracedExpr.openParen, typeDesc,\nbracedExpr.closeParen);\ncase NIL_LITERAL:\nSTNilLiteralNode nilLiteral = (STNilLiteralNode) expression;\nreturn STNodeFactory.createNilTypeDescriptorNode(nilLiteral.openParenToken, nilLiteral.closeParenToken);\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) expression;\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(innerList.members));\nreturn STNodeFactory.createTupleTypeDescriptorNode(innerList.collectionStartToken, memberTypeDescs,\ninnerList.collectionEndToken);\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) expression;\nswitch (binaryExpr.operator.kind) {\ncase PIPE_TOKEN:\nSTNode lhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr);\nSTNode rhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr);\nreturn STNodeFactory.createUnionTypeDescriptorNode(lhsTypeDesc, binaryExpr.operator,\nrhsTypeDesc);\ncase BITWISE_AND_TOKEN:\nlhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr);\nrhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(lhsTypeDesc, binaryExpr.operator,\nrhsTypeDesc);\ndefault:\nbreak;\n}\nreturn expression;\ncase UNARY_EXPRESSION:\nreturn STNodeFactory.createSingletonTypeDescriptorNode(expression);\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ndefault:\nreturn expression;\n}\n}\nprivate List getBindingPatternsList(List ambibuousList) {\nList bindingPatterns = new ArrayList();\nfor (STNode item : ambibuousList) {\nbindingPatterns.add(getBindingPattern(item));\n}\nreturn bindingPatterns;\n}\nprivate STNode getBindingPattern(STNode ambiguousNode) {\nif (isEmpty(ambiguousNode)) {\nreturn ambiguousNode;\n}\nswitch (ambiguousNode.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTNode varName = ((STSimpleNameReferenceNode) ambiguousNode).name;\nreturn createCaptureOrWildcardBP(varName);\ncase QUALIFIED_NAME_REFERENCE:\nSTQualifiedNameReferenceNode qualifiedName = (STQualifiedNameReferenceNode) ambiguousNode;\nSTNode fieldName = STNodeFactory.createSimpleNameReferenceNode(qualifiedName.modulePrefix);\nreturn STNodeFactory.createFieldBindingPatternFullNode(fieldName, qualifiedName.colon,\ngetBindingPattern(qualifiedName.identifier));\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode;\nSTNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(innerList.members));\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nreturn 
STNodeFactory.createListBindingPatternNode(innerList.collectionStartToken, memberBindingPatterns,\nrestBindingPattern, innerList.collectionEndToken);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ninnerList = (STAmbiguousCollectionNode) ambiguousNode;\nList bindingPatterns = new ArrayList<>();\nrestBindingPattern = STNodeFactory.createEmptyNode();\nfor (int i = 0; i < innerList.members.size(); i++) {\nSTNode bp = getBindingPattern(innerList.members.get(i));\nif (bp.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = bp;\nbreak;\n}\nbindingPatterns.add(bp);\n}\nmemberBindingPatterns = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createMappingBindingPatternNode(innerList.collectionStartToken,\nmemberBindingPatterns, restBindingPattern, innerList.collectionEndToken);\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode;\nfieldName = STNodeFactory.createSimpleNameReferenceNode(field.fieldName);\nif (field.valueExpr == null) {\nreturn STNodeFactory.createFieldBindingPatternVarnameNode(fieldName);\n}\nreturn STNodeFactory.createFieldBindingPatternFullNode(fieldName, field.colon,\ngetBindingPattern(field.valueExpr));\ncase FUNCTION_CALL:\nSTFunctionCallExpressionNode funcCall = (STFunctionCallExpressionNode) ambiguousNode;\nSTNode args = funcCall.arguments;\nint size = args.bucketCount();\nbindingPatterns = new ArrayList<>();\nfor (int i = 0; i < size; i++) {\nSTNode arg = args.childInBucket(i);\nbindingPatterns.add(getBindingPattern(arg));\n}\nSTNode argListBindingPatterns = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createFunctionalBindingPatternNode(funcCall.functionName, funcCall.openParenToken,\nargListBindingPatterns, funcCall.closeParenToken);\ncase POSITIONAL_ARG:\nSTPositionalArgumentNode positionalArg = (STPositionalArgumentNode) ambiguousNode;\nreturn getBindingPattern(positionalArg.expression);\ncase NAMED_ARG:\nSTNamedArgumentNode namedArg = (STNamedArgumentNode) ambiguousNode;\nreturn STNodeFactory.createNamedArgBindingPatternNode(namedArg.argumentName, namedArg.equalsToken,\ngetBindingPattern(namedArg.expression));\ncase REST_ARG:\nSTRestArgumentNode restArg = (STRestArgumentNode) ambiguousNode;\nreturn STNodeFactory.createRestBindingPatternNode(restArg.ellipsis, restArg.expression);\ndefault:\nreturn ambiguousNode;\n}\n}\nprivate List getExpressionList(List ambibuousList) {\nList exprList = new ArrayList();\nfor (STNode item : ambibuousList) {\nexprList.add(getExpression(item));\n}\nreturn exprList;\n}\nprivate STNode getExpression(STNode ambiguousNode) {\nif (isEmpty(ambiguousNode)) {\nreturn ambiguousNode;\n}\nswitch (ambiguousNode.kind) {\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode;\nSTNode memberExprs = STNodeFactory.createNodeList(getExpressionList(innerList.members));\nreturn STNodeFactory.createListConstructorExpressionNode(innerList.collectionStartToken, memberExprs,\ninnerList.collectionEndToken);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ninnerList = (STAmbiguousCollectionNode) ambiguousNode;\nList fieldList = new ArrayList<>();\nfor (int i = 0; i < innerList.members.size(); i++) {\nSTNode field = innerList.members.get(i);\nSTNode fieldNode;\nif (field.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTQualifiedNameReferenceNode qualifiedNameRefNode = (STQualifiedNameReferenceNode) field;\nSTNode readOnlyKeyword = STNodeFactory.createEmptyNode();\nSTNode fieldName = 
qualifiedNameRefNode.modulePrefix;\nSTNode colon = qualifiedNameRefNode.colon;\nSTNode valueExpr = getExpression(qualifiedNameRefNode.identifier);\nfieldNode = STNodeFactory.createSpecificFieldNode(readOnlyKeyword, fieldName, colon,\nvalueExpr);\n} else {\nfieldNode = getExpression(field);\n}\nfieldList.add(fieldNode);\n}\nSTNode fields = STNodeFactory.createNodeList(fieldList);\nreturn STNodeFactory.createMappingConstructorExpressionNode(innerList.collectionStartToken,\nfields, innerList.collectionEndToken);\ncase REST_BINDING_PATTERN:\nSTRestBindingPatternNode restBindingPattern = (STRestBindingPatternNode) ambiguousNode;\nreturn STNodeFactory.createSpreadFieldNode(restBindingPattern.ellipsisToken,\nrestBindingPattern.variableName);\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode;\nreturn STNodeFactory.createSpecificFieldNode(field.readonlyKeyword, field.fieldName, field.colon,\ngetExpression(field.valueExpr));\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ndefault:\nreturn ambiguousNode;\n}\n}\nprivate STNode getMappingField(STNode identifier, STNode colon, STNode bindingPatternOrExpr) {\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nswitch (bindingPatternOrExpr.kind) {\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\nreturn STNodeFactory.createFieldBindingPatternFullNode(simpleNameRef, colon, bindingPatternOrExpr);\ncase LIST_CONSTRUCTOR:\ncase MAPPING_CONSTRUCTOR:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, simpleNameRef, colon, identifier);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, bindingPatternOrExpr);\n}\n}\n}", + "target_code": "if (nextToken.kind == SyntaxKind.ASCENDING_KEYWORD) {", + "method_body_after": "private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {\nswitch (peek(lookahead + 1).kind) {\ncase IDENTIFIER_TOKEN:\nSyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;\nswitch (tokenAfterIdentifier) {\ncase ON_KEYWORD:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\ncase QUESTION_MARK_TOKEN:\nreturn false;\ndefault:\nreturn false;\n}\ncase ON_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse listener declaration, given the qualifier.\n*
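The disambiguation machinery above (the `getParsingNodeTypeOfListBPOrListCons`-style classifiers plus `getBindingPattern`/`getExpression` rewrites) boils down to a vote: each member of the ambiguous list either forces one reading or stays neutral. Below is a minimal, self-contained sketch of that idea; the class, enum, and member kinds are illustrative names, not the parser's real API.

```java
import java.util.List;

public class ListAmbiguityVote {
    enum Kind { SIMPLE_NAME, WILDCARD_BP, REST_BP, INT_LITERAL, BINARY_EXPR }
    enum Verdict { LIST_BINDING_PATTERN, LIST_CONSTRUCTOR, AMBIGUOUS }

    // Mirrors the spirit of getParsingNodeTypeOfListBPOrListCons: members legal
    // only in binding patterns force that reading, expression-only members force
    // a list constructor, and bare names decide nothing.
    static Verdict classify(Kind member) {
        switch (member) {
            case WILDCARD_BP:
            case REST_BP:
                return Verdict.LIST_BINDING_PATTERN; // `_` / `...x` only occur in binding patterns
            case INT_LITERAL:
            case BINARY_EXPR:
                return Verdict.LIST_CONSTRUCTOR;     // literals and operators only occur in expressions
            default:
                return Verdict.AMBIGUOUS;            // a bare identifier fits both readings
        }
    }

    static Verdict resolve(List<Kind> members) {
        for (Kind m : members) {
            Verdict v = classify(m);
            if (v != Verdict.AMBIGUOUS) {
                return v; // the first decisive member settles the whole list
            }
        }
        return Verdict.AMBIGUOUS; // caller must look at the token after `]` instead
    }

    public static void main(String[] args) {
        System.out.println(resolve(List.of(Kind.SIMPLE_NAME, Kind.WILDCARD_BP))); // LIST_BINDING_PATTERN
        System.out.println(resolve(List.of(Kind.SIMPLE_NAME, Kind.BINARY_EXPR))); // LIST_CONSTRUCTOR
    }
}
```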
\n* \n* listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;\n* \n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @return Parsed node\n*/\nprivate STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.LISTENER_DECL);\nSTNode listenerKeyword = parseListenerKeyword();\nif (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode listenerDecl =\nparseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);\nendContext();\nreturn listenerDecl;\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse listener keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseListenerKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LISTENER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse constant declaration, given the qualifier.\n*
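`parseListenerKeyword` above, like `parseConstantKeyword` and `parseTypeofKeyword` later in this section, follows the same consume-or-recover idiom: consume the token if it is the expected keyword, otherwise let error recovery synthesize one so parsing can continue. A minimal sketch of that shape, with hypothetical names standing in for the real token stream and `recover(...)` machinery:

```java
import java.util.ArrayDeque;
import java.util.Deque;

public class ConsumeOrRecover {
    record Token(String kind, boolean synthetic) {}

    private final Deque<Token> tokens;

    ConsumeOrRecover(Deque<Token> tokens) { this.tokens = tokens; }

    Token parseKeyword(String expected) {
        Token next = tokens.peek();
        if (next != null && next.kind().equals(expected)) {
            return tokens.poll(); // happy path: consume the keyword
        }
        // Recovery: leave the input untouched and return a synthetic token,
        // analogous to recover(...) returning a recoveredNode.
        return new Token(expected, true);
    }

    public static void main(String[] args) {
        Deque<Token> input = new ArrayDeque<>();
        input.add(new Token("IDENTIFIER", false));
        ConsumeOrRecover p = new ConsumeOrRecover(input);
        System.out.println(p.parseKeyword("listener")); // Token[kind=listener, synthetic=true]
    }
}
```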
\n* module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @return Parsed node\n*/\nprivate STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.CONSTANT_DECL);\nSTNode constKeyword = parseConstantKeyword();\nSTNode constDecl = parseConstDecl(metadata, qualifier, constKeyword);\nendContext();\nreturn constDecl;\n}\n/**\n* Parse the components that follows after the const keyword of a constant declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {\nSTToken nextToken = peek();\nreturn parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword);\n}\nprivate STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode keyword) {\nswitch (nextTokenKind) {\ncase ANNOTATION_KEYWORD:\nswitchContext(ParserRuleContext.ANNOTATION_DECL);\nreturn parseAnnotationDeclaration(metadata, qualifier, keyword);\ncase IDENTIFIER_TOKEN:\nreturn parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, keyword, false);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, keyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseConstDeclFromType(solution.tokenKind, metadata, qualifier, keyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\nprivate STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nboolean isListener) {\nSTNode varNameOrTypeName = parseStatementStartIdentifier();\nSTNode constDecl =\nparseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);\nreturn constDecl;\n}\n/**\n* Parse the component that follows the first identifier in a const decl. 
The identifier\n* can be either the type-name (a user-defined type) or the var-name where the type-name\n* is not present.\n*\n* @param qualifier Qualifier that precedes the constant decl\n* @param keyword Keyword\n* @param typeOrVarName Identifier that follows the const-keyword\n* @return Parsed node\n*/\nprivate STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,\nSTNode typeOrVarName, boolean isListener) {\nif (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode type = typeOrVarName;\nSTNode variableName = parseVariableName();\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nSTToken token = peek();\nreturn parseConstantOrListenerDeclRhs(token.kind, metadata, qualifier, keyword, typeOrVarName, isListener);\n}\nprivate STNode parseConstantOrListenerDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode keyword, STNode typeOrVarName, boolean isListener) {\nSTNode type;\nSTNode variableName;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ntype = typeOrVarName;\nvariableName = parseVariableName();\nbreak;\ncase EQUAL_TOKEN:\nvariableName = ((STSimpleNameReferenceNode) typeOrVarName).name;\ntype = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword,\ntypeOrVarName, isListener);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseConstantOrListenerDeclRhs(solution.tokenKind, metadata, qualifier, keyword, typeOrVarName,\nisListener);\n}\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nprivate STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,\nSTNode type, STNode variableName) {\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nif (isListener) {\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse const keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseConstantKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONST_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONST_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse nil type descriptor.\n*
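The `parseConstantOrListenerDeclRhs` switch above makes the optional-type decision with one token of lookahead after the first identifier: `=` means the identifier was the variable name, another identifier means it was a user-defined type name. A small sketch of that decision table (hypothetical names, recovery reduced to a message):

```java
public class ConstDeclLookahead {
    static String describe(String tokenAfterFirstIdentifier) {
        switch (tokenAfterFirstIdentifier) {
            case "=":
                return "no type: first identifier is the variable name";
            case "IDENTIFIER":
                return "typed: first identifier is the type, next is the variable";
            default:
                return "unexpected token: run recovery";
        }
    }

    public static void main(String[] args) {
        System.out.println(describe("="));          // e.g. const PI = 3.14;
        System.out.println(describe("IDENTIFIER")); // e.g. const float PI = 3.14;
    }
}
```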
\n* nil-type-descriptor := ( ) \n*
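Per the grammar above, the nil type is exactly an open paren followed by a close paren. A minimal sketch of that two-token parse, with recovery left out for brevity (all names illustrative):

```java
public class NilTypeSketch {
    static int pos;
    static String[] toks;

    static String parseNilType() {
        expect("(");
        expect(")");
        return "nil";
    }

    static void expect(String kind) {
        if (pos < toks.length && toks[pos].equals(kind)) {
            pos++;
            return;
        }
        throw new IllegalStateException("expected " + kind + " at " + pos);
    }

    public static void main(String[] args) {
        toks = new String[] {"(", ")"};
        pos = 0;
        System.out.println(parseNilType()); // nil
    }
}
```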
\n*\n* @return Parsed node\n*/\nprivate STNode parseNilTypeDescriptor() {\nstartContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken);\n}\n/**\n* Parse typeof expression.\n*
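Both the typeof expression here and the unary expression later parse their operand at `OperatorPrecedence.UNARY`, so a following binary operator is not swallowed into the operand: `typeof a + b` groups as `(typeof a) + b`. A tiny Pratt-style sketch of why the precedence argument produces that grouping (assumed simplification, not the parser's real expression machinery):

```java
public class UnaryPrecedence {
    static int pos;
    static String[] toks;
    static final int UNARY = 10;

    static String parseExpr(int minPrec) {
        String left = parsePrimary();
        while (pos < toks.length && prec(toks[pos]) >= minPrec) {
            String op = toks[pos++];
            String right = parseExpr(prec(op) + 1);
            left = "(" + left + " " + op + " " + right + ")";
        }
        return left;
    }

    static String parsePrimary() {
        String t = toks[pos++];
        if (t.equals("typeof")) {
            // operand parsed at UNARY precedence: `+` (prec 5) cannot bind inside
            return "(typeof " + parseExpr(UNARY) + ")";
        }
        return t;
    }

    static int prec(String op) { return op.equals("+") ? 5 : -1; }

    public static void main(String[] args) {
        toks = new String[] {"typeof", "a", "+", "b"};
        pos = 0;
        System.out.println(parseExpr(0)); // ((typeof a) + b)
    }
}
```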

\n* \n* typeof-expr := typeof expression\n* \n*\n* @param isRhsExpr\n* @return Typeof expression node\n*/\nprivate STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode typeofKeyword = parseTypeofKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);\n}\n/**\n* Parse typeof-keyword.\n*\n* @return Typeof-keyword node\n*/\nprivate STNode parseTypeofKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPEOF_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse optional type descriptor.\n*

\n* optional-type-descriptor := type-descriptor ? \n*
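\n* e.g. (illustrative sample inferred from the rule above, not part of the original doc): "string?" in\n* "string? name = ();" is an optional type descriptor.\n*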

\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {\nstartContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);\nSTNode questionMarkToken = parseQuestionMark();\nendContext();\nreturn STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken);\n}\n/**\n* Parse unary expression.\n*

\n* \n* unary-expr := + expression | - expression | ~ expression | ! expression\n* \n*\n* @param isRhsExpr\n* @return Unary expression node\n*/\nprivate STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode unaryOperator = parseUnaryOperator();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);\n}\n/**\n* Parse unary operator.\n* UnaryOperator := + | - | ~ | !\n*\n* @return Parsed node\n*/\nprivate STNode parseUnaryOperator() {\nSTToken token = peek();\nif (isUnaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.UNARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a unary operator.\n*\n* @param kind STToken kind\n* @return true if the token kind refers to a unary operator. false otherwise\n*/\nprivate boolean isUnaryOperator(SyntaxKind kind) {\nswitch (kind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse array type descriptor.\n*

\n* \n* array-type-descriptor := member-type-descriptor [ [ array-length ] ]\n* member-type-descriptor := type-descriptor\n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* inferred-array-length := *\n* \n*
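\n* e.g. (illustrative samples inferred from the rules above, not part of the original doc): "int[3]"\n* (int-literal length), "int[]" (no length) and "int[*]" (inferred length).\n*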

\n*\n* @param memberTypeDesc\n*\n* @return Parsed Node\n*/\nprivate STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {\nstartContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);\nSTNode openBracketToken = parseOpenBracket();\nSTNode arrayLengthNode = parseArrayLength();\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode,\ncloseBracketToken);\n}\n/**\n* Parse array length.\n*

\n* \n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* constant-reference-expr := variable-reference-expr\n* \n*
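\n* e.g. (illustrative sample inferred from the rules above; MAX_COUNT is a hypothetical constant name):\n* in "int[MAX_COUNT] arr;" the length is a constant-reference-expr.\n*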

\n*\n* @return Parsed array length\n*/\nprivate STNode parseArrayLength() {\nSTToken token = peek();\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);\ndefault:\nSolution sol = recover(token, ParserRuleContext.ARRAY_LENGTH);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse annotations.\n*

\n* Note: In the Ballerina spec ({@link https:\n* annotations-list is specified as one-or-more annotations, and its usage is marked as an\n* optional annotations-list. However, for the consistency of the tree, here we treat the\n* annotations-list as zero-or-more annotations, and its usage as non-optional.\n*

\n* annots := annotation*\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalAnnotations() {\nSTToken nextToken = peek();\nreturn parseOptionalAnnotations(nextToken.kind);\n}\nprivate STNode parseOptionalAnnotations(SyntaxKind nextTokenKind) {\nstartContext(ParserRuleContext.ANNOTATIONS);\nList annotList = new ArrayList<>();\nwhile (nextTokenKind == SyntaxKind.AT_TOKEN) {\nannotList.add(parseAnnotation());\nnextTokenKind = peek().kind;\n}\nendContext();\nreturn STNodeFactory.createNodeList(annotList);\n}\n/**\n* Parse annotation list with at least one annotation.\n*\n* @return Annotation list\n*/\nprivate STNode parseAnnotations() {\nstartContext(ParserRuleContext.ANNOTATIONS);\nList annotList = new ArrayList<>();\nannotList.add(parseAnnotation());\nwhile (peek().kind == SyntaxKind.AT_TOKEN) {\nannotList.add(parseAnnotation());\n}\nendContext();\nreturn STNodeFactory.createNodeList(annotList);\n}\n/**\n* Parse annotation attachment.\n*

\n* annotation := @ annot-tag-reference annot-value\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotation() {\nSTNode atToken = parseAtToken();\nSTNode annotReference;\nif (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {\nannotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n} else {\nannotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);\n}\nSTNode annotValue;\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nannotValue = parseMappingConstructorExpr();\n} else {\nannotValue = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);\n}\n/**\n* Parse '@' token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAtToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.AT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.AT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse metadata. Metadata consists of an optional doc string and\n* an annotations list.\n*

\n* metadata := [DocumentationString] annots\n*\n* @param nextTokenKind Next token kind\n* @return Parsed node\n*/\nprivate STNode parseMetaData(SyntaxKind nextTokenKind) {\nSTNode docString;\nSTNode annotations;\nswitch (nextTokenKind) {\ncase DOCUMENTATION_STRING:\ndocString = parseMarkdownDocumentation();\nannotations = parseOptionalAnnotations();\nbreak;\ncase AT_TOKEN:\ndocString = STNodeFactory.createEmptyNode();\nannotations = parseOptionalAnnotations(nextTokenKind);\nbreak;\ndefault:\nreturn createEmptyMetadata();\n}\nreturn STNodeFactory.createMetadataNode(docString, annotations);\n}\n/**\n* Create empty metadata node.\n*\n* @return A metadata node with no doc string and no annotations\n*/\nprivate STNode createEmptyMetadata() {\nreturn STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createEmptyNodeList());\n}\n/**\n* Parse is expression.\n* \n* is-expr := expression is type-descriptor\n* \n*\n* @param lhsExpr Preceding expression of the is expression\n* @return Is expression node\n*/\nprivate STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode isKeyword = parseIsKeyword();\nSTNode typeDescriptor =\nparseTypeDescriptorInExpression(ParserRuleContext.TYPE_DESC_IN_EXPRESSION, isInConditionalExpr);\nreturn STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor);\n}\n/**\n* Parse is-keyword.\n*\n* @return Is-keyword node\n*/\nprivate STNode parseIsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse local type definition statement.\n* local-type-defn-stmt := [annots] type identifier type-descriptor ;\n*\n* @return Local type definition statement\n*/\nprivate STNode parseLocalTypeDefinitionStatement(STNode annots) {\nstartContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);\nSTNode typeKeyword = parseTypeKeyword();\nSTNode typeName = parseTypeName();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,\nsemicolon);\n}\n/**\n* Parse a statement that consists only of an action or expression.\n*\n* @param annots Annotations\n* @param nextTokenKind Next token kind\n* @return Statement node\n*/\nprivate STNode parseExpressionStatement(SyntaxKind nextTokenKind, STNode annots) {\nstartContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode expression = parseActionOrExpressionInLhs(nextTokenKind, annots);\nreturn getExpressionAsStatement(expression);\n}\n/**\n* Parse statements that start with an expression.\n*\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExpr(STNode annots) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode expr = parseActionOrExpressionInLhs(peek().kind, annots);\nreturn parseStatementStartWithExprRhs(expr);\n}\n/**\n* Parse the rhs of statements that start with an expression.\n*\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExprRhs(STNode expression) {\nSTToken nextToken = peek();\nreturn parseStatementStartWithExprRhs(nextToken.kind, expression);\n}\n/**\n* Parse the component that follows the expression, at the beginning of a statement.\n*\n* @param nextTokenKind Kind of the next token\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExprRhs(SyntaxKind nextTokenKind, STNode 
expression) {\nswitch (nextTokenKind) {\ncase EQUAL_TOKEN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(expression);\ncase SEMICOLON_TOKEN:\nreturn getExpressionAsStatement(expression);\ncase IDENTIFIER_TOKEN:\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn parseCompoundAssignmentStmtRhs(expression);\n}\nParserRuleContext context;\nif (isPossibleExpressionStatement(expression)) {\ncontext = ParserRuleContext.EXPR_STMT_RHS;\n} else {\ncontext = ParserRuleContext.STMT_START_WITH_EXPR_RHS;\n}\nSolution solution = recover(peek(), context, expression);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatementStartWithExprRhs(solution.tokenKind, expression);\n}\n}\nprivate boolean isPossibleExpressionStatement(STNode expression) {\nswitch (expression.kind) {\ncase METHOD_CALL:\ncase FUNCTION_CALL:\ncase CHECK_EXPRESSION:\ncase FAIL_EXPRESSION:\ncase REMOTE_METHOD_CALL_ACTION:\ncase CHECK_ACTION:\ncase FAIL_ACTION:\ncase BRACED_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode getExpressionAsStatement(STNode expression) {\nswitch (expression.kind) {\ncase METHOD_CALL:\ncase FUNCTION_CALL:\ncase CHECK_EXPRESSION:\nreturn parseCallStatement(expression);\ncase REMOTE_METHOD_CALL_ACTION:\ncase CHECK_ACTION:\ncase FAIL_ACTION:\ncase BRACED_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\ncase FAIL_EXPRESSION:\nreturn parseActionStatement(expression);\ndefault:\nSTNode semicolon = parseSemicolon();\nendContext();\nSTNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,\nexpression, semicolon);\nexprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);\nreturn exprStmt;\n}\n}\nprivate STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {\nSTNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);\nSTNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;\nif (lengthExprs.isEmpty()) {\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, indexedExpr.openBracket,\nSTNodeFactory.createEmptyNode(), indexedExpr.closeBracket);\n}\nSTNode lengthExpr = lengthExprs.get(0);\nswitch (lengthExpr.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ndefault:\nSTNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(\nindexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\nindexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);\nlengthExpr = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, indexedExpr.openBracket, lengthExpr,\nindexedExpr.closeBracket);\n}\n/**\n*

\n* Parse call statement, given the call expression.\n*

\n* \n* call-stmt := call-expr ;\n*
\n* call-expr := function-call-expr | method-call-expr | checking-keyword call-expr\n*
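\n* e.g. (illustrative samples inferred from the rules above; foo and obj are hypothetical names):\n* "foo();", "obj.bar();" and "check foo();" are call statements.\n*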
\n*\n* @param expression Call expression associated with the call statement\n* @return Call statement node\n*/\nprivate STNode parseCallStatement(STNode expression) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);\n}\nprivate STNode parseActionStatement(STNode action) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);\n}\n/**\n* Parse remote method call action, given the starting expression.\n*

\n* \n* remote-method-call-action := expression -> method-name ( arg-list )\n*
\n* async-send-action := expression -> peer-worker ;\n*
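\n* e.g. (illustrative samples inferred from the rules above; ep, msg and w1 are hypothetical names):\n* "ep->get(id)" is a remote method call, while "msg -> w1;" is an async send.\n*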
\n*\n* @param isRhsExpr Is this an RHS action\n* @param expression LHS expression\n* @return\n*/\nprivate STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {\nSTNode rightArrow = parseRightArrow();\nreturn parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n}\nprivate STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {\nreturn parseRemoteCallOrAsyncSendActionRhs(peek().kind, expression, isRhsExpr, rightArrow);\n}\nprivate STNode parseRemoteCallOrAsyncSendActionRhs(SyntaxKind nextTokenKind, STNode expression, boolean isRhsExpr,\nSTNode rightArrow) {\nSTNode name;\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\nSTNode defaultKeyword = parseDefaultKeyword();\nname = STNodeFactory.createSimpleNameReferenceNode(defaultKeyword);\nreturn parseAsyncSendAction(expression, rightArrow, name);\ncase IDENTIFIER_TOKEN:\nname = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());\nbreak;\ncase CONTINUE_KEYWORD:\ncase COMMIT_KEYWORD:\nname = getKeywordAsSimpleNameRef();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression,\nisRhsExpr, rightArrow);\nif (solution.action == Action.REMOVE) {\nname = solution.recoveredNode;\nbreak;\n}\nreturn parseRemoteCallOrAsyncSendActionRhs(solution.tokenKind, expression, isRhsExpr, rightArrow);\n}\nreturn parseRemoteCallOrAsyncSendEnd(peek().kind, expression, rightArrow, name);\n}\nprivate STNode parseRemoteCallOrAsyncSendEnd(SyntaxKind nextTokenKind, STNode expression, STNode rightArrow,\nSTNode name) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseRemoteMethodCallAction(expression, rightArrow, name);\ncase SEMICOLON_TOKEN:\nreturn parseAsyncSendAction(expression, rightArrow, name);\ndefault:\nSTToken token = peek();\nSolution solution =\nrecover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRemoteCallOrAsyncSendEnd(solution.tokenKind, expression, rightArrow, name);\n}\n}\n/**\n* Parse default keyword.\n*\n* @return default keyword node\n*/\nprivate STNode parseDefaultKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DEFAULT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DEFAULT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {\nreturn STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);\n}\nprivate STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {\nSTNode openParenToken = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode arguments = parseArgsList();\nSTNode closeParenToken = parseCloseParenthesis();\nreturn STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,\ncloseParenToken);\n}\n/**\n* Parse right arrow (->) token.\n*\n* @return Parsed node\n*/\nprivate STNode parseRightArrow() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse parameterized type descriptor.\n* parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter\n*\n* @return 
Parsed node\n*/\nprivate STNode parseParameterizedTypeDescriptor() {\nSTNode parameterizedTypeKeyword = parseParameterizedTypeKeyword();\nSTNode ltToken = parseLTToken();\nSTNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, ltToken, typeNode,\ngtToken);\n}\n/**\n* Parse map or future keyword token.\n*\n* @return Parsed node\n*/\nprivate STNode parseParameterizedTypeKeyword() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\nreturn consume();\ndefault:\nSolution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse > token.\n*\n* @return Parsed node\n*/\nprivate STNode parseGTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.GT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.GT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse < token.\n*\n* @return Parsed node\n*/\nprivate STNode parseLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.LT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse nil literal. Here, nil literal refers only to ( ).\n*\n* @return Parsed node\n*/\nprivate STNode parseNilLiteral() {\nstartContext(ParserRuleContext.NIL_LITERAL);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);\n}\n/**\n* Parse annotation declaration, given the qualifier.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation declaration\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {\nstartContext(ParserRuleContext.ANNOTATION_DECL);\nSTNode annotationKeyword = parseAnnotationKeyword();\nSTNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\nendContext();\nreturn annotDecl;\n}\n/**\n* Parse annotation keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ANNOTATION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse the components that follow the annotation keyword of an annotation declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @param annotationKeyword Annotation keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTToken nextToken = peek();\nreturn parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword);\n}\nprivate STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode constKeyword, STNode annotationKeyword) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) 
{\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier,\nconstKeyword, annotationKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword,\nannotationKeyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\n/**\n* Parse annotation tag.\n*

\n* annot-tag := identifier\n*\n* @return\n*/\nprivate STNode parseAnnotationTag() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.ANNOTATION_TAG);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);\nif (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag, annotTag);\n}\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {\nSTNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,\nParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nSTNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;\nreturn parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);\n}\n/**\n* Parse the component that follows the first identifier in an annotation decl. The identifier\n* can be either the type-name (a user defined type) or the annot-tag, where the type-name\n* is not present.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation decl\n* @param constKeyword Const keyword\n* @param annotationKeyword Annotation keyword\n* @param typeDescOrAnnotTag Identifier that follows the annotation-keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDescOrAnnotTag) {\nSTToken token = peek();\nreturn parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag);\n}\nprivate STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) {\nSTNode typeDesc;\nSTNode annotTag;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ntypeDesc = typeDescOrAnnotTag;\nannotTag = parseAnnotationTag();\nbreak;\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ntypeDesc = STNodeFactory.createEmptyNode();\nannotTag = typeDescOrAnnotTag;\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword,\nannotationKeyword, typeDescOrAnnotTag);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag);\n}\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nprivate STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDesc, STNode annotTag) {\nSTToken nextToken = peek();\nreturn parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDesc, annotTag);\n}\nprivate STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode 
constKeyword, STNode annotationKeyword, STNode typeDesc,\nSTNode annotTag) {\nSTNode onKeyword;\nSTNode attachPoints;\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nonKeyword = STNodeFactory.createEmptyNode();\nattachPoints = STNodeFactory.createEmptyNodeList();\nbreak;\ncase ON_KEYWORD:\nonKeyword = parseOnKeyword();\nattachPoints = parseAnnotationAttachPoints();\nonKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier,\nconstKeyword, annotationKeyword, typeDesc, annotTag);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword,\nannotationKeyword, typeDesc, annotTag);\n}\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDesc, annotTag, onKeyword, attachPoints, semicolonToken);\n}\n/**\n* Parse annotation attach points.\n*

\n* \n* annot-attach-points := annot-attach-point (, annot-attach-point)*\n*

\n* annot-attach-point := dual-attach-point | source-only-attach-point\n*

\n* dual-attach-point := [source] dual-attach-point-ident\n*

\n* dual-attach-point-ident :=\n* [object] type\n* | [object|resource] function\n* | parameter\n* | return\n* | service\n* | [object|record] field\n*

\n* source-only-attach-point := source source-only-attach-point-ident\n*

\n* source-only-attach-point-ident :=\n* annotation\n* | external\n* | var\n* | const\n* | listener\n* | worker\n*
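\n* e.g. (illustrative samples inferred from the rules above, not part of the original doc):\n* "on function", "on object field" and "on source var" are valid attach point lists.\n*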
\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoints() {\nstartContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);\nList attachPoints = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndAnnotAttachPointList(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode attachPoint = parseAnnotationAttachPoint();\nattachPoints.add(attachPoint);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndAnnotAttachPointList(nextToken.kind)) {\nleadingComma = parseAttachPointEnd();\nif (leadingComma == null) {\nbreak;\n}\nattachPoints.add(leadingComma);\nattachPoint = parseAnnotationAttachPoint();\nif (attachPoint == null) {\nattachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nattachPoints.add(attachPoint);\nbreak;\n}\nattachPoints.add(attachPoint);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(attachPoints);\n}\n/**\n* Parse annotation attach point end.\n*\n* @return Parsed node\n*/\nprivate STNode parseAttachPointEnd() {\nSTToken nextToken = peek();\nreturn parseAttachPointEnd(nextToken.kind);\n}\nprivate STNode parseAttachPointEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nreturn null;\ncase COMMA_TOKEN:\nreturn consume();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null;\n}\n}\nprivate boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse annotation attach point.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoint() {\nreturn parseAnnotationAttachPoint(peek().kind);\n}\nprivate STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\nreturn null;\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\ncase SOURCE_KEYWORD:\nSTNode sourceKeyword = parseSourceKeyword();\nreturn parseAttachPointIdent(sourceKeyword);\ncase OBJECT_KEYWORD:\ncase TYPE_KEYWORD:\ncase RESOURCE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ncase RECORD_KEYWORD:\nsourceKeyword = STNodeFactory.createEmptyNode();\nSTNode firstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ATTACH_POINT);\nreturn solution.recoveredNode;\n}\n}\n/**\n* Parse source keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseSourceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SOURCE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SOURCE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse attach point ident.\n*

\n* \n* source-only-attach-point-ident := annotation | external | var | const | listener | worker\n*

\n* dual-attach-point-ident := [object] type | [object|resource] function | parameter\n* | return | service | [object|record] field\n*
\n*\n* @param sourceKeyword Source keyword\n* @return Parsed node\n*/\nprivate STNode parseAttachPointIdent(STNode sourceKeyword) {\nreturn parseAttachPointIdent(peek().kind, sourceKeyword);\n}\nprivate STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) {\nswitch (nextTokenKind) {\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\nSTNode firstIdent = consume();\nSTNode secondIdent = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);\ncase OBJECT_KEYWORD:\ncase RESOURCE_KEYWORD:\ncase RECORD_KEYWORD:\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\nfirstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nfirstIdent = solution.recoveredNode;\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\n}\n}\n/**\n* Parse dual-attach-point ident.\n*\n* @param sourceKeyword Source keyword\n* @param firstIdent first part of the dual attach-point\n* @return Parsed node\n*/\nprivate STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {\nSTNode secondIdent;\nswitch (firstIdent.kind) {\ncase OBJECT_KEYWORD:\nsecondIdent = parseIdentAfterObjectIdent();\nbreak;\ncase RESOURCE_KEYWORD:\nsecondIdent = parseFunctionIdent();\nbreak;\ncase RECORD_KEYWORD:\nsecondIdent = parseFieldIdent();\nbreak;\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ndefault:\nsecondIdent = STNodeFactory.createEmptyNode();\nbreak;\n}\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);\n}\n/**\n* Parse the idents that are supported after object-ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseIdentAfterObjectIdent() {\nSTToken token = peek();\nswitch (token.kind) {\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase FIELD_KEYWORD:\nreturn consume();\ndefault:\nSolution sol = recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse function ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNCTION_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FIELD_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FIELD_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse XML namespace declaration.\n*

\n* xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;\n*
\n* xml-namespace-uri := simple-const-expr\n*
\n* xml-namespace-prefix := identifier\n*
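\n* e.g. (illustrative sample inferred from the rules above; the URI and prefix are placeholder values):\n* xmlns "http://example.com/ns" as ns;\n*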
\n*\n* @return\n*/\nprivate STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {\nstartContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);\nSTNode xmlnsKeyword = parseXMLNSKeyword();\nSTNode namespaceUri = parseXMLNamespaceUri();\nSTNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\nendContext();\nreturn xmlnsDecl;\n}\n/**\n* Parse xmlns keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNSKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XMLNS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.XMLNS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse namespace uri.\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamespaceUri() {\nSTNode expr = parseSimpleConstExpr();\nswitch (expr.kind) {\ncase STRING_LITERAL:\ncase IDENTIFIER_TOKEN:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ndefault:\nexpr = SyntaxErrors.addDiagnostic(expr, DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);\n}\nreturn expr;\n}\nprivate STNode parseSimpleConstExpr() {\nstartContext(ParserRuleContext.CONSTANT_EXPRESSION);\nSTNode expr = parseSimpleConstExprInternal();\nendContext();\nreturn expr;\n}\nprivate STNode parseSimpleConstExprInternal() {\nSTToken nextToken = peek();\nreturn parseConstExprInternal(nextToken.kind);\n}\n/**\n* Parse constant expression.\n*\n* @return Parsed node\n*/\nprivate STNode parseConstExprInternal(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase STRING_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nreturn parseBasicLiteral();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn parseSignedIntOrFloat();\ncase OPEN_PAREN_TOKEN:\nreturn parseNilLiteral();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);\nreturn solution.recoveredNode;\n}\n}\n/**\n* Parse the portion after the namespace-uri of an XML declaration.\n*\n* @param xmlnsKeyword XMLNS keyword\n* @param namespaceUri Namespace URI\n* @return Parsed node\n*/\nprivate STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {\nreturn parseXMLDeclRhs(peek().kind, xmlnsKeyword, namespaceUri, isModuleVar);\n}\nprivate STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri,\nboolean isModuleVar) {\nSTNode asKeyword = STNodeFactory.createEmptyNode();\nSTNode namespacePrefix = STNodeFactory.createEmptyNode();\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\nasKeyword = parseAsKeyword();\nnamespacePrefix = parseNamespacePrefix();\nbreak;\ncase SEMICOLON_TOKEN:\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword,\nnamespaceUri, isModuleVar);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri, isModuleVar);\n}\nSTNode semicolon = parseSemicolon();\nif (isModuleVar) {\nreturn STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,\nnamespacePrefix, semicolon);\n}\nreturn STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,\nsemicolon);\n}\n/**\n* Parse namespace prefix.\n*\n* @return Parsed node\n*/\nprivate STNode 
parseNamespacePrefix() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse named worker declaration.\n*

\n* named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }\n*\n* @param annots Annotations attached to the worker decl\n* @return Parsed node\n*/\nprivate STNode parseNamedWorkerDeclaration(STNode annots) {\nstartContext(ParserRuleContext.NAMED_WORKER_DECL);\nSTNode workerKeyword = parseWorkerKeyword();\nSTNode workerName = parseWorkerName();\nSTNode returnTypeDesc = parseReturnTypeDescriptor();\nSTNode workerBody = parseBlockNode();\nendContext();\nreturn STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc,\nworkerBody);\n}\nprivate STNode parseReturnTypeDescriptor() {\nSTToken token = peek();\nif (token.kind != SyntaxKind.RETURNS_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode returnsKeyword = consume();\nSTNode annot = parseOptionalAnnotations();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\nreturn STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n}\n/**\n* Parse worker keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse worker name.\n*

\n* worker-name := identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerName() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.WORKER_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse lock statement.\n* lock-stmt := lock block-stmt ;\n*\n* @return Lock statement\n*/\nprivate STNode parseLockStatement() {\nstartContext(ParserRuleContext.LOCK_STMT);\nSTNode lockKeyword = parseLockKeyword();\nSTNode blockStatement = parseBlockNode();\nendContext();\nreturn STNodeFactory.createLockStatementNode(lockKeyword, blockStatement);\n}\n/**\n* Parse lock-keyword.\n*\n* @return lock-keyword node\n*/\nprivate STNode parseLockKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LOCK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LOCK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse union type descriptor.\n* union-type-descriptor := type-descriptor | type-descriptor\n*\n* @param leftTypeDesc Type desc in the LHS of the union type desc.\n* @param context Current context.\n* @param isTypedBindingPattern Whether this is a typed-binding-pattern.\n* @return parsed union type desc node\n*/\nprivate STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeToken = parsePipeToken();\nSTNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);\nreturn STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);\n}\n/**\n* Parse pipe token.\n*\n* @return parsed pipe token node\n*/\nprivate STNode parsePipeToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PIPE);\nreturn sol.recoveredNode;\n}\n}\nprivate boolean isTypeStartingToken(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase IDENTIFIER_TOKEN:\ncase SERVICE_KEYWORD:\ncase RECORD_KEYWORD:\ncase OBJECT_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CLIENT_KEYWORD:\ncase OPEN_PAREN_TOKEN:\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TABLE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase DISTINCT_KEYWORD:\nreturn true;\ndefault:\nif (isSingletonTypeDescStart(nodeKind, true)) {\nreturn true;\n}\nreturn isSimpleType(nodeKind);\n}\n}\nstatic boolean isSimpleType(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase INT_KEYWORD:\ncase FLOAT_KEYWORD:\ncase DECIMAL_KEYWORD:\ncase BOOLEAN_KEYWORD:\ncase STRING_KEYWORD:\ncase BYTE_KEYWORD:\ncase XML_KEYWORD:\ncase JSON_KEYWORD:\ncase HANDLE_KEYWORD:\ncase ANY_KEYWORD:\ncase ANYDATA_KEYWORD:\ncase NEVER_KEYWORD:\ncase SERVICE_KEYWORD:\ncase VAR_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase READONLY_KEYWORD:\ncase DISTINCT_KEYWORD:\nreturn true;\ncase TYPE_DESC:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {\nswitch (typeKeyword) {\ncase INT_KEYWORD:\nreturn SyntaxKind.INT_TYPE_DESC;\ncase FLOAT_KEYWORD:\nreturn SyntaxKind.FLOAT_TYPE_DESC;\ncase DECIMAL_KEYWORD:\nreturn SyntaxKind.DECIMAL_TYPE_DESC;\ncase BOOLEAN_KEYWORD:\nreturn SyntaxKind.BOOLEAN_TYPE_DESC;\ncase STRING_KEYWORD:\nreturn SyntaxKind.STRING_TYPE_DESC;\ncase BYTE_KEYWORD:\nreturn SyntaxKind.BYTE_TYPE_DESC;\ncase XML_KEYWORD:\nreturn SyntaxKind.XML_TYPE_DESC;\ncase JSON_KEYWORD:\nreturn SyntaxKind.JSON_TYPE_DESC;\ncase HANDLE_KEYWORD:\nreturn 
SyntaxKind.HANDLE_TYPE_DESC;\ncase ANY_KEYWORD:\nreturn SyntaxKind.ANY_TYPE_DESC;\ncase ANYDATA_KEYWORD:\nreturn SyntaxKind.ANYDATA_TYPE_DESC;\ncase READONLY_KEYWORD:\nreturn SyntaxKind.READONLY_TYPE_DESC;\ncase NEVER_KEYWORD:\nreturn SyntaxKind.NEVER_TYPE_DESC;\ncase SERVICE_KEYWORD:\nreturn SyntaxKind.SERVICE_TYPE_DESC;\ncase VAR_KEYWORD:\nreturn SyntaxKind.VAR_TYPE_DESC;\ndefault:\nreturn SyntaxKind.TYPE_DESC;\n}\n}\n/**\n* Parse fork-keyword.\n*\n* @return Fork-keyword node\n*/\nprivate STNode parseForkKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FORK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FORK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse fork statement.\n* fork-stmt := fork { named-worker-decl+ }\n*\n* @return Fork statement\n*/\nprivate STNode parseForkStatement() {\nstartContext(ParserRuleContext.FORK_STMT);\nSTNode forkKeyword = parseForkKeyword();\nSTNode openBrace = parseOpenBrace();\nArrayList workers = new ArrayList<>();\nwhile (!isEndOfStatements()) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nswitch (stmt.kind) {\ncase NAMED_WORKER_DECLARATION:\nworkers.add(stmt);\nbreak;\ndefault:\nif (workers.isEmpty()) {\nopenBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n} else {\nupdateLastNodeInListWithInvalidNode(workers, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n}\n}\n}\nSTNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);\nSTNode closeBrace = parseCloseBrace();\nendContext();\nSTNode forkStmt =\nSTNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);\nif (isNodeListEmpty(namedWorkerDeclarations)) {\nreturn SyntaxErrors.addDiagnostic(forkStmt,\nDiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);\n}\nreturn forkStmt;\n}\n/**\n* Parse trap expression.\n*

\n* \n* trap-expr := trap expression\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Whether this is a RHS expression or not\n* @return Trap expression node\n*/\nprivate STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode trapKeyword = parseTrapKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);\n}\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);\n}\n/**\n* Parse trap-keyword.\n*\n* @return Trap-keyword node\n*/\nprivate STNode parseTrapKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRAP_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TRAP_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse list constructor expression.\n*

\n* \n* list-constructor-expr := [ [ expr-list ] ]\n*
\n* expr-list := expression (, expression)*\n*
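\n* e.g. (illustrative samples inferred from the rules above; x and y are hypothetical variables):\n* "[]", "[1, 2]" and "[x, y + 1]" are list constructor expressions.\n*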
\n*\n* @return Parsed node\n*/\nprivate STNode parseListConstructorExpr() {\nstartContext(ParserRuleContext.LIST_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode expressions = parseOptionalExpressionsList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);\n}\n/**\n* Parse optional expression list.\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalExpressionsList() {\nList expressions = new ArrayList<>();\nif (isEndOfListConstructor(peek().kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode expr = parseExpression();\nexpressions.add(expr);\nreturn parseOptionalExpressionsList(expressions);\n}\nprivate STNode parseOptionalExpressionsList(List expressions) {\nSTNode listConstructorMemberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nlistConstructorMemberEnd = parseListConstructorMemberEnd();\nif (listConstructorMemberEnd == null) {\nbreak;\n}\nexpressions.add(listConstructorMemberEnd);\nSTNode expr = parseExpression();\nexpressions.add(expr);\n}\nreturn STNodeFactory.createNodeList(expressions);\n}\nprivate boolean isEndOfListConstructor(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseListConstructorMemberEnd() {\nreturn parseListConstructorMemberEnd(peek().kind);\n}\nprivate STNode parseListConstructorMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListConstructorMemberEnd(solution.tokenKind);\n}\n}\n/**\n* Parse foreach statement.\n* foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt\n*\n* @return foreach statement\n*/\nprivate STNode parseForEachStatement() {\nstartContext(ParserRuleContext.FOREACH_STMT);\nSTNode forEachKeyword = parseForEachKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);\nSTNode inKeyword = parseInKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nSTNode blockStatement = parseBlockNode();\nendContext();\nreturn STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,\nblockStatement);\n}\n/**\n* Parse foreach-keyword.\n*\n* @return ForEach-keyword node\n*/\nprivate STNode parseForEachKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FOREACH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FOREACH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse in-keyword.\n*\n* @return In-keyword node\n*/\nprivate STNode parseInKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse type cast expression.\n*

\n* \n* type-cast-expr := < type-cast-param > expression\n*
\n* type-cast-param := [annots] type-descriptor | annots\n*
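\n* e.g. (illustrative sample inferred from the rules above; v is a hypothetical name): in "<int> v"\n* the "<int>" part is the type-cast-param.\n*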
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nstartContext(ParserRuleContext.TYPE_CAST);\nSTNode ltToken = parseLTToken();\nSTNode typeCastParam = parseTypeCastParam();\nSTNode gtToken = parseGTToken();\nendContext();\nSTNode expression =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nreturn STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);\n}\nprivate STNode parseTypeCastParam() {\nSTNode annot;\nSTNode type;\nSTToken token = peek();\nswitch (token.kind) {\ncase AT_TOKEN:\nannot = parseOptionalAnnotations();\ntoken = peek();\nif (isTypeStartingToken(token.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n} else {\ntype = STNodeFactory.createEmptyNode();\n}\nbreak;\ndefault:\nannot = STNodeFactory.createEmptyNode();\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nbreak;\n}\nreturn STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);\n}\n/**\n* Parse table constructor expression.\n*

\n* \n* table-constructor-expr-rhs := [ [row-list] ]\n* \n*\n* @param tableKeyword tableKeyword that precedes this rhs\n* @param keySpecifier keySpecifier that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {\nswitchContext(ParserRuleContext.TABLE_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode rowList = parseRowList();\nSTNode closeBracket = parseCloseBracket();\nreturn STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,\ncloseBracket);\n}\n/**\n* Parse table-keyword.\n*\n* @return Table-keyword node\n*/\nprivate STNode parseTableKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TABLE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TABLE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse table rows.\n*

\n* row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseRowList() {\nSTToken nextToken = peek();\nif (isEndOfTableRowList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList mappings = new ArrayList<>();\nSTNode mapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\nSTNode rowEnd;\nwhile (!isEndOfTableRowList(nextToken.kind)) {\nrowEnd = parseTableRowEnd(nextToken.kind);\nif (rowEnd == null) {\nbreak;\n}\nmappings.add(rowEnd);\nmapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(mappings);\n}\nprivate boolean isEndOfTableRowList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ncase COMMA_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn false;\ndefault:\nreturn isEndOfMappingConstructor(tokenKind);\n}\n}\nprivate STNode parseTableRowEnd() {\nSTNode nextToken = peek();\nreturn parseTableRowEnd(nextToken.kind);\n}\nprivate STNode parseTableRowEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_ROW_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListConstructorMemberEnd(solution.tokenKind);\n}\n}\n/**\n* Parse key specifier.\n*

\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier() {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode keyKeyword = parseKeyKeyword();\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode fieldNames = parseFieldNames();\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);\n}\n/**\n* Parse key-keyword.\n*\n* @return Key-keyword node\n*/\nprivate STNode parseKeyKeyword() {\nSTToken token = peek();\nif (isKeyKeyword(token)) {\nreturn getKeyKeyword(consume());\n} else {\nSolution sol = recover(token, ParserRuleContext.KEY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nstatic boolean isKeyKeyword(STToken token) {\nreturn token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());\n}\nprivate STNode getKeyKeyword(STToken token) {\nreturn STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),\ntoken.diagnostics());\n}\n/**\n* Parse field names.\n*

\n* field-name-list := [ field-name (, field-name)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldNames() {\nSTToken nextToken = peek();\nif (isEndOfFieldNamesList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList fieldNames = new ArrayList<>();\nSTNode fieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfFieldNamesList(nextToken.kind)) {\nleadingComma = parseComma();\nfieldNames.add(leadingComma);\nfieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(fieldNames);\n}\nprivate boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase IDENTIFIER_TOKEN:\nreturn false;\ndefault:\nreturn true;\n}\n}\n/**\n* Parse error type descriptor.\n*

\n* error-type-descriptor := error [error-type-param]\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*
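\n* e.g. (illustrative samples inferred from the rules above; Detail is a hypothetical type name):\n* "error<Detail>" uses a detail-type-descriptor, while "error<*>" uses the inferred-type-descriptor.\n*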

\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeDescriptor() {\nSTNode errorKeywordToken = parseErrorKeyword();\nSTNode errorTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nerrorTypeParamsNode = parseErrorTypeParamsNode();\n} else {\nerrorTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);\n}\n/**\n* Parse error type param node.\n*

\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*

\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nSTNode parameter;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nparameter = consume();\n} else {\nparameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n}\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken);\n}\n/**\n* Parse error-keyword.\n*\n* @return Parsed error-keyword node\n*/\nprivate STNode parseErrorKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ERROR_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ERROR_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse typedesc type descriptor.\n* typedesc-type-descriptor := typedesc type-parameter\n*\n* @return Parsed typedesc type node\n*/\nprivate STNode parseTypedescTypeDescriptor() {\nSTNode typedescKeywordToken = parseTypedescKeyword();\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse typedesc-keyword.\n*\n* @return Parsed typedesc-keyword node\n*/\nprivate STNode parseTypedescKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPEDESC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPEDESC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse stream type descriptor.\n*

\n* stream-type-descriptor := stream [stream-type-parameters]\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*
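\n* e.g. (illustrative samples inferred from the rules above; Person is a hypothetical type name):\n* "stream<int>" and "stream<Person, error>".\n*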

\n*\n* @return Parsed stream type descriptor node\n*/\nprivate STNode parseStreamTypeDescriptor() {\nSTNode streamKeywordToken = parseStreamKeyword();\nSTNode streamTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nstreamTypeParamsNode = parseStreamTypeParamsNode();\n} else {\nstreamTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);\n}\n/**\n* Parse xml type descriptor.\n* xml-type-descriptor := xml type-parameter\n*\n* @return Parsed xml type descriptor node\n*/\nprivate STNode parseXmlTypeDescriptor() {\nSTNode xmlKeywordToken = parseXMLKeyword();\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse stream type params node.\n*
\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*
\n*\n* @return Parsed stream type params node\n*/\nprivate STNode parseStreamTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nstartContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\nSTNode leftTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false);\nSTNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\nendContext();\nreturn streamTypedesc;\n}\nprivate STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {\nreturn parseStreamTypeParamsNode(peek().kind, ltToken, leftTypeDescNode);\n}\nprivate STNode parseStreamTypeParamsNode(SyntaxKind nextTokenKind, STNode ltToken, STNode leftTypeDescNode) {\nSTNode commaToken, rightTypeDescNode, gtToken;\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\ncommaToken = parseComma();\nrightTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false);\nbreak;\ncase GT_TOKEN:\ncommaToken = STNodeFactory.createEmptyNode();\nrightTypeDescNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution =\nrecover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStreamTypeParamsNode(solution.tokenKind, ltToken, leftTypeDescNode);\n}\ngtToken = parseGTToken();\nreturn STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,\ngtToken);\n}\n/**\n* Parse stream-keyword.\n*\n* @return Parsed stream-keyword node\n*/\nprivate STNode parseStreamKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STREAM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STREAM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let expression.\n*
\n* \n* let-expr := let let-var-decl [, let-var-decl]* in expression\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetExpression(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);\nSTNode inKeyword = parseInKeyword();\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);\n}\n/**\n* Parse let-keyword.\n*\n* @return Let-keyword node\n*/\nprivate STNode parseLetKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LET_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LET_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let variable declarations.\n*
\n* let-var-decl-list := let-var-decl [, let-var-decl]*\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {\nstartContext(context);\nList varDecls = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfLetVarDeclarations(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode varDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfLetVarDeclarations(nextToken.kind)) {\nleadingComma = parseComma();\nvarDecls.add(leadingComma);\nvarDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(varDecls);\n}\nprivate boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase AT_TOKEN:\nreturn false;\ncase IN_KEYWORD:\nreturn true;\ndefault:\nreturn !isTypeStartingToken(tokenKind);\n}\n}\n/**\n* Parse let variable declaration.\n*
\n* let-var-decl := [annots] typed-binding-pattern = expression\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDecl(boolean isRhsExpr) {\nSTNode annot = parseOptionalAnnotations();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);\nSTNode assign = parseAssignOp();\nSTNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);\nreturn STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);\n}\n/**\n* Parse raw backtick string template expression.\n*
\n* BacktickString := `expression`\n*\n* @return Template expression node\n*/\nprivate STNode parseTemplateExpression() {\nSTNode type = STNodeFactory.createEmptyNode();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\nprivate STNode parseTemplateContent() {\nList items = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nitems.add(contentItem);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\nprivate boolean isEndOfBacktickContent(SyntaxKind kind) {\nswitch (kind) {\ncase EOF_TOKEN:\ncase BACKTICK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTemplateItem() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn parseInterpolation();\n}\nreturn consume();\n}\n/**\n* Parse string template expression.\n*
\n* string-template-expr := string ` expression `\n*\n* @return String template expression node\n*/\nprivate STNode parseStringTemplateExpression() {\nSTNode type = parseStringKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\n/**\n* Parse string keyword.\n*\n* @return string keyword node\n*/\nprivate STNode parseStringKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STRING_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STRING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse XML template expression.\n*
\n* xml-template-expr := xml BacktickString\n*\n* @return XML template expression\n*/\nprivate STNode parseXMLTemplateExpression() {\nSTNode xmlKeyword = parseXMLKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContentAsXML();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,\nstartingBackTick, content, endingBackTick);\n}\n/**\n* Parse xml keyword.\n*\n* @return xml keyword node\n*/\nprivate STNode parseXMLKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XML_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.XML_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse the content of the template string as XML. This method first read the\n* input in the same way as the raw-backtick-template (BacktickString). Then\n* it parses the content as XML.\n*\n* @return XML node\n*/\nprivate STNode parseTemplateContentAsXML() {\nArrayDeque expressions = new ArrayDeque<>();\nStringBuilder xmlStringBuilder = new StringBuilder();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nif (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {\nxmlStringBuilder.append(((STToken) contentItem).text());\n} else {\nxmlStringBuilder.append(\"${}\");\nexpressions.add(contentItem);\n}\nnextToken = peek();\n}\nTextDocument textDocument = TextDocuments.from(xmlStringBuilder.toString());\nAbstractTokenReader tokenReader = new TokenReader(new XMLLexer(textDocument.getCharacterReader()));\nXMLParser xmlParser = new XMLParser(tokenReader, expressions);\nreturn xmlParser.parse();\n}\n/**\n* Parse interpolation of a back-tick string.\n*
\n* \n* interpolation := ${ expression }\n* \n*\n* @return Interpolation node\n*/\nprivate STNode parseInterpolation() {\nstartContext(ParserRuleContext.INTERPOLATION);\nSTNode interpolStart = parseInterpolationStart();\nSTNode expr = parseExpression();\nwhile (true) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.EOF_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nbreak;\n} else {\nnextToken = consume();\nexpr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,\nDiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());\n}\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);\n}\n/**\n* Parse interpolation start token.\n*
\n* interpolation-start := ${\n*\n* @return Interpolation start token\n*/\nprivate STNode parseInterpolationStart() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse back-tick token.\n*\n* @return Back-tick token\n*/\nprivate STNode parseBacktickToken(ParserRuleContext ctx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ctx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse table type descriptor.\n*
\n* table-type-descriptor := table row-type-parameter [key-constraint]\n* row-type-parameter := type-parameter\n* key-constraint := key-specifier | key-type-constraint\n* key-specifier := key ( [ field-name (, field-name)* ] )\n* key-type-constraint := key type-parameter\n*
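\n* For illustration, assuming a record type {@code Employee} with an {@code id} field,\n* matching inputs include {@code table<Employee>}, {@code table<Employee> key(id)} and\n* {@code table<Employee> key<int>}.\n*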
\n*\n* @return Parsed table type desc node.\n*/\nprivate STNode parseTableTypeDescriptor() {\nSTNode tableKeywordToken = parseTableKeyword();\nSTNode rowTypeParameterNode = parseRowTypeParameter();\nSTNode keyConstraintNode;\nSTToken nextToken = peek();\nif (isKeyKeyword(nextToken)) {\nSTNode keyKeywordToken = getKeyKeyword(consume());\nkeyConstraintNode = parseKeyConstraint(keyKeywordToken);\n} else {\nkeyConstraintNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);\n}\n/**\n* Parse row type parameter node.\n*
\n* row-type-parameter := type-parameter\n*
\n*\n* @return Parsed node.\n*/\nprivate STNode parseRowTypeParameter() {\nstartContext(ParserRuleContext.ROW_TYPE_PARAM);\nSTNode rowTypeParameterNode = parseTypeParameter();\nendContext();\nreturn rowTypeParameterNode;\n}\n/**\n* Parse type parameter node.\n*
\n* type-parameter := < type-descriptor >\n*
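\n* For illustration, {@code <string>} and {@code <map<json>>} match this production,\n* e.g. in {@code typedesc<map<json>>}.\n*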
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeParameter() {\nSTNode ltToken = parseLTToken();\nSTNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);\n}\n/**\n* Parse key constraint.\n*
\n* key-constraint := key-specifier | key-type-constraint\n*
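\n* For illustration, {@code key(id, name)} is a key-specifier while {@code key<int>}\n* is a key-type-constraint.\n*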
\n*\n* @return Parsed node.\n*/\nprivate STNode parseKeyConstraint(STNode keyKeywordToken) {\nreturn parseKeyConstraint(peek().kind, keyKeywordToken);\n}\nprivate STNode parseKeyConstraint(SyntaxKind nextTokenKind, STNode keyKeywordToken) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseKeySpecifier(keyKeywordToken);\ncase LT_TOKEN:\nreturn parseKeyTypeConstraint(keyKeywordToken);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseKeyConstraint(solution.tokenKind, keyKeywordToken);\n}\n}\n/**\n* Parse key specifier given parsed key keyword token.\n*
\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier(STNode keyKeywordToken) {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode openParenToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode fieldNamesNode = parseFieldNames();\nSTNode closeParenToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);\n}\n/**\n* Parse key type constraint.\n*
\n* key-type-constraint := key type-parameter\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseKeyTypeConstraint(STNode keyKeywordToken) {\nSTNode typeParameterNode = parseTypeParameter();\nreturn STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);\n}\n/**\n* Parse function type descriptor.\n*
\n* function-type-descriptor := function function-signature\n*\n* @return Function type descriptor node\n*/\nprivate STNode parseFunctionTypeDesc() {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode signature = parseFuncSignature(true);\nendContext();\nreturn STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature);\n}\n/**\n* Parse explicit anonymous function expression.\n*
\n* explicit-anonymous-function-expr := [annots] function function-signature anon-func-body\n*\n* @param annots Annotations.\n* @param isRhsExpr Is expression in rhs context\n* @return Anonymous function expression node\n*/\nprivate STNode parseExplicitFunctionExpression(STNode annots, boolean isRhsExpr) {\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nSTNode funcKeyword = parseFunctionKeyword();\nSTNode funcSignature = parseFuncSignature(false);\nSTNode funcBody = parseAnonFuncBody(isRhsExpr);\nreturn STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature,\nfuncBody);\n}\n/**\n* Parse anonymous function body.\n*
\n* anon-func-body := block-function-body | expr-function-body\n*\n* @param isRhsExpr Is expression in rhs context\n* @return Anon function body node\n*/\nprivate STNode parseAnonFuncBody(boolean isRhsExpr) {\nreturn parseAnonFuncBody(peek().kind, isRhsExpr);\n}\nprivate STNode parseAnonFuncBody(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\ncase EOF_TOKEN:\nSTNode body = parseFunctionBodyBlock(true);\nendContext();\nreturn body;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nendContext();\nreturn parseExpressionFuncBody(true, isRhsExpr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnonFuncBody(solution.tokenKind, isRhsExpr);\n}\n}\n/**\n* Parse expression function body.\n*
\n* expr-function-body := => expression\n*\n* @param isAnon Is anonymous function.\n* @param isRhsExpr Is expression in rhs context\n* @return Expression function body node\n*/\nprivate STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTNode semiColon;\nif (isAnon) {\nsemiColon = STNodeFactory.createEmptyNode();\n} else {\nsemiColon = parseSemicolon();\n}\nreturn STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);\n}\n/**\n* Parse '=>' token.\n*\n* @return Double right arrow token\n*/\nprivate STNode parseDoubleRightArrow() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {\nswitch (params.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase INFER_PARAM_LIST:\nbreak;\ncase BRACED_EXPRESSION:\nparams = getAnonFuncParam((STBracedExpressionNode) params);\nbreak;\ndefault:\nparams = SyntaxErrors.addDiagnostic(params,\nDiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);\n}\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);\n}\n/**\n* Create a new anon-func-param node from a braced expression.\n*\n* @param params Braced expression\n* @return Anon-func param node\n*/\nprivate STNode getAnonFuncParam(STBracedExpressionNode params) {\nList paramList = new ArrayList<>();\nparamList.add(params.expression);\nreturn STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen,\nSTNodeFactory.createNodeList(paramList), params.closeParen);\n}\n/**\n* Parse implicit anon function expression.\n*\n* @param openParen Open parenthesis token\n* @param firstParam First parameter\n* @param isRhsExpr Is expression in rhs context\n* @return Implicit anon function expression node\n*/\nprivate STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {\nList paramList = new ArrayList<>();\nparamList.add(firstParam);\nSTToken nextToken = peek();\nSTNode paramEnd;\nSTNode param;\nwhile (!isEndOfAnonFuncParametersList(nextToken.kind)) {\nparamEnd = parseImplicitAnonFuncParamEnd(nextToken.kind);\nif (paramEnd == null) {\nbreak;\n}\nparamList.add(paramEnd);\nparam = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);\nparam = STNodeFactory.createSimpleNameReferenceNode(param);\nparamList.add(param);\nnextToken = peek();\n}\nSTNode params = STNodeFactory.createNodeList(paramList);\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nSTNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nreturn parseImplicitAnonFunc(inferedParams, isRhsExpr);\n}\nprivate STNode parseImplicitAnonFuncParamEnd() {\nreturn parseImplicitAnonFuncParamEnd(peek().kind);\n}\nprivate STNode parseImplicitAnonFuncParamEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);\nif (solution.action == Action.REMOVE) {\nreturn 
solution.recoveredNode;\n}\nreturn parseImplicitAnonFuncParamEnd(solution.tokenKind);\n}\n}\nprivate boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse tuple type descriptor.\n*
\n* tuple-type-descriptor := [ tuple-member-type-descriptors ]\n*
\n* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]\n* | [ tuple-rest-descriptor ]\n*
\n* tuple-rest-descriptor := type-descriptor ...\n*
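\n* For illustration, matching inputs include {@code [int, string]},\n* {@code [int, string, anydata...]} and {@code [anydata...]}.\n*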
\n*\n* @return\n*/\nprivate STNode parseTupleTypeDesc() {\nSTNode openBracket = parseOpenBracket();\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode memberTypeDesc = parseTupleMemberTypeDescList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nopenBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,\nDiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);\n}\n/**\n* Parse tuple member type descriptors.\n*\n* @return Parsed node\n*/\nprivate STNode parseTupleMemberTypeDescList() {\nList typeDescList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfTypeList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode typeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nreturn parseTupleTypeMembers(typeDesc, typeDescList);\n}\nprivate STNode parseTupleTypeMembers(STNode typeDesc, List typeDescList) {\nSTToken nextToken;\nnextToken = peek();\nSTNode tupleMemberRhs;\nwhile (!isEndOfTypeList(nextToken.kind)) {\ntupleMemberRhs = parseTupleMemberRhs(nextToken.kind);\nif (tupleMemberRhs == null) {\nbreak;\n}\nif (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) {\ntypeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);\nbreak;\n}\ntypeDescList.add(typeDesc);\ntypeDescList.add(tupleMemberRhs);\ntypeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nnextToken = peek();\n}\ntypeDescList.add(typeDesc);\nreturn STNodeFactory.createNodeList(typeDescList);\n}\nprivate STNode parseTupleMemberRhs() {\nreturn parseTupleMemberRhs(peek().kind);\n}\nprivate STNode parseTupleMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ncase ELLIPSIS_TOKEN:\nreturn parseEllipsis();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTupleMemberRhs(solution.tokenKind);\n}\n}\nprivate boolean isEndOfTypeList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse table constructor or query expression.\n*
\n* \n* table-constructor-or-query-expr := table-constructor-expr | query-expr\n* table-constructor-expr := table [key-specifier] [ [row-list] ]\n* query-expr := [query-construct-type] query-pipeline select-clause\n* [query-construct-type] query-pipeline select-clause on-conflict-clause? limit-clause?\n* query-construct-type := table key-specifier | stream\n*
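\n* For illustration, {@code table key(id) [{id: 1}, {id: 2}]} is a table constructor,\n* while {@code from var x in xs select x} (with {@code xs} an assumed iterable) is a\n* query expression.\n*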
\n*\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorOrQuery(boolean isRhsExpr) {\nstartContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);\nSTNode tableOrQueryExpr = parseTableConstructorOrQuery(peek().kind, isRhsExpr);\nendContext();\nreturn tableOrQueryExpr;\n}\nprivate STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nSTNode queryConstructType;\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nqueryConstructType = STNodeFactory.createEmptyNode();\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase STREAM_KEYWORD:\nqueryConstructType = parseQueryConstructType(parseStreamKeyword(), null);\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase TABLE_KEYWORD:\nSTNode tableKeyword = parseTableKeyword();\nreturn parseTableConstructorOrQuery(tableKeyword, isRhsExpr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQuery(solution.tokenKind, isRhsExpr);\n}\n}\nprivate STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseTableConstructorOrQuery(nextToken.kind, nextToken, tableKeyword, isRhsExpr);\n}\nprivate STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, STToken nextToken, STNode tableKeyword,\nboolean isRhsExpr) {\nSTNode keySpecifier;\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nkeySpecifier = STNodeFactory.createEmptyNode();\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ncase KEY_KEYWORD:\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\ncase IDENTIFIER_TOKEN:\nif (isKeyKeyword(nextToken)) {\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n}\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQuery(solution.tokenKind, null, tableKeyword, isRhsExpr);\n}\n}\nprivate STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {\nreturn parseTableConstructorOrQueryRhs(peek().kind, tableKeyword, keySpecifier, isRhsExpr);\n}\nprivate STNode parseTableConstructorOrQueryRhs(SyntaxKind nextTokenKind, STNode tableKeyword, STNode keySpecifier,\nboolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nreturn parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword,\nkeySpecifier, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQueryRhs(solution.tokenKind, tableKeyword, keySpecifier, isRhsExpr);\n}\n}\n/**\n* Parse query construct type.\n*
\n* query-construct-type := table key-specifier | stream\n*\n* @return Parsed node\n*/\nprivate STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {\nreturn STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);\n}\n/**\n* Parse query expression.\n*
\n* \n* query-expr-rhs := query-pipeline select-clause\n* query-pipeline select-clause on-conflict-clause? limit-clause?\n* query-pipeline := from-clause intermediate-clause*\n*
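\n* For illustration, assuming an iterable {@code students}, a complete query reads\n* {@code from var s in students where s.score > 80 select s.name limit 10}.\n*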
\n*\n* @param queryConstructType queryConstructType that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {\nswitchContext(ParserRuleContext.QUERY_EXPRESSION);\nSTNode fromClause = parseFromClause(isRhsExpr);\nList clauses = new ArrayList<>();\nSTNode intermediateClause;\nSTNode selectClause = null;\nwhile (!isEndOfIntermediateClause(peek().kind, SyntaxKind.NONE)) {\nintermediateClause = parseIntermediateClause(isRhsExpr);\nif (intermediateClause == null) {\nbreak;\n}\nif (selectClause != null) {\nselectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,\nDiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);\ncontinue;\n}\nif (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {\nselectClause = intermediateClause;\n} else {\nclauses.add(intermediateClause);\n}\n}\nif (peek().kind == SyntaxKind.DO_KEYWORD) {\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nreturn parseQueryAction(queryPipeline, selectClause, isRhsExpr);\n}\nif (selectClause == null) {\nSTNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);\nSTNode expr = STNodeFactory\n.createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\nselectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);\nif (clauses.isEmpty()) {\nfromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\n} else {\nint lastIndex = clauses.size() - 1;\nSTNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),\nDiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\nclauses.set(lastIndex, intClauseWithDiagnostic);\n}\n}\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nSTNode onConflictClause = parseOnConflictClause(isRhsExpr);\nSTNode limitClause = parseLimitClause(isRhsExpr);\nreturn STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,\nonConflictClause, limitClause);\n}\n/**\n* Parse limit keyword.\n*\n* @return Limit keyword node\n*/\nprivate STNode parseLimitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LIMIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LIMIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse an intermediate clause.\n*
\n* \n* intermediate-clause := from-clause | where-clause | let-clause | join-clause | order-by-clause\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseIntermediateClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseIntermediateClause(nextToken.kind, isRhsExpr);\n}\nprivate STNode parseIntermediateClause(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nreturn parseFromClause(isRhsExpr);\ncase WHERE_KEYWORD:\nreturn parseWhereClause(isRhsExpr);\ncase LET_KEYWORD:\nreturn parseLetClause(isRhsExpr);\ncase SELECT_KEYWORD:\nreturn parseSelectClause(isRhsExpr);\ncase JOIN_KEYWORD:\ncase OUTER_KEYWORD:\nreturn parseJoinClause(isRhsExpr);\ncase ORDER_KEYWORD:\ncase BY_KEYWORD:\ncase ASCENDING_KEYWORD:\ncase DESCENDING_KEYWORD:\nreturn parseOrderByClause(isRhsExpr);\ncase DO_KEYWORD:\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ncase CONFLICT_KEYWORD:\ncase LIMIT_KEYWORD:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseIntermediateClause(solution.tokenKind, isRhsExpr);\n}\n}\n/**\n* Parse join-keyword.\n*\n* @return Join-keyword node\n*/\nprivate STNode parseJoinKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.JOIN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.JOIN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse outer-keyword.\n*\n* @return Outer-keyword node\n*/\nprivate STNode parseOuterKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OUTER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OUTER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate boolean isEndOfIntermediateClause(SyntaxKind tokenKind, SyntaxKind precedingNodeKind) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase EOF_TOKEN:\ncase RESOURCE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase DOCUMENTATION_STRING:\ncase PRIVATE_KEYWORD:\ncase RETURNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase TYPE_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase DO_KEYWORD:\nreturn true;\ndefault:\nreturn isValidExprRhsStart(tokenKind, precedingNodeKind);\n}\n}\n/**\n* Parse from clause.\n*
\n* from-clause := from typed-binding-pattern in expression\n*\n* @return Parsed node\n*/\nprivate STNode parseFromClause(boolean isRhsExpr) {\nSTNode fromKeyword = parseFromKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);\n}\n/**\n* Parse from-keyword.\n*\n* @return From-keyword node\n*/\nprivate STNode parseFromKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FROM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FROM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse where clause.\n*
\n* where-clause := where expression\n*\n* @return Parsed node\n*/\nprivate STNode parseWhereClause(boolean isRhsExpr) {\nSTNode whereKeyword = parseWhereKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createWhereClauseNode(whereKeyword, expression);\n}\n/**\n* Parse where-keyword.\n*\n* @return Where-keyword node\n*/\nprivate STNode parseWhereKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WHERE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WHERE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let clause.\n*
\n* let-clause := let let-var-decl [, let-var-decl]* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetClause(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nreturn STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);\n}\n/**\n* Parse order-keyword.\n*\n* @return Order-keyword node\n*/\nprivate STNode parseOrderKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ORDER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ORDER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse by-keyword.\n*\n* @return By-keyword node\n*/\nprivate STNode parseByKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BY_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse ascending-keyword.\n*\n* @return Ascending-keyword node\n*/\nprivate STNode parseAscendingKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ASCENDING_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ASCENDING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse descending-keyword.\n*\n* @return Descending-keyword node\n*/\nprivate STNode parseDescendingKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DESCENDING_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DESCENDING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse order by clause.\n*
\n* order-by-clause := order by order-key-list\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseOrderByClause(boolean isRhsExpr) {\nSTNode orderKeyword = parseOrderKeyword();\nSTNode byKeyword = parseByKeyword();\nSTNode orderKeys = parseOrderKeyList(isRhsExpr);\nbyKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword,\nDiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);\nreturn STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);\n}\n/**\n* Parse order key list.\n*
\n* order-key-list := order-key [, order-key]*\n*\n* @return Parsed node\n*/\nprivate STNode parseOrderKeyList(boolean isRhsExpr) {\nstartContext(ParserRuleContext.ORDER_KEY);\nList orderKeys = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfOrderKeys(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode orderKey = parseOrderKey(isRhsExpr);\norderKeys.add(orderKey);\nnextToken = peek();\nSTNode orderKeyListMemberEnd;\nwhile (!isEndOfOrderKeys(nextToken.kind)) {\norderKeyListMemberEnd = parseOrderKeyListMemberEnd();\nif (orderKeyListMemberEnd == null) {\nbreak;\n}\norderKeys.add(orderKeyListMemberEnd);\norderKey = parseOrderKey(isRhsExpr);\norderKeys.add(orderKey);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(orderKeys);\n}\nprivate boolean isEndOfOrderKeys(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\nreturn false;\ncase SEMICOLON_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn isNextQueryClauseStart(tokenKind);\n}\n}\nprivate boolean isNextQueryClauseStart(SyntaxKind tokenKind) {\nswitch(tokenKind) {\ncase SELECT_KEYWORD:\ncase LET_KEYWORD:\ncase WHERE_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseOrderKeyListMemberEnd() {\nreturn parseOrderKeyListMemberEnd(peek().kind);\n}\nprivate STNode parseOrderKeyListMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase SELECT_KEYWORD:\ncase WHERE_KEYWORD:\ncase LET_KEYWORD:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseOrderKeyListMemberEnd(solution.tokenKind);\n}\n}\n/**\n* Parse order key.\n*
\n* order-key := expression (ascending | descending)?\n*\n* @return Parsed node\n*/\nprivate STNode parseOrderKey(boolean isRhsExpr) {\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase ASCENDING_KEYWORD:\nSTNode ascendingKeyword = parseAscendingKeyword();\nreturn STNodeFactory.createOrderKeyNode(expression, ascendingKeyword);\ncase DESCENDING_KEYWORD:\nSTNode descendingKeyword = parseDescendingKeyword();\nreturn STNodeFactory.createOrderKeyNode(expression, descendingKeyword);\ndefault:\nreturn STNodeFactory.createOrderKeyNode(expression, STNodeFactory.createEmptyNode());\n}\n}\n/**\n* Parse select clause.\n*
\n* select-clause := select expression\n*\n* @return Parsed node\n*/\nprivate STNode parseSelectClause(boolean isRhsExpr) {\nSTNode selectKeyword = parseSelectKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createSelectClauseNode(selectKeyword, expression);\n}\n/**\n* Parse select-keyword.\n*\n* @return Select-keyword node\n*/\nprivate STNode parseSelectKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SELECT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SELECT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse on-conflict clause.\n*
\n* \n* onConflictClause := on conflict expression\n* \n*\n* @return On conflict clause node\n*/\nprivate STNode parseOnConflictClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ON_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode onKeyword = parseOnKeyword();\nSTNode conflictKeyword = parseConflictKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);\n}\n/**\n* Parse conflict keyword.\n*\n* @return Conflict keyword node\n*/\nprivate STNode parseConflictKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONFLICT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONFLICT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse limit clause.\n*
\n* limitClause := limit expression\n*\n* @return Limit expression node\n*/\nprivate STNode parseLimitClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.LIMIT_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode limitKeyword = parseLimitKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLimitClauseNode(limitKeyword, expr);\n}\n/**\n* Parse join clause.\n*
\n* \n* join-clause := (join-var-decl | outer-join-var-decl) in expression\n* join-var-decl := join (typeName | var) bindingPattern\n* outer-join-var-decl := outer join var binding-pattern\n*
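\n* For illustration, inside a query whose from-clause binds {@code e}, matching inputs\n* include {@code join var d in depts on e.deptId == d.id} (the on condition is optional\n* here) and {@code outer join var d in depts}, with {@code depts} an assumed iterable.\n*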
\n*\n* @return Join clause\n*/\nprivate STNode parseJoinClause(boolean isRhsExpr) {\nSTNode outerKeyword;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {\nouterKeyword = parseOuterKeyword();\n} else {\nouterKeyword = STNodeFactory.createEmptyNode();\n}\nSTNode joinKeyword = parseJoinKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode onCondition;\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nnextToken = peek();\nif (nextToken.kind == SyntaxKind.ON_KEYWORD) {\nonCondition = parseOnClause(isRhsExpr);\n} else {\nonCondition = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression,\nonCondition);\n}\n/**\n* Parse on clause.\n*
\n* on clause := on expression\n*\n* @return On clause node\n*/\nprivate STNode parseOnClause(boolean isRhsExpr) {\nSTNode onKeyword = parseOnKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnClauseNode(onKeyword, expression);\n}\n/**\n* Parse start action.\n*
\n* start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)\n*\n* @return Start action node\n*/\nprivate STNode parseStartAction(STNode annots) {\nSTNode startKeyword = parseStartKeyword();\nSTNode expr = parseActionOrExpression();\nswitch (expr.kind) {\ncase FUNCTION_CALL:\ncase METHOD_CALL:\ncase REMOTE_METHOD_CALL_ACTION:\nbreak;\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nSTNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);\nSTNode arguments = STNodeFactory.createEmptyNodeList();\nSTNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);\nexpr = STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);\nbreak;\ndefault:\nstartKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,\nDiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);\nSTNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nfuncName = STNodeFactory.createSimpleNameReferenceNode(funcName);\nopenParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);\narguments = STNodeFactory.createEmptyNodeList();\ncloseParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);\nexpr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,\ncloseParenToken);\nbreak;\n}\nreturn STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);\n}\n/**\n* Parse start keyword.\n*\n* @return Start keyword node\n*/\nprivate STNode parseStartKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.START_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.START_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse flush action.\n*
\n* flush-action := flush [peer-worker]\n*\n* @return flush action node\n*/\nprivate STNode parseFlushAction() {\nSTNode flushKeyword = parseFlushKeyword();\nSTNode peerWorker = parseOptionalPeerWorkerName();\nreturn STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);\n}\n/**\n* Parse flush keyword.\n*\n* @return flush keyword node\n*/\nprivate STNode parseFlushKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FLUSH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FLUSH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse peer worker.\n*
\n* peer-worker := worker-name | default\n*\n* @return peer worker name node\n*/\nprivate STNode parseOptionalPeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase DEFAULT_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nreturn STNodeFactory.createEmptyNode();\n}\n}\n/**\n* Parse intersection type descriptor.\n*
\n* intersection-type-descriptor := type-descriptor & type-descriptor\n*
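\n* For illustration, {@code readonly & int[]} and {@code map<string> & readonly} match\n* this production.\n*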
\n*\n* @return Parsed node\n*/\nprivate STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode bitwiseAndToken = consume();\nSTNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n}\n/**\n* Parse singleton type descriptor.\n*
\n* singleton-type-descriptor := simple-const-expr\n* simple-const-expr :=\n* nil-literal\n* | boolean-literal\n* | [Sign] int-literal\n* | [Sign] floating-point-literal\n* | string-literal\n* | constant-reference-expr\n*
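\n* For illustration, {@code true}, {@code 42}, {@code -1.5} and {@code \"active\"} are\n* all singleton type descriptors.\n*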
\n*/\nprivate STNode parseSingletonTypeDesc() {\nSTNode simpleContExpr = parseSimpleConstExpr();\nreturn STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);\n}\nprivate STNode parseSignedIntOrFloat() {\nSTNode operator = parseUnaryOperator();\nSTNode literal;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nliteral = parseBasicLiteral();\nbreak;\ndefault:\nliteral = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL);\nliteral = STNodeFactory.createBasicLiteralNode(literal.kind, literal);\n}\nreturn STNodeFactory.createUnaryExpressionNode(operator, literal);\n}\nprivate boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {\nSTToken nextNextToken = getNextNextToken(tokenKind);\nswitch (tokenKind) {\ncase STRING_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nif (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {\nreturn true;\n}\nreturn false;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isIntOrFloat(nextNextToken);\ndefault:\nreturn false;\n}\n}\nstatic boolean isIntOrFloat(STToken token) {\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the parser reached to a valid expression start.\n*\n* @param nextTokenKind Kind of the next immediate token.\n* @param nextTokenIndex Index to the next token.\n* @return true if this is a start of a valid expression. 
false otherwise\n*/\nprivate boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {\nnextTokenIndex++;\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;\nreturn nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||\nnextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||\nisValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase IDENTIFIER_TOKEN:\nreturn isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase OPEN_PAREN_TOKEN:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase TYPEOF_KEYWORD:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\ncase TRAP_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase LT_TOKEN:\ncase FROM_KEYWORD:\ncase LET_KEYWORD:\ncase BACKTICK_TOKEN:\ncase NEW_KEYWORD:\ncase FAIL_KEYWORD:\ncase LEFT_ARROW_TOKEN:\nreturn true;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);\ncase FUNCTION_KEYWORD:\ncase TABLE_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;\ncase STREAM_KEYWORD:\nSTToken nextNextToken = peek(nextTokenIndex);\nreturn nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||\nnextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||\nnextNextToken.kind == SyntaxKind.FROM_KEYWORD;\ncase ERROR_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;\ncase SERVICE_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.OPEN_BRACE_TOKEN;\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase WAIT_KEYWORD:\ndefault:\nreturn false;\n}\n}\n/**\n* Parse sync send action.\n*
\n* sync-send-action := expression ->> peer-worker\n*\n* @param expression LHS expression of the sync send action\n* @return Sync send action node\n*/\nprivate STNode parseSyncSendAction(STNode expression) {\nSTNode syncSendToken = parseSyncSendToken();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);\n}\n/**\n* Parse peer worker.\n*
\n* peer-worker := worker-name | default\n*\n* @return peer worker name node\n*/\nprivate STNode parsePeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase DEFAULT_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nSolution sol = recover(token, ParserRuleContext.PEER_WORKER_NAME);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn STNodeFactory.createSimpleNameReferenceNode(sol.recoveredNode);\n}\n}\n/**\n* Parse sync send token.\n*
\n* sync-send-token := ->> \n*\n* @return sync send token\n*/\nprivate STNode parseSyncSendToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SYNC_SEND_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse receive action.\n*
\n* receive-action := single-receive-action | multiple-receive-action\n*\n* @return Receive action\n*/\nprivate STNode parseReceiveAction() {\nSTNode leftArrow = parseLeftArrowToken();\nSTNode receiveWorkers = parseReceiveWorkers();\nreturn STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);\n}\nprivate STNode parseReceiveWorkers() {\nreturn parseReceiveWorkers(peek().kind);\n}\nprivate STNode parseReceiveWorkers(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\ncase IDENTIFIER_TOKEN:\nreturn parsePeerWorkerName();\ncase OPEN_BRACE_TOKEN:\nreturn parseMultipleReceiveWorkers();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_WORKERS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseReceiveWorkers(solution.tokenKind);\n}\n}\n/**\n* Parse multiple worker receivers.\n*
\n* { receive-field (, receive-field)* }\n*\n* @return Multiple worker receiver node\n*/\nprivate STNode parseMultipleReceiveWorkers() {\nstartContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);\nSTNode openBrace = parseOpenBrace();\nSTNode receiveFields = parseReceiveFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);\nreturn STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);\n}\nprivate STNode parseReceiveFields() {\nList<STNode> receiveFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfReceiveFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode receiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\nSTNode receiveFieldEnd;\nwhile (!isEndOfReceiveFields(nextToken.kind)) {\nreceiveFieldEnd = parseReceiveFieldEnd(nextToken.kind);\nif (receiveFieldEnd == null) {\nbreak;\n}\nreceiveFields.add(receiveFieldEnd);\nreceiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(receiveFields);\n}\nprivate boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseReceiveFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseReceiveFieldEnd(solution.tokenKind);\n}\n}\nprivate STNode parseReceiveField() {\nreturn parseReceiveField(peek().kind);\n}\n/**\n* Parse receive field.\n*
\n* receive-field := peer-worker | field-name : peer-worker\n*\n* @param nextTokenKind Kind of the next token\n* @return Receiver field node\n*/\nprivate STNode parseReceiveField(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\nSTNode defaultKeyword = parseDefaultKeyword();\nreturn STNodeFactory.createSimpleNameReferenceNode(defaultKeyword);\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);\nreturn createQualifiedReceiveField(identifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.tokenKind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn createQualifiedReceiveField(solution.recoveredNode);\n}\nreturn solution.recoveredNode;\n}\n}\nprivate STNode createQualifiedReceiveField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);\n}\n/**\n*\n* Parse left arrow (<-) token.\n*\n* @return left arrow token\n*/\nprivate STNode parseLeftArrowToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse signed right shift token (>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseSignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTToken endLGToken = consume();\nSTNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),\nendLGToken.trailingMinutiae());\nif (!validateRightShiftOperatorWS(openGTToken)) {\ndoubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);\n}\nreturn doubleGTToken;\n}\n/**\n* Parse unsigned right shift token (>>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseUnsignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTNode middleGTToken = consume();\nSTNode endLGToken = consume();\nSTNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,\nopenGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());\nboolean validOpenGTToken = validateRightShiftOperatorWS(openGTToken);\nboolean validMiddleGTToken = validateRightShiftOperatorWS(middleGTToken);\nif (validOpenGTToken && validMiddleGTToken) {\nreturn unsignedRightShiftToken;\n}\nunsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);\nreturn unsignedRightShiftToken;\n}\n/**\n* Validate the whitespace between '>' tokens of right shift operators.\n*\n* @param node Preceding node\n* @return the validated node\n*/\nprivate boolean validateRightShiftOperatorWS(STNode node) {\nint diff = node.widthWithTrailingMinutiae() - node.width();\nreturn diff == 0;\n}\n/**\n* Parse wait action.\n*
\n* wait-action := single-wait-action | multiple-wait-action | alternate-wait-action \n*\n* @return Wait action node\n*/\nprivate STNode parseWaitAction() {\nSTNode waitKeyword = parseWaitKeyword();\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nreturn parseMultiWaitAction(waitKeyword);\n}\nreturn parseSingleOrAlternateWaitAction(waitKeyword);\n}\n/**\n* Parse wait keyword.\n*\n* @return wait keyword\n*/\nprivate STNode parseWaitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WAIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WAIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse single or alternate wait actions.\n*
\n* \n* alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+\n* wait-future-expr := expression but not mapping-constructor-expr\n*
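\n* For illustration, assuming {@code f1} and {@code f2} are futures, {@code wait f1}\n* is a single wait and {@code wait f1 | f2} is an alternate wait.\n*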
\n*\n* @param waitKeyword wait keyword\n* @return Single or alternate wait action node\n*/\nprivate STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);\nSTToken nextToken = peek();\nif (isEndOfWaitFutureExprList(nextToken.kind)) {\nendContext();\nSTNode waitFutureExprs = STNodeFactory\n.createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\nwaitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);\n}\nList waitFutureExprList = new ArrayList<>();\nSTNode waitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\nSTNode waitFutureExprEnd;\nwhile (!isEndOfWaitFutureExprList(nextToken.kind)) {\nwaitFutureExprEnd = parseWaitFutureExprEnd(nextToken.kind, 1);\nif (waitFutureExprEnd == null) {\nbreak;\n}\nwaitFutureExprList.add(waitFutureExprEnd);\nwaitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));\n}\nprivate boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFutureExpr() {\nSTNode waitFutureExpr = parseActionOrExpression();\nif (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {\nwaitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,\nDiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);\n} else if (isAction(waitFutureExpr)) {\nwaitFutureExpr =\nSyntaxErrors.addDiagnostic(waitFutureExpr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);\n}\nreturn waitFutureExpr;\n}\nprivate STNode parseWaitFutureExprEnd(int nextTokenIndex) {\nreturn parseWaitFutureExprEnd(peek().kind, 1);\n}\nprivate STNode parseWaitFutureExprEnd(SyntaxKind nextTokenKind, int nextTokenIndex) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ndefault:\nif (isEndOfWaitFutureExprList(nextTokenKind) ||\n!isValidExpressionStart(nextTokenKind, nextTokenIndex)) {\nreturn null;\n}\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END, nextTokenIndex);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitFutureExprEnd(solution.tokenKind, 0);\n}\n}\n/**\n* Parse multiple wait action.\n*

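For orientation, hypothetical Ballerina snippets for the three wait-action alternatives named in the grammar above; the braced multiple-wait form is what parseMultiWaitAction below handles:

```java
// Hypothetical Ballerina inputs (illustrative only, not from the source).
final class WaitActionShapes {
    static final String SINGLE    = "var r = wait f1;";             // single-wait-action
    static final String ALTERNATE = "var r = wait f1 | f2;";        // alternate-wait-action
    static final String MULTIPLE  = "var r = wait {a: f1, b: f2};"; // multiple-wait-action
}
```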
\n* multiple-wait-action := wait { wait-field (, wait-field)* }\n*\n* @param waitKeyword Wait keyword\n* @return Multiple wait action node\n*/\nprivate STNode parseMultiWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.MULTI_WAIT_FIELDS);\nSTNode openBrace = parseOpenBrace();\nSTNode waitFields = parseWaitFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);\nSTNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);\n}\nprivate STNode parseWaitFields() {\nList waitFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfWaitFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode waitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\nSTNode waitFieldEnd;\nwhile (!isEndOfWaitFields(nextToken.kind)) {\nwaitFieldEnd = parseWaitFieldEnd(nextToken.kind);\nif (waitFieldEnd == null) {\nbreak;\n}\nwaitFields.add(waitFieldEnd);\nwaitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(waitFields);\n}\nprivate boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFieldEnd() {\nreturn parseWaitFieldEnd(peek().kind);\n}\nprivate STNode parseWaitFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitFieldEnd(solution.tokenKind);\n}\n}\nprivate STNode parseWaitField() {\nreturn parseWaitField(peek().kind);\n}\n/**\n* Parse wait field.\n*

\n* wait-field := variable-name | field-name : wait-future-expr\n*\n* @param nextTokenKind Kind of the next token\n* @return Wait field node\n*/\nprivate STNode parseWaitField(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);\nidentifier = STNodeFactory.createSimpleNameReferenceNode(identifier);\nreturn createQualifiedWaitField(identifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitField(solution.tokenKind);\n}\n}\nprivate STNode createQualifiedWaitField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode waitFutureExpr = parseWaitFutureExpr();\nreturn STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);\n}\n/**\n* Parse annot access expression.\n*

\n* \n* annot-access-expr := expression .@ annot-tag-reference\n*
\n* annot-tag-reference := qualified-identifier | identifier\n*
\n*\n* @param lhsExpr Preceding expression of the annot access\n* @return Parsed node\n*/\nprivate STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode annotAccessToken = parseAnnotChainingToken();\nSTNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);\n}\n/**\n* Parse annot-chaining-token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field access identifier.\n*

\n* field-access-identifier := qualified-identifier | identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {\nreturn parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);\n}\n/**\n* Parse query action.\n*

\n* query-action := query-pipeline do-clause\n*
\n* do-clause := do block-stmt\n*
\n*\n* @param queryPipeline Query pipeline\n* @param selectClause Select clause, if any; used only for validation\n* @return Query action node\n*/\nprivate STNode parseQueryAction(STNode queryPipeline, STNode selectClause, boolean isRhsExpr) {\nif (selectClause != null) {\nqueryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,\nDiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);\n}\nstartContext(ParserRuleContext.DO_CLAUSE);\nSTNode doKeyword = parseDoKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nSTNode limitClause = parseLimitClause(isRhsExpr);\nreturn STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt, limitClause);\n}\n/**\n* Parse 'do' keyword.\n*\n* @return do keyword node\n*/\nprivate STNode parseDoKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DO_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DO_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse optional field access or xml optional attribute access expression.\n*

\n* \n* optional-field-access-expr := expression ?. field-name\n*
\n* xml-optional-attribute-access-expr := expression ?. xml-attribute-name\n*
\n* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n*
\n* xml-qualified-name := xml-namespace-prefix : identifier\n*
\n* xml-namespace-prefix := identifier\n*
\n*\n* @param lhsExpr Preceding expression of the optional access\n* @return Parsed node\n*/\nprivate STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode optionalFieldAccessToken = parseOptionalChainingToken();\nSTNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);\n}\n/**\n* Parse optional chaining token.\n*\n* @return parsed node\n*/\nprivate STNode parseOptionalChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse conditional expression.\n*

\n* conditional-expr := expression ? expression : expression\n*\n* @param lhsExpr Preceding expression of the question mark\n* @return Parsed node\n*/\nprivate STNode parseConditionalExpression(STNode lhsExpr) {\nstartContext(ParserRuleContext.CONDITIONAL_EXPRESSION);\nSTNode questionMark = parseQuestionMark();\nSTNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);\nSTNode nextToken = peek();\nSTNode endExpr;\nSTNode colon;\nif (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;\nmiddleExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.modulePrefix);\ncolon = qualifiedNameRef.colon;\nendContext();\nendExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);\n} else {\ncolon = parseColon();\nendContext();\nendExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);\n}\nreturn STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);\n}\n/**\n* Parse enum declaration.\n*

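parseConditionalExpression above resolves a small ambiguity: in an input like "cond ? a:b" with no second colon, the middle expression first parses as the qualified name reference a:b, and the branch shown splits it back into a middle expression and an end expression. A hypothetical illustration:

```java
// Illustrative only: how the qualified-name middle expression is split.
final class ConditionalSplitShape {
    static final String INPUT       = "flag ? a:b";   // middle expr first parses as a:b
    static final String INTERPRETED = "flag ? a : b"; // after the split above
}
```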
\n* module-enum-decl :=\n* metadata\n* [public] enum identifier { enum-member (, enum-member)* }\n* enum-member := metadata identifier [= const-expr]\n*

\n*\n* @param metadata\n* @param qualifier\n*\n* @return Parsed enum node.\n*/\nprivate STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.MODULE_ENUM_DECLARATION);\nSTNode enumKeywordToken = parseEnumKeyword();\nSTNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);\nSTNode openBraceToken = parseOpenBrace();\nSTNode enumMemberList = parseEnumMemberList();\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nopenBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,\nDiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);\nreturn STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,\nopenBraceToken, enumMemberList, closeBraceToken);\n}\n/**\n* Parse 'enum' keyword.\n*\n* @return enum keyword node\n*/\nprivate STNode parseEnumKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ENUM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ENUM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse enum member list.\n*

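A hypothetical Ballerina input matching the module-enum-decl grammar above; members are comma separated and may each carry an optional = const-expr, which parseEnumMemberRhs below handles:

```java
// Hypothetical input (illustrative only).
final class EnumShape {
    static final String DECL = "public enum Color { RED, GREEN = \"G\", BLUE }";
}
```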
\n* enum-member := metadata identifier [= const-expr]\n*

\n*\n* @return enum member list node.\n*/\nprivate STNode parseEnumMemberList() {\nstartContext(ParserRuleContext.ENUM_MEMBER_LIST);\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList enumMemberList = new ArrayList<>();\nSTNode enumMember = parseEnumMember();\nnextToken = peek();\nSTNode enumMemberRhs;\nwhile (nextToken.kind != SyntaxKind.CLOSE_BRACE_TOKEN) {\nenumMemberRhs = parseEnumMemberEnd(nextToken.kind);\nif (enumMemberRhs == null) {\nbreak;\n}\nenumMemberList.add(enumMember);\nenumMemberList.add(enumMemberRhs);\nenumMember = parseEnumMember();\nnextToken = peek();\n}\nenumMemberList.add(enumMember);\nendContext();\nreturn STNodeFactory.createNodeList(enumMemberList);\n}\n/**\n* Parse enum member.\n*

\n* enum-member := metadata identifier [= const-expr]\n*

\n*\n* @return Parsed enum member node.\n*/\nprivate STNode parseEnumMember() {\nSTToken nextToken = peek();\nSTNode metadata;\nswitch (nextToken.kind) {\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextToken.kind);\nbreak;\ndefault:\nmetadata = STNodeFactory.createEmptyNode();\n}\nSTNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);\nreturn parseEnumMemberRhs(metadata, identifierNode);\n}\nprivate STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {\nreturn parseEnumMemberRhs(peek().kind, metadata, identifierNode);\n}\nprivate STNode parseEnumMemberRhs(SyntaxKind nextToken, STNode metadata, STNode identifierNode) {\nSTNode equalToken, constExprNode;\nswitch (nextToken) {\ncase EQUAL_TOKEN:\nequalToken = parseAssignOp();\nconstExprNode = parseExpression();\nbreak;\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nequalToken = STNodeFactory.createEmptyNode();\nconstExprNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseEnumMemberRhs(solution.tokenKind, metadata, identifierNode);\n}\nreturn STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);\n}\nprivate STNode parseEnumMemberEnd() {\nreturn parseEnumMemberEnd(peek().kind);\n}\nprivate STNode parseEnumMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseEnumMemberEnd(solution.tokenKind);\n}\n}\n/**\n* Parse transaction statement.\n*

\n* transaction-stmt := \"transaction\" block-stmt ;\n*\n* @return Transaction statement node\n*/\nprivate STNode parseTransactionStatement() {\nstartContext(ParserRuleContext.TRANSACTION_STMT);\nSTNode transactionKeyword = parseTransactionKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nreturn STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt);\n}\n/**\n* Parse transaction keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseTransactionKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TRANSACTION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse commit action.\n*

\n* commit-action := \"commit\"\n*\n* @return Commit action node\n*/\nprivate STNode parseCommitAction() {\nSTNode commitKeyword = parseCommitKeyword();\nreturn STNodeFactory.createCommitActionNode(commitKeyword);\n}\n/**\n* Parse commit keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseCommitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COMMIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMMIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse retry statement.\n*

\n* \n* retry-stmt := \"retry\" retry-spec block-stmt\n*
\n* retry-spec := [type-parameter] [ \"(\" arg-list \")\" ]\n*
\n*\n* @return Retry statement node\n*/\nprivate STNode parseRetryStatement() {\nstartContext(ParserRuleContext.RETRY_STMT);\nSTNode retryKeyword = parseRetryKeyword();\nSTNode retryStmt = parseRetryKeywordRhs(retryKeyword);\nendContext();\nreturn retryStmt;\n}\nprivate STNode parseRetryKeywordRhs(STNode retryKeyword) {\nreturn parseRetryKeywordRhs(peek().kind, retryKeyword);\n}\nprivate STNode parseRetryKeywordRhs(SyntaxKind nextTokenKind, STNode retryKeyword) {\nswitch (nextTokenKind) {\ncase LT_TOKEN:\nSTNode typeParam = parseTypeParameter();\nreturn parseRetryTypeParamRhs(retryKeyword, typeParam);\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\ntypeParam = STNodeFactory.createEmptyNode();\nreturn parseRetryTypeParamRhs(nextTokenKind, retryKeyword, typeParam);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRetryKeywordRhs(solution.tokenKind, retryKeyword);\n}\n}\nprivate STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {\nreturn parseRetryTypeParamRhs(peek().kind, retryKeyword, typeParam);\n}\nprivate STNode parseRetryTypeParamRhs(SyntaxKind nextTokenKind, STNode retryKeyword, STNode typeParam) {\nSTNode args;\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nargs = parseParenthesizedArgList();\nbreak;\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\nargs = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);\nreturn parseRetryTypeParamRhs(solution.tokenKind, retryKeyword, typeParam);\n}\nSTNode blockStmt = parseRetryBody();\nreturn STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt);\n}\nprivate STNode parseRetryBody() {\nreturn parseRetryBody(peek().kind);\n}\nprivate STNode parseRetryBody(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\nreturn parseBlockNode();\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionStatement();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_BODY);\nreturn parseRetryBody(solution.tokenKind);\n}\n}\n/**\n* Parse retry keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseRetryKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETRY_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETRY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse rollback statement.\n*

\n* rollback-stmt := \"rollback\" [expression] \";\"\n*\n* @return Rollback statement node\n*/\nprivate STNode parseRollbackStatement() {\nstartContext(ParserRuleContext.ROLLBACK_STMT);\nSTNode rollbackKeyword = parseRollbackKeyword();\nSTNode expression;\nif (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {\nexpression = STNodeFactory.createEmptyNode();\n} else {\nexpression = parseExpression();\n}\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);\n}\n/**\n* Parse rollback keyword.\n*\n* @return Rollback keyword node\n*/\nprivate STNode parseRollbackKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ROLLBACK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse transactional expression.\n*

\n* transactional-expr := \"transactional\"\n*\n* @return Transactional expression node\n*/\nprivate STNode parseTransactionalExpression() {\nSTNode transactionalKeyword = parseTransactionalKeyword();\nreturn STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);\n}\n/**\n* Parse transactional keyword.\n*\n* @return Transactional keyword node\n*/\nprivate STNode parseTransactionalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse service-constructor-expr.\n*

\n* \n* service-constructor-expr := [annots] service service-body-block\n*
\n* service-body-block := { service-method-defn* }\n*
\n* service-method-defn := metadata [resource] function identifier function-signature method-defn-body\n*
\n*\n* @param annots Annotations\n* @return Service constructor expression node\n*/\nprivate STNode parseServiceConstructorExpression(STNode annots) {\nstartContext(ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION);\nSTNode serviceKeyword = parseServiceKeyword();\nSTNode serviceBody = parseServiceBody();\nendContext();\nreturn STNodeFactory.createServiceConstructorExpressionNode(annots, serviceKeyword, serviceBody);\n}\n/**\n* Parse base16 literal.\n*

\n* \n* byte-array-literal := Base16Literal | Base64Literal\n*
\n* Base16Literal := base16 WS ` HexGroup* WS `\n*
\n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*
\n*\n* @param kind byte array literal kind\n* @return parsed node\n*/\nprivate STNode parseByteArrayLiteral(SyntaxKind kind) {\nSTNode type;\nif (kind == SyntaxKind.BASE16_KEYWORD) {\ntype = parseBase16Keyword();\n} else {\ntype = parseBase64Keyword();\n}\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nif (startingBackTick.isMissing()) {\nstartingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);\nSTNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);\nSTNode content = STNodeFactory.createEmptyNode();\nSTNode byteArrayLiteral =\nSTNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);\nbyteArrayLiteral =\nSyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);\nreturn byteArrayLiteral;\n}\nSTNode content = parseByteArrayContent(kind);\nreturn parseByteArrayLiteral(kind, type, startingBackTick, content);\n}\n/**\n* Parse byte array literal.\n*\n* @param baseKind indicates the SyntaxKind base16 or base64\n* @param typeKeyword keyword token, possible values are `base16` and `base64`\n* @param startingBackTick starting backtick token\n* @param byteArrayContent byte array literal content to be validated\n* @return parsed byte array literal node\n*/\nprivate STNode parseByteArrayLiteral(SyntaxKind baseKind, STNode typeKeyword, STNode startingBackTick,\nSTNode byteArrayContent) {\nSTNode content = STNodeFactory.createEmptyNode();\nSTNode newStartingBackTick = startingBackTick;\nSTNodeList items = (STNodeList) byteArrayContent;\nif (items.size() == 1) {\nSTNode item = items.get(0);\nif (baseKind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (baseKind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (item.kind != SyntaxKind.TEMPLATE_STRING) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else {\ncontent = item;\n}\n} else if (items.size() > 1) {\nSTNode clonedStartingBackTick = startingBackTick;\nfor (int index = 0; index < items.size(); index++) {\nSTNode item = items.get(index);\nclonedStartingBackTick =\nSyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);\n}\nnewStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n}\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);\n}\n/**\n* Parse base16 keyword.\n*\n* @return base16 keyword node\n*/\nprivate STNode parseBase16Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE16_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BASE16_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse base64 keyword.\n*\n* @return base64 keyword node\n*/\nprivate STNode parseBase64Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE64_KEYWORD) 
{\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BASE64_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Validate and parse byte array literal content.\n* An error is reported if the content is invalid.\n*\n* @param kind byte array literal kind\n* @return parsed node\n*/\nprivate STNode parseByteArrayContent(SyntaxKind kind) {\nSTToken nextToken = peek();\nList<STNode> items = new ArrayList<>();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode content = parseTemplateItem();\nitems.add(content);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\n/**\n* Validate base16 literal content.\n*

\n* \n* Base16Literal := base16 WS ` HexGroup* WS `\n*
\n* HexGroup := WS HexDigit WS HexDigit\n*
\n* WS := WhiteSpaceChar*\n*
\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase16LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint hexDigitCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ndefault:\nif (isHexDigit(c)) {\nhexDigitCount++;\n} else {\nreturn false;\n}\nbreak;\n}\n}\nreturn hexDigitCount % 2 == 0;\n}\n/**\n* Validate base64 literal content.\n*

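isValidBase16LiteralContent above reduces to: skip the four whitespace characters, require everything else to be a hex digit, and require an even digit count (two digits per encoded byte). A self-contained sanity check of the same rule, assuming the LexerTerminals constants are tab, newline, carriage return, and space:

```java
final class Base16Check {
    static boolean isHex(char c) {
        return ('0' <= c && c <= '9') || ('a' <= c && c <= 'f') || ('A' <= c && c <= 'F');
    }

    static boolean valid(String content) {
        int hexDigits = 0;
        for (char c : content.toCharArray()) {
            if (c == '\t' || c == '\n' || c == '\r' || c == ' ') {
                continue; // whitespace may appear between digits
            }
            if (!isHex(c)) {
                return false;
            }
            hexDigits++;
        }
        return hexDigits % 2 == 0; // two hex digits per byte
    }

    public static void main(String[] args) {
        System.out.println(valid("aeeb cc88 9a")); // true: 10 hex digits
        System.out.println(valid("aeeb cc88 9"));  // false: odd digit count
        System.out.println(valid("aeeb zz88"));    // false: non-hex character
    }
}
```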
\n* \n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*
\n* Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char\n*
\n* PaddedBase64Group :=\n* WS Base64Char WS Base64Char WS Base64Char WS PaddingChar\n* | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar\n*
\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*
\n* PaddingChar := =\n*
\n* WS := WhiteSpaceChar*\n*
\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase64LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint base64CharCount = 0;\nint paddingCharCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ncase LexerTerminals.EQUAL:\npaddingCharCount++;\nbreak;\ndefault:\nif (isBase64Char(c)) {\nif (paddingCharCount == 0) {\nbase64CharCount++;\n} else {\nreturn false;\n}\n} else {\nreturn false;\n}\nbreak;\n}\n}\nif (paddingCharCount > 2) {\nreturn false;\n} else if (paddingCharCount == 0) {\nreturn base64CharCount % 4 == 0;\n} else {\nreturn base64CharCount % 4 == 4 - paddingCharCount;\n}\n}\n/**\n*

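The base64 counterpart above adds two constraints: '=' padding may occur at most twice and only at the tail, and base64 characters plus padding must complete four-character groups. A self-contained sketch under the same whitespace assumption:

```java
final class Base64Check {
    static boolean isBase64(char c) {
        return ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z')
                || ('0' <= c && c <= '9') || c == '+' || c == '/';
    }

    static boolean valid(String content) {
        int chars = 0;
        int padding = 0;
        for (char c : content.toCharArray()) {
            if (c == '\t' || c == '\n' || c == '\r' || c == ' ') {
                continue;
            }
            if (c == '=') {
                padding++;
            } else if (!isBase64(c) || padding > 0) {
                return false; // invalid char, or a base64 char after padding
            } else {
                chars++;
            }
        }
        if (padding > 2) {
            return false;
        }
        return padding == 0 ? chars % 4 == 0 : chars % 4 == 4 - padding;
    }

    public static void main(String[] args) {
        System.out.println(valid("aGVsbG8="));  // true: 7 chars + 1 padding
        System.out.println(valid("aGVsbG8=x")); // false: char after padding
        System.out.println(valid("aGVsbG8"));   // false: incomplete group
    }
}
```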
\n* Check whether a given char is a base64 char.\n*

\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*\n* @param c character to check\n* @return true, if the character represents a base64 char. false otherwise.\n*/\nstatic boolean isBase64Char(int c) {\nif ('a' <= c && c <= 'z') {\nreturn true;\n}\nif ('A' <= c && c <= 'Z') {\nreturn true;\n}\nif (c == '+' || c == '/') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isHexDigit(int c) {\nif ('a' <= c && c <= 'f') {\nreturn true;\n}\nif ('A' <= c && c <= 'F') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isDigit(int c) {\nreturn ('0' <= c && c <= '9');\n}\n/**\n* Parse xml filter expression.\n*

\n* xml-filter-expr := expression .< xml-name-pattern >\n*\n* @param lhsExpr Preceding expression of .< token\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpression(STNode lhsExpr) {\nSTNode xmlNamePatternChain = parseXMLFilterExpressionRhs();\nreturn STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);\n}\n/**\n* Parse xml filter expression rhs.\n*

\n* filter-expression-rhs := .< xml-name-pattern >\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpressionRhs() {\nSTNode dotLTToken = parseDotLTToken();\nreturn parseXMLNamePatternChain(dotLTToken);\n}\n/**\n* Parse xml name pattern chain.\n*

\n* \n* xml-name-pattern-chain := filter-expression-rhs | xml-element-children-step | xml-element-descendants-step\n*
\n* filter-expression-rhs := .< xml-name-pattern >\n*
\n* xml-element-children-step := /< xml-name-pattern >\n*
\n* xml-element-descendants-step := /**\\/\n*
\n*\n* @param startToken Preceding token of xml name pattern\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePatternChain(STNode startToken) {\nstartContext(ParserRuleContext.XML_NAME_PATTERN);\nSTNode xmlNamePattern = parseXMLNamePattern();\nSTNode gtToken = parseGTToken();\nendContext();\nstartToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,\nDiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);\nreturn STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);\n}\n/**\n* Parse .< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDotLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse xml name pattern.\n*

\n* xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePattern() {\nList xmlAtomicNamePatternList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfXMLNamePattern(nextToken.kind)) {\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nSTNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\nSTNode separator;\nwhile (!isEndOfXMLNamePattern(peek().kind)) {\nseparator = parseXMLNamePatternSeparator();\nif (separator == null) {\nbreak;\n}\nxmlAtomicNamePatternList.add(separator);\nxmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\n}\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nprivate boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ncase IDENTIFIER_TOKEN:\ncase ASTERISK_TOKEN:\ncase COLON_TOKEN:\ndefault:\nreturn false;\n}\n}\nprivate STNode parseXMLNamePatternSeparator() {\nSTToken token = peek();\nswitch (token.kind) {\ncase PIPE_TOKEN:\nreturn consume();\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution sol = recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);\nif (sol.tokenKind == SyntaxKind.GT_TOKEN) {\nreturn null;\n}\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse xml atomic name pattern.\n*

\n* \n* xml-atomic-name-pattern :=\n* *\n* | identifier\n* | xml-namespace-prefix : identifier\n* | xml-namespace-prefix : *\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseXMLAtomicNamePattern() {\nstartContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);\nSTNode atomicNamePattern = parseXMLAtomicNamePatternBody();\nendContext();\nreturn atomicNamePattern;\n}\nprivate STNode parseXMLAtomicNamePatternBody() {\nSTToken token = peek();\nSTNode identifier;\nswitch (token.kind) {\ncase ASTERISK_TOKEN:\nreturn consume();\ncase IDENTIFIER_TOKEN:\nidentifier = consume();\nbreak;\ndefault:\nSolution sol = recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nif (sol.recoveredNode.kind == SyntaxKind.ASTERISK_TOKEN) {\nreturn sol.recoveredNode;\n}\nidentifier = sol.recoveredNode;\nbreak;\n}\nreturn parseXMLAtomicNameIdentifier(identifier);\n}\nprivate STNode parseXMLAtomicNameIdentifier(STNode identifier) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COLON_TOKEN) {\nSTNode colon = consume();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nSTToken endToken = consume();\nreturn STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);\n}\n}\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\n/**\n* Parse xml step expression.\n*

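Hypothetical Ballerina inputs for the xml navigation expressions in this region: the filter form parsed earlier, and the step forms whose start tokens are parsed below:

```java
// Hypothetical inputs (illustrative only).
final class XmlNavShapes {
    static final String FILTER       = "x.<name|ns:other>"; // xml-filter-expr
    static final String ALL_CHILDREN = "x/*";               // xml-all-children-step
    static final String CHILDREN     = "x/<item>";          // xml-element-children-step
}
```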
\n* xml-step-expr := expression xml-step-start\n*\n* @param lhsExpr Preceding expression of /*, /<, or /**\/< token\n* @return Parsed node\n*/\nprivate STNode parseXMLStepExpression(STNode lhsExpr) {\nSTNode xmlStepStart = parseXMLStepStart();\nreturn STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);\n}\n/**\n* Parse xml step start.\n*

\n* \n* xml-step-start :=\n* xml-all-children-step\n* | xml-element-children-step\n* | xml-element-descendants-step\n*
\n* xml-all-children-step := /*\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLStepStart() {\nSTToken token = peek();\nSTNode startToken;\nswitch (token.kind) {\ncase SLASH_ASTERISK_TOKEN:\nreturn consume();\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\nstartToken = parseDoubleSlashDoubleAsteriskLTToken();\nbreak;\ncase SLASH_LT_TOKEN:\ndefault:\nstartToken = parseSlashLTToken();\nbreak;\n}\nreturn parseXMLNamePatternChain(startToken);\n}\n/**\n* Parse /< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseSlashLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse /**\/< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDoubleSlashDoubleAsteriskLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse match statement.\n*

\n* match-stmt := match action-or-expr { match-clause+ }\n*\n* @return Match statement\n*/\nprivate STNode parseMatchStatement() {\nstartContext(ParserRuleContext.MATCH_STMT);\nSTNode matchKeyword = parseMatchKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nstartContext(ParserRuleContext.MATCH_BODY);\nSTNode openBrace = parseOpenBrace();\nSTNode matchClauses = parseMatchClauses();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nendContext();\nreturn STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace);\n}\n/**\n* Parse match keyword.\n*\n* @return Match keyword node\n*/\nprivate STNode parseMatchKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.MATCH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse match clauses list.\n*\n* @return Match clauses list\n*/\nprivate STNode parseMatchClauses() {\nList<STNode> matchClauses = new ArrayList<>();\nwhile (!isEndOfMatchClauses(peek().kind)) {\nSTNode clause = parseMatchClause();\nmatchClauses.add(clause);\n}\nreturn STNodeFactory.createNodeList(matchClauses);\n}\nprivate boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse a single match clause.\n*

\n* \n* match-clause := match-pattern-list [match-guard] => block-stmt\n*
\n* match-guard := if expression\n*
\n*\n* @return A match clause\n*/\nprivate STNode parseMatchClause() {\nSTNode matchPatterns = parseMatchPatternList();\nSTNode matchGuard = parseMatchGuard();\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode blockStmt = parseBlockNode();\nreturn STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);\n}\n/**\n* Parse match guard.\n*

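A hypothetical Ballerina input showing what a match clause combines, per the grammar above: a pattern list, an optional if guard, and => followed by a block. The helper names are placeholders:

```java
final class MatchShape {
    static final String STMT =
            "match v {\n"
            + "    1|2 => { handleSmall(); }\n"           // match-pattern-list of two const patterns
            + "    var x if x > 10 => { handleBig(); }\n" // binding pattern with a match-guard
            + "    _ => { }\n"                            // wildcard pattern
            + "}";
}
```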
\n* match-guard := if expression\n*\n* @return Match guard\n*/\nprivate STNode parseMatchGuard() {\nSTToken nextToken = peek();\nreturn parseMatchGuard(nextToken.kind);\n}\nprivate STNode parseMatchGuard(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IF_KEYWORD:\nSTNode ifKeyword = parseIfKeyword();\nSTNode expr = parseExpression(peek().kind, DEFAULT_OP_PRECEDENCE, true, false, true, false);\nreturn STNodeFactory.createMatchGuardNode(ifKeyword, expr);\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchGuard(solution.tokenKind);\n}\n}\n/**\n* Parse match patterns list.\n*

\n* match-pattern-list := match-pattern (| match-pattern)*\n*\n* @return Match patterns list\n*/\nprivate STNode parseMatchPatternList() {\nstartContext(ParserRuleContext.MATCH_PATTERN);\nList<STNode> matchPatterns = new ArrayList<>();\nwhile (!isEndOfMatchPattern(peek().kind)) {\nSTNode matchPattern = parseMatchPattern();\nif (matchPattern == null) {\nbreak;\n}\nmatchPatterns.add(matchPattern);\nSTNode separator = parseMatchPatternEnd();\nif (separator == null) {\nbreak;\n}\nmatchPatterns.add(separator);\n}\nendContext();\nreturn STNodeFactory.createNodeList(matchPatterns);\n}\nprivate boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse match pattern.\n*

\n* \n* match-pattern := var binding-pattern\n* | wildcard-match-pattern\n* | const-pattern\n* | list-match-pattern\n* | mapping-match-pattern\n* | functional-match-pattern\n* \n*\n* @return Match pattern\n*/\nprivate STNode parseMatchPattern() {\nSTToken nextToken = peek();\nreturn parseMatchPattern(nextToken.kind);\n}\nprivate STNode parseMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_LITERAL:\nreturn parseSimpleConstExpr();\ncase IDENTIFIER_TOKEN:\nSTNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);\nreturn parseFunctionalMatchPatternOrConsPattern(typeRefOrConstExpr);\ncase VAR_KEYWORD:\nreturn parseVarTypedBindingPattern();\ncase OPEN_BRACKET_TOKEN:\nreturn parseListMatchPattern();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingMatchPattern();\ncase ERROR_KEYWORD:\nreturn parseFunctionalMatchPattern(consume());\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MATCH_PATTERN_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchPattern(solution.tokenKind);\n}\n}\nprivate STNode parseMatchPatternEnd() {\nSTToken nextToken = peek();\nreturn parseMatchPatternEnd(nextToken.kind);\n}\nprivate STNode parseMatchPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ncase IF_KEYWORD:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MATCH_PATTERN_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchPatternEnd(solution.tokenKind);\n}\n}\n/**\n* Parse var typed binding pattern.\n*

\n* var binding-pattern\n*

\n*\n* @return Parsed typed binding pattern node\n*/\nprivate STNode parseVarTypedBindingPattern() {\nSTNode varKeyword = parseVarKeyword();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createTypedBindingPatternNode(varKeyword, bindingPattern);\n}\n/**\n* Parse var keyword.\n*\n* @return Var keyword node\n*/\nprivate STNode parseVarKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.VAR_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.VAR_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse list match pattern.\n*

\n* \n* list-match-pattern := [ list-member-match-patterns ]\n* list-member-match-patterns :=\n* match-pattern (, match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n* \n*

\n*\n* @return Parsed list match pattern node\n*/\nprivate STNode parseListMatchPattern() {\nstartContext(ParserRuleContext.LIST_MATCH_PATTERN);\nSTNode openBracketToken = parseOpenBracket();\nList matchPatternList = new ArrayList<>();\nSTNode restMatchPattern = null;\nwhile (!isEndOfListMatchPattern()) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {\nrestMatchPattern = parseRestMatchPattern();\nbreak;\n}\nSTNode matchPatternListMember = parseMatchPattern();\nmatchPatternList.add(matchPatternListMember);\nSTNode matchPatternMemberRhs = parseListMatchPatternMemberRhs();\nif (matchPatternMemberRhs != null) {\nmatchPatternList.add(matchPatternMemberRhs);\n} else {\nbreak;\n}\n}\nif (restMatchPattern == null) {\nrestMatchPattern = STNodeFactory.createEmptyNode();\n}\nSTNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, restMatchPattern,\ncloseBracketToken);\n}\npublic boolean isEndOfListMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse rest match pattern.\n*

\n* \n* rest-match-pattern := ... var variable-name\n* \n*

\n*\n* @return Parsed rest match pattern node\n*/\nprivate STNode parseRestMatchPattern() {\nstartContext(ParserRuleContext.REST_MATCH_PATTERN);\nSTNode ellipsisToken = parseEllipsis();\nSTNode varKeywordToken = parseVarKeyword();\nSTNode variableName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);\nreturn STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);\n}\nprivate STNode parseListMatchPatternMemberRhs() {\nreturn parseListMatchPatternMemberRhs(peek().kind);\n}\nprivate STNode parseListMatchPatternMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListMatchPatternMemberRhs(solution.tokenKind);\n}\n}\n/**\n* Parse mapping match pattern.\n*

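A hypothetical input for the mapping-match-pattern grammar below: field patterns first, then an optional trailing rest pattern:

```java
// Hypothetical input (illustrative only).
final class MappingPatternShape {
    static final String PATTERN = "{x: var a, y: var b, ... var rest}";
}
```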
\n* mapping-match-pattern := { field-match-patterns }\n*
\n* field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n*
\n* field-match-pattern := field-name : match-pattern\n*
\n* rest-match-pattern := ... var variable-name\n*

\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingMatchPattern() {\nstartContext(ParserRuleContext.MAPPING_MATCH_PATTERN);\nSTNode openBraceToken = parseOpenBrace();\nList<STNode> fieldMatchPatternList = new ArrayList<>();\nSTNode restMatchPattern = null;\nboolean isEndOfFields = false;\nwhile (!isEndOfMappingMatchPattern()) {\nSTNode fieldMatchPatternMember = parseFieldMatchPatternMember();\nif (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {\nrestMatchPattern = fieldMatchPatternMember;\nisEndOfFields = true;\nbreak;\n}\nfieldMatchPatternList.add(fieldMatchPatternMember);\nSTNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();\nif (fieldMatchPatternRhs != null) {\nfieldMatchPatternList.add(fieldMatchPatternRhs);\n} else {\nbreak;\n}\n}\nSTNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();\nwhile (isEndOfFields && fieldMatchPatternRhs != null) {\nSTNode invalidField = parseFieldMatchPatternMember();\nrestMatchPattern =\nSyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, fieldMatchPatternRhs);\nrestMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, invalidField);\nrestMatchPattern = SyntaxErrors.addDiagnostic(restMatchPattern,\nDiagnosticErrorCode.ERROR_MORE_FIELD_MATCH_PATTERNS_AFTER_REST_FIELD);\nfieldMatchPatternRhs = parseFieldMatchPatternRhs();\n}\nif (restMatchPattern == null) {\nrestMatchPattern = STNodeFactory.createEmptyNode();\n}\nSTNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, restMatchPattern,\ncloseBraceToken);\n}\nprivate STNode parseFieldMatchPatternMember() {\nreturn parseFieldMatchPatternMember(peek().kind);\n}\nprivate STNode parseFieldMatchPatternMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseFieldMatchPattern();\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldMatchPatternMember(solution.tokenKind);\n}\n}\n/**\n* Parse field match pattern.\n*

\n* field-match-pattern := field-name : match-pattern\n*

\n*\n* @return Parsed field match pattern node\n*/\npublic STNode parseFieldMatchPattern() {\nSTNode fieldNameNode = parseVariableName();\nSTNode colonToken = parseColon();\nSTNode matchPattern = parseMatchPattern();\nreturn STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);\n}\npublic boolean isEndOfMappingMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseFieldMatchPatternRhs() {\nreturn parseFieldMatchPatternRhs(peek().kind);\n}\nprivate STNode parseFieldMatchPatternRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldMatchPatternRhs(solution.tokenKind);\n}\n}\nprivate STNode parseFunctionalMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {\nreturn parseFunctionalMatchPatternOrConsPattern(peek().kind, typeRefOrConstExpr);\n}\nprivate STNode parseFunctionalMatchPatternOrConsPattern(SyntaxKind nextToken, STNode typeRefOrConstExpr) {\nswitch (nextToken) {\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalMatchPattern(typeRefOrConstExpr);\ndefault:\nif (isMatchPatternEnd(peek().kind)) {\nreturn typeRefOrConstExpr;\n}\nSolution solution =\nrecover(peek(), ParserRuleContext.FUNC_MATCH_PATTERN_OR_CONST_PATTERN, typeRefOrConstExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFunctionalMatchPatternOrConsPattern(solution.tokenKind, typeRefOrConstExpr);\n}\n}\nprivate boolean isMatchPatternEnd(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse functional match pattern.\n*

\n* functional-match-pattern := functionally-constructible-type-reference ( arg-list-match-pattern )\n*
\n* functionally-constructible-type-reference := error | type-reference\n*
\n* type-reference := identifier | qualified-identifier\n*
\n* arg-list-match-pattern := positional-arg-match-patterns [, other-arg-match-patterns]\n* | other-arg-match-patterns\n*

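validateArgMatchPatternOrder further below enforces the ordering this grammar implies: positional args first, then named args, then at most one trailing rest arg. A minimal standalone sketch with its own enum rather than the parser's SyntaxKind values:

```java
enum ArgKind { POSITIONAL, NAMED, REST }

final class ArgOrderCheck {
    // Mirrors the rule: nothing may follow REST; no positional after named.
    static boolean ordered(ArgKind prev, ArgKind current) {
        if (prev == ArgKind.REST) {
            return false;
        }
        if (prev == ArgKind.NAMED) {
            return current != ArgKind.POSITIONAL;
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(ordered(ArgKind.NAMED, ArgKind.REST));       // true
        System.out.println(ordered(ArgKind.NAMED, ArgKind.POSITIONAL)); // false
    }
}
```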
\n*\n* @return Parsed functional match pattern node.\n*/\nprivate STNode parseFunctionalMatchPattern(STNode typeRef) {\nstartContext(ParserRuleContext.FUNCTIONAL_MATCH_PATTERN);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode argListMatchPatternNode = parseArgListMatchPatterns();\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createFunctionalMatchPatternNode(typeRef, openParenthesisToken, argListMatchPatternNode,\ncloseParenthesisToken);\n}\nprivate STNode parseArgListMatchPatterns() {\nList argListMatchPatterns = new ArrayList<>();\nSyntaxKind lastValidArgKind = SyntaxKind.IDENTIFIER_TOKEN;\nwhile (!isEndOfFunctionalMatchPattern()) {\nSTNode currentArg = parseArgMatchPattern();\nDiagnosticErrorCode errorCode = validateArgMatchPatternOrder(lastValidArgKind, currentArg.kind);\nif (errorCode == null) {\nargListMatchPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);\n}\nSTNode argRhs = parseArgMatchPatternRhs();\nif (argRhs == null) {\nbreak;\n}\nif (errorCode == null) {\nargListMatchPatterns.add(argRhs);\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, argRhs, null);\n}\n}\nreturn STNodeFactory.createNodeList(argListMatchPatterns);\n}\nprivate boolean isEndOfFunctionalMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse arg match patterns.\n* \n* arg-match-pattern := match-pattern | named-arg-match-pattern | rest-match-pattern\n* \n*
\n*
\n*\n* @return parsed arg match pattern node.\n*/\nprivate STNode parseArgMatchPattern() {\nreturn parseArgMatchPattern(peek().kind);\n}\nprivate STNode parseArgMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseNamedOrPositionalArgMatchPattern();\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_LITERAL:\ncase VAR_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseMatchPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgMatchPattern(solution.tokenKind);\n}\n}\nprivate STNode parseNamedOrPositionalArgMatchPattern() {\nSTNode identifier = parseIdentifier(ParserRuleContext.MATCH_PATTERN_START);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nreturn parseNamedArgMatchPattern(identifier);\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalMatchPattern(identifier);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn identifier;\n}\n}\n/**\n* Parses the next named arg match pattern.\n*
\n* named-arg-match-pattern := arg-name = match-pattern\n*
\n*
\n*\n* @return Parsed named arg match pattern node\n*/\nprivate STNode parseNamedArgMatchPattern(STNode identifier) {\nstartContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);\nSTNode equalToken = parseAssignOp();\nSTNode matchPattern = parseMatchPattern();\nendContext();\nreturn STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);\n}\nprivate STNode parseArgMatchPatternRhs() {\nreturn parseArgMatchPatternRhs(peek().kind);\n}\nprivate STNode parseArgMatchPatternRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgMatchPatternRhs(solution.tokenKind);\n}\n}\nprivate DiagnosticErrorCode validateArgMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase NAMED_ARG_MATCH_PATTERN:\nif (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&\ncurrentArgKind != SyntaxKind.REST_MATCH_PATTERN) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_MATCH_PATTERN:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nbreak;\n}\nreturn errorCode;\n}\n/**\n* Parse markdown documentation.\n*\n* @return markdown documentation node\n*/\nprivate STNode parseMarkdownDocumentation() {\nList<STNode> markdownDocLineList = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {\nSTToken documentationString = consume();\nSTNode parsedDocLines = parseDocumentationString(documentationString);\nappendParsedDocumentationLines(markdownDocLineList, parsedDocLines);\nnextToken = peek();\n}\nSTNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);\nreturn STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);\n}\n/**\n* Parse documentation string.\n*\n* @return markdown documentation line list node\n*/\nprivate STNode parseDocumentationString(STToken documentationStringToken) {\nList<STNode> leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());\nTextDocument textDocument = TextDocuments.from(documentationStringToken.text());\nDocumentationLexer documentationLexer =\nnew DocumentationLexer(textDocument.getCharacterReader(), leadingTriviaList);\nAbstractTokenReader tokenReader = new TokenReader(documentationLexer);\nDocumentationParser documentationParser = new DocumentationParser(tokenReader);\nreturn documentationParser.parse();\n}\nprivate List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {\nList<STNode> leadingTriviaList = new ArrayList<>();\nint bucketCount = leadingMinutiaeNode.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nleadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));\n}\nreturn leadingTriviaList;\n}\nprivate void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {\nint bucketCount = parsedDocLines.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nSTNode markdownDocLine = parsedDocLines.childInBucket(i);\nmarkdownDocLineList.add(markdownDocLine);\n}\n}\n/**\n* Parse any statement that starts with a token that has ambiguity between being\n* a type-desc or an expression.\n*\n* @param annots Annotations\n* @return Statement node\n*/\nprivate STNode 
parseStmtStartsWithTypeOrExpr(SyntaxKind nextTokenKind, STNode annots) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode typeOrExpr = parseTypedBindingPatternOrExpr(nextTokenKind, true);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);\n}\nprivate STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {\nif (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, finalKeyword, typedBindingPatternOrExpr, false);\n}\nSTNode expr = getExpression(typedBindingPatternOrExpr);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrExpr(nextToken.kind, allowAssignment);\n}\nprivate STNode parseTypedBindingPatternOrExpr(SyntaxKind nextTokenKind, boolean allowAssignment) {\nSTNode typeOrExpr;\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseTypedBPOrExprStartsWithOpenParenthesis();\ncase FUNCTION_KEYWORD:\nreturn parseAnonFuncExprOrTypedBPWithFuncType();\ncase IDENTIFIER_TOKEN:\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase OPEN_BRACKET_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseActionOrExpressionInLhs(nextTokenKind, null);\n}\nreturn parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);\n}\n}\n/**\n* Parse the component after the ambiguous starting node. Ambiguous node could be either an expr\n* or a type-desc. The component followed by this ambiguous node could be the binding-pattern or\n* the expression-rhs.\n*\n* @param typeOrExpr Type desc or the expression\n* @param allowAssignment Flag indicating whether to allow assignment. 
i.e.: whether this is a\n* valid lvalue expression\n* @return Typed-binding-pattern node or an expression node\n*/\nprivate STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrExprRhs(nextToken.kind, typeOrExpr, allowAssignment);\n}\nprivate STNode parseTypedBindingPatternOrExprRhs(SyntaxKind nextTokenKind, STNode typeOrExpr,\nboolean allowAssignment) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc =\nSTNodeFactory.createUnionTypeDescriptorNode(typeOrExpr, pipe, typedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypedBPOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(typeOrExpr, ampersand,\ntypedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypedBPOrExpr);\ncase SEMICOLON_TOKEN:\nif (isDefiniteExpr(typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\nif (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase EQUAL_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,\nParserRuleContext.AMBIGUOUS_STMT);\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextTokenKind, typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nSTToken token = peek();\nSolution solution =\nrecover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternOrExprRhs(solution.tokenKind, typeOrExpr, allowAssignment);\n}\n}\nprivate STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) 
{\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\ntypeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);\nendContext();\nreturn parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\nprivate STNode parseTypedBPOrExprStartsWithOpenParenthesis() {\nSTNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();\nif (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);\n}\nreturn parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);\n}\nprivate boolean isDefiniteTypeDesc(SyntaxKind kind) {\nreturn kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;\n}\nprivate boolean isDefiniteExpr(SyntaxKind kind) {\nif (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn false;\n}\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.FAIL_EXPRESSION) <= 0;\n}\n/**\n* Parse type or expression that starts with open parenthesis. Possible options are:\n* 1) () - nil type-desc or nil-literal\n* 2) (T) - Parenthesized type-desc\n* 3) (expr) - Parenthesized expression\n* 4) (param, param, ..) - Anon function params\n*\n* @return Type-desc or expression node\n*/\nprivate STNode parseTypedDescOrExprStartsWithOpenParenthesis() {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nSTNode closeParen = parseCloseParenthesis();\nreturn parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);\n}\nSTNode typeOrExpr = parseTypeDescOrExpr();\nif (isAction(typeOrExpr)) {\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,\ncloseParen);\n}\nif (isExpression(typeOrExpr.kind)) {\nstartContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\nreturn parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, typeOrExpr, false);\n}\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeOrExpr, closeParen);\n}\n/**\n* Parse type-desc or expression. 
This method does not handle binding patterns.\n*\n* @return Type-desc node or expression node\n*/\nprivate STNode parseTypeDescOrExpr() {\nSTToken nextToken = peek();\nSTNode typeOrExpr;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();\nbreak;\ncase FUNCTION_KEYWORD:\ntypeOrExpr = parseAnonFuncExprOrFuncTypeDesc();\nbreak;\ncase IDENTIFIER_TOKEN:\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypeDescOrExprRhs(typeOrExpr);\ncase OPEN_BRACKET_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();\nbreak;\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypeDescOrExprRhs(basicLiteral);\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreturn parseActionOrExpressionInLhs(nextToken.kind, null);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind)) {\nreturn parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n}\nreturn parseTypeDescOrExprRhs(typeOrExpr);\n}\nprivate boolean isExpression(SyntaxKind kind) {\nswitch (kind) {\ncase BASIC_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.FAIL_EXPRESSION) <= 0;\n}\n}\n/**\n* Parse statement that starts with an empty parenthesis. 
Empty parenthesis can be\n* 1) Nil literal\n* 2) Nil type-desc\n* 3) Anon-function params\n*\n* @param openParen Open parenthesis\n* @param closeParen Close parenthesis\n* @return Parsed node\n*/\nprivate STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nSTNode params = STNodeFactory.createNodeList();\nSTNode anonFuncParam =\nSTNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nendContext();\nreturn anonFuncParam;\ndefault:\nreturn STNodeFactory.createNilLiteralNode(openParen, closeParen);\n}\n}\nprivate STNode parseAnonFuncExprOrTypedBPWithFuncType() {\nSTNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc();\nif (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {\nreturn exprOrTypeDesc;\n}\nreturn parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\n/**\n* Parse anon-func-expr or function-type-desc, by resolving the ambiguity.\n*\n* @return Anon-func-expr or function-type-desc\n*/\nprivate STNode parseAnonFuncExprOrFuncTypeDesc() {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcSignature = parseFuncSignature(true);\nendContext();\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nfuncSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\nSTNode funcBody = parseAnonFuncBody(false);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nSTNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, functionKeyword,\nfuncSignature, funcBody);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);\ncase IDENTIFIER_TOKEN:\ndefault:\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\ntrue);\n}\n}\nprivate STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {\nSyntaxKind nextTokenKind = peek().kind;\nreturn parseTypeDescOrExprRhs(nextTokenKind, typeOrExpr);\n}\nprivate STNode parseTypeDescOrExprRhs(SyntaxKind nextTokenKind, STNode typeOrExpr) {\nSTNode typeDesc;\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn STNodeFactory.createUnionTypeDescriptorNode(typeDesc, pipe, rhsTypeDescOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind != SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = 
getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(typeDesc, ampersand, rhsTypeDescOrExpr);\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\ntypeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\nfalse);\nendContext();\nreturn typeDesc;\ncase SEMICOLON_TOKEN:\nreturn getTypeDescFromExpr(typeOrExpr);\ncase EQUAL_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\ncase COMMA_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,\nParserRuleContext.AMBIGUOUS_STMT);\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nreturn STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextTokenKind, typeOrExpr.kind)) {\nreturn parseExpressionRhs(nextTokenKind, DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false,\nfalse);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypeDescOrExprRhs(solution.tokenKind, typeOrExpr);\n}\n}\nprivate boolean isAmbiguous(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase BRACKETED_LIST:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\nbinaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase INDEXED_EXPRESSION:\nSTIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;\nif (!isAmbiguous(indexExpr.containerExpression)) {\nreturn false;\n}\nSTNode keys = indexExpr.keyExpression;\nfor (int i = 0; i < keys.bucketCount(); i++) {\nSTNode item = keys.childInBucket(i);\nif (item.kind == SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAmbiguous(item)) {\nreturn false;\n}\n}\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isAllBasicLiterals(STNode node) {\nswitch (node.kind) {\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\nbinaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase BRACKETED_LIST:\nSTAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;\nfor (STNode member : list.members) {\nif (member.kind == 
SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAllBasicLiterals(member)) {\nreturn false;\n}\n}\nreturn true;\ncase UNARY_EXPRESSION:\nSTUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;\nif (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&\nunaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {\nreturn false;\n}\nreturn isNumericLiteral(unaryExpr.expression);\ndefault:\nreturn false;\n}\n}\nprivate boolean isNumericLiteral(STNode node) {\nswitch (node.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTypedDescOrExprStartsWithOpenBracket() {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList members = new ArrayList<>();\nSTNode memberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nSTNode expr = parseTypeDescOrExpr();\nmembers.add(expr);\nmemberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmembers.add(memberEnd);\n}\nSTNode memberNodes = STNodeFactory.createNodeList(members);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);\n}\n/**\n* Parse binding-patterns.\n*
\n*\n* binding-pattern := capture-binding-pattern\n* | wildcard-binding-pattern\n* | list-binding-pattern\n* | mapping-binding-pattern\n* | functional-binding-pattern\n*\n* capture-binding-pattern := variable-name\n* variable-name := identifier\n*\n* wildcard-binding-pattern := _\n* list-binding-pattern := [ list-member-binding-patterns ]\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* field-binding-pattern := field-name : binding-pattern | variable-name\n* rest-binding-pattern := ... variable-name\n*\n* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )\n* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]\n* | other-arg-binding-patterns\n* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*\n* positional-arg-binding-pattern := binding-pattern\n* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]\n* | [rest-binding-pattern]\n* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*\n* named-arg-binding-pattern := arg-name = binding-pattern
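\n*\n* For example (illustrative sketches of the grammar above; all names are placeholders):\n* capture: 'x'; wildcard: '_'; list: '[a, b, ...rest]';\n* mapping: '{name: n, age}'; functional: 'error(msg, ...rest)'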
\n*\n* @return binding-pattern node\n*/\nprivate STNode parseBindingPattern() {\nSTToken token = peek();\nreturn parseBindingPattern(token.kind);\n}\nprivate STNode parseBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseListBindingPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseBindingPatternStartsWithIdentifier();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPattern();\ncase ERROR_KEYWORD:\nreturn parseErrorBindingPattern();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.BINDING_PATTERN);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn parseBindingPattern(sol.tokenKind);\n}\n}\nprivate STNode parseBindingPatternStartsWithIdentifier() {\nSTNode argNameOrBindingPattern =\nparseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\nSTToken secondToken = peek();\nif (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nstartContext(ParserRuleContext.FUNCTIONAL_BINDING_PATTERN);\nreturn parseFunctionalBindingPattern(argNameOrBindingPattern);\n}\nif (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {\nSTNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN);\nidentifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);\nreturn createCaptureOrWildcardBP(identifier);\n}\nreturn createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);\n}\nprivate STNode createCaptureOrWildcardBP(STNode varName) {\nSTNode bindingPattern;\nif (isWildcardBP(varName)) {\nbindingPattern = getWildcardBindingPattern(varName);\n} else {\nbindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);\n}\nreturn bindingPattern;\n}\n/**\n* Parse list-binding-patterns.\n*
\n* list-binding-pattern := [ list-member-binding-patterns ]\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]
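\n*\n* e.g. '[a, b]', '[a, [b, c]]', or '[a, ...rest]' (illustrative placeholders)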
\n*\n* @return list-binding-pattern node\n*/\nprivate STNode parseListBindingPattern() {\nstartContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode openBracket = parseOpenBracket();\nList bindingPatternsList = new ArrayList<>();\nSTNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);\nendContext();\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, List bindingPatternsList) {\nSTNode listBindingPatternMember = parseListBindingPatternMember();\nbindingPatternsList.add(listBindingPatternMember);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List bindingPatterns) {\nSTNode member = firstMember;\nSTToken token = peek();\nSTNode listBindingPatternRhs = null;\nwhile (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\nlistBindingPatternRhs = parseListBindingPatternMemberRhs(token.kind);\nif (listBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(listBindingPatternRhs);\nmember = parseListBindingPatternMember();\nbindingPatterns.add(member);\ntoken = peek();\n}\nSTNode restBindingPattern;\nif (member.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = bindingPatterns.remove(bindingPatterns.size() - 1);\n} else {\nrestBindingPattern = STNodeFactory.createEmptyNode();\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,\ncloseBracket);\n}\nprivate STNode parseListBindingPatternMemberRhs() {\nreturn parseListBindingPatternMemberRhs(peek().kind);\n}\nprivate STNode parseListBindingPatternMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternMemberRhs(solution.tokenKind);\n}\n}\nprivate boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse list-binding-pattern member.\n*
\n* list-binding-pattern := [ list-member-binding-patterns ]\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]
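\n*\n* e.g. a member may itself be '...rest', '[x, y]', or '{k: v}' (illustrative placeholders)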
\n*\n* @return List binding pattern member\n*/\nprivate STNode parseListBindingPatternMember() {\nSTToken token = peek();\nreturn parseListBindingPatternMember(token.kind);\n}\nprivate STNode parseListBindingPatternMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase OPEN_BRACKET_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn parseListBindingPatternMember(sol.tokenKind);\n}\n}\n/**\n* Parse rest binding pattern.\n*
\n* rest-binding-pattern := ... variable-name\n*\n* @return Rest binding pattern node\n*/\nprivate STNode parseRestBindingPattern() {\nstartContext(ParserRuleContext.REST_BINDING_PATTERN);\nSTNode ellipsis = parseEllipsis();\nSTNode varName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);\nreturn STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);\n}\n/**\n* Parse Typed-binding-pattern.\n*
\n* typed-binding-pattern := inferable-type-descriptor binding-pattern\n* inferable-type-descriptor := type-descriptor | var
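\n*\n* e.g. 'int x', 'var [a, b]', or 'string|int y' (illustrative placeholders)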
\n*\n* @return Typed binding pattern node\n*/\nprivate STNode parseTypedBindingPattern(ParserRuleContext context) {\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);\nSTNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);\nreturn typeBindingPattern;\n}\n/**\n* Parse mapping-binding-patterns.\n*
\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* field-binding-pattern := field-name : binding-pattern | variable-name
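\n*\n* e.g. '{name: n, age}' or '{name, ...rest}' (illustrative placeholders)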
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPattern() {\nstartContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nSTNode openBrace = parseOpenBrace();\nSTToken token = peek();\nif (isEndOfMappingBindingPattern(token.kind)) {\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,\ncloseBrace);\n}\nList bindingPatterns = new ArrayList<>();\nSTNode prevMember = parseMappingBindingPatternMember();\nif (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {\nbindingPatterns.add(prevMember);\n}\nreturn parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);\n}\nprivate STNode parseMappingBindingPattern(STNode openBrace, List bindingPatterns, STNode prevMember) {\nSTToken token = peek();\nSTNode mappingBindingPatternRhs = null;\nwhile (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {\nmappingBindingPatternRhs = parseMappingBindingPatternEnd(token.kind);\nif (mappingBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(mappingBindingPatternRhs);\nprevMember = parseMappingBindingPatternMember();\nif (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {\nbreak;\n}\nbindingPatterns.add(prevMember);\ntoken = peek();\n}\nSTNode restBindingPattern;\nif (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = prevMember;\n} else {\nrestBindingPattern = STNodeFactory.createEmptyNode();\n}\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,\ncloseBrace);\n}\n/**\n* Parse mapping-binding-pattern entry.\n*
\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* field-binding-pattern := field-name : binding-pattern\n* | variable-name
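\n*\n* e.g. 'name: bp' (full form) or just 'name' (variable-name shorthand); names are placeholders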
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPatternMember() {\nSTToken token = peek();\nswitch (token.kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ndefault:\nreturn parseFieldBindingPattern();\n}\n}\nprivate STNode parseMappingBindingPatternEnd() {\nreturn parseMappingBindingPatternEnd(peek().kind);\n}\nprivate STNode parseMappingBindingPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingBindingPatternEnd(solution.tokenKind);\n}\n}\nprivate STNode parseFieldBindingPattern() {\nreturn parseFieldBindingPattern(peek().kind);\n}\n/**\n* Parse field-binding-pattern.\n* field-binding-pattern := field-name : binding-pattern | varname\n*\n* @return field-binding-pattern node\n*/\nprivate STNode parseFieldBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nSTNode fieldBindingPattern = parseFieldBindingPattern(identifier);\nreturn fieldBindingPattern;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldBindingPattern(solution.tokenKind);\n}\n}\nprivate STNode parseFieldBindingPattern(STNode identifier) {\nSTNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);\n}\nSTNode colon = parseColon();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);\n}\nprivate boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse error binding pattern node.\n*
\n* functional-binding-pattern := error ( arg-list-binding-pattern )\n*\n* @return Error binding pattern node.\n*/\nprivate STNode parseErrorBindingPattern() {\nstartContext(ParserRuleContext.FUNCTIONAL_BINDING_PATTERN);\nSTNode typeDesc = parseErrorKeyword();\nreturn parseFunctionalBindingPattern(typeDesc);\n}\n/**\n* Parse functional binding pattern.\n*
\n* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )\n* functionally-constructible-type-reference := error | type-reference
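\n*\n* e.g. 'error(msg, ...rest)' or 'MyError(code = c)', where 'MyError' is a hypothetical type-reference (illustrative placeholders)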
\n*\n* @param typeDesc Functionally constructible type reference\n* @return Functional binding pattern node.\n*/\nprivate STNode parseFunctionalBindingPattern(STNode typeDesc) {\nSTNode openParenthesis = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode argListBindingPatterns = parseArgListBindingPatterns();\nSTNode closeParenthesis = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createFunctionalBindingPatternNode(typeDesc, openParenthesis, argListBindingPatterns,\ncloseParenthesis);\n}\nprivate STNode parseArgListBindingPatterns() {\nList argListBindingPatterns = new ArrayList<>();\nSyntaxKind lastValidArgKind = SyntaxKind.CAPTURE_BINDING_PATTERN;\nSTToken nextToken = peek();\nif (isEndOfParametersList(nextToken.kind)) {\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nargListBindingPatterns.add(parseArgBindingPattern());\nnextToken = peek();\nwhile (!isEndOfParametersList(nextToken.kind)) {\nSTNode argEnd = parseArgsBindingPatternEnd(nextToken.kind);\nif (argEnd == null) {\nbreak;\n}\nnextToken = peek();\nSTNode currentArg = parseArgBindingPattern(nextToken.kind);\nDiagnosticErrorCode errorCode = validateArgBindingPatternOrder(lastValidArgKind, currentArg.kind);\nif (errorCode == null) {\nargListBindingPatterns.add(argEnd);\nargListBindingPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);\n}\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nprivate STNode parseArgsBindingPatternEnd() {\nSTToken nextToken = peek();\nreturn parseArgsBindingPatternEnd(nextToken.kind);\n}\nprivate STNode parseArgsBindingPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_BINDING_PATTERN_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgsBindingPatternEnd(solution.tokenKind);\n}\n}\n/**\n* Parse arg binding pattern.\n*
\n* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]\n* | other-arg-binding-patterns\n* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*\n* positional-arg-binding-pattern := binding-pattern\n* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern] | [rest-binding-pattern]\n* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*\n* named-arg-binding-pattern := arg-name = binding-pattern
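\n*\n* e.g. 'a, b, msg = m, ...rest': positional args first, then named args, then an optional rest arg (illustrative placeholders)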
\n*\n* @return Arg binding pattern\n*/\nprivate STNode parseArgBindingPattern() {\nSTToken nextToken = peek();\nreturn parseArgBindingPattern(nextToken.kind);\n}\nprivate STNode parseArgBindingPattern(SyntaxKind kind) {\nswitch (kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseNamedOrPositionalArgBindingPattern(kind);\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_BINDING_PATTERN);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgBindingPattern(solution.tokenKind);\n}\n}\nprivate STNode parseNamedOrPositionalArgBindingPattern(SyntaxKind nextTokenKind) {\nSTNode argNameOrBindingPattern = parseQualifiedIdentifier(ParserRuleContext.ARG_BINDING_PATTERN_START_IDENT);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nSTNode equal = parseAssignOp();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createNamedArgBindingPatternNode(argNameOrBindingPattern, equal, bindingPattern);\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalBindingPattern(argNameOrBindingPattern);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn createCaptureOrWildcardBP(argNameOrBindingPattern);\n}\n}\nprivate DiagnosticErrorCode validateArgBindingPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\nbreak;\ncase NAMED_ARG_BINDING_PATTERN:\nif (currentArgKind != SyntaxKind.NAMED_ARG_BINDING_PATTERN &&\ncurrentArgKind != SyntaxKind.REST_BINDING_PATTERN) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_BINDING_PATTERN:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nthrow new IllegalStateException(\"Invalid SyntaxKind in an argument\");\n}\nreturn errorCode;\n}\n/*\n* This parses Typed binding patterns and deals with ambiguity between types,\n* and binding patterns. 
An example is 'T[a]'.\n* The ambiguity lies in between:\n* 1) Array Type\n* 2) List binding pattern\n* 3) Member access expression.\n*/\n/**\n* Parse the component after the type-desc, of a typed-binding-pattern.\n*\n* @param typeDesc Starting type-desc of the typed-binding-pattern\n* @return Typed-binding pattern\n*/\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternTypeRhs(nextToken.kind, typeDesc, context, true);\n}\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternTypeRhs(nextToken.kind, typeDesc, context, isRoot);\n}\nprivate STNode parseTypedBindingPatternTypeRhs(SyntaxKind nextTokenKind, STNode typeDesc, ParserRuleContext context,\nboolean isRoot) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode bindingPattern = parseBindingPattern(nextTokenKind);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase OPEN_BRACKET_TOKEN:\nSTNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);\nassert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;\nreturn typedBindingPattern;\ncase CLOSE_PAREN_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nif (!isRoot) {\nreturn typeDesc;\n}\ndefault:\nSolution solution =\nrecover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternTypeRhs(solution.tokenKind, typeDesc, context, isRoot);\n}\n}\n/**\n* Parse typed-binding pattern with list, array-type-desc, or member-access-expr.\n*\n* @param typeDescOrExpr Type desc or the expression at the start\n* @param isTypedBindingPattern Is this is a typed-binding-pattern.\n* @return Parsed node\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nif (isBracketedListEnd(peek().kind)) {\nreturn parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);\n}\nSTNode member = parseBracketedListMember(isTypedBindingPattern);\nSyntaxKind currentNodeType = getBracketedListNodeType(member);\nswitch (currentNodeType) {\ncase ARRAY_TYPE_DESC:\nSTNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);\nreturn typedBindingPattern;\ncase LIST_BINDING_PATTERN:\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase INDEXED_EXPRESSION:\nreturn parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);\ncase NONE:\ndefault:\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd != null) {\nList memberList = new ArrayList<>();\nmemberList.add(member);\nmemberList.add(memberEnd);\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, memberList);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\nSTNode closeBracket = 
parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,\nisTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {\nmember = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode keyExpr = STNodeFactory.createNodeList(member);\nSTNode memberAccessExpr =\nSTNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);\n}\nprivate boolean isBracketedListEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseBracketedListMember(boolean isTypedBindingPattern) {\nreturn parseBracketedListMember(peek().kind, isTypedBindingPattern);\n}\n/**\n* Parse a member of an ambiguous bracketed list. This member could be:\n* 1) Array length\n* 2) Key expression of a member-access-expr\n* 3) A member-binding pattern of a list-binding-pattern.\n*\n* @param nextTokenKind Kind of the next token\n* @param isTypedBindingPattern Is this in a definite typed-binding pattern\n* @return Parsed member node\n*/\nprivate STNode parseBracketedListMember(SyntaxKind nextTokenKind, boolean isTypedBindingPattern) {\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\ncase STRING_LITERAL:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\ncase ELLIPSIS_TOKEN:\ncase OPEN_BRACKET_TOKEN:\nreturn parseStatementStartBracketedListMember();\ncase IDENTIFIER_TOKEN:\nif (isTypedBindingPattern) {\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseListBindingPatternMember();\n}\nreturn identifier;\n}\nbreak;\ndefault:\nif (!isTypedBindingPattern && isValidExpressionStart(nextTokenKind, 1)) {\nbreak;\n}\nParserRuleContext recoverContext =\nisTypedBindingPattern ? 
ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH\n: ParserRuleContext.BRACKETED_LIST_MEMBER;\nSolution solution = recover(peek(), recoverContext, isTypedBindingPattern);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseBracketedListMember(solution.tokenKind, isTypedBindingPattern);\n}\nSTNode expr = parseExpression();\nif (isWildcardBP(expr)) {\nreturn getWildcardBindingPattern(expr);\n}\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE || expr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseListBindingPatternMember();\n}\n}\nreturn expr;\n}\n/**\n* Treat the current node as an array, and parse the remainder of the binding pattern.\n*\n* @param typeDesc Type-desc\n* @param openBracket Open bracket\n* @param member Member\n* @return Parsed node\n*/\nprivate STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {\ntypeDesc = getTypeDescFromExpr(typeDesc);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,\ncontext);\n}\nprivate STNode parseBracketedListMemberEnd() {\nreturn parseBracketedListMemberEnd(peek().kind);\n}\nprivate STNode parseBracketedListMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseBracketedListMemberEnd(solution.tokenKind);\n}\n}\n/**\n* We reach here to break ambiguity of T[a]. 
This could be:\n* 1) Array Type Desc\n* 2) Member access on LHS\n* 3) Typed-binding-pattern\n*\n* @param typeDescOrExpr Type name or the expr that precede the open-bracket.\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Open bracket\n* @param isTypedBindingPattern Is this is a typed-binding-pattern.\n* @return Specific node that matches to T[a], after solving ambiguity.\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrMemberAccessRhs(nextToken.kind, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode parseTypedBindingPatternOrMemberAccessRhs(SyntaxKind nextTokenKind, STNode typeDescOrExpr,\nSTNode openBracket, STNode member, STNode closeBracket,\nboolean isTypedBindingPattern, boolean allowAssignment,\nParserRuleContext context) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nSTNode arrayTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\ncase OPEN_BRACKET_TOKEN:\nif (isTypedBindingPattern) {\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc =\nSTNodeFactory.createArrayTypeDescriptorNode(typeDesc, openBracket, member, closeBracket);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\n}\nSTNode keyExpr = STNodeFactory.createNodeList(member);\nSTNode expr =\nSTNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);\ncase QUESTION_MARK_TOKEN:\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\ntypeDesc = parseComplexTypeDescriptor(arrayTypeDesc,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nreturn parseTypedBindingPatternTypeRhs(typeDesc, context);\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\nreturn parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket,\ncontext, isTypedBindingPattern);\ncase IN_KEYWORD:\nif (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase EQUAL_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nif (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\n}\nkeyExpr = STNodeFactory.createNodeList(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\ncase SEMICOLON_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase CLOSE_BRACE_TOKEN:\ncase COMMA_TOKEN:\nif (context == ParserRuleContext.AMBIGUOUS_STMT) {\nkeyExpr = STNodeFactory.createNodeList(member);\nreturn 
STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(nextTokenKind, closeBracket.kind)) {\nkeyExpr = STNodeFactory.createNodeList(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternOrMemberAccessRhs(solution.tokenKind, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket) {\nSTNode bindingPatterns;\nif (isEmpty(member)) {\nbindingPatterns = STNodeFactory.createEmptyNodeList();\n} else {\nSTNode bindingPattern = getBindingPattern(member);\nbindingPatterns = STNodeFactory.createNodeList(bindingPattern);\n}\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns,\nrestBindingPattern, closeBracket);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\n/**\n* Parse a union or intersection type-desc/binary-expression that involves ambiguous\n* bracketed list in lhs.\n*
\n* e.g: (T[a] & R..) or (T[a] | R.. )
\n* Complexity occurs in scenarios such as T[a] |/& R[b]. If the token after this\n* is another binding-pattern, then (T[a] |/& R[b]) becomes the type-desc. However,\n* if the token follows this is an equal or semicolon, then (T[a] |/& R) becomes\n* the type-desc, and [b] becomes the binding pattern.\n*\n* @param typeDescOrExpr Type desc or the expression\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Close bracket\n* @param context COntext in which the typed binding pattern occurs\n* @return Parsed node\n*/\nprivate STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeOrAndToken = parseUnionOrIntersectionToken();\nSTNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);\nif (isTypedBindingPattern || typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);\nlhsTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);\nSTTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;\nSTNode newTypeDesc;\nif (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {\nnewTypeDesc = STNodeFactory.createUnionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken,\nrhsTypedBindingPattern.typeDescriptor);\n} else {\nnewTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken,\nrhsTypedBindingPattern.typeDescriptor);\n}\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);\n} else {\nSTNode keyExpr = getExpression(member);\nSTNode containerExpr = getExpression(typeDescOrExpr);\nSTNode lhsExpr =\nSTNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,\ntypedBindingPatternOrExpr);\n}\n}\nprivate STNode createArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {\nif (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\nSTUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);\nlhsTypeDesc = STNodeFactory.createUnionTypeDescriptorNode(unionTypeDesc.leftTypeDesc,\nunionTypeDesc.pipeToken, middleTypeDesc);\n} else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\nSTIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc =\ncreateArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);\nlhsTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(intersectionTypeDesc.leftTypeDesc,\nintersectionTypeDesc.bitwiseAndToken, middleTypeDesc);\n} else {\nlhsTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(lhsTypeDesc, openBracket, member, closeBracket);\n}\nreturn lhsTypeDesc;\n}\n/**\n* Parse union (|) or intersection (&) type operator.\n*\n* @return pipe or bitwise and token\n*/\nprivate STNode parseUnionOrIntersectionToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* 
Infer the type of the ambiguous bracketed list, based on the type of the member.\n*\n* @param memberNode Member node\n* @return Inferred type of the bracketed list\n*/\nprivate SyntaxKind getBracketedListNodeType(STNode memberNode) {\nif (isEmpty(memberNode)) {\nreturn SyntaxKind.NONE;\n}\nif (isDefiniteTypeDesc(memberNode.kind)) {\nreturn SyntaxKind.TUPLE_TYPE_DESC;\n}\nswitch (memberNode.kind) {\ncase ASTERISK_TOKEN:\nreturn SyntaxKind.ARRAY_TYPE_DESC;\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase QUALIFIED_NAME_REFERENCE:\ncase REST_TYPE:\nreturn SyntaxKind.TUPLE_TYPE_DESC;\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase SIMPLE_NAME_REFERENCE:\ncase BRACKETED_LIST:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.NONE;\ndefault:\nreturn SyntaxKind.INDEXED_EXPRESSION;\n}\n}\n/*\n* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.\n* The ambiguity lies in between:\n* 1) Assignment that starts with list binding pattern\n* 2) Var-decl statement that starts with tuple type\n* 3) Statement that starts with list constructor, such as sync-send, etc.\n*/\n/**\n* Parse any statement that starts with an open-bracket.\n*\n* @param annots Annotations attached to the statement.\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {\nstartContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);\nreturn parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);\n}\nprivate STNode parseMemberBracketedList(boolean possibleMappingField) {\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStatementStartsWithOpenBracket(annots, false, possibleMappingField);\n}\n/**\n* The bracketed list at the start of a statement can be one of the following.\n* 1) List binding pattern\n* 2) Tuple type\n* 3) List constructor\n*\n* @param isRoot Is this the root of the list\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {\nstartContext(ParserRuleContext.STMT_START_BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nwhile (!isBracketedListEnd(peek().kind)) {\nSTNode member = parseStatementStartBracketedListMember();\nSyntaxKind currentNodeType = getStmtStartBracketedListType(member);\nswitch (currentNodeType) {\ncase TUPLE_TYPE_DESC:\nreturn parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\ncase LIST_BINDING_PATTERN:\nreturn parseAsListBindingPattern(openBracket, memberList, member, isRoot);\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nreturn parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);\ncase TUPLE_TYPE_DESC_OR_LIST_CONST:\nreturn parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);\ncase NONE:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bracketedList = parseStatementStartBracketedList(annots, openBracket, memberList, closeBracket, isRoot,\npossibleMappingField);\nreturn bracketedList;\n}\nprivate STNode 
parseStatementStartBracketedListMember() {\nSTToken nextToken = peek();\nreturn parseStatementStartBracketedListMember(nextToken.kind);\n}\n/**\n* Parse a member of a list-binding-pattern, tuple-type-desc, or\n* list-constructor-expr, when the parent is ambiguous.\n*\n* @param nextTokenKind Kind of the next token.\n* @return Parsed node\n*/\nprivate STNode parseStatementStartBracketedListMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseMemberBracketedList(false);\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (isWildcardBP(identifier)) {\nSTNode varName = ((STSimpleNameReferenceNode) identifier).name;\nreturn getWildcardBindingPattern(varName);\n}\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) {\nSTNode ellipsis = parseEllipsis();\nreturn STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPatterOrMappingConstructor();\ncase ERROR_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseErrorConstructorExpr();\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase ELLIPSIS_TOKEN:\nreturn parseListBindingPatternMember();\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseExpression(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.LT_TOKEN) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nreturn parseExpression(false);\ncase OPEN_PAREN_TOKEN:\nreturn parseTypeDescOrExpr();\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseExpression(false);\n}\nif (isTypeStartingToken(nextTokenKind)) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nSolution solution = recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatementStartBracketedListMember(solution.tokenKind);\n}\n}\nprivate STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List memberList,\nSTNode member, boolean isRoot) {\nmemberList.add(member);\nSTNode memberEnd = parseBracketedListMemberEnd();\nSTNode tupleTypeDescOrListCons;\nif (memberEnd == null) {\nSTNode closeBracket = parseCloseBracket();\ntupleTypeDescOrListCons =\nparseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);\n} else {\nmemberList.add(memberEnd);\ntupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);\n}\nreturn tupleTypeDescOrListCons;\n}\n/**\n* Parse tuple type desc or list constructor.\n*\n* @return Parsed node\n*/\nprivate STNode parseTupleTypeDescOrListConstructor(STNode annots) {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nreturn parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);\n}\nprivate STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List memberList,\nboolean isRoot) {\nSTToken nextToken = peek();\nwhile (!isBracketedListEnd(nextToken.kind)) {\nSTNode member = 
parseTupleTypeDescOrListConstructorMember(nextToken.kind, annots);\nSyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);\nswitch (currentNodeType) {\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase TUPLE_TYPE_DESC:\nreturn parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\ncase TUPLE_TYPE_DESC_OR_LIST_CONST:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBracket = parseCloseBracket();\nreturn parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);\n}\nprivate STNode parseTupleTypeDescOrListConstructorMember(SyntaxKind nextTokenKind, STNode annots) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseTupleTypeDescOrListConstructor(annots);\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) {\nSTNode ellipsis = parseEllipsis();\nreturn STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingConstructorExpr();\ncase ERROR_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseErrorConstructorExpr();\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseExpression(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.LT_TOKEN) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nreturn parseExpression(false);\ncase OPEN_PAREN_TOKEN:\nreturn parseTypeDescOrExpr();\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseExpression(false);\n}\nif (isTypeStartingToken(nextTokenKind)) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nSolution solution = recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatementStartBracketedListMember(solution.tokenKind);\n}\n}\nprivate SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {\nreturn getStmtStartBracketedListType(memberNode);\n}\nprivate STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List members,\nSTNode closeBracket, boolean isRoot) {\nSTNode tupleTypeOrListConst;\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(peek().kind, closeBracket.kind) ||\n(isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {\nmembers = getExpressionList(members);\nSTNode memberExpressions = STNodeFactory.createNodeList(members);\ntupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,\nmemberExpressions, closeBracket);\nbreak;\n}\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\nSTNode tupleTypeDesc 
=\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\ntupleTypeOrListConst =\nparseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n}\nendContext();\nif (!isRoot) {\nreturn tupleTypeOrListConst;\n}\nreturn parseStmtStartsWithTupleTypeOrExprRhs(null, tupleTypeOrListConst, isRoot);\n}", + "context_before": "class BallerinaParser extends AbstractParser {\nprivate static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.DEFAULT;\nprotected BallerinaParser(AbstractTokenReader tokenReader) {\nsuper(tokenReader, new BallerinaParserErrorHandler(tokenReader));\n}\n/**\n* Start parsing the given input.\n*\n* @return Parsed node\n*/\n@Override\npublic STNode parse() {\nreturn parseCompUnit();\n}\n/**\n* Start parsing the input from a given context. Supported starting points are:\n*
\n* - Module part (a file)\n* - Top level node\n* - Statement\n* - Expression
\n*\n* @param context Context to start parsing\n* @return Parsed node\n*/\npublic STNode parse(ParserRuleContext context) {\nswitch (context) {\ncase COMP_UNIT:\nreturn parseCompUnit();\ncase TOP_LEVEL_NODE:\nstartContext(ParserRuleContext.COMP_UNIT);\nreturn parseTopLevelNode();\ncase STATEMENT:\nstartContext(ParserRuleContext.COMP_UNIT);\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nreturn parseStatement();\ncase EXPRESSION:\nstartContext(ParserRuleContext.COMP_UNIT);\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nstartContext(ParserRuleContext.STATEMENT);\nreturn parseExpression();\ndefault:\nthrow new UnsupportedOperationException(\"Cannot start parsing from: \" + context);\n}\n}\n/**\n* Resume the parsing from the given context.\n*\n* @param context Context to resume parsing\n* @param args Arguments that requires to continue parsing from the given parser context\n* @return Parsed node\n*/\n@Override\npublic STNode resumeParsing(ParserRuleContext context, Object... args) {\nswitch (context) {\ncase FUNC_BODY:\nreturn parseFunctionBody((boolean) args[0]);\ncase OPEN_BRACE:\nreturn parseOpenBrace();\ncase CLOSE_BRACE:\nreturn parseCloseBrace();\ncase FUNC_NAME:\nreturn parseFunctionName();\ncase OPEN_PARENTHESIS:\ncase ARG_LIST_START:\nreturn parseOpenParenthesis(context);\ncase SIMPLE_TYPE_DESCRIPTOR:\nreturn parseSimpleTypeDescriptor();\ncase ASSIGN_OP:\nreturn parseAssignOp();\ncase EXTERNAL_KEYWORD:\nreturn parseExternalKeyword();\ncase SEMICOLON:\nreturn parseSemicolon();\ncase CLOSE_PARENTHESIS:\nreturn parseCloseParenthesis();\ncase VARIABLE_NAME:\nreturn parseVariableName();\ncase TERMINAL_EXPRESSION:\nreturn parseTerminalExpression((STNode) args[0], (boolean) args[1], (boolean) args[2],\n(boolean) args[3]);\ncase STATEMENT:\nreturn parseStatement();\ncase STATEMENT_WITHOUT_ANNOTS:\nreturn parseStatement((STNode) args[0]);\ncase EXPRESSION_RHS:\nreturn parseExpressionRhs((OperatorPrecedence) args[0], (STNode) args[1], (boolean) args[2],\n(boolean) args[3], (boolean) args[4], (boolean) args[5]);\ncase PARAMETER_START:\nreturn parseParameter((SyntaxKind) args[0], (STNode) args[1], (int) args[2], (boolean) args[3]);\ncase PARAMETER_WITHOUT_ANNOTS:\nreturn parseParamGivenAnnots((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (int) args[3],\n(boolean) args[4]);\ncase AFTER_PARAMETER_TYPE:\nreturn parseAfterParamType((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4], (boolean) args[5]);\ncase PARAMETER_NAME_RHS:\nreturn parseParameterRhs((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4], (STNode) args[5]);\ncase TOP_LEVEL_NODE:\nreturn parseTopLevelNode();\ncase TOP_LEVEL_NODE_WITHOUT_METADATA:\nreturn parseTopLevelNode((STNode) args[0]);\ncase TOP_LEVEL_NODE_WITHOUT_MODIFIER:\nreturn parseTopLevelNode((STNode) args[0], (STNode) args[1]);\ncase TYPE_NAME_OR_VAR_NAME:\ncase RECORD_FIELD_NAME_OR_TYPE_NAME:\ncase TYPE_REFERENCE:\ncase ANNOT_REFERENCE:\ncase FIELD_ACCESS_IDENTIFIER:\nreturn parseQualifiedIdentifier(context, (boolean) args[0]);\ncase VAR_DECL_STMT_RHS:\nreturn parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (boolean) args[3]);\ncase FIELD_DESCRIPTOR_RHS:\nreturn parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]);\ncase RECORD_BODY_START:\nreturn parseRecordBodyStartDelimiter();\ncase TYPE_DESCRIPTOR:\nreturn parseTypeDescriptorInternal((ParserRuleContext) args[0], (boolean) args[1]);\ncase 
OBJECT_MEMBER_START:\nreturn parseObjectMember();\ncase OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:\nreturn parseObjectMethodOrField((STNode) args[0], (STNode) args[1]);\ncase OBJECT_FIELD_RHS:\nreturn parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4]);\ncase OBJECT_TYPE_QUALIFIER:\nreturn parseObjectTypeQualifiers();\ncase OBJECT_KEYWORD:\nreturn parseObjectKeyword();\ncase TYPE_NAME:\nreturn parseTypeName();\ncase IF_KEYWORD:\nreturn parseIfKeyword();\ncase ELSE_KEYWORD:\nreturn parseElseKeyword();\ncase ELSE_BODY:\nreturn parseElseBody();\ncase WHILE_KEYWORD:\nreturn parseWhileKeyword();\ncase PANIC_KEYWORD:\nreturn parsePanicKeyword();\ncase IMPORT_DECL_RHS:\nreturn parseImportDecl((STNode) args[0], (STNode) args[1]);\ncase IMPORT_PREFIX:\nreturn parseImportPrefix();\ncase IMPORT_MODULE_NAME:\ncase IMPORT_ORG_OR_MODULE_NAME:\ncase VARIABLE_REF:\ncase SERVICE_NAME:\ncase IMPLICIT_ANON_FUNC_PARAM:\ncase MAPPING_FIELD_NAME:\ncase RECEIVE_FIELD_NAME:\ncase MODULE_ENUM_NAME:\ncase ENUM_MEMBER_NAME:\nreturn parseIdentifier(context);\ncase IMPORT_KEYWORD:\nreturn parseImportKeyword();\ncase SLASH:\nreturn parseSlashToken();\ncase DOT:\nreturn parseDotToken();\ncase IMPORT_VERSION_DECL:\nreturn parseVersion();\ncase VERSION_KEYWORD:\nreturn parseVersionKeyword();\ncase VERSION_NUMBER:\nreturn parseVersionNumber();\ncase DECIMAL_INTEGER_LITERAL:\ncase MAJOR_VERSION:\ncase MINOR_VERSION:\ncase PATCH_VERSION:\nreturn parseDecimalIntLiteral(context);\ncase IMPORT_SUB_VERSION:\nreturn parseSubVersion(context);\ncase IMPORT_PREFIX_DECL:\nreturn parseImportPrefixDecl();\ncase AS_KEYWORD:\nreturn parseAsKeyword();\ncase CONTINUE_KEYWORD:\nreturn parseContinueKeyword();\ncase BREAK_KEYWORD:\nreturn parseBreakKeyword();\ncase RETURN_KEYWORD:\nreturn parseReturnKeyword();\ncase MAPPING_FIELD:\ncase FIRST_MAPPING_FIELD:\nreturn parseMappingField((ParserRuleContext) args[0]);\ncase SPECIFIC_FIELD_RHS:\nreturn parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]);\ncase STRING_LITERAL:\nreturn parseStringLiteral();\ncase COLON:\nreturn parseColon();\ncase OPEN_BRACKET:\nreturn parseOpenBracket();\ncase RESOURCE_DEF:\nreturn parseResource();\ncase OPTIONAL_SERVICE_NAME:\nreturn parseServiceName();\ncase SERVICE_KEYWORD:\nreturn parseServiceKeyword();\ncase ON_KEYWORD:\nreturn parseOnKeyword();\ncase RESOURCE_KEYWORD:\nreturn parseResourceKeyword();\ncase LISTENER_KEYWORD:\nreturn parseListenerKeyword();\ncase NIL_TYPE_DESCRIPTOR:\nreturn parseNilTypeDescriptor();\ncase COMPOUND_ASSIGNMENT_STMT:\nreturn parseCompoundAssignmentStmt();\ncase TYPEOF_KEYWORD:\nreturn parseTypeofKeyword();\ncase ARRAY_LENGTH:\nreturn parseArrayLength();\ncase IS_KEYWORD:\nreturn parseIsKeyword();\ncase STMT_START_WITH_EXPR_RHS:\nreturn parseStatementStartWithExprRhs((STNode) args[0]);\ncase COMMA:\nreturn parseComma();\ncase CONST_DECL_TYPE:\nreturn parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]);\ncase BINDING_PATTERN_OR_EXPR_RHS:\nreturn parseTypedBindingPatternOrExprRhs((STNode) args[0], (boolean) args[1]);\ncase LT:\nreturn parseLTToken();\ncase GT:\nreturn parseGTToken();\ncase RECORD_FIELD_OR_RECORD_END:\nreturn parseFieldOrRestDescriptor((boolean) args[0]);\ncase ANNOTATION_KEYWORD:\nreturn parseAnnotationKeyword();\ncase ANNOT_DECL_OPTIONAL_TYPE:\nreturn parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3]);\ncase ANNOT_DECL_RHS:\nreturn parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], 
(STNode) args[2], (STNode) args[3],\n(STNode) args[4]);\ncase ANNOT_OPTIONAL_ATTACH_POINTS:\nreturn parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (STNode) args[4], (STNode) args[5]);\ncase SOURCE_KEYWORD:\nreturn parseSourceKeyword();\ncase ATTACH_POINT_IDENT:\nreturn parseAttachPointIdent((STNode) args[0]);\ncase IDENT_AFTER_OBJECT_IDENT:\nreturn parseIdentAfterObjectIdent();\ncase FUNCTION_IDENT:\nreturn parseFunctionIdent();\ncase FIELD_IDENT:\nreturn parseFieldIdent();\ncase ATTACH_POINT_END:\nreturn parseAttachPointEnd();\ncase XMLNS_KEYWORD:\nreturn parseXMLNSKeyword();\ncase XML_NAMESPACE_PREFIX_DECL:\nreturn parseXMLDeclRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase NAMESPACE_PREFIX:\nreturn parseNamespacePrefix();\ncase WORKER_KEYWORD:\nreturn parseWorkerKeyword();\ncase WORKER_NAME:\nreturn parseWorkerName();\ncase FORK_KEYWORD:\nreturn parseForkKeyword();\ncase TRAP_KEYWORD:\nreturn parseTrapKeyword();\ncase IN_KEYWORD:\nreturn parseInKeyword();\ncase FOREACH_KEYWORD:\nreturn parseForEachKeyword();\ncase TABLE_KEYWORD:\nreturn parseTableKeyword();\ncase KEY_KEYWORD:\nreturn parseKeyKeyword();\ncase TABLE_KEYWORD_RHS:\nreturn parseTableConstructorOrQuery((STNode) args[0], (boolean) args[1]);\ncase ERROR_KEYWORD:\nreturn parseErrorKeyword();\ncase LET_KEYWORD:\nreturn parseLetKeyword();\ncase STREAM_KEYWORD:\nreturn parseStreamKeyword();\ncase STREAM_TYPE_FIRST_PARAM_RHS:\nreturn parseStreamTypeParamsNode((STNode) args[0], (STNode) args[1]);\ncase TEMPLATE_START:\ncase TEMPLATE_END:\nreturn parseBacktickToken(context);\ncase KEY_CONSTRAINTS_RHS:\nreturn parseKeyConstraint((STNode) args[0]);\ncase FUNCTION_KEYWORD_RHS:\nreturn parseFunctionKeywordRhs((STNode) args[0], (STNode) args[1], (boolean) args[2], (boolean) args[3],\n(STNode[]) args[4]);\ncase RETURNS_KEYWORD:\nreturn parseReturnsKeyword();\ncase NEW_KEYWORD:\nreturn parseNewKeyword();\ncase FROM_KEYWORD:\nreturn parseFromKeyword();\ncase WHERE_KEYWORD:\nreturn parseWhereKeyword();\ncase SELECT_KEYWORD:\nreturn parseSelectKeyword();\ncase TABLE_CONSTRUCTOR_OR_QUERY_START:\nreturn parseTableConstructorOrQuery((boolean) args[0]);\ncase TABLE_CONSTRUCTOR_OR_QUERY_RHS:\nreturn parseTableConstructorOrQueryRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase QUERY_PIPELINE_RHS:\nreturn parseIntermediateClause((boolean) args[0]);\ncase ANON_FUNC_BODY:\nreturn parseAnonFuncBody((boolean) args[0]);\ncase CLOSE_BRACKET:\nreturn parseCloseBracket();\ncase ARG_START_OR_ARG_LIST_END:\nreturn parseArgument();\ncase ARG_END:\nreturn parseArgEnd();\ncase MAPPING_FIELD_END:\nreturn parseMappingFieldEnd();\ncase FUNCTION_KEYWORD:\nreturn parseFunctionKeyword();\ncase FIELD_OR_REST_DESCIPTOR_RHS:\nreturn parseFieldOrRestDescriptorRhs((STNode) args[0], (STNode) args[1]);\ncase TYPE_DESC_IN_TUPLE_RHS:\nreturn parseTupleMemberRhs();\ncase LIST_BINDING_PATTERN_MEMBER_END:\nreturn parseListBindingPatternMemberRhs();\ncase MAPPING_BINDING_PATTERN_END:\nreturn parseMappingBindingPatternEnd();\ncase FIELD_BINDING_PATTERN_NAME:\nreturn parseFieldBindingPattern();\ncase CONSTANT_EXPRESSION_START:\nreturn parseSimpleConstExprInternal();\ncase LIST_CONSTRUCTOR_MEMBER_END:\nreturn parseListConstructorMemberEnd();\ncase NIL_OR_PARENTHESISED_TYPE_DESC_RHS:\nreturn parseNilOrParenthesisedTypeDescRhs((STNode) args[0]);\ncase ANON_FUNC_PARAM_RHS:\nreturn parseImplicitAnonFuncParamEnd();\ncase LIST_BINDING_PATTERN:\nreturn parseListBindingPattern();\ncase 
BINDING_PATTERN:\nreturn parseBindingPattern();\ncase PEER_WORKER_NAME:\nreturn parsePeerWorkerName();\ncase SYNC_SEND_TOKEN:\nreturn parseSyncSendToken();\ncase LEFT_ARROW_TOKEN:\nreturn parseLeftArrowToken();\ncase RECEIVE_WORKERS:\nreturn parseReceiveWorkers();\ncase WAIT_KEYWORD:\nreturn parseWaitKeyword();\ncase WAIT_FUTURE_EXPR_END:\nreturn parseWaitFutureExprEnd((int) args[0]);\ncase WAIT_FIELD_NAME:\nreturn parseWaitField();\ncase WAIT_FIELD_END:\nreturn parseWaitFieldEnd();\ncase ANNOT_CHAINING_TOKEN:\nreturn parseAnnotChainingToken();\ncase DO_KEYWORD:\nreturn parseDoKeyword();\ncase MEMBER_ACCESS_KEY_EXPR_END:\nreturn parseMemberAccessKeyExprEnd();\ncase OPTIONAL_CHAINING_TOKEN:\nreturn parseOptionalChainingToken();\ncase RETRY_KEYWORD_RHS:\nreturn parseRetryKeywordRhs((STNode) args[0]);\ncase RETRY_TYPE_PARAM_RHS:\nreturn parseRetryTypeParamRhs((STNode) args[0], (STNode) args[1]);\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionKeyword();\ncase COMMIT_KEYWORD:\nreturn parseCommitKeyword();\ncase RETRY_KEYWORD:\nreturn parseRetryKeyword();\ncase ROLLBACK_KEYWORD:\nreturn parseRollbackKeyword();\ncase RETRY_BODY:\nreturn parseRetryBody();\ncase ENUM_MEMBER_END:\nreturn parseEnumMemberEnd();\ncase BRACKETED_LIST_MEMBER_END:\nreturn parseBracketedListMemberEnd();\ncase STMT_START_BRACKETED_LIST_MEMBER:\nreturn parseStatementStartBracketedListMember();\ncase TYPED_BINDING_PATTERN_TYPE_RHS:\nreturn parseTypedBindingPatternTypeRhs((STNode) args[0], (ParserRuleContext) args[1],\n(boolean) args[2]);\ncase BRACKETED_LIST_RHS:\nreturn parseTypedBindingPatternOrMemberAccessRhs((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (boolean) args[4], (boolean) args[5], (ParserRuleContext) args[6]);\ncase UNION_OR_INTERSECTION_TOKEN:\nreturn parseUnionOrIntersectionToken();\ncase BRACKETED_LIST_MEMBER:\ncase LIST_BINDING_MEMBER_OR_ARRAY_LENGTH:\nreturn parseBracketedListMember((boolean) args[0]);\ncase BASE16_KEYWORD:\nreturn parseBase16Keyword();\ncase BASE64_KEYWORD:\nreturn parseBase64Keyword();\ncase DOT_LT_TOKEN:\nreturn parseDotLTToken();\ncase SLASH_LT_TOKEN:\nreturn parseSlashLTToken();\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\nreturn parseDoubleSlashDoubleAsteriskLTToken();\ncase XML_ATOMIC_NAME_PATTERN_START:\nreturn parseXMLAtomicNamePatternBody();\ncase BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS:\nreturn parseBracedExprOrAnonFuncParamRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase READONLY_KEYWORD:\nreturn parseReadonlyKeyword();\ncase SPECIFIC_FIELD:\nreturn parseSpecificField((STNode) args[0]);\ncase OPTIONAL_MATCH_GUARD:\nreturn parseMatchGuard();\ncase MATCH_PATTERN_START:\nreturn parseMatchPattern();\ncase MATCH_PATTERN_RHS:\nreturn parseMatchPatternEnd();\ncase ENUM_MEMBER_RHS:\nreturn parseEnumMemberRhs((STNode) args[0], (STNode) args[1]);\ncase RECEIVE_FIELD:\nreturn parseReceiveField();\ncase PUBLIC_KEYWORD:\nreturn parseQualifier();\ncase PARAM_END:\nreturn parseParameterRhs();\ncase ELLIPSIS:\nreturn parseEllipsis();\ncase BINARY_OPERATOR:\nreturn parseBinaryOperator();\ncase TYPE_KEYWORD:\nreturn parseTypeKeyword();\ncase CLOSED_RECORD_BODY_START:\nreturn parseClosedRecordBodyStart();\ncase CLOSED_RECORD_BODY_END:\nreturn parseClosedRecordBodyEnd();\ncase QUESTION_MARK:\nreturn parseQuestionMark();\ncase FINAL_KEYWORD:\nreturn parseFinalKeyword();\ncase CLIENT_KEYWORD:\nreturn parseClientKeyword();\ncase ABSTRACT_KEYWORD:\nreturn parseAbstractKeyword();\ncase REMOTE_KEYWORD:\nreturn parseRemoteKeyword();\ncase 
CHECKING_KEYWORD:\nreturn parseCheckingKeyword();\ncase COMPOUND_BINARY_OPERATOR:\nreturn parseCompoundBinaryOperator();\ncase CONST_DECL_RHS:\nreturn parseConstantOrListenerDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (boolean) args[4]);\ncase CONST_KEYWORD:\nreturn parseConstantKeyword();\ncase UNARY_OPERATOR:\nreturn parseUnaryOperator();\ncase AT:\nreturn parseAtToken();\ncase REMOTE_CALL_OR_ASYNC_SEND_RHS:\nreturn parseRemoteCallOrAsyncSendActionRhs((STNode) args[0], (boolean) args[1], (STNode) args[2]);\ncase DEFAULT_KEYWORD:\nreturn parseDefaultKeyword();\ncase RIGHT_ARROW:\nreturn parseRightArrow();\ncase PARAMETERIZED_TYPE:\nreturn parseParameterizedTypeKeyword();\ncase ANNOTATION_TAG:\nreturn parseAnnotationTag();\ncase ATTACH_POINT:\nreturn parseAnnotationAttachPoint();\ncase LOCK_KEYWORD:\nreturn parseLockKeyword();\ncase PIPE:\nreturn parsePipeToken();\ncase STRING_KEYWORD:\nreturn parseStringKeyword();\ncase XML_KEYWORD:\nreturn parseXMLKeyword();\ncase INTERPOLATION_START_TOKEN:\nreturn parseInterpolationStart();\ncase EXPR_FUNC_BODY_START:\nreturn parseDoubleRightArrow();\ncase START_KEYWORD:\nreturn parseStartKeyword();\ncase FLUSH_KEYWORD:\nreturn parseFlushKeyword();\ncase ENUM_KEYWORD:\nreturn parseEnumKeyword();\ncase MATCH_KEYWORD:\nreturn parseMatchKeyword();\ncase RECORD_KEYWORD:\nreturn parseRecordKeyword();\ncase LIST_MATCH_PATTERN_MEMBER_RHS:\nreturn parseListMatchPatternMemberRhs();\ncase LIST_BINDING_PATTERN_MEMBER:\nreturn parseListBindingPatternMember();\ncase FIELD_MATCH_PATTERN_MEMBER:\nreturn parseFieldMatchPatternMember();\ncase FIELD_MATCH_PATTERN_MEMBER_RHS:\nreturn parseFieldMatchPatternRhs();\ncase FUNC_MATCH_PATTERN_OR_CONST_PATTERN:\nreturn parseFunctionalMatchPatternOrConsPattern((STNode) args[0]);\ncase ARG_MATCH_PATTERN:\nreturn parseArgMatchPattern();\ncase ARG_MATCH_PATTERN_RHS:\nreturn parseArgMatchPatternRhs();\ncase ARG_BINDING_PATTERN:\nreturn parseArgBindingPattern();\ndefault:\nthrow new IllegalStateException(\"cannot resume parsing the rule: \" + context);\n}\n}\n/*\n* Private methods.\n*/\n/**\n* Parse a given input and returns the AST. 
Starts parsing from the top of a compilation unit.\n*\n* @return Parsed node\n*/\nprivate STNode parseCompUnit() {\nstartContext(ParserRuleContext.COMP_UNIT);\nSTToken token = peek();\nList otherDecls = new ArrayList<>();\nList importDecls = new ArrayList<>();\nboolean processImports = true;\nwhile (token.kind != SyntaxKind.EOF_TOKEN) {\nSTNode decl = parseTopLevelNode(token.kind);\nif (decl == null) {\nbreak;\n}\nif (decl.kind == SyntaxKind.IMPORT_DECLARATION) {\nif (processImports) {\nimportDecls.add(decl);\n} else {\nupdateLastNodeInListWithInvalidNode(otherDecls, decl,\nDiagnosticErrorCode.ERROR_IMPORT_DECLARATION_AFTER_OTHER_DECLARATIONS);\n}\n} else {\nif (processImports) {\nprocessImports = false;\n}\notherDecls.add(decl);\n}\ntoken = peek();\n}\nSTToken eof = consume();\nendContext();\nreturn STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls),\nSTNodeFactory.createNodeList(otherDecls), eof);\n}\n/**\n* Parse top level node having an optional modifier preceding it.\n*\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode() {\nSTToken token = peek();\nreturn parseTopLevelNode(token.kind);\n}\nprotected STNode parseTopLevelNode(SyntaxKind tokenKind) {\nSTNode metadata;\nswitch (tokenKind) {\ncase EOF_TOKEN:\nreturn null;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(tokenKind);\nreturn parseTopLevelNode(metadata);\ncase IMPORT_KEYWORD:\ncase FINAL_KEYWORD:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase XMLNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase ENUM_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(createEmptyMetadata(), null);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nmetadata = createEmptyMetadata();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE);\nif (solution.action == Action.KEEP) {\nmetadata = STNodeFactory.createEmptyNodeList();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTopLevelNode(solution.tokenKind);\n}\nreturn parseTopLevelNode(tokenKind, metadata);\n}\n/**\n* Parse top level node having an optional modifier preceding it, given the next token kind.\n*\n* @param metadata Next token kind\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode(STNode metadata) {\nSTToken nextToken = peek();\nreturn parseTopLevelNode(nextToken.kind, metadata);\n}\nprivate STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) {\nSTNode qualifier = null;\nswitch (tokenKind) {\ncase EOF_TOKEN:\nif (metadata != null) {\naddInvalidNodeToNextToken(metadata, DiagnosticErrorCode.ERROR_INVALID_METADATA);\n}\nreturn null;\ncase PUBLIC_KEYWORD:\nqualifier = parseQualifier();\ntokenKind = peek().kind;\nbreak;\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase IMPORT_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase XMLNS_KEYWORD:\ncase ENUM_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nbreak;\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(metadata, null);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, 
metadata);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nreturn parseTopLevelNode(solution.tokenKind, metadata);\n}\nreturn parseTopLevelNode(tokenKind, metadata, qualifier);\n}\n/**\n* Check whether the cursor is at the start of a module level var-decl.\n*\n* @param lookahead Offset of the token to to check\n* @return true if the cursor is at the start of a module level var-decl.\n* false otherwise.\n*/\nprivate boolean isModuleVarDeclStart(int lookahead) {\nSTToken nextToken = peek(lookahead + 1);\nswitch (nextToken.kind) {\ncase EQUAL_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn true;\ncase IDENTIFIER_TOKEN:\nswitch (peek(lookahead + 2).kind) {\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\ncase COLON_TOKEN:\nif (lookahead > 1) {\nreturn false;\n}\nif (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) {\nreturn false;\n}\nreturn isModuleVarDeclStart(lookahead + 2);\ndefault:\nreturn false;\n}\n}\n/**\n* Parse import declaration.\n*
\n* import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;\n*\n* @return Parsed node\n*/\nprivate STNode parseImportDecl() {\nstartContext(ParserRuleContext.IMPORT_DECL);\nthis.tokenReader.startMode(ParserMode.IMPORT);\nSTNode importKeyword = parseImportKeyword();\nSTNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME);\nSTToken token = peek();\nSTNode importDecl = parseImportDecl(token.kind, importKeyword, identifier);\nthis.tokenReader.endMode();\nendContext();\nreturn importDecl;\n}\n/**\n* Parse import keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseImportKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IMPORT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse identifier.\n*\n* @return Parsed node\n*/\nprivate STNode parseIdentifier(ParserRuleContext currentCtx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else if (token.kind == SyntaxKind.MAP_KEYWORD) {\nSTToken mapKeyword = consume();\nreturn STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\n} else {\nSolution sol = recover(token, currentCtx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse RHS of the import declaration. This includes the components after the\n* starting identifier (org-name/module-name) of the import decl.\n*\n* @param importKeyword Import keyword\n* @param identifier Org-name or the module name\n* @return Parsed node\n*/\nprivate STNode parseImportDecl(STNode importKeyword, STNode identifier) {\nSTToken nextToken = peek();\nreturn parseImportDecl(nextToken.kind, importKeyword, identifier);\n}\nprivate STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) {\nSTNode orgName;\nSTNode moduleName;\nSTNode version;\nSTNode alias;\nswitch (tokenKind) {\ncase SLASH_TOKEN:\nSTNode slash = parseSlashToken();\norgName = STNodeFactory.createImportOrgNameNode(identifier, slash);\nmoduleName = parseModuleName();\nversion = parseVersion();\nalias = parseImportPrefixDecl();\nbreak;\ncase DOT_TOKEN:\ncase VERSION_KEYWORD:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = parseVersion();\nalias = parseImportPrefixDecl();\nbreak;\ncase AS_KEYWORD:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = STNodeFactory.createEmptyNode();\nalias = parseImportPrefixDecl();\nbreak;\ncase SEMICOLON_TOKEN:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = STNodeFactory.createEmptyNode();\nalias = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseImportDecl(solution.tokenKind, importKeyword, identifier);\n}\nSTNode semicolon = parseSemicolon();\nreturn STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon);\n}\n/**\n* parse slash token.\n*\n* @return Parsed node\n*/\nprivate STNode parseSlashToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SLASH_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SLASH);\nreturn 
sol.recoveredNode;\n}\n}\n/**\n* Parse dot token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDotToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DOT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.DOT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse module name of a import declaration.\n*\n* @return Parsed node\n*/\nprivate STNode parseModuleName() {\nSTNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME);\nreturn parseModuleName(peek().kind, moduleNameStart);\n}\n/**\n* Parse import module name of a import declaration, given the module name start identifier.\n*\n* @param moduleNameStart Starting identifier of the module name\n* @return Parsed node\n*/\nprivate STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) {\nList moduleNameParts = new ArrayList<>();\nmoduleNameParts.add(moduleNameStart);\nwhile (!isEndOfImportModuleName(nextTokenKind)) {\nmoduleNameParts.add(parseDotToken());\nmoduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME));\nnextTokenKind = peek().kind;\n}\nreturn STNodeFactory.createNodeList(moduleNameParts);\n}\nprivate boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) {\nreturn nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN;\n}\nprivate boolean isEndOfImportDecl(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CONST_KEYWORD:\ncase EOF_TOKEN:\ncase SERVICE_KEYWORD:\ncase IMPORT_KEYWORD:\ncase FINAL_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse version component of a import declaration.\n*
\n* version-decl := version sem-ver\n*\n* @return Parsed node\n*/\nprivate STNode parseVersion() {\nSTToken nextToken = peek();\nreturn parseVersion(nextToken.kind);\n}\nprivate STNode parseVersion(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase VERSION_KEYWORD:\nSTNode versionKeyword = parseVersionKeyword();\nSTNode versionNumber = parseVersionNumber();\nreturn STNodeFactory.createImportVersionNode(versionKeyword, versionNumber);\ncase AS_KEYWORD:\ncase SEMICOLON_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nif (isEndOfImportDecl(nextTokenKind)) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVersion(solution.tokenKind);\n}\n}\n/**\n* Parse version keywrod.\n*\n* @return Parsed node\n*/\nprivate STNode parseVersionKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.VERSION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse version number.\n*
\n* sem-ver := major-num [. minor-num [. patch-num]]\n* major-num := DecimalNumber\n* minor-num := DecimalNumber\n* patch-num := DecimalNumber
\n*\n* @return Parsed node\n*/\nprivate STNode parseVersionNumber() {\nSTToken nextToken = peek();\nreturn parseVersionNumber(nextToken.kind);\n}\nprivate STNode parseVersionNumber(SyntaxKind nextTokenKind) {\nSTNode majorVersion;\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\nmajorVersion = parseMajorVersion();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.VERSION_NUMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVersionNumber(solution.tokenKind);\n}\nList versionParts = new ArrayList<>();\nversionParts.add(majorVersion);\nSTNode minorVersion = parseMinorVersion();\nif (minorVersion != null) {\nversionParts.add(minorVersion);\nSTNode patchVersion = parsePatchVersion();\nif (patchVersion != null) {\nversionParts.add(patchVersion);\n}\n}\nreturn STNodeFactory.createNodeList(versionParts);\n}\nprivate STNode parseMajorVersion() {\nreturn parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION);\n}\nprivate STNode parseMinorVersion() {\nreturn parseSubVersion(ParserRuleContext.MINOR_VERSION);\n}\nprivate STNode parsePatchVersion() {\nreturn parseSubVersion(ParserRuleContext.PATCH_VERSION);\n}\n/**\n* Parse decimal literal.\n*\n* @param context Context in which the decimal literal is used.\n* @return Parsed node\n*/\nprivate STNode parseDecimalIntLiteral(ParserRuleContext context) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), context);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse sub version. i.e: minor-version/patch-version.\n*\n* @param context Context indicating what kind of sub-version is being parsed.\n* @return Parsed node\n*/\nprivate STNode parseSubVersion(ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseSubVersion(nextToken.kind, context);\n}\nprivate STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) {\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\ncase SEMICOLON_TOKEN:\nreturn null;\ncase DOT_TOKEN:\nSTNode leadingDot = parseDotToken();\nSTNode versionNumber = parseDecimalIntLiteral(context);\nreturn STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseSubVersion(solution.tokenKind, context);\n}\n}\n/**\n* Parse import prefix declaration.\n*
\n* import-prefix-decl := as import-prefix\n* import-prefix := identifier | _
\n*\n* @return Parsed node\n*/\nprivate STNode parseImportPrefixDecl() {\nSTToken token = peek();\nreturn parseImportPrefixDecl(token.kind);\n}\nprivate STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\nSTNode asKeyword = parseAsKeyword();\nSTNode prefix = parseImportPrefix();\nreturn STNodeFactory.createImportPrefixNode(asKeyword, prefix);\ncase SEMICOLON_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nif (isEndOfImportDecl(nextTokenKind)) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseImportPrefixDecl(solution.tokenKind);\n}\n}\n/**\n* Parse as keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAsKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.AS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.AS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse import prefix.\n*\n* @return Parsed node\n*/\nprivate STNode parseImportPrefix() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse top level node, given the modifier that precedes it.\n*\n* @param qualifier Qualifier that precedes the top level node\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode(STNode metadata, STNode qualifier) {\nSTToken token = peek();\nreturn parseTopLevelNode(token.kind, metadata, qualifier);\n}\n/**\n* Parse top level node given the next token kind and the modifier that precedes it.\n*\n* @param tokenKind Next token kind\n* @param qualifier Qualifier that precedes the top level node\n* @return Parsed top-level node\n*/\nprivate STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) {\nswitch (tokenKind) {\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn parseFuncDefOrFuncTypeDesc(metadata, false, getQualifier(qualifier), null);\ncase TYPE_KEYWORD:\nreturn parseModuleTypeDefinition(metadata, getQualifier(qualifier));\ncase LISTENER_KEYWORD:\nreturn parseListenerDeclaration(metadata, getQualifier(qualifier));\ncase CONST_KEYWORD:\nreturn parseConstantDeclaration(metadata, getQualifier(qualifier));\ncase ANNOTATION_KEYWORD:\nSTNode constKeyword = STNodeFactory.createEmptyNode();\nreturn parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword);\ncase IMPORT_KEYWORD:\nreportInvalidQualifier(qualifier);\nreturn parseImportDecl();\ncase XMLNS_KEYWORD:\nreportInvalidQualifier(qualifier);\nreturn parseXMLNamespaceDeclaration(true);\ncase FINAL_KEYWORD:\nreportInvalidQualifier(qualifier);\nSTNode finalKeyword = parseFinalKeyword();\nreturn parseVariableDecl(metadata, finalKeyword, true);\ncase SERVICE_KEYWORD:\nif (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) {\nreportInvalidQualifier(qualifier);\nreturn parseServiceDecl(metadata);\n}\nreturn parseModuleVarDecl(metadata, qualifier);\ncase ENUM_KEYWORD:\nreturn parseEnumDeclaration(metadata, getQualifier(qualifier));\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\nSTToken token = 
peek();\nSolution solution =\nrecover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\nreturn parseTopLevelNode(solution.tokenKind, metadata, qualifier);\n}\n}\nprivate STNode parseModuleVarDecl(STNode metadata, STNode qualifier) {\nreportInvalidQualifier(qualifier);\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(metadata, finalKeyword, true);\n}\nprivate STNode getQualifier(STNode qualifier) {\nreturn qualifier == null ? STNodeFactory.createEmptyNode() : qualifier;\n}\nprivate void reportInvalidQualifier(STNode qualifier) {\nif (qualifier != null && qualifier.kind != SyntaxKind.NONE) {\naddInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_INVALID_QUALIFIER,\nqualifier.toString().trim());\n}\n}\n/**\n* Parse access modifiers.\n*\n* @return Parsed node\n*/\nprivate STNode parseQualifier() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PUBLIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseFuncDefinition(STNode metadata, boolean isObjectMethod, STNode... qualifiers) {\nparseTransactionalQUalifier(qualifiers);\nstartContext(ParserRuleContext.FUNC_DEF);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcDef = parseFunctionKeywordRhs(metadata, functionKeyword, true, isObjectMethod, qualifiers);\nreturn funcDef;\n}\n/**\n* Parse function definition for the function type descriptor.\n*
\n*\n* function-defn := FUNCTION identifier function-signature function-body\n* function-type-descriptor := function function-signature
\n*\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @return Parsed node\n*/\nprivate STNode parseFuncDefOrFuncTypeDesc(STNode metadata, boolean isObjectMethod, STNode... qualifiers) {\nparseTransactionalQUalifier(qualifiers);\nstartContext(ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcDefOrType = parseFunctionKeywordRhs(metadata, functionKeyword, false, isObjectMethod, qualifiers);\nreturn funcDefOrType;\n}\nprivate void parseTransactionalQUalifier(STNode... qualifiers) {\nif (peek().kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nqualifiers[qualifiers.length - 1] = consume();\n} else {\nqualifiers[qualifiers.length - 1] = STNodeFactory.createEmptyNode();\n}\n}\nprivate STNode parseFunctionKeywordRhs(STNode metadata, STNode functionKeyword, boolean isFuncDef,\nboolean isObjectMethod, STNode... qualifiers) {\nreturn parseFunctionKeywordRhs(peek().kind, metadata, functionKeyword, isFuncDef, isObjectMethod, qualifiers);\n}\nprivate STNode parseFunctionKeywordRhs(SyntaxKind nextTokenKind, STNode metadata, STNode functionKeyword,\nboolean isFuncDef, boolean isObjectMethod, STNode... qualifiers) {\nSTNode name;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nname = parseFunctionName();\nisFuncDef = true;\nbreak;\ncase OPEN_PAREN_TOKEN:\nname = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FUNCTION_KEYWORD_RHS, metadata, functionKeyword,\nisFuncDef, isObjectMethod, qualifiers);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFunctionKeywordRhs(solution.tokenKind, metadata, functionKeyword, isFuncDef, isObjectMethod,\nqualifiers);\n}\nif (isFuncDef) {\nswitchContext(ParserRuleContext.FUNC_DEF);\nSTNode funcSignature = parseFuncSignature(false);\nSTNode funcDef = createFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMethod, name, funcSignature,\nqualifiers);\nendContext();\nreturn funcDef;\n}\nSTNode funcSignature = parseFuncSignature(true);\nreturn parseReturnTypeDescRhs(metadata, functionKeyword, funcSignature, isObjectMethod, qualifiers);\n}\nprivate STNode createFuncDefOrMethodDecl(STNode metadata, STNode functionKeyword, boolean isObjectMethod,\nSTNode name, STNode funcSignature, STNode... qualifiers) {\nSTNode body = parseFunctionBody(isObjectMethod);\nif (body.kind == SyntaxKind.SEMICOLON_TOKEN) {\nreturn STNodeFactory.createMethodDeclarationNode(metadata, qualifiers[0], functionKeyword, name,\nfuncSignature, body);\n}\nif (isObjectMethod) {\nreturn STNodeFactory.createObjectMethodDefinitionNode(metadata, qualifiers[0], qualifiers[1], qualifiers[2],\nfunctionKeyword, name, funcSignature, body);\n}\nreturn STNodeFactory.createFunctionDefinitionNode(metadata, qualifiers[0], qualifiers[1], functionKeyword, name,\nfuncSignature, body);\n}\n/**\n* Parse function signature.\n*
\n*\n* function-signature := ( param-list ) return-type-descriptor\n* return-type-descriptor := [ returns [annots] type-descriptor ]
\n*\n* @param isParamNameOptional Whether the parameter names are optional\n* @return Function signature node\n*/\nprivate STNode parseFuncSignature(boolean isParamNameOptional) {\nSTNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode parameters = parseParamList(isParamNameOptional);\nSTNode closeParenthesis = parseCloseParenthesis();\nendContext();\nSTNode returnTypeDesc = parseFuncReturnTypeDescriptor();\nreturn STNodeFactory.createFunctionSignatureNode(openParenthesis, parameters, closeParenthesis, returnTypeDesc);\n}\nprivate STNode parseReturnTypeDescRhs(STNode metadata, STNode functionKeyword, STNode funcSignature,\nboolean isObjectMethod, STNode... qualifiers) {\nswitch (peek().kind) {\ncase SEMICOLON_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACKET_TOKEN:\nendContext();\nSTNode typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);\nif (isObjectMethod) {\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, qualifiers[0], readonlyQualifier, typeDesc, fieldName);\n}\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(metadata, qualifiers[0], typedBindingPattern, true);\ncase OPEN_BRACE_TOKEN:\ncase EQUAL_TOKEN:\nbreak;\ndefault:\nbreak;\n}\nSTNode name = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_FUNCTION_NAME);\nfuncSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\nSTNode funcDef =\ncreateFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMethod, name, funcSignature, qualifiers);\nendContext();\nreturn funcDef;\n}\n/**\n* Validate the param list and return. 
If there are params without param-name,\n* then this method will create a new set of params with missing param-name\n* and return.\n*\n* @param signature Function signature\n* @return\n*/\nprivate STNode validateAndGetFuncParams(STFunctionSignatureNode signature) {\nSTNode parameters = signature.parameters;\nint paramCount = parameters.bucketCount();\nint index = 0;\nfor (; index < paramCount; index++) {\nSTNode param = parameters.childInBucket(index);\nswitch (param.kind) {\ncase REQUIRED_PARAM:\nSTRequiredParameterNode requiredParam = (STRequiredParameterNode) param;\nif (isEmpty(requiredParam.paramName)) {\nbreak;\n}\ncontinue;\ncase DEFAULTABLE_PARAM:\nSTDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;\nif (isEmpty(defaultableParam.paramName)) {\nbreak;\n}\ncontinue;\ncase REST_PARAM:\nSTRestParameterNode restParam = (STRestParameterNode) param;\nif (isEmpty(restParam.paramName)) {\nbreak;\n}\ncontinue;\ndefault:\ncontinue;\n}\nbreak;\n}\nif (index == paramCount) {\nreturn signature;\n}\nSTNode updatedParams = getUpdatedParamList(parameters, index);\nreturn STNodeFactory.createFunctionSignatureNode(signature.openParenToken, updatedParams,\nsignature.closeParenToken, signature.returnTypeDesc);\n}\nprivate STNode getUpdatedParamList(STNode parameters, int index) {\nint paramCount = parameters.bucketCount();\nint newIndex = 0;\nArrayList newParams = new ArrayList<>();\nfor (; newIndex < index; newIndex++) {\nnewParams.add(parameters.childInBucket(index));\n}\nfor (; newIndex < paramCount; newIndex++) {\nSTNode param = parameters.childInBucket(newIndex);\nSTNode paramName = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nswitch (param.kind) {\ncase REQUIRED_PARAM:\nSTRequiredParameterNode requiredParam = (STRequiredParameterNode) param;\nif (isEmpty(requiredParam.paramName)) {\nparam = STNodeFactory.createRequiredParameterNode(requiredParam.leadingComma,\nrequiredParam.annotations, requiredParam.visibilityQualifier, requiredParam.typeName,\nparamName);\n}\nbreak;\ncase DEFAULTABLE_PARAM:\nSTDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;\nif (isEmpty(defaultableParam.paramName)) {\nparam = STNodeFactory.createDefaultableParameterNode(defaultableParam.leadingComma,\ndefaultableParam.annotations, defaultableParam.visibilityQualifier,\ndefaultableParam.typeName, paramName, defaultableParam.equalsToken,\ndefaultableParam.expression);\n}\nbreak;\ncase REST_PARAM:\nSTRestParameterNode restParam = (STRestParameterNode) param;\nif (isEmpty(restParam.paramName)) {\nparam = STNodeFactory.createRestParameterNode(restParam.leadingComma, restParam.annotations,\nrestParam.typeName, restParam.ellipsisToken, paramName);\n}\nbreak;\ndefault:\nbreak;\n}\nnewParams.add(param);\n}\nreturn STNodeFactory.createNodeList(newParams);\n}\nprivate boolean isEmpty(STNode node) {\nreturn !SyntaxUtils.isSTNodePresent(node);\n}\n/**\n* Parse function keyword. 
Need to validate the token before consuming,\n* since we can reach here while recovering.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse function name.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNC_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse open parenthesis.\n*\n* @param ctx Context of the parenthesis\n* @return Parsed node\n*/\nprivate STNode parseOpenParenthesis(ParserRuleContext ctx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ctx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse close parenthesis.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseParenthesis() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse parameter list.\n*\n* param-list := required-params [, defaultable-params] [, rest-param]\n*   | defaultable-params [, rest-param]\n*   | [rest-param]\n* required-params := required-param (, required-param)*\n* required-param := [annots] [public] type-descriptor [param-name]\n* defaultable-params := defaultable-param (, defaultable-param)*\n* defaultable-param := [annots] [public] type-descriptor [param-name] default-value\n* rest-param := [annots] type-descriptor ... [param-name]\n* param-name := identifier
\n*\n* @param isParamNameOptional Whether the param names in the signature is optional or not.\n* @return Parsed node\n*/\nprivate STNode parseParamList(boolean isParamNameOptional) {\nstartContext(ParserRuleContext.PARAM_LIST);\nSTToken token = peek();\nif (isEndOfParametersList(token.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nArrayList paramsList = new ArrayList<>();\nSTNode startingComma = STNodeFactory.createEmptyNode();\nstartContext(ParserRuleContext.REQUIRED_PARAM);\nSTNode firstParam = parseParameter(startingComma, SyntaxKind.REQUIRED_PARAM, isParamNameOptional);\nSyntaxKind prevParamKind = firstParam.kind;\nparamsList.add(firstParam);\nboolean paramOrderErrorPresent = false;\ntoken = peek();\nwhile (!isEndOfParametersList(token.kind)) {\nif (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM) {\nstartContext(ParserRuleContext.DEFAULTABLE_PARAM);\n} else {\nstartContext(ParserRuleContext.REQUIRED_PARAM);\n}\nSTNode paramEnd = parseParameterRhs();\nif (paramEnd == null) {\nendContext();\nbreak;\n}\nSTNode param = parseParameter(paramEnd, prevParamKind, isParamNameOptional);\nif (paramOrderErrorPresent) {\nupdateLastNodeInListWithInvalidNode(paramsList, param, null);\n} else {\nDiagnosticCode paramOrderError = validateParamOrder(param, prevParamKind);\nif (paramOrderError == null) {\nparamsList.add(param);\n} else {\nparamOrderErrorPresent = true;\nupdateLastNodeInListWithInvalidNode(paramsList, param, paramOrderError);\n}\n}\nprevParamKind = param.kind;\ntoken = peek();\n}\nreturn STNodeFactory.createNodeList(paramsList);\n}\n/**\n* Return the appropriate {@code DiagnosticCode} if there are parameter order issues.\n*\n* @param param the new parameter\n* @param prevParamKind the SyntaxKind of the previously added parameter\n*/\nprivate DiagnosticCode validateParamOrder(STNode param, SyntaxKind prevParamKind) {\nif (prevParamKind == SyntaxKind.REST_PARAM) {\nreturn DiagnosticErrorCode.ERROR_PARAMETER_AFTER_THE_REST_PARAMETER;\n} else if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM && param.kind == SyntaxKind.REQUIRED_PARAM) {\nreturn DiagnosticErrorCode.ERROR_REQUIRED_PARAMETER_AFTER_THE_DEFAULTABLE_PARAMETER;\n} else {\nreturn null;\n}\n}\nprivate boolean isNodeWithSyntaxKindInList(List nodeList, SyntaxKind kind) {\nfor (STNode node : nodeList) {\nif (node.kind == kind) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate STNode parseParameterRhs() {\nreturn parseParameterRhs(peek().kind);\n}\nprivate STNode parseParameterRhs(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAM_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameterRhs(solution.tokenKind);\n}\n}\n/**\n* Parse a single parameter. 
Parameter can be a required parameter, a defaultable\n* parameter, or a rest parameter.\n*\n* @param prevParamKind Kind of the parameter that precedes current parameter\n* @param leadingComma Comma that occurs before the param\n* @param isParamNameOptional Whether the param names in the signature is optional or not.\n* @return Parsed node\n*/\nprivate STNode parseParameter(STNode leadingComma, SyntaxKind prevParamKind, boolean isParamNameOptional) {\nSTToken token = peek();\nreturn parseParameter(token.kind, prevParamKind, leadingComma, 1, isParamNameOptional);\n}\nprivate STNode parseParameter(SyntaxKind prevParamKind, STNode leadingComma, int nextTokenOffset,\nboolean isParamNameOptional) {\nreturn parseParameter(peek().kind, prevParamKind, leadingComma, nextTokenOffset, isParamNameOptional);\n}\nprivate STNode parseParameter(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nint nextTokenOffset, boolean isParamNameOptional) {\nSTNode annots;\nswitch (nextTokenKind) {\ncase AT_TOKEN:\nannots = parseAnnotations(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ncase PUBLIC_KEYWORD:\ncase IDENTIFIER_TOKEN:\nannots = STNodeFactory.createEmptyNodeList();\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nannots = STNodeFactory.createNodeList(new ArrayList<>());\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_START, prevParamKind, leadingComma,\nnextTokenOffset, isParamNameOptional);\nif (solution.action == Action.KEEP) {\nannots = STNodeFactory.createEmptyNodeList();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameter(solution.tokenKind, prevParamKind, leadingComma, 0, isParamNameOptional);\n}\nreturn parseParamGivenAnnots(nextTokenKind, prevParamKind, leadingComma, annots, 1, isParamNameOptional);\n}\nprivate STNode parseParamGivenAnnots(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nint nextNextTokenOffset, boolean isFuncDef) {\nreturn parseParamGivenAnnots(peek().kind, prevParamKind, leadingComma, annots, nextNextTokenOffset, isFuncDef);\n}\nprivate STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nSTNode annots, int nextTokenOffset, boolean isParamNameOptional) {\nSTNode qualifier;\nswitch (nextTokenKind) {\ncase PUBLIC_KEYWORD:\nqualifier = parseQualifier();\nbreak;\ncase IDENTIFIER_TOKEN:\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\ncase AT_TOKEN:\ndefault:\nif (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, prevParamKind,\nleadingComma, annots, nextTokenOffset, isParamNameOptional);\nif (solution.action == Action.KEEP) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParamGivenAnnots(solution.tokenKind, prevParamKind, leadingComma, annots, 0,\nisParamNameOptional);\n}\nreturn parseParamGivenAnnotsAndQualifier(prevParamKind, leadingComma, annots, qualifier, isParamNameOptional);\n}\nprivate STNode parseParamGivenAnnotsAndQualifier(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nSTNode qualifier, boolean isParamNameOptional) {\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode param = 
parseAfterParamType(prevParamKind, leadingComma, annots, qualifier, type, isParamNameOptional);\nendContext();\nreturn param;\n}\nprivate STNode parseAfterParamType(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,\nSTNode type, boolean isParamNameOptional) {\nSTToken token = peek();\nreturn parseAfterParamType(token.kind, prevParamKind, leadingComma, annots, qualifier, type,\nisParamNameOptional);\n}\nprivate STNode parseAfterParamType(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nSTNode annots, STNode qualifier, STNode type, boolean isParamNameOptional) {\nSTNode paramName;\nswitch (tokenKind) {\ncase ELLIPSIS_TOKEN:\nswitchContext(ParserRuleContext.REST_PARAM);\nreportInvalidQualifier(qualifier);\nSTNode ellipsis = parseEllipsis();\nif (isParamNameOptional && peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {\nparamName = STNodeFactory.createEmptyNode();\n} else {\nparamName = parseVariableName();\n}\nreturn STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName);\ncase IDENTIFIER_TOKEN:\nparamName = parseVariableName();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\ncase EQUAL_TOKEN:\nif (!isParamNameOptional) {\nbreak;\n}\nparamName = STNodeFactory.createEmptyNode();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\ndefault:\nif (!isParamNameOptional) {\nbreak;\n}\nparamName = STNodeFactory.createEmptyNode();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, prevParamKind, leadingComma, annots,\nqualifier, type, isParamNameOptional);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAfterParamType(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,\nisParamNameOptional);\n}\n/**\n* Parse ellipsis.\n*\n* @return Parsed node\n*/\nprivate STNode parseEllipsis() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ELLIPSIS_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ELLIPSIS);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse the right hand side of a required/defaultable parameter.\n*
\n* parameter-rhs := [= expression]\n*\n* @param leadingComma Comma that precedes this parameter\n* @param prevParamKind Kind of the parameter that precedes current parameter\n* @param annots Annotations attached to the parameter\n* @param qualifier Visibility qualifier\n* @param type Type descriptor\n* @param paramName Name of the parameter\n* @return Parsed parameter node\n*/\nprivate STNode parseParameterRhs(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,\nSTNode type, STNode paramName) {\nSTToken token = peek();\nreturn parseParameterRhs(token.kind, prevParamKind, leadingComma, annots, qualifier, type, paramName);\n}\nprivate STNode parseParameterRhs(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nSTNode qualifier, STNode type, STNode paramName) {\nif (isEndOfParameter(tokenKind)) {\nreturn STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName);\n} else if (tokenKind == SyntaxKind.EQUAL_TOKEN) {\nif (prevParamKind == SyntaxKind.REQUIRED_PARAM) {\nswitchContext(ParserRuleContext.DEFAULTABLE_PARAM);\n}\nSTNode equal = parseAssignOp();\nSTNode expr = parseExpression();\nreturn STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal,\nexpr);\n} else {\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_NAME_RHS, prevParamKind, leadingComma,\nannots, qualifier, type, paramName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameterRhs(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,\nparamName);\n}\n}\n/**\n* Parse comma.\n*\n* @return Parsed node\n*/\nprivate STNode parseComma() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COMMA_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMMA);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse return type descriptor of a function. A return type descriptor has the following structure.\n*\n* return-type-descriptor := [ returns annots type-descriptor ]\n*\n* @return Parsed node\n*/\nprivate STNode parseFuncReturnTypeDescriptor() {\nreturn parseFuncReturnTypeDescriptor(peek().kind);\n}\nprivate STNode parseFuncReturnTypeDescriptor(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\ncase EQUAL_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase RETURNS_KEYWORD:\nbreak;\ndefault:\nSTToken nextNextToken = getNextNextToken(nextTokenKind);\nif (nextNextToken.kind == SyntaxKind.RETURNS_KEYWORD) {\nbreak;\n}\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode returnsKeyword = parseReturnsKeyword();\nSTNode annot = parseAnnotations();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\nreturn STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n}\n/**\n* Parse 'returns' keyword.\n*\n* @return Return-keyword node\n*/\nprivate STNode parseReturnsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETURNS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETURNS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse a type descriptor. A type descriptor has the following structure.\n*\n* type-descriptor :=\n*  simple-type-descriptor\n*  | structured-type-descriptor\n*  | behavioral-type-descriptor\n*  | singleton-type-descriptor\n*  | union-type-descriptor\n*  | optional-type-descriptor\n*  | any-type-descriptor\n*  | anydata-type-descriptor\n*  | byte-type-descriptor\n*  | json-type-descriptor\n*  | type-descriptor-reference\n*  | ( type-descriptor )\n*\n* type-descriptor-reference := qualified-identifier
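\n*\n* e.g. (illustrative, not exhaustive): int (simple), int[] (array), int|string (union),\n* int? (optional), map<string> (parameterized), foo:Bar (type-descriptor-reference).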
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeDescriptor(ParserRuleContext context) {\nreturn parseTypeDescriptor(context, false, false);\n}\nprivate STNode parseTypeDescriptorInExpression(ParserRuleContext context, boolean isInConditionalExpr) {\nreturn parseTypeDescriptor(context, false, isInConditionalExpr);\n}\nprivate STNode parseTypeDescriptor(ParserRuleContext context, boolean isTypedBindingPattern,\nboolean isInConditionalExpr) {\nstartContext(context);\nSTNode typeDesc = parseTypeDescriptorInternal(context, isTypedBindingPattern, isInConditionalExpr);\nendContext();\nreturn typeDesc;\n}\nprivate STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isInConditionalExpr) {\nreturn parseTypeDescriptorInternal(context, false, isInConditionalExpr);\n}\nprivate STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isTypedBindingPattern,\nboolean isInConditionalExpr) {\nSTToken token = peek();\nSTNode typeDesc = parseTypeDescriptorInternal(token.kind, context, isInConditionalExpr);\nreturn parseComplexTypeDescriptor(typeDesc, context, isTypedBindingPattern);\n}\n/**\n* This will handle the parsing of optional,array,union type desc to infinite length.\n*\n* @param typeDesc\n*\n* @return Parsed type descriptor node\n*/\nprivate STNode parseComplexTypeDescriptor(STNode typeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase QUESTION_MARK_TOKEN:\nif (context == ParserRuleContext.TYPE_DESC_IN_EXPRESSION &&\n!isValidTypeContinuationToken(getNextNextToken(nextToken.kind)) &&\nisValidExprStart(getNextNextToken(nextToken.kind).kind)) {\nreturn typeDesc;\n}\nreturn parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc), context,\nisTypedBindingPattern);\ncase OPEN_BRACKET_TOKEN:\nif (isTypedBindingPattern) {\nreturn typeDesc;\n}\nreturn parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc), context, isTypedBindingPattern);\ncase PIPE_TOKEN:\nreturn parseUnionTypeDescriptor(typeDesc, context, isTypedBindingPattern);\ncase BITWISE_AND_TOKEN:\nreturn parseIntersectionTypeDescriptor(typeDesc, context, isTypedBindingPattern);\ndefault:\nreturn typeDesc;\n}\n}\nprivate boolean isValidTypeContinuationToken(STToken nextToken) {\nswitch (nextToken.kind) {\ncase QUESTION_MARK_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n*
\n* Parse a type descriptor, given the next token kind.\n*
\n* If the preceding token is ? then it is an optional type descriptor\n*\n* @param tokenKind Next token kind\n* @param context Current context\n* @param isInConditionalExpr\n* @return Parsed node\n*/\nprivate STNode parseTypeDescriptorInternal(SyntaxKind tokenKind, ParserRuleContext context,\nboolean isInConditionalExpr) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseTypeReference(isInConditionalExpr);\ncase RECORD_KEYWORD:\nreturn parseRecordTypeDescriptor();\ncase READONLY_KEYWORD:\nSTToken nextNextToken = getNextNextToken(tokenKind);\nSyntaxKind nextNextTokenKind = nextNextToken.kind;\nif (nextNextTokenKind != SyntaxKind.OBJECT_KEYWORD &&\nnextNextTokenKind != SyntaxKind.ABSTRACT_KEYWORD &&\nnextNextTokenKind != SyntaxKind.CLIENT_KEYWORD) {\nreturn parseSimpleTypeDescriptor();\n}\ncase OBJECT_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CLIENT_KEYWORD:\nreturn parseObjectTypeDescriptor();\ncase OPEN_PAREN_TOKEN:\nreturn parseNilOrParenthesisedTypeDesc();\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\nreturn parseParameterizedTypeDescriptor();\ncase TYPEDESC_KEYWORD:\nreturn parseTypedescTypeDescriptor();\ncase ERROR_KEYWORD:\nreturn parseErrorTypeDescriptor();\ncase XML_KEYWORD:\nreturn parseXmlTypeDescriptor();\ncase STREAM_KEYWORD:\nreturn parseStreamTypeDescriptor();\ncase TABLE_KEYWORD:\nreturn parseTableTypeDescriptor();\ncase FUNCTION_KEYWORD:\nreturn parseFunctionTypeDesc();\ncase OPEN_BRACKET_TOKEN:\nreturn parseTupleTypeDesc();\ncase DISTINCT_KEYWORD:\nreturn parseDistinctTypeDesc(context);\ndefault:\nif (isSingletonTypeDescStart(tokenKind, true)) {\nreturn parseSingletonTypeDesc();\n}\nif (isSimpleType(tokenKind)) {\nreturn parseSimpleTypeDescriptor();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR, context, isInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypeDescriptorInternal(solution.tokenKind, context, isInConditionalExpr);\n}\n}\n/**\n* Parse distinct type descriptor.\n*
\n* \n* distinct-type-descriptor := distinct type-descriptor\n* \n*\n* @param context Context in which the type desc is used.\n* @return Distinct type descriptor\n*/\nprivate STNode parseDistinctTypeDesc(ParserRuleContext context) {\nSTNode distinctKeyword = parseDistinctKeyword();\nSTNode typeDesc = parseTypeDescriptor(context);\nreturn STNodeFactory.createDistinctTypeDescriptorNode(distinctKeyword, typeDesc);\n}\nprivate STNode parseDistinctKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DISTINCT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DISTINCT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseNilOrParenthesisedTypeDesc() {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nreturn parseNilOrParenthesisedTypeDescRhs(openParen);\n}\nprivate STNode parseNilOrParenthesisedTypeDescRhs(STNode openParen) {\nreturn parseNilOrParenthesisedTypeDescRhs(peek().kind, openParen);\n}\nprivate STNode parseNilOrParenthesisedTypeDescRhs(SyntaxKind nextTokenKind, STNode openParen) {\nSTNode closeParen;\nswitch (nextTokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncloseParen = parseCloseParenthesis();\nreturn STNodeFactory.createNilTypeDescriptorNode(openParen, closeParen);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nSTNode typedesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_PARENTHESIS);\ncloseParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typedesc, closeParen);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.NIL_OR_PARENTHESISED_TYPE_DESC_RHS, openParen);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseNilOrParenthesisedTypeDescRhs(solution.tokenKind, openParen);\n}\n}\n/**\n* Parse simple type descriptor.\n*\n* @return Parsed node\n*/\nprivate STNode parseSimpleTypeDescriptor() {\nSTToken node = peek();\nif (isSimpleType(node.kind)) {\nSTToken token = consume();\nreturn createBuiltinSimpleNameReference(token);\n} else {\nSolution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR);\nSTNode recoveredNode = sol.recoveredNode;\nreturn createBuiltinSimpleNameReference(recoveredNode);\n}\n}\nprivate STNode createBuiltinSimpleNameReference(STNode token) {\nSyntaxKind typeKind = getTypeSyntaxKind(token.kind);\nreturn STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token);\n}\n/**\n*
\n* Parse function body. A function body has the following structure.\n*
\n* \n* function-body := function-body-block | external-function-body\n* external-function-body := = annots external ;\n* function-body-block := { [default-worker-init, named-worker-decl+] default-worker }\n* \n*\n* @param isObjectMethod Flag indicating whether this is an object-method\n* @return Parsed node\n*/\nprivate STNode parseFunctionBody(boolean isObjectMethod) {\nSTToken token = peek();\nreturn parseFunctionBody(token.kind, isObjectMethod);\n}\n/**\n* Parse function body, given the next token kind.\n*\n* @param tokenKind Next token kind\n* @param isObjectMethod Flag indicating whether this is an object-method\n* @return Parsed node\n*/\nprotected STNode parseFunctionBody(SyntaxKind tokenKind, boolean isObjectMethod) {\nswitch (tokenKind) {\ncase EQUAL_TOKEN:\nreturn parseExternalFunctionBody();\ncase OPEN_BRACE_TOKEN:\nreturn parseFunctionBodyBlock(false);\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn parseExpressionFuncBody(false, false);\ncase SEMICOLON_TOKEN:\nif (isObjectMethod) {\nreturn parseSemicolon();\n}\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FUNC_BODY, isObjectMethod);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.tokenKind == SyntaxKind.NONE) {\nreturn STNodeFactory.createMissingToken(solution.tokenKind);\n}\nreturn parseFunctionBody(solution.tokenKind, isObjectMethod);\n}\n}\n/**\n*
\n* Parse function body block. A function body block has the following structure.\n*\n* function-body-block := { [default-worker-init, named-worker-decl+] default-worker }\n* default-worker-init := sequence-stmt\n* default-worker := sequence-stmt\n* named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }\n* worker-name := identifier\n*
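\n*\n* e.g. (illustrative): in { int x = 1; worker w { } x = 2; } the statement int x = 1; forms\n* the default-worker-init, worker w { } is a named-worker-decl, and x = 2; belongs to the\n* default worker.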
\n*\n* @param isAnonFunc Flag indicating whether the func body belongs to an anonymous function\n* @return Parsed node\n*/\nprivate STNode parseFunctionBodyBlock(boolean isAnonFunc) {\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nSTNode openBrace = parseOpenBrace();\nSTToken token = peek();\nArrayList firstStmtList = new ArrayList<>();\nArrayList workers = new ArrayList<>();\nArrayList secondStmtList = new ArrayList<>();\nParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT;\nboolean hasNamedWorkers = false;\nwhile (!isEndOfFuncBodyBlock(token.kind, isAnonFunc)) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nswitch (currentCtx) {\ncase DEFAULT_WORKER_INIT:\nif (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) {\nfirstStmtList.add(stmt);\nbreak;\n}\ncurrentCtx = ParserRuleContext.NAMED_WORKERS;\nhasNamedWorkers = true;\ncase NAMED_WORKERS:\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\nworkers.add(stmt);\nbreak;\n}\ncurrentCtx = ParserRuleContext.DEFAULT_WORKER;\ncase DEFAULT_WORKER:\ndefault:\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\nupdateLastNodeInListWithInvalidNode(secondStmtList, stmt,\nDiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE);\nbreak;\n}\nsecondStmtList.add(stmt);\nbreak;\n}\ntoken = peek();\n}\nSTNode namedWorkersList;\nSTNode statements;\nif (hasNamedWorkers) {\nSTNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList);\nSTNode namedWorkers = STNodeFactory.createNodeList(workers);\nnamedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers);\nstatements = STNodeFactory.createNodeList(secondStmtList);\n} else {\nnamedWorkersList = STNodeFactory.createEmptyNode();\nstatements = STNodeFactory.createNodeList(firstStmtList);\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace);\n}\nprivate boolean isEndOfFuncBodyBlock(SyntaxKind nextTokenKind, boolean isAnonFunc) {\nif (isAnonFunc) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase PUBLIC_KEYWORD:\ncase EOF_TOKEN:\ncase EQUAL_TOKEN:\ncase BACKTICK_TOKEN:\nreturn true;\ndefault:\nbreak;\n}\n}\nreturn isEndOfStatements();\n}\nprivate boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase TYPE_KEYWORD:\ncase PUBLIC_KEYWORD:\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\nprivate boolean isEndOfObjectTypeNode() {\nreturn endOfModuleLevelNode(1, true);\n}\nprivate boolean isEndOfStatements() {\nswitch (peek().kind) {\ncase RESOURCE_KEYWORD:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\nprivate boolean endOfModuleLevelNode(int peekIndex) {\nreturn endOfModuleLevelNode(peekIndex, false);\n}\nprivate boolean endOfModuleLevelNode(int peekIndex, boolean isObject) {\nswitch (peek(peekIndex).kind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\ncase IMPORT_KEYWORD:\ncase CONST_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase LISTENER_KEYWORD:\nreturn true;\ncase SERVICE_KEYWORD:\nreturn isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1);\ncase PUBLIC_KEYWORD:\nreturn endOfModuleLevelNode(peekIndex + 1, isObject);\ncase FUNCTION_KEYWORD:\nif (isObject) {\nreturn false;\n}\nreturn peek(peekIndex + 1).kind == SyntaxKind.IDENTIFIER_TOKEN;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the given token is 
an end of a parameter.\n*\n* @param tokenKind Next token kind\n* @return true if the token represents an end of a parameter. false otherwise\n*/\nprivate boolean isEndOfParameter(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase AT_TOKEN:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\n/**\n* Check whether the given token is an end of a parameter-list.\n*\n* @param tokenKind Next token kind\n* @return true if the token represents an end of a parameter-list. false otherwise\n*/\nprivate boolean isEndOfParametersList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncase SEMICOLON_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\n/**\n* Parse type reference or variable reference.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartIdentifier() {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\n}\n/**\n* Parse variable name.\n*\n* @return Parsed node\n*/\nprivate STNode parseVariableName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse open brace.\n*\n* @return Parsed node\n*/\nprivate STNode parseOpenBrace() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPEN_BRACE);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse close brace.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseBrace() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_BRACE);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse external function body. An external function body has the following structure.\n*
\n* \n* external-function-body := = annots external ;\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseExternalFunctionBody() {\nstartContext(ParserRuleContext.EXTERNAL_FUNC_BODY);\nSTNode assign = parseAssignOp();\nSTNode annotation = parseAnnotations();\nSTNode externalKeyword = parseExternalKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon);\n}\n/**\n* Parse semicolon.\n*\n* @return Parsed node\n*/\nprivate STNode parseSemicolon() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SEMICOLON_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SEMICOLON);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse external keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseExternalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.EXTERNAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/*\n* Operators\n*/\n/**\n* Parse assign operator.\n*\n* @return Parsed node\n*/\nprivate STNode parseAssignOp() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ASSIGN_OP);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse binary operator.\n*\n* @return Parsed node\n*/\nprivate STNode parseBinaryOperator() {\nSTToken token = peek();\nif (isBinaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BINARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a binary operator.\n*\n* @param kind STToken kind\n* @return true if the token kind refers to a binary operator. 
false otherwise\n*/\nprivate boolean isBinaryOperator(SyntaxKind kind) {\nswitch (kind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase SLASH_TOKEN:\ncase ASTERISK_TOKEN:\ncase GT_TOKEN:\ncase LT_TOKEN:\ncase DOUBLE_EQUAL_TOKEN:\ncase TRIPPLE_EQUAL_TOKEN:\ncase LT_EQUAL_TOKEN:\ncase GT_EQUAL_TOKEN:\ncase NOT_EQUAL_TOKEN:\ncase NOT_DOUBLE_EQUAL_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase BITWISE_XOR_TOKEN:\ncase PIPE_TOKEN:\ncase LOGICAL_AND_TOKEN:\ncase LOGICAL_OR_TOKEN:\ncase PERCENT_TOKEN:\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\ncase ELLIPSIS_TOKEN:\ncase DOUBLE_DOT_LT_TOKEN:\ncase ELVIS_TOKEN:\ncase EQUALS_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Get the precedence of a given operator.\n*\n* @param binaryOpKind Operator kind\n* @return Precedence of the given operator\n*/\nprivate OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) {\nswitch (binaryOpKind) {\ncase ASTERISK_TOKEN:\ncase SLASH_TOKEN:\ncase PERCENT_TOKEN:\nreturn OperatorPrecedence.MULTIPLICATIVE;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn OperatorPrecedence.ADDITIVE;\ncase GT_TOKEN:\ncase LT_TOKEN:\ncase GT_EQUAL_TOKEN:\ncase LT_EQUAL_TOKEN:\ncase IS_KEYWORD:\nreturn OperatorPrecedence.BINARY_COMPARE;\ncase DOT_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase ANNOT_CHAINING_TOKEN:\ncase OPTIONAL_CHAINING_TOKEN:\ncase DOT_LT_TOKEN:\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nreturn OperatorPrecedence.MEMBER_ACCESS;\ncase DOUBLE_EQUAL_TOKEN:\ncase TRIPPLE_EQUAL_TOKEN:\ncase NOT_EQUAL_TOKEN:\ncase NOT_DOUBLE_EQUAL_TOKEN:\ncase EQUALS_KEYWORD:\nreturn OperatorPrecedence.EQUALITY;\ncase BITWISE_AND_TOKEN:\nreturn OperatorPrecedence.BITWISE_AND;\ncase BITWISE_XOR_TOKEN:\nreturn OperatorPrecedence.BITWISE_XOR;\ncase PIPE_TOKEN:\nreturn OperatorPrecedence.BITWISE_OR;\ncase LOGICAL_AND_TOKEN:\nreturn OperatorPrecedence.LOGICAL_AND;\ncase LOGICAL_OR_TOKEN:\nreturn OperatorPrecedence.LOGICAL_OR;\ncase RIGHT_ARROW_TOKEN:\nreturn OperatorPrecedence.REMOTE_CALL_ACTION;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn OperatorPrecedence.ANON_FUNC_OR_LET;\ncase SYNC_SEND_TOKEN:\nreturn OperatorPrecedence.ACTION;\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\nreturn OperatorPrecedence.SHIFT;\ncase ELLIPSIS_TOKEN:\ncase DOUBLE_DOT_LT_TOKEN:\nreturn OperatorPrecedence.RANGE;\ncase ELVIS_TOKEN:\nreturn OperatorPrecedence.ELVIS_CONDITIONAL;\ncase QUESTION_MARK_TOKEN:\ncase COLON_TOKEN:\nreturn OperatorPrecedence.CONDITIONAL;\ndefault:\nthrow new UnsupportedOperationException(\"Unsupported binary operator '\" + binaryOpKind + \"'\");\n}\n}\n/**\n*
\n* Get the operator kind to insert during recovery, given the precedence level.\n*
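\n*\n* e.g. (illustrative, per the switch below): at ADDITIVE precedence a missing operator is\n* inserted as a plus token, and at EQUALITY precedence a double-equal token is inserted.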
\n*\n* @param opPrecedenceLevel Precedence of the given operator\n* @return Kind of the operator to insert\n*/\nprivate SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) {\nswitch (opPrecedenceLevel) {\ncase DEFAULT:\ncase UNARY:\ncase ACTION:\ncase EXPRESSION_ACTION:\ncase REMOTE_CALL_ACTION:\ncase ANON_FUNC_OR_LET:\ncase QUERY:\ncase MULTIPLICATIVE:\nreturn SyntaxKind.ASTERISK_TOKEN;\ncase ADDITIVE:\nreturn SyntaxKind.PLUS_TOKEN;\ncase SHIFT:\nreturn SyntaxKind.DOUBLE_LT_TOKEN;\ncase RANGE:\nreturn SyntaxKind.ELLIPSIS_TOKEN;\ncase BINARY_COMPARE:\nreturn SyntaxKind.LT_TOKEN;\ncase EQUALITY:\nreturn SyntaxKind.DOUBLE_EQUAL_TOKEN;\ncase BITWISE_AND:\nreturn SyntaxKind.BITWISE_AND_TOKEN;\ncase BITWISE_XOR:\nreturn SyntaxKind.BITWISE_XOR_TOKEN;\ncase BITWISE_OR:\nreturn SyntaxKind.PIPE_TOKEN;\ncase LOGICAL_AND:\nreturn SyntaxKind.LOGICAL_AND_TOKEN;\ncase LOGICAL_OR:\nreturn SyntaxKind.LOGICAL_OR_TOKEN;\ncase ELVIS_CONDITIONAL:\nreturn SyntaxKind.ELVIS_TOKEN;\ndefault:\nthrow new UnsupportedOperationException(\n\"Unsupported operator precedence level'\" + opPrecedenceLevel + \"'\");\n}\n}\n/**\n*
\n* Parse a module type definition.\n*
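\n* e.g. (illustrative): public type Person record { string name; };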
\n* module-type-defn := metadata [public] type identifier type-descriptor ;\n*\n* @param metadata Metadata\n* @param qualifier Visibility qualifier\n* @return Parsed node\n*/\nprivate STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.MODULE_TYPE_DEFINITION);\nSTNode typeKeyword = parseTypeKeyword();\nSTNode typeName = parseTypeName();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor,\nsemicolon);\n}\n/**\n* Parse type keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse type name.\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPE_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse record type descriptor. A record type descriptor body has the following structure.\n*\n* record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor\n* inclusive-record-type-descriptor := record { field-descriptor* }\n* exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |}\n*
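\n*\n* e.g. (illustrative): record { string name; } is inclusive (open), whereas\n* record {| string name; |} is exclusive (closed).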
\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordTypeDescriptor() {\nstartContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR);\nSTNode recordKeyword = parseRecordKeyword();\nSTNode bodyStartDelimiter = parseRecordBodyStartDelimiter();\nboolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN;\nSTNode fields = parseFieldDescriptors(isInclusive);\nSTNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind);\nendContext();\nreturn STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields,\nbodyEndDelimiter);\n}\n/**\n* Parse record body start delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordBodyStartDelimiter() {\nSTToken token = peek();\nreturn parseRecordBodyStartDelimiter(token.kind);\n}\nprivate STNode parseRecordBodyStartDelimiter(SyntaxKind kind) {\nswitch (kind) {\ncase OPEN_BRACE_PIPE_TOKEN:\nreturn parseClosedRecordBodyStart();\ncase OPEN_BRACE_TOKEN:\nreturn parseOpenBrace();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_BODY_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRecordBodyStartDelimiter(solution.tokenKind);\n}\n}\n/**\n* Parse closed-record body start delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseClosedRecordBodyStart() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse record body close delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) {\nswitch (startingDelimeter) {\ncase OPEN_BRACE_PIPE_TOKEN:\nreturn parseClosedRecordBodyEnd();\ncase OPEN_BRACE_TOKEN:\nreturn parseCloseBrace();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_BODY_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRecordBodyCloseDelimiter(solution.tokenKind);\n}\n}\n/**\n* Parse closed-record body end delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseClosedRecordBodyEnd() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse record keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RECORD_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RECORD_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse field descriptors.\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptors(boolean isInclusive) {\nArrayList recordFields = new ArrayList<>();\nSTToken token = peek();\nboolean endOfFields = false;\nwhile (!isEndOfRecordTypeNode(token.kind)) {\nSTNode field = parseFieldOrRestDescriptor(isInclusive);\nif (field == null) {\nendOfFields = true;\nbreak;\n}\nrecordFields.add(field);\ntoken = peek();\nif (field.kind == SyntaxKind.RECORD_REST_TYPE) {\nbreak;\n}\n}\nwhile (!endOfFields && !isEndOfRecordTypeNode(token.kind)) {\nSTNode invalidField = parseFieldOrRestDescriptor(isInclusive);\nupdateLastNodeInListWithInvalidNode(recordFields, invalidField,\nDiagnosticErrorCode.ERROR_MORE_RECORD_FIELDS_AFTER_REST_FIELD);\ntoken = peek();\n}\nreturn STNodeFactory.createNodeList(recordFields);\n}\n/**\n*
\n* Parse field descriptor or rest descriptor.\n*\n* field-descriptor := individual-field-descriptor | record-type-reference\n* individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ;\n* field-name := identifier\n* default-value := = expression\n* record-type-reference := * type-reference ;\n* record-rest-descriptor := type-descriptor ... ;\n*
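\n*\n* e.g. (illustrative): string name; (individual field), int age?; (optional field),\n* int port = 8080; (field with default-value), *Person; (record-type-reference),\n* string...; (record-rest-descriptor).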
\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldOrRestDescriptor(boolean isInclusive) {\nreturn parseFieldOrRestDescriptor(peek().kind, isInclusive);\n}\nprivate STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\nstartContext(ParserRuleContext.RECORD_FIELD);\nSTNode asterisk = consume();\nSTNode type = parseTypeReference();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nstartContext(ParserRuleContext.RECORD_FIELD);\nSTNode metadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nreturn parseRecordField(nextTokenKind, isInclusive, metadata);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nstartContext(ParserRuleContext.RECORD_FIELD);\nmetadata = createEmptyMetadata();\nreturn parseRecordField(nextTokenKind, isInclusive, metadata);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldOrRestDescriptor(solution.tokenKind, isInclusive);\n}\n}\nprivate STNode parseRecordField(SyntaxKind nextTokenKind, boolean isInclusive, STNode metadata) {\nif (nextTokenKind != SyntaxKind.READONLY_KEYWORD) {\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\nSTNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, metadata, type);\nendContext();\nreturn fieldOrRestDesc;\n}\nSTNode type;\nSTNode fieldOrRestDesc;\nSTNode readOnlyQualifier;\nreadOnlyQualifier = parseReadonlyKeyword();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode fieldNameOrTypeDesc = parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME);\nif (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\ntype = fieldNameOrTypeDesc;\n} else {\nnextToken = peek();\nswitch (nextToken.kind) {\ncase SEMICOLON_TOKEN:\ncase EQUAL_TOKEN:\ntype = createBuiltinSimpleNameReference(readOnlyQualifier);\nreadOnlyQualifier = STNodeFactory.createEmptyNode();\nSTNode fieldName = ((STSimpleNameReferenceNode) fieldNameOrTypeDesc).name;\nreturn parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName);\ndefault:\ntype = fieldNameOrTypeDesc;\nbreak;\n}\n}\n} else if (isTypeStartingToken(nextToken.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\n} else {\nreadOnlyQualifier = createBuiltinSimpleNameReference(readOnlyQualifier);\ntype = parseComplexTypeDescriptor(readOnlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);\nreadOnlyQualifier = STNodeFactory.createEmptyNode();\n}\nfieldOrRestDesc = parseIndividualRecordField(metadata, readOnlyQualifier, type);\nendContext();\nreturn fieldOrRestDesc;\n}\nprivate STNode parseFieldDescriptor(boolean isInclusive, STNode metadata, STNode type) {\nif (isInclusive) {\nSTNode readOnlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseIndividualRecordField(metadata, readOnlyQualifier, type);\n} else {\nreturn parseFieldOrRestDescriptorRhs(metadata, type);\n}\n}\nprivate STNode parseIndividualRecordField(STNode metadata, STNode readOnlyQualifier, STNode type) {\nSTNode fieldName = parseVariableName();\nreturn parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName);\n}\n/**\n* 
Parse type reference.\n* type-reference := identifier | qualified-identifier\n*\n* @return Type reference node\n*/\nprivate STNode parseTypeReference() {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE, false);\n}\nprivate STNode parseTypeReference(boolean isInConditionalExpr) {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE, isInConditionalExpr);\n}\n/**\n* Parse identifier or qualified identifier.\n*\n* @return Identifier node\n*/\nprivate STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) {\nreturn parseQualifiedIdentifier(currentCtx, false);\n}\nprivate STNode parseQualifiedIdentifier(ParserRuleContext currentCtx, boolean isInConditionalExpr) {\nSTToken token = peek();\nSTNode typeRefOrPkgRef;\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\ntypeRefOrPkgRef = consume();\n} else {\nSolution sol = recover(token, currentCtx, isInConditionalExpr);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nif (sol.tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseQualifiedIdentifier(currentCtx, isInConditionalExpr);\n}\ntypeRefOrPkgRef = sol.recoveredNode;\n}\nreturn parseQualifiedIdentifier(typeRefOrPkgRef, isInConditionalExpr);\n}\n/**\n* Parse identifier or qualified identifier, given the starting identifier.\n*\n* @param identifier Starting identifier\n* @return Parse node\n*/\nprivate STNode parseQualifiedIdentifier(STNode identifier, boolean isInConditionalExpr) {\nSTToken nextToken = peek(1);\nif (nextToken.kind != SyntaxKind.COLON_TOKEN) {\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\nSTToken nextNextToken = peek(2);\nswitch (nextNextToken.kind) {\ncase IDENTIFIER_TOKEN:\nSTToken colon = consume();\nSTNode varOrFuncName = consume();\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);\ncase MAP_KEYWORD:\ncolon = consume();\nSTToken mapKeyword = consume();\nSTNode refName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, refName);\ncase COLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseQualifiedIdentifier(identifier, isInConditionalExpr);\ndefault:\nif (isInConditionalExpr) {\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\ncolon = consume();\nvarOrFuncName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);\n}\n}\n/**\n* Parse RHS of a field or rest type descriptor.\n*\n* @param metadata Metadata\n* @param type Type descriptor\n* @return Parsed node\n*/\nprivate STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) {\nSTToken token = peek();\nreturn parseFieldOrRestDescriptorRhs(token.kind, metadata, type);\n}\nprivate STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) {\nswitch (kind) {\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken);\ncase IDENTIFIER_TOKEN:\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseIndividualRecordField(metadata, readonlyQualifier, type);\ndefault:\nSTToken token = 
peek();\nSolution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type);\n}\n}\n/**\n*
\n* Parse field descriptor rhs.\n*
\n*\n* @param metadata Metadata\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptorRhs(STNode metadata, STNode readonlyQualifier, STNode type, STNode fieldName) {\nSTToken token = peek();\nreturn parseFieldDescriptorRhs(token.kind, metadata, readonlyQualifier, type, fieldName);\n}\n/**\n*
\n* Parse field descriptor rhs.\n*\n* field-descriptor := [? | default-value] ;\n* default-value := = expression\n*
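\n* e.g. (illustrative): after the field name, the rhs is one of ; or ?; or = 0;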
\n*\n* @param kind Kind of the next token\n* @param metadata Metadata\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode readonlyQualifier, STNode type,\nSTNode fieldName) {\nswitch (kind) {\ncase SEMICOLON_TOKEN:\nSTNode questionMarkToken = STNodeFactory.createEmptyNode();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName,\nquestionMarkToken, semicolonToken);\ncase QUESTION_MARK_TOKEN:\nquestionMarkToken = parseQuestionMark();\nsemicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName,\nquestionMarkToken, semicolonToken);\ncase EQUAL_TOKEN:\nSTNode equalsToken = parseAssignOp();\nSTNode expression = parseExpression();\nsemicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, readonlyQualifier, type, fieldName,\nequalsToken, expression, semicolonToken);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, readonlyQualifier,\ntype, fieldName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldDescriptorRhs(solution.tokenKind, metadata, readonlyQualifier, type, fieldName);\n}\n}\n/**\n* Parse question mark.\n*\n* @return Parsed node\n*/\nprivate STNode parseQuestionMark() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.QUESTION_MARK);\nreturn sol.recoveredNode;\n}\n}\n/*\n* Statements\n*/\n/**\n* Parse statements, until an end of a block is reached.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatements() {\nArrayList stmts = new ArrayList<>();\nreturn parseStatements(stmts);\n}\nprivate STNode parseStatements(ArrayList stmts) {\nwhile (!isEndOfStatements()) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\naddInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE);\nbreak;\n}\nstmts.add(stmt);\n}\nreturn STNodeFactory.createNodeList(stmts);\n}\n/**\n* Parse a single statement.\n*\n* @return Parsed node\n*/\nprotected STNode parseStatement() {\nSTToken token = peek();\nreturn parseStatement(token.kind, 1);\n}\nprivate STNode parseStatement(SyntaxKind tokenKind, int nextTokenIndex) {\nSTNode annots = null;\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase SEMICOLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseStatement();\ncase AT_TOKEN:\nannots = parseAnnotations(tokenKind);\ntokenKind = peek().kind;\nbreak;\ncase FINAL_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase PANIC_KEYWORD:\ncase CONTINUE_KEYWORD:\ncase BREAK_KEYWORD:\ncase RETURN_KEYWORD:\ncase TYPE_KEYWORD:\ncase LOCK_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase FORK_KEYWORD:\ncase FOREACH_KEYWORD:\ncase XMLNS_KEYWORD:\ncase TRANSACTION_KEYWORD:\ncase RETRY_KEYWORD:\ncase ROLLBACK_KEYWORD:\ncase MATCH_KEYWORD:\ncase CHECK_KEYWORD:\ncase FAIL_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase TRAP_KEYWORD:\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase COMMIT_KEYWORD:\ncase WORKER_KEYWORD:\nbreak;\ndefault:\nif (isTypeStartingToken(tokenKind)) {\nbreak;\n}\nif (isValidExpressionStart(tokenKind, 
nextTokenIndex)) {\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STATEMENT, nextTokenIndex);\nif (solution.action == Action.KEEP) {\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatement(solution.tokenKind, nextTokenIndex);\n}\nreturn parseStatement(tokenKind, annots, nextTokenIndex);\n}\nprivate STNode getAnnotations(STNode nullbaleAnnot) {\nif (nullbaleAnnot != null) {\nreturn nullbaleAnnot;\n}\nreturn STNodeFactory.createEmptyNodeList();\n}\nprivate STNode parseStatement(STNode annots) {\nreturn parseStatement(peek().kind, annots, 1);\n}\n/**\n* Parse a single statement, given the next token kind.\n*\n* @param tokenKind Next token kind\n* @return Parsed node\n*/\nprivate STNode parseStatement(SyntaxKind tokenKind, STNode annots, int nextTokenIndex) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\naddInvalidNodeToNextToken(annots, DiagnosticErrorCode.ERROR_INVALID_ANNOTATIONS);\nreturn null;\ncase SEMICOLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseStatement(annots);\ncase FINAL_KEYWORD:\nSTNode finalKeyword = parseFinalKeyword();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword, false);\ncase IF_KEYWORD:\nreturn parseIfElseBlock();\ncase WHILE_KEYWORD:\nreturn parseWhileStatement();\ncase PANIC_KEYWORD:\nreturn parsePanicStatement();\ncase CONTINUE_KEYWORD:\nreturn parseContinueStatement();\ncase BREAK_KEYWORD:\nreturn parseBreakStatement();\ncase RETURN_KEYWORD:\nreturn parseReturnStatement();\ncase TYPE_KEYWORD:\nreturn parseLocalTypeDefinitionStatement(getAnnotations(annots));\ncase LOCK_KEYWORD:\nreturn parseLockStatement();\ncase OPEN_BRACE_TOKEN:\nreturn parseStatementStartsWithOpenBrace();\ncase WORKER_KEYWORD:\nreturn parseNamedWorkerDeclaration(getAnnotations(annots));\ncase FORK_KEYWORD:\nreturn parseForkStatement();\ncase FOREACH_KEYWORD:\nreturn parseForEachStatement();\ncase START_KEYWORD:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase FAIL_KEYWORD:\ncase TRAP_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase FROM_KEYWORD:\ncase COMMIT_KEYWORD:\nreturn parseExpressionStatement(tokenKind, getAnnotations(annots));\ncase XMLNS_KEYWORD:\nreturn parseXMLNamespaceDeclaration(false);\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionStatement();\ncase RETRY_KEYWORD:\nreturn parseRetryStatement();\ncase ROLLBACK_KEYWORD:\nreturn parseRollbackStatement();\ncase OPEN_BRACKET_TOKEN:\nreturn parseStatementStartsWithOpenBracket(getAnnotations(annots), false);\ncase FUNCTION_KEYWORD:\ncase OPEN_PAREN_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_KEYWORD:\ncase XML_KEYWORD:\nreturn parseStmtStartsWithTypeOrExpr(tokenKind, getAnnotations(annots));\ncase MATCH_KEYWORD:\nreturn parseMatchStatement();\ndefault:\nif (isValidExpressionStart(tokenKind, nextTokenIndex)) {\nreturn parseStatementStartWithExpr(getAnnotations(annots));\n}\nif (isTypeStartingToken(tokenKind)) {\nfinalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword, false);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots, nextTokenIndex);\nif (solution.action == Action.KEEP) {\nfinalKeyword = 
STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword, false);\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatement(solution.tokenKind, annots, nextTokenIndex - 1);\n}\n}\n/**\n*
\n* Parse a variable declaration. A variable declaration can be at local or module level.\n*\n* local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt\n* local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ;\n* local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ;\n*
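\n*\n* e.g. (illustrative): final int x = getValue(); is a local-init-var-decl-stmt, while\n* string s; is a local-no-init-var-decl-stmt.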
\n*\n* @param annots Annotations or metadata\n* @param finalKeyword Final keyword\n* @return Parsed node\n*/\nprivate STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) {\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode typeBindingPattern = parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, isModuleVar);\n}\n/**\n* Parse final keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseFinalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FINAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FINAL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse the right hand side of a variable declaration statement.\n*
\n* \n* var-decl-rhs := ; | = action-or-expr ;\n* \n*\n* @param metadata metadata\n* @param finalKeyword Final keyword\n* @param typedBindingPattern Typed binding pattern\n* @return Parsed node\n*/\nprivate STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode typedBindingPattern,\nboolean isModuleVar) {\nSTToken token = peek();\nreturn parseVarDeclRhs(token.kind, metadata, finalKeyword, typedBindingPattern, isModuleVar);\n}\n/**\n* Parse the right hand side of a variable declaration statement, given the\n* next token kind.\n*\n* @param tokenKind Next token kind\n* @param metadata Metadata\n* @param finalKeyword Final keyword\n* @param typedBindingPattern Typed binding pattern\n* @param isModuleVar flag indicating whether the var is module level\n* @return Parsed node\n*/\nprivate STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword,\nSTNode typedBindingPattern, boolean isModuleVar) {\nSTNode assign;\nSTNode expr;\nSTNode semicolon;\nswitch (tokenKind) {\ncase EQUAL_TOKEN:\nassign = parseAssignOp();\nif (isModuleVar) {\nexpr = parseExpression();\n} else {\nexpr = parseActionOrExpression();\n}\nsemicolon = parseSemicolon();\nbreak;\ncase SEMICOLON_TOKEN:\nassign = STNodeFactory.createEmptyNode();\nexpr = STNodeFactory.createEmptyNode();\nsemicolon = parseSemicolon();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword,\ntypedBindingPattern, isModuleVar);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, typedBindingPattern, isModuleVar);\n}\nendContext();\nif (isModuleVar) {\nreturn STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern,\nassign, expr, semicolon);\n}\nreturn STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern, assign, expr,\nsemicolon);\n}\n/**\n*
\n* Parse the RHS portion of the assignment.\n*
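\n* e.g. (illustrative): in x = foo(); this method parses the = foo(); portion; the lhs\n* expression has already been consumed by the caller.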
\n* assignment-stmt-rhs := = action-or-expr ;\n*\n* @param lvExpr LHS expression\n* @return Parsed node\n*/\nprivate STNode parseAssignmentStmtRhs(STNode lvExpr) {\nSTNode assign = parseAssignOp();\nSTNode expr = parseActionOrExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nif (lvExpr.kind == SyntaxKind.FUNCTION_CALL &&\nisPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) lvExpr)) {\nlvExpr = getBindingPattern(lvExpr);\n}\nboolean lvExprValid = isValidLVExpr(lvExpr);\nif (!lvExprValid) {\nSTNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nlvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,\nDiagnosticErrorCode.ERROR_INVALID_EXPR_IN_ASSIGNMENT_LHS);\n}\nreturn STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon);\n}\n/*\n* Expressions\n*/\n/**\n* Parse expression. This will start parsing expressions from the lowest level of precedence.\n*\n* @return Parsed node\n*/\nprotected STNode parseExpression() {\nSTNode actionOrExpression = parseExpression(DEFAULT_OP_PRECEDENCE, true, false);\nif (isAction(actionOrExpression)) {\nactionOrExpression = SyntaxErrors.addDiagnostic(actionOrExpression,\nDiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND);\n}\nreturn actionOrExpression;\n}\n/**\n* Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence.\n*\n* @return Parsed node\n*/\nprivate STNode parseActionOrExpression() {\nreturn parseExpression(DEFAULT_OP_PRECEDENCE, true, true);\n}\nprivate STNode parseActionOrExpressionInLhs(SyntaxKind tokenKind, STNode annots) {\nreturn parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, annots, false, true, false);\n}\n/**\n* Parse expression.\n*\n* @param isRhsExpr Flag indicating whether this is a rhs expression\n* @return Parsed node\n*/\nprivate STNode parseExpression(boolean isRhsExpr) {\nreturn parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false);\n}\nprivate boolean isValidLVExpr(STNode expression) {\nswitch (expression.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase FUNCTIONAL_BINDING_PATTERN:\nreturn true;\ncase FIELD_ACCESS:\nreturn isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression);\ncase INDEXED_EXPRESSION:\nreturn isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression);\ndefault:\nreturn (expression instanceof STMissingToken);\n}\n}\nprivate boolean isValidLVMemberExpr(STNode expression) {\nswitch (expression.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nreturn true;\ncase FIELD_ACCESS:\nreturn isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression);\ncase INDEXED_EXPRESSION:\nreturn isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression);\ncase BRACED_EXPRESSION:\nreturn isValidLVMemberExpr(((STBracedExpressionNode) expression).expression);\ndefault:\nreturn (expression instanceof STMissingToken);\n}\n}\n/**\n* Parse an expression that has an equal or higher precedence than a given level.\n*\n* @param precedenceLevel Precedence level of expression to be parsed\n* @param isRhsExpr Flag indicating whether this is a rhs expression\n* @param allowActions Flag indicating whether the current context support actions\n* @return Parsed node\n*/\nprivate STNode parseExpression(OperatorPrecedence 
precedenceLevel, boolean isRhsExpr, boolean allowActions) {\nreturn parseExpression(precedenceLevel, isRhsExpr, allowActions, false);\n}\nprivate STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nSTToken token = peek();\nreturn parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions, false, isInConditionalExpr);\n}\nprivate STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr,\nboolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) {\nSTNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions, isInConditionalExpr);\nreturn parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr);\n}\nprivate STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, STNode annots,\nboolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\nreturn parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, false, isInConditionalExpr);\n}\n/**\n* Parse terminal expressions. A terminal expression has the highest precedence level\n* out of all expressions, and will be at the leaves of an expression tree.\n*\n* @param annots Annotations\n* @param isRhsExpr Is a rhs expression\n* @param allowActions Allow actions\n* @return Parsed node\n*/\nprivate STNode parseTerminalExpression(STNode annots, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nreturn parseTerminalExpression(peek().kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\n}\nprivate STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nSTNode annots;\nif (kind == SyntaxKind.AT_TOKEN) {\nannots = parseAnnotations();\nkind = peek().kind;\n} else {\nannots = STNodeFactory.createEmptyNodeList();\n}\nSTNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\nif (!isNodeListEmpty(annots) && expr.kind != SyntaxKind.START_ACTION) {\nexpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(expr, annots,\nDiagnosticErrorCode.ERROR_ANNOTATIONS_ATTACHED_TO_EXPRESSION);\n}\nreturn expr;\n}\nprivate STNode parseTerminalExpression(SyntaxKind kind, STNode annots, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nswitch (kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn parseBasicLiteral();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF, isInConditionalExpr);\ncase OPEN_PAREN_TOKEN:\nreturn parseBracedExpression(isRhsExpr, allowActions);\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\nreturn parseCheckExpression(isRhsExpr, allowActions, isInConditionalExpr);\ncase FAIL_KEYWORD:\nreturn parseFailExpression(isRhsExpr, allowActions, isInConditionalExpr);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingConstructorExpr();\ncase TYPEOF_KEYWORD:\nreturn parseTypeofExpression(isRhsExpr, isInConditionalExpr);\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\nreturn parseUnaryExpression(isRhsExpr, isInConditionalExpr);\ncase TRAP_KEYWORD:\nreturn parseTrapExpression(isRhsExpr, allowActions, isInConditionalExpr);\ncase 
OPEN_BRACKET_TOKEN:\nreturn parseListConstructorExpr();\ncase LT_TOKEN:\nreturn parseTypeCastExpr(isRhsExpr, allowActions, isInConditionalExpr);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\ncase FROM_KEYWORD:\nreturn parseTableConstructorOrQuery(isRhsExpr);\ncase ERROR_KEYWORD:\nreturn parseErrorConstructorExpr();\ncase LET_KEYWORD:\nreturn parseLetExpression(isRhsExpr);\ncase BACKTICK_TOKEN:\nreturn parseTemplateExpression();\ncase XML_KEYWORD:\nSTToken nextNextToken = getNextNextToken(kind);\nif (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseXMLTemplateExpression();\n}\nreturn parseSimpleTypeDescriptor();\ncase STRING_KEYWORD:\nnextNextToken = getNextNextToken(kind);\nif (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseStringTemplateExpression();\n}\nreturn parseSimpleTypeDescriptor();\ncase FUNCTION_KEYWORD:\nreturn parseExplicitFunctionExpression(annots, isRhsExpr);\ncase AT_TOKEN:\nbreak;\ncase NEW_KEYWORD:\nreturn parseNewExpression();\ncase START_KEYWORD:\nreturn parseStartAction(annots);\ncase FLUSH_KEYWORD:\nreturn parseFlushAction();\ncase LEFT_ARROW_TOKEN:\nreturn parseReceiveAction();\ncase WAIT_KEYWORD:\nreturn parseWaitAction();\ncase COMMIT_KEYWORD:\nreturn parseCommitAction();\ncase TRANSACTIONAL_KEYWORD:\nreturn parseTransactionalExpression();\ncase SERVICE_KEYWORD:\nreturn parseServiceConstructorExpression(annots);\ncase BASE16_KEYWORD:\ncase BASE64_KEYWORD:\nreturn parseByteArrayLiteral(kind);\ndefault:\nif (isSimpleType(kind)) {\nreturn parseSimpleTypeDescriptor();\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, annots, isRhsExpr, allowActions,\nisInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nif (kind == SyntaxKind.XML_KEYWORD) {\nreturn parseXMLTemplateExpression();\n}\nreturn parseStringTemplateExpression();\n}\nswitch (solution.tokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(solution.recoveredNode, isInConditionalExpr);\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn solution.recoveredNode;\ndefault:\nreturn parseTerminalExpression(solution.tokenKind, annots, isRhsExpr, allowActions,\nisInConditionalExpr);\n}\n}\nprivate boolean isValidExprStart(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase IDENTIFIER_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase TYPEOF_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\ncase TRAP_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase LT_TOKEN:\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\ncase FROM_KEYWORD:\ncase ERROR_KEYWORD:\ncase LET_KEYWORD:\ncase BACKTICK_TOKEN:\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase AT_TOKEN:\ncase NEW_KEYWORD:\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase SERVICE_KEYWORD:\nreturn true;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n*
\n* Parse a new expression.\n*\n* new-expr := explicit-new-expr | implicit-new-expr\n* explicit-new-expr := new type-descriptor ( arg-list )\n* implicit-new-expr := new [( arg-list )]\n*
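\n*\n* e.g. (illustrative): new Counter(1) is an explicit-new-expr, while new and new (1) are\n* implicit-new-exprs.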
\n*\n* @return Parsed NewExpression node.\n*/\nprivate STNode parseNewExpression() {\nSTNode newKeyword = parseNewKeyword();\nreturn parseNewKeywordRhs(newKeyword);\n}\n/**\n*
\n* Parse `new` keyword.\n*
\n*\n* @return Parsed NEW_KEYWORD Token.\n*/\nprivate STNode parseNewKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.NEW_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.NEW_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseNewKeywordRhs(STNode newKeyword) {\nSTNode token = peek();\nreturn parseNewKeywordRhs(token.kind, newKeyword);\n}\n/**\n*
\n* Parse an implicit or explicit new expression.\n*
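\n* e.g. (illustrative, per the switch below): a following open-paren starts an implicit-new-expr\n* with arguments; an identifier or the object/stream keyword starts the type-descriptor of an\n* explicit-new-expr; a semicolon yields a bare implicit-new-expr.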
\n*\n* @param kind next token kind.\n* @param newKeyword parsed node for `new` keyword.\n* @return Parsed new-expression node.\n*/\nprivate STNode parseNewKeywordRhs(SyntaxKind kind, STNode newKeyword) {\nswitch (kind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseImplicitNewRhs(newKeyword);\ncase SEMICOLON_TOKEN:\nbreak;\ncase IDENTIFIER_TOKEN:\ncase OBJECT_KEYWORD:\ncase STREAM_KEYWORD:\nreturn parseTypeDescriptorInNewExpr(newKeyword);\ndefault:\nbreak;\n}\nreturn STNodeFactory.createImplicitNewExpressionNode(newKeyword, STNodeFactory.createEmptyNode());\n}\n/**\n*
\n* Parse an Explicit New expression.\n*
\n* \n* explicit-new-expr := new type-descriptor ( arg-list )\n* \n*\n* @param newKeyword Parsed `new` keyword.\n* @return the Parsed Explicit New Expression.\n*/\nprivate STNode parseTypeDescriptorInNewExpr(STNode newKeyword) {\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_NEW_EXPR);\nSTNode parenthesizedArgsList = parseParenthesizedArgList();\nreturn STNodeFactory.createExplicitNewExpressionNode(newKeyword, typeDescriptor, parenthesizedArgsList);\n}\n/**\n*
\n* Parse an implicit-new-expr with arguments.\n*
\n*\n* @param newKeyword Parsed `new` keyword.\n* @return Parsed implicit-new-expr.\n*/\nprivate STNode parseImplicitNewRhs(STNode newKeyword) {\nSTNode implicitNewArgList = parseParenthesizedArgList();\nreturn STNodeFactory.createImplicitNewExpressionNode(newKeyword, implicitNewArgList);\n}\n/**\n*
\n* Parse the parenthesized argument list for a new-expr.\n*
\n*\n* @return Parsed parenthesized rhs of new-expr.\n*/\nprivate STNode parseParenthesizedArgList() {\nSTNode openParan = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode arguments = parseArgsList();\nSTNode closeParan = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesizedArgList(openParan, arguments, closeParan);\n}\n/**\n*
\n* Parse the right-hand-side of an expression.\n*
\n* expr-rhs := (binary-op expression\n* | dot identifier\n* | open-bracket expression close-bracket\n* )*\n*\n* @param precedenceLevel Precedence level of the expression that is being parsed currently\n* @param lhsExpr LHS expression of the expression\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @param allowActions Flag indicating whether the current context support actions\n* @return Parsed node\n*/\nprivate STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr,\nboolean allowActions) {\nreturn parseExpressionRhs(precedenceLevel, lhsExpr, isRhsExpr, allowActions, false, false);\n}\nprivate STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr,\nboolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) {\nSTToken token = peek();\nreturn parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard,\nisInConditionalExpr);\n}\n/**\n* Parse the right hand side of an expression given the next token kind.\n*\n* @param tokenKind Next token kind\n* @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently\n* @param lhsExpr LHS expression\n* @param isRhsExpr Flag indicating whether this is a rhs expr or not\n* @param allowActions Flag indicating whether to allow actions or not\n* @param isInMatchGuard Flag indicating whether this expression is in a match-guard\n* @return Parsed node\n*/\nprivate STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr,\nboolean isRhsExpr, boolean allowActions, boolean isInMatchGuard,\nboolean isInConditionalExpr) {\nif (isEndOfExpression(tokenKind, isRhsExpr, isInMatchGuard, lhsExpr.kind)) {\nreturn lhsExpr;\n}\nif (lhsExpr.kind == SyntaxKind.ASYNC_SEND_ACTION) {\nreturn lhsExpr;\n}\nif (!isValidExprRhsStart(tokenKind, lhsExpr.kind)) {\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr,\nisRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.ctx == ParserRuleContext.BINARY_OPERATOR) {\nSyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel);\nreturn parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions,\nisInMatchGuard, isInConditionalExpr);\n} else {\nreturn parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions,\nisInMatchGuard, isInConditionalExpr);\n}\n}\nif (tokenKind == SyntaxKind.GT_TOKEN && peek(2).kind == SyntaxKind.GT_TOKEN) {\nif (peek(3).kind == SyntaxKind.GT_TOKEN) {\ntokenKind = SyntaxKind.TRIPPLE_GT_TOKEN;\n} else {\ntokenKind = SyntaxKind.DOUBLE_GT_TOKEN;\n}\n}\nOperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind);\nif (currentPrecedenceLevel.isHigherThanOrEqual(nextOperatorPrecedence, allowActions)) {\nreturn lhsExpr;\n}\nSTNode newLhsExpr;\nSTNode operator;\nswitch (tokenKind) {\ncase OPEN_PAREN_TOKEN:\nnewLhsExpr = parseFuncCall(lhsExpr);\nbreak;\ncase OPEN_BRACKET_TOKEN:\nnewLhsExpr = parseMemberAccessExpr(lhsExpr, isRhsExpr);\nbreak;\ncase DOT_TOKEN:\nnewLhsExpr = parseFieldAccessOrMethodCall(lhsExpr, isInConditionalExpr);\nbreak;\ncase IS_KEYWORD:\nnewLhsExpr = parseTypeTestExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase RIGHT_ARROW_TOKEN:\nnewLhsExpr = 
parseRemoteMethodCallOrAsyncSendAction(lhsExpr, isRhsExpr);\nif (!allowActions) {\nnewLhsExpr = SyntaxErrors.addDiagnostic(newLhsExpr,\nDiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND);\n}\nbreak;\ncase SYNC_SEND_TOKEN:\nnewLhsExpr = parseSyncSendAction(lhsExpr);\nif (!allowActions) {\nnewLhsExpr = SyntaxErrors.addDiagnostic(newLhsExpr,\nDiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND);\n}\nbreak;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nnewLhsExpr = parseImplicitAnonFunc(lhsExpr, isRhsExpr);\nbreak;\ncase ANNOT_CHAINING_TOKEN:\nnewLhsExpr = parseAnnotAccessExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase OPTIONAL_CHAINING_TOKEN:\nnewLhsExpr = parseOptionalFieldAccessExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase QUESTION_MARK_TOKEN:\nnewLhsExpr = parseConditionalExpression(lhsExpr);\nbreak;\ncase DOT_LT_TOKEN:\nnewLhsExpr = parseXMLFilterExpression(lhsExpr);\nbreak;\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nnewLhsExpr = parseXMLStepExpression(lhsExpr);\nbreak;\ndefault:\nif (tokenKind == SyntaxKind.DOUBLE_GT_TOKEN) {\noperator = parseSignedRightShiftToken();\n} else if (tokenKind == SyntaxKind.TRIPPLE_GT_TOKEN) {\noperator = parseUnsignedRightShiftToken();\n} else {\noperator = parseBinaryOperator();\n}\nSTNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false, isInConditionalExpr);\nnewLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator,\nrhsExpr);\nbreak;\n}\nreturn parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions, isInMatchGuard,\nisInConditionalExpr);\n}\nprivate boolean isValidExprRhsStart(SyntaxKind tokenKind, SyntaxKind precedingNodeKind) {\nswitch (tokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn precedingNodeKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||\nprecedingNodeKind == SyntaxKind.SIMPLE_NAME_REFERENCE;\ncase DOT_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase IS_KEYWORD:\ncase RIGHT_ARROW_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\ncase SYNC_SEND_TOKEN:\ncase ANNOT_CHAINING_TOKEN:\ncase OPTIONAL_CHAINING_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase COLON_TOKEN:\ncase DOT_LT_TOKEN:\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nreturn true;\ndefault:\nreturn isBinaryOperator(tokenKind);\n}\n}\n/**\n* Parse member access expression.\n*\n* @param lhsExpr Container expression\n* @param isRhsExpr Is this is a rhs expression\n* @return Member access expression\n*/\nprivate STNode parseMemberAccessExpr(STNode lhsExpr, boolean isRhsExpr) {\nstartContext(ParserRuleContext.MEMBER_ACCESS_KEY_EXPR);\nSTNode openBracket = parseOpenBracket();\nSTNode keyExpr = parseMemberAccessKeyExprs(isRhsExpr);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nif (isRhsExpr && ((STNodeList) keyExpr).isEmpty()) {\nkeyExpr = STNodeFactory.createNodeList(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\ncloseBracket = SyntaxErrors.addDiagnostic(closeBracket,\nDiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);\n}\nreturn STNodeFactory.createIndexedExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket);\n}\n/**\n* Parse key expression of a member access expression. A type descriptor\n* that starts with a type-ref (e.g: T[a][b]) also goes through this\n* method.\n*
\n* key-expression := single-key-expression | multi-key-expression\n*\n* @param isRhsExpr Is this is a rhs expression\n* @return Key expression\n*/\nprivate STNode parseMemberAccessKeyExprs(boolean isRhsExpr) {\nList exprList = new ArrayList<>();\nSTNode keyExpr;\nSTNode keyExprEnd;\nwhile (!isEndOfTypeList(peek().kind)) {\nkeyExpr = parseKeyExpr(isRhsExpr);\nexprList.add(keyExpr);\nkeyExprEnd = parseMemberAccessKeyExprEnd();\nif (keyExprEnd == null) {\nbreak;\n}\nexprList.add(keyExprEnd);\n}\nreturn STNodeFactory.createNodeList(exprList);\n}\nprivate STNode parseKeyExpr(boolean isRhsExpr) {\nif (!isRhsExpr && peek().kind == SyntaxKind.ASTERISK_TOKEN) {\nreturn STNodeFactory.createBasicLiteralNode(SyntaxKind.ASTERISK_TOKEN, consume());\n}\nreturn parseExpression(isRhsExpr);\n}\nprivate STNode parseMemberAccessKeyExprEnd() {\nreturn parseMemberAccessKeyExprEnd(peek().kind);\n}\nprivate STNode parseMemberAccessKeyExprEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MEMBER_ACCESS_KEY_EXPR_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMemberAccessKeyExprEnd(solution.tokenKind);\n}\n}\n/**\n* Parse close bracket.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseBracket() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_BRACKET);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field access, xml required attribute access expressions or method call expression.\n*
\n* \n* field-access-expr := expression . field-name\n*\n* xml-required-attribute-access-expr := expression . xml-attribute-name\n*\n* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n*\n* method-call-expr := expression . method-name ( arg-list )\n*
\n*\n* @param lhsExpr Preceding expression of the field access or method call\n* @return One of field-access-expression or method-call-expression.\n*/\nprivate STNode parseFieldAccessOrMethodCall(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode dotToken = parseDotToken();\nSTToken token = peek();\nif (token.kind == SyntaxKind.MAP_KEYWORD || token.kind == SyntaxKind.START_KEYWORD) {\nSTNode methodName = getKeywordAsSimpleNameRef();\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, methodName, openParen, args,\ncloseParen);\n}\nSTNode fieldOrMethodName = parseFieldAccessIdentifier(isInConditionalExpr);\nif (fieldOrMethodName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nreturn STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);\n}\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args,\ncloseParen);\n}\nreturn STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);\n}\nprivate STNode getKeywordAsSimpleNameRef() {\nSTToken mapKeyword = consume();\nSTNode methodName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\nmethodName = STNodeFactory.createSimpleNameReferenceNode(methodName);\nreturn methodName;\n}\n/**\n*
\n* Parse braced expression.\n*
\n* braced-expr := ( expression )\n*\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @param allowActions Allow actions\n* @return Parsed node\n*/\nprivate STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nif (peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nreturn parseNilLiteralOrEmptyAnonFuncParamRhs(openParen);\n}\nstartContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\nSTNode expr;\nif (allowActions) {\nexpr = parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true);\n} else {\nexpr = parseExpression(isRhsExpr);\n}\nreturn parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, expr, isRhsExpr);\n}\nprivate STNode parseNilLiteralOrEmptyAnonFuncParamRhs(STNode openParen) {\nSTNode closeParen = parseCloseParenthesis();\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn STNodeFactory.createNilLiteralNode(openParen, closeParen);\n} else {\nSTNode params = STNodeFactory.createNodeList();\nSTNode anonFuncParam =\nSTNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nreturn anonFuncParam;\n}\n}\nprivate STNode parseBracedExprOrAnonFuncParamRhs(STNode openParen, STNode expr, boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseBracedExprOrAnonFuncParamRhs(nextToken.kind, openParen, expr, isRhsExpr);\n}\nprivate STNode parseBracedExprOrAnonFuncParamRhs(SyntaxKind nextTokenKind, STNode openParen, STNode expr,\nboolean isRhsExpr) {\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nswitch (nextTokenKind) {\ncase CLOSE_PAREN_TOKEN:\nbreak;\ncase COMMA_TOKEN:\nreturn parseImplicitAnonFunc(openParen, expr, isRhsExpr);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS, openParen,\nexpr, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nendContext();\nreturn solution.recoveredNode;\n}\nreturn parseBracedExprOrAnonFuncParamRhs(solution.tokenKind, openParen, expr, isRhsExpr);\n}\n}\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nif (isAction(expr)) {\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen);\n}\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen);\n}\n/**\n* Check whether a given node is an action node.\n*\n* @param node Node to check\n* @return true if the node is an action node. false otherwise\n*/\nprivate boolean isAction(STNode node) {\nswitch (node.kind) {\ncase REMOTE_METHOD_CALL_ACTION:\ncase BRACED_ACTION:\ncase CHECK_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\ncase FAIL_ACTION:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the given token is an end of a expression.\n*\n* @param tokenKind Token to check\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @return true if the token represents an end of a block. 
false otherwise\n*/\nprivate boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr, boolean isInMatchGuard,\nSyntaxKind precedingNodeKind) {\nif (!isRhsExpr) {\nif (isCompoundBinaryOperator(tokenKind)) {\nreturn true;\n}\nif (isInMatchGuard && tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn true;\n}\nreturn !isValidExprRhsStart(tokenKind, precedingNodeKind);\n}\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase PUBLIC_KEYWORD:\ncase EOF_TOKEN:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase EQUAL_TOKEN:\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\ncase AS_KEYWORD:\ncase IN_KEYWORD:\ncase BACKTICK_TOKEN:\ncase FROM_KEYWORD:\ncase WHERE_KEYWORD:\ncase LET_KEYWORD:\ncase SELECT_KEYWORD:\ncase DO_KEYWORD:\ncase COLON_TOKEN:\ncase ON_KEYWORD:\ncase CONFLICT_KEYWORD:\ncase LIMIT_KEYWORD:\ncase JOIN_KEYWORD:\ncase OUTER_KEYWORD:\ncase ORDER_KEYWORD:\ncase BY_KEYWORD:\ncase ASCENDING_KEYWORD:\ncase DESCENDING_KEYWORD:\nreturn true;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn isInMatchGuard;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n* Parse basic literals. It is assumed that we come here after validation.\n*\n* @return Parsed node\n*/\nprivate STNode parseBasicLiteral() {\nSTToken literalToken = consume();\nreturn STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken);\n}\n/**\n* Parse function call expression.\n* function-call-expr := function-reference ( arg-list )\n* function-reference := variable-reference\n*\n* @param identifier Function name\n* @return Function call expression\n*/\nprivate STNode parseFuncCall(STNode identifier) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen);\n}\n/**\n*
\n* Parse error constructor expression.\n*
\n* \n* error-constructor-expr := error ( arg-list )\n* \n*\n* @return Error constructor expression\n*/\nprivate STNode parseErrorConstructorExpr() {\nSTNode errorKeyword = parseErrorKeyword();\nreturn parseFuncCall(errorKeyword);\n}\n/**\n* Parse function call argument list.\n*\n* @return Parsed args list\n*/\nprivate STNode parseArgsList() {\nstartContext(ParserRuleContext.ARG_LIST);\nSTToken token = peek();\nif (isEndOfParametersList(token.kind)) {\nSTNode args = STNodeFactory.createEmptyNodeList();\nendContext();\nreturn args;\n}\nSTNode firstArg = parseArgument();\nif (firstArg == null) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode argsList = parseArgList(firstArg);\nendContext();\nreturn argsList;\n}\n/**\n* Parse follow up arguments.\n*\n* @param firstArg first argument in the list\n* @return the argument list\n*/\nprivate STNode parseArgList(STNode firstArg) {\nArrayList argsList = new ArrayList<>();\nargsList.add(firstArg);\nSyntaxKind lastValidArgKind = firstArg.kind;\nSTToken nextToken = peek();\nwhile (!isEndOfParametersList(nextToken.kind)) {\nSTNode argEnd = parseArgEnd(nextToken.kind);\nif (argEnd == null) {\nbreak;\n}\nnextToken = peek();\nif (isEndOfParametersList(nextToken.kind)) {\nint prevArgIndex = argsList.size() - 1;\nSTNode prevArg = argsList.remove(prevArgIndex);\nSTNode prevArgWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(prevArg, argEnd,\nDiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) argEnd).text());\nargsList.add(prevArgWithDiagnostics);\nbreak;\n}\nSTNode curArg = parseArgument(nextToken.kind);\nDiagnosticErrorCode errorCode = validateArgumentOrder(lastValidArgKind, curArg.kind);\nif (errorCode == null) {\nargsList.add(argEnd);\nargsList.add(curArg);\nlastValidArgKind = curArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argsList, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argsList, curArg, errorCode);\n}\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(argsList);\n}\nprivate DiagnosticErrorCode validateArgumentOrder(SyntaxKind prevArgKind, SyntaxKind curArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase POSITIONAL_ARG:\nbreak;\ncase NAMED_ARG:\nif (curArgKind == SyntaxKind.POSITIONAL_ARG) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_ARG:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nthrow new IllegalStateException(\"Invalid SyntaxKind in an argument\");\n}\nreturn errorCode;\n}\nprivate STNode parseArgEnd() {\nreturn parseArgEnd(peek().kind);\n}\nprivate STNode parseArgEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgEnd(solution.tokenKind);\n}\n}\n/**\n* Parse function call argument.\n*\n* @return Parsed argument node\n*/\nprivate STNode parseArgument() {\nSTToken token = peek();\nreturn parseArgument(token.kind);\n}\nprivate STNode parseArgument(SyntaxKind kind) {\nSTNode arg;\nswitch (kind) {\ncase ELLIPSIS_TOKEN:\nSTToken ellipsis = consume();\nSTNode expr = parseExpression();\narg = STNodeFactory.createRestArgumentNode(ellipsis, expr);\nbreak;\ncase IDENTIFIER_TOKEN:\narg = parseNamedOrPositionalArg(kind);\nbreak;\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nif (isValidExprStart(kind)) {\nexpr = 
parseExpression();\narg = STNodeFactory.createPositionalArgumentNode(expr);\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.ARG_START_OR_ARG_LIST_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgument(solution.tokenKind);\n}\nreturn arg;\n}\n/**\n* Parse positional or named arg. This method assumed peek()/peek(1)\n* is always an identifier.\n*\n* @return Parsed argument node\n*/\nprivate STNode parseNamedOrPositionalArg(SyntaxKind nextTokenKind) {\nSTNode argNameOrExpr = parseTerminalExpression(peek().kind, true, false, false);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nSTNode equal = parseAssignOp();\nSTNode valExpr = parseExpression();\nreturn STNodeFactory.createNamedArgumentNode(argNameOrExpr, equal, valExpr);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\nreturn STNodeFactory.createPositionalArgumentNode(argNameOrExpr);\ndefault:\nargNameOrExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, argNameOrExpr, false, false);\nreturn STNodeFactory.createPositionalArgumentNode(argNameOrExpr);\n}\n}\n/**\n* Parse object type descriptor.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectTypeDescriptor() {\nstartContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR);\nSTNode objectTypeQualifiers = parseObjectTypeQualifiers();\nSTNode objectKeyword = parseObjectKeyword();\nSTNode openBrace = parseOpenBrace();\nSTNode objectMembers = parseObjectMembers();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace,\nobjectMembers, closeBrace);\n}\n/**\n* Parse object type qualifiers.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectTypeQualifiers() {\nSTToken nextToken = peek();\nreturn parseObjectTypeQualifiers(nextToken.kind);\n}\nprivate STNode parseObjectTypeQualifiers(SyntaxKind kind) {\nSTNode firstQualifier;\nswitch (kind) {\ncase CLIENT_KEYWORD:\nfirstQualifier = parseClientKeyword();\nbreak;\ncase ABSTRACT_KEYWORD:\nfirstQualifier = parseAbstractKeyword();\nbreak;\ncase READONLY_KEYWORD:\nfirstQualifier = parseReadonlyKeyword();\nbreak;\ncase OBJECT_KEYWORD:\nreturn STNodeFactory.createEmptyNodeList();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_QUALIFIER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectTypeQualifiers(solution.tokenKind);\n}\nreturn parseObjectTypeNextQualifiers(firstQualifier);\n}\nprivate STNode parseObjectTypeNextQualifiers(STNode firstQualifier) {\nList qualifiers = new ArrayList<>();\nqualifiers.add(firstQualifier);\nfor (int i = 0; i < 2; i++) {\nSTNode nextToken = peek();\nif (isNodeWithSyntaxKindInList(qualifiers, nextToken.kind)) {\nnextToken = consume();\nupdateLastNodeInListWithInvalidNode(qualifiers, nextToken,\nDiagnosticErrorCode.ERROR_SAME_OBJECT_TYPE_QUALIFIER);\ncontinue;\n}\nSTNode nextQualifier;\nswitch (nextToken.kind) {\ncase CLIENT_KEYWORD:\nnextQualifier = parseClientKeyword();\nbreak;\ncase ABSTRACT_KEYWORD:\nnextQualifier = parseAbstractKeyword();\nbreak;\ncase READONLY_KEYWORD:\nnextQualifier = parseReadonlyKeyword();\nbreak;\ncase OBJECT_KEYWORD:\ndefault:\nreturn STNodeFactory.createNodeList(qualifiers);\n}\nqualifiers.add(nextQualifier);\n}\nreturn STNodeFactory.createNodeList(qualifiers);\n}\n/**\n* Parse client keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseClientKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLIENT_KEYWORD) 
{\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse abstract keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAbstractKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ABSTRACT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse object keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OBJECT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse object members.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectMembers() {\nArrayList objectMembers = new ArrayList<>();\nwhile (!isEndOfObjectTypeNode()) {\nstartContext(ParserRuleContext.OBJECT_MEMBER);\nSTNode member = parseObjectMember(peek().kind);\nendContext();\nif (member == null) {\nbreak;\n}\nobjectMembers.add(member);\n}\nreturn STNodeFactory.createNodeList(objectMembers);\n}\nprivate STNode parseObjectMember() {\nSTToken nextToken = peek();\nreturn parseObjectMember(nextToken.kind);\n}\nprivate STNode parseObjectMember(SyntaxKind nextTokenKind) {\nSTNode metadata;\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\ncase REMOTE_KEYWORD:\ncase FUNCTION_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nmetadata = createEmptyMetadata();\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMember(solution.tokenKind);\n}\nreturn parseObjectMember(nextTokenKind, metadata);\n}\nprivate STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) {\nSTNode member;\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\nSTNode asterisk = consume();\nSTNode type = parseTypeReference();\nSTNode semicolonToken = parseSemicolon();\nmember = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);\nbreak;\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\nSTNode visibilityQualifier = parseObjectMemberVisibility();\nmember = parseObjectMethodOrField(metadata, visibilityQualifier);\nbreak;\ncase REMOTE_KEYWORD:\nmember = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode());\nbreak;\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nmember = parseObjectMethod(metadata, STNodeFactory.createEmptyNode(), STNodeFactory.createEmptyNode());\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nmember = parseObjectField(metadata, STNodeFactory.createEmptyNode());\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMember(solution.tokenKind);\n}\nreturn member;\n}\nprivate STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) {\nSTToken nextToken = peek(1);\nSTToken nextNextToken = peek(2);\nreturn parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers);\n}\n/**\n* Parse an 
object member, given the visibility modifier. Object member can have\n* only one visibility qualifier. This mean the methodQualifiers list can have\n* one qualifier at-most.\n*\n* @param visibilityQualifier Visibility qualifier. A modifier can be\n* a syntax node with either 'PUBLIC' or 'PRIVATE'.\n* @param nextTokenKind Next token kind\n* @param nextNextTokenKind Kind of the token after the\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifiers\n* @return Parse object member node\n*/\nprivate STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata,\nSTNode visibilityQualifier) {\nswitch (nextTokenKind) {\ncase REMOTE_KEYWORD:\nSTNode remoteKeyword = parseRemoteKeyword();\nreturn parseObjectMethod(metadata, visibilityQualifier, remoteKeyword);\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nremoteKeyword = STNodeFactory.createEmptyNode();\nreturn parseObjectMethod(metadata, visibilityQualifier, remoteKeyword);\ncase IDENTIFIER_TOKEN:\nif (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseObjectField(metadata, visibilityQualifier);\n}\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nreturn parseObjectField(metadata, visibilityQualifier);\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata,\nvisibilityQualifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifier);\n}\n/**\n* Parse object visibility. Visibility can be public or private.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectMemberVisibility() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseRemoteKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.REMOTE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseObjectField(STNode metadata, STNode methodQualifiers) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.READONLY_KEYWORD) {\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);\n}\nSTNode type;\nSTNode readonlyQualifier = parseReadonlyKeyword();\nnextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode fieldNameOrTypeDesc = parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME);\nif (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\ntype = fieldNameOrTypeDesc;\n} else {\nnextToken = peek();\nswitch (nextToken.kind) {\ncase SEMICOLON_TOKEN:\ncase EQUAL_TOKEN:\ntype = createBuiltinSimpleNameReference(readonlyQualifier);\nreadonlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type,\nfieldNameOrTypeDesc);\ndefault:\ntype = fieldNameOrTypeDesc;\nbreak;\n}\n}\n} else if (isTypeStartingToken(nextToken.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\n} else {\nreadonlyQualifier = 
createBuiltinSimpleNameReference(readonlyQualifier);\ntype = parseComplexTypeDescriptor(readonlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);\nreadonlyQualifier = STNodeFactory.createEmptyNode();\n}\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);\n}\n/**\n* Parse object field rhs, and complete the object field parsing. Returns the parsed object field.\n*\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed object field\n*/\nprivate STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode readonlyQualifier,\nSTNode type, STNode fieldName) {\nSTToken nextToken = peek();\nreturn parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, readonlyQualifier, type, fieldName);\n}\n/**\n* Parse object field rhs, and complete the object field parsing. Returns the parsed object field.\n*\n* @param nextTokenKind Kind of the next token\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed object field\n*/\nprivate STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier,\nSTNode readonlyQualifier, STNode type, STNode fieldName) {\nSTNode equalsToken;\nSTNode expression;\nSTNode semicolonToken;\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nequalsToken = STNodeFactory.createEmptyNode();\nexpression = STNodeFactory.createEmptyNode();\nsemicolonToken = parseSemicolon();\nbreak;\ncase EQUAL_TOKEN:\nequalsToken = parseAssignOp();\nexpression = parseExpression();\nsemicolonToken = parseSemicolon();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier,\nreadonlyQualifier, type, fieldName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, readonlyQualifier, type,\nfieldName);\n}\nreturn STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, readonlyQualifier, type, fieldName,\nequalsToken, expression, semicolonToken);\n}\nprivate STNode parseObjectMethod(STNode metadata, STNode visibilityQualifier, STNode remoteKeyword) {\nreturn parseFuncDefOrFuncTypeDesc(metadata, true, visibilityQualifier, remoteKeyword, null);\n}\n/**\n* Parse if-else statement.\n* \n* if-else-stmt := if expression block-stmt [else-block]\n* \n*\n* @return If-else block\n*/\nprivate STNode parseIfElseBlock() {\nstartContext(ParserRuleContext.IF_BLOCK);\nSTNode ifKeyword = parseIfKeyword();\nSTNode condition = parseExpression();\nSTNode ifBody = parseBlockNode();\nendContext();\nSTNode elseBody = parseElseBlock();\nreturn STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody);\n}\n/**\n* Parse if-keyword.\n*\n* @return Parsed if-keyword node\n*/\nprivate STNode parseIfKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IF_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IF_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse else-keyword.\n*\n* @return Parsed else keyword node\n*/\nprivate STNode parseElseKeyword() {\nSTToken token = peek();\nif (token.kind == 
SyntaxKind.ELSE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ELSE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse block node.\n* \n* block-stmt := { sequence-stmt }\n* sequence-stmt := statement*\n* \n*\n* @return Parse block node\n*/\nprivate STNode parseBlockNode() {\nstartContext(ParserRuleContext.BLOCK_STMT);\nSTNode openBrace = parseOpenBrace();\nSTNode stmts = parseStatements();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace);\n}\n/**\n* Parse else block.\n* else-block := else (if-else-stmt | block-stmt)\n*\n* @return Else block\n*/\nprivate STNode parseElseBlock() {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ELSE_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode elseKeyword = parseElseKeyword();\nSTNode elseBody = parseElseBody();\nreturn STNodeFactory.createElseBlockNode(elseKeyword, elseBody);\n}\n/**\n* Parse else node body.\n* else-body := if-else-stmt | block-stmt\n*\n* @return Else node body\n*/\nprivate STNode parseElseBody() {\nSTToken nextToken = peek();\nreturn parseElseBody(nextToken.kind);\n}\nprivate STNode parseElseBody(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IF_KEYWORD:\nreturn parseIfElseBlock();\ncase OPEN_BRACE_TOKEN:\nreturn parseBlockNode();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ELSE_BODY);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseElseBody(solution.tokenKind);\n}\n}\n/**\n* Parse while statement.\n* while-stmt := while expression block-stmt\n*\n* @return While statement\n*/\nprivate STNode parseWhileStatement() {\nstartContext(ParserRuleContext.WHILE_BLOCK);\nSTNode whileKeyword = parseWhileKeyword();\nSTNode condition = parseExpression();\nSTNode whileBody = parseBlockNode();\nendContext();\nreturn STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody);\n}\n/**\n* Parse while-keyword.\n*\n* @return While-keyword node\n*/\nprivate STNode parseWhileKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WHILE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WHILE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse panic statement.\n* panic-stmt := panic expression ;\n*\n* @return Panic statement\n*/\nprivate STNode parsePanicStatement() {\nstartContext(ParserRuleContext.PANIC_STMT);\nSTNode panicKeyword = parsePanicKeyword();\nSTNode expression = parseExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon);\n}\n/**\n* Parse panic-keyword.\n*\n* @return Panic-keyword node\n*/\nprivate STNode parsePanicKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PANIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PANIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse check expression. This method is used to parse both check expression\n* as well as check action.\n*\n*
\n* \n* checking-expr := checking-keyword expression\n* checking-action := checking-keyword action\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Is rhs expression\n* @return Check expression node\n*/\nprivate STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode checkingKeyword = parseCheckingKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr);\n} else {\nreturn STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr);\n}\n}\n/**\n* Parse checking keyword.\n*
\n* \n* checking-keyword := check | checkpanic\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseCheckingKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse fail expression. This method is used to parse both fail expression\n* as well as fail action.\n*\n*
\n* \n* fail-expr := fail-keyword expression\n* fail-action := fail-keyword action\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Is rhs expression\n* @return Fail expression node\n*/\nprivate STNode parseFailExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode failKeyword = parseFailKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createFailExpressionNode(SyntaxKind.FAIL_ACTION, failKeyword, expr);\n} else {\nreturn STNodeFactory.createFailExpressionNode(SyntaxKind.FAIL_EXPRESSION, failKeyword, expr);\n}\n}\n/**\n* Parse fail keyword.\n*
\n* \n* fail-keyword := fail\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseFailKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FAIL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FAIL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*\n* Parse continue statement.\n* continue-stmt := continue ; \n*\n* @return continue statement\n*/\nprivate STNode parseContinueStatement() {\nstartContext(ParserRuleContext.CONTINUE_STATEMENT);\nSTNode continueKeyword = parseContinueKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createContinueStatementNode(continueKeyword, semicolon);\n}\n/**\n* Parse continue-keyword.\n*\n* @return continue-keyword node\n*/\nprivate STNode parseContinueKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONTINUE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse return statement.\n* return-stmt := return [ action-or-expr ] ;\n*\n* @return Return statement\n*/\nprivate STNode parseReturnStatement() {\nstartContext(ParserRuleContext.RETURN_STMT);\nSTNode returnKeyword = parseReturnKeyword();\nSTNode returnRhs = parseReturnStatementRhs(returnKeyword);\nendContext();\nreturn returnRhs;\n}\n/**\n* Parse return-keyword.\n*\n* @return Return-keyword node\n*/\nprivate STNode parseReturnKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETURN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETURN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse break statement.\n* break-stmt := break ; \n*\n* @return break statement\n*/\nprivate STNode parseBreakStatement() {\nstartContext(ParserRuleContext.BREAK_STATEMENT);\nSTNode breakKeyword = parseBreakKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createBreakStatementNode(breakKeyword, semicolon);\n}\n/**\n* Parse break-keyword.\n*\n* @return break-keyword node\n*/\nprivate STNode parseBreakKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BREAK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BREAK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse the right hand side of a return statement.\n*
\n* \n* return-stmt-rhs := ; | action-or-expr ;\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseReturnStatementRhs(STNode returnKeyword) {\nSTNode expr;\nSTToken token = peek();\nswitch (token.kind) {\ncase SEMICOLON_TOKEN:\nexpr = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nexpr = parseActionOrExpression();\nbreak;\n}\nSTNode semicolon = parseSemicolon();\nreturn STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon);\n}\n/**\n* Parse mapping constructor expression.\n*
\n* mapping-constructor-expr := { [field (, field)*] }\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingConstructorExpr() {\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode openBrace = parseOpenBrace();\nSTNode fields = parseMappingConstructorFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\n}\n/**\n* Parse mapping constructor fields.\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingConstructorFields() {\nSTToken nextToken = peek();\nif (isEndOfMappingConstructor(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList fields = new ArrayList<>();\nSTNode field = parseMappingField(ParserRuleContext.FIRST_MAPPING_FIELD);\nfields.add(field);\nreturn parseMappingConstructorFields(fields);\n}\nprivate STNode parseMappingConstructorFields(List fields) {\nSTToken nextToken;\nSTNode mappingFieldEnd;\nnextToken = peek();\nwhile (!isEndOfMappingConstructor(nextToken.kind)) {\nmappingFieldEnd = parseMappingFieldEnd(nextToken.kind);\nif (mappingFieldEnd == null) {\nbreak;\n}\nfields.add(mappingFieldEnd);\nSTNode field = parseMappingField(ParserRuleContext.MAPPING_FIELD);\nfields.add(field);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(fields);\n}\nprivate STNode parseMappingFieldEnd() {\nreturn parseMappingFieldEnd(peek().kind);\n}\nprivate STNode parseMappingFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.MAPPING_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingFieldEnd(solution.tokenKind);\n}\n}\nprivate boolean isEndOfMappingConstructor(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\ncase READONLY_KEYWORD:\nreturn false;\ncase EOF_TOKEN:\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase RETURNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase RESOURCE_KEYWORD:\nreturn true;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n* Parse mapping constructor field.\n*
\n* field := specific-field | computed-name-field | spread-field\n*\n* @param fieldContext Context of the mapping field\n* @param leadingComma Leading comma\n* @return Parsed node\n*/\nprivate STNode parseMappingField(ParserRuleContext fieldContext) {\nSTToken nextToken = peek();\nreturn parseMappingField(nextToken.kind, fieldContext);\n}\nprivate STNode parseMappingField(SyntaxKind tokenKind, ParserRuleContext fieldContext) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseSpecificFieldWithOptionalValue(readonlyKeyword);\ncase STRING_LITERAL:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseQualifiedSpecificField(readonlyKeyword);\ncase READONLY_KEYWORD:\nreadonlyKeyword = parseReadonlyKeyword();\nreturn parseSpecificField(readonlyKeyword);\ncase OPEN_BRACKET_TOKEN:\nreturn parseComputedField();\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode expr = parseExpression();\nreturn STNodeFactory.createSpreadFieldNode(ellipsis, expr);\ncase CLOSE_BRACE_TOKEN:\nif (fieldContext == ParserRuleContext.FIRST_MAPPING_FIELD) {\nreturn null;\n}\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, fieldContext, fieldContext);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingField(solution.tokenKind, fieldContext);\n}\n}\nprivate STNode parseSpecificField(STNode readonlyKeyword) {\nSTToken nextToken = peek();\nreturn parseSpecificField(nextToken.kind, readonlyKeyword);\n}\nprivate STNode parseSpecificField(SyntaxKind nextTokenKind, STNode readonlyKeyword) {\nswitch (nextTokenKind) {\ncase STRING_LITERAL:\nreturn parseQualifiedSpecificField(readonlyKeyword);\ncase IDENTIFIER_TOKEN:\nreturn parseSpecificFieldWithOptionalValue(readonlyKeyword);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD, readonlyKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseSpecificField(solution.tokenKind, readonlyKeyword);\n}\n}\nprivate STNode parseQualifiedSpecificField(STNode readonlyKeyword) {\nSTNode key = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\n/**\n* Parse mapping constructor specific-field with an optional value.\n*\n* @return Parsed node\n*/\nprivate STNode parseSpecificFieldWithOptionalValue(STNode readonlyKeyword) {\nSTNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);\nreturn parseSpecificFieldRhs(readonlyKeyword, key);\n}\nprivate STNode parseSpecificFieldRhs(STNode readonlyKeyword, STNode key) {\nSTToken nextToken = peek();\nreturn parseSpecificFieldRhs(nextToken.kind, readonlyKeyword, key);\n}\nprivate STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode readonlyKeyword, STNode key) {\nSTNode colon;\nSTNode valueExpr;\nswitch (tokenKind) {\ncase COLON_TOKEN:\ncolon = parseColon();\nvalueExpr = parseExpression();\nbreak;\ncase COMMA_TOKEN:\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nif (isEndOfMappingConstructor(tokenKind)) {\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, readonlyKeyword, key);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn 
parseSpecificFieldRhs(solution.tokenKind, readonlyKeyword, key);\n}\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\n/**\n* Parse string literal.\n*\n* @return Parsed node\n*/\nprivate STNode parseStringLiteral() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STRING_LITERAL) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STRING_LITERAL);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse colon token.\n*\n* @return Parsed node\n*/\nprivate STNode parseColon() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COLON_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COLON);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse readonly keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseReadonlyKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.READONLY_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.READONLY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse computed-name-field of a mapping constructor expression.\n*
\n* computed-name-field := [ field-name-expr ] : value-expr\n*\n* @param leadingComma Leading comma\n* @return Parsed node\n*/\nprivate STNode parseComputedField() {\nstartContext(ParserRuleContext.COMPUTED_FIELD_NAME);\nSTNode openBracket = parseOpenBracket();\nSTNode fieldNameExpr = parseExpression();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon, valueExpr);\n}\n/**\n* Parse open bracket.\n*\n* @return Parsed node\n*/\nprivate STNode parseOpenBracket() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPEN_BRACKET);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse compound assignment statement, which takes the following format.\n*
\n* assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;\n*\n* @return Parsed node\n*/\nprivate STNode parseCompoundAssignmentStmt() {\nstartContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT);\nSTNode varName = parseVariableName();\nSTNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName);\nendContext();\nreturn compoundAssignmentStmt;\n}\n/**\n*
\n* Parse the RHS portion of the compound assignment.\n*
\n* compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;\n*\n* @param lvExpr LHS expression\n* @return Parsed node\n*/\nprivate STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) {\nSTNode binaryOperator = parseCompoundBinaryOperator();\nSTNode equalsToken = parseAssignOp();\nSTNode expr = parseActionOrExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nboolean lvExprValid = isValidLVExpr(lvExpr);\nif (!lvExprValid) {\nSTNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nlvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,\nDiagnosticErrorCode.ERROR_INVALID_EXPR_IN_COMPOUND_ASSIGNMENT_LHS);\n}\nreturn STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr,\nsemicolon);\n}\n/**\n* Parse compound binary operator.\n* BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>>\n*\n* @return Parsed node\n*/\nprivate STNode parseCompoundBinaryOperator() {\nSTToken token = peek();\nif (isCompoundBinaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse service declaration.\n*
\n* \n* service-decl := metadata service [variable-name] on expression-list service-body-block\n*\n* expression-list := expression (, expression)*\n*
\n*\n* @param metadata Metadata\n* @return Parsed node\n*/\nprivate STNode parseServiceDecl(STNode metadata) {\nstartContext(ParserRuleContext.SERVICE_DECL);\nSTNode serviceKeyword = parseServiceKeyword();\nSTNode serviceDecl = parseServiceRhs(metadata, serviceKeyword);\nendContext();\nreturn serviceDecl;\n}\n/**\n* Parse rhs of the service declaration.\n*
\n* \n* service-rhs := [variable-name] on expression-list service-body-block\n* \n*\n* @param metadata Metadata\n* @param serviceKeyword Service keyword\n* @return Parsed node\n*/\nprivate STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) {\nSTNode serviceName = parseServiceName();\nSTNode onKeyword = parseOnKeyword();\nSTNode expressionList = parseListeners();\nSTNode serviceBody = parseServiceBody();\nonKeyword =\ncloneWithDiagnosticIfListEmpty(expressionList, onKeyword, DiagnosticErrorCode.ERROR_MISSING_EXPRESSION);\nreturn STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword,\nexpressionList, serviceBody);\n}\nprivate STNode parseServiceName() {\nSTToken nextToken = peek();\nreturn parseServiceName(nextToken.kind);\n}\nprivate STNode parseServiceName(SyntaxKind kind) {\nswitch (kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseIdentifier(ParserRuleContext.SERVICE_NAME);\ncase ON_KEYWORD:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseServiceName(solution.tokenKind);\n}\n}\n/**\n* Parse service keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseServiceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SERVICE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a compound binary operator.\n*
\n* compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>>\n*\n* @param tokenKind STToken kind\n* @return true if the token kind refers to a binary operator. false otherwise\n*/\nprivate boolean isCompoundBinaryOperator(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase SLASH_TOKEN:\ncase ASTERISK_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase BITWISE_XOR_TOKEN:\ncase PIPE_TOKEN:\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\nreturn getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse on keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseOnKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ON_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ON_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse listener references.\n*
\n* expression-list := expression (, expression)*\n*\n* @return Parsed node\n*/\nprivate STNode parseListeners() {\nstartContext(ParserRuleContext.LISTENERS_LIST);\nList listeners = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfExpressionsList(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode leadingComma = STNodeFactory.createEmptyNode();\nSTNode exprListItem = parseExpressionListItem(leadingComma);\nlisteners.add(exprListItem);\nnextToken = peek();\nwhile (!isEndOfExpressionsList(nextToken.kind)) {\nleadingComma = parseComma();\nexprListItem = parseExpressionListItem(leadingComma);\nlisteners.add(exprListItem);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(listeners);\n}\nprivate boolean isEndOfExpressionsList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\nreturn false;\ncase EOF_TOKEN:\ncase SEMICOLON_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn !isValidExprStart(tokenKind);\n}\n}\n/**\n* Parse expression list item.\n*\n* @param leadingComma Leading comma\n* @return Parsed node\n*/\nprivate STNode parseExpressionListItem(STNode leadingComma) {\nSTNode expr = parseExpression();\nreturn STNodeFactory.createExpressionListItemNode(leadingComma, expr);\n}\n/**\n* Parse service body.\n*
\n* \n* service-body-block := { service-method-defn* }\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseServiceBody() {\nSTNode openBrace = parseOpenBrace();\nSTNode resources = parseResources();\nSTNode closeBrace = parseCloseBrace();\nreturn STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace);\n}\n/**\n* Parse service resource definitions.\n*\n* @return Parsed node\n*/\nprivate STNode parseResources() {\nList resources = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (!isEndOfServiceDecl(nextToken.kind)) {\nSTNode serviceMethod = parseResource();\nif (serviceMethod == null) {\nbreak;\n}\nresources.add(serviceMethod);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(resources);\n}\nprivate boolean isEndOfServiceDecl(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\ncase TYPE_KEYWORD:\ncase SERVICE_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse resource definition (i.e. service-method-defn).\n*
\n* \n* service-body-block := { service-method-defn* }\n*\n* service-method-defn := metadata [resource] function identifier function-signature method-defn-body\n*
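\n* A minimal illustrative example of a member this rule accepts (hypothetical input, not from this codebase): resource function greet(http:Caller caller) { }\n*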
\n*\n* @return Parsed node\n*/\nprivate STNode parseResource() {\nSTToken nextToken = peek();\nreturn parseResource(nextToken.kind);\n}\nprivate STNode parseResource(SyntaxKind nextTokenKind) {\nSTNode metadata;\nswitch (nextTokenKind) {\ncase RESOURCE_KEYWORD:\ncase FUNCTION_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ndefault:\nif (isEndOfServiceDecl(nextTokenKind)) {\nreturn null;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RESOURCE_DEF);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseResource(solution.tokenKind);\n}\nreturn parseResource(nextTokenKind, metadata);\n}\nprivate STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) {\nswitch (nextTokenKind) {\ncase RESOURCE_KEYWORD:\nSTNode resourceKeyword = parseResourceKeyword();\nreturn parseFuncDefinition(metadata, false, resourceKeyword, null);\ncase FUNCTION_KEYWORD:\nreturn parseFuncDefinition(metadata, false, STNodeFactory.createEmptyNode(), null);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseResource(solution.tokenKind, metadata);\n}\n}\n/**\n* Parse resource keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseResourceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RESOURCE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether next construct is a service declaration or not. This method is\n* used to determine whether an end-of-block is reached, if the next token is\n* a service-keyword. Because service-keyword can be used in statements as well\n* as in top-level node (service-decl). We have reached a service-decl, then\n* it could be due to missing close-brace at the end of the current block.\n*\n* @return true if the next construct is a service declaration.\n* false otherwise\n*/", + "context_after": "class BallerinaParser extends AbstractParser {\nprivate static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.DEFAULT;\nprotected BallerinaParser(AbstractTokenReader tokenReader) {\nsuper(tokenReader, new BallerinaParserErrorHandler(tokenReader));\n}\n/**\n* Start parsing the given input.\n*\n* @return Parsed node\n*/\n@Override\npublic STNode parse() {\nreturn parseCompUnit();\n}\n/**\n* Start parsing the input from a given context. Supported starting points are:\n*
\n* Module part (a file)\n* Top level node\n* Statement\n* Expression\n*
\n*\n* @param context Context to start parsing\n* @return Parsed node\n*/\npublic STNode parse(ParserRuleContext context) {\nswitch (context) {\ncase COMP_UNIT:\nreturn parseCompUnit();\ncase TOP_LEVEL_NODE:\nstartContext(ParserRuleContext.COMP_UNIT);\nreturn parseTopLevelNode();\ncase STATEMENT:\nstartContext(ParserRuleContext.COMP_UNIT);\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nreturn parseStatement();\ncase EXPRESSION:\nstartContext(ParserRuleContext.COMP_UNIT);\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nstartContext(ParserRuleContext.STATEMENT);\nreturn parseExpression();\ndefault:\nthrow new UnsupportedOperationException(\"Cannot start parsing from: \" + context);\n}\n}\n/**\n* Resume the parsing from the given context.\n*\n* @param context Context to resume parsing\n* @param args Arguments that requires to continue parsing from the given parser context\n* @return Parsed node\n*/\n@Override\npublic STNode resumeParsing(ParserRuleContext context, Object... args) {\nswitch (context) {\ncase FUNC_BODY:\nreturn parseFunctionBody((boolean) args[0]);\ncase OPEN_BRACE:\nreturn parseOpenBrace();\ncase CLOSE_BRACE:\nreturn parseCloseBrace();\ncase FUNC_NAME:\nreturn parseFunctionName();\ncase OPEN_PARENTHESIS:\ncase ARG_LIST_START:\nreturn parseOpenParenthesis(context);\ncase SIMPLE_TYPE_DESCRIPTOR:\nreturn parseSimpleTypeDescriptor();\ncase ASSIGN_OP:\nreturn parseAssignOp();\ncase EXTERNAL_KEYWORD:\nreturn parseExternalKeyword();\ncase SEMICOLON:\nreturn parseSemicolon();\ncase CLOSE_PARENTHESIS:\nreturn parseCloseParenthesis();\ncase VARIABLE_NAME:\nreturn parseVariableName();\ncase TERMINAL_EXPRESSION:\nreturn parseTerminalExpression((STNode) args[0], (boolean) args[1], (boolean) args[2],\n(boolean) args[3]);\ncase STATEMENT:\nreturn parseStatement();\ncase STATEMENT_WITHOUT_ANNOTS:\nreturn parseStatement((STNode) args[0]);\ncase EXPRESSION_RHS:\nreturn parseExpressionRhs((OperatorPrecedence) args[0], (STNode) args[1], (boolean) args[2],\n(boolean) args[3], (boolean) args[4], (boolean) args[5]);\ncase PARAMETER_START:\nreturn parseParameter((SyntaxKind) args[0], (STNode) args[1], (int) args[2], (boolean) args[3]);\ncase PARAMETER_WITHOUT_ANNOTS:\nreturn parseParamGivenAnnots((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (int) args[3],\n(boolean) args[4]);\ncase AFTER_PARAMETER_TYPE:\nreturn parseAfterParamType((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4], (boolean) args[5]);\ncase PARAMETER_NAME_RHS:\nreturn parseParameterRhs((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4], (STNode) args[5]);\ncase TOP_LEVEL_NODE:\nreturn parseTopLevelNode();\ncase TOP_LEVEL_NODE_WITHOUT_METADATA:\nreturn parseTopLevelNode((STNode) args[0]);\ncase TOP_LEVEL_NODE_WITHOUT_MODIFIER:\nreturn parseTopLevelNode((STNode) args[0], (STNode) args[1]);\ncase TYPE_NAME_OR_VAR_NAME:\ncase RECORD_FIELD_NAME_OR_TYPE_NAME:\ncase TYPE_REFERENCE:\ncase ANNOT_REFERENCE:\ncase FIELD_ACCESS_IDENTIFIER:\nreturn parseQualifiedIdentifier(context, (boolean) args[0]);\ncase VAR_DECL_STMT_RHS:\nreturn parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (boolean) args[3]);\ncase FIELD_DESCRIPTOR_RHS:\nreturn parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]);\ncase RECORD_BODY_START:\nreturn parseRecordBodyStartDelimiter();\ncase TYPE_DESCRIPTOR:\nreturn parseTypeDescriptorInternal((ParserRuleContext) args[0], (boolean) args[1]);\ncase 
OBJECT_MEMBER_START:\nreturn parseObjectMember();\ncase OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:\nreturn parseObjectMethodOrField((STNode) args[0], (STNode) args[1]);\ncase OBJECT_FIELD_RHS:\nreturn parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4]);\ncase OBJECT_TYPE_QUALIFIER:\nreturn parseObjectTypeQualifiers();\ncase OBJECT_KEYWORD:\nreturn parseObjectKeyword();\ncase TYPE_NAME:\nreturn parseTypeName();\ncase IF_KEYWORD:\nreturn parseIfKeyword();\ncase ELSE_KEYWORD:\nreturn parseElseKeyword();\ncase ELSE_BODY:\nreturn parseElseBody();\ncase WHILE_KEYWORD:\nreturn parseWhileKeyword();\ncase PANIC_KEYWORD:\nreturn parsePanicKeyword();\ncase IMPORT_DECL_RHS:\nreturn parseImportDecl((STNode) args[0], (STNode) args[1]);\ncase IMPORT_PREFIX:\nreturn parseImportPrefix();\ncase IMPORT_MODULE_NAME:\ncase IMPORT_ORG_OR_MODULE_NAME:\ncase VARIABLE_REF:\ncase SERVICE_NAME:\ncase IMPLICIT_ANON_FUNC_PARAM:\ncase MAPPING_FIELD_NAME:\ncase RECEIVE_FIELD_NAME:\ncase MODULE_ENUM_NAME:\ncase ENUM_MEMBER_NAME:\nreturn parseIdentifier(context);\ncase IMPORT_KEYWORD:\nreturn parseImportKeyword();\ncase SLASH:\nreturn parseSlashToken();\ncase DOT:\nreturn parseDotToken();\ncase IMPORT_VERSION_DECL:\nreturn parseVersion();\ncase VERSION_KEYWORD:\nreturn parseVersionKeyword();\ncase VERSION_NUMBER:\nreturn parseVersionNumber();\ncase DECIMAL_INTEGER_LITERAL:\ncase MAJOR_VERSION:\ncase MINOR_VERSION:\ncase PATCH_VERSION:\nreturn parseDecimalIntLiteral(context);\ncase IMPORT_SUB_VERSION:\nreturn parseSubVersion(context);\ncase IMPORT_PREFIX_DECL:\nreturn parseImportPrefixDecl();\ncase AS_KEYWORD:\nreturn parseAsKeyword();\ncase CONTINUE_KEYWORD:\nreturn parseContinueKeyword();\ncase BREAK_KEYWORD:\nreturn parseBreakKeyword();\ncase RETURN_KEYWORD:\nreturn parseReturnKeyword();\ncase MAPPING_FIELD:\ncase FIRST_MAPPING_FIELD:\nreturn parseMappingField((ParserRuleContext) args[0]);\ncase SPECIFIC_FIELD_RHS:\nreturn parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]);\ncase STRING_LITERAL:\nreturn parseStringLiteral();\ncase COLON:\nreturn parseColon();\ncase OPEN_BRACKET:\nreturn parseOpenBracket();\ncase RESOURCE_DEF:\nreturn parseResource();\ncase OPTIONAL_SERVICE_NAME:\nreturn parseServiceName();\ncase SERVICE_KEYWORD:\nreturn parseServiceKeyword();\ncase ON_KEYWORD:\nreturn parseOnKeyword();\ncase RESOURCE_KEYWORD:\nreturn parseResourceKeyword();\ncase LISTENER_KEYWORD:\nreturn parseListenerKeyword();\ncase NIL_TYPE_DESCRIPTOR:\nreturn parseNilTypeDescriptor();\ncase COMPOUND_ASSIGNMENT_STMT:\nreturn parseCompoundAssignmentStmt();\ncase TYPEOF_KEYWORD:\nreturn parseTypeofKeyword();\ncase ARRAY_LENGTH:\nreturn parseArrayLength();\ncase IS_KEYWORD:\nreturn parseIsKeyword();\ncase STMT_START_WITH_EXPR_RHS:\nreturn parseStatementStartWithExprRhs((STNode) args[0]);\ncase COMMA:\nreturn parseComma();\ncase CONST_DECL_TYPE:\nreturn parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]);\ncase BINDING_PATTERN_OR_EXPR_RHS:\nreturn parseTypedBindingPatternOrExprRhs((STNode) args[0], (boolean) args[1]);\ncase LT:\nreturn parseLTToken();\ncase GT:\nreturn parseGTToken();\ncase RECORD_FIELD_OR_RECORD_END:\nreturn parseFieldOrRestDescriptor((boolean) args[0]);\ncase ANNOTATION_KEYWORD:\nreturn parseAnnotationKeyword();\ncase ANNOT_DECL_OPTIONAL_TYPE:\nreturn parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3]);\ncase ANNOT_DECL_RHS:\nreturn parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], 
(STNode) args[2], (STNode) args[3],\n(STNode) args[4]);\ncase ANNOT_OPTIONAL_ATTACH_POINTS:\nreturn parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (STNode) args[4], (STNode) args[5]);\ncase SOURCE_KEYWORD:\nreturn parseSourceKeyword();\ncase ATTACH_POINT_IDENT:\nreturn parseAttachPointIdent((STNode) args[0]);\ncase IDENT_AFTER_OBJECT_IDENT:\nreturn parseIdentAfterObjectIdent();\ncase FUNCTION_IDENT:\nreturn parseFunctionIdent();\ncase FIELD_IDENT:\nreturn parseFieldIdent();\ncase ATTACH_POINT_END:\nreturn parseAttachPointEnd();\ncase XMLNS_KEYWORD:\nreturn parseXMLNSKeyword();\ncase XML_NAMESPACE_PREFIX_DECL:\nreturn parseXMLDeclRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase NAMESPACE_PREFIX:\nreturn parseNamespacePrefix();\ncase WORKER_KEYWORD:\nreturn parseWorkerKeyword();\ncase WORKER_NAME:\nreturn parseWorkerName();\ncase FORK_KEYWORD:\nreturn parseForkKeyword();\ncase TRAP_KEYWORD:\nreturn parseTrapKeyword();\ncase IN_KEYWORD:\nreturn parseInKeyword();\ncase FOREACH_KEYWORD:\nreturn parseForEachKeyword();\ncase TABLE_KEYWORD:\nreturn parseTableKeyword();\ncase KEY_KEYWORD:\nreturn parseKeyKeyword();\ncase TABLE_KEYWORD_RHS:\nreturn parseTableConstructorOrQuery((STNode) args[0], (boolean) args[1]);\ncase ERROR_KEYWORD:\nreturn parseErrorKeyword();\ncase LET_KEYWORD:\nreturn parseLetKeyword();\ncase STREAM_KEYWORD:\nreturn parseStreamKeyword();\ncase STREAM_TYPE_FIRST_PARAM_RHS:\nreturn parseStreamTypeParamsNode((STNode) args[0], (STNode) args[1]);\ncase TEMPLATE_START:\ncase TEMPLATE_END:\nreturn parseBacktickToken(context);\ncase KEY_CONSTRAINTS_RHS:\nreturn parseKeyConstraint((STNode) args[0]);\ncase FUNCTION_KEYWORD_RHS:\nreturn parseFunctionKeywordRhs((STNode) args[0], (STNode) args[1], (boolean) args[2], (boolean) args[3],\n(STNode[]) args[4]);\ncase RETURNS_KEYWORD:\nreturn parseReturnsKeyword();\ncase NEW_KEYWORD:\nreturn parseNewKeyword();\ncase FROM_KEYWORD:\nreturn parseFromKeyword();\ncase WHERE_KEYWORD:\nreturn parseWhereKeyword();\ncase SELECT_KEYWORD:\nreturn parseSelectKeyword();\ncase ORDER_KEYWORD:\nreturn parseOrderKeyword();\ncase BY_KEYWORD:\nreturn parseByKeyword();\ncase ASCENDING_KEYWORD:\nreturn parseAscendingKeyword();\ncase DESCENDING_KEYWORD:\nreturn parseDescendingKeyword();\ncase ORDER_KEY_LIST_END:\nreturn parseOrderKeyListMemberEnd();\ncase TABLE_CONSTRUCTOR_OR_QUERY_START:\nreturn parseTableConstructorOrQuery((boolean) args[0]);\ncase TABLE_CONSTRUCTOR_OR_QUERY_RHS:\nreturn parseTableConstructorOrQueryRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase QUERY_PIPELINE_RHS:\nreturn parseIntermediateClause((boolean) args[0]);\ncase ANON_FUNC_BODY:\nreturn parseAnonFuncBody((boolean) args[0]);\ncase CLOSE_BRACKET:\nreturn parseCloseBracket();\ncase ARG_START_OR_ARG_LIST_END:\nreturn parseArgument();\ncase ARG_END:\nreturn parseArgEnd();\ncase MAPPING_FIELD_END:\nreturn parseMappingFieldEnd();\ncase FUNCTION_KEYWORD:\nreturn parseFunctionKeyword();\ncase FIELD_OR_REST_DESCIPTOR_RHS:\nreturn parseFieldOrRestDescriptorRhs((STNode) args[0], (STNode) args[1]);\ncase TYPE_DESC_IN_TUPLE_RHS:\nreturn parseTupleMemberRhs();\ncase LIST_BINDING_PATTERN_MEMBER_END:\nreturn parseListBindingPatternMemberRhs();\ncase MAPPING_BINDING_PATTERN_END:\nreturn parseMappingBindingPatternEnd();\ncase FIELD_BINDING_PATTERN_NAME:\nreturn parseFieldBindingPattern();\ncase CONSTANT_EXPRESSION_START:\nreturn parseSimpleConstExprInternal();\ncase LIST_CONSTRUCTOR_MEMBER_END:\nreturn 
parseListConstructorMemberEnd();\ncase NIL_OR_PARENTHESISED_TYPE_DESC_RHS:\nreturn parseNilOrParenthesisedTypeDescRhs((STNode) args[0]);\ncase ANON_FUNC_PARAM_RHS:\nreturn parseImplicitAnonFuncParamEnd();\ncase LIST_BINDING_PATTERN:\nreturn parseListBindingPattern();\ncase BINDING_PATTERN:\nreturn parseBindingPattern();\ncase PEER_WORKER_NAME:\nreturn parsePeerWorkerName();\ncase SYNC_SEND_TOKEN:\nreturn parseSyncSendToken();\ncase LEFT_ARROW_TOKEN:\nreturn parseLeftArrowToken();\ncase RECEIVE_WORKERS:\nreturn parseReceiveWorkers();\ncase WAIT_KEYWORD:\nreturn parseWaitKeyword();\ncase WAIT_FUTURE_EXPR_END:\nreturn parseWaitFutureExprEnd((int) args[0]);\ncase WAIT_FIELD_NAME:\nreturn parseWaitField();\ncase WAIT_FIELD_END:\nreturn parseWaitFieldEnd();\ncase ANNOT_CHAINING_TOKEN:\nreturn parseAnnotChainingToken();\ncase DO_KEYWORD:\nreturn parseDoKeyword();\ncase MEMBER_ACCESS_KEY_EXPR_END:\nreturn parseMemberAccessKeyExprEnd();\ncase OPTIONAL_CHAINING_TOKEN:\nreturn parseOptionalChainingToken();\ncase RETRY_KEYWORD_RHS:\nreturn parseRetryKeywordRhs((STNode) args[0]);\ncase RETRY_TYPE_PARAM_RHS:\nreturn parseRetryTypeParamRhs((STNode) args[0], (STNode) args[1]);\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionKeyword();\ncase COMMIT_KEYWORD:\nreturn parseCommitKeyword();\ncase RETRY_KEYWORD:\nreturn parseRetryKeyword();\ncase ROLLBACK_KEYWORD:\nreturn parseRollbackKeyword();\ncase RETRY_BODY:\nreturn parseRetryBody();\ncase ENUM_MEMBER_END:\nreturn parseEnumMemberEnd();\ncase BRACKETED_LIST_MEMBER_END:\nreturn parseBracketedListMemberEnd();\ncase STMT_START_BRACKETED_LIST_MEMBER:\nreturn parseStatementStartBracketedListMember();\ncase TYPED_BINDING_PATTERN_TYPE_RHS:\nreturn parseTypedBindingPatternTypeRhs((STNode) args[0], (ParserRuleContext) args[1],\n(boolean) args[2]);\ncase BRACKETED_LIST_RHS:\nreturn parseTypedBindingPatternOrMemberAccessRhs((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (boolean) args[4], (boolean) args[5], (ParserRuleContext) args[6]);\ncase UNION_OR_INTERSECTION_TOKEN:\nreturn parseUnionOrIntersectionToken();\ncase BRACKETED_LIST_MEMBER:\ncase LIST_BINDING_MEMBER_OR_ARRAY_LENGTH:\nreturn parseBracketedListMember((boolean) args[0]);\ncase BASE16_KEYWORD:\nreturn parseBase16Keyword();\ncase BASE64_KEYWORD:\nreturn parseBase64Keyword();\ncase DOT_LT_TOKEN:\nreturn parseDotLTToken();\ncase SLASH_LT_TOKEN:\nreturn parseSlashLTToken();\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\nreturn parseDoubleSlashDoubleAsteriskLTToken();\ncase XML_ATOMIC_NAME_PATTERN_START:\nreturn parseXMLAtomicNamePatternBody();\ncase BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS:\nreturn parseBracedExprOrAnonFuncParamRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase READONLY_KEYWORD:\nreturn parseReadonlyKeyword();\ncase SPECIFIC_FIELD:\nreturn parseSpecificField((STNode) args[0]);\ncase OPTIONAL_MATCH_GUARD:\nreturn parseMatchGuard();\ncase MATCH_PATTERN_START:\nreturn parseMatchPattern();\ncase MATCH_PATTERN_RHS:\nreturn parseMatchPatternEnd();\ncase ENUM_MEMBER_RHS:\nreturn parseEnumMemberRhs((STNode) args[0], (STNode) args[1]);\ncase RECEIVE_FIELD:\nreturn parseReceiveField();\ncase PUBLIC_KEYWORD:\nreturn parseQualifier();\ncase PARAM_END:\nreturn parseParameterRhs();\ncase ELLIPSIS:\nreturn parseEllipsis();\ncase BINARY_OPERATOR:\nreturn parseBinaryOperator();\ncase TYPE_KEYWORD:\nreturn parseTypeKeyword();\ncase CLOSED_RECORD_BODY_START:\nreturn parseClosedRecordBodyStart();\ncase CLOSED_RECORD_BODY_END:\nreturn parseClosedRecordBodyEnd();\ncase 
QUESTION_MARK:\nreturn parseQuestionMark();\ncase FINAL_KEYWORD:\nreturn parseFinalKeyword();\ncase CLIENT_KEYWORD:\nreturn parseClientKeyword();\ncase ABSTRACT_KEYWORD:\nreturn parseAbstractKeyword();\ncase REMOTE_KEYWORD:\nreturn parseRemoteKeyword();\ncase FAIL_KEYWORD:\nreturn parseFailKeyword();\ncase CHECKING_KEYWORD:\nreturn parseCheckingKeyword();\ncase COMPOUND_BINARY_OPERATOR:\nreturn parseCompoundBinaryOperator();\ncase CONST_DECL_RHS:\nreturn parseConstantOrListenerDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (boolean) args[4]);\ncase CONST_KEYWORD:\nreturn parseConstantKeyword();\ncase UNARY_OPERATOR:\nreturn parseUnaryOperator();\ncase AT:\nreturn parseAtToken();\ncase REMOTE_CALL_OR_ASYNC_SEND_RHS:\nreturn parseRemoteCallOrAsyncSendActionRhs((STNode) args[0], (boolean) args[1], (STNode) args[2]);\ncase DEFAULT_KEYWORD:\nreturn parseDefaultKeyword();\ncase RIGHT_ARROW:\nreturn parseRightArrow();\ncase PARAMETERIZED_TYPE:\nreturn parseParameterizedTypeKeyword();\ncase ANNOTATION_TAG:\nreturn parseAnnotationTag();\ncase ATTACH_POINT:\nreturn parseAnnotationAttachPoint();\ncase LOCK_KEYWORD:\nreturn parseLockKeyword();\ncase PIPE:\nreturn parsePipeToken();\ncase STRING_KEYWORD:\nreturn parseStringKeyword();\ncase XML_KEYWORD:\nreturn parseXMLKeyword();\ncase INTERPOLATION_START_TOKEN:\nreturn parseInterpolationStart();\ncase EXPR_FUNC_BODY_START:\nreturn parseDoubleRightArrow();\ncase START_KEYWORD:\nreturn parseStartKeyword();\ncase FLUSH_KEYWORD:\nreturn parseFlushKeyword();\ncase ENUM_KEYWORD:\nreturn parseEnumKeyword();\ncase MATCH_KEYWORD:\nreturn parseMatchKeyword();\ncase RECORD_KEYWORD:\nreturn parseRecordKeyword();\ncase LIST_MATCH_PATTERN_MEMBER_RHS:\nreturn parseListMatchPatternMemberRhs();\ncase LIST_BINDING_PATTERN_MEMBER:\nreturn parseListBindingPatternMember();\ncase FIELD_MATCH_PATTERN_MEMBER:\nreturn parseFieldMatchPatternMember();\ncase FIELD_MATCH_PATTERN_MEMBER_RHS:\nreturn parseFieldMatchPatternRhs();\ncase FUNC_MATCH_PATTERN_OR_CONST_PATTERN:\nreturn parseFunctionalMatchPatternOrConsPattern((STNode) args[0]);\ncase ARG_MATCH_PATTERN:\nreturn parseArgMatchPattern();\ncase ARG_MATCH_PATTERN_RHS:\nreturn parseArgMatchPatternRhs();\ncase ARG_BINDING_PATTERN:\nreturn parseArgBindingPattern();\ncase EXTERNAL_FUNC_BODY_OPTIONAL_ANNOTS:\nreturn parseExternalFuncBodyRhs((STNode) args[0]);\ncase ARG_BINDING_PATTERN_END:\nreturn parseArgsBindingPatternEnd();\ncase TABLE_ROW_END:\nreturn parseTableRowEnd();\ndefault:\nthrow new IllegalStateException(\"cannot resume parsing the rule: \" + context);\n}\n}\n/*\n* Private methods.\n*/\n/**\n* Parse a given input and returns the AST. 
Starts parsing from the top of a compilation unit.\n*\n* @return Parsed node\n*/\nprivate STNode parseCompUnit() {\nstartContext(ParserRuleContext.COMP_UNIT);\nSTToken token = peek();\nList otherDecls = new ArrayList<>();\nList importDecls = new ArrayList<>();\nboolean processImports = true;\nwhile (token.kind != SyntaxKind.EOF_TOKEN) {\nSTNode decl = parseTopLevelNode(token.kind);\nif (decl == null) {\nbreak;\n}\nif (decl.kind == SyntaxKind.IMPORT_DECLARATION) {\nif (processImports) {\nimportDecls.add(decl);\n} else {\nupdateLastNodeInListWithInvalidNode(otherDecls, decl,\nDiagnosticErrorCode.ERROR_IMPORT_DECLARATION_AFTER_OTHER_DECLARATIONS);\n}\n} else {\nif (processImports) {\nprocessImports = false;\n}\notherDecls.add(decl);\n}\ntoken = peek();\n}\nSTToken eof = consume();\nendContext();\nreturn STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls),\nSTNodeFactory.createNodeList(otherDecls), eof);\n}\n/**\n* Parse top level node having an optional modifier preceding it.\n*\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode() {\nSTToken token = peek();\nreturn parseTopLevelNode(token.kind);\n}\nprotected STNode parseTopLevelNode(SyntaxKind tokenKind) {\nSTNode metadata;\nswitch (tokenKind) {\ncase EOF_TOKEN:\nreturn null;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(tokenKind);\nreturn parseTopLevelNode(metadata);\ncase IMPORT_KEYWORD:\ncase FINAL_KEYWORD:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase XMLNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase ENUM_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(createEmptyMetadata(), null);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nmetadata = createEmptyMetadata();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE);\nif (solution.action == Action.KEEP) {\nmetadata = STNodeFactory.createEmptyNodeList();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTopLevelNode(solution.tokenKind);\n}\nreturn parseTopLevelNode(tokenKind, metadata);\n}\n/**\n* Parse top level node having an optional modifier preceding it, given the next token kind.\n*\n* @param metadata Next token kind\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode(STNode metadata) {\nSTToken nextToken = peek();\nreturn parseTopLevelNode(nextToken.kind, metadata);\n}\nprivate STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) {\nSTNode qualifier = null;\nswitch (tokenKind) {\ncase EOF_TOKEN:\nif (metadata != null) {\naddInvalidNodeToNextToken(metadata, DiagnosticErrorCode.ERROR_INVALID_METADATA);\n}\nreturn null;\ncase PUBLIC_KEYWORD:\nqualifier = parseQualifier();\ntokenKind = peek().kind;\nbreak;\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase IMPORT_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase XMLNS_KEYWORD:\ncase ENUM_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nbreak;\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(metadata, null);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, 
metadata);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nreturn parseTopLevelNode(solution.tokenKind, metadata);\n}\nreturn parseTopLevelNode(tokenKind, metadata, qualifier);\n}\n/**\n* Check whether the cursor is at the start of a module level var-decl.\n*\n* @param lookahead Offset of the token to to check\n* @return true if the cursor is at the start of a module level var-decl.\n* false otherwise.\n*/\nprivate boolean isModuleVarDeclStart(int lookahead) {\nSTToken nextToken = peek(lookahead + 1);\nswitch (nextToken.kind) {\ncase EQUAL_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn true;\ncase IDENTIFIER_TOKEN:\nswitch (peek(lookahead + 2).kind) {\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\ncase COLON_TOKEN:\nif (lookahead > 1) {\nreturn false;\n}\nif (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) {\nreturn false;\n}\nreturn isModuleVarDeclStart(lookahead + 2);\ndefault:\nreturn false;\n}\n}\n/**\n* Parse import declaration.\n*
\n* import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;\n*\n* @return Parsed node\n*/\nprivate STNode parseImportDecl() {\nstartContext(ParserRuleContext.IMPORT_DECL);\nthis.tokenReader.startMode(ParserMode.IMPORT);\nSTNode importKeyword = parseImportKeyword();\nSTNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME);\nSTToken token = peek();\nSTNode importDecl = parseImportDecl(token.kind, importKeyword, identifier);\nthis.tokenReader.endMode();\nendContext();\nreturn importDecl;\n}\n/**\n* Parse import keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseImportKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IMPORT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse identifier.\n*\n* @return Parsed node\n*/\nprivate STNode parseIdentifier(ParserRuleContext currentCtx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else if (token.kind == SyntaxKind.MAP_KEYWORD) {\nSTToken mapKeyword = consume();\nreturn STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\n} else {\nSolution sol = recover(token, currentCtx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse RHS of the import declaration. This includes the components after the\n* starting identifier (org-name/module-name) of the import decl.\n*\n* @param importKeyword Import keyword\n* @param identifier Org-name or the module name\n* @return Parsed node\n*/\nprivate STNode parseImportDecl(STNode importKeyword, STNode identifier) {\nSTToken nextToken = peek();\nreturn parseImportDecl(nextToken.kind, importKeyword, identifier);\n}\nprivate STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) {\nSTNode orgName;\nSTNode moduleName;\nSTNode version;\nSTNode alias;\nswitch (tokenKind) {\ncase SLASH_TOKEN:\nSTNode slash = parseSlashToken();\norgName = STNodeFactory.createImportOrgNameNode(identifier, slash);\nmoduleName = parseModuleName();\nversion = parseVersion();\nalias = parseImportPrefixDecl();\nbreak;\ncase DOT_TOKEN:\ncase VERSION_KEYWORD:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = parseVersion();\nalias = parseImportPrefixDecl();\nbreak;\ncase AS_KEYWORD:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = STNodeFactory.createEmptyNode();\nalias = parseImportPrefixDecl();\nbreak;\ncase SEMICOLON_TOKEN:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = STNodeFactory.createEmptyNode();\nalias = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseImportDecl(solution.tokenKind, importKeyword, identifier);\n}\nSTNode semicolon = parseSemicolon();\nreturn STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon);\n}\n/**\n* parse slash token.\n*\n* @return Parsed node\n*/\nprivate STNode parseSlashToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SLASH_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SLASH);\nreturn 
sol.recoveredNode;\n}\n}\n/**\n* Parse dot token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDotToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DOT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.DOT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse module name of an import declaration.\n*\n* @return Parsed node\n*/\nprivate STNode parseModuleName() {\nSTNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME);\nreturn parseModuleName(peek().kind, moduleNameStart);\n}\n/**\n* Parse import module name of an import declaration, given the module name start identifier.\n*\n* @param moduleNameStart Starting identifier of the module name\n* @return Parsed node\n*/\nprivate STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) {\nList moduleNameParts = new ArrayList<>();\nmoduleNameParts.add(moduleNameStart);\nwhile (!isEndOfImportModuleName(nextTokenKind)) {\nmoduleNameParts.add(parseDotToken());\nmoduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME));\nnextTokenKind = peek().kind;\n}\nreturn STNodeFactory.createNodeList(moduleNameParts);\n}\nprivate boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) {\nreturn nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN;\n}\nprivate boolean isEndOfImportDecl(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CONST_KEYWORD:\ncase EOF_TOKEN:\ncase SERVICE_KEYWORD:\ncase IMPORT_KEYWORD:\ncase FINAL_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse version component of an import declaration.\n*
\n* version-decl := version sem-ver\n*\n* @return Parsed node\n*/\nprivate STNode parseVersion() {\nSTToken nextToken = peek();\nreturn parseVersion(nextToken.kind);\n}\nprivate STNode parseVersion(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase VERSION_KEYWORD:\nSTNode versionKeyword = parseVersionKeyword();\nSTNode versionNumber = parseVersionNumber();\nreturn STNodeFactory.createImportVersionNode(versionKeyword, versionNumber);\ncase AS_KEYWORD:\ncase SEMICOLON_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nif (isEndOfImportDecl(nextTokenKind)) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVersion(solution.tokenKind);\n}\n}\n/**\n* Parse version keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseVersionKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.VERSION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse version number.\n*
\n* sem-ver := major-num [. minor-num [. patch-num]]\n*\n* major-num := DecimalNumber\n*\n* minor-num := DecimalNumber\n*\n* patch-num := DecimalNumber\n*
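\n* A minimal illustrative example (hypothetical input, not from this codebase): import foo/bar version 1.2.3; would parse 1 as the major-num, 2 as the minor-num, and 3 as the patch-num.\n*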
\n*\n* @return Parsed node\n*/\nprivate STNode parseVersionNumber() {\nSTToken nextToken = peek();\nreturn parseVersionNumber(nextToken.kind);\n}\nprivate STNode parseVersionNumber(SyntaxKind nextTokenKind) {\nSTNode majorVersion;\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\nmajorVersion = parseMajorVersion();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.VERSION_NUMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVersionNumber(solution.tokenKind);\n}\nList versionParts = new ArrayList<>();\nversionParts.add(majorVersion);\nSTNode minorVersion = parseMinorVersion();\nif (minorVersion != null) {\nversionParts.add(minorVersion);\nSTNode patchVersion = parsePatchVersion();\nif (patchVersion != null) {\nversionParts.add(patchVersion);\n}\n}\nreturn STNodeFactory.createNodeList(versionParts);\n}\nprivate STNode parseMajorVersion() {\nreturn parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION);\n}\nprivate STNode parseMinorVersion() {\nreturn parseSubVersion(ParserRuleContext.MINOR_VERSION);\n}\nprivate STNode parsePatchVersion() {\nreturn parseSubVersion(ParserRuleContext.PATCH_VERSION);\n}\n/**\n* Parse decimal literal.\n*\n* @param context Context in which the decimal literal is used.\n* @return Parsed node\n*/\nprivate STNode parseDecimalIntLiteral(ParserRuleContext context) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), context);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse sub version. i.e: minor-version/patch-version.\n*\n* @param context Context indicating what kind of sub-version is being parsed.\n* @return Parsed node\n*/\nprivate STNode parseSubVersion(ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseSubVersion(nextToken.kind, context);\n}\nprivate STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) {\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\ncase SEMICOLON_TOKEN:\nreturn null;\ncase DOT_TOKEN:\nSTNode leadingDot = parseDotToken();\nSTNode versionNumber = parseDecimalIntLiteral(context);\nreturn STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseSubVersion(solution.tokenKind, context);\n}\n}\n/**\n* Parse import prefix declaration.\n*
\n* import-prefix-decl := as import-prefix\n*\n* import-prefix := identifier | _\n*
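\n* A minimal illustrative example (hypothetical input, not from this codebase): import ballerina/io as console; where console is the import-prefix.\n*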
\n*\n* @return Parsed node\n*/\nprivate STNode parseImportPrefixDecl() {\nSTToken token = peek();\nreturn parseImportPrefixDecl(token.kind);\n}\nprivate STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\nSTNode asKeyword = parseAsKeyword();\nSTNode prefix = parseImportPrefix();\nreturn STNodeFactory.createImportPrefixNode(asKeyword, prefix);\ncase SEMICOLON_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nif (isEndOfImportDecl(nextTokenKind)) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseImportPrefixDecl(solution.tokenKind);\n}\n}\n/**\n* Parse as keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAsKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.AS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.AS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse import prefix.\n*\n* @return Parsed node\n*/\nprivate STNode parseImportPrefix() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse top level node, given the modifier that precedes it.\n*\n* @param qualifier Qualifier that precedes the top level node\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode(STNode metadata, STNode qualifier) {\nSTToken token = peek();\nreturn parseTopLevelNode(token.kind, metadata, qualifier);\n}\n/**\n* Parse top level node given the next token kind and the modifier that precedes it.\n*\n* @param tokenKind Next token kind\n* @param qualifier Qualifier that precedes the top level node\n* @return Parsed top-level node\n*/\nprivate STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\nreportInvalidQualifier(qualifier);\nreturn null;\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn parseFuncDefOrFuncTypeDesc(metadata, false, getQualifier(qualifier), null);\ncase TYPE_KEYWORD:\nreturn parseModuleTypeDefinition(metadata, getQualifier(qualifier));\ncase LISTENER_KEYWORD:\nreturn parseListenerDeclaration(metadata, getQualifier(qualifier));\ncase CONST_KEYWORD:\nreturn parseConstantDeclaration(metadata, getQualifier(qualifier));\ncase ANNOTATION_KEYWORD:\nSTNode constKeyword = STNodeFactory.createEmptyNode();\nreturn parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword);\ncase IMPORT_KEYWORD:\nreportInvalidQualifier(qualifier);\nreturn parseImportDecl();\ncase XMLNS_KEYWORD:\nreportInvalidQualifier(qualifier);\nreturn parseXMLNamespaceDeclaration(true);\ncase FINAL_KEYWORD:\nreportInvalidQualifier(qualifier);\nSTNode finalKeyword = parseFinalKeyword();\nreturn parseVariableDecl(metadata, finalKeyword, true);\ncase SERVICE_KEYWORD:\nif (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) {\nreportInvalidQualifier(qualifier);\nreturn parseServiceDecl(metadata);\n}\nreturn parseModuleVarDecl(metadata, qualifier);\ncase ENUM_KEYWORD:\nreturn parseEnumDeclaration(metadata, getQualifier(qualifier));\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nreturn 
parseModuleVarDecl(metadata, qualifier);\n}\nSTToken token = peek();\nSolution solution =\nrecover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\nreturn parseTopLevelNode(solution.tokenKind, metadata, qualifier);\n}\n}\nprivate STNode parseModuleVarDecl(STNode metadata, STNode qualifier) {\nreportInvalidQualifier(qualifier);\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(metadata, finalKeyword, true);\n}\nprivate STNode getQualifier(STNode qualifier) {\nreturn qualifier == null ? STNodeFactory.createEmptyNode() : qualifier;\n}\nprivate void reportInvalidQualifier(STNode qualifier) {\nif (qualifier != null && qualifier.kind != SyntaxKind.NONE) {\naddInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_INVALID_QUALIFIER,\nqualifier.toString().trim());\n}\n}\n/**\n* Parse access modifiers.\n*\n* @return Parsed node\n*/\nprivate STNode parseQualifier() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PUBLIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseFuncDefinition(STNode metadata, boolean isObjectMethod, STNode... qualifiers) {\nparseTransactionalQUalifier(qualifiers);\nstartContext(ParserRuleContext.FUNC_DEF);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcDef = parseFunctionKeywordRhs(metadata, functionKeyword, true, isObjectMethod, qualifiers);\nreturn funcDef;\n}\n/**\n* Parse function definition for the function type descriptor.\n*
\n* \n* function-defn := FUNCTION identifier function-signature function-body\n*\n* function-type-descriptor := function function-signature\n*
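\n* Illustrative examples (hypothetical, not from this codebase): function add(int a, int b) returns int { return a + b; } is a function-defn, while function (int, int) returns int is a function-type-descriptor.\n*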
\n*\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @return Parsed node\n*/\nprivate STNode parseFuncDefOrFuncTypeDesc(STNode metadata, boolean isObjectMethod, STNode... qualifiers) {\nparseTransactionalQUalifier(qualifiers);\nstartContext(ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcDefOrType = parseFunctionKeywordRhs(metadata, functionKeyword, false, isObjectMethod, qualifiers);\nreturn funcDefOrType;\n}\nprivate void parseTransactionalQUalifier(STNode... qualifiers) {\nif (peek().kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nqualifiers[qualifiers.length - 1] = consume();\n} else {\nqualifiers[qualifiers.length - 1] = STNodeFactory.createEmptyNode();\n}\n}\nprivate STNode parseFunctionKeywordRhs(STNode metadata, STNode functionKeyword, boolean isFuncDef,\nboolean isObjectMethod, STNode... qualifiers) {\nreturn parseFunctionKeywordRhs(peek().kind, metadata, functionKeyword, isFuncDef, isObjectMethod, qualifiers);\n}\nprivate STNode parseFunctionKeywordRhs(SyntaxKind nextTokenKind, STNode metadata, STNode functionKeyword,\nboolean isFuncDef, boolean isObjectMethod, STNode... qualifiers) {\nSTNode name;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nname = parseFunctionName();\nisFuncDef = true;\nbreak;\ncase OPEN_PAREN_TOKEN:\nname = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FUNCTION_KEYWORD_RHS, metadata, functionKeyword,\nisFuncDef, isObjectMethod, qualifiers);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFunctionKeywordRhs(solution.tokenKind, metadata, functionKeyword, isFuncDef, isObjectMethod,\nqualifiers);\n}\nif (isFuncDef) {\nswitchContext(ParserRuleContext.FUNC_DEF);\nSTNode funcSignature = parseFuncSignature(false);\nSTNode funcDef = createFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMethod, name, funcSignature,\nqualifiers);\nendContext();\nreturn funcDef;\n}\nSTNode funcSignature = parseFuncSignature(true);\nreturn parseReturnTypeDescRhs(metadata, functionKeyword, funcSignature, isObjectMethod, qualifiers);\n}\nprivate STNode createFuncDefOrMethodDecl(STNode metadata, STNode functionKeyword, boolean isObjectMethod,\nSTNode name, STNode funcSignature, STNode... qualifiers) {\nSTNode body = parseFunctionBody(isObjectMethod);\nif (body.kind == SyntaxKind.SEMICOLON_TOKEN) {\nreturn STNodeFactory.createMethodDeclarationNode(metadata, qualifiers[0], functionKeyword, name,\nfuncSignature, body);\n}\nif (isObjectMethod) {\nreturn STNodeFactory.createObjectMethodDefinitionNode(metadata, qualifiers[0], qualifiers[1], qualifiers[2],\nfunctionKeyword, name, funcSignature, body);\n}\nreturn STNodeFactory.createFunctionDefinitionNode(metadata, qualifiers[0], qualifiers[1], functionKeyword, name,\nfuncSignature, body);\n}\n/**\n* Parse function signature.\n*
\n* \n* function-signature := ( param-list ) return-type-descriptor\n*\n* return-type-descriptor := [ returns [annots] type-descriptor ]\n*
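\n* A minimal illustrative example (hypothetical, not from this codebase): (int a, float b) returns string\n*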
\n*\n* @param isParamNameOptional Whether the parameter names are optional\n* @return Function signature node\n*/\nprivate STNode parseFuncSignature(boolean isParamNameOptional) {\nSTNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode parameters = parseParamList(isParamNameOptional);\nSTNode closeParenthesis = parseCloseParenthesis();\nendContext();\nSTNode returnTypeDesc = parseFuncReturnTypeDescriptor();\nreturn STNodeFactory.createFunctionSignatureNode(openParenthesis, parameters, closeParenthesis, returnTypeDesc);\n}\nprivate STNode parseReturnTypeDescRhs(STNode metadata, STNode functionKeyword, STNode funcSignature,\nboolean isObjectMethod, STNode... qualifiers) {\nswitch (peek().kind) {\ncase SEMICOLON_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACKET_TOKEN:\nendContext();\nSTNode typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);\nif (isObjectMethod) {\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, qualifiers[0], readonlyQualifier, typeDesc, fieldName);\n}\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(metadata, qualifiers[0], typedBindingPattern, true);\ncase OPEN_BRACE_TOKEN:\ncase EQUAL_TOKEN:\nbreak;\ndefault:\nbreak;\n}\nSTNode name = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_FUNCTION_NAME);\nfuncSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\nSTNode funcDef =\ncreateFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMethod, name, funcSignature, qualifiers);\nendContext();\nreturn funcDef;\n}\n/**\n* Validate the param list and return. 
If there are params without param-name,\n* then this method will create a new set of params with missing param-name\n* and return.\n*\n* @param signature Function signature\n* @return\n*/\nprivate STNode validateAndGetFuncParams(STFunctionSignatureNode signature) {\nSTNode parameters = signature.parameters;\nint paramCount = parameters.bucketCount();\nint index = 0;\nfor (; index < paramCount; index++) {\nSTNode param = parameters.childInBucket(index);\nswitch (param.kind) {\ncase REQUIRED_PARAM:\nSTRequiredParameterNode requiredParam = (STRequiredParameterNode) param;\nif (isEmpty(requiredParam.paramName)) {\nbreak;\n}\ncontinue;\ncase DEFAULTABLE_PARAM:\nSTDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;\nif (isEmpty(defaultableParam.paramName)) {\nbreak;\n}\ncontinue;\ncase REST_PARAM:\nSTRestParameterNode restParam = (STRestParameterNode) param;\nif (isEmpty(restParam.paramName)) {\nbreak;\n}\ncontinue;\ndefault:\ncontinue;\n}\nbreak;\n}\nif (index == paramCount) {\nreturn signature;\n}\nSTNode updatedParams = getUpdatedParamList(parameters, index);\nreturn STNodeFactory.createFunctionSignatureNode(signature.openParenToken, updatedParams,\nsignature.closeParenToken, signature.returnTypeDesc);\n}\nprivate STNode getUpdatedParamList(STNode parameters, int index) {\nint paramCount = parameters.bucketCount();\nint newIndex = 0;\nArrayList newParams = new ArrayList<>();\nfor (; newIndex < index; newIndex++) {\nnewParams.add(parameters.childInBucket(index));\n}\nfor (; newIndex < paramCount; newIndex++) {\nSTNode param = parameters.childInBucket(newIndex);\nSTNode paramName = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nswitch (param.kind) {\ncase REQUIRED_PARAM:\nSTRequiredParameterNode requiredParam = (STRequiredParameterNode) param;\nif (isEmpty(requiredParam.paramName)) {\nparam = STNodeFactory.createRequiredParameterNode(requiredParam.leadingComma,\nrequiredParam.annotations, requiredParam.visibilityQualifier, requiredParam.typeName,\nparamName);\n}\nbreak;\ncase DEFAULTABLE_PARAM:\nSTDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;\nif (isEmpty(defaultableParam.paramName)) {\nparam = STNodeFactory.createDefaultableParameterNode(defaultableParam.leadingComma,\ndefaultableParam.annotations, defaultableParam.visibilityQualifier,\ndefaultableParam.typeName, paramName, defaultableParam.equalsToken,\ndefaultableParam.expression);\n}\nbreak;\ncase REST_PARAM:\nSTRestParameterNode restParam = (STRestParameterNode) param;\nif (isEmpty(restParam.paramName)) {\nparam = STNodeFactory.createRestParameterNode(restParam.leadingComma, restParam.annotations,\nrestParam.typeName, restParam.ellipsisToken, paramName);\n}\nbreak;\ndefault:\nbreak;\n}\nnewParams.add(param);\n}\nreturn STNodeFactory.createNodeList(newParams);\n}\nprivate boolean isEmpty(STNode node) {\nreturn !SyntaxUtils.isSTNodePresent(node);\n}\n/**\n* Parse function keyword. 
Need to validate the token before consuming,\n* since we can reach here while recovering.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse function name.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNC_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse open parenthesis.\n*\n* @param ctx Context of the parenthesis\n* @return Parsed node\n*/\nprivate STNode parseOpenParenthesis(ParserRuleContext ctx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ctx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse close parenthesis.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseParenthesis() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse parameter list.\n*\n* \n* param-list := required-params [, defaultable-params] [, rest-param]\n* | defaultable-params [, rest-param]\n* | [rest-param]\n*\n* required-params := required-param (, required-param)*\n*\n* required-param := [annots] [public] type-descriptor [param-name]\n*\n* defaultable-params := defaultable-param (, defaultable-param)*\n*\n* defaultable-param := [annots] [public] type-descriptor [param-name] default-value\n*\n* rest-param := [annots] type-descriptor ... [param-name]\n*\n* param-name := identifier\n*
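\n* A minimal illustrative example (hypothetical, not from this codebase): (int a, int b = 0, int... rest) has a required param, a defaultable param, and a rest param, in that order.\n*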
\n*\n* @param isParamNameOptional Whether the param names in the signature is optional or not.\n* @return Parsed node\n*/\nprivate STNode parseParamList(boolean isParamNameOptional) {\nstartContext(ParserRuleContext.PARAM_LIST);\nSTToken token = peek();\nif (isEndOfParametersList(token.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nArrayList paramsList = new ArrayList<>();\nSTNode startingComma = STNodeFactory.createEmptyNode();\nstartContext(ParserRuleContext.REQUIRED_PARAM);\nSTNode firstParam = parseParameter(startingComma, SyntaxKind.REQUIRED_PARAM, isParamNameOptional);\nSyntaxKind prevParamKind = firstParam.kind;\nparamsList.add(firstParam);\nboolean paramOrderErrorPresent = false;\ntoken = peek();\nwhile (!isEndOfParametersList(token.kind)) {\nif (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM) {\nstartContext(ParserRuleContext.DEFAULTABLE_PARAM);\n} else {\nstartContext(ParserRuleContext.REQUIRED_PARAM);\n}\nSTNode paramEnd = parseParameterRhs();\nif (paramEnd == null) {\nendContext();\nbreak;\n}\nSTNode param = parseParameter(paramEnd, prevParamKind, isParamNameOptional);\nif (paramOrderErrorPresent) {\nupdateLastNodeInListWithInvalidNode(paramsList, param, null);\n} else {\nDiagnosticCode paramOrderError = validateParamOrder(param, prevParamKind);\nif (paramOrderError == null) {\nparamsList.add(param);\n} else {\nparamOrderErrorPresent = true;\nupdateLastNodeInListWithInvalidNode(paramsList, param, paramOrderError);\n}\n}\nprevParamKind = param.kind;\ntoken = peek();\n}\nreturn STNodeFactory.createNodeList(paramsList);\n}\n/**\n* Return the appropriate {@code DiagnosticCode} if there are parameter order issues.\n*\n* @param param the new parameter\n* @param prevParamKind the SyntaxKind of the previously added parameter\n*/\nprivate DiagnosticCode validateParamOrder(STNode param, SyntaxKind prevParamKind) {\nif (prevParamKind == SyntaxKind.REST_PARAM) {\nreturn DiagnosticErrorCode.ERROR_PARAMETER_AFTER_THE_REST_PARAMETER;\n} else if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM && param.kind == SyntaxKind.REQUIRED_PARAM) {\nreturn DiagnosticErrorCode.ERROR_REQUIRED_PARAMETER_AFTER_THE_DEFAULTABLE_PARAMETER;\n} else {\nreturn null;\n}\n}\nprivate boolean isNodeWithSyntaxKindInList(List nodeList, SyntaxKind kind) {\nfor (STNode node : nodeList) {\nif (node.kind == kind) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate STNode parseParameterRhs() {\nreturn parseParameterRhs(peek().kind);\n}\nprivate STNode parseParameterRhs(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAM_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameterRhs(solution.tokenKind);\n}\n}\n/**\n* Parse a single parameter. 
Parameter can be a required parameter, a defaultable\n* parameter, or a rest parameter.\n*\n* @param prevParamKind Kind of the parameter that precedes current parameter\n* @param leadingComma Comma that occurs before the param\n* @param isParamNameOptional Whether the param names in the signature is optional or not.\n* @return Parsed node\n*/\nprivate STNode parseParameter(STNode leadingComma, SyntaxKind prevParamKind, boolean isParamNameOptional) {\nSTToken token = peek();\nreturn parseParameter(token.kind, prevParamKind, leadingComma, 1, isParamNameOptional);\n}\nprivate STNode parseParameter(SyntaxKind prevParamKind, STNode leadingComma, int nextTokenOffset,\nboolean isParamNameOptional) {\nreturn parseParameter(peek().kind, prevParamKind, leadingComma, nextTokenOffset, isParamNameOptional);\n}\nprivate STNode parseParameter(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nint nextTokenOffset, boolean isParamNameOptional) {\nSTNode annots;\nswitch (nextTokenKind) {\ncase AT_TOKEN:\nannots = parseOptionalAnnotations(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ncase PUBLIC_KEYWORD:\ncase IDENTIFIER_TOKEN:\nannots = STNodeFactory.createEmptyNodeList();\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nannots = STNodeFactory.createNodeList(new ArrayList<>());\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_START, prevParamKind, leadingComma,\nnextTokenOffset, isParamNameOptional);\nif (solution.action == Action.KEEP) {\nannots = STNodeFactory.createEmptyNodeList();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameter(solution.tokenKind, prevParamKind, leadingComma, 0, isParamNameOptional);\n}\nreturn parseParamGivenAnnots(nextTokenKind, prevParamKind, leadingComma, annots, 1, isParamNameOptional);\n}\nprivate STNode parseParamGivenAnnots(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nint nextNextTokenOffset, boolean isFuncDef) {\nreturn parseParamGivenAnnots(peek().kind, prevParamKind, leadingComma, annots, nextNextTokenOffset, isFuncDef);\n}\nprivate STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nSTNode annots, int nextTokenOffset, boolean isParamNameOptional) {\nSTNode qualifier;\nswitch (nextTokenKind) {\ncase PUBLIC_KEYWORD:\nqualifier = parseQualifier();\nbreak;\ncase IDENTIFIER_TOKEN:\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\ncase AT_TOKEN:\ndefault:\nif (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, prevParamKind,\nleadingComma, annots, nextTokenOffset, isParamNameOptional);\nif (solution.action == Action.KEEP) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParamGivenAnnots(solution.tokenKind, prevParamKind, leadingComma, annots, 0,\nisParamNameOptional);\n}\nreturn parseParamGivenAnnotsAndQualifier(prevParamKind, leadingComma, annots, qualifier, isParamNameOptional);\n}\nprivate STNode parseParamGivenAnnotsAndQualifier(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nSTNode qualifier, boolean isParamNameOptional) {\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode param = 
parseAfterParamType(prevParamKind, leadingComma, annots, qualifier, type, isParamNameOptional);\nendContext();\nreturn param;\n}\nprivate STNode parseAfterParamType(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,\nSTNode type, boolean isParamNameOptional) {\nSTToken token = peek();\nreturn parseAfterParamType(token.kind, prevParamKind, leadingComma, annots, qualifier, type,\nisParamNameOptional);\n}\nprivate STNode parseAfterParamType(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nSTNode annots, STNode qualifier, STNode type, boolean isParamNameOptional) {\nSTNode paramName;\nswitch (tokenKind) {\ncase ELLIPSIS_TOKEN:\nswitchContext(ParserRuleContext.REST_PARAM);\nreportInvalidQualifier(qualifier);\nSTNode ellipsis = parseEllipsis();\nif (isParamNameOptional && peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {\nparamName = STNodeFactory.createEmptyNode();\n} else {\nparamName = parseVariableName();\n}\nreturn STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName);\ncase IDENTIFIER_TOKEN:\nparamName = parseVariableName();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\ncase EQUAL_TOKEN:\nif (!isParamNameOptional) {\nbreak;\n}\nparamName = STNodeFactory.createEmptyNode();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\ndefault:\nif (!isParamNameOptional) {\nbreak;\n}\nparamName = STNodeFactory.createEmptyNode();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, prevParamKind, leadingComma, annots,\nqualifier, type, isParamNameOptional);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAfterParamType(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,\nisParamNameOptional);\n}\n/**\n* Parse ellipsis.\n*\n* @return Parsed node\n*/\nprivate STNode parseEllipsis() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ELLIPSIS_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ELLIPSIS);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse the right hand side of a required/defaultable parameter.\n*
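\n* A parameter that starts out required becomes defaultable once an equal sign\n* follows its name; at that point the parser switches from the REQUIRED_PARAM\n* context to the DEFAULTABLE_PARAM context.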
\n* parameter-rhs := [= expression]\n*\n* @param leadingComma Comma that precedes this parameter\n* @param prevParamKind Kind of the parameter that precedes current parameter\n* @param annots Annotations attached to the parameter\n* @param qualifier Visibility qualifier\n* @param type Type descriptor\n* @param paramName Name of the parameter\n* @return Parsed parameter node\n*/\nprivate STNode parseParameterRhs(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,\nSTNode type, STNode paramName) {\nSTToken token = peek();\nreturn parseParameterRhs(token.kind, prevParamKind, leadingComma, annots, qualifier, type, paramName);\n}\nprivate STNode parseParameterRhs(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nSTNode qualifier, STNode type, STNode paramName) {\nif (isEndOfParameter(tokenKind)) {\nreturn STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName);\n} else if (tokenKind == SyntaxKind.EQUAL_TOKEN) {\nif (prevParamKind == SyntaxKind.REQUIRED_PARAM) {\nswitchContext(ParserRuleContext.DEFAULTABLE_PARAM);\n}\nSTNode equal = parseAssignOp();\nSTNode expr = parseExpression();\nreturn STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal,\nexpr);\n} else {\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_NAME_RHS, prevParamKind, leadingComma,\nannots, qualifier, type, paramName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameterRhs(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,\nparamName);\n}\n}\n/**\n* Parse comma.\n*\n* @return Parsed node\n*/\nprivate STNode parseComma() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COMMA_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMMA);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse return type descriptor of a function. A return type descriptor has the following structure.\n*\n* return-type-descriptor := [ returns annots type-descriptor ]\n*\n* @return Parsed node\n*/\nprivate STNode parseFuncReturnTypeDescriptor() {\nreturn parseFuncReturnTypeDescriptor(peek().kind);\n}\nprivate STNode parseFuncReturnTypeDescriptor(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\ncase EQUAL_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase RETURNS_KEYWORD:\nbreak;\ndefault:\nSTToken nextNextToken = getNextNextToken(nextTokenKind);\nif (nextNextToken.kind == SyntaxKind.RETURNS_KEYWORD) {\nbreak;\n}\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode returnsKeyword = parseReturnsKeyword();\nSTNode annot = parseOptionalAnnotations();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\nreturn STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n}\n/**\n* Parse 'returns' keyword.\n*\n* @return Return-keyword node\n*/\nprivate STNode parseReturnsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETURNS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETURNS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse a type descriptor. A type descriptor has the following structure.\n*
\n* type-descriptor :=\n*  simple-type-descriptor\n*  | structured-type-descriptor\n*  | behavioral-type-descriptor\n*  | singleton-type-descriptor\n*  | union-type-descriptor\n*  | optional-type-descriptor\n*  | any-type-descriptor\n*  | anydata-type-descriptor\n*  | byte-type-descriptor\n*  | json-type-descriptor\n*  | type-descriptor-reference\n*  | ( type-descriptor )\n*\n* type-descriptor-reference := qualified-identifier
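\n*\n* For example (illustrative input): {@code map<string>[]?} is parsed by reading the\n* parameterized type first and then letting parseComplexTypeDescriptor consume the\n* array and optional suffixes in a loop.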
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeDescriptor(ParserRuleContext context) {\nreturn parseTypeDescriptor(context, false, false);\n}\nprivate STNode parseTypeDescriptorInExpression(ParserRuleContext context, boolean isInConditionalExpr) {\nreturn parseTypeDescriptor(context, false, isInConditionalExpr);\n}\nprivate STNode parseTypeDescriptor(ParserRuleContext context, boolean isTypedBindingPattern,\nboolean isInConditionalExpr) {\nstartContext(context);\nSTNode typeDesc = parseTypeDescriptorInternal(context, isTypedBindingPattern, isInConditionalExpr);\nendContext();\nreturn typeDesc;\n}\nprivate STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isInConditionalExpr) {\nreturn parseTypeDescriptorInternal(context, false, isInConditionalExpr);\n}\nprivate STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isTypedBindingPattern,\nboolean isInConditionalExpr) {\nSTToken token = peek();\nSTNode typeDesc = parseTypeDescriptorInternal(token.kind, context, isInConditionalExpr);\nreturn parseComplexTypeDescriptor(typeDesc, context, isTypedBindingPattern);\n}\n/**\n* This will handle the parsing of optional,array,union type desc to infinite length.\n*\n* @param typeDesc\n*\n* @return Parsed type descriptor node\n*/\nprivate STNode parseComplexTypeDescriptor(STNode typeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase QUESTION_MARK_TOKEN:\nif (context == ParserRuleContext.TYPE_DESC_IN_EXPRESSION &&\n!isValidTypeContinuationToken(getNextNextToken(nextToken.kind)) &&\nisValidExprStart(getNextNextToken(nextToken.kind).kind)) {\nreturn typeDesc;\n}\nreturn parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc), context,\nisTypedBindingPattern);\ncase OPEN_BRACKET_TOKEN:\nif (isTypedBindingPattern) {\nreturn typeDesc;\n}\nreturn parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc), context, isTypedBindingPattern);\ncase PIPE_TOKEN:\nreturn parseUnionTypeDescriptor(typeDesc, context, isTypedBindingPattern);\ncase BITWISE_AND_TOKEN:\nreturn parseIntersectionTypeDescriptor(typeDesc, context, isTypedBindingPattern);\ndefault:\nreturn typeDesc;\n}\n}\nprivate boolean isValidTypeContinuationToken(STToken nextToken) {\nswitch (nextToken.kind) {\ncase QUESTION_MARK_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n*
\n* Parse a type descriptor, given the next token kind.\n*
\n* If the preceding token is ? then it is an optional type descriptor\n*\n* @param tokenKind Next token kind\n* @param context Current context\n* @param isInConditionalExpr\n* @return Parsed node\n*/\nprivate STNode parseTypeDescriptorInternal(SyntaxKind tokenKind, ParserRuleContext context,\nboolean isInConditionalExpr) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseTypeReference(isInConditionalExpr);\ncase RECORD_KEYWORD:\nreturn parseRecordTypeDescriptor();\ncase READONLY_KEYWORD:\nSTToken nextNextToken = getNextNextToken(tokenKind);\nSyntaxKind nextNextTokenKind = nextNextToken.kind;\nif (nextNextTokenKind != SyntaxKind.OBJECT_KEYWORD &&\nnextNextTokenKind != SyntaxKind.ABSTRACT_KEYWORD &&\nnextNextTokenKind != SyntaxKind.CLIENT_KEYWORD) {\nreturn parseSimpleTypeDescriptor();\n}\ncase OBJECT_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CLIENT_KEYWORD:\nreturn parseObjectTypeDescriptor();\ncase OPEN_PAREN_TOKEN:\nreturn parseNilOrParenthesisedTypeDesc();\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\nreturn parseParameterizedTypeDescriptor();\ncase TYPEDESC_KEYWORD:\nreturn parseTypedescTypeDescriptor();\ncase ERROR_KEYWORD:\nreturn parseErrorTypeDescriptor();\ncase XML_KEYWORD:\nreturn parseXmlTypeDescriptor();\ncase STREAM_KEYWORD:\nreturn parseStreamTypeDescriptor();\ncase TABLE_KEYWORD:\nreturn parseTableTypeDescriptor();\ncase FUNCTION_KEYWORD:\nreturn parseFunctionTypeDesc();\ncase OPEN_BRACKET_TOKEN:\nreturn parseTupleTypeDesc();\ncase DISTINCT_KEYWORD:\nreturn parseDistinctTypeDesc(context);\ndefault:\nif (isSingletonTypeDescStart(tokenKind, true)) {\nreturn parseSingletonTypeDesc();\n}\nif (isSimpleType(tokenKind)) {\nreturn parseSimpleTypeDescriptor();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR, context, isInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypeDescriptorInternal(solution.tokenKind, context, isInConditionalExpr);\n}\n}\n/**\n* Parse distinct type descriptor.\n*
\n* \n* distinct-type-descriptor := distinct type-descriptor\n* \n*\n* @param context Context in which the type desc is used.\n* @return Distinct type descriptor\n*/\nprivate STNode parseDistinctTypeDesc(ParserRuleContext context) {\nSTNode distinctKeyword = parseDistinctKeyword();\nSTNode typeDesc = parseTypeDescriptor(context);\nreturn STNodeFactory.createDistinctTypeDescriptorNode(distinctKeyword, typeDesc);\n}\nprivate STNode parseDistinctKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DISTINCT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DISTINCT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseNilOrParenthesisedTypeDesc() {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nreturn parseNilOrParenthesisedTypeDescRhs(openParen);\n}\nprivate STNode parseNilOrParenthesisedTypeDescRhs(STNode openParen) {\nreturn parseNilOrParenthesisedTypeDescRhs(peek().kind, openParen);\n}\nprivate STNode parseNilOrParenthesisedTypeDescRhs(SyntaxKind nextTokenKind, STNode openParen) {\nSTNode closeParen;\nswitch (nextTokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncloseParen = parseCloseParenthesis();\nreturn STNodeFactory.createNilTypeDescriptorNode(openParen, closeParen);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nSTNode typedesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_PARENTHESIS);\ncloseParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typedesc, closeParen);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.NIL_OR_PARENTHESISED_TYPE_DESC_RHS, openParen);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseNilOrParenthesisedTypeDescRhs(solution.tokenKind, openParen);\n}\n}\n/**\n* Parse simple type descriptor.\n*\n* @return Parsed node\n*/\nprivate STNode parseSimpleTypeDescriptor() {\nSTToken node = peek();\nif (isSimpleType(node.kind)) {\nSTToken token = consume();\nreturn createBuiltinSimpleNameReference(token);\n} else {\nSolution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR);\nSTNode recoveredNode = sol.recoveredNode;\nreturn createBuiltinSimpleNameReference(recoveredNode);\n}\n}\nprivate STNode createBuiltinSimpleNameReference(STNode token) {\nSyntaxKind typeKind = getTypeSyntaxKind(token.kind);\nreturn STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token);\n}\n/**\n*
\n* Parse function body. A function body has the following structure.\n*
\n* \n* function-body := function-body-block | external-function-body\n* external-function-body := = annots external ;\n* function-body-block := { [default-worker-init, named-worker-decl+] default-worker }\n* \n*\n* @param isObjectMethod Flag indicating whether this is an object-method\n* @return Parsed node\n*/\nprivate STNode parseFunctionBody(boolean isObjectMethod) {\nSTToken token = peek();\nreturn parseFunctionBody(token.kind, isObjectMethod);\n}\n/**\n* Parse function body, given the next token kind.\n*\n* @param tokenKind Next token kind\n* @param isObjectMethod Flag indicating whether this is an object-method\n* @return Parsed node\n*/\nprotected STNode parseFunctionBody(SyntaxKind tokenKind, boolean isObjectMethod) {\nswitch (tokenKind) {\ncase EQUAL_TOKEN:\nreturn parseExternalFunctionBody();\ncase OPEN_BRACE_TOKEN:\nreturn parseFunctionBodyBlock(false);\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn parseExpressionFuncBody(false, false);\ncase SEMICOLON_TOKEN:\nif (isObjectMethod) {\nreturn parseSemicolon();\n}\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FUNC_BODY, isObjectMethod);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.tokenKind == SyntaxKind.NONE) {\nreturn STNodeFactory.createMissingToken(solution.tokenKind);\n}\nreturn parseFunctionBody(solution.tokenKind, isObjectMethod);\n}\n}\n/**\n*
\n* Parse function body block. A function body block has the following structure.\n*\n* function-body-block := { [default-worker-init, named-worker-decl+] default-worker }\n* default-worker-init := sequence-stmt\n* default-worker := sequence-stmt\n* named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }\n* worker-name := identifier\n*
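\n*\n* For example (illustrative): in {@code { int a = 1; worker w { } return a; }} the\n* first statement is default-worker-init, {@code w} is a named worker, and the\n* trailing statements form the default worker.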
\n*\n* @param isAnonFunc Flag indicating whether the func body belongs to an anonymous function\n* @return Parsed node\n*/\nprivate STNode parseFunctionBodyBlock(boolean isAnonFunc) {\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nSTNode openBrace = parseOpenBrace();\nSTToken token = peek();\nArrayList firstStmtList = new ArrayList<>();\nArrayList workers = new ArrayList<>();\nArrayList secondStmtList = new ArrayList<>();\nParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT;\nboolean hasNamedWorkers = false;\nwhile (!isEndOfFuncBodyBlock(token.kind, isAnonFunc)) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nswitch (currentCtx) {\ncase DEFAULT_WORKER_INIT:\nif (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) {\nfirstStmtList.add(stmt);\nbreak;\n}\ncurrentCtx = ParserRuleContext.NAMED_WORKERS;\nhasNamedWorkers = true;\ncase NAMED_WORKERS:\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\nworkers.add(stmt);\nbreak;\n}\ncurrentCtx = ParserRuleContext.DEFAULT_WORKER;\ncase DEFAULT_WORKER:\ndefault:\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\nupdateLastNodeInListWithInvalidNode(secondStmtList, stmt,\nDiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE);\nbreak;\n}\nsecondStmtList.add(stmt);\nbreak;\n}\ntoken = peek();\n}\nSTNode namedWorkersList;\nSTNode statements;\nif (hasNamedWorkers) {\nSTNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList);\nSTNode namedWorkers = STNodeFactory.createNodeList(workers);\nnamedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers);\nstatements = STNodeFactory.createNodeList(secondStmtList);\n} else {\nnamedWorkersList = STNodeFactory.createEmptyNode();\nstatements = STNodeFactory.createNodeList(firstStmtList);\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace);\n}\nprivate boolean isEndOfFuncBodyBlock(SyntaxKind nextTokenKind, boolean isAnonFunc) {\nif (isAnonFunc) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase PUBLIC_KEYWORD:\ncase EOF_TOKEN:\ncase EQUAL_TOKEN:\ncase BACKTICK_TOKEN:\nreturn true;\ndefault:\nbreak;\n}\n}\nreturn isEndOfStatements();\n}\nprivate boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase TYPE_KEYWORD:\ncase PUBLIC_KEYWORD:\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\nprivate boolean isEndOfObjectTypeNode() {\nreturn endOfModuleLevelNode(1, true);\n}\nprivate boolean isEndOfStatements() {\nswitch (peek().kind) {\ncase RESOURCE_KEYWORD:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\nprivate boolean endOfModuleLevelNode(int peekIndex) {\nreturn endOfModuleLevelNode(peekIndex, false);\n}\nprivate boolean endOfModuleLevelNode(int peekIndex, boolean isObject) {\nswitch (peek(peekIndex).kind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\ncase IMPORT_KEYWORD:\ncase CONST_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase LISTENER_KEYWORD:\nreturn true;\ncase SERVICE_KEYWORD:\nreturn isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1);\ncase PUBLIC_KEYWORD:\nreturn endOfModuleLevelNode(peekIndex + 1, isObject);\ncase FUNCTION_KEYWORD:\nif (isObject) {\nreturn false;\n}\nreturn peek(peekIndex + 1).kind == SyntaxKind.IDENTIFIER_TOKEN;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the given token is 
an end of a parameter.\n*\n* @param tokenKind Next token kind\n* @return true if the token represents an end of a parameter. false otherwise\n*/\nprivate boolean isEndOfParameter(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase AT_TOKEN:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\n/**\n* Check whether the given token is an end of a parameter-list.\n*\n* @param tokenKind Next token kind\n* @return true if the token represents an end of a parameter-list. false otherwise\n*/\nprivate boolean isEndOfParametersList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncase SEMICOLON_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\n/**\n* Parse type reference or variable reference.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartIdentifier() {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\n}\n/**\n* Parse variable name.\n*\n* @return Parsed node\n*/\nprivate STNode parseVariableName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse open brace.\n*\n* @return Parsed node\n*/\nprivate STNode parseOpenBrace() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPEN_BRACE);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse close brace.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseBrace() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_BRACE);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse external function body. An external function body has the following structure.\n*
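\n* An external function has no block body; it ends with an {@code = [annots] external ;}\n* marker instead, e.g. {@code function id(int x) returns int = external;} (illustrative).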
\n* \n* external-function-body := = annots external ;\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseExternalFunctionBody() {\nstartContext(ParserRuleContext.EXTERNAL_FUNC_BODY);\nSTNode assign = parseAssignOp();\nreturn parseExternalFuncBodyRhs(assign);\n}\nprivate STNode parseExternalFuncBodyRhs(STNode assign) {\nSTToken nextToken = peek();\nreturn parseExternalFuncBodyRhs(nextToken, assign);\n}\nprivate STNode parseExternalFuncBodyRhs(STToken nextToken, STNode assign) {\nSTNode annotation;\nswitch (nextToken.kind) {\ncase AT_TOKEN:\nannotation = parseAnnotations();\nbreak;\ncase EXTERNAL_KEYWORD:\nannotation = STNodeFactory.createNodeList();\nbreak;\ndefault:\nSolution solution = recover(nextToken, ParserRuleContext.EXTERNAL_FUNC_BODY_OPTIONAL_ANNOTS, assign);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseExternalFuncBodyRhs((STToken) solution.recoveredNode, assign);\n}\nSTNode externalKeyword = parseExternalKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon);\n}\n/**\n* Parse semicolon.\n*\n* @return Parsed node\n*/\nprivate STNode parseSemicolon() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SEMICOLON_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SEMICOLON);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse external keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseExternalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.EXTERNAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/*\n* Operators\n*/\n/**\n* Parse assign operator.\n*\n* @return Parsed node\n*/\nprivate STNode parseAssignOp() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ASSIGN_OP);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse binary operator.\n*\n* @return Parsed node\n*/\nprivate STNode parseBinaryOperator() {\nSTToken token = peek();\nif (isBinaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BINARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a binary operator.\n*\n* @param kind STToken kind\n* @return true if the token kind refers to a binary operator. 
false otherwise\n*/\nprivate boolean isBinaryOperator(SyntaxKind kind) {\nswitch (kind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase SLASH_TOKEN:\ncase ASTERISK_TOKEN:\ncase GT_TOKEN:\ncase LT_TOKEN:\ncase DOUBLE_EQUAL_TOKEN:\ncase TRIPPLE_EQUAL_TOKEN:\ncase LT_EQUAL_TOKEN:\ncase GT_EQUAL_TOKEN:\ncase NOT_EQUAL_TOKEN:\ncase NOT_DOUBLE_EQUAL_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase BITWISE_XOR_TOKEN:\ncase PIPE_TOKEN:\ncase LOGICAL_AND_TOKEN:\ncase LOGICAL_OR_TOKEN:\ncase PERCENT_TOKEN:\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\ncase ELLIPSIS_TOKEN:\ncase DOUBLE_DOT_LT_TOKEN:\ncase ELVIS_TOKEN:\ncase EQUALS_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Get the precedence of a given operator.\n*\n* @param binaryOpKind Operator kind\n* @return Precedence of the given operator\n*/\nprivate OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) {\nswitch (binaryOpKind) {\ncase ASTERISK_TOKEN:\ncase SLASH_TOKEN:\ncase PERCENT_TOKEN:\nreturn OperatorPrecedence.MULTIPLICATIVE;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn OperatorPrecedence.ADDITIVE;\ncase GT_TOKEN:\ncase LT_TOKEN:\ncase GT_EQUAL_TOKEN:\ncase LT_EQUAL_TOKEN:\ncase IS_KEYWORD:\nreturn OperatorPrecedence.BINARY_COMPARE;\ncase DOT_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase ANNOT_CHAINING_TOKEN:\ncase OPTIONAL_CHAINING_TOKEN:\ncase DOT_LT_TOKEN:\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nreturn OperatorPrecedence.MEMBER_ACCESS;\ncase DOUBLE_EQUAL_TOKEN:\ncase TRIPPLE_EQUAL_TOKEN:\ncase NOT_EQUAL_TOKEN:\ncase NOT_DOUBLE_EQUAL_TOKEN:\ncase EQUALS_KEYWORD:\nreturn OperatorPrecedence.EQUALITY;\ncase BITWISE_AND_TOKEN:\nreturn OperatorPrecedence.BITWISE_AND;\ncase BITWISE_XOR_TOKEN:\nreturn OperatorPrecedence.BITWISE_XOR;\ncase PIPE_TOKEN:\nreturn OperatorPrecedence.BITWISE_OR;\ncase LOGICAL_AND_TOKEN:\nreturn OperatorPrecedence.LOGICAL_AND;\ncase LOGICAL_OR_TOKEN:\nreturn OperatorPrecedence.LOGICAL_OR;\ncase RIGHT_ARROW_TOKEN:\nreturn OperatorPrecedence.REMOTE_CALL_ACTION;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn OperatorPrecedence.ANON_FUNC_OR_LET;\ncase SYNC_SEND_TOKEN:\nreturn OperatorPrecedence.ACTION;\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\nreturn OperatorPrecedence.SHIFT;\ncase ELLIPSIS_TOKEN:\ncase DOUBLE_DOT_LT_TOKEN:\nreturn OperatorPrecedence.RANGE;\ncase ELVIS_TOKEN:\nreturn OperatorPrecedence.ELVIS_CONDITIONAL;\ncase QUESTION_MARK_TOKEN:\ncase COLON_TOKEN:\nreturn OperatorPrecedence.CONDITIONAL;\ndefault:\nthrow new UnsupportedOperationException(\"Unsupported binary operator '\" + binaryOpKind + \"'\");\n}\n}\n/**\n*
\n* Get the operator kind to insert during recovery, given the precedence level.\n*
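\n* Inserting a representative operator of the current precedence level keeps the\n* partially built expression tree consistent while recovering from a missing\n* binary operator.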
\n*\n* @param opPrecedenceLevel Precedence of the given operator\n* @return Kind of the operator to insert\n*/\nprivate SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) {\nswitch (opPrecedenceLevel) {\ncase DEFAULT:\ncase UNARY:\ncase ACTION:\ncase EXPRESSION_ACTION:\ncase REMOTE_CALL_ACTION:\ncase ANON_FUNC_OR_LET:\ncase QUERY:\ncase MULTIPLICATIVE:\nreturn SyntaxKind.ASTERISK_TOKEN;\ncase ADDITIVE:\nreturn SyntaxKind.PLUS_TOKEN;\ncase SHIFT:\nreturn SyntaxKind.DOUBLE_LT_TOKEN;\ncase RANGE:\nreturn SyntaxKind.ELLIPSIS_TOKEN;\ncase BINARY_COMPARE:\nreturn SyntaxKind.LT_TOKEN;\ncase EQUALITY:\nreturn SyntaxKind.DOUBLE_EQUAL_TOKEN;\ncase BITWISE_AND:\nreturn SyntaxKind.BITWISE_AND_TOKEN;\ncase BITWISE_XOR:\nreturn SyntaxKind.BITWISE_XOR_TOKEN;\ncase BITWISE_OR:\nreturn SyntaxKind.PIPE_TOKEN;\ncase LOGICAL_AND:\nreturn SyntaxKind.LOGICAL_AND_TOKEN;\ncase LOGICAL_OR:\nreturn SyntaxKind.LOGICAL_OR_TOKEN;\ncase ELVIS_CONDITIONAL:\nreturn SyntaxKind.ELVIS_TOKEN;\ndefault:\nthrow new UnsupportedOperationException(\n\"Unsupported operator precedence level'\" + opPrecedenceLevel + \"'\");\n}\n}\n/**\n*
\n* Parse a module type definition.\n*
\n* module-type-defn := metadata [public] type identifier type-descriptor ;\n*\n* @param metadata Metadata\n* @param qualifier Visibility qualifier\n* @return Parsed node\n*/\nprivate STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.MODULE_TYPE_DEFINITION);\nSTNode typeKeyword = parseTypeKeyword();\nSTNode typeName = parseTypeName();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor,\nsemicolon);\n}\n/**\n* Parse type keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse type name.\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPE_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse record type descriptor. A record type descriptor body has the following structure.\n*\n* record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor\n* inclusive-record-type-descriptor := record { field-descriptor* }\n* exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |}\n*
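\n*\n* For example (illustrative): {@code record { int x; }} is an inclusive (open) record,\n* while {@code record {| int x; |}} is exclusive (closed) and may end with a single\n* record-rest-descriptor before its closing delimiter.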
\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordTypeDescriptor() {\nstartContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR);\nSTNode recordKeyword = parseRecordKeyword();\nSTNode bodyStartDelimiter = parseRecordBodyStartDelimiter();\nboolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN;\nSTNode fields = parseFieldDescriptors(isInclusive);\nSTNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind);\nendContext();\nreturn STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields,\nbodyEndDelimiter);\n}\n/**\n* Parse record body start delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordBodyStartDelimiter() {\nSTToken token = peek();\nreturn parseRecordBodyStartDelimiter(token.kind);\n}\nprivate STNode parseRecordBodyStartDelimiter(SyntaxKind kind) {\nswitch (kind) {\ncase OPEN_BRACE_PIPE_TOKEN:\nreturn parseClosedRecordBodyStart();\ncase OPEN_BRACE_TOKEN:\nreturn parseOpenBrace();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_BODY_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRecordBodyStartDelimiter(solution.tokenKind);\n}\n}\n/**\n* Parse closed-record body start delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseClosedRecordBodyStart() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse record body close delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) {\nswitch (startingDelimeter) {\ncase OPEN_BRACE_PIPE_TOKEN:\nreturn parseClosedRecordBodyEnd();\ncase OPEN_BRACE_TOKEN:\nreturn parseCloseBrace();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_BODY_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRecordBodyCloseDelimiter(solution.tokenKind);\n}\n}\n/**\n* Parse closed-record body end delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseClosedRecordBodyEnd() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse record keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RECORD_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RECORD_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse field descriptors.\n*
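\n* Any field that appears after a record-rest-descriptor is not added to the field\n* list; it is attached to the preceding node as an invalid-node diagnostic.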
\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptors(boolean isInclusive) {\nArrayList recordFields = new ArrayList<>();\nSTToken token = peek();\nboolean endOfFields = false;\nwhile (!isEndOfRecordTypeNode(token.kind)) {\nSTNode field = parseFieldOrRestDescriptor(isInclusive);\nif (field == null) {\nendOfFields = true;\nbreak;\n}\nrecordFields.add(field);\ntoken = peek();\nif (field.kind == SyntaxKind.RECORD_REST_TYPE) {\nbreak;\n}\n}\nwhile (!endOfFields && !isEndOfRecordTypeNode(token.kind)) {\nSTNode invalidField = parseFieldOrRestDescriptor(isInclusive);\nupdateLastNodeInListWithInvalidNode(recordFields, invalidField,\nDiagnosticErrorCode.ERROR_MORE_RECORD_FIELDS_AFTER_REST_FIELD);\ntoken = peek();\n}\nreturn STNodeFactory.createNodeList(recordFields);\n}\n/**\n*
\n* Parse field descriptor or rest descriptor.\n*\n* field-descriptor := individual-field-descriptor | record-type-reference\n* individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ;\n* field-name := identifier\n* default-value := = expression\n* record-type-reference := * type-reference ;\n* record-rest-descriptor := type-descriptor ... ;\n*
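\n*\n* For example (illustrative): {@code record {| *Person; int id; string name = ""; boolean...; |}}\n* shows a type reference, a plain field, a field with a default value, and a rest\n* descriptor.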
\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldOrRestDescriptor(boolean isInclusive) {\nreturn parseFieldOrRestDescriptor(peek().kind, isInclusive);\n}\nprivate STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\nstartContext(ParserRuleContext.RECORD_FIELD);\nSTNode asterisk = consume();\nSTNode type = parseTypeReference();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nstartContext(ParserRuleContext.RECORD_FIELD);\nSTNode metadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nreturn parseRecordField(nextTokenKind, isInclusive, metadata);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nstartContext(ParserRuleContext.RECORD_FIELD);\nmetadata = createEmptyMetadata();\nreturn parseRecordField(nextTokenKind, isInclusive, metadata);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldOrRestDescriptor(solution.tokenKind, isInclusive);\n}\n}\nprivate STNode parseRecordField(SyntaxKind nextTokenKind, boolean isInclusive, STNode metadata) {\nif (nextTokenKind != SyntaxKind.READONLY_KEYWORD) {\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\nSTNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, metadata, type);\nendContext();\nreturn fieldOrRestDesc;\n}\nSTNode type;\nSTNode fieldOrRestDesc;\nSTNode readOnlyQualifier;\nreadOnlyQualifier = parseReadonlyKeyword();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode fieldNameOrTypeDesc = parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME);\nif (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\ntype = fieldNameOrTypeDesc;\n} else {\nnextToken = peek();\nswitch (nextToken.kind) {\ncase SEMICOLON_TOKEN:\ncase EQUAL_TOKEN:\ntype = createBuiltinSimpleNameReference(readOnlyQualifier);\nreadOnlyQualifier = STNodeFactory.createEmptyNode();\nSTNode fieldName = ((STSimpleNameReferenceNode) fieldNameOrTypeDesc).name;\nreturn parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName);\ndefault:\ntype = fieldNameOrTypeDesc;\nbreak;\n}\n}\n} else if (isTypeStartingToken(nextToken.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\n} else {\nreadOnlyQualifier = createBuiltinSimpleNameReference(readOnlyQualifier);\ntype = parseComplexTypeDescriptor(readOnlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);\nreadOnlyQualifier = STNodeFactory.createEmptyNode();\n}\nfieldOrRestDesc = parseIndividualRecordField(metadata, readOnlyQualifier, type);\nendContext();\nreturn fieldOrRestDesc;\n}\nprivate STNode parseFieldDescriptor(boolean isInclusive, STNode metadata, STNode type) {\nif (isInclusive) {\nSTNode readOnlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseIndividualRecordField(metadata, readOnlyQualifier, type);\n} else {\nreturn parseFieldOrRestDescriptorRhs(metadata, type);\n}\n}\nprivate STNode parseIndividualRecordField(STNode metadata, STNode readOnlyQualifier, STNode type) {\nSTNode fieldName = parseVariableName();\nreturn parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName);\n}\n/**\n* 
Parse type reference.\n* type-reference := identifier | qualified-identifier\n*\n* @return Type reference node\n*/\nprivate STNode parseTypeReference() {\nSTNode typeReference = parseTypeDescriptor(ParserRuleContext.TYPE_REFERENCE);\nif (typeReference.kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||\ntypeReference.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nreturn typeReference;\n}\nSTNode emptyNameReference = STNodeFactory.createSimpleNameReferenceNode(\nSyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\nemptyNameReference = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(emptyNameReference, typeReference,\nDiagnosticErrorCode.ONLY_TYPE_REFERENCE_ALLOWED_HERE_AS_TYPE_INCLUSIONS);\nreturn emptyNameReference;\n}\nprivate STNode parseTypeReference(boolean isInConditionalExpr) {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE, isInConditionalExpr);\n}\n/**\n* Parse identifier or qualified identifier.\n*\n* @return Identifier node\n*/\nprivate STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) {\nreturn parseQualifiedIdentifier(currentCtx, false);\n}\nprivate STNode parseQualifiedIdentifier(ParserRuleContext currentCtx, boolean isInConditionalExpr) {\nSTToken token = peek();\nSTNode typeRefOrPkgRef;\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\ntypeRefOrPkgRef = consume();\n} else {\nSolution sol = recover(token, currentCtx, isInConditionalExpr);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nif (sol.tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseQualifiedIdentifier(currentCtx, isInConditionalExpr);\n}\ntypeRefOrPkgRef = sol.recoveredNode;\n}\nreturn parseQualifiedIdentifier(typeRefOrPkgRef, isInConditionalExpr);\n}\n/**\n* Parse identifier or qualified identifier, given the starting identifier.\n*\n* @param identifier Starting identifier\n* @return Parse node\n*/\nprivate STNode parseQualifiedIdentifier(STNode identifier, boolean isInConditionalExpr) {\nSTToken nextToken = peek(1);\nif (nextToken.kind != SyntaxKind.COLON_TOKEN) {\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\nSTToken nextNextToken = peek(2);\nswitch (nextNextToken.kind) {\ncase IDENTIFIER_TOKEN:\nSTToken colon = consume();\nSTNode varOrFuncName = consume();\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);\ncase MAP_KEYWORD:\ncolon = consume();\nSTToken mapKeyword = consume();\nSTNode refName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, refName);\ncase COLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseQualifiedIdentifier(identifier, isInConditionalExpr);\ndefault:\nif (isInConditionalExpr) {\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\ncolon = consume();\nvarOrFuncName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);\n}\n}\n/**\n* Parse RHS of a field or rest type descriptor.\n*\n* @param metadata Metadata\n* @param type Type descriptor\n* @return Parsed node\n*/\nprivate STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) {\nSTToken token = peek();\nreturn parseFieldOrRestDescriptorRhs(token.kind, metadata, 
type);\n}\nprivate STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) {\nswitch (kind) {\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken);\ncase IDENTIFIER_TOKEN:\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseIndividualRecordField(metadata, readonlyQualifier, type);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type);\n}\n}\n/**\n*
\n* Parse field descriptor rhs.\n*
\n*\n* @param metadata Metadata\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptorRhs(STNode metadata, STNode readonlyQualifier, STNode type, STNode fieldName) {\nSTToken token = peek();\nreturn parseFieldDescriptorRhs(token.kind, metadata, readonlyQualifier, type, fieldName);\n}\n/**\n*
\n* Parse field descriptor rhs.\n*\n* field-descriptor := [? | default-value] ;\n* default-value := = expression\n*
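\n* For example: {@code int id;} is a plain field, {@code int id?;} is optional, and\n* {@code int id = 0;} has a default value; these map to the semicolon, question-mark,\n* and equal-token branches below.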
\n*\n* @param kind Kind of the next token\n* @param metadata Metadata\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode readonlyQualifier, STNode type,\nSTNode fieldName) {\nswitch (kind) {\ncase SEMICOLON_TOKEN:\nSTNode questionMarkToken = STNodeFactory.createEmptyNode();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName,\nquestionMarkToken, semicolonToken);\ncase QUESTION_MARK_TOKEN:\nquestionMarkToken = parseQuestionMark();\nsemicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName,\nquestionMarkToken, semicolonToken);\ncase EQUAL_TOKEN:\nSTNode equalsToken = parseAssignOp();\nSTNode expression = parseExpression();\nsemicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, readonlyQualifier, type, fieldName,\nequalsToken, expression, semicolonToken);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, readonlyQualifier,\ntype, fieldName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldDescriptorRhs(solution.tokenKind, metadata, readonlyQualifier, type, fieldName);\n}\n}\n/**\n* Parse question mark.\n*\n* @return Parsed node\n*/\nprivate STNode parseQuestionMark() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.QUESTION_MARK);\nreturn sol.recoveredNode;\n}\n}\n/*\n* Statements\n*/\n/**\n* Parse statements, until an end of a block is reached.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatements() {\nArrayList stmts = new ArrayList<>();\nreturn parseStatements(stmts);\n}\nprivate STNode parseStatements(ArrayList stmts) {\nwhile (!isEndOfStatements()) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\naddInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE);\nbreak;\n}\nstmts.add(stmt);\n}\nreturn STNodeFactory.createNodeList(stmts);\n}\n/**\n* Parse a single statement.\n*\n* @return Parsed node\n*/\nprotected STNode parseStatement() {\nSTToken token = peek();\nreturn parseStatement(token.kind, 1);\n}\nprivate STNode parseStatement(SyntaxKind tokenKind, int nextTokenIndex) {\nSTNode annots = null;\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase SEMICOLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseStatement();\ncase AT_TOKEN:\nannots = parseOptionalAnnotations(tokenKind);\ntokenKind = peek().kind;\nbreak;\ncase FINAL_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase PANIC_KEYWORD:\ncase CONTINUE_KEYWORD:\ncase BREAK_KEYWORD:\ncase RETURN_KEYWORD:\ncase TYPE_KEYWORD:\ncase LOCK_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase FORK_KEYWORD:\ncase FOREACH_KEYWORD:\ncase XMLNS_KEYWORD:\ncase TRANSACTION_KEYWORD:\ncase RETRY_KEYWORD:\ncase ROLLBACK_KEYWORD:\ncase MATCH_KEYWORD:\ncase CHECK_KEYWORD:\ncase FAIL_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase TRAP_KEYWORD:\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase COMMIT_KEYWORD:\ncase WORKER_KEYWORD:\nbreak;\ndefault:\nif (isTypeStartingToken(tokenKind)) {\nbreak;\n}\nif (isValidExpressionStart(tokenKind, 
nextTokenIndex)) {\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STATEMENT, nextTokenIndex);\nif (solution.action == Action.KEEP) {\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatement(solution.tokenKind, nextTokenIndex);\n}\nreturn parseStatement(tokenKind, annots, nextTokenIndex);\n}\nprivate STNode getAnnotations(STNode nullbaleAnnot) {\nif (nullbaleAnnot != null) {\nreturn nullbaleAnnot;\n}\nreturn STNodeFactory.createEmptyNodeList();\n}\nprivate STNode parseStatement(STNode annots) {\nreturn parseStatement(peek().kind, annots, 1);\n}\n/**\n* Parse a single statement, given the next token kind.\n*\n* @param tokenKind Next token kind\n* @return Parsed node\n*/\nprivate STNode parseStatement(SyntaxKind tokenKind, STNode annots, int nextTokenIndex) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\naddInvalidNodeToNextToken(annots, DiagnosticErrorCode.ERROR_INVALID_ANNOTATIONS);\nreturn null;\ncase SEMICOLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseStatement(annots);\ncase FINAL_KEYWORD:\nSTNode finalKeyword = parseFinalKeyword();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword, false);\ncase IF_KEYWORD:\nreturn parseIfElseBlock();\ncase WHILE_KEYWORD:\nreturn parseWhileStatement();\ncase PANIC_KEYWORD:\nreturn parsePanicStatement();\ncase CONTINUE_KEYWORD:\nreturn parseContinueStatement();\ncase BREAK_KEYWORD:\nreturn parseBreakStatement();\ncase RETURN_KEYWORD:\nreturn parseReturnStatement();\ncase TYPE_KEYWORD:\nreturn parseLocalTypeDefinitionStatement(getAnnotations(annots));\ncase LOCK_KEYWORD:\nreturn parseLockStatement();\ncase OPEN_BRACE_TOKEN:\nreturn parseStatementStartsWithOpenBrace();\ncase WORKER_KEYWORD:\nreturn parseNamedWorkerDeclaration(getAnnotations(annots));\ncase FORK_KEYWORD:\nreturn parseForkStatement();\ncase FOREACH_KEYWORD:\nreturn parseForEachStatement();\ncase START_KEYWORD:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase FAIL_KEYWORD:\ncase TRAP_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase FROM_KEYWORD:\ncase COMMIT_KEYWORD:\nreturn parseExpressionStatement(tokenKind, getAnnotations(annots));\ncase XMLNS_KEYWORD:\nreturn parseXMLNamespaceDeclaration(false);\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionStatement();\ncase RETRY_KEYWORD:\nreturn parseRetryStatement();\ncase ROLLBACK_KEYWORD:\nreturn parseRollbackStatement();\ncase OPEN_BRACKET_TOKEN:\nreturn parseStatementStartsWithOpenBracket(getAnnotations(annots), false);\ncase FUNCTION_KEYWORD:\ncase OPEN_PAREN_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_KEYWORD:\ncase XML_KEYWORD:\nreturn parseStmtStartsWithTypeOrExpr(tokenKind, getAnnotations(annots));\ncase MATCH_KEYWORD:\nreturn parseMatchStatement();\ndefault:\nif (isValidExpressionStart(tokenKind, nextTokenIndex)) {\nreturn parseStatementStartWithExpr(getAnnotations(annots));\n}\nif (isTypeStartingToken(tokenKind)) {\nfinalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword, false);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots, nextTokenIndex);\nif (solution.action == Action.KEEP) {\nfinalKeyword = 
STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword, false);\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatement(solution.tokenKind, annots, nextTokenIndex - 1);\n}\n}\n/**\n*
\n* Parse a variable declaration. A variable declaration can occur at local or module level.\n*\n* local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt\n* local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ;\n* local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ;
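\n*\n* For example (illustrative): {@code final int x = compute();} is a\n* local-init-var-decl-stmt, while {@code string s;} is a local-no-init-var-decl-stmt.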
\n*\n* @param annots Annotations or metadata\n* @param finalKeyword Final keyword\n* @return Parsed node\n*/\nprivate STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) {\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode typeBindingPattern = parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, isModuleVar);\n}\n/**\n* Parse final keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseFinalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FINAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FINAL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse the right hand side of a variable declaration statement.\n*
\n* \n* var-decl-rhs := ; | = action-or-expr ;\n* \n*\n* @param metadata metadata\n* @param finalKeyword Final keyword\n* @param typedBindingPattern Typed binding pattern\n* @return Parsed node\n*/\nprivate STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode typedBindingPattern,\nboolean isModuleVar) {\nSTToken token = peek();\nreturn parseVarDeclRhs(token.kind, metadata, finalKeyword, typedBindingPattern, isModuleVar);\n}\n/**\n* Parse the right hand side of a variable declaration statement, given the\n* next token kind.\n*\n* @param tokenKind Next token kind\n* @param metadata Metadata\n* @param finalKeyword Final keyword\n* @param typedBindingPattern Typed binding pattern\n* @param isModuleVar flag indicating whether the var is module level\n* @return Parsed node\n*/\nprivate STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword,\nSTNode typedBindingPattern, boolean isModuleVar) {\nSTNode assign;\nSTNode expr;\nSTNode semicolon;\nswitch (tokenKind) {\ncase EQUAL_TOKEN:\nassign = parseAssignOp();\nif (isModuleVar) {\nexpr = parseExpression();\n} else {\nexpr = parseActionOrExpression();\n}\nsemicolon = parseSemicolon();\nbreak;\ncase SEMICOLON_TOKEN:\nassign = STNodeFactory.createEmptyNode();\nexpr = STNodeFactory.createEmptyNode();\nsemicolon = parseSemicolon();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword,\ntypedBindingPattern, isModuleVar);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, typedBindingPattern, isModuleVar);\n}\nendContext();\nif (isModuleVar) {\nreturn STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern,\nassign, expr, semicolon);\n}\nreturn STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern, assign, expr,\nsemicolon);\n}\n/**\n*
\n* Parse the RHS portion of the assignment.\n*
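\n* If the left-hand side is not a valid lvalue expression, it is replaced with a\n* missing identifier and the original expression is attached as an\n* invalid-expression diagnostic.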
\n* assignment-stmt-rhs := = action-or-expr ;\n*\n* @param lvExpr LHS expression\n* @return Parsed node\n*/\nprivate STNode parseAssignmentStmtRhs(STNode lvExpr) {\nSTNode assign = parseAssignOp();\nSTNode expr = parseActionOrExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nif (lvExpr.kind == SyntaxKind.FUNCTION_CALL &&\nisPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) lvExpr)) {\nlvExpr = getBindingPattern(lvExpr);\n}\nboolean lvExprValid = isValidLVExpr(lvExpr);\nif (!lvExprValid) {\nSTNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nlvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,\nDiagnosticErrorCode.ERROR_INVALID_EXPR_IN_ASSIGNMENT_LHS);\n}\nreturn STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon);\n}\n/*\n* Expressions\n*/\n/**\n* Parse expression. This will start parsing expressions from the lowest level of precedence.\n*\n* @return Parsed node\n*/\nprotected STNode parseExpression() {\nSTNode actionOrExpression = parseExpression(DEFAULT_OP_PRECEDENCE, true, false);\nif (isAction(actionOrExpression)) {\nactionOrExpression = SyntaxErrors.addDiagnostic(actionOrExpression,\nDiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND);\n}\nreturn actionOrExpression;\n}\n/**\n* Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence.\n*\n* @return Parsed node\n*/\nprivate STNode parseActionOrExpression() {\nreturn parseExpression(DEFAULT_OP_PRECEDENCE, true, true);\n}\nprivate STNode parseActionOrExpressionInLhs(SyntaxKind tokenKind, STNode annots) {\nreturn parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, annots, false, true, false);\n}\n/**\n* Parse expression.\n*\n* @param isRhsExpr Flag indicating whether this is a rhs expression\n* @return Parsed node\n*/\nprivate STNode parseExpression(boolean isRhsExpr) {\nreturn parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false);\n}\nprivate boolean isValidLVExpr(STNode expression) {\nswitch (expression.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase FUNCTIONAL_BINDING_PATTERN:\nreturn true;\ncase FIELD_ACCESS:\nreturn isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression);\ncase INDEXED_EXPRESSION:\nreturn isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression);\ndefault:\nreturn (expression instanceof STMissingToken);\n}\n}\nprivate boolean isValidLVMemberExpr(STNode expression) {\nswitch (expression.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nreturn true;\ncase FIELD_ACCESS:\nreturn isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression);\ncase INDEXED_EXPRESSION:\nreturn isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression);\ncase BRACED_EXPRESSION:\nreturn isValidLVMemberExpr(((STBracedExpressionNode) expression).expression);\ndefault:\nreturn (expression instanceof STMissingToken);\n}\n}\n/**\n* Parse an expression that has an equal or higher precedence than a given level.\n*\n* @param precedenceLevel Precedence level of expression to be parsed\n* @param isRhsExpr Flag indicating whether this is a rhs expression\n* @param allowActions Flag indicating whether the current context support actions\n* @return Parsed node\n*/\nprivate STNode parseExpression(OperatorPrecedence 
precedenceLevel, boolean isRhsExpr, boolean allowActions) {\nreturn parseExpression(precedenceLevel, isRhsExpr, allowActions, false);\n}\nprivate STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nSTToken token = peek();\nreturn parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions, false, isInConditionalExpr);\n}\nprivate STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr,\nboolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) {\nSTNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions, isInConditionalExpr);\nreturn parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr);\n}\nprivate STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, STNode annots,\nboolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\nreturn parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, false, isInConditionalExpr);\n}\n/**\n* Parse terminal expressions. A terminal expression has the highest precedence level\n* out of all expressions, and will be at the leaves of an expression tree.\n*\n* @param annots Annotations\n* @param isRhsExpr Is a rhs expression\n* @param allowActions Allow actions\n* @return Parsed node\n*/\nprivate STNode parseTerminalExpression(STNode annots, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nreturn parseTerminalExpression(peek().kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\n}\nprivate STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nSTNode annots;\nif (kind == SyntaxKind.AT_TOKEN) {\nannots = parseOptionalAnnotations();\nkind = peek().kind;\n} else {\nannots = STNodeFactory.createEmptyNodeList();\n}\nSTNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\nif (!isNodeListEmpty(annots) && expr.kind != SyntaxKind.START_ACTION) {\nexpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(expr, annots,\nDiagnosticErrorCode.ERROR_ANNOTATIONS_ATTACHED_TO_EXPRESSION);\n}\nreturn expr;\n}\nprivate STNode parseTerminalExpression(SyntaxKind kind, STNode annots, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nswitch (kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn parseBasicLiteral();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF, isInConditionalExpr);\ncase OPEN_PAREN_TOKEN:\nreturn parseBracedExpression(isRhsExpr, allowActions);\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\nreturn parseCheckExpression(isRhsExpr, allowActions, isInConditionalExpr);\ncase FAIL_KEYWORD:\nreturn parseFailExpression(isRhsExpr, allowActions, isInConditionalExpr);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingConstructorExpr();\ncase TYPEOF_KEYWORD:\nreturn parseTypeofExpression(isRhsExpr, isInConditionalExpr);\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\nreturn parseUnaryExpression(isRhsExpr, isInConditionalExpr);\ncase TRAP_KEYWORD:\nreturn parseTrapExpression(isRhsExpr, allowActions, 
isInConditionalExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseListConstructorExpr();\ncase LT_TOKEN:\nreturn parseTypeCastExpr(isRhsExpr, allowActions, isInConditionalExpr);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\ncase FROM_KEYWORD:\nreturn parseTableConstructorOrQuery(isRhsExpr);\ncase ERROR_KEYWORD:\nreturn parseErrorConstructorExpr();\ncase LET_KEYWORD:\nreturn parseLetExpression(isRhsExpr);\ncase BACKTICK_TOKEN:\nreturn parseTemplateExpression();\ncase XML_KEYWORD:\nSTToken nextNextToken = getNextNextToken(kind);\nif (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseXMLTemplateExpression();\n}\nreturn parseSimpleTypeDescriptor();\ncase STRING_KEYWORD:\nnextNextToken = getNextNextToken(kind);\nif (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseStringTemplateExpression();\n}\nreturn parseSimpleTypeDescriptor();\ncase FUNCTION_KEYWORD:\nreturn parseExplicitFunctionExpression(annots, isRhsExpr);\ncase AT_TOKEN:\nbreak;\ncase NEW_KEYWORD:\nreturn parseNewExpression();\ncase START_KEYWORD:\nreturn parseStartAction(annots);\ncase FLUSH_KEYWORD:\nreturn parseFlushAction();\ncase LEFT_ARROW_TOKEN:\nreturn parseReceiveAction();\ncase WAIT_KEYWORD:\nreturn parseWaitAction();\ncase COMMIT_KEYWORD:\nreturn parseCommitAction();\ncase TRANSACTIONAL_KEYWORD:\nreturn parseTransactionalExpression();\ncase SERVICE_KEYWORD:\nreturn parseServiceConstructorExpression(annots);\ncase BASE16_KEYWORD:\ncase BASE64_KEYWORD:\nreturn parseByteArrayLiteral(kind);\ndefault:\nif (isSimpleType(kind)) {\nreturn parseSimpleTypeDescriptor();\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, annots, isRhsExpr, allowActions,\nisInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nif (kind == SyntaxKind.XML_KEYWORD) {\nreturn parseXMLTemplateExpression();\n}\nreturn parseStringTemplateExpression();\n}\nswitch (solution.tokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(solution.recoveredNode, isInConditionalExpr);\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn solution.recoveredNode;\ndefault:\nreturn parseTerminalExpression(solution.tokenKind, annots, isRhsExpr, allowActions,\nisInConditionalExpr);\n}\n}\nprivate boolean isValidExprStart(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase IDENTIFIER_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase FAIL_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase TYPEOF_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\ncase TRAP_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase LT_TOKEN:\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\ncase FROM_KEYWORD:\ncase ERROR_KEYWORD:\ncase LET_KEYWORD:\ncase BACKTICK_TOKEN:\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase AT_TOKEN:\ncase NEW_KEYWORD:\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase SERVICE_KEYWORD:\nreturn true;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n*
\n* Parse a new expression.\n*
\n* \n* new-expr := explicit-new-expr | implicit-new-expr\n*
\n* explicit-new-expr := new type-descriptor ( arg-list )\n*
\n* implicit-new-expr := new [( arg-list )]\n*
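\n* Example (illustrative, editorial addition): new MyListener(8080) is an explicit-new-expr; new (8080) and a bare new are implicit-new-exprs.\n*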
\n*\n* @return Parsed NewExpression node.\n*/\nprivate STNode parseNewExpression() {\nSTNode newKeyword = parseNewKeyword();\nreturn parseNewKeywordRhs(newKeyword);\n}\n/**\n*
\n* Parse `new` keyword.\n*
\n*\n* @return Parsed NEW_KEYWORD Token.\n*/\nprivate STNode parseNewKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.NEW_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.NEW_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseNewKeywordRhs(STNode newKeyword) {\nSTNode token = peek();\nreturn parseNewKeywordRhs(token.kind, newKeyword);\n}\n/**\n*
\n* Parse an implicit or explicit new expression.\n*
\n*\n* @param kind next token kind.\n* @param newKeyword parsed node for `new` keyword.\n* @return Parsed new-expression node.\n*/\nprivate STNode parseNewKeywordRhs(SyntaxKind kind, STNode newKeyword) {\nswitch (kind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseImplicitNewRhs(newKeyword);\ncase SEMICOLON_TOKEN:\nbreak;\ncase IDENTIFIER_TOKEN:\ncase OBJECT_KEYWORD:\ncase STREAM_KEYWORD:\nreturn parseTypeDescriptorInNewExpr(newKeyword);\ndefault:\nbreak;\n}\nreturn STNodeFactory.createImplicitNewExpressionNode(newKeyword, STNodeFactory.createEmptyNode());\n}\n/**\n*
\n* Parse an explicit new expression.\n*
\n* \n* explicit-new-expr := new type-descriptor ( arg-list )\n* \n*\n* @param newKeyword Parsed `new` keyword.\n* @return Parsed explicit new expression node.\n*/\nprivate STNode parseTypeDescriptorInNewExpr(STNode newKeyword) {\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_NEW_EXPR);\nSTNode parenthesizedArgsList = parseParenthesizedArgList();\nreturn STNodeFactory.createExplicitNewExpressionNode(newKeyword, typeDescriptor, parenthesizedArgsList);\n}\n/**\n*
\n* Parse an implicit-new-expr with arguments.\n*
\n*\n* @param newKeyword Parsed `new` keyword.\n* @return Parsed implicit-new-expr.\n*/\nprivate STNode parseImplicitNewRhs(STNode newKeyword) {\nSTNode implicitNewArgList = parseParenthesizedArgList();\nreturn STNodeFactory.createImplicitNewExpressionNode(newKeyword, implicitNewArgList);\n}\n/**\n*
\n* Parse the parenthesized argument list for a new-expr.\n*
\n*\n* @return Parsed parenthesized rhs of new-expr.\n*/\nprivate STNode parseParenthesizedArgList() {\nSTNode openParan = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode arguments = parseArgsList();\nSTNode closeParan = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesizedArgList(openParan, arguments, closeParan);\n}\n/**\n*
\n* Parse the right-hand-side of an expression.\n*
\n* expr-rhs := (binary-op expression\n* | dot identifier\n* | open-bracket expression close-bracket\n* )*\n*\n* @param precedenceLevel Precedence level of the expression that is being parsed currently\n* @param lhsExpr LHS expression of the expression\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @param allowActions Flag indicating whether the current context support actions\n* @return Parsed node\n*/\nprivate STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr,\nboolean allowActions) {\nreturn parseExpressionRhs(precedenceLevel, lhsExpr, isRhsExpr, allowActions, false, false);\n}\nprivate STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr,\nboolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) {\nSTToken token = peek();\nreturn parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard,\nisInConditionalExpr);\n}\n/**\n* Parse the right hand side of an expression given the next token kind.\n*\n* @param tokenKind Next token kind\n* @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently\n* @param lhsExpr LHS expression\n* @param isRhsExpr Flag indicating whether this is a rhs expr or not\n* @param allowActions Flag indicating whether to allow actions or not\n* @param isInMatchGuard Flag indicating whether this expression is in a match-guard\n* @return Parsed node\n*/\nprivate STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr,\nboolean isRhsExpr, boolean allowActions, boolean isInMatchGuard,\nboolean isInConditionalExpr) {\nif (isEndOfExpression(tokenKind, isRhsExpr, isInMatchGuard, lhsExpr.kind)) {\nreturn lhsExpr;\n}\nif (lhsExpr.kind == SyntaxKind.ASYNC_SEND_ACTION) {\nreturn lhsExpr;\n}\nif (!isValidExprRhsStart(tokenKind, lhsExpr.kind)) {\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr,\nisRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.ctx == ParserRuleContext.BINARY_OPERATOR) {\nSyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel);\nreturn parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions,\nisInMatchGuard, isInConditionalExpr);\n} else {\nreturn parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions,\nisInMatchGuard, isInConditionalExpr);\n}\n}\nif (tokenKind == SyntaxKind.GT_TOKEN && peek(2).kind == SyntaxKind.GT_TOKEN) {\nif (peek(3).kind == SyntaxKind.GT_TOKEN) {\ntokenKind = SyntaxKind.TRIPPLE_GT_TOKEN;\n} else {\ntokenKind = SyntaxKind.DOUBLE_GT_TOKEN;\n}\n}\nOperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind);\nif (currentPrecedenceLevel.isHigherThanOrEqual(nextOperatorPrecedence, allowActions)) {\nreturn lhsExpr;\n}\nSTNode newLhsExpr;\nSTNode operator;\nswitch (tokenKind) {\ncase OPEN_PAREN_TOKEN:\nnewLhsExpr = parseFuncCall(lhsExpr);\nbreak;\ncase OPEN_BRACKET_TOKEN:\nnewLhsExpr = parseMemberAccessExpr(lhsExpr, isRhsExpr);\nbreak;\ncase DOT_TOKEN:\nnewLhsExpr = parseFieldAccessOrMethodCall(lhsExpr, isInConditionalExpr);\nbreak;\ncase IS_KEYWORD:\nnewLhsExpr = parseTypeTestExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase RIGHT_ARROW_TOKEN:\nnewLhsExpr = 
parseRemoteMethodCallOrAsyncSendAction(lhsExpr, isRhsExpr);\nif (!allowActions) {\nnewLhsExpr = SyntaxErrors.addDiagnostic(newLhsExpr,\nDiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND);\n}\nbreak;\ncase SYNC_SEND_TOKEN:\nnewLhsExpr = parseSyncSendAction(lhsExpr);\nif (!allowActions) {\nnewLhsExpr = SyntaxErrors.addDiagnostic(newLhsExpr,\nDiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND);\n}\nbreak;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nnewLhsExpr = parseImplicitAnonFunc(lhsExpr, isRhsExpr);\nbreak;\ncase ANNOT_CHAINING_TOKEN:\nnewLhsExpr = parseAnnotAccessExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase OPTIONAL_CHAINING_TOKEN:\nnewLhsExpr = parseOptionalFieldAccessExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase QUESTION_MARK_TOKEN:\nnewLhsExpr = parseConditionalExpression(lhsExpr);\nbreak;\ncase DOT_LT_TOKEN:\nnewLhsExpr = parseXMLFilterExpression(lhsExpr);\nbreak;\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nnewLhsExpr = parseXMLStepExpression(lhsExpr);\nbreak;\ndefault:\nif (tokenKind == SyntaxKind.DOUBLE_GT_TOKEN) {\noperator = parseSignedRightShiftToken();\n} else if (tokenKind == SyntaxKind.TRIPPLE_GT_TOKEN) {\noperator = parseUnsignedRightShiftToken();\n} else {\noperator = parseBinaryOperator();\n}\nSTNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false, isInConditionalExpr);\nnewLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator,\nrhsExpr);\nbreak;\n}\nreturn parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions, isInMatchGuard,\nisInConditionalExpr);\n}\nprivate boolean isValidExprRhsStart(SyntaxKind tokenKind, SyntaxKind precedingNodeKind) {\nswitch (tokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn precedingNodeKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||\nprecedingNodeKind == SyntaxKind.SIMPLE_NAME_REFERENCE;\ncase DOT_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase IS_KEYWORD:\ncase RIGHT_ARROW_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\ncase SYNC_SEND_TOKEN:\ncase ANNOT_CHAINING_TOKEN:\ncase OPTIONAL_CHAINING_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase COLON_TOKEN:\ncase DOT_LT_TOKEN:\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nreturn true;\ndefault:\nreturn isBinaryOperator(tokenKind);\n}\n}\n/**\n* Parse member access expression.\n*\n* @param lhsExpr Container expression\n* @param isRhsExpr Is this is a rhs expression\n* @return Member access expression\n*/\nprivate STNode parseMemberAccessExpr(STNode lhsExpr, boolean isRhsExpr) {\nstartContext(ParserRuleContext.MEMBER_ACCESS_KEY_EXPR);\nSTNode openBracket = parseOpenBracket();\nSTNode keyExpr = parseMemberAccessKeyExprs(isRhsExpr);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nif (isRhsExpr && ((STNodeList) keyExpr).isEmpty()) {\nkeyExpr = STNodeFactory.createNodeList(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\ncloseBracket = SyntaxErrors.addDiagnostic(closeBracket,\nDiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);\n}\nreturn STNodeFactory.createIndexedExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket);\n}\n/**\n* Parse key expression of a member access expression. A type descriptor\n* that starts with a type-ref (e.g: T[a][b]) also goes through this\n* method.\n*
\n* key-expression := single-key-expression | multi-key-expression\n*\n* @param isRhsExpr Is this a rhs expression\n* @return Key expression\n*/\nprivate STNode parseMemberAccessKeyExprs(boolean isRhsExpr) {\nList<STNode> exprList = new ArrayList<>();\nSTNode keyExpr;\nSTNode keyExprEnd;\nwhile (!isEndOfTypeList(peek().kind)) {\nkeyExpr = parseKeyExpr(isRhsExpr);\nexprList.add(keyExpr);\nkeyExprEnd = parseMemberAccessKeyExprEnd();\nif (keyExprEnd == null) {\nbreak;\n}\nexprList.add(keyExprEnd);\n}\nreturn STNodeFactory.createNodeList(exprList);\n}\nprivate STNode parseKeyExpr(boolean isRhsExpr) {\nif (!isRhsExpr && peek().kind == SyntaxKind.ASTERISK_TOKEN) {\nreturn STNodeFactory.createBasicLiteralNode(SyntaxKind.ASTERISK_TOKEN, consume());\n}\nreturn parseExpression(isRhsExpr);\n}\nprivate STNode parseMemberAccessKeyExprEnd() {\nreturn parseMemberAccessKeyExprEnd(peek().kind);\n}\nprivate STNode parseMemberAccessKeyExprEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MEMBER_ACCESS_KEY_EXPR_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMemberAccessKeyExprEnd(solution.tokenKind);\n}\n}\n/**\n* Parse close bracket.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseBracket() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_BRACKET);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field access, xml required attribute access, or method call expressions.\n*
\n* \n* field-access-expr := expression . field-name\n*
\n* xml-required-attribute-access-expr := expression . xml-attribute-name\n*
\n* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n*
\n* method-call-expr := expression . method-name ( arg-list )\n*
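\n* Example (illustrative, editorial addition): person.name is a field-access-expr, person.getName() is a method-call-expr, and x.ns:id is an xml-required-attribute-access-expr.\n*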
\n*\n* @param lhsExpr Preceding expression of the field access or method call\n* @return One of field-access-expression or method-call-expression.\n*/\nprivate STNode parseFieldAccessOrMethodCall(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode dotToken = parseDotToken();\nSTToken token = peek();\nif (token.kind == SyntaxKind.MAP_KEYWORD || token.kind == SyntaxKind.START_KEYWORD) {\nSTNode methodName = getKeywordAsSimpleNameRef();\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, methodName, openParen, args,\ncloseParen);\n}\nSTNode fieldOrMethodName = parseFieldAccessIdentifier(isInConditionalExpr);\nif (fieldOrMethodName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nreturn STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);\n}\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args,\ncloseParen);\n}\nreturn STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);\n}\nprivate STNode getKeywordAsSimpleNameRef() {\nSTToken mapKeyword = consume();\nSTNode methodName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\nmethodName = STNodeFactory.createSimpleNameReferenceNode(methodName);\nreturn methodName;\n}\n/**\n*
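// --- Editorial note (sketch, not part of the original source): the dot-access
// disambiguation in parseFieldAccessOrMethodCall above needs only one token of
// lookahead after the name:
//
//     expression . name ( arg-list )   -> method-call-expr
//     expression . name                -> field-access-expr
//     expression . ns:attr             -> xml required attribute access
//
// A qualified name (ns:attr) is returned as a field access immediately, since it
// can only be an XML attribute reference, never a method name.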
\n* Parse braced expression.\n*
\n* braced-expr := ( expression )\n*\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @param allowActions Allow actions\n* @return Parsed node\n*/\nprivate STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nif (peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nreturn parseNilLiteralOrEmptyAnonFuncParamRhs(openParen);\n}\nstartContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\nSTNode expr;\nif (allowActions) {\nexpr = parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true);\n} else {\nexpr = parseExpression(isRhsExpr);\n}\nreturn parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, expr, isRhsExpr);\n}\nprivate STNode parseNilLiteralOrEmptyAnonFuncParamRhs(STNode openParen) {\nSTNode closeParen = parseCloseParenthesis();\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn STNodeFactory.createNilLiteralNode(openParen, closeParen);\n} else {\nSTNode params = STNodeFactory.createNodeList();\nSTNode anonFuncParam =\nSTNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nreturn anonFuncParam;\n}\n}\nprivate STNode parseBracedExprOrAnonFuncParamRhs(STNode openParen, STNode expr, boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseBracedExprOrAnonFuncParamRhs(nextToken.kind, openParen, expr, isRhsExpr);\n}\nprivate STNode parseBracedExprOrAnonFuncParamRhs(SyntaxKind nextTokenKind, STNode openParen, STNode expr,\nboolean isRhsExpr) {\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nswitch (nextTokenKind) {\ncase CLOSE_PAREN_TOKEN:\nbreak;\ncase COMMA_TOKEN:\nreturn parseImplicitAnonFunc(openParen, expr, isRhsExpr);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS, openParen,\nexpr, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nendContext();\nreturn solution.recoveredNode;\n}\nreturn parseBracedExprOrAnonFuncParamRhs(solution.tokenKind, openParen, expr, isRhsExpr);\n}\n}\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nif (isAction(expr)) {\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen);\n}\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen);\n}\n/**\n* Check whether a given node is an action node.\n*\n* @param node Node to check\n* @return true if the node is an action node. false otherwise\n*/\nprivate boolean isAction(STNode node) {\nswitch (node.kind) {\ncase REMOTE_METHOD_CALL_ACTION:\ncase BRACED_ACTION:\ncase CHECK_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\ncase FAIL_ACTION:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the given token is an end of a expression.\n*\n* @param tokenKind Token to check\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @return true if the token represents an end of a block. 
false otherwise\n*/\nprivate boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr, boolean isInMatchGuard,\nSyntaxKind precedingNodeKind) {\nif (!isRhsExpr) {\nif (isCompoundBinaryOperator(tokenKind)) {\nreturn true;\n}\nif (isInMatchGuard && tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn true;\n}\nreturn !isValidExprRhsStart(tokenKind, precedingNodeKind);\n}\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase PUBLIC_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase EQUAL_TOKEN:\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\ncase AS_KEYWORD:\ncase IN_KEYWORD:\ncase FROM_KEYWORD:\ncase WHERE_KEYWORD:\ncase LET_KEYWORD:\ncase SELECT_KEYWORD:\ncase DO_KEYWORD:\ncase COLON_TOKEN:\ncase ON_KEYWORD:\ncase CONFLICT_KEYWORD:\ncase LIMIT_KEYWORD:\ncase JOIN_KEYWORD:\ncase OUTER_KEYWORD:\ncase ORDER_KEYWORD:\ncase BY_KEYWORD:\ncase ASCENDING_KEYWORD:\ncase DESCENDING_KEYWORD:\nreturn true;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn isInMatchGuard;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n* Parse basic literals. It is assumed that we come here after validation.\n*\n* @return Parsed node\n*/\nprivate STNode parseBasicLiteral() {\nSTToken literalToken = consume();\nreturn STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken);\n}\n/**\n* Parse function call expression.\n* function-call-expr := function-reference ( arg-list )\n* function-reference := variable-reference\n*\n* @param identifier Function name\n* @return Function call expression\n*/\nprivate STNode parseFuncCall(STNode identifier) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen);\n}\n/**\n*
\n* Parse error constructor expression.\n*
\n* \n* error-constructor-expr := error ( arg-list )\n* \n*\n* @return Error constructor expression\n*/\nprivate STNode parseErrorConstructorExpr() {\nSTNode errorKeyword = parseErrorKeyword();\nreturn parseFuncCall(errorKeyword);\n}\n/**\n* Parse function call argument list.\n*\n* @return Parsed args list\n*/\nprivate STNode parseArgsList() {\nstartContext(ParserRuleContext.ARG_LIST);\nSTToken token = peek();\nif (isEndOfParametersList(token.kind)) {\nSTNode args = STNodeFactory.createEmptyNodeList();\nendContext();\nreturn args;\n}\nSTNode firstArg = parseArgument();\nSTNode argsList = parseArgList(firstArg);\nendContext();\nreturn argsList;\n}\n/**\n* Parse follow up arguments.\n*\n* @param firstArg first argument in the list\n* @return the argument list\n*/\nprivate STNode parseArgList(STNode firstArg) {\nArrayList argsList = new ArrayList<>();\nargsList.add(firstArg);\nSyntaxKind lastValidArgKind = firstArg.kind;\nSTToken nextToken = peek();\nwhile (!isEndOfParametersList(nextToken.kind)) {\nSTNode argEnd = parseArgEnd(nextToken.kind);\nif (argEnd == null) {\nbreak;\n}\nnextToken = peek();\nSTNode curArg = parseArgument(nextToken.kind);\nDiagnosticErrorCode errorCode = validateArgumentOrder(lastValidArgKind, curArg.kind);\nif (errorCode == null) {\nargsList.add(argEnd);\nargsList.add(curArg);\nlastValidArgKind = curArg.kind;\n} else if (errorCode == DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG &&\nisMissingPositionalArg(curArg)) {\nargsList.add(argEnd);\nargsList.add(curArg);\n} else {\nupdateLastNodeInListWithInvalidNode(argsList, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argsList, curArg, errorCode);\n}\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(argsList);\n}\nprivate DiagnosticErrorCode validateArgumentOrder(SyntaxKind prevArgKind, SyntaxKind curArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase POSITIONAL_ARG:\nbreak;\ncase NAMED_ARG:\nif (curArgKind == SyntaxKind.POSITIONAL_ARG) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_ARG:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nthrow new IllegalStateException(\"Invalid SyntaxKind in an argument\");\n}\nreturn errorCode;\n}\nprivate boolean isMissingPositionalArg(STNode arg) {\nSTNode expr = ((STPositionalArgumentNode) arg).expression;\nreturn expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE && ((STSimpleNameReferenceNode) expr).name.isMissing();\n}\nprivate STNode parseArgEnd() {\nreturn parseArgEnd(peek().kind);\n}\nprivate STNode parseArgEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgEnd(solution.tokenKind);\n}\n}\n/**\n* Parse function call argument.\n*\n* @return Parsed argument node\n*/\nprivate STNode parseArgument() {\nSTToken token = peek();\nreturn parseArgument(token.kind);\n}\nprivate STNode parseArgument(SyntaxKind kind) {\nSTNode arg;\nswitch (kind) {\ncase ELLIPSIS_TOKEN:\nSTToken ellipsis = consume();\nSTNode expr = parseExpression();\narg = STNodeFactory.createRestArgumentNode(ellipsis, expr);\nbreak;\ncase IDENTIFIER_TOKEN:\narg = parseNamedOrPositionalArg(kind);\nbreak;\ndefault:\nif (isValidExprStart(kind)) {\nexpr = parseExpression();\narg = 
STNodeFactory.createPositionalArgumentNode(expr);\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.ARG_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgument(solution.tokenKind);\n}\nreturn arg;\n}\n/**\n* Parse positional or named arg. This method assumed peek()/peek(1)\n* is always an identifier.\n*\n* @return Parsed argument node\n*/\nprivate STNode parseNamedOrPositionalArg(SyntaxKind nextTokenKind) {\nSTNode argNameOrExpr = parseTerminalExpression(peek().kind, true, false, false);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nSTNode equal = parseAssignOp();\nSTNode valExpr = parseExpression();\nreturn STNodeFactory.createNamedArgumentNode(argNameOrExpr, equal, valExpr);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\nreturn STNodeFactory.createPositionalArgumentNode(argNameOrExpr);\ndefault:\nargNameOrExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, argNameOrExpr, false, false);\nreturn STNodeFactory.createPositionalArgumentNode(argNameOrExpr);\n}\n}\n/**\n* Parse object type descriptor.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectTypeDescriptor() {\nstartContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR);\nSTNode objectTypeQualifiers = parseObjectTypeQualifiers();\nSTNode objectKeyword = parseObjectKeyword();\nSTNode openBrace = parseOpenBrace();\nSTNode objectMembers = parseObjectMembers();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace,\nobjectMembers, closeBrace);\n}\n/**\n* Parse object type qualifiers.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectTypeQualifiers() {\nSTToken nextToken = peek();\nreturn parseObjectTypeQualifiers(nextToken.kind);\n}\nprivate STNode parseObjectTypeQualifiers(SyntaxKind kind) {\nSTNode firstQualifier;\nswitch (kind) {\ncase CLIENT_KEYWORD:\nfirstQualifier = parseClientKeyword();\nbreak;\ncase ABSTRACT_KEYWORD:\nfirstQualifier = parseAbstractKeyword();\nbreak;\ncase READONLY_KEYWORD:\nfirstQualifier = parseReadonlyKeyword();\nbreak;\ncase OBJECT_KEYWORD:\nreturn STNodeFactory.createEmptyNodeList();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_QUALIFIER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectTypeQualifiers(solution.tokenKind);\n}\nreturn parseObjectTypeNextQualifiers(firstQualifier);\n}\nprivate STNode parseObjectTypeNextQualifiers(STNode firstQualifier) {\nList qualifiers = new ArrayList<>();\nqualifiers.add(firstQualifier);\nfor (int i = 0; i < 2; i++) {\nSTNode nextToken = peek();\nif (isNodeWithSyntaxKindInList(qualifiers, nextToken.kind)) {\nnextToken = consume();\nupdateLastNodeInListWithInvalidNode(qualifiers, nextToken,\nDiagnosticErrorCode.ERROR_SAME_OBJECT_TYPE_QUALIFIER);\ncontinue;\n}\nSTNode nextQualifier;\nswitch (nextToken.kind) {\ncase CLIENT_KEYWORD:\nnextQualifier = parseClientKeyword();\nbreak;\ncase ABSTRACT_KEYWORD:\nnextQualifier = parseAbstractKeyword();\nbreak;\ncase READONLY_KEYWORD:\nnextQualifier = parseReadonlyKeyword();\nbreak;\ncase OBJECT_KEYWORD:\ndefault:\nreturn STNodeFactory.createNodeList(qualifiers);\n}\nqualifiers.add(nextQualifier);\n}\nreturn STNodeFactory.createNodeList(qualifiers);\n}\n/**\n* Parse client keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseClientKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLIENT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol 
= recover(token, ParserRuleContext.CLIENT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse abstract keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAbstractKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ABSTRACT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse object keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OBJECT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse object members.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectMembers() {\nArrayList objectMembers = new ArrayList<>();\nwhile (!isEndOfObjectTypeNode()) {\nstartContext(ParserRuleContext.OBJECT_MEMBER);\nSTNode member = parseObjectMember(peek().kind);\nendContext();\nif (member == null) {\nbreak;\n}\nobjectMembers.add(member);\n}\nreturn STNodeFactory.createNodeList(objectMembers);\n}\nprivate STNode parseObjectMember() {\nSTToken nextToken = peek();\nreturn parseObjectMember(nextToken.kind);\n}\nprivate STNode parseObjectMember(SyntaxKind nextTokenKind) {\nSTNode metadata;\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\ncase REMOTE_KEYWORD:\ncase FUNCTION_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nmetadata = createEmptyMetadata();\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMember(solution.tokenKind);\n}\nreturn parseObjectMember(nextTokenKind, metadata);\n}\nprivate STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) {\nSTNode member;\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\nSTNode asterisk = consume();\nSTNode type = parseTypeReference();\nSTNode semicolonToken = parseSemicolon();\nmember = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);\nbreak;\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\nSTNode visibilityQualifier = parseObjectMemberVisibility();\nmember = parseObjectMethodOrField(metadata, visibilityQualifier);\nbreak;\ncase REMOTE_KEYWORD:\nmember = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode());\nbreak;\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nmember = parseObjectMethod(metadata, STNodeFactory.createEmptyNode(), STNodeFactory.createEmptyNode());\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nmember = parseObjectField(metadata, STNodeFactory.createEmptyNode());\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMember(solution.tokenKind);\n}\nreturn member;\n}\nprivate STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) {\nSTToken nextToken = peek(1);\nSTToken nextNextToken = peek(2);\nreturn parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers);\n}\n/**\n* Parse an object member, given the visibility modifier. 
Object member can have\n* only one visibility qualifier. This means the methodQualifiers list can have\n* one qualifier at most.\n*\n* @param nextTokenKind Next token kind\n* @param nextNextTokenKind Kind of the token that follows the next token\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier. A modifier can be\n* a syntax node with either 'PUBLIC' or 'PRIVATE'.\n* @return Parsed object member node\n*/\nprivate STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata,\nSTNode visibilityQualifier) {\nswitch (nextTokenKind) {\ncase REMOTE_KEYWORD:\nSTNode remoteKeyword = parseRemoteKeyword();\nreturn parseObjectMethod(metadata, visibilityQualifier, remoteKeyword);\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nremoteKeyword = STNodeFactory.createEmptyNode();\nreturn parseObjectMethod(metadata, visibilityQualifier, remoteKeyword);\ncase IDENTIFIER_TOKEN:\nif (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseObjectField(metadata, visibilityQualifier);\n}\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nreturn parseObjectField(metadata, visibilityQualifier);\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata,\nvisibilityQualifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifier);\n}\n/**\n* Parse object visibility. Visibility can be public or private.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectMemberVisibility() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseRemoteKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.REMOTE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseObjectField(STNode metadata, STNode methodQualifiers) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.READONLY_KEYWORD) {\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);\n}\nSTNode type;\nSTNode readonlyQualifier = parseReadonlyKeyword();\nnextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode fieldNameOrTypeDesc = parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME);\nif (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\ntype = fieldNameOrTypeDesc;\n} else {\nnextToken = peek();\nswitch (nextToken.kind) {\ncase SEMICOLON_TOKEN:\ncase EQUAL_TOKEN:\ntype = createBuiltinSimpleNameReference(readonlyQualifier);\nreadonlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type,\nfieldNameOrTypeDesc);\ndefault:\ntype = fieldNameOrTypeDesc;\nbreak;\n}\n}\n} else if (isTypeStartingToken(nextToken.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\n} else {\nreadonlyQualifier = createBuiltinSimpleNameReference(readonlyQualifier);\ntype = 
parseComplexTypeDescriptor(readonlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);\nreadonlyQualifier = STNodeFactory.createEmptyNode();\n}\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);\n}\n/**\n* Parse object field rhs, and complete the object field parsing. Returns the parsed object field.\n*\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed object field\n*/\nprivate STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode readonlyQualifier,\nSTNode type, STNode fieldName) {\nSTToken nextToken = peek();\nreturn parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, readonlyQualifier, type, fieldName);\n}\n/**\n* Parse object field rhs, and complete the object field parsing. Returns the parsed object field.\n*\n* @param nextTokenKind Kind of the next token\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed object field\n*/\nprivate STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier,\nSTNode readonlyQualifier, STNode type, STNode fieldName) {\nSTNode equalsToken;\nSTNode expression;\nSTNode semicolonToken;\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nequalsToken = STNodeFactory.createEmptyNode();\nexpression = STNodeFactory.createEmptyNode();\nsemicolonToken = parseSemicolon();\nbreak;\ncase EQUAL_TOKEN:\nequalsToken = parseAssignOp();\nexpression = parseExpression();\nsemicolonToken = parseSemicolon();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier,\nreadonlyQualifier, type, fieldName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, readonlyQualifier, type,\nfieldName);\n}\nreturn STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, readonlyQualifier, type, fieldName,\nequalsToken, expression, semicolonToken);\n}\nprivate STNode parseObjectMethod(STNode metadata, STNode visibilityQualifier, STNode remoteKeyword) {\nreturn parseFuncDefOrFuncTypeDesc(metadata, true, visibilityQualifier, remoteKeyword, null);\n}\n/**\n* Parse if-else statement.\n* \n* if-else-stmt := if expression block-stmt [else-block]\n* \n*\n* @return If-else block\n*/\nprivate STNode parseIfElseBlock() {\nstartContext(ParserRuleContext.IF_BLOCK);\nSTNode ifKeyword = parseIfKeyword();\nSTNode condition = parseExpression();\nSTNode ifBody = parseBlockNode();\nendContext();\nSTNode elseBody = parseElseBlock();\nreturn STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody);\n}\n/**\n* Parse if-keyword.\n*\n* @return Parsed if-keyword node\n*/\nprivate STNode parseIfKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IF_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IF_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse else-keyword.\n*\n* @return Parsed else keyword node\n*/\nprivate STNode parseElseKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ELSE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = 
recover(token, ParserRuleContext.ELSE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse block node.\n* \n* block-stmt := { sequence-stmt }\n* sequence-stmt := statement*\n* \n*\n* @return Parse block node\n*/\nprivate STNode parseBlockNode() {\nstartContext(ParserRuleContext.BLOCK_STMT);\nSTNode openBrace = parseOpenBrace();\nSTNode stmts = parseStatements();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace);\n}\n/**\n* Parse else block.\n* else-block := else (if-else-stmt | block-stmt)\n*\n* @return Else block\n*/\nprivate STNode parseElseBlock() {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ELSE_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode elseKeyword = parseElseKeyword();\nSTNode elseBody = parseElseBody();\nreturn STNodeFactory.createElseBlockNode(elseKeyword, elseBody);\n}\n/**\n* Parse else node body.\n* else-body := if-else-stmt | block-stmt\n*\n* @return Else node body\n*/\nprivate STNode parseElseBody() {\nSTToken nextToken = peek();\nreturn parseElseBody(nextToken.kind);\n}\nprivate STNode parseElseBody(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IF_KEYWORD:\nreturn parseIfElseBlock();\ncase OPEN_BRACE_TOKEN:\nreturn parseBlockNode();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ELSE_BODY);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseElseBody(solution.tokenKind);\n}\n}\n/**\n* Parse while statement.\n* while-stmt := while expression block-stmt\n*\n* @return While statement\n*/\nprivate STNode parseWhileStatement() {\nstartContext(ParserRuleContext.WHILE_BLOCK);\nSTNode whileKeyword = parseWhileKeyword();\nSTNode condition = parseExpression();\nSTNode whileBody = parseBlockNode();\nendContext();\nreturn STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody);\n}\n/**\n* Parse while-keyword.\n*\n* @return While-keyword node\n*/\nprivate STNode parseWhileKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WHILE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WHILE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse panic statement.\n* panic-stmt := panic expression ;\n*\n* @return Panic statement\n*/\nprivate STNode parsePanicStatement() {\nstartContext(ParserRuleContext.PANIC_STMT);\nSTNode panicKeyword = parsePanicKeyword();\nSTNode expression = parseExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon);\n}\n/**\n* Parse panic-keyword.\n*\n* @return Panic-keyword node\n*/\nprivate STNode parsePanicKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PANIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PANIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse check expression. This method is used to parse both check expression\n* as well as check action.\n*\n*
\n* \n* checking-expr := checking-keyword expression\n* checking-action := checking-keyword action\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Is rhs expression\n* @return Check expression node\n*/\nprivate STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode checkingKeyword = parseCheckingKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr);\n} else {\nreturn STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr);\n}\n}\n/**\n* Parse checking keyword.\n*
\n* \n* checking-keyword := check | checkpanic\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseCheckingKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse fail expression. This method is used to parse both fail expression\n* as well as fail action.\n*\n*
\n* \n* fail-expr := fail-keyword expression\n* fail-action := fail-keyword action\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Is rhs expression\n* @return Fail expression node\n*/\nprivate STNode parseFailExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode failKeyword = parseFailKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createFailExpressionNode(SyntaxKind.FAIL_ACTION, failKeyword, expr);\n} else {\nreturn STNodeFactory.createFailExpressionNode(SyntaxKind.FAIL_EXPRESSION, failKeyword, expr);\n}\n}\n/**\n* Parse fail keyword.\n*
\n* \n* fail-keyword := fail\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseFailKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FAIL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FAIL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*\n* Parse continue statement.\n* continue-stmt := continue ; \n*\n* @return continue statement\n*/\nprivate STNode parseContinueStatement() {\nstartContext(ParserRuleContext.CONTINUE_STATEMENT);\nSTNode continueKeyword = parseContinueKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createContinueStatementNode(continueKeyword, semicolon);\n}\n/**\n* Parse continue-keyword.\n*\n* @return continue-keyword node\n*/\nprivate STNode parseContinueKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONTINUE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse return statement.\n* return-stmt := return [ action-or-expr ] ;\n*\n* @return Return statement\n*/\nprivate STNode parseReturnStatement() {\nstartContext(ParserRuleContext.RETURN_STMT);\nSTNode returnKeyword = parseReturnKeyword();\nSTNode returnRhs = parseReturnStatementRhs(returnKeyword);\nendContext();\nreturn returnRhs;\n}\n/**\n* Parse return-keyword.\n*\n* @return Return-keyword node\n*/\nprivate STNode parseReturnKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETURN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETURN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse break statement.\n* break-stmt := break ; \n*\n* @return break statement\n*/\nprivate STNode parseBreakStatement() {\nstartContext(ParserRuleContext.BREAK_STATEMENT);\nSTNode breakKeyword = parseBreakKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createBreakStatementNode(breakKeyword, semicolon);\n}\n/**\n* Parse break-keyword.\n*\n* @return break-keyword node\n*/\nprivate STNode parseBreakKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BREAK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BREAK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
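// --- Editorial note (hypothetical refactoring, not in the source): parseIfKeyword,
// parseElseKeyword, parseWhileKeyword, parsePanicKeyword, parseContinueKeyword,
// parseReturnKeyword and parseBreakKeyword all repeat the same
// peek/consume/recover shape, which a single helper could express once:
//
//     private STNode parseExpectedKeyword(SyntaxKind expected, ParserRuleContext ctx) {
//         STToken token = peek();
//         if (token.kind == expected) {
//             return consume();
//         }
//         return recover(token, ctx).recoveredNode;
//     }
//
// e.g. parseBreakKeyword() reduces to
// parseExpectedKeyword(SyntaxKind.BREAK_KEYWORD, ParserRuleContext.BREAK_KEYWORD).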
\n* Parse the right hand side of a return statement.\n*
\n* \n* return-stmt-rhs := ; | action-or-expr ;\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseReturnStatementRhs(STNode returnKeyword) {\nSTNode expr;\nSTToken token = peek();\nswitch (token.kind) {\ncase SEMICOLON_TOKEN:\nexpr = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nexpr = parseActionOrExpression();\nbreak;\n}\nSTNode semicolon = parseSemicolon();\nreturn STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon);\n}\n/**\n* Parse mapping constructor expression.\n*
\n* mapping-constructor-expr := { [field (, field)*] }\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingConstructorExpr() {\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode openBrace = parseOpenBrace();\nSTNode fields = parseMappingConstructorFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\n}\n/**\n* Parse mapping constructor fields.\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingConstructorFields() {\nSTToken nextToken = peek();\nif (isEndOfMappingConstructor(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList fields = new ArrayList<>();\nSTNode field = parseMappingField(ParserRuleContext.FIRST_MAPPING_FIELD);\nfields.add(field);\nreturn parseMappingConstructorFields(fields);\n}\nprivate STNode parseMappingConstructorFields(List fields) {\nSTToken nextToken;\nSTNode mappingFieldEnd;\nnextToken = peek();\nwhile (!isEndOfMappingConstructor(nextToken.kind)) {\nmappingFieldEnd = parseMappingFieldEnd(nextToken.kind);\nif (mappingFieldEnd == null) {\nbreak;\n}\nfields.add(mappingFieldEnd);\nSTNode field = parseMappingField(ParserRuleContext.MAPPING_FIELD);\nfields.add(field);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(fields);\n}\nprivate STNode parseMappingFieldEnd() {\nreturn parseMappingFieldEnd(peek().kind);\n}\nprivate STNode parseMappingFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.MAPPING_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingFieldEnd(solution.tokenKind);\n}\n}\nprivate boolean isEndOfMappingConstructor(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\ncase READONLY_KEYWORD:\nreturn false;\ncase EOF_TOKEN:\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase RETURNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase RESOURCE_KEYWORD:\nreturn true;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n* Parse mapping constructor field.\n*
\n* field := specific-field | computed-name-field | spread-field\n*\n* @param fieldContext Context of the mapping field\n* @param leadingComma Leading comma\n* @return Parsed node\n*/\nprivate STNode parseMappingField(ParserRuleContext fieldContext) {\nSTToken nextToken = peek();\nreturn parseMappingField(nextToken.kind, fieldContext);\n}\nprivate STNode parseMappingField(SyntaxKind tokenKind, ParserRuleContext fieldContext) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseSpecificFieldWithOptionalValue(readonlyKeyword);\ncase STRING_LITERAL:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseQualifiedSpecificField(readonlyKeyword);\ncase READONLY_KEYWORD:\nreadonlyKeyword = parseReadonlyKeyword();\nreturn parseSpecificField(readonlyKeyword);\ncase OPEN_BRACKET_TOKEN:\nreturn parseComputedField();\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode expr = parseExpression();\nreturn STNodeFactory.createSpreadFieldNode(ellipsis, expr);\ncase CLOSE_BRACE_TOKEN:\nif (fieldContext == ParserRuleContext.FIRST_MAPPING_FIELD) {\nreturn null;\n}\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, fieldContext, fieldContext);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingField(solution.tokenKind, fieldContext);\n}\n}\nprivate STNode parseSpecificField(STNode readonlyKeyword) {\nSTToken nextToken = peek();\nreturn parseSpecificField(nextToken.kind, readonlyKeyword);\n}\nprivate STNode parseSpecificField(SyntaxKind nextTokenKind, STNode readonlyKeyword) {\nswitch (nextTokenKind) {\ncase STRING_LITERAL:\nreturn parseQualifiedSpecificField(readonlyKeyword);\ncase IDENTIFIER_TOKEN:\nreturn parseSpecificFieldWithOptionalValue(readonlyKeyword);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD, readonlyKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseSpecificField(solution.tokenKind, readonlyKeyword);\n}\n}\nprivate STNode parseQualifiedSpecificField(STNode readonlyKeyword) {\nSTNode key = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\n/**\n* Parse mapping constructor specific-field with an optional value.\n*\n* @return Parsed node\n*/\nprivate STNode parseSpecificFieldWithOptionalValue(STNode readonlyKeyword) {\nSTNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);\nreturn parseSpecificFieldRhs(readonlyKeyword, key);\n}\nprivate STNode parseSpecificFieldRhs(STNode readonlyKeyword, STNode key) {\nSTToken nextToken = peek();\nreturn parseSpecificFieldRhs(nextToken.kind, readonlyKeyword, key);\n}\nprivate STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode readonlyKeyword, STNode key) {\nSTNode colon;\nSTNode valueExpr;\nswitch (tokenKind) {\ncase COLON_TOKEN:\ncolon = parseColon();\nvalueExpr = parseExpression();\nbreak;\ncase COMMA_TOKEN:\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nif (isEndOfMappingConstructor(tokenKind)) {\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, readonlyKeyword, key);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn 
parseSpecificFieldRhs(solution.tokenKind, readonlyKeyword, key);\n}\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\n/**\n* Parse string literal.\n*\n* @return Parsed node\n*/\nprivate STNode parseStringLiteral() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STRING_LITERAL) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STRING_LITERAL);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse colon token.\n*\n* @return Parsed node\n*/\nprivate STNode parseColon() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COLON_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COLON);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse readonly keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseReadonlyKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.READONLY_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.READONLY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse computed-name-field of a mapping constructor expression.\n*
\n* computed-name-field := [ field-name-expr ] : value-expr\n*\n* @return Parsed node\n*/\nprivate STNode parseComputedField() {\nstartContext(ParserRuleContext.COMPUTED_FIELD_NAME);\nSTNode openBracket = parseOpenBracket();\nSTNode fieldNameExpr = parseExpression();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon, valueExpr);\n}\n/**\n* Parse open bracket.\n*\n* @return Parsed node\n*/\nprivate STNode parseOpenBracket() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPEN_BRACKET);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse compound assignment statement, which takes the following format.\n*
\n* assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;\n*\n* @return Parsed node\n*/\nprivate STNode parseCompoundAssignmentStmt() {\nstartContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT);\nSTNode varName = parseVariableName();\nSTNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName);\nendContext();\nreturn compoundAssignmentStmt;\n}\n/**\n*
\n* Parse the RHS portion of the compound assignment.\n*
\n* compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;\n*\n* @param lvExpr LHS expression\n* @return Parsed node\n*/\nprivate STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) {\nSTNode binaryOperator = parseCompoundBinaryOperator();\nSTNode equalsToken = parseAssignOp();\nSTNode expr = parseActionOrExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nboolean lvExprValid = isValidLVExpr(lvExpr);\nif (!lvExprValid) {\nSTNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nlvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,\nDiagnosticErrorCode.ERROR_INVALID_EXPR_IN_COMPOUND_ASSIGNMENT_LHS);\n}\nreturn STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr,\nsemicolon);\n}\n/**\n* Parse compound binary operator.\n* BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>>\n*\n* @return Parsed node\n*/\nprivate STNode parseCompoundBinaryOperator() {\nSTToken token = peek();\nif (isCompoundBinaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse service declaration.\n*
\n* \n* service-decl := metadata service [variable-name] on expression-list service-body-block\n*
\n* expression-list := expression (, expression)*\n*
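\n* Example (illustrative, editorial addition): in service hello on new http:Listener(9090) { ... }, the expressions after the on keyword form the expression-list.\n*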
\n*\n* @param metadata Metadata\n* @return Parsed node\n*/\nprivate STNode parseServiceDecl(STNode metadata) {\nstartContext(ParserRuleContext.SERVICE_DECL);\nSTNode serviceKeyword = parseServiceKeyword();\nSTNode serviceDecl = parseServiceRhs(metadata, serviceKeyword);\nendContext();\nreturn serviceDecl;\n}\n/**\n* Parse rhs of the service declaration.\n*
\n* \n* service-rhs := [variable-name] on expression-list service-body-block\n* \n*\n* @param metadata Metadata\n* @param serviceKeyword Service keyword\n* @return Parsed node\n*/\nprivate STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) {\nSTNode serviceName = parseServiceName();\nSTNode onKeyword = parseOnKeyword();\nSTNode expressionList = parseListeners();\nSTNode serviceBody = parseServiceBody();\nonKeyword =\ncloneWithDiagnosticIfListEmpty(expressionList, onKeyword, DiagnosticErrorCode.ERROR_MISSING_EXPRESSION);\nreturn STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword,\nexpressionList, serviceBody);\n}\nprivate STNode parseServiceName() {\nSTToken nextToken = peek();\nreturn parseServiceName(nextToken.kind);\n}\nprivate STNode parseServiceName(SyntaxKind kind) {\nswitch (kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseIdentifier(ParserRuleContext.SERVICE_NAME);\ncase ON_KEYWORD:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseServiceName(solution.tokenKind);\n}\n}\n/**\n* Parse service keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseServiceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SERVICE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a compound binary operator.\n*
\n* compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>>\n*\n* @param tokenKind STToken kind\n* @return true if the token kind refers to a binary operator. false otherwise\n*/\nprivate boolean isCompoundBinaryOperator(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase SLASH_TOKEN:\ncase ASTERISK_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase BITWISE_XOR_TOKEN:\ncase PIPE_TOKEN:\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\nreturn getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse on keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseOnKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ON_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ON_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse listener references.\n*
\n* expression-list := expression (, expression)*\n*\n* @return Parsed node\n*/\nprivate STNode parseListeners() {\nstartContext(ParserRuleContext.LISTENERS_LIST);\nList listeners = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfExpressionsList(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode leadingComma = STNodeFactory.createEmptyNode();\nSTNode exprListItem = parseExpressionListItem(leadingComma);\nlisteners.add(exprListItem);\nnextToken = peek();\nwhile (!isEndOfExpressionsList(nextToken.kind)) {\nleadingComma = parseComma();\nexprListItem = parseExpressionListItem(leadingComma);\nlisteners.add(exprListItem);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(listeners);\n}\nprivate boolean isEndOfExpressionsList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\nreturn false;\ncase EOF_TOKEN:\ncase SEMICOLON_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn !isValidExprStart(tokenKind);\n}\n}\n/**\n* Parse expression list item.\n*\n* @param leadingComma Leading comma\n* @return Parsed node\n*/\nprivate STNode parseExpressionListItem(STNode leadingComma) {\nSTNode expr = parseExpression();\nreturn STNodeFactory.createExpressionListItemNode(leadingComma, expr);\n}\n/**\n* Parse service body.\n*
\n* \n* service-body-block := { service-method-defn* }\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseServiceBody() {\nSTNode openBrace = parseOpenBrace();\nSTNode resources = parseResources();\nSTNode closeBrace = parseCloseBrace();\nreturn STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace);\n}\n/**\n* Parse service resource definitions.\n*\n* @return Parsed node\n*/\nprivate STNode parseResources() {\nList resources = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (!isEndOfServiceDecl(nextToken.kind)) {\nSTNode serviceMethod = parseResource();\nif (serviceMethod == null) {\nbreak;\n}\nresources.add(serviceMethod);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(resources);\n}\nprivate boolean isEndOfServiceDecl(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\ncase TYPE_KEYWORD:\ncase SERVICE_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse resource definition (i.e. service-method-defn).\n*
\n* \n* service-body-block := { service-method-defn* }\n*\n* service-method-defn := metadata [resource] function identifier function-signature method-defn-body\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseResource() {\nSTToken nextToken = peek();\nreturn parseResource(nextToken.kind);\n}\nprivate STNode parseResource(SyntaxKind nextTokenKind) {\nSTNode metadata;\nswitch (nextTokenKind) {\ncase RESOURCE_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\ncase FUNCTION_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ndefault:\nif (isEndOfServiceDecl(nextTokenKind)) {\nreturn null;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RESOURCE_DEF);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseResource(solution.tokenKind);\n}\nreturn parseResource(nextTokenKind, metadata);\n}\nprivate STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) {\nswitch (nextTokenKind) {\ncase RESOURCE_KEYWORD:\nSTNode resourceKeyword = parseResourceKeyword();\nreturn parseFuncDefinition(metadata, false, resourceKeyword, null);\ncase TRANSACTIONAL_KEYWORD:\nSTNode transactionalKeyword = parseTransactionalKeyword();\nSTNode resourceKey = parseResourceKeyword();\nreturn parseFuncDefinition(metadata, false, resourceKey, transactionalKeyword, null);\ncase FUNCTION_KEYWORD:\nreturn parseFuncDefinition(metadata, false, STNodeFactory.createEmptyNode(), null);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseResource(solution.tokenKind, metadata);\n}\n}\n/**\n* Parse resource keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseResourceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RESOURCE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether next construct is a service declaration or not. This method is\n* used to determine whether an end-of-block is reached, if the next token is\n* a service-keyword. Because service-keyword can be used in statements as well\n* as in top-level node (service-decl). 
We have reached a service-decl, then\n* it could be due to missing close-brace at the end of the current block.\n*\n* @return true if the next construct is a service declaration.\n* false otherwise\n*/\nprivate STNode parseStmtStartsWithTupleTypeOrExprRhs(STNode annots, STNode tupleTypeOrListConst, boolean isRoot) {\nif (tupleTypeOrListConst.kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 &&\ntupleTypeOrListConst.kind.compareTo(SyntaxKind.TYPEDESC_TYPE_DESC) <= 0) {\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode typedBindingPattern =\nparseTypedBindingPatternTypeRhs(tupleTypeOrListConst, ParserRuleContext.VAR_DECL_STMT, isRoot);\nif (!isRoot) {\nreturn typedBindingPattern;\n}\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, finalKeyword, typedBindingPattern, false);\n}\nSTNode expr = getExpression(tupleTypeOrListConst);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseAsTupleTypeDesc(STNode annots, STNode openBracket, List memberList, STNode member,\nboolean isRoot) {\nmemberList = getTypeDescList(memberList);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode tupleTypeMembers = parseTupleTypeMembers(member, memberList);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode tupleType = STNodeFactory.createTupleTypeDescriptorNode(openBracket, tupleTypeMembers, closeBracket);\nSTNode typeDesc =\nparseComplexTypeDescriptor(tupleType, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nendContext();\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT, isRoot);\nif (!isRoot) {\nreturn typedBindingPattern;\n}\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, STNodeFactory.createEmptyNode(), typedBindingPattern, false);\n}\nprivate STNode parseAsListBindingPattern(STNode openBracket, List memberList, STNode member,\nboolean isRoot) {\nmemberList = getBindingPatternsList(memberList);\nmemberList.add(member);\nswitchContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, member, memberList);\nendContext();\nif (!isRoot) {\nreturn listBindingPattern;\n}\nreturn parseAssignmentStmtRhs(listBindingPattern);\n}\nprivate STNode parseAsListBindingPattern(STNode openBracket, List memberList) {\nmemberList = getBindingPatternsList(memberList);\nswitchContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, memberList);\nendContext();\nreturn listBindingPattern;\n}\nprivate STNode parseAsListBindingPatternOrListConstructor(STNode openBracket, List memberList,\nSTNode member, boolean isRoot) {\nmemberList.add(member);\nSTNode memberEnd = parseBracketedListMemberEnd();\nSTNode listBindingPatternOrListCons;\nif (memberEnd == null) {\nSTNode closeBracket = parseCloseBracket();\nlistBindingPatternOrListCons =\nparseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot);\n} else {\nmemberList.add(memberEnd);\nlistBindingPatternOrListCons = parseListBindingPatternOrListConstructor(openBracket, memberList, isRoot);\n}\nreturn listBindingPatternOrListCons;\n}\nprivate SyntaxKind getStmtStartBracketedListType(STNode memberNode) {\nif (memberNode.kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 &&\nmemberNode.kind.compareTo(SyntaxKind.TYPEDESC_TYPE_DESC) <= 0) {\nreturn SyntaxKind.TUPLE_TYPE_DESC;\n}\nswitch (memberNode.kind) {\ncase 
DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\nreturn SyntaxKind.ARRAY_TYPE_DESC;\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase QUALIFIED_NAME_REFERENCE:\ncase REST_TYPE:\nreturn SyntaxKind.TUPLE_TYPE_DESC;\ncase LIST_CONSTRUCTOR:\ncase MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_CONSTRUCTOR;\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR;\ncase SIMPLE_NAME_REFERENCE:\ncase BRACKETED_LIST:\nreturn SyntaxKind.NONE;\ncase FUNCTION_CALL:\nif (isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) memberNode)) {\nreturn SyntaxKind.NONE;\n}\nreturn SyntaxKind.LIST_CONSTRUCTOR;\ncase INDEXED_EXPRESSION:\nreturn SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST;\ndefault:\nif (isExpression(memberNode.kind) && !isAllBasicLiterals(memberNode) && !isAmbiguous(memberNode)) {\nreturn SyntaxKind.LIST_CONSTRUCTOR;\n}\nreturn SyntaxKind.NONE;\n}\n}\nprivate boolean isPosibleFunctionalBindingPattern(STFunctionCallExpressionNode funcCall) {\nSTNode args = funcCall.arguments;\nint size = args.bucketCount();\nfor (int i = 0; i < size; i++) {\nSTNode arg = args.childInBucket(i);\nif (arg.kind != SyntaxKind.NAMED_ARG && arg.kind != SyntaxKind.POSITIONAL_ARG &&\narg.kind != SyntaxKind.REST_ARG) {\ncontinue;\n}\nif (!isPosibleArgBindingPattern((STFunctionArgumentNode) arg)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean isPosibleArgBindingPattern(STFunctionArgumentNode arg) {\nswitch (arg.kind) {\ncase POSITIONAL_ARG:\nSTNode expr = ((STPositionalArgumentNode) arg).expression;\nreturn isPosibleBindingPattern(expr);\ncase NAMED_ARG:\nexpr = ((STNamedArgumentNode) arg).expression;\nreturn isPosibleBindingPattern(expr);\ncase REST_ARG:\nexpr = ((STRestArgumentNode) arg).expression;\nreturn expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE;\ndefault:\nreturn false;\n}\n}\nprivate boolean isPosibleBindingPattern(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn true;\ncase LIST_CONSTRUCTOR:\nSTListConstructorExpressionNode listConstructor = (STListConstructorExpressionNode) node;\nfor (int i = 0; i < listConstructor.bucketCount(); i++) {\nSTNode expr = listConstructor.childInBucket(i);\nif (!isPosibleBindingPattern(expr)) {\nreturn false;\n}\n}\nreturn true;\ncase MAPPING_CONSTRUCTOR:\nSTMappingConstructorExpressionNode mappingConstructor = (STMappingConstructorExpressionNode) node;\nfor (int i = 0; i < mappingConstructor.bucketCount(); i++) {\nSTNode expr = mappingConstructor.childInBucket(i);\nif (!isPosibleBindingPattern(expr)) {\nreturn false;\n}\n}\nreturn true;\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode specificField = (STSpecificFieldNode) node;\nif (specificField.readonlyKeyword != null) {\nreturn false;\n}\nif (specificField.valueExpr == null) {\nreturn true;\n}\nreturn isPosibleBindingPattern(specificField.valueExpr);\ncase FUNCTION_CALL:\nreturn isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) node);\ndefault:\nreturn false;\n}\n}\nprivate STNode parseStatementStartBracketedList(STNode annots, STNode openBracket, List members,\nSTNode closeBracket, boolean isRoot, boolean possibleMappingField) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase EQUAL_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nSTNode memberBindingPatterns = 
STNodeFactory.createNodeList(getBindingPatternsList(members));\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode listBindingPattern = STNodeFactory.createListBindingPatternNode(openBracket,\nmemberBindingPatterns, restBindingPattern, closeBracket);\nendContext();\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(listBindingPattern);\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nif (members.isEmpty()) {\nopenBracket =\nSyntaxErrors.addDiagnostic(openBracket, DiagnosticErrorCode.ERROR_MISSING_TUPLE_MEMBER);\n}\nswitchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\nSTNode tupleTypeDesc =\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\nendContext();\nSTNode typeDesc = parseComplexTypeDescriptor(tupleTypeDesc,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nendContext();\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, typedBindingPattern);\ncase OPEN_BRACKET_TOKEN:\nif (!isRoot) {\nmemberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\ntupleTypeDesc =\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\nendContext();\ntypeDesc = parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nreturn typeDesc;\n}\nSTAmbiguousCollectionNode list =\nnew STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\nendContext();\nSTNode tpbOrExpr = parseTypedBindingPatternOrExprRhs(list, true);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, tpbOrExpr);\ncase COLON_TOKEN:\nif (possibleMappingField && members.size() == 1) {\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode colon = parseColon();\nSTNode fieldNameExpr = getExpression(members.get(0));\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon,\nvalueExpr);\n}\ndefault:\nendContext();\nif (!isRoot) {\nreturn new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nlist = new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\nSTNode exprOrTPB = parseTypedBindingPatternOrExprRhs(list, false);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, exprOrTPB);\n}\n}\nprivate boolean isWildcardBP(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTToken nameToken = (STToken) ((STSimpleNameReferenceNode) node).name;\nreturn isUnderscoreToken(nameToken);\ncase IDENTIFIER_TOKEN:\nreturn isUnderscoreToken((STToken) node);\ndefault:\nreturn false;\n}\n}\nprivate boolean isUnderscoreToken(STToken token) {\nreturn \"_\".equals(token.text());\n}\nprivate STNode getWildcardBindingPattern(STNode identifier) {\nswitch (identifier.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTNode varName = ((STSimpleNameReferenceNode) identifier).name;\nreturn STNodeFactory.createWildcardBindingPatternNode(varName);\ncase IDENTIFIER_TOKEN:\nreturn STNodeFactory.createWildcardBindingPatternNode(identifier);\ndefault:\nthrow new IllegalStateException();\n}\n}\n/*\n* This section tries 
to break the ambiguity in parsing a statement that starts with a open-brace.\n*/\n/**\n* Parse statements that starts with open-brace. It could be a:\n* 1) Block statement\n* 2) Var-decl with mapping binding pattern.\n* 3) Statement that starts with mapping constructor expression.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBrace() {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode openBrace = parseOpenBrace();\nif (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nSTNode closeBrace = parseCloseBrace();\nswitch (peek().kind) {\ncase EQUAL_TOKEN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nSTNode fields = STNodeFactory.createEmptyNodeList();\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode bindingPattern = STNodeFactory.createMappingBindingPatternNode(openBrace, fields,\nrestBindingPattern, closeBrace);\nreturn parseAssignmentStmtRhs(bindingPattern);\ncase RIGHT_ARROW_TOKEN:\ncase SYNC_SEND_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nfields = STNodeFactory.createEmptyNodeList();\nSTNode expr = STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ndefault:\nSTNode statements = STNodeFactory.createEmptyNodeList();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace);\n}\n}\nSTNode member = parseStatementStartingBracedListFirstMember();\nSyntaxKind nodeType = getBracedListType(member);\nSTNode stmt;\nswitch (nodeType) {\ncase MAPPING_BINDING_PATTERN:\nreturn parseStmtAsMappingBindingPatternStart(openBrace, member);\ncase MAPPING_CONSTRUCTOR:\nreturn parseStmtAsMappingConstructorStart(openBrace, member);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn parseStmtAsMappingBPOrMappingConsStart(openBrace, member);\ncase BLOCK_STATEMENT:\nSTNode closeBrace = parseCloseBrace();\nstmt = STNodeFactory.createBlockStatementNode(openBrace, member, closeBrace);\nendContext();\nreturn stmt;\ndefault:\nArrayList stmts = new ArrayList<>();\nstmts.add(member);\nSTNode statements = parseStatements(stmts);\ncloseBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace);\n}\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping binding pattern.\n*\n* @param openBrace Open brace\n* @param firstMappingField First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingBindingPatternStart(STNode openBrace, STNode firstMappingField) {\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nstartContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nList bindingPatterns = new ArrayList<>();\nif (firstMappingField.kind != SyntaxKind.REST_BINDING_PATTERN) {\nbindingPatterns.add(getBindingPattern(firstMappingField));\n}\nSTNode mappingBP = parseMappingBindingPattern(openBrace, bindingPatterns, firstMappingField);\nreturn parseAssignmentStmtRhs(mappingBP);\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param firstMember First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingConstructorStart(STNode openBrace, STNode firstMember) {\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nList members = new ArrayList<>();\nSTNode mappingCons = parseAsMappingConstructor(openBrace, members, 
firstMember);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, mappingCons, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\n/**\n* Parse the braced-list as a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param members members list\n* @param member Most recently parsed member\n* @return Parsed node\n*/\nprivate STNode parseAsMappingConstructor(STNode openBrace, List members, STNode member) {\nmembers.add(member);\nmembers = getExpressionList(members);\nswitchContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode fields = parseMappingConstructorFields(members);\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping binding pattern\n* or a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param member First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingBPOrMappingConsStart(STNode openBrace, STNode member) {\nstartContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);\nList members = new ArrayList<>();\nmembers.add(member);\nSTNode bpOrConstructor;\nSTNode memberEnd = parseMappingFieldEnd();\nif (memberEnd == null) {\nSTNode closeBrace = parseCloseBrace();\nbpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members, closeBrace);\n} else {\nmembers.add(memberEnd);\nbpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members);;\n}\nswitch (bpOrConstructor.kind) {\ncase MAPPING_CONSTRUCTOR:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, bpOrConstructor, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ncase MAPPING_BINDING_PATTERN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nSTNode bindingPattern = getBindingPattern(bpOrConstructor);\nreturn parseAssignmentStmtRhs(bindingPattern);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nif (peek().kind == SyntaxKind.EQUAL_TOKEN) {\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nbindingPattern = getBindingPattern(bpOrConstructor);\nreturn parseAssignmentStmtRhs(bindingPattern);\n}\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nexpr = getExpression(bpOrConstructor);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\n}\n/**\n* Parse a member of a braced-list that occurs at the start of a statement.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartingBracedListFirstMember() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase READONLY_KEYWORD:\nSTNode readonlyKeyword = parseReadonlyKeyword();\nreturn bracedListMemberStartsWithReadonly(readonlyKeyword);\ncase IDENTIFIER_TOKEN:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseIdentifierRhsInStmtStartingBrace(readonlyKeyword);\ncase STRING_LITERAL:\nSTNode key = parseStringLiteral();\nif (peek().kind == SyntaxKind.COLON_TOKEN) {\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, key, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ncase OPEN_BRACKET_TOKEN:\nSTNode annots = 
STNodeFactory.createEmptyNodeList();\nreturn parseStatementStartsWithOpenBracket(annots, true);\ncase OPEN_BRACE_TOKEN:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nreturn parseStatementStartsWithOpenBrace();\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nreturn parseStatements();\n}\n}\nprivate STNode bracedListMemberStartsWithReadonly(STNode readonlyKeyword) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseIdentifierRhsInStmtStartingBrace(readonlyKeyword);\ncase STRING_LITERAL:\nif (peek(2).kind == SyntaxKind.COLON_TOKEN) {\nSTNode key = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\nSTNode typeDesc = parseComplexTypeDescriptor(readonlyKeyword,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nendContext();\nSTNode metadata = STNodeFactory.createEmptyNode();\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode typedBP = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(metadata, finalKeyword, typedBP, false);\n}\n}\n/**\n* Parse the rhs components of an identifier that follows an open brace,\n* at the start of a statement. i.e: \"{foo\".\n*\n* @param readonlyKeyword Readonly keyword\n* @return Parsed node\n*/\nprivate STNode parseIdentifierRhsInStmtStartingBrace(STNode readonlyKeyword) {\nSTNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nSTNode colon = STNodeFactory.createEmptyNode();\nSTNode value = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value);\ncase COLON_TOKEN:\ncolon = parseColon();\nif (!isEmpty(readonlyKeyword)) {\nvalue = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value);\n}\nSyntaxKind nextTokenKind = peek().kind;\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nSTNode bindingPatternOrExpr = parseListBindingPatternOrListConstructor();\nreturn getMappingField(identifier, colon, bindingPatternOrExpr);\ncase OPEN_BRACE_TOKEN:\nbindingPatternOrExpr = parseMappingBindingPatterOrMappingConstructor();\nreturn getMappingField(identifier, colon, bindingPatternOrExpr);\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifierRhsInStmtStartBrace(identifier, colon);\ndefault:\nSTNode expr = parseExpression();\nreturn getMappingField(identifier, colon, expr);\n}\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nif (!isEmpty(readonlyKeyword)) {\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode bindingPattern = STNodeFactory.createCaptureBindingPatternNode(identifier);\nSTNode typedBindingPattern =\nSTNodeFactory.createTypedBindingPatternNode(readonlyKeyword, bindingPattern);\nSTNode metadata = STNodeFactory.createEmptyNode();\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVarDeclRhs(metadata, finalKeyword, typedBindingPattern, false);\n}\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode qualifiedIdentifier = parseQualifiedIdentifier(identifier, false);\nSTNode expr = parseTypedBindingPatternOrExprRhs(qualifiedIdentifier, true);\nSTNode annots = 
STNodeFactory.createEmptyNodeList();\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, expr);\n}\n}\n/**\n* Parse the rhs components of \"{ identifier : identifier\",\n* at the start of a statement. i.e: \"{foo:bar\".\n*\n* @return Parsed node\n*/\nprivate STNode parseQualifiedIdentifierRhsInStmtStartBrace(STNode identifier, STNode colon) {\nSTNode secondIdentifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);\nSTNode secondNameRef = STNodeFactory.createSimpleNameReferenceNode(secondIdentifier);\nif (isWildcardBP(secondIdentifier)) {\nreturn getWildcardBindingPattern(secondIdentifier);\n}\nSyntaxKind nextTokenKind = peek().kind;\nSTNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondNameRef);\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn qualifiedNameRef;\ncase OPEN_BRACE_TOKEN:\ncase IDENTIFIER_TOKEN:\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode typeBindingPattern =\nparseTypedBindingPatternTypeRhs(qualifiedNameRef, ParserRuleContext.VAR_DECL_STMT);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false);\ncase OPEN_BRACKET_TOKEN:\nreturn parseMemberRhsInStmtStartWithBrace(identifier, colon, secondNameRef);\ncase QUESTION_MARK_TOKEN:\nSTNode typeDesc = parseComplexTypeDescriptor(qualifiedNameRef,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nfinalKeyword = STNodeFactory.createEmptyNode();\ntypeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nannots = STNodeFactory.createEmptyNodeList();\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false);\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn parseStatementStartWithExprRhs(qualifiedNameRef);\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ndefault:\nreturn parseMemberWithExprInRhs(identifier, colon, secondNameRef, secondNameRef);\n}\n}\nprivate SyntaxKind getBracedListType(STNode member) {\nswitch (member.kind) {\ncase FIELD_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BINDING_PATTERN;\ncase SPECIFIC_FIELD:\nSTNode expr = ((STSpecificFieldNode) member).valueExpr;\nif (expr == null) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nswitch (expr.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\ncase FUNCTION_CALL:\nif (isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) expr)) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ndefault:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\n}\ncase SPREAD_FIELD:\ncase COMPUTED_NAME_FIELD:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ncase REST_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\ncase LIST:\nreturn SyntaxKind.BLOCK_STATEMENT;\ndefault:\nreturn SyntaxKind.NONE;\n}\n}\n/**\n* Parse mapping binding pattern or mapping constructor.\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingBindingPatterOrMappingConstructor() {\nstartContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);\nSTNode openBrace = parseOpenBrace();\nList memberList = new ArrayList<>();\nreturn 
parseMappingBindingPatternOrMappingConstructor(openBrace, memberList);\n}\nprivate boolean isBracedListEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List memberList) {\nSTToken nextToken = peek();\nwhile (!isBracedListEnd(nextToken.kind)) {\nSTNode member = parseMappingBindingPatterOrMappingConstructorMember(nextToken.kind);\nSyntaxKind currentNodeType = getTypeOfMappingBPOrMappingCons(member);\nswitch (currentNodeType) {\ncase MAPPING_CONSTRUCTOR:\nreturn parseAsMappingConstructor(openBrace, memberList, member);\ncase MAPPING_BINDING_PATTERN:\nreturn parseAsMappingBindingPattern(openBrace, memberList, member);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseMappingFieldEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBrace = parseCloseBrace();\nreturn parseMappingBindingPatternOrMappingConstructor(openBrace, memberList, closeBrace);\n}\nprivate STNode parseMappingBindingPatterOrMappingConstructorMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);\nreturn parseMappingFieldRhs(key);\ncase STRING_LITERAL:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nkey = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseComputedField();\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode expr = parseExpression();\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn STNodeFactory.createRestBindingPatternNode(ellipsis, expr);\n}\nreturn STNodeFactory.createSpreadFieldNode(ellipsis, expr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternOrListConstructorMember(solution.tokenKind);\n}\n}\nprivate STNode parseMappingFieldRhs(STNode key) {\nSTToken nextToken = peek();\nreturn parseMappingFieldRhs(nextToken.kind, key);\n}\nprivate STNode parseMappingFieldRhs(SyntaxKind tokenKind, STNode key) {\nSTNode colon;\nSTNode valueExpr;\nswitch (tokenKind) {\ncase COLON_TOKEN:\ncolon = parseColon();\nreturn parseMappingFieldValue(key, colon);\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, key);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseSpecificFieldRhs(solution.tokenKind, readonlyKeyword, key);\n}\n}\nprivate STNode parseMappingFieldValue(STNode key, STNode colon) {\nSTNode expr;\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\nexpr = parseExpression();\nbreak;\ncase OPEN_BRACKET_TOKEN:\nexpr = parseListBindingPatternOrListConstructor();\nbreak;\ncase OPEN_BRACE_TOKEN:\nexpr = 
parseMappingBindingPatterOrMappingConstructor();\nbreak;\ndefault:\nexpr = parseExpression();\nbreak;\n}\nif (isBindingPattern(expr.kind)) {\nreturn STNodeFactory.createFieldBindingPatternFullNode(key, colon, expr);\n}\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, expr);\n}\nprivate boolean isBindingPattern(SyntaxKind kind) {\nswitch (kind) {\ncase FIELD_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate SyntaxKind getTypeOfMappingBPOrMappingCons(STNode memberNode) {\nswitch (memberNode.kind) {\ncase FIELD_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BINDING_PATTERN;\ncase SPECIFIC_FIELD:\nSTNode expr = ((STSpecificFieldNode) memberNode).valueExpr;\nif (expr == null || expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||\nexpr.kind == SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR ||\nexpr.kind == SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase SPREAD_FIELD:\ncase COMPUTED_NAME_FIELD:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase REST_BINDING_PATTERN:\ndefault:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\n}\nprivate STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List members,\nSTNode closeBrace) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR, openBrace, members,\ncloseBrace);\n}\nprivate STNode parseAsMappingBindingPattern(STNode openBrace, List members, STNode member) {\nmembers.add(member);\nmembers = getBindingPatternsList(members);\nswitchContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nreturn parseMappingBindingPattern(openBrace, members, member);\n}\n/**\n* Parse list binding pattern or list constructor.\n*\n* @return Parsed node\n*/\nprivate STNode parseListBindingPatternOrListConstructor() {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nreturn parseListBindingPatternOrListConstructor(openBracket, memberList, false);\n}\nprivate STNode parseListBindingPatternOrListConstructor(STNode openBracket, List memberList,\nboolean isRoot) {\nSTToken nextToken = peek();\nwhile (!isBracketedListEnd(nextToken.kind)) {\nSTNode member = parseListBindingPatternOrListConstructorMember(nextToken.kind);\nSyntaxKind currentNodeType = getParsingNodeTypeOfListBPOrListCons(member);\nswitch (currentNodeType) {\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase LIST_BINDING_PATTERN:\nreturn parseAsListBindingPattern(openBracket, memberList, member, isRoot);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBracket = parseCloseBracket();\nreturn parseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot);\n}\nprivate STNode parseListBindingPatternOrListConstructorMember(SyntaxKind nextTokenKind) {\nswitch 
(nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseListBindingPatternOrListConstructor();\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (isWildcardBP(identifier)) {\nreturn getWildcardBindingPattern(identifier);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPatterOrMappingConstructor();\ncase ELLIPSIS_TOKEN:\nreturn parseListBindingPatternMember();\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseExpression();\n}\nSolution solution = recover(peek(), ParserRuleContext.LIST_BP_OR_LIST_CONSTRUCTOR_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternOrListConstructorMember(solution.tokenKind);\n}\n}\nprivate SyntaxKind getParsingNodeTypeOfListBPOrListCons(STNode memberNode) {\nswitch (memberNode.kind) {\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase SIMPLE_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR;\ndefault:\nreturn SyntaxKind.LIST_CONSTRUCTOR;\n}\n}\nprivate STNode parseAsListConstructor(STNode openBracket, List memberList, STNode member, boolean isRoot) {\nmemberList.add(member);\nmemberList = getExpressionList(memberList);\nswitchContext(ParserRuleContext.LIST_CONSTRUCTOR);\nSTNode expressions = parseOptionalExpressionsList(memberList);\nSTNode closeBracket = parseCloseBracket();\nSTNode listConstructor =\nSTNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);\nendContext();\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, listConstructor, false, false);\nif (!isRoot) {\nreturn expr;\n}\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseListBindingPatternOrListConstructor(STNode openBracket, List members,\nSTNode closeBracket, boolean isRoot) {\nSTNode lbpOrListCons;\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR, openBracket, members,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(peek().kind, closeBracket.kind)) {\nmembers = getExpressionList(members);\nSTNode memberExpressions = STNodeFactory.createNodeList(members);\nlbpOrListCons = STNodeFactory.createListConstructorExpressionNode(openBracket, memberExpressions,\ncloseBracket);\nbreak;\n}\nmembers = getBindingPatternsList(members);\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(members);\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nlbpOrListCons = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode,\nrestBindingPattern, closeBracket);\nbreak;\n}\nendContext();\nif (!isRoot) {\nreturn lbpOrListCons;\n}\nreturn parseStmtStartsWithTypedBPOrExprRhs(null, lbpOrListCons);\n}\nprivate STNode parseMemberRhsInStmtStartWithBrace(STNode identifier, STNode colon, STNode secondIdentifier) {\nSTNode typedBPOrExpr =\nparseTypedBindingPatternOrMemberAccess(secondIdentifier, false, true, ParserRuleContext.AMBIGUOUS_STMT);\nif (isExpression(typedBPOrExpr.kind)) {\nreturn parseMemberWithExprInRhs(identifier, colon, secondIdentifier, 
typedBPOrExpr);\n}\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode annots = STNodeFactory.createEmptyNode();\nSTNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier);\nSTNode typeDesc = mergeQualifiedNameWithTypeDesc(qualifiedNameRef,\n((STTypedBindingPatternNode) typedBPOrExpr).typeDescriptor);\nreturn parseVarDeclRhs(annots, finalKeyword, typeDesc, false);\n}\n/**\n* Parse a member that starts with \"foo:bar[\", in a statement starting with a brace.\n*\n* @param identifier First identifier of the statement\n* @param colon Colon that follows the first identifier\n* @param secondIdentifier Identifier that follows the colon\n* @param memberAccessExpr Member access expression\n* @return Parsed node\n*/\nprivate STNode parseMemberWithExprInRhs(STNode identifier, STNode colon, STNode secondIdentifier,\nSTNode memberAccessExpr) {\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, true);\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, expr);\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode qualifiedName =\nSTNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier);\nSTNode updatedExpr = mergeQualifiedNameWithExpr(qualifiedName, expr);\nreturn parseStatementStartWithExprRhs(updatedExpr);\n}\n}\n/**\n* Replace the first identifier of an expression, with a given qualified-identifier.\n* Only expressions that can start with \"bar[..]\" can reach here.\n*\n* @param qualifiedName Qualified identifier to replace simple identifier\n* @param exprOrAction Expression or action\n* @return Updated expression\n*/\nprivate STNode mergeQualifiedNameWithExpr(STNode qualifiedName, STNode exprOrAction) {\nswitch (exprOrAction.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn qualifiedName;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) exprOrAction;\nSTNode newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, binaryExpr.lhsExpr);\nreturn STNodeFactory.createBinaryExpressionNode(binaryExpr.kind, newLhsExpr, binaryExpr.operator,\nbinaryExpr.rhsExpr);\ncase FIELD_ACCESS:\nSTFieldAccessExpressionNode fieldAccess = (STFieldAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, fieldAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, fieldAccess.dotToken,\nfieldAccess.fieldName);\ncase INDEXED_EXPRESSION:\nSTIndexedExpressionNode memberAccess = (STIndexedExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, memberAccess.containerExpression);\nreturn STNodeFactory.createIndexedExpressionNode(newLhsExpr, memberAccess.openBracket,\nmemberAccess.keyExpression, memberAccess.closeBracket);\ncase TYPE_TEST_EXPRESSION:\nSTTypeTestExpressionNode typeTest = (STTypeTestExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, typeTest.expression);\nreturn STNodeFactory.createTypeTestExpressionNode(newLhsExpr, typeTest.isKeyword,\ntypeTest.typeDescriptor);\ncase 
ANNOT_ACCESS:\nSTAnnotAccessExpressionNode annotAccess = (STAnnotAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, annotAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, annotAccess.annotChainingToken,\nannotAccess.annotTagReference);\ncase OPTIONAL_FIELD_ACCESS:\nSTOptionalFieldAccessExpressionNode optionalFieldAccess =\n(STOptionalFieldAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, optionalFieldAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr,\noptionalFieldAccess.optionalChainingToken, optionalFieldAccess.fieldName);\ncase CONDITIONAL_EXPRESSION:\nSTConditionalExpressionNode conditionalExpr = (STConditionalExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, conditionalExpr.lhsExpression);\nreturn STNodeFactory.createConditionalExpressionNode(newLhsExpr, conditionalExpr.questionMarkToken,\nconditionalExpr.middleExpression, conditionalExpr.colonToken, conditionalExpr.endExpression);\ncase REMOTE_METHOD_CALL_ACTION:\nSTRemoteMethodCallActionNode remoteCall = (STRemoteMethodCallActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, remoteCall.expression);\nreturn STNodeFactory.createRemoteMethodCallActionNode(newLhsExpr, remoteCall.rightArrowToken,\nremoteCall.methodName, remoteCall.openParenToken, remoteCall.arguments,\nremoteCall.closeParenToken);\ncase ASYNC_SEND_ACTION:\nSTAsyncSendActionNode asyncSend = (STAsyncSendActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, asyncSend.expression);\nreturn STNodeFactory.createAsyncSendActionNode(newLhsExpr, asyncSend.rightArrowToken,\nasyncSend.peerWorker);\ncase SYNC_SEND_ACTION:\nSTSyncSendActionNode syncSend = (STSyncSendActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, syncSend.expression);\nreturn STNodeFactory.createAsyncSendActionNode(newLhsExpr, syncSend.syncSendToken, syncSend.peerWorker);\ndefault:\nreturn exprOrAction;\n}\n}\nprivate STNode mergeQualifiedNameWithTypeDesc(STNode qualifiedName, STNode typeDesc) {\nswitch (typeDesc.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn qualifiedName;\ncase ARRAY_TYPE_DESC:\nSTArrayTypeDescriptorNode arrayTypeDesc = (STArrayTypeDescriptorNode) typeDesc;\nSTNode newMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, arrayTypeDesc.memberTypeDesc);\nreturn STNodeFactory.createArrayTypeDescriptorNode(newMemberType, arrayTypeDesc.openBracket,\narrayTypeDesc.arrayLength, arrayTypeDesc.closeBracket);\ncase UNION_TYPE_DESC:\nSTUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDesc;\nSTNode newlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, unionTypeDesc.leftTypeDesc);\nreturn STNodeFactory.createUnionTypeDescriptorNode(newlhsType, unionTypeDesc.pipeToken,\nunionTypeDesc.rightTypeDesc);\ncase INTERSECTION_TYPE_DESC:\nSTIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDesc;\nnewlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, intersectionTypeDesc.leftTypeDesc);\nreturn STNodeFactory.createUnionTypeDescriptorNode(newlhsType, intersectionTypeDesc.bitwiseAndToken,\nintersectionTypeDesc.rightTypeDesc);\ncase OPTIONAL_TYPE_DESC:\nSTOptionalTypeDescriptorNode optionalType = (STOptionalTypeDescriptorNode) typeDesc;\nnewMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, optionalType.typeDescriptor);\nreturn 
STNodeFactory.createOptionalTypeDescriptorNode(newMemberType, optionalType.questionMarkToken);\ndefault:\nreturn typeDesc;\n}\n}\nprivate List getTypeDescList(List ambiguousList) {\nList typeDescList = new ArrayList<>();\nfor (STNode item : ambiguousList) {\ntypeDescList.add(getTypeDescFromExpr(item));\n}\nreturn typeDescList;\n}\n/**\n* Create a type-desc out of an expression.\n*\n* @param expression Expression\n* @return Type descriptor\n*/\nprivate STNode getTypeDescFromExpr(STNode expression) {\nswitch (expression.kind) {\ncase INDEXED_EXPRESSION:\nreturn parseArrayTypeDescriptorNode((STIndexedExpressionNode) expression);\ncase BASIC_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn STNodeFactory.createSingletonTypeDescriptorNode(expression);\ncase TYPE_REFERENCE_TYPE_DESC:\nreturn ((STTypeReferenceTypeDescNode) expression).typeRef;\ncase BRACED_EXPRESSION:\nSTBracedExpressionNode bracedExpr = (STBracedExpressionNode) expression;\nSTNode typeDesc = getTypeDescFromExpr(bracedExpr.expression);\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(bracedExpr.openParen, typeDesc,\nbracedExpr.closeParen);\ncase NIL_LITERAL:\nSTNilLiteralNode nilLiteral = (STNilLiteralNode) expression;\nreturn STNodeFactory.createNilTypeDescriptorNode(nilLiteral.openParenToken, nilLiteral.closeParenToken);\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) expression;\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(innerList.members));\nreturn STNodeFactory.createTupleTypeDescriptorNode(innerList.collectionStartToken, memberTypeDescs,\ninnerList.collectionEndToken);\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) expression;\nswitch (binaryExpr.operator.kind) {\ncase PIPE_TOKEN:\nSTNode lhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr);\nSTNode rhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr);\nreturn STNodeFactory.createUnionTypeDescriptorNode(lhsTypeDesc, binaryExpr.operator,\nrhsTypeDesc);\ncase BITWISE_AND_TOKEN:\nlhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr);\nrhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(lhsTypeDesc, binaryExpr.operator,\nrhsTypeDesc);\ndefault:\nbreak;\n}\nreturn expression;\ncase UNARY_EXPRESSION:\nreturn STNodeFactory.createSingletonTypeDescriptorNode(expression);\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ndefault:\nreturn expression;\n}\n}\nprivate List getBindingPatternsList(List ambibuousList) {\nList bindingPatterns = new ArrayList();\nfor (STNode item : ambibuousList) {\nbindingPatterns.add(getBindingPattern(item));\n}\nreturn bindingPatterns;\n}\nprivate STNode getBindingPattern(STNode ambiguousNode) {\nif (isEmpty(ambiguousNode)) {\nreturn ambiguousNode;\n}\nswitch (ambiguousNode.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTNode varName = ((STSimpleNameReferenceNode) ambiguousNode).name;\nreturn createCaptureOrWildcardBP(varName);\ncase QUALIFIED_NAME_REFERENCE:\nSTQualifiedNameReferenceNode qualifiedName = (STQualifiedNameReferenceNode) ambiguousNode;\nSTNode fieldName = STNodeFactory.createSimpleNameReferenceNode(qualifiedName.modulePrefix);\nreturn STNodeFactory.createFieldBindingPatternFullNode(fieldName, 
qualifiedName.colon,\ngetBindingPattern(qualifiedName.identifier));\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode;\nSTNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(innerList.members));\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createListBindingPatternNode(innerList.collectionStartToken, memberBindingPatterns,\nrestBindingPattern, innerList.collectionEndToken);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ninnerList = (STAmbiguousCollectionNode) ambiguousNode;\nList bindingPatterns = new ArrayList<>();\nrestBindingPattern = STNodeFactory.createEmptyNode();\nfor (int i = 0; i < innerList.members.size(); i++) {\nSTNode bp = getBindingPattern(innerList.members.get(i));\nif (bp.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = bp;\nbreak;\n}\nbindingPatterns.add(bp);\n}\nmemberBindingPatterns = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createMappingBindingPatternNode(innerList.collectionStartToken,\nmemberBindingPatterns, restBindingPattern, innerList.collectionEndToken);\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode;\nfieldName = STNodeFactory.createSimpleNameReferenceNode(field.fieldName);\nif (field.valueExpr == null) {\nreturn STNodeFactory.createFieldBindingPatternVarnameNode(fieldName);\n}\nreturn STNodeFactory.createFieldBindingPatternFullNode(fieldName, field.colon,\ngetBindingPattern(field.valueExpr));\ncase FUNCTION_CALL:\nSTFunctionCallExpressionNode funcCall = (STFunctionCallExpressionNode) ambiguousNode;\nSTNode args = funcCall.arguments;\nint size = args.bucketCount();\nbindingPatterns = new ArrayList<>();\nfor (int i = 0; i < size; i++) {\nSTNode arg = args.childInBucket(i);\nbindingPatterns.add(getBindingPattern(arg));\n}\nSTNode argListBindingPatterns = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createFunctionalBindingPatternNode(funcCall.functionName, funcCall.openParenToken,\nargListBindingPatterns, funcCall.closeParenToken);\ncase POSITIONAL_ARG:\nSTPositionalArgumentNode positionalArg = (STPositionalArgumentNode) ambiguousNode;\nreturn getBindingPattern(positionalArg.expression);\ncase NAMED_ARG:\nSTNamedArgumentNode namedArg = (STNamedArgumentNode) ambiguousNode;\nreturn STNodeFactory.createNamedArgBindingPatternNode(namedArg.argumentName, namedArg.equalsToken,\ngetBindingPattern(namedArg.expression));\ncase REST_ARG:\nSTRestArgumentNode restArg = (STRestArgumentNode) ambiguousNode;\nreturn STNodeFactory.createRestBindingPatternNode(restArg.ellipsis, restArg.expression);\ndefault:\nreturn ambiguousNode;\n}\n}\nprivate List getExpressionList(List ambibuousList) {\nList exprList = new ArrayList();\nfor (STNode item : ambibuousList) {\nexprList.add(getExpression(item));\n}\nreturn exprList;\n}\nprivate STNode getExpression(STNode ambiguousNode) {\nif (isEmpty(ambiguousNode)) {\nreturn ambiguousNode;\n}\nswitch (ambiguousNode.kind) {\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase TUPLE_TYPE_DESC_OR_LIST_CONST:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode;\nSTNode memberExprs = STNodeFactory.createNodeList(getExpressionList(innerList.members));\nreturn STNodeFactory.createListConstructorExpressionNode(innerList.collectionStartToken, memberExprs,\ninnerList.collectionEndToken);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ninnerList = (STAmbiguousCollectionNode) 
ambiguousNode;\nList fieldList = new ArrayList<>();\nfor (int i = 0; i < innerList.members.size(); i++) {\nSTNode field = innerList.members.get(i);\nSTNode fieldNode;\nif (field.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTQualifiedNameReferenceNode qualifiedNameRefNode = (STQualifiedNameReferenceNode) field;\nSTNode readOnlyKeyword = STNodeFactory.createEmptyNode();\nSTNode fieldName = qualifiedNameRefNode.modulePrefix;\nSTNode colon = qualifiedNameRefNode.colon;\nSTNode valueExpr = getExpression(qualifiedNameRefNode.identifier);\nfieldNode = STNodeFactory.createSpecificFieldNode(readOnlyKeyword, fieldName, colon,\nvalueExpr);\n} else {\nfieldNode = getExpression(field);\n}\nfieldList.add(fieldNode);\n}\nSTNode fields = STNodeFactory.createNodeList(fieldList);\nreturn STNodeFactory.createMappingConstructorExpressionNode(innerList.collectionStartToken,\nfields, innerList.collectionEndToken);\ncase REST_BINDING_PATTERN:\nSTRestBindingPatternNode restBindingPattern = (STRestBindingPatternNode) ambiguousNode;\nreturn STNodeFactory.createSpreadFieldNode(restBindingPattern.ellipsisToken,\nrestBindingPattern.variableName);\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode;\nreturn STNodeFactory.createSpecificFieldNode(field.readonlyKeyword, field.fieldName, field.colon,\ngetExpression(field.valueExpr));\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ndefault:\nreturn ambiguousNode;\n}\n}\nprivate STNode getMappingField(STNode identifier, STNode colon, STNode bindingPatternOrExpr) {\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nswitch (bindingPatternOrExpr.kind) {\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\nreturn STNodeFactory.createFieldBindingPatternFullNode(simpleNameRef, colon, bindingPatternOrExpr);\ncase LIST_CONSTRUCTOR:\ncase MAPPING_CONSTRUCTOR:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, simpleNameRef, colon, identifier);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, bindingPatternOrExpr);\n}\n}\n}" + }, + { + "comment": "@stuartwdouglas I've updated this mapper to check for the 1st non-null cause, but for now I'd still rather prefer it be a generic mapper. 
Does it work for you ?", + "method_body": "public Response toResponse(CompositeException ex) {\nThrowable t = ex.getCauses().get(0);\nif (t == null) {\nthrow new InternalServerErrorException();\n}\nExceptionMapper mapper = (ExceptionMapper) p.getExceptionMapper(t.getClass());\nif (mapper == null) {\nthrow new InternalServerErrorException();\n}\nreturn mapper.toResponse(t);\n}", + "target_code": "Throwable t = ex.getCauses().get(0);", + "method_body_after": "public Response toResponse(CompositeException ex) {\nThrowable t = ex.getCauses().stream().filter(s -> s != null).findFirst()\n.orElseThrow(() -> new InternalServerErrorException());\nExceptionMapper mapper = (ExceptionMapper) p.getExceptionMapper(t.getClass());\nif (mapper == null) {\nthrow new InternalServerErrorException();\n}\nreturn mapper.toResponse(t);\n}", + "context_before": "class CompositeExceptionMapper implements ExceptionMapper {\n@Context\nProviders p;\n@SuppressWarnings(\"unchecked\")\n@Override\n}", + "context_after": "class CompositeExceptionMapper implements ExceptionMapper {\n@Context\nProviders p;\n@SuppressWarnings(\"unchecked\")\n@Override\n}" + }, + { + "comment": "`getInt64` returns a `Long`, so I believe that will allow you to remove the `castIfNecessary`?", + "method_body": "private static Expression value(Expression value, Schema.FieldType type) {\nif (type.getTypeName().isLogicalType()) {\nString logicalId = type.getLogicalType().getIdentifier();\nif (SqlTypes.TIME.getIdentifier().equals(logicalId)) {\nreturn nullOr(\nvalue, Expressions.divide(value, Expressions.constant(NANOS_PER_MILLISECOND)));\n} else if (SqlTypes.DATE.getIdentifier().equals(logicalId)) {\nreturn value;\n} else if (SqlTypes.DATETIME.getIdentifier().equals(logicalId)) {\nExpression dateValue =\nExpressions.call(value, \"getValue\", Expressions.constant(DateTime.DATE_FIELD_NAME));\nExpression timeValue =\nExpressions.call(value, \"getValue\", Expressions.constant(DateTime.TIME_FIELD_NAME));\nExpression returnValue =\nExpressions.add(\nExpressions.multiply(\nTypes.castIfNecessary(long.class, dateValue),\nExpressions.constant(MILLIS_PER_DAY)),\nExpressions.divide(\nTypes.castIfNecessary(long.class, timeValue),\nExpressions.constant(NANOS_PER_MILLISECOND)));\nreturn returnValue;\n} else if (!CharType.IDENTIFIER.equals(logicalId)) {\nthrow new UnsupportedOperationException(\n\"Unknown LogicalType \" + type.getLogicalType().getIdentifier());\n}\n} else if (type.getTypeName().isMapType()) {\nreturn nullOr(value, map(value, type.getMapValueType()));\n} else if (CalciteUtils.isDateTimeType(type)) {\nreturn nullOr(value, Expressions.call(value, \"getMillis\"));\n} else if (type.getTypeName().isCompositeType()) {\nreturn nullOr(value, row(value, type.getRowSchema()));\n} else if (type.getTypeName().isCollectionType()) {\nreturn nullOr(value, list(value, type.getCollectionElementType()));\n} else if (type.getTypeName() == TypeName.BYTES) {\nreturn nullOr(\nvalue, Expressions.new_(ByteString.class, Types.castIfNecessary(byte[].class, value)));\n}\nreturn value;\n}", + "target_code": "Types.castIfNecessary(long.class, dateValue),", + "method_body_after": "private static Expression value(Expression value, Schema.FieldType type) {\nif (type.getTypeName().isLogicalType()) {\nString logicalId = type.getLogicalType().getIdentifier();\nif (SqlTypes.TIME.getIdentifier().equals(logicalId)) {\nreturn nullOr(\nvalue, Expressions.divide(value, Expressions.constant(NANOS_PER_MILLISECOND)));\n} else if (SqlTypes.DATE.getIdentifier().equals(logicalId)) {\nreturn 
value;\n} else if (SqlTypes.DATETIME.getIdentifier().equals(logicalId)) {\nExpression dateValue =\nExpressions.call(value, \"getInt64\", Expressions.constant(DateTime.DATE_FIELD_NAME));\nExpression timeValue =\nExpressions.call(value, \"getInt64\", Expressions.constant(DateTime.TIME_FIELD_NAME));\nExpression returnValue =\nExpressions.add(\nExpressions.multiply(dateValue, Expressions.constant(MILLIS_PER_DAY)),\nExpressions.divide(timeValue, Expressions.constant(NANOS_PER_MILLISECOND)));\nreturn nullOr(value, returnValue);\n} else if (!CharType.IDENTIFIER.equals(logicalId)) {\nthrow new UnsupportedOperationException(\n\"Unknown LogicalType \" + type.getLogicalType().getIdentifier());\n}\n} else if (type.getTypeName().isMapType()) {\nreturn nullOr(value, map(value, type.getMapValueType()));\n} else if (CalciteUtils.isDateTimeType(type)) {\nreturn nullOr(value, Expressions.call(value, \"getMillis\"));\n} else if (type.getTypeName().isCompositeType()) {\nreturn nullOr(value, row(value, type.getRowSchema()));\n} else if (type.getTypeName().isCollectionType()) {\nreturn nullOr(value, list(value, type.getCollectionElementType()));\n} else if (type.getTypeName() == TypeName.BYTES) {\nreturn nullOr(\nvalue, Expressions.new_(ByteString.class, Types.castIfNecessary(byte[].class, value)));\n}\nreturn value;\n}", + "context_before": "class InputGetterImpl implements RexToLixTranslator.InputGetter {\nprivate static final Map TYPE_CONVERSION_MAP =\nImmutableMap.builder()\n.put(TypeName.BYTE, Byte.class)\n.put(TypeName.BYTES, byte[].class)\n.put(TypeName.INT16, Short.class)\n.put(TypeName.INT32, Integer.class)\n.put(TypeName.INT64, Long.class)\n.put(TypeName.DECIMAL, BigDecimal.class)\n.put(TypeName.FLOAT, Float.class)\n.put(TypeName.DOUBLE, Double.class)\n.put(TypeName.STRING, String.class)\n.put(TypeName.DATETIME, ReadableInstant.class)\n.put(TypeName.BOOLEAN, Boolean.class)\n.put(TypeName.MAP, Map.class)\n.put(TypeName.ARRAY, Collection.class)\n.put(TypeName.ITERABLE, Iterable.class)\n.put(TypeName.ROW, Row.class)\n.build();\nprivate static final Map LOGICAL_TYPE_TO_BASE_TYPE_MAP =\nImmutableMap.builder()\n.put(SqlTypes.DATE.getIdentifier(), Long.class)\n.put(SqlTypes.TIME.getIdentifier(), Long.class)\n.put(TimeWithLocalTzType.IDENTIFIER, ReadableInstant.class)\n.put(SqlTypes.DATETIME.getIdentifier(), Row.class)\n.put(CharType.IDENTIFIER, String.class)\n.build();\nprivate final Expression input;\nprivate final Schema inputSchema;\nprivate InputGetterImpl(Expression input, Schema inputSchema) {\nthis.input = input;\nthis.inputSchema = inputSchema;\n}\n@Override\npublic Expression field(BlockBuilder list, int index, Type storageType) {\nreturn value(list, index, storageType, input, inputSchema);\n}\nprivate static Expression value(\nBlockBuilder list, int index, Type storageType, Expression input, Schema schema) {\nif (index >= schema.getFieldCount() || index < 0) {\nthrow new IllegalArgumentException(\"Unable to find value\n}\nfinal Expression expression = list.append(list.newName(\"current\"), input);\nFieldType fromType = schema.getField(index).getType();\nClass convertTo = null;\nif (storageType == Object.class) {\nconvertTo = Object.class;\n} else if (fromType.getTypeName().isLogicalType()) {\nconvertTo = LOGICAL_TYPE_TO_BASE_TYPE_MAP.get(fromType.getLogicalType().getIdentifier());\n} else {\nconvertTo = TYPE_CONVERSION_MAP.get(fromType.getTypeName());\n}\nif (convertTo == null) {\nthrow new UnsupportedOperationException(\"Unable to get \" + fromType.getTypeName());\n}\nExpression value 
=\nExpressions.convert_(\nExpressions.call(\nexpression,\n\"getBaseValue\",\nExpressions.constant(index),\nExpressions.constant(convertTo)),\nconvertTo);\nreturn (storageType != Object.class) ? value(value, fromType) : value;\n}\nprivate static Expression list(Expression input, FieldType elementType) {\nParameterExpression value = Expressions.parameter(Object.class);\nBlockBuilder block = new BlockBuilder();\nblock.add(value(value, elementType));\nreturn Expressions.new_(\nWrappedList.class,\nImmutableList.of(Types.castIfNecessary(List.class, input)),\nImmutableList.of(\nExpressions.methodDecl(\nModifier.PUBLIC,\nObject.class,\n\"value\",\nImmutableList.of(value),\nblock.toBlock())));\n}\nprivate static Expression map(Expression input, FieldType mapValueType) {\nParameterExpression value = Expressions.parameter(Object.class);\nBlockBuilder block = new BlockBuilder();\nblock.add(value(value, mapValueType));\nreturn Expressions.new_(\nWrappedMap.class,\nImmutableList.of(Types.castIfNecessary(Map.class, input)),\nImmutableList.of(\nExpressions.methodDecl(\nModifier.PUBLIC,\nObject.class,\n\"value\",\nImmutableList.of(value),\nblock.toBlock())));\n}\nprivate static Expression row(Expression input, Schema schema) {\nParameterExpression row = Expressions.parameter(Row.class);\nParameterExpression index = Expressions.parameter(int.class);\nBlockBuilder body = new BlockBuilder(/* optimizing= */ false);\nfor (int i = 0; i < schema.getFieldCount(); i++) {\nBlockBuilder list = new BlockBuilder(/* optimizing= */ false, body);\nExpression returnValue = value(list, i, /* storageType= */ null, row, schema);\nlist.append(returnValue);\nbody.append(\n\"if i=\" + i,\nExpressions.block(\nExpressions.ifThen(\nExpressions.equal(index, Expressions.constant(i, int.class)), list.toBlock())));\n}\nbody.add(Expressions.throw_(Expressions.new_(IndexOutOfBoundsException.class)));\nreturn Expressions.new_(\nWrappedRow.class,\nImmutableList.of(Types.castIfNecessary(Row.class, input)),\nImmutableList.of(\nExpressions.methodDecl(\nModifier.PUBLIC,\nObject.class,\n\"field\",\nImmutableList.of(row, index),\nbody.toBlock())));\n}\n}", + "context_after": "class InputGetterImpl implements RexToLixTranslator.InputGetter {\nprivate static final Map TYPE_CONVERSION_MAP =\nImmutableMap.builder()\n.put(TypeName.BYTE, Byte.class)\n.put(TypeName.BYTES, byte[].class)\n.put(TypeName.INT16, Short.class)\n.put(TypeName.INT32, Integer.class)\n.put(TypeName.INT64, Long.class)\n.put(TypeName.DECIMAL, BigDecimal.class)\n.put(TypeName.FLOAT, Float.class)\n.put(TypeName.DOUBLE, Double.class)\n.put(TypeName.STRING, String.class)\n.put(TypeName.DATETIME, ReadableInstant.class)\n.put(TypeName.BOOLEAN, Boolean.class)\n.put(TypeName.MAP, Map.class)\n.put(TypeName.ARRAY, Collection.class)\n.put(TypeName.ITERABLE, Iterable.class)\n.put(TypeName.ROW, Row.class)\n.build();\nprivate static final Map LOGICAL_TYPE_TO_BASE_TYPE_MAP =\nImmutableMap.builder()\n.put(SqlTypes.DATE.getIdentifier(), Long.class)\n.put(SqlTypes.TIME.getIdentifier(), Long.class)\n.put(TimeWithLocalTzType.IDENTIFIER, ReadableInstant.class)\n.put(SqlTypes.DATETIME.getIdentifier(), Row.class)\n.put(CharType.IDENTIFIER, String.class)\n.build();\nprivate final Expression input;\nprivate final Schema inputSchema;\nprivate InputGetterImpl(Expression input, Schema inputSchema) {\nthis.input = input;\nthis.inputSchema = inputSchema;\n}\n@Override\npublic Expression field(BlockBuilder list, int index, Type storageType) {\nreturn value(list, index, storageType, input, 
inputSchema);\n}\nprivate static Expression value(\nBlockBuilder list, int index, Type storageType, Expression input, Schema schema) {\nif (index >= schema.getFieldCount() || index < 0) {\nthrow new IllegalArgumentException(\"Unable to find value\n}\nfinal Expression expression = list.append(list.newName(\"current\"), input);\nFieldType fromType = schema.getField(index).getType();\nClass convertTo = null;\nif (storageType == Object.class) {\nconvertTo = Object.class;\n} else if (fromType.getTypeName().isLogicalType()) {\nconvertTo = LOGICAL_TYPE_TO_BASE_TYPE_MAP.get(fromType.getLogicalType().getIdentifier());\n} else {\nconvertTo = TYPE_CONVERSION_MAP.get(fromType.getTypeName());\n}\nif (convertTo == null) {\nthrow new UnsupportedOperationException(\"Unable to get \" + fromType.getTypeName());\n}\nExpression value =\nExpressions.convert_(\nExpressions.call(\nexpression,\n\"getBaseValue\",\nExpressions.constant(index),\nExpressions.constant(convertTo)),\nconvertTo);\nreturn (storageType != Object.class) ? value(value, fromType) : value;\n}\nprivate static Expression list(Expression input, FieldType elementType) {\nParameterExpression value = Expressions.parameter(Object.class);\nBlockBuilder block = new BlockBuilder();\nblock.add(value(value, elementType));\nreturn Expressions.new_(\nWrappedList.class,\nImmutableList.of(Types.castIfNecessary(List.class, input)),\nImmutableList.of(\nExpressions.methodDecl(\nModifier.PUBLIC,\nObject.class,\n\"value\",\nImmutableList.of(value),\nblock.toBlock())));\n}\nprivate static Expression map(Expression input, FieldType mapValueType) {\nParameterExpression value = Expressions.parameter(Object.class);\nBlockBuilder block = new BlockBuilder();\nblock.add(value(value, mapValueType));\nreturn Expressions.new_(\nWrappedMap.class,\nImmutableList.of(Types.castIfNecessary(Map.class, input)),\nImmutableList.of(\nExpressions.methodDecl(\nModifier.PUBLIC,\nObject.class,\n\"value\",\nImmutableList.of(value),\nblock.toBlock())));\n}\nprivate static Expression row(Expression input, Schema schema) {\nParameterExpression row = Expressions.parameter(Row.class);\nParameterExpression index = Expressions.parameter(int.class);\nBlockBuilder body = new BlockBuilder(/* optimizing= */ false);\nfor (int i = 0; i < schema.getFieldCount(); i++) {\nBlockBuilder list = new BlockBuilder(/* optimizing= */ false, body);\nExpression returnValue = value(list, i, /* storageType= */ null, row, schema);\nlist.append(returnValue);\nbody.append(\n\"if i=\" + i,\nExpressions.block(\nExpressions.ifThen(\nExpressions.equal(index, Expressions.constant(i, int.class)), list.toBlock())));\n}\nbody.add(Expressions.throw_(Expressions.new_(IndexOutOfBoundsException.class)));\nreturn Expressions.new_(\nWrappedRow.class,\nImmutableList.of(Types.castIfNecessary(Row.class, input)),\nImmutableList.of(\nExpressions.methodDecl(\nModifier.PUBLIC,\nObject.class,\n\"field\",\nImmutableList.of(row, index),\nbody.toBlock())));\n}\n}" + }, + { + "comment": "These 2 can be updated to hasValue as well.", + "method_body": "void testConcurrentSplitAssignmentForMultipleHosts() throws InterruptedException {\nfinal int NUM_THREADS = 10;\nfinal int NUM_SPLITS = 500;\nfinal int SUM_OF_IDS = (NUM_SPLITS - 1) * (NUM_SPLITS) / 2;\nfinal String[] hosts = {\"host1\", \"host1\", \"host1\", \"host2\", \"host2\", \"host3\"};\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, hosts[i % hosts.length]));\n}\nfinal LocatableInputSplitAssigner ia = new 
LocatableInputSplitAssigner(splits);\nfinal AtomicInteger splitsRetrieved = new AtomicInteger(0);\nfinal AtomicInteger sumOfIds = new AtomicInteger(0);\nRunnable retriever =\n() -> {\nfinal String threadHost = hosts[(int) (Math.random() * hosts.length)];\nLocatableInputSplit split;\nwhile ((split = ia.getNextInputSplit(threadHost, 0)) != null) {\nsplitsRetrieved.incrementAndGet();\nsumOfIds.addAndGet(split.getSplitNumber());\n}\n};\nThread[] threads = new Thread[NUM_THREADS];\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i] = new Thread(retriever);\nthreads[i].setDaemon(true);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].start();\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].join(5000);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nassertThat(threads[i].isAlive()).isFalse();\n}\nassertThat(splitsRetrieved.get()).isEqualTo(NUM_SPLITS);\nassertThat(sumOfIds.get()).isEqualTo(SUM_OF_IDS);\nassertThat(ia.getNextInputSplit(\"testhost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments())\n.isGreaterThanOrEqualTo(NUM_SPLITS / hosts.length);\n}", + "target_code": "assertThat(sumOfIds.get()).isEqualTo(SUM_OF_IDS);", + "method_body_after": "void testConcurrentSplitAssignmentForMultipleHosts() throws InterruptedException {\nfinal int NUM_THREADS = 10;\nfinal int NUM_SPLITS = 500;\nfinal int SUM_OF_IDS = (NUM_SPLITS - 1) * (NUM_SPLITS) / 2;\nfinal String[] hosts = {\"host1\", \"host1\", \"host1\", \"host2\", \"host2\", \"host3\"};\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, hosts[i % hosts.length]));\n}\nfinal LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nfinal AtomicInteger splitsRetrieved = new AtomicInteger(0);\nfinal AtomicInteger sumOfIds = new AtomicInteger(0);\nRunnable retriever =\n() -> {\nfinal String threadHost = hosts[(int) (Math.random() * hosts.length)];\nLocatableInputSplit split;\nwhile ((split = ia.getNextInputSplit(threadHost, 0)) != null) {\nsplitsRetrieved.incrementAndGet();\nsumOfIds.addAndGet(split.getSplitNumber());\n}\n};\nThread[] threads = new Thread[NUM_THREADS];\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i] = new Thread(retriever);\nthreads[i].setDaemon(true);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].start();\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].join(5000);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nassertThat(threads[i].isAlive()).isFalse();\n}\nassertThat(splitsRetrieved).hasValue(NUM_SPLITS);\nassertThat(sumOfIds).hasValue(SUM_OF_IDS);\nassertThat(ia.getNextInputSplit(\"testhost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments())\n.isGreaterThanOrEqualTo(NUM_SPLITS / hosts.length);\n}", + "context_before": "class LocatableSplitAssignerTest {\n@Test\nvoid testSerialSplitAssignmentWithNullHost() {\nfinal int NUM_SPLITS = 50;\nfinal String[][] hosts = new String[][] {new String[] {\"localhost\"}, new String[0], null};\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, hosts[i % 3]));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nInputSplit is = null;\nwhile ((is = ia.getNextInputSplit(null, 0)) != null) {\nassertThat(splits.remove(is)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_SPLITS);\nassertThat(ia.getNumberOfLocalAssignments()).isZero();\n}\n@Test\nvoid 
testSerialSplitAssignmentAllForSameHost() {\nfinal int NUM_SPLITS = 50;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, \"testhost\"));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nInputSplit is = null;\nwhile ((is = ia.getNextInputSplit(\"testhost\", 0)) != null) {\nassertThat(splits.remove(is)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isZero();\nassertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_SPLITS);\n}\n@Test\nvoid testSerialSplitAssignmentAllForRemoteHost() {\nfinal String[] hosts = {\"host1\", \"host1\", \"host1\", \"host2\", \"host2\", \"host3\"};\nfinal int NUM_SPLITS = 10 * hosts.length;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, hosts[i % hosts.length]));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nInputSplit is = null;\nwhile ((is = ia.getNextInputSplit(\"testhost\", 0)) != null) {\nassertThat(splits.remove(is)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"anotherHost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_SPLITS);\nassertThat(ia.getNumberOfLocalAssignments()).isZero();\n}\n@Test\nvoid testSerialSplitAssignmentSomeForRemoteHost() {\nfinal String[] hosts = {\"host1\", \"host2\", \"host3\"};\nfinal int NUM_LOCAL_HOST1_SPLITS = 20;\nfinal int NUM_LOCAL_HOST2_SPLITS = 10;\nfinal int NUM_REMOTE_SPLITS = 30;\nfinal int NUM_LOCAL_SPLITS = NUM_LOCAL_HOST1_SPLITS + NUM_LOCAL_HOST2_SPLITS;\nint splitCnt = 0;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_LOCAL_HOST1_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, \"host1\"));\n}\nfor (int i = 0; i < NUM_LOCAL_HOST2_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, \"host2\"));\n}\nfor (int i = 0; i < NUM_REMOTE_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, \"remoteHost\"));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nInputSplit is = null;\nint i = 0;\nwhile ((is = ia.getNextInputSplit(hosts[i++ % hosts.length], 0)) != null) {\nassertThat(splits.remove(is)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"anotherHost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_REMOTE_SPLITS);\nassertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_LOCAL_SPLITS);\n}\n@Test\nvoid testSerialSplitAssignmentMultiLocalHost() {\nfinal String[] localHosts = {\"local1\", \"local2\", \"local3\"};\nfinal String[] remoteHosts = {\"remote1\", \"remote2\", \"remote3\"};\nfinal String[] requestingHosts = {\"local3\", \"local2\", \"local1\", \"other\"};\nfinal int NUM_THREE_LOCAL_SPLITS = 10;\nfinal int NUM_TWO_LOCAL_SPLITS = 10;\nfinal int NUM_ONE_LOCAL_SPLITS = 10;\nfinal int NUM_LOCAL_SPLITS = 30;\nfinal int NUM_REMOTE_SPLITS = 10;\nfinal int NUM_SPLITS = 40;\nString[] threeLocalHosts = localHosts;\nString[] twoLocalHosts = {localHosts[0], localHosts[1], remoteHosts[0]};\nString[] oneLocalHost = {localHosts[0], remoteHosts[0], remoteHosts[1]};\nString[] noLocalHost = remoteHosts;\nint splitCnt = 0;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_THREE_LOCAL_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, threeLocalHosts));\n}\nfor (int i = 0; i < NUM_TWO_LOCAL_SPLITS; i++) {\nsplits.add(new 
LocatableInputSplit(splitCnt++, twoLocalHosts));\n}\nfor (int i = 0; i < NUM_ONE_LOCAL_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, oneLocalHost));\n}\nfor (int i = 0; i < NUM_REMOTE_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, noLocalHost));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nLocatableInputSplit is = null;\nfor (int i = 0; i < NUM_SPLITS; i++) {\nString host = requestingHosts[i % requestingHosts.length];\nis = ia.getNextInputSplit(host, 0);\nassertThat(is).isNotNull();\nassertThat(splits.remove(is)).isTrue();\nif (host.equals(localHosts[0])) {\nassertThat(is.getHostnames()).isEqualTo(oneLocalHost);\n} else if (host.equals(localHosts[1])) {\nassertThat(is.getHostnames()).isEqualTo(twoLocalHosts);\n} else if (host.equals(localHosts[2])) {\nassertThat(is.getHostnames()).isEqualTo(threeLocalHosts);\n} else {\nassertThat(is.getHostnames()).isEqualTo(noLocalHost);\n}\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"anotherHost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_REMOTE_SPLITS);\nassertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_LOCAL_SPLITS);\n}\n@Test\nvoid testSerialSplitAssignmentMixedLocalHost() {\nfinal String[] hosts = {\"host1\", \"host1\", \"host1\", \"host2\", \"host2\", \"host3\"};\nfinal int NUM_SPLITS = 10 * hosts.length;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, hosts[i % hosts.length]));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nInputSplit is = null;\nint i = 0;\nwhile ((is = ia.getNextInputSplit(hosts[i++ % hosts.length], 0)) != null) {\nassertThat(splits.remove(is)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"anotherHost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isZero();\nassertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_SPLITS);\n}\n@Test\nvoid testConcurrentSplitAssignmentNullHost() throws InterruptedException {\nfinal int NUM_THREADS = 10;\nfinal int NUM_SPLITS = 500;\nfinal int SUM_OF_IDS = (NUM_SPLITS - 1) * (NUM_SPLITS) / 2;\nfinal String[][] hosts = new String[][] {new String[] {\"localhost\"}, new String[0], null};\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, hosts[i % 3]));\n}\nfinal LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nfinal AtomicInteger splitsRetrieved = new AtomicInteger(0);\nfinal AtomicInteger sumOfIds = new AtomicInteger(0);\nRunnable retriever =\n() -> {\nLocatableInputSplit split;\nwhile ((split = ia.getNextInputSplit(null, 0)) != null) {\nsplitsRetrieved.incrementAndGet();\nsumOfIds.addAndGet(split.getSplitNumber());\n}\n};\nThread[] threads = new Thread[NUM_THREADS];\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i] = new Thread(retriever);\nthreads[i].setDaemon(true);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].start();\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].join(5000);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nassertThat(threads[i].isAlive()).isFalse();\n}\nassertThat(splitsRetrieved.get()).isEqualTo(NUM_SPLITS);\nassertThat(sumOfIds.get()).isEqualTo(SUM_OF_IDS);\nassertThat(ia.getNextInputSplit(\"\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_SPLITS);\nassertThat(ia.getNumberOfLocalAssignments()).isZero();\n}\n@Test\nvoid testConcurrentSplitAssignmentForSingleHost() throws 
InterruptedException {\nfinal int NUM_THREADS = 10;\nfinal int NUM_SPLITS = 500;\nfinal int SUM_OF_IDS = (NUM_SPLITS - 1) * (NUM_SPLITS) / 2;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, \"testhost\"));\n}\nfinal LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nfinal AtomicInteger splitsRetrieved = new AtomicInteger(0);\nfinal AtomicInteger sumOfIds = new AtomicInteger(0);\nRunnable retriever =\n() -> {\nLocatableInputSplit split;\nwhile ((split = ia.getNextInputSplit(\"testhost\", 0)) != null) {\nsplitsRetrieved.incrementAndGet();\nsumOfIds.addAndGet(split.getSplitNumber());\n}\n};\nThread[] threads = new Thread[NUM_THREADS];\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i] = new Thread(retriever);\nthreads[i].setDaemon(true);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].start();\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].join(5000);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nassertThat(threads[i].isAlive()).isFalse();\n}\nassertThat(splitsRetrieved.get()).isEqualTo(NUM_SPLITS);\nassertThat(sumOfIds.get()).isEqualTo(SUM_OF_IDS);\nassertThat(ia.getNextInputSplit(\"testhost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isZero();\nassertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_SPLITS);\n}\n@Test\n@Test\nvoid testAssignmentOfManySplitsRandomly() {\nlong seed = Calendar.getInstance().getTimeInMillis();\nfinal int NUM_SPLITS = 65536;\nfinal String[] splitHosts = new String[256];\nfinal String[] requestingHosts = new String[256];\nfinal Random rand = new Random(seed);\nfor (int i = 0; i < splitHosts.length; i++) {\nsplitHosts[i] = \"localHost\" + i;\n}\nfor (int i = 0; i < requestingHosts.length; i++) {\nif (i % 2 == 0) {\nrequestingHosts[i] = \"localHost\" + i;\n} else {\nrequestingHosts[i] = \"remoteHost\" + i;\n}\n}\nString[] stringArray = {};\nSet hosts = new HashSet<>();\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nwhile (hosts.size() < 3) {\nhosts.add(splitHosts[rand.nextInt(splitHosts.length)]);\n}\nsplits.add(new LocatableInputSplit(i, hosts.toArray(stringArray)));\nhosts.clear();\n}\nfinal LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nfor (int i = 0; i < NUM_SPLITS; i++) {\nLocatableInputSplit split =\nia.getNextInputSplit(requestingHosts[rand.nextInt(requestingHosts.length)], 0);\nassertThat(split).isNotNull();\nassertThat(splits.remove(split)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"testHost\", 0)).isNull();\n}\n}", + "context_after": "class LocatableSplitAssignerTest {\n@Test\nvoid testSerialSplitAssignmentWithNullHost() {\nfinal int NUM_SPLITS = 50;\nfinal String[][] hosts = new String[][] {new String[] {\"localhost\"}, new String[0], null};\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, hosts[i % 3]));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nInputSplit is = null;\nwhile ((is = ia.getNextInputSplit(null, 0)) != null) {\nassertThat(splits.remove(is)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_SPLITS);\nassertThat(ia.getNumberOfLocalAssignments()).isZero();\n}\n@Test\nvoid testSerialSplitAssignmentAllForSameHost() {\nfinal int NUM_SPLITS = 50;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new 
LocatableInputSplit(i, \"testhost\"));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nInputSplit is = null;\nwhile ((is = ia.getNextInputSplit(\"testhost\", 0)) != null) {\nassertThat(splits.remove(is)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isZero();\nassertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_SPLITS);\n}\n@Test\nvoid testSerialSplitAssignmentAllForRemoteHost() {\nfinal String[] hosts = {\"host1\", \"host1\", \"host1\", \"host2\", \"host2\", \"host3\"};\nfinal int NUM_SPLITS = 10 * hosts.length;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, hosts[i % hosts.length]));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nInputSplit is = null;\nwhile ((is = ia.getNextInputSplit(\"testhost\", 0)) != null) {\nassertThat(splits.remove(is)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"anotherHost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_SPLITS);\nassertThat(ia.getNumberOfLocalAssignments()).isZero();\n}\n@Test\nvoid testSerialSplitAssignmentSomeForRemoteHost() {\nfinal String[] hosts = {\"host1\", \"host2\", \"host3\"};\nfinal int NUM_LOCAL_HOST1_SPLITS = 20;\nfinal int NUM_LOCAL_HOST2_SPLITS = 10;\nfinal int NUM_REMOTE_SPLITS = 30;\nfinal int NUM_LOCAL_SPLITS = NUM_LOCAL_HOST1_SPLITS + NUM_LOCAL_HOST2_SPLITS;\nint splitCnt = 0;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_LOCAL_HOST1_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, \"host1\"));\n}\nfor (int i = 0; i < NUM_LOCAL_HOST2_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, \"host2\"));\n}\nfor (int i = 0; i < NUM_REMOTE_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, \"remoteHost\"));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nInputSplit is = null;\nint i = 0;\nwhile ((is = ia.getNextInputSplit(hosts[i++ % hosts.length], 0)) != null) {\nassertThat(splits.remove(is)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"anotherHost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_REMOTE_SPLITS);\nassertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_LOCAL_SPLITS);\n}\n@Test\nvoid testSerialSplitAssignmentMultiLocalHost() {\nfinal String[] localHosts = {\"local1\", \"local2\", \"local3\"};\nfinal String[] remoteHosts = {\"remote1\", \"remote2\", \"remote3\"};\nfinal String[] requestingHosts = {\"local3\", \"local2\", \"local1\", \"other\"};\nfinal int NUM_THREE_LOCAL_SPLITS = 10;\nfinal int NUM_TWO_LOCAL_SPLITS = 10;\nfinal int NUM_ONE_LOCAL_SPLITS = 10;\nfinal int NUM_LOCAL_SPLITS = 30;\nfinal int NUM_REMOTE_SPLITS = 10;\nfinal int NUM_SPLITS = 40;\nString[] threeLocalHosts = localHosts;\nString[] twoLocalHosts = {localHosts[0], localHosts[1], remoteHosts[0]};\nString[] oneLocalHost = {localHosts[0], remoteHosts[0], remoteHosts[1]};\nString[] noLocalHost = remoteHosts;\nint splitCnt = 0;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_THREE_LOCAL_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, threeLocalHosts));\n}\nfor (int i = 0; i < NUM_TWO_LOCAL_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, twoLocalHosts));\n}\nfor (int i = 0; i < NUM_ONE_LOCAL_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, oneLocalHost));\n}\nfor (int i = 
0; i < NUM_REMOTE_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(splitCnt++, noLocalHost));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nLocatableInputSplit is = null;\nfor (int i = 0; i < NUM_SPLITS; i++) {\nString host = requestingHosts[i % requestingHosts.length];\nis = ia.getNextInputSplit(host, 0);\nassertThat(is).isNotNull();\nassertThat(splits.remove(is)).isTrue();\nif (host.equals(localHosts[0])) {\nassertThat(is.getHostnames()).isEqualTo(oneLocalHost);\n} else if (host.equals(localHosts[1])) {\nassertThat(is.getHostnames()).isEqualTo(twoLocalHosts);\n} else if (host.equals(localHosts[2])) {\nassertThat(is.getHostnames()).isEqualTo(threeLocalHosts);\n} else {\nassertThat(is.getHostnames()).isEqualTo(noLocalHost);\n}\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"anotherHost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_REMOTE_SPLITS);\nassertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_LOCAL_SPLITS);\n}\n@Test\nvoid testSerialSplitAssignmentMixedLocalHost() {\nfinal String[] hosts = {\"host1\", \"host1\", \"host1\", \"host2\", \"host2\", \"host3\"};\nfinal int NUM_SPLITS = 10 * hosts.length;\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, hosts[i % hosts.length]));\n}\nLocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nInputSplit is = null;\nint i = 0;\nwhile ((is = ia.getNextInputSplit(hosts[i++ % hosts.length], 0)) != null) {\nassertThat(splits.remove(is)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"anotherHost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isZero();\nassertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_SPLITS);\n}\n@Test\nvoid testConcurrentSplitAssignmentNullHost() throws InterruptedException {\nfinal int NUM_THREADS = 10;\nfinal int NUM_SPLITS = 500;\nfinal int SUM_OF_IDS = (NUM_SPLITS - 1) * (NUM_SPLITS) / 2;\nfinal String[][] hosts = new String[][] {new String[] {\"localhost\"}, new String[0], null};\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, hosts[i % 3]));\n}\nfinal LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nfinal AtomicInteger splitsRetrieved = new AtomicInteger(0);\nfinal AtomicInteger sumOfIds = new AtomicInteger(0);\nRunnable retriever =\n() -> {\nLocatableInputSplit split;\nwhile ((split = ia.getNextInputSplit(null, 0)) != null) {\nsplitsRetrieved.incrementAndGet();\nsumOfIds.addAndGet(split.getSplitNumber());\n}\n};\nThread[] threads = new Thread[NUM_THREADS];\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i] = new Thread(retriever);\nthreads[i].setDaemon(true);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].start();\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].join(5000);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nassertThat(threads[i].isAlive()).isFalse();\n}\nassertThat(splitsRetrieved).hasValue(NUM_SPLITS);\nassertThat(sumOfIds).hasValue(SUM_OF_IDS);\nassertThat(ia.getNextInputSplit(\"\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_SPLITS);\nassertThat(ia.getNumberOfLocalAssignments()).isZero();\n}\n@Test\nvoid testConcurrentSplitAssignmentForSingleHost() throws InterruptedException {\nfinal int NUM_THREADS = 10;\nfinal int NUM_SPLITS = 500;\nfinal int SUM_OF_IDS = (NUM_SPLITS - 1) * (NUM_SPLITS) / 2;\nSet splits = new HashSet<>();\nfor (int i = 0; i < 
NUM_SPLITS; i++) {\nsplits.add(new LocatableInputSplit(i, \"testhost\"));\n}\nfinal LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nfinal AtomicInteger splitsRetrieved = new AtomicInteger(0);\nfinal AtomicInteger sumOfIds = new AtomicInteger(0);\nRunnable retriever =\n() -> {\nLocatableInputSplit split;\nwhile ((split = ia.getNextInputSplit(\"testhost\", 0)) != null) {\nsplitsRetrieved.incrementAndGet();\nsumOfIds.addAndGet(split.getSplitNumber());\n}\n};\nThread[] threads = new Thread[NUM_THREADS];\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i] = new Thread(retriever);\nthreads[i].setDaemon(true);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].start();\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nthreads[i].join(5000);\n}\nfor (int i = 0; i < NUM_THREADS; i++) {\nassertThat(threads[i].isAlive()).isFalse();\n}\nassertThat(splitsRetrieved).hasValue(NUM_SPLITS);\nassertThat(sumOfIds).hasValue(SUM_OF_IDS);\nassertThat(ia.getNextInputSplit(\"testhost\", 0)).isNull();\nassertThat(ia.getNumberOfRemoteAssignments()).isZero();\nassertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_SPLITS);\n}\n@Test\n@Test\nvoid testAssignmentOfManySplitsRandomly() {\nlong seed = Calendar.getInstance().getTimeInMillis();\nfinal int NUM_SPLITS = 65536;\nfinal String[] splitHosts = new String[256];\nfinal String[] requestingHosts = new String[256];\nfinal Random rand = new Random(seed);\nfor (int i = 0; i < splitHosts.length; i++) {\nsplitHosts[i] = \"localHost\" + i;\n}\nfor (int i = 0; i < requestingHosts.length; i++) {\nif (i % 2 == 0) {\nrequestingHosts[i] = \"localHost\" + i;\n} else {\nrequestingHosts[i] = \"remoteHost\" + i;\n}\n}\nString[] stringArray = {};\nSet hosts = new HashSet<>();\nSet splits = new HashSet<>();\nfor (int i = 0; i < NUM_SPLITS; i++) {\nwhile (hosts.size() < 3) {\nhosts.add(splitHosts[rand.nextInt(splitHosts.length)]);\n}\nsplits.add(new LocatableInputSplit(i, hosts.toArray(stringArray)));\nhosts.clear();\n}\nfinal LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);\nfor (int i = 0; i < NUM_SPLITS; i++) {\nLocatableInputSplit split =\nia.getNextInputSplit(requestingHosts[rand.nextInt(requestingHosts.length)], 0);\nassertThat(split).isNotNull();\nassertThat(splits.remove(split)).isTrue();\n}\nassertThat(splits).isEmpty();\nassertThat(ia.getNextInputSplit(\"testHost\", 0)).isNull();\n}\n}" + }, + { + "comment": "Well, it will work once https://github.com/quarkusio/quarkus/pull/2629 is merged. 
Tests are not CDI beans ATM.", + "method_body": "public void persist(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(\"Transaction is not active\");\n}\nemr.em.persist(entity);\n}\n}", + "target_code": "throw new TransactionRequiredException(\"Transaction is not active\");", + "method_body_after": "public void persist(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(TRANSACTION_IS_NOT_ACTIVE);\n}\nemr.em.persist(entity);\n}\n}", + "context_before": "class TransactionScopedEntityManager implements EntityManager {\nprivate final TransactionManager transactionManager;\nprivate final TransactionSynchronizationRegistry tsr;\nprivate final EntityManagerFactory emf;\nprivate final String unitName;\nprivate static final Object transactionKey = new Object();\nprivate final Instance requestScopedEms;\npublic TransactionScopedEntityManager(TransactionManager transactionManager,\nTransactionSynchronizationRegistry tsr,\nEntityManagerFactory emf,\nString unitName, Instance requestScopedEms) {\nthis.transactionManager = transactionManager;\nthis.tsr = tsr;\nthis.emf = emf;\nthis.unitName = unitName;\nthis.requestScopedEms = requestScopedEms;\n}\nEntityManagerResult getEntityManager() {\nif (isInTransaction()) {\nEntityManager em = (EntityManager) tsr.getResource(transactionKey);\nif (em != null) {\nreturn new EntityManagerResult(em, false, true);\n}\nEntityManager newEm = emf.createEntityManager();\nnewEm.joinTransaction();\ntsr.putResource(transactionKey, newEm);\ntsr.registerInterposedSynchronization(new Synchronization() {\n@Override\npublic void beforeCompletion() {\nnewEm.flush();\nnewEm.close();\n}\n@Override\npublic void afterCompletion(int i) {\nnewEm.close();\n}\n});\nreturn new EntityManagerResult(newEm, false, true);\n} else {\nRequestScopedEntityManagerHolder requestScopedEms = this.requestScopedEms.get();\nreturn new EntityManagerResult(requestScopedEms.getOrCreateEntityManager(unitName, emf), false, false);\n}\n}\nprivate boolean isInTransaction() {\ntry {\nswitch (transactionManager.getStatus()) {\ncase Status.STATUS_ACTIVE:\ncase Status.STATUS_COMMITTING:\ncase Status.STATUS_MARKED_ROLLBACK:\ncase Status.STATUS_PREPARED:\ncase Status.STATUS_PREPARING:\nreturn true;\ndefault:\nreturn false;\n}\n} catch (Exception e) {\nthrow new RuntimeException(e);\n}\n}\n@Override\n@Override\npublic T merge(T entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(\"Transaction is not active\");\n}\nreturn emr.em.merge(entity);\n}\n}\n@Override\npublic void remove(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(\"Transaction is not active\");\n}\nemr.em.remove(entity);\n}\n}\n@Override\npublic T find(Class entityClass, Object primaryKey) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.find(entityClass, primaryKey);\n}\n}\n@Override\npublic T find(Class entityClass, Object primaryKey, Map properties) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.find(entityClass, primaryKey, properties);\n}\n}\n@Override\npublic T find(Class entityClass, Object primaryKey, LockModeType lockMode) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.find(entityClass, primaryKey, 
lockMode);\n}\n}\n@Override\npublic T find(Class entityClass, Object primaryKey, LockModeType lockMode, Map properties) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.find(entityClass, primaryKey, lockMode, properties);\n}\n}\n@Override\npublic T getReference(Class entityClass, Object primaryKey) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getReference(entityClass, primaryKey);\n}\n}\n@Override\npublic void flush() {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(\"Transaction is not active\");\n}\nemr.em.flush();\n}\n}\n@Override\npublic void setFlushMode(FlushModeType flushMode) {\ntry (EntityManagerResult emr = getEntityManager()) {\nemr.em.setFlushMode(flushMode);\n}\n}\n@Override\npublic FlushModeType getFlushMode() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getFlushMode();\n}\n}\n@Override\npublic void lock(Object entity, LockModeType lockMode) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(\"Transaction is not active\");\n}\nemr.em.lock(entity, lockMode);\n}\n}\n@Override\npublic void lock(Object entity, LockModeType lockMode, Map properties) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(\"Transaction is not active\");\n}\nemr.em.lock(entity, lockMode, properties);\n}\n}\n@Override\npublic void refresh(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(\"Transaction is not active\");\n}\nemr.em.refresh(entity);\n}\n}\n@Override\npublic void refresh(Object entity, Map properties) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(\"Transaction is not active\");\n}\nemr.em.refresh(entity, properties);\n}\n}\n@Override\npublic void refresh(Object entity, LockModeType lockMode) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(\"Transaction is not active\");\n}\nemr.em.refresh(entity, lockMode);\n}\n}\n@Override\npublic void refresh(Object entity, LockModeType lockMode, Map properties) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(\"Transaction is not active\");\n}\nemr.em.refresh(entity, lockMode, properties);\n}\n}\n@Override\npublic void clear() {\ntry (EntityManagerResult emr = getEntityManager()) {\nemr.em.clear();\n}\n}\n@Override\npublic void detach(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nemr.em.detach(entity);\n}\n}\n@Override\npublic boolean contains(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.contains(entity);\n}\n}\n@Override\npublic LockModeType getLockMode(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getLockMode(entity);\n}\n}\n@Override\npublic void setProperty(String propertyName, Object value) {\ntry (EntityManagerResult emr = getEntityManager()) {\nemr.em.setProperty(propertyName, value);\n}\n}\n@Override\npublic Map getProperties() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getProperties();\n}\n}\n@Override\npublic Query createQuery(String qlString) {\ntry (EntityManagerResult emr = 
getEntityManager()) {\nreturn emr.em.createQuery(qlString);\n}\n}\n@Override\npublic TypedQuery createQuery(CriteriaQuery criteriaQuery) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createQuery(criteriaQuery);\n}\n}\n@Override\npublic Query createQuery(CriteriaUpdate updateQuery) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createQuery(updateQuery);\n}\n}\n@Override\npublic Query createQuery(CriteriaDelete deleteQuery) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createQuery(deleteQuery);\n}\n}\n@Override\npublic TypedQuery createQuery(String qlString, Class resultClass) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createQuery(qlString, resultClass);\n}\n}\n@Override\npublic Query createNamedQuery(String name) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNamedQuery(name);\n}\n}\n@Override\npublic TypedQuery createNamedQuery(String name, Class resultClass) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNamedQuery(name, resultClass);\n}\n}\n@Override\npublic Query createNativeQuery(String sqlString) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNativeQuery(sqlString);\n}\n}\n@Override\npublic Query createNativeQuery(String sqlString, Class resultClass) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNativeQuery(sqlString, resultClass);\n}\n}\n@Override\npublic Query createNativeQuery(String sqlString, String resultSetMapping) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNativeQuery(sqlString, resultSetMapping);\n}\n}\n@Override\npublic StoredProcedureQuery createNamedStoredProcedureQuery(String name) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNamedStoredProcedureQuery(name);\n}\n}\n@Override\npublic StoredProcedureQuery createStoredProcedureQuery(String procedureName) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createStoredProcedureQuery(procedureName);\n}\n}\n@Override\npublic StoredProcedureQuery createStoredProcedureQuery(String procedureName, Class... resultClasses) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createStoredProcedureQuery(procedureName, resultClasses);\n}\n}\n@Override\npublic StoredProcedureQuery createStoredProcedureQuery(String procedureName, String... 
resultSetMappings) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createStoredProcedureQuery(procedureName, resultSetMappings);\n}\n}\n@Override\npublic void joinTransaction() {\ntry (EntityManagerResult emr = getEntityManager()) {\nemr.em.joinTransaction();\n}\n}\n@Override\npublic boolean isJoinedToTransaction() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.isJoinedToTransaction();\n}\n}\n@Override\npublic T unwrap(Class cls) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.unwrap(cls);\n}\n}\n@Override\npublic Object getDelegate() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getDelegate();\n}\n}\n@Override\npublic void close() {\nthrow new IllegalStateException(\"Not supported for transaction scoped entity managers\");\n}\n@Override\npublic boolean isOpen() {\nreturn true;\n}\n@Override\npublic EntityTransaction getTransaction() {\nthrow new IllegalStateException(\"Not supported for JTA entity managers\");\n}\n@Override\npublic EntityManagerFactory getEntityManagerFactory() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getEntityManagerFactory();\n}\n}\n@Override\npublic CriteriaBuilder getCriteriaBuilder() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getCriteriaBuilder();\n}\n}\n@Override\npublic Metamodel getMetamodel() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getMetamodel();\n}\n}\n@Override\npublic EntityGraph createEntityGraph(Class rootType) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createEntityGraph(rootType);\n}\n}\n@Override\npublic EntityGraph createEntityGraph(String graphName) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createEntityGraph(graphName);\n}\n}\n@Override\npublic EntityGraph getEntityGraph(String graphName) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getEntityGraph(graphName);\n}\n}\n@Override\npublic List> getEntityGraphs(Class entityClass) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getEntityGraphs(entityClass);\n}\n}\nstatic class EntityManagerResult implements AutoCloseable {\nfinal EntityManager em;\nfinal boolean closeOnEnd;\nfinal boolean allowModification;\nEntityManagerResult(EntityManager em, boolean closeOnEnd, boolean allowModification) {\nthis.em = em;\nthis.closeOnEnd = closeOnEnd;\nthis.allowModification = allowModification;\n}\n@Override\npublic void close() {\nif (closeOnEnd) {\nem.close();\n}\n}\n}\n}", + "context_after": "class TransactionScopedEntityManager implements EntityManager {\nprotected static final String TRANSACTION_IS_NOT_ACTIVE = \"Transaction is not active, consider adding @Transactional to your method to automatically activate one.\";\nprivate final TransactionManager transactionManager;\nprivate final TransactionSynchronizationRegistry tsr;\nprivate final EntityManagerFactory emf;\nprivate final String unitName;\nprivate static final Object transactionKey = new Object();\nprivate final Instance requestScopedEms;\npublic TransactionScopedEntityManager(TransactionManager transactionManager,\nTransactionSynchronizationRegistry tsr,\nEntityManagerFactory emf,\nString unitName, Instance requestScopedEms) {\nthis.transactionManager = transactionManager;\nthis.tsr = tsr;\nthis.emf = emf;\nthis.unitName = unitName;\nthis.requestScopedEms = requestScopedEms;\n}\nEntityManagerResult getEntityManager() {\nif (isInTransaction()) {\nEntityManager em = 
(EntityManager) tsr.getResource(transactionKey);\nif (em != null) {\nreturn new EntityManagerResult(em, false, true);\n}\nEntityManager newEm = emf.createEntityManager();\nnewEm.joinTransaction();\ntsr.putResource(transactionKey, newEm);\ntsr.registerInterposedSynchronization(new Synchronization() {\n@Override\npublic void beforeCompletion() {\nnewEm.flush();\nnewEm.close();\n}\n@Override\npublic void afterCompletion(int i) {\nnewEm.close();\n}\n});\nreturn new EntityManagerResult(newEm, false, true);\n} else {\nRequestScopedEntityManagerHolder requestScopedEms = this.requestScopedEms.get();\nreturn new EntityManagerResult(requestScopedEms.getOrCreateEntityManager(unitName, emf), false, false);\n}\n}\nprivate boolean isInTransaction() {\ntry {\nswitch (transactionManager.getStatus()) {\ncase Status.STATUS_ACTIVE:\ncase Status.STATUS_COMMITTING:\ncase Status.STATUS_MARKED_ROLLBACK:\ncase Status.STATUS_PREPARED:\ncase Status.STATUS_PREPARING:\nreturn true;\ndefault:\nreturn false;\n}\n} catch (Exception e) {\nthrow new RuntimeException(e);\n}\n}\n@Override\n@Override\npublic T merge(T entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(TRANSACTION_IS_NOT_ACTIVE);\n}\nreturn emr.em.merge(entity);\n}\n}\n@Override\npublic void remove(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(TRANSACTION_IS_NOT_ACTIVE);\n}\nemr.em.remove(entity);\n}\n}\n@Override\npublic T find(Class entityClass, Object primaryKey) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.find(entityClass, primaryKey);\n}\n}\n@Override\npublic T find(Class entityClass, Object primaryKey, Map properties) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.find(entityClass, primaryKey, properties);\n}\n}\n@Override\npublic T find(Class entityClass, Object primaryKey, LockModeType lockMode) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.find(entityClass, primaryKey, lockMode);\n}\n}\n@Override\npublic T find(Class entityClass, Object primaryKey, LockModeType lockMode, Map properties) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.find(entityClass, primaryKey, lockMode, properties);\n}\n}\n@Override\npublic T getReference(Class entityClass, Object primaryKey) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getReference(entityClass, primaryKey);\n}\n}\n@Override\npublic void flush() {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(TRANSACTION_IS_NOT_ACTIVE);\n}\nemr.em.flush();\n}\n}\n@Override\npublic void setFlushMode(FlushModeType flushMode) {\ntry (EntityManagerResult emr = getEntityManager()) {\nemr.em.setFlushMode(flushMode);\n}\n}\n@Override\npublic FlushModeType getFlushMode() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getFlushMode();\n}\n}\n@Override\npublic void lock(Object entity, LockModeType lockMode) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(TRANSACTION_IS_NOT_ACTIVE);\n}\nemr.em.lock(entity, lockMode);\n}\n}\n@Override\npublic void lock(Object entity, LockModeType lockMode, Map properties) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new 
TransactionRequiredException(TRANSACTION_IS_NOT_ACTIVE);\n}\nemr.em.lock(entity, lockMode, properties);\n}\n}\n@Override\npublic void refresh(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(TRANSACTION_IS_NOT_ACTIVE);\n}\nemr.em.refresh(entity);\n}\n}\n@Override\npublic void refresh(Object entity, Map properties) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(TRANSACTION_IS_NOT_ACTIVE);\n}\nemr.em.refresh(entity, properties);\n}\n}\n@Override\npublic void refresh(Object entity, LockModeType lockMode) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(TRANSACTION_IS_NOT_ACTIVE);\n}\nemr.em.refresh(entity, lockMode);\n}\n}\n@Override\npublic void refresh(Object entity, LockModeType lockMode, Map properties) {\ntry (EntityManagerResult emr = getEntityManager()) {\nif (!emr.allowModification) {\nthrow new TransactionRequiredException(TRANSACTION_IS_NOT_ACTIVE);\n}\nemr.em.refresh(entity, lockMode, properties);\n}\n}\n@Override\npublic void clear() {\ntry (EntityManagerResult emr = getEntityManager()) {\nemr.em.clear();\n}\n}\n@Override\npublic void detach(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nemr.em.detach(entity);\n}\n}\n@Override\npublic boolean contains(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.contains(entity);\n}\n}\n@Override\npublic LockModeType getLockMode(Object entity) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getLockMode(entity);\n}\n}\n@Override\npublic void setProperty(String propertyName, Object value) {\ntry (EntityManagerResult emr = getEntityManager()) {\nemr.em.setProperty(propertyName, value);\n}\n}\n@Override\npublic Map getProperties() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getProperties();\n}\n}\n@Override\npublic Query createQuery(String qlString) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createQuery(qlString);\n}\n}\n@Override\npublic TypedQuery createQuery(CriteriaQuery criteriaQuery) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createQuery(criteriaQuery);\n}\n}\n@Override\npublic Query createQuery(CriteriaUpdate updateQuery) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createQuery(updateQuery);\n}\n}\n@Override\npublic Query createQuery(CriteriaDelete deleteQuery) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createQuery(deleteQuery);\n}\n}\n@Override\npublic TypedQuery createQuery(String qlString, Class resultClass) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createQuery(qlString, resultClass);\n}\n}\n@Override\npublic Query createNamedQuery(String name) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNamedQuery(name);\n}\n}\n@Override\npublic TypedQuery createNamedQuery(String name, Class resultClass) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNamedQuery(name, resultClass);\n}\n}\n@Override\npublic Query createNativeQuery(String sqlString) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNativeQuery(sqlString);\n}\n}\n@Override\npublic Query createNativeQuery(String sqlString, Class resultClass) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn 
emr.em.createNativeQuery(sqlString, resultClass);\n}\n}\n@Override\npublic Query createNativeQuery(String sqlString, String resultSetMapping) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNativeQuery(sqlString, resultSetMapping);\n}\n}\n@Override\npublic StoredProcedureQuery createNamedStoredProcedureQuery(String name) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createNamedStoredProcedureQuery(name);\n}\n}\n@Override\npublic StoredProcedureQuery createStoredProcedureQuery(String procedureName) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createStoredProcedureQuery(procedureName);\n}\n}\n@Override\npublic StoredProcedureQuery createStoredProcedureQuery(String procedureName, Class... resultClasses) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createStoredProcedureQuery(procedureName, resultClasses);\n}\n}\n@Override\npublic StoredProcedureQuery createStoredProcedureQuery(String procedureName, String... resultSetMappings) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createStoredProcedureQuery(procedureName, resultSetMappings);\n}\n}\n@Override\npublic void joinTransaction() {\ntry (EntityManagerResult emr = getEntityManager()) {\nemr.em.joinTransaction();\n}\n}\n@Override\npublic boolean isJoinedToTransaction() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.isJoinedToTransaction();\n}\n}\n@Override\npublic T unwrap(Class cls) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.unwrap(cls);\n}\n}\n@Override\npublic Object getDelegate() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getDelegate();\n}\n}\n@Override\npublic void close() {\nthrow new IllegalStateException(\"Not supported for transaction scoped entity managers\");\n}\n@Override\npublic boolean isOpen() {\nreturn true;\n}\n@Override\npublic EntityTransaction getTransaction() {\nthrow new IllegalStateException(\"Not supported for JTA entity managers\");\n}\n@Override\npublic EntityManagerFactory getEntityManagerFactory() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getEntityManagerFactory();\n}\n}\n@Override\npublic CriteriaBuilder getCriteriaBuilder() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getCriteriaBuilder();\n}\n}\n@Override\npublic Metamodel getMetamodel() {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getMetamodel();\n}\n}\n@Override\npublic EntityGraph createEntityGraph(Class rootType) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createEntityGraph(rootType);\n}\n}\n@Override\npublic EntityGraph createEntityGraph(String graphName) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.createEntityGraph(graphName);\n}\n}\n@Override\npublic EntityGraph getEntityGraph(String graphName) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getEntityGraph(graphName);\n}\n}\n@Override\npublic List> getEntityGraphs(Class entityClass) {\ntry (EntityManagerResult emr = getEntityManager()) {\nreturn emr.em.getEntityGraphs(entityClass);\n}\n}\nstatic class EntityManagerResult implements AutoCloseable {\nfinal EntityManager em;\nfinal boolean closeOnEnd;\nfinal boolean allowModification;\nEntityManagerResult(EntityManager em, boolean closeOnEnd, boolean allowModification) {\nthis.em = em;\nthis.closeOnEnd = closeOnEnd;\nthis.allowModification = allowModification;\n}\n@Override\npublic void close() 
{\nif (closeOnEnd) {\nem.close();\n}\n}\n}\n}" + }, + { + "comment": "Yes you are right if you consider all commits. I made this comment with the state of commit `b8309e1` in mind where it was first touched. So in the end you can ignore it now.", + "method_body": "private Optional getNextBufferOrEvent(boolean blocking) throws IOException, InterruptedException {\nif (hasReceivedAllEndOfPartitionEvents) {\nreturn Optional.empty();\n}\nif (isReleased) {\nthrow new IllegalStateException(\"Released\");\n}\nrequestPartitions();\nInputChannel currentChannel;\nboolean moreAvailable;\nOptional result = Optional.empty();\ndo {\nsynchronized (inputChannelsWithData) {\nwhile (inputChannelsWithData.size() == 0) {\nif (isReleased) {\nthrow new IllegalStateException(\"Released\");\n}\nif (blocking) {\ninputChannelsWithData.wait();\n}\nelse {\nreturn Optional.empty();\n}\n}\ncurrentChannel = inputChannelsWithData.remove();\nresult = currentChannel.getNextBuffer();\nif (result.isPresent() && result.get().moreAvailable()) {\ninputChannelsWithData.add(currentChannel);\n} else {\nenqueuedInputChannelsWithData.clear(currentChannel.getChannelIndex());\n}\nmoreAvailable = !inputChannelsWithData.isEmpty();\n}\n} while (!result.isPresent());\nfinal Buffer buffer = result.get().buffer();\nnumBytesIn.inc(buffer.getSizeUnsafe());\nif (buffer.isBuffer()) {\nreturn Optional.of(new BufferOrEvent(buffer, currentChannel.getChannelIndex(), moreAvailable));\n}\nelse {\nfinal AbstractEvent event = EventSerializer.fromBuffer(buffer, getClass().getClassLoader());\nif (event.getClass() == EndOfPartitionEvent.class) {\nchannelsWithEndOfPartitionEvents.set(currentChannel.getChannelIndex());\nif (channelsWithEndOfPartitionEvents.cardinality() == numberOfInputChannels) {\ncheckState(!moreAvailable || !pollNextBufferOrEvent().isPresent());\nmoreAvailable = false;\nhasReceivedAllEndOfPartitionEvents = true;\n}\ncurrentChannel.notifySubpartitionConsumed();\ncurrentChannel.releaseAllResources();\n}\nreturn Optional.of(new BufferOrEvent(event, currentChannel.getChannelIndex(), moreAvailable));\n}\n}", + "target_code": "", + "method_body_after": "private Optional getNextBufferOrEvent(boolean blocking) throws IOException, InterruptedException {\nif (hasReceivedAllEndOfPartitionEvents) {\nreturn Optional.empty();\n}\nif (isReleased) {\nthrow new IllegalStateException(\"Released\");\n}\nrequestPartitions();\nOptional> next = waitAndGetNextData(blocking);\nif (!next.isPresent()) {\nreturn Optional.empty();\n}\nInputWithData inputWithData = next.get();\nreturn Optional.of(transformToBufferOrEvent(\ninputWithData.data.buffer(),\ninputWithData.moreAvailable,\ninputWithData.input));\n}", + "context_before": "class SingleInputGate implements InputGate {\nprivate static final Logger LOG = LoggerFactory.getLogger(SingleInputGate.class);\n/** Lock object to guard partition requests and runtime channel updates. */\nprivate final Object requestLock = new Object();\n/** The name of the owning task, for logging purposes. */\nprivate final String owningTaskName;\n/** The job ID of the owning task. */\nprivate final JobID jobId;\n/**\n* The ID of the consumed intermediate result. Each input gate consumes partitions of the\n* intermediate result specified by this ID. This ID also identifies the input gate at the\n* consuming task.\n*/\nprivate final IntermediateDataSetID consumedResultId;\n/** The type of the partition the input gate is consuming. 
*/\nprivate final ResultPartitionType consumedPartitionType;\n/**\n* The index of the consumed subpartition of each consumed partition. This index depends on the\n* {@link DistributionPattern} and the subtask indices of the producing and consuming task.\n*/\nprivate final int consumedSubpartitionIndex;\n/** The number of input channels (equivalent to the number of consumed partitions). */\nprivate final int numberOfInputChannels;\n/**\n* Input channels. There is a one input channel for each consumed intermediate result partition.\n* We store this in a map for runtime updates of single channels.\n*/\nprivate final Map inputChannels;\n/** Channels, which notified this input gate about available data. */\nprivate final ArrayDeque inputChannelsWithData = new ArrayDeque<>();\n/**\n* Field guaranteeing uniqueness for inputChannelsWithData queue. Both of those fields should be unified\n* onto one.\n*/\nprivate final BitSet enqueuedInputChannelsWithData;\nprivate final BitSet channelsWithEndOfPartitionEvents;\n/** The partition state listener listening to failed partition requests. */\nprivate final TaskActions taskActions;\n/**\n* Buffer pool for incoming buffers. Incoming data from remote channels is copied to buffers\n* from this pool.\n*/\nprivate BufferPool bufferPool;\n/** Global network buffer pool to request and recycle exclusive buffers (only for credit-based). */\nprivate NetworkBufferPool networkBufferPool;\nprivate final boolean isCreditBased;\nprivate boolean hasReceivedAllEndOfPartitionEvents;\n/** Flag indicating whether partitions have been requested. */\nprivate boolean requestedPartitionsFlag;\n/** Flag indicating whether all resources have been released. */\nprivate volatile boolean isReleased;\n/** Registered listener to forward buffer notifications to. */\nprivate volatile InputGateListener inputGateListener;\nprivate final List pendingEvents = new ArrayList<>();\nprivate int numberOfUninitializedChannels;\n/** Number of network buffers to use for each remote input channel. */\nprivate int networkBuffersPerChannel;\n/** A timer to retrigger local partition requests. Only initialized if actually needed. 
*/\nprivate Timer retriggerLocalRequestTimer;\nprivate final Counter numBytesIn;\npublic SingleInputGate(\nString owningTaskName,\nJobID jobId,\nIntermediateDataSetID consumedResultId,\nfinal ResultPartitionType consumedPartitionType,\nint consumedSubpartitionIndex,\nint numberOfInputChannels,\nTaskActions taskActions,\nCounter numBytesIn,\nboolean isCreditBased) {\nthis.owningTaskName = checkNotNull(owningTaskName);\nthis.jobId = checkNotNull(jobId);\nthis.consumedResultId = checkNotNull(consumedResultId);\nthis.consumedPartitionType = checkNotNull(consumedPartitionType);\ncheckArgument(consumedSubpartitionIndex >= 0);\nthis.consumedSubpartitionIndex = consumedSubpartitionIndex;\ncheckArgument(numberOfInputChannels > 0);\nthis.numberOfInputChannels = numberOfInputChannels;\nthis.inputChannels = new HashMap<>(numberOfInputChannels);\nthis.channelsWithEndOfPartitionEvents = new BitSet(numberOfInputChannels);\nthis.enqueuedInputChannelsWithData = new BitSet(numberOfInputChannels);\nthis.taskActions = checkNotNull(taskActions);\nthis.numBytesIn = checkNotNull(numBytesIn);\nthis.isCreditBased = isCreditBased;\n}\n@Override\npublic int getNumberOfInputChannels() {\nreturn numberOfInputChannels;\n}\npublic IntermediateDataSetID getConsumedResultId() {\nreturn consumedResultId;\n}\n/**\n* Returns the type of this input channel's consumed result partition.\n*\n* @return consumed result partition type\n*/\npublic ResultPartitionType getConsumedPartitionType() {\nreturn consumedPartitionType;\n}\nBufferProvider getBufferProvider() {\nreturn bufferPool;\n}\npublic BufferPool getBufferPool() {\nreturn bufferPool;\n}\n@Override\npublic int getPageSize() {\nif (bufferPool != null) {\nreturn bufferPool.getMemorySegmentSize();\n}\nelse {\nthrow new IllegalStateException(\"Input gate has not been initialized with buffers.\");\n}\n}\npublic int getNumberOfQueuedBuffers() {\nfor (int retry = 0; retry < 3; retry++) {\ntry {\nint totalBuffers = 0;\nfor (InputChannel channel : inputChannels.values()) {\nif (channel instanceof RemoteInputChannel) {\ntotalBuffers += ((RemoteInputChannel) channel).getNumberOfQueuedBuffers();\n}\n}\nreturn totalBuffers;\n}\ncatch (Exception ignored) {}\n}\nreturn 0;\n}\n@Override\npublic String getOwningTaskName() {\nreturn owningTaskName;\n}\npublic void setBufferPool(BufferPool bufferPool) {\ncheckState(this.bufferPool == null, \"Bug in input gate setup logic: buffer pool has\" +\n\"already been set for this input gate.\");\nthis.bufferPool = checkNotNull(bufferPool);\n}\n/**\n* Assign the exclusive buffers to all remote input channels directly for credit-based mode.\n*\n* @param networkBufferPool The global pool to request and recycle exclusive buffers\n* @param networkBuffersPerChannel The number of exclusive buffers for each channel\n*/\npublic void assignExclusiveSegments(NetworkBufferPool networkBufferPool, int networkBuffersPerChannel) throws IOException {\ncheckState(this.isCreditBased, \"Bug in input gate setup logic: exclusive buffers only exist with credit-based flow control.\");\ncheckState(this.networkBufferPool == null, \"Bug in input gate setup logic: global buffer pool has\" +\n\"already been set for this input gate.\");\nthis.networkBufferPool = checkNotNull(networkBufferPool);\nthis.networkBuffersPerChannel = networkBuffersPerChannel;\nsynchronized (requestLock) {\nfor (InputChannel inputChannel : inputChannels.values()) {\nif (inputChannel instanceof RemoteInputChannel) {\n((RemoteInputChannel) 
inputChannel).assignExclusiveSegments(\nnetworkBufferPool.requestMemorySegments(networkBuffersPerChannel));\n}\n}\n}\n}\n/**\n* The exclusive segments are recycled to network buffer pool directly when input channel is released.\n*\n* @param segments The exclusive segments need to be recycled\n*/\npublic void returnExclusiveSegments(List segments) throws IOException {\nnetworkBufferPool.recycleMemorySegments(segments);\n}\npublic void setInputChannel(IntermediateResultPartitionID partitionId, InputChannel inputChannel) {\nsynchronized (requestLock) {\nif (inputChannels.put(checkNotNull(partitionId), checkNotNull(inputChannel)) == null\n&& inputChannel instanceof UnknownInputChannel) {\nnumberOfUninitializedChannels++;\n}\n}\n}\npublic void updateInputChannel(InputChannelDeploymentDescriptor icdd) throws IOException, InterruptedException {\nsynchronized (requestLock) {\nif (isReleased) {\nreturn;\n}\nfinal IntermediateResultPartitionID partitionId = icdd.getConsumedPartitionId().getPartitionId();\nInputChannel current = inputChannels.get(partitionId);\nif (current instanceof UnknownInputChannel) {\nUnknownInputChannel unknownChannel = (UnknownInputChannel) current;\nInputChannel newChannel;\nResultPartitionLocation partitionLocation = icdd.getConsumedPartitionLocation();\nif (partitionLocation.isLocal()) {\nnewChannel = unknownChannel.toLocalInputChannel();\n}\nelse if (partitionLocation.isRemote()) {\nnewChannel = unknownChannel.toRemoteInputChannel(partitionLocation.getConnectionId());\nif (this.isCreditBased) {\ncheckState(this.networkBufferPool != null, \"Bug in input gate setup logic: \" +\n\"global buffer pool has not been set for this input gate.\");\n((RemoteInputChannel) newChannel).assignExclusiveSegments(\nnetworkBufferPool.requestMemorySegments(networkBuffersPerChannel));\n}\n}\nelse {\nthrow new IllegalStateException(\"Tried to update unknown channel with unknown channel.\");\n}\nLOG.debug(\"{}: Updated unknown input channel to {}.\", owningTaskName, newChannel);\ninputChannels.put(partitionId, newChannel);\nif (requestedPartitionsFlag) {\nnewChannel.requestSubpartition(consumedSubpartitionIndex);\n}\nfor (TaskEvent event : pendingEvents) {\nnewChannel.sendTaskEvent(event);\n}\nif (--numberOfUninitializedChannels == 0) {\npendingEvents.clear();\n}\n}\n}\n}\n/**\n* Retriggers a partition request.\n*/\npublic void retriggerPartitionRequest(IntermediateResultPartitionID partitionId) throws IOException, InterruptedException {\nsynchronized (requestLock) {\nif (!isReleased) {\nfinal InputChannel ch = inputChannels.get(partitionId);\ncheckNotNull(ch, \"Unknown input channel with ID \" + partitionId);\nLOG.debug(\"{}: Retriggering partition request {}:{}.\", owningTaskName, ch.partitionId, consumedSubpartitionIndex);\nif (ch.getClass() == RemoteInputChannel.class) {\nfinal RemoteInputChannel rch = (RemoteInputChannel) ch;\nrch.retriggerSubpartitionRequest(consumedSubpartitionIndex);\n}\nelse if (ch.getClass() == LocalInputChannel.class) {\nfinal LocalInputChannel ich = (LocalInputChannel) ch;\nif (retriggerLocalRequestTimer == null) {\nretriggerLocalRequestTimer = new Timer(true);\n}\nich.retriggerSubpartitionRequest(retriggerLocalRequestTimer, consumedSubpartitionIndex);\n}\nelse {\nthrow new IllegalStateException(\n\"Unexpected type of channel to retrigger partition: \" + ch.getClass());\n}\n}\n}\n}\n@Override\npublic void close() throws IOException {\nboolean released = false;\nsynchronized (requestLock) {\nif (!isReleased) {\ntry {\nLOG.debug(\"{}: Releasing {}.\", owningTaskName, 
this);\nif (retriggerLocalRequestTimer != null) {\nretriggerLocalRequestTimer.cancel();\n}\nfor (InputChannel inputChannel : inputChannels.values()) {\ntry {\ninputChannel.releaseAllResources();\n}\ncatch (IOException e) {\nLOG.warn(\"{}: Error during release of channel resources: {}.\",\nowningTaskName, e.getMessage(), e);\n}\n}\nif (bufferPool != null) {\nbufferPool.lazyDestroy();\n}\n}\nfinally {\nisReleased = true;\nreleased = true;\n}\n}\n}\nif (released) {\nsynchronized (inputChannelsWithData) {\ninputChannelsWithData.notifyAll();\n}\n}\n}\n@Override\npublic boolean isFinished() {\nsynchronized (requestLock) {\nfor (InputChannel inputChannel : inputChannels.values()) {\nif (!inputChannel.isReleased()) {\nreturn false;\n}\n}\n}\nreturn true;\n}\n@Override\npublic void requestPartitions() throws IOException, InterruptedException {\nsynchronized (requestLock) {\nif (!requestedPartitionsFlag) {\nif (isReleased) {\nthrow new IllegalStateException(\"Already released.\");\n}\nif (numberOfInputChannels != inputChannels.size()) {\nthrow new IllegalStateException(\"Bug in input gate setup logic: mismatch between \" +\n\"number of total input channels and the currently set number of input \" +\n\"channels.\");\n}\nfor (InputChannel inputChannel : inputChannels.values()) {\ninputChannel.requestSubpartition(consumedSubpartitionIndex);\n}\n}\nrequestedPartitionsFlag = true;\n}\n}\n@Override\npublic Optional getNextBufferOrEvent() throws IOException, InterruptedException {\nreturn getNextBufferOrEvent(true);\n}\n@Override\npublic Optional pollNextBufferOrEvent() throws IOException, InterruptedException {\nreturn getNextBufferOrEvent(false);\n}\n@Override\npublic void sendTaskEvent(TaskEvent event) throws IOException {\nsynchronized (requestLock) {\nfor (InputChannel inputChannel : inputChannels.values()) {\ninputChannel.sendTaskEvent(event);\n}\nif (numberOfUninitializedChannels > 0) {\npendingEvents.add(event);\n}\n}\n}\n@Override\npublic void registerListener(InputGateListener inputGateListener) {\nif (this.inputGateListener == null) {\nthis.inputGateListener = inputGateListener;\n} else {\nthrow new IllegalStateException(\"Multiple listeners\");\n}\n}\nvoid notifyChannelNonEmpty(InputChannel channel) {\nqueueChannel(checkNotNull(channel));\n}\nvoid triggerPartitionStateCheck(ResultPartitionID partitionId) {\ntaskActions.triggerPartitionProducerStateCheck(jobId, consumedResultId, partitionId);\n}\nprivate void queueChannel(InputChannel channel) {\nint availableChannels;\nsynchronized (inputChannelsWithData) {\nif (enqueuedInputChannelsWithData.get(channel.getChannelIndex())) {\nreturn;\n}\navailableChannels = inputChannelsWithData.size();\ninputChannelsWithData.add(channel);\nenqueuedInputChannelsWithData.set(channel.getChannelIndex());\nif (availableChannels == 0) {\ninputChannelsWithData.notifyAll();\n}\n}\nif (availableChannels == 0) {\nInputGateListener listener = inputGateListener;\nif (listener != null) {\nlistener.notifyInputGateNonEmpty(this);\n}\n}\n}\nMap getInputChannels() {\nreturn inputChannels;\n}\n/**\n* Creates an input gate and all of its input channels.\n*/\npublic static SingleInputGate create(\nString owningTaskName,\nJobID jobId,\nInputGateDeploymentDescriptor igdd,\nNetworkEnvironment networkEnvironment,\nTaskEventPublisher taskEventPublisher,\nTaskActions taskActions,\nInputChannelMetrics metrics,\nCounter numBytesInCounter) {\nfinal IntermediateDataSetID consumedResultId = checkNotNull(igdd.getConsumedResultId());\nfinal ResultPartitionType consumedPartitionType = 
checkNotNull(igdd.getConsumedPartitionType());\nfinal int consumedSubpartitionIndex = igdd.getConsumedSubpartitionIndex();\ncheckArgument(consumedSubpartitionIndex >= 0);\nfinal InputChannelDeploymentDescriptor[] icdd = checkNotNull(igdd.getInputChannelDeploymentDescriptors());\nfinal NetworkEnvironmentConfiguration networkConfig = networkEnvironment.getConfiguration();\nfinal SingleInputGate inputGate = new SingleInputGate(\nowningTaskName, jobId, consumedResultId, consumedPartitionType, consumedSubpartitionIndex,\nicdd.length, taskActions, numBytesInCounter, networkConfig.isCreditBased());\nfinal InputChannel[] inputChannels = new InputChannel[icdd.length];\nint numLocalChannels = 0;\nint numRemoteChannels = 0;\nint numUnknownChannels = 0;\nfor (int i = 0; i < inputChannels.length; i++) {\nfinal ResultPartitionID partitionId = icdd[i].getConsumedPartitionId();\nfinal ResultPartitionLocation partitionLocation = icdd[i].getConsumedPartitionLocation();\nif (partitionLocation.isLocal()) {\ninputChannels[i] = new LocalInputChannel(inputGate, i, partitionId,\nnetworkEnvironment.getResultPartitionManager(),\ntaskEventPublisher,\nnetworkConfig.partitionRequestInitialBackoff(),\nnetworkConfig.partitionRequestMaxBackoff(),\nmetrics\n);\nnumLocalChannels++;\n}\nelse if (partitionLocation.isRemote()) {\ninputChannels[i] = new RemoteInputChannel(inputGate, i, partitionId,\npartitionLocation.getConnectionId(),\nnetworkEnvironment.getConnectionManager(),\nnetworkConfig.partitionRequestInitialBackoff(),\nnetworkConfig.partitionRequestMaxBackoff(),\nmetrics\n);\nnumRemoteChannels++;\n}\nelse if (partitionLocation.isUnknown()) {\ninputChannels[i] = new UnknownInputChannel(inputGate, i, partitionId,\nnetworkEnvironment.getResultPartitionManager(),\ntaskEventPublisher,\nnetworkEnvironment.getConnectionManager(),\nnetworkConfig.partitionRequestInitialBackoff(),\nnetworkConfig.partitionRequestMaxBackoff(),\nmetrics\n);\nnumUnknownChannels++;\n}\nelse {\nthrow new IllegalStateException(\"Unexpected partition location.\");\n}\ninputGate.setInputChannel(partitionId.getPartitionId(), inputChannels[i]);\n}\nLOG.debug(\"{}: Created {} input channels (local: {}, remote: {}, unknown: {}).\",\nowningTaskName,\ninputChannels.length,\nnumLocalChannels,\nnumRemoteChannels,\nnumUnknownChannels);\nreturn inputGate;\n}\n}", + "context_after": "class SingleInputGate extends InputGate {\nprivate static final Logger LOG = LoggerFactory.getLogger(SingleInputGate.class);\n/** Lock object to guard partition requests and runtime channel updates. */\nprivate final Object requestLock = new Object();\n/** The name of the owning task, for logging purposes. */\nprivate final String owningTaskName;\n/** The job ID of the owning task. */\nprivate final JobID jobId;\n/**\n* The ID of the consumed intermediate result. Each input gate consumes partitions of the\n* intermediate result specified by this ID. This ID also identifies the input gate at the\n* consuming task.\n*/\nprivate final IntermediateDataSetID consumedResultId;\n/** The type of the partition the input gate is consuming. */\nprivate final ResultPartitionType consumedPartitionType;\n/**\n* The index of the consumed subpartition of each consumed partition. This index depends on the\n* {@link DistributionPattern} and the subtask indices of the producing and consuming task.\n*/\nprivate final int consumedSubpartitionIndex;\n/** The number of input channels (equivalent to the number of consumed partitions). */\nprivate final int numberOfInputChannels;\n/**\n* Input channels. 
There is a one input channel for each consumed intermediate result partition.\n* We store this in a map for runtime updates of single channels.\n*/\nprivate final Map inputChannels;\n/** Channels, which notified this input gate about available data. */\nprivate final ArrayDeque inputChannelsWithData = new ArrayDeque<>();\n/**\n* Field guaranteeing uniqueness for inputChannelsWithData queue. Both of those fields should be unified\n* onto one.\n*/\nprivate final BitSet enqueuedInputChannelsWithData;\nprivate final BitSet channelsWithEndOfPartitionEvents;\n/** The partition state listener listening to failed partition requests. */\nprivate final TaskActions taskActions;\n/**\n* Buffer pool for incoming buffers. Incoming data from remote channels is copied to buffers\n* from this pool.\n*/\nprivate BufferPool bufferPool;\n/** Global network buffer pool to request and recycle exclusive buffers (only for credit-based). */\nprivate NetworkBufferPool networkBufferPool;\nprivate final boolean isCreditBased;\nprivate boolean hasReceivedAllEndOfPartitionEvents;\n/** Flag indicating whether partitions have been requested. */\nprivate boolean requestedPartitionsFlag;\n/** Flag indicating whether all resources have been released. */\nprivate volatile boolean isReleased;\nprivate final List pendingEvents = new ArrayList<>();\nprivate int numberOfUninitializedChannels;\n/** Number of network buffers to use for each remote input channel. */\nprivate int networkBuffersPerChannel;\n/** A timer to retrigger local partition requests. Only initialized if actually needed. */\nprivate Timer retriggerLocalRequestTimer;\nprivate final Counter numBytesIn;\npublic SingleInputGate(\nString owningTaskName,\nJobID jobId,\nIntermediateDataSetID consumedResultId,\nfinal ResultPartitionType consumedPartitionType,\nint consumedSubpartitionIndex,\nint numberOfInputChannels,\nTaskActions taskActions,\nCounter numBytesIn,\nboolean isCreditBased) {\nthis.owningTaskName = checkNotNull(owningTaskName);\nthis.jobId = checkNotNull(jobId);\nthis.consumedResultId = checkNotNull(consumedResultId);\nthis.consumedPartitionType = checkNotNull(consumedPartitionType);\ncheckArgument(consumedSubpartitionIndex >= 0);\nthis.consumedSubpartitionIndex = consumedSubpartitionIndex;\ncheckArgument(numberOfInputChannels > 0);\nthis.numberOfInputChannels = numberOfInputChannels;\nthis.inputChannels = new HashMap<>(numberOfInputChannels);\nthis.channelsWithEndOfPartitionEvents = new BitSet(numberOfInputChannels);\nthis.enqueuedInputChannelsWithData = new BitSet(numberOfInputChannels);\nthis.taskActions = checkNotNull(taskActions);\nthis.numBytesIn = checkNotNull(numBytesIn);\nthis.isCreditBased = isCreditBased;\n}\n@Override\npublic int getNumberOfInputChannels() {\nreturn numberOfInputChannels;\n}\npublic IntermediateDataSetID getConsumedResultId() {\nreturn consumedResultId;\n}\n/**\n* Returns the type of this input channel's consumed result partition.\n*\n* @return consumed result partition type\n*/\npublic ResultPartitionType getConsumedPartitionType() {\nreturn consumedPartitionType;\n}\nBufferProvider getBufferProvider() {\nreturn bufferPool;\n}\npublic BufferPool getBufferPool() {\nreturn bufferPool;\n}\n@Override\npublic int getPageSize() {\nif (bufferPool != null) {\nreturn bufferPool.getMemorySegmentSize();\n}\nelse {\nthrow new IllegalStateException(\"Input gate has not been initialized with buffers.\");\n}\n}\npublic int getNumberOfQueuedBuffers() {\nfor (int retry = 0; retry < 3; retry++) {\ntry {\nint totalBuffers = 0;\nfor 
(InputChannel channel : inputChannels.values()) {\nif (channel instanceof RemoteInputChannel) {\ntotalBuffers += ((RemoteInputChannel) channel).getNumberOfQueuedBuffers();\n}\n}\nreturn totalBuffers;\n}\ncatch (Exception ignored) {}\n}\nreturn 0;\n}\n@Override\npublic String getOwningTaskName() {\nreturn owningTaskName;\n}\npublic void setBufferPool(BufferPool bufferPool) {\ncheckState(this.bufferPool == null, \"Bug in input gate setup logic: buffer pool has\" +\n\"already been set for this input gate.\");\nthis.bufferPool = checkNotNull(bufferPool);\n}\n/**\n* Assign the exclusive buffers to all remote input channels directly for credit-based mode.\n*\n* @param networkBufferPool The global pool to request and recycle exclusive buffers\n* @param networkBuffersPerChannel The number of exclusive buffers for each channel\n*/\npublic void assignExclusiveSegments(NetworkBufferPool networkBufferPool, int networkBuffersPerChannel) throws IOException {\ncheckState(this.isCreditBased, \"Bug in input gate setup logic: exclusive buffers only exist with credit-based flow control.\");\ncheckState(this.networkBufferPool == null, \"Bug in input gate setup logic: global buffer pool has\" +\n\"already been set for this input gate.\");\nthis.networkBufferPool = checkNotNull(networkBufferPool);\nthis.networkBuffersPerChannel = networkBuffersPerChannel;\nsynchronized (requestLock) {\nfor (InputChannel inputChannel : inputChannels.values()) {\nif (inputChannel instanceof RemoteInputChannel) {\n((RemoteInputChannel) inputChannel).assignExclusiveSegments(\nnetworkBufferPool.requestMemorySegments(networkBuffersPerChannel));\n}\n}\n}\n}\n/**\n* The exclusive segments are recycled to network buffer pool directly when input channel is released.\n*\n* @param segments The exclusive segments need to be recycled\n*/\npublic void returnExclusiveSegments(List segments) throws IOException {\nnetworkBufferPool.recycleMemorySegments(segments);\n}\npublic void setInputChannel(IntermediateResultPartitionID partitionId, InputChannel inputChannel) {\nsynchronized (requestLock) {\nif (inputChannels.put(checkNotNull(partitionId), checkNotNull(inputChannel)) == null\n&& inputChannel instanceof UnknownInputChannel) {\nnumberOfUninitializedChannels++;\n}\n}\n}\npublic void updateInputChannel(InputChannelDeploymentDescriptor icdd) throws IOException, InterruptedException {\nsynchronized (requestLock) {\nif (isReleased) {\nreturn;\n}\nfinal IntermediateResultPartitionID partitionId = icdd.getConsumedPartitionId().getPartitionId();\nInputChannel current = inputChannels.get(partitionId);\nif (current instanceof UnknownInputChannel) {\nUnknownInputChannel unknownChannel = (UnknownInputChannel) current;\nInputChannel newChannel;\nResultPartitionLocation partitionLocation = icdd.getConsumedPartitionLocation();\nif (partitionLocation.isLocal()) {\nnewChannel = unknownChannel.toLocalInputChannel();\n}\nelse if (partitionLocation.isRemote()) {\nnewChannel = unknownChannel.toRemoteInputChannel(partitionLocation.getConnectionId());\nif (this.isCreditBased) {\ncheckState(this.networkBufferPool != null, \"Bug in input gate setup logic: \" +\n\"global buffer pool has not been set for this input gate.\");\n((RemoteInputChannel) newChannel).assignExclusiveSegments(\nnetworkBufferPool.requestMemorySegments(networkBuffersPerChannel));\n}\n}\nelse {\nthrow new IllegalStateException(\"Tried to update unknown channel with unknown channel.\");\n}\nLOG.debug(\"{}: Updated unknown input channel to {}.\", owningTaskName, 
newChannel);\ninputChannels.put(partitionId, newChannel);\nif (requestedPartitionsFlag) {\nnewChannel.requestSubpartition(consumedSubpartitionIndex);\n}\nfor (TaskEvent event : pendingEvents) {\nnewChannel.sendTaskEvent(event);\n}\nif (--numberOfUninitializedChannels == 0) {\npendingEvents.clear();\n}\n}\n}\n}\n/**\n* Retriggers a partition request.\n*/\npublic void retriggerPartitionRequest(IntermediateResultPartitionID partitionId) throws IOException, InterruptedException {\nsynchronized (requestLock) {\nif (!isReleased) {\nfinal InputChannel ch = inputChannels.get(partitionId);\ncheckNotNull(ch, \"Unknown input channel with ID \" + partitionId);\nLOG.debug(\"{}: Retriggering partition request {}:{}.\", owningTaskName, ch.partitionId, consumedSubpartitionIndex);\nif (ch.getClass() == RemoteInputChannel.class) {\nfinal RemoteInputChannel rch = (RemoteInputChannel) ch;\nrch.retriggerSubpartitionRequest(consumedSubpartitionIndex);\n}\nelse if (ch.getClass() == LocalInputChannel.class) {\nfinal LocalInputChannel ich = (LocalInputChannel) ch;\nif (retriggerLocalRequestTimer == null) {\nretriggerLocalRequestTimer = new Timer(true);\n}\nich.retriggerSubpartitionRequest(retriggerLocalRequestTimer, consumedSubpartitionIndex);\n}\nelse {\nthrow new IllegalStateException(\n\"Unexpected type of channel to retrigger partition: \" + ch.getClass());\n}\n}\n}\n}\n@Override\npublic void close() throws IOException {\nboolean released = false;\nsynchronized (requestLock) {\nif (!isReleased) {\ntry {\nLOG.debug(\"{}: Releasing {}.\", owningTaskName, this);\nif (retriggerLocalRequestTimer != null) {\nretriggerLocalRequestTimer.cancel();\n}\nfor (InputChannel inputChannel : inputChannels.values()) {\ntry {\ninputChannel.releaseAllResources();\n}\ncatch (IOException e) {\nLOG.warn(\"{}: Error during release of channel resources: {}.\",\nowningTaskName, e.getMessage(), e);\n}\n}\nif (bufferPool != null) {\nbufferPool.lazyDestroy();\n}\n}\nfinally {\nisReleased = true;\nreleased = true;\n}\n}\n}\nif (released) {\nsynchronized (inputChannelsWithData) {\ninputChannelsWithData.notifyAll();\n}\n}\n}\n@Override\npublic boolean isFinished() {\nsynchronized (requestLock) {\nfor (InputChannel inputChannel : inputChannels.values()) {\nif (!inputChannel.isReleased()) {\nreturn false;\n}\n}\n}\nreturn true;\n}\n@Override\npublic void requestPartitions() throws IOException, InterruptedException {\nsynchronized (requestLock) {\nif (!requestedPartitionsFlag) {\nif (isReleased) {\nthrow new IllegalStateException(\"Already released.\");\n}\nif (numberOfInputChannels != inputChannels.size()) {\nthrow new IllegalStateException(\"Bug in input gate setup logic: mismatch between \" +\n\"number of total input channels and the currently set number of input \" +\n\"channels.\");\n}\nfor (InputChannel inputChannel : inputChannels.values()) {\ninputChannel.requestSubpartition(consumedSubpartitionIndex);\n}\n}\nrequestedPartitionsFlag = true;\n}\n}\n@Override\npublic Optional getNextBufferOrEvent() throws IOException, InterruptedException {\nreturn getNextBufferOrEvent(true);\n}\n@Override\npublic Optional pollNextBufferOrEvent() throws IOException, InterruptedException {\nreturn getNextBufferOrEvent(false);\n}\nprivate Optional> waitAndGetNextData(boolean blocking)\nthrows IOException, InterruptedException {\nwhile (true) {\nsynchronized (inputChannelsWithData) {\nwhile (inputChannelsWithData.size() == 0) {\nif (isReleased) {\nthrow new IllegalStateException(\"Released\");\n}\nif (blocking) {\ninputChannelsWithData.wait();\n}\nelse 
{\nresetIsAvailable();\nreturn Optional.empty();\n}\n}\nInputChannel inputChannel = inputChannelsWithData.remove();\nOptional result = inputChannel.getNextBuffer();\nif (result.isPresent() && result.get().moreAvailable()) {\ninputChannelsWithData.add(inputChannel);\n} else {\nenqueuedInputChannelsWithData.clear(inputChannel.getChannelIndex());\n}\nif (inputChannelsWithData.isEmpty()) {\nresetIsAvailable();\n}\nif (result.isPresent()) {\nreturn Optional.of(new InputWithData<>(\ninputChannel,\nresult.get(),\n!inputChannelsWithData.isEmpty()));\n}\n}\n}\n}\nprivate BufferOrEvent transformToBufferOrEvent(\nBuffer buffer,\nboolean moreAvailable,\nInputChannel currentChannel) throws IOException, InterruptedException {\nnumBytesIn.inc(buffer.getSizeUnsafe());\nif (buffer.isBuffer()) {\nreturn new BufferOrEvent(buffer, currentChannel.getChannelIndex(), moreAvailable);\n}\nelse {\nfinal AbstractEvent event = EventSerializer.fromBuffer(buffer, getClass().getClassLoader());\nif (event.getClass() == EndOfPartitionEvent.class) {\nchannelsWithEndOfPartitionEvents.set(currentChannel.getChannelIndex());\nif (channelsWithEndOfPartitionEvents.cardinality() == numberOfInputChannels) {\ncheckState(!moreAvailable || !pollNextBufferOrEvent().isPresent());\nmoreAvailable = false;\nhasReceivedAllEndOfPartitionEvents = true;\n}\ncurrentChannel.notifySubpartitionConsumed();\ncurrentChannel.releaseAllResources();\n}\nreturn new BufferOrEvent(event, currentChannel.getChannelIndex(), moreAvailable);\n}\n}\n@Override\npublic void sendTaskEvent(TaskEvent event) throws IOException {\nsynchronized (requestLock) {\nfor (InputChannel inputChannel : inputChannels.values()) {\ninputChannel.sendTaskEvent(event);\n}\nif (numberOfUninitializedChannels > 0) {\npendingEvents.add(event);\n}\n}\n}\nvoid notifyChannelNonEmpty(InputChannel channel) {\nqueueChannel(checkNotNull(channel));\n}\nvoid triggerPartitionStateCheck(ResultPartitionID partitionId) {\ntaskActions.triggerPartitionProducerStateCheck(jobId, consumedResultId, partitionId);\n}\nprivate void queueChannel(InputChannel channel) {\nint availableChannels;\nCompletableFuture toNotify = null;\nsynchronized (inputChannelsWithData) {\nif (enqueuedInputChannelsWithData.get(channel.getChannelIndex())) {\nreturn;\n}\navailableChannels = inputChannelsWithData.size();\ninputChannelsWithData.add(channel);\nenqueuedInputChannelsWithData.set(channel.getChannelIndex());\nif (availableChannels == 0) {\ninputChannelsWithData.notifyAll();\ntoNotify = isAvailable;\nisAvailable = AVAILABLE;\n}\n}\nif (toNotify != null) {\ntoNotify.complete(null);\n}\n}\npublic Map getInputChannels() {\nreturn inputChannels;\n}\n/**\n* Creates an input gate and all of its input channels.\n*/\npublic static SingleInputGate create(\nString owningTaskName,\nJobID jobId,\nInputGateDeploymentDescriptor igdd,\nNetworkEnvironment networkEnvironment,\nTaskEventPublisher taskEventPublisher,\nTaskActions taskActions,\nInputChannelMetrics metrics,\nCounter numBytesInCounter) {\nfinal IntermediateDataSetID consumedResultId = checkNotNull(igdd.getConsumedResultId());\nfinal ResultPartitionType consumedPartitionType = checkNotNull(igdd.getConsumedPartitionType());\nfinal int consumedSubpartitionIndex = igdd.getConsumedSubpartitionIndex();\ncheckArgument(consumedSubpartitionIndex >= 0);\nfinal InputChannelDeploymentDescriptor[] icdd = checkNotNull(igdd.getInputChannelDeploymentDescriptors());\nfinal NetworkEnvironmentConfiguration networkConfig = networkEnvironment.getConfiguration();\nfinal SingleInputGate inputGate = new 
SingleInputGate(\nowningTaskName, jobId, consumedResultId, consumedPartitionType, consumedSubpartitionIndex,\nicdd.length, taskActions, numBytesInCounter, networkConfig.isCreditBased());\nfinal InputChannel[] inputChannels = new InputChannel[icdd.length];\nint numLocalChannels = 0;\nint numRemoteChannels = 0;\nint numUnknownChannels = 0;\nfor (int i = 0; i < inputChannels.length; i++) {\nfinal ResultPartitionID partitionId = icdd[i].getConsumedPartitionId();\nfinal ResultPartitionLocation partitionLocation = icdd[i].getConsumedPartitionLocation();\nif (partitionLocation.isLocal()) {\ninputChannels[i] = new LocalInputChannel(inputGate, i, partitionId,\nnetworkEnvironment.getResultPartitionManager(),\ntaskEventPublisher,\nnetworkConfig.partitionRequestInitialBackoff(),\nnetworkConfig.partitionRequestMaxBackoff(),\nmetrics\n);\nnumLocalChannels++;\n}\nelse if (partitionLocation.isRemote()) {\ninputChannels[i] = new RemoteInputChannel(inputGate, i, partitionId,\npartitionLocation.getConnectionId(),\nnetworkEnvironment.getConnectionManager(),\nnetworkConfig.partitionRequestInitialBackoff(),\nnetworkConfig.partitionRequestMaxBackoff(),\nmetrics\n);\nnumRemoteChannels++;\n}\nelse if (partitionLocation.isUnknown()) {\ninputChannels[i] = new UnknownInputChannel(inputGate, i, partitionId,\nnetworkEnvironment.getResultPartitionManager(),\ntaskEventPublisher,\nnetworkEnvironment.getConnectionManager(),\nnetworkConfig.partitionRequestInitialBackoff(),\nnetworkConfig.partitionRequestMaxBackoff(),\nmetrics\n);\nnumUnknownChannels++;\n}\nelse {\nthrow new IllegalStateException(\"Unexpected partition location.\");\n}\ninputGate.setInputChannel(partitionId.getPartitionId(), inputChannels[i]);\n}\nLOG.debug(\"{}: Created {} input channels (local: {}, remote: {}, unknown: {}).\",\nowningTaskName,\ninputChannels.length,\nnumLocalChannels,\nnumRemoteChannels,\nnumUnknownChannels);\nreturn inputGate;\n}\n}" + }, + { + "comment": "it's not clear to me how this would work here, can you make a (full) github \"suggestion\" so I can understand better?", + "method_body": "private static String getHttpUrlFromServerSpanStableSemconv(Attributes attributes) {\nString scheme = attributes.get(SemanticAttributes.URL_SCHEME);\nif (scheme == null) {\nreturn null;\n}\nString path = attributes.get(SemanticAttributes.URL_PATH);\nif (path == null) {\nreturn null;\n}\nString host = attributes.get(SemanticAttributes.SERVER_ADDRESS);\nif (host == null) {\nreturn null;\n}\nLong port = attributes.get(SemanticAttributes.SERVER_PORT);\nif (port != null && port > 0) {\nreturn scheme + \":\n}\nreturn scheme + \":\n}", + "target_code": "Long port = attributes.get(SemanticAttributes.SERVER_PORT);", + "method_body_after": "private static String getHttpUrlFromServerSpanStableSemconv(Attributes attributes) {\nString scheme = attributes.get(SemanticAttributes.URL_SCHEME);\nif (scheme == null) {\nreturn null;\n}\nString path = attributes.get(SemanticAttributes.URL_PATH);\nif (path == null) {\nreturn null;\n}\nString host = attributes.get(SemanticAttributes.SERVER_ADDRESS);\nif (host == null) {\nreturn null;\n}\nLong port = attributes.get(SemanticAttributes.SERVER_PORT);\nif (port != null && port > 0) {\nreturn scheme + \":\n}\nreturn scheme + \":\n}", + "context_before": "class SpanDataMapper {\npublic static final String MS_PROCESSED_BY_METRIC_EXTRACTORS = \"_MS.ProcessedByMetricExtractors\";\nprivate static final Set SQL_DB_SYSTEMS =\nnew 
HashSet<>(\nasList(\nSemanticAttributes.DbSystemValues.DB2,\nSemanticAttributes.DbSystemValues.DERBY,\nSemanticAttributes.DbSystemValues.MARIADB,\nSemanticAttributes.DbSystemValues.MSSQL,\nSemanticAttributes.DbSystemValues.MYSQL,\nSemanticAttributes.DbSystemValues.ORACLE,\nSemanticAttributes.DbSystemValues.POSTGRESQL,\nSemanticAttributes.DbSystemValues.SQLITE,\nSemanticAttributes.DbSystemValues.OTHER_SQL,\nSemanticAttributes.DbSystemValues.HSQLDB,\nSemanticAttributes.DbSystemValues.H2));\nprivate static final String COSMOS = \"Cosmos\";\nprivate static final Mappings MAPPINGS;\nprivate static final ContextTagKeys AI_DEVICE_OS = ContextTagKeys.fromString(\"ai.device.os\");\nstatic {\nMappingsBuilder mappingsBuilder =\nnew MappingsBuilder()\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_NAMESPACE.getKey())\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION.getKey())\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME.getKey())\n.ignoreExact(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS.getKey())\n.ignoreExact(AiSemanticAttributes.KAFKA_OFFSET.getKey())\n.exact(\nSemanticAttributes.USER_AGENT_ORIGINAL.getKey(),\n(builder, value) -> {\nif (value instanceof String) {\nbuilder.addTag(\"ai.user.userAgent\", (String) value);\n}\n})\n.ignorePrefix(\"applicationinsights.internal.\")\n.prefix(\n\"http.request.header.\",\n(telemetryBuilder, key, value) -> {\nif (value instanceof List) {\ntelemetryBuilder.addProperty(key, Mappings.join((List) value));\n}\n})\n.prefix(\n\"http.response.header.\",\n(telemetryBuilder, key, value) -> {\nif (value instanceof List) {\ntelemetryBuilder.addProperty(key, Mappings.join((List) value));\n}\n});\napplyCommonTags(mappingsBuilder);\nMAPPINGS = mappingsBuilder.build();\n}\nprivate final boolean captureHttpServer4xxAsError;\nprivate final BiConsumer telemetryInitializer;\nprivate final BiPredicate eventSuppressor;\nprivate final BiPredicate shouldSuppress;\npublic SpanDataMapper(\nboolean captureHttpServer4xxAsError,\nBiConsumer telemetryInitializer,\nBiPredicate eventSuppressor,\nBiPredicate shouldSuppress) {\nthis.captureHttpServer4xxAsError = captureHttpServer4xxAsError;\nthis.telemetryInitializer = telemetryInitializer;\nthis.eventSuppressor = eventSuppressor;\nthis.shouldSuppress = shouldSuppress;\n}\npublic TelemetryItem map(SpanData span) {\nlong itemCount = getItemCount(span);\nreturn map(span, itemCount);\n}\npublic void map(SpanData span, Consumer consumer) {\nlong itemCount = getItemCount(span);\nTelemetryItem telemetryItem = map(span, itemCount);\nconsumer.accept(telemetryItem);\nexportEvents(\nspan,\ntelemetryItem.getTags().get(ContextTagKeys.AI_OPERATION_NAME.toString()),\nitemCount,\nconsumer);\n}\npublic TelemetryItem map(SpanData span, long itemCount) {\nif (RequestChecker.isRequest(span)) {\nreturn exportRequest(span, itemCount);\n} else {\nreturn exportRemoteDependency(span, span.getKind() == SpanKind.INTERNAL, itemCount);\n}\n}\nprivate static boolean checkIsPreAggregatedStandardMetric(SpanData span) {\nBoolean isPreAggregatedStandardMetric =\nspan.getAttributes().get(AiSemanticAttributes.IS_PRE_AGGREGATED);\nreturn isPreAggregatedStandardMetric != null && isPreAggregatedStandardMetric;\n}\nprivate TelemetryItem exportRemoteDependency(SpanData span, boolean inProc, long itemCount) {\nRemoteDependencyTelemetryBuilder telemetryBuilder = RemoteDependencyTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationTags(telemetryBuilder, span);\nsetTime(telemetryBuilder, 
span.getStartEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(span.getAttributes(), telemetryBuilder);\naddLinks(telemetryBuilder, span.getLinks());\ntelemetryBuilder.setId(span.getSpanId());\ntelemetryBuilder.setName(getDependencyName(span));\ntelemetryBuilder.setDuration(\nFormattedDuration.fromNanos(span.getEndEpochNanos() - span.getStartEpochNanos()));\ntelemetryBuilder.setSuccess(getSuccess(span));\nif (inProc) {\ntelemetryBuilder.setType(\"InProc\");\n} else {\napplySemanticConventions(telemetryBuilder, span);\n}\nif (checkIsPreAggregatedStandardMetric(span)) {\ntelemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, \"True\");\n}\nreturn telemetryBuilder.build();\n}\nprivate static final Set DEFAULT_HTTP_SPAN_NAMES =\nnew HashSet<>(\nasList(\"OPTIONS\", \"GET\", \"HEAD\", \"POST\", \"PUT\", \"DELETE\", \"TRACE\", \"CONNECT\", \"PATCH\"));\nprivate static String getDependencyName(SpanData span) {\nString name = span.getName();\nString method = getStableAttribute(span.getAttributes(), SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);\nif (method == null) {\nreturn name;\n}\nif (!DEFAULT_HTTP_SPAN_NAMES.contains(name)) {\nreturn name;\n}\nString url = getStableAttribute(span.getAttributes(), SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);\nif (url == null) {\nreturn name;\n}\nString path = UrlParser.getPath(url);\nif (path == null) {\nreturn name;\n}\nreturn path.isEmpty() ? method + \" /\" : method + \" \" + path;\n}\nprivate static void applySemanticConventions(\nRemoteDependencyTelemetryBuilder telemetryBuilder, SpanData span) {\nAttributes attributes = span.getAttributes();\nString httpMethod = getStableAttribute(attributes, SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);\nif (httpMethod != null) {\napplyHttpClientSpan(telemetryBuilder, attributes);\nreturn;\n}\nString rpcSystem = attributes.get(SemanticAttributes.RPC_SYSTEM);\nif (rpcSystem != null) {\napplyRpcClientSpan(telemetryBuilder, rpcSystem, attributes);\nreturn;\n}\nString dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);\nif (dbSystem == null) {\ndbSystem = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_TYPE);\n}\nif (dbSystem != null) {\napplyDatabaseClientSpan(telemetryBuilder, dbSystem, attributes);\nreturn;\n}\nString messagingSystem = getMessagingSystem(attributes);\nif (messagingSystem != null) {\napplyMessagingClientSpan(telemetryBuilder, span.getKind(), messagingSystem, attributes);\nreturn;\n}\nString target = getTargetOrDefault(attributes, Integer.MAX_VALUE, null);\nif (target != null) {\ntelemetryBuilder.setTarget(target);\nreturn;\n}\ntelemetryBuilder.setType(\"InProc\");\n}\n@Nullable\nprivate static String getMessagingSystem(Attributes attributes) {\nString azureNamespace = attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE);\nif (isAzureSdkMessaging(azureNamespace)) {\nreturn azureNamespace;\n}\nreturn attributes.get(SemanticAttributes.MESSAGING_SYSTEM);\n}\nprivate static void setOperationTags(AbstractTelemetryBuilder telemetryBuilder, SpanData span) {\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getParentSpanContext().getSpanId());\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nprivate static void setOperationId(AbstractTelemetryBuilder telemetryBuilder, String traceId) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), traceId);\n}\nprivate static void setOperationParentId(\nAbstractTelemetryBuilder 
telemetryBuilder, String parentSpanId) {\nif (SpanId.isValid(parentSpanId)) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);\n}\n}\nprivate static void setOperationName(\nAbstractTelemetryBuilder telemetryBuilder, Attributes attributes) {\nString operationName = attributes.get(AiSemanticAttributes.OPERATION_NAME);\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n}\n}\nprivate static void setOperationName(\nAbstractTelemetryBuilder telemetryBuilder, String operationName) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);\n}\nprivate static void applyHttpClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, Attributes attributes) {\nString httpUrl = getStableAttribute(attributes, SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);\nint defaultPort = getDefaultPortForHttpUrl(httpUrl);\nString target = getTargetOrDefault(attributes, defaultPort, \"Http\");\ntelemetryBuilder.setType(\"Http\");\ntelemetryBuilder.setTarget(target);\nLong httpStatusCode = getStableAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nif (httpStatusCode != null) {\ntelemetryBuilder.setResultCode(Long.toString(httpStatusCode));\n} else {\ntelemetryBuilder.setResultCode(\"0\");\n}\ntelemetryBuilder.setData(httpUrl);\n}\nprivate static void applyRpcClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, String rpcSystem, Attributes attributes) {\ntelemetryBuilder.setType(rpcSystem);\nString target = getTargetOrDefault(attributes, Integer.MAX_VALUE, rpcSystem);\ntelemetryBuilder.setTarget(target);\n}\nprivate static int getDefaultPortForHttpUrl(@Nullable String httpUrl) {\nif (httpUrl == null) {\nreturn Integer.MAX_VALUE;\n}\nif (httpUrl.startsWith(\"https:\nreturn 443;\n}\nif (httpUrl.startsWith(\"http:\nreturn 80;\n}\nreturn Integer.MAX_VALUE;\n}\npublic static String getTargetOrDefault(\nAttributes attributes, int defaultPort, String defaultTarget) {\nString target = getTargetOrNullStableSemconv(attributes, defaultPort);\nif (target != null) {\nreturn target;\n}\ntarget = getTargetOrNullOldSemconv(attributes, defaultPort);\nif (target != null) {\nreturn target;\n}\nreturn defaultTarget;\n}\n@Nullable\nprivate static String getTargetOrNullStableSemconv(Attributes attributes, int defaultPort) {\nString peerService = attributes.get(SemanticAttributes.PEER_SERVICE);\nif (peerService != null) {\nreturn peerService;\n}\nString host = attributes.get(SemanticAttributes.SERVER_ADDRESS);\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.SERVER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nreturn null;\n}\n@Nullable\nprivate static String getTargetOrNullOldSemconv(Attributes attributes, int defaultPort) {\nString peerService = attributes.get(SemanticAttributes.PEER_SERVICE);\nif (peerService != null) {\nreturn peerService;\n}\nString host = attributes.get(SemanticAttributes.NET_PEER_NAME);\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.NET_PEER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nhost = attributes.get(SemanticAttributes.NET_SOCK_PEER_NAME);\nif (host == null) {\nhost = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);\n}\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.NET_SOCK_PEER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nString httpUrl = attributes.get(SemanticAttributes.HTTP_URL);\nif (httpUrl != null) {\nreturn 
UrlParser.getTarget(httpUrl);\n}\nreturn null;\n}\nprivate static String getTarget(String host, @Nullable Long port, int defaultPort) {\nif (port != null && port != defaultPort) {\nreturn host + \":\" + port;\n} else {\nreturn host;\n}\n}\nprivate static void applyDatabaseClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, String dbSystem, Attributes attributes) {\nString dbStatement = attributes.get(SemanticAttributes.DB_STATEMENT);\nif (dbStatement == null) {\ndbStatement = attributes.get(SemanticAttributes.DB_OPERATION);\n}\nString type;\nif (SQL_DB_SYSTEMS.contains(dbSystem)) {\nif (dbSystem.equals(SemanticAttributes.DbSystemValues.MYSQL)) {\ntype = \"mysql\";\n} else if (dbSystem.equals(SemanticAttributes.DbSystemValues.POSTGRESQL)) {\ntype = \"postgresql\";\n} else {\ntype = \"SQL\";\n}\n} else if (dbSystem.equals(COSMOS)) {\ntype = \"Microsoft.DocumentDb\";\n} else {\ntype = dbSystem;\n}\ntelemetryBuilder.setType(type);\ntelemetryBuilder.setData(dbStatement);\nString target;\nString dbName;\nif (dbSystem.equals(COSMOS)) {\nString dbUrl = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_URL);\nif (dbUrl != null) {\ntarget = UrlParser.getTarget(dbUrl);\n} else {\ntarget = null;\n}\ndbName = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_INSTANCE);\n} else {\ntarget = getTargetOrDefault(attributes, getDefaultPortForDbSystem(dbSystem), dbSystem);\ndbName = attributes.get(SemanticAttributes.DB_NAME);\n}\ntarget = nullAwareConcat(target, dbName, \" | \");\nif (target == null) {\ntarget = dbSystem;\n}\ntelemetryBuilder.setTarget(target);\n}\nprivate static void applyMessagingClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder,\nSpanKind spanKind,\nString messagingSystem,\nAttributes attributes) {\nif (spanKind == SpanKind.PRODUCER) {\ntelemetryBuilder.setType(\"Queue Message | \" + messagingSystem);\n} else {\ntelemetryBuilder.setType(messagingSystem);\n}\ntelemetryBuilder.setTarget(getMessagingTargetSource(attributes));\n}\nprivate static int getDefaultPortForDbSystem(String dbSystem) {\nswitch (dbSystem) {\ncase SemanticAttributes.DbSystemValues.MONGODB:\nreturn 27017;\ncase SemanticAttributes.DbSystemValues.CASSANDRA:\nreturn 9042;\ncase SemanticAttributes.DbSystemValues.REDIS:\nreturn 6379;\ncase SemanticAttributes.DbSystemValues.MARIADB:\ncase SemanticAttributes.DbSystemValues.MYSQL:\nreturn 3306;\ncase SemanticAttributes.DbSystemValues.MSSQL:\nreturn 1433;\ncase SemanticAttributes.DbSystemValues.DB2:\nreturn 50000;\ncase SemanticAttributes.DbSystemValues.ORACLE:\nreturn 1521;\ncase SemanticAttributes.DbSystemValues.H2:\nreturn 8082;\ncase SemanticAttributes.DbSystemValues.DERBY:\nreturn 1527;\ncase SemanticAttributes.DbSystemValues.POSTGRESQL:\nreturn 5432;\ndefault:\nreturn Integer.MAX_VALUE;\n}\n}\nprivate TelemetryItem exportRequest(SpanData span, long itemCount) {\nRequestTelemetryBuilder telemetryBuilder = RequestTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nAttributes attributes = span.getAttributes();\nlong startEpochNanos = span.getStartEpochNanos();\ntelemetryBuilder.setId(span.getSpanId());\nsetTime(telemetryBuilder, startEpochNanos);\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(attributes, telemetryBuilder);\naddLinks(telemetryBuilder, span.getLinks());\nString operationName = getOperationName(span);\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());\nString 
aiLegacyParentId = span.getAttributes().get(AiSemanticAttributes.LEGACY_PARENT_ID);\nif (aiLegacyParentId != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);\n} else if (span.getParentSpanContext().isValid()) {\ntelemetryBuilder.addTag(\nContextTagKeys.AI_OPERATION_PARENT_ID.toString(),\nspan.getParentSpanContext().getSpanId());\n}\nString aiLegacyRootId = span.getAttributes().get(AiSemanticAttributes.LEGACY_ROOT_ID);\nif (aiLegacyRootId != null) {\ntelemetryBuilder.addTag(\"ai_legacyRootID\", aiLegacyRootId);\n}\ntelemetryBuilder.setName(operationName);\ntelemetryBuilder.setDuration(\nFormattedDuration.fromNanos(span.getEndEpochNanos() - startEpochNanos));\ntelemetryBuilder.setSuccess(getSuccess(span));\nString httpUrl = getHttpUrlFromServerSpan(attributes);\nif (httpUrl != null) {\ntelemetryBuilder.setUrl(httpUrl);\n}\nLong httpStatusCode = getStableAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nif (httpStatusCode == null) {\nhttpStatusCode = attributes.get(SemanticAttributes.RPC_GRPC_STATUS_CODE);\n}\nif (httpStatusCode != null) {\ntelemetryBuilder.setResponseCode(Long.toString(httpStatusCode));\n} else {\ntelemetryBuilder.setResponseCode(\"0\");\n}\nString locationIp = getStableAttribute(attributes, SemanticAttributes.CLIENT_ADDRESS, SemanticAttributes.HTTP_CLIENT_IP);\nif (locationIp == null) {\nlocationIp = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);\n}\nif (locationIp != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_LOCATION_IP.toString(), locationIp);\n}\ntelemetryBuilder.setSource(getSource(attributes));\nString sessionId = attributes.get(AiSemanticAttributes.SESSION_ID);\nif (sessionId != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_SESSION_ID.toString(), sessionId);\n}\nString deviceOs = attributes.get(AiSemanticAttributes.DEVICE_OS);\nif (deviceOs != null) {\ntelemetryBuilder.addTag(AI_DEVICE_OS.toString(), deviceOs);\n}\nString deviceOsVersion = attributes.get(AiSemanticAttributes.DEVICE_OS_VERSION);\nif (deviceOsVersion != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_DEVICE_OS_VERSION.toString(), deviceOsVersion);\n}\nif (checkIsPreAggregatedStandardMetric(span)) {\ntelemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, \"True\");\n}\nLong enqueuedTime = attributes.get(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME);\nif (enqueuedTime != null) {\nlong timeSinceEnqueuedMillis =\nMath.max(\n0L, NANOSECONDS.toMillis(span.getStartEpochNanos()) - SECONDS.toMillis(enqueuedTime));\ntelemetryBuilder.addMeasurement(\"timeSinceEnqueued\", (double) timeSinceEnqueuedMillis);\n}\nLong timeSinceEnqueuedMillis = attributes.get(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS);\nif (timeSinceEnqueuedMillis != null) {\ntelemetryBuilder.addMeasurement(\"timeSinceEnqueued\", (double) timeSinceEnqueuedMillis);\n}\nreturn telemetryBuilder.build();\n}\nprivate boolean getSuccess(SpanData span) {\nswitch (span.getStatus().getStatusCode()) {\ncase ERROR:\nreturn false;\ncase OK:\nreturn true;\ncase UNSET:\nif (captureHttpServer4xxAsError) {\nLong statusCode = getStableAttribute(span.getAttributes(), SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nreturn statusCode == null || statusCode < 400;\n}\nreturn true;\n}\nreturn true;\n}\n@Nullable\npublic static String getHttpUrlFromServerSpan(Attributes attributes) {\nString httpUrl = getHttpUrlFromServerSpanStableSemconv(attributes);\nif (httpUrl != null) {\nreturn 
httpUrl;\n}\nreturn getHttpUrlFromServerSpanOldSemconv(attributes);\n}\n@Nullable\n@Nullable\nprivate static String getHttpUrlFromServerSpanOldSemconv(Attributes attributes) {\nString httpUrl = attributes.get(SemanticAttributes.HTTP_URL);\nif (httpUrl != null) {\nreturn httpUrl;\n}\nString scheme = attributes.get(SemanticAttributes.HTTP_SCHEME);\nif (scheme == null) {\nreturn null;\n}\nString target = attributes.get(SemanticAttributes.HTTP_TARGET);\nif (target == null) {\nreturn null;\n}\nString host = attributes.get(SemanticAttributes.NET_HOST_NAME);\nLong port = attributes.get(SemanticAttributes.NET_HOST_PORT);\nif (port != null && port > 0) {\nreturn scheme + \":\n}\nreturn scheme + \":\n}\n@Nullable\nprivate static String getSource(Attributes attributes) {\nString source = attributes.get(AiSemanticAttributes.SPAN_SOURCE);\nif (source != null) {\nreturn source;\n}\nreturn getMessagingTargetSource(attributes);\n}\n@Nullable\nprivate static String getMessagingTargetSource(Attributes attributes) {\nif (isAzureSdkMessaging(attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE))) {\nString peerAddress = attributes.get(AiSemanticAttributes.AZURE_SDK_PEER_ADDRESS);\nif (peerAddress != null) {\nString destination = attributes.get(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION);\nreturn peerAddress + \"/\" + destination;\n}\n}\nString messagingSystem = getMessagingSystem(attributes);\nif (messagingSystem == null) {\nreturn null;\n}\nString source =\nnullAwareConcat(\ngetTargetOrNullOldSemconv(attributes, Integer.MAX_VALUE),\nattributes.get(SemanticAttributes.MESSAGING_DESTINATION_NAME),\n\"/\");\nif (source != null) {\nreturn source;\n}\nreturn messagingSystem;\n}\nprivate static boolean isAzureSdkMessaging(String messagingSystem) {\nreturn \"Microsoft.EventHub\".equals(messagingSystem)\n|| \"Microsoft.ServiceBus\".equals(messagingSystem);\n}\nprivate static String getOperationName(SpanData span) {\nString operationName = span.getAttributes().get(AiSemanticAttributes.OPERATION_NAME);\nif (operationName != null) {\nreturn operationName;\n}\nreturn span.getName();\n}\nprivate static String nullAwareConcat(\n@Nullable String str1, @Nullable String str2, String separator) {\nif (str1 == null) {\nreturn str2;\n}\nif (str2 == null) {\nreturn str1;\n}\nreturn str1 + separator + str2;\n}\nprivate void exportEvents(\nSpanData span,\n@Nullable String operationName,\nlong itemCount,\nConsumer consumer) {\nfor (EventData event : span.getEvents()) {\nString instrumentationScopeName = span.getInstrumentationScopeInfo().getName();\nif (eventSuppressor.test(event, instrumentationScopeName)) {\ncontinue;\n}\nif (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null\n|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {\nSpanContext parentSpanContext = span.getParentSpanContext();\nif (!parentSpanContext.isValid() || parentSpanContext.isRemote()) {\nString stacktrace = event.getAttributes().get(SemanticAttributes.EXCEPTION_STACKTRACE);\nif (stacktrace != null && !shouldSuppress.test(span, event)) {\nconsumer.accept(\ncreateExceptionTelemetryItem(stacktrace, span, operationName, itemCount));\n}\n}\nreturn;\n}\nMessageTelemetryBuilder telemetryBuilder = MessageTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getSpanId());\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n} else 
{\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nsetTime(telemetryBuilder, event.getEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(event.getAttributes(), telemetryBuilder);\ntelemetryBuilder.setMessage(event.getName());\nconsumer.accept(telemetryBuilder.build());\n}\n}\nprivate TelemetryItem createExceptionTelemetryItem(\nString errorStack, SpanData span, @Nullable String operationName, long itemCount) {\nExceptionTelemetryBuilder telemetryBuilder = ExceptionTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getSpanId());\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n} else {\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nsetTime(telemetryBuilder, span.getEndEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(span.getAttributes(), telemetryBuilder);\ntelemetryBuilder.setExceptions(Exceptions.minimalParse(errorStack));\nreturn telemetryBuilder.build();\n}\npublic static T getStableAttribute(Attributes attributes, AttributeKey stable, AttributeKey old) {\nT value = attributes.get(stable);\nif (value != null) {\nreturn value;\n}\nreturn attributes.get(old);\n}\nprivate static void setTime(AbstractTelemetryBuilder telemetryBuilder, long epochNanos) {\ntelemetryBuilder.setTime(FormattedTime.offSetDateTimeFromEpochNanos(epochNanos));\n}\nprivate static void setItemCount(AbstractTelemetryBuilder telemetryBuilder, long itemCount) {\nif (itemCount != 1) {\ntelemetryBuilder.setSampleRate(100.0f / itemCount);\n}\n}\nprivate static long getItemCount(SpanData span) {\nLong itemCount = span.getAttributes().get(AiSemanticAttributes.ITEM_COUNT);\nreturn itemCount == null ? 1 : itemCount;\n}\nprivate static void addLinks(AbstractTelemetryBuilder telemetryBuilder, List links) {\nif (links.isEmpty()) {\nreturn;\n}\nStringBuilder sb = new StringBuilder();\nsb.append(\"[\");\nboolean first = true;\nfor (LinkData link : links) {\nif (!first) {\nsb.append(\",\");\n}\nsb.append(\"{\\\"operation_Id\\\":\\\"\");\nsb.append(link.getSpanContext().getTraceId());\nsb.append(\"\\\",\\\"id\\\":\\\"\");\nsb.append(link.getSpanContext().getSpanId());\nsb.append(\"\\\"}\");\nfirst = false;\n}\nsb.append(\"]\");\ntelemetryBuilder.addProperty(\"_MS.links\", sb.toString());\n}\nstatic void applyCommonTags(MappingsBuilder mappingsBuilder) {\nmappingsBuilder\n.exact(\nSemanticAttributes.ENDUSER_ID.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_USER_ID.toString(), (String) value);\n}\n})\n.exact(\nAiSemanticAttributes.PREVIEW_APPLICATION_VERSION.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(\nContextTagKeys.AI_APPLICATION_VER.toString(), (String) value);\n}\n});\napplyConnectionStringAndRoleNameOverrides(mappingsBuilder);\n}\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger connectionStringAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. 
Please use\"\n+ \" \\\"connectionStringOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger roleNameAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"roleNameOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger roleInstanceAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please reach out to\"\n+ \" https:\n+ \" case for this.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger instrumentationKeyAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"connectionStringOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nstatic void applyConnectionStringAndRoleNameOverrides(MappingsBuilder mappingsBuilder) {\nmappingsBuilder\n.exact(\nAiSemanticAttributes.INTERNAL_CONNECTION_STRING.getKey(),\n(telemetryBuilder, value) -> {\ntelemetryBuilder.setConnectionString(ConnectionString.parse((String) value));\n})\n.exact(\nAiSemanticAttributes.INTERNAL_ROLE_NAME.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_CLOUD_ROLE.toString(), (String) value);\n}\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey(),\n(telemetryBuilder, value) -> {\nconnectionStringAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey(),\n(telemetryBuilder, value) -> {\nroleNameAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey(),\n(telemetryBuilder, value) -> {\nroleInstanceAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey(),\n(telemetryBuilder, value) -> {\ninstrumentationKeyAttributeNoLongerSupported.recordWarning();\n});\n}\n}", + "context_after": "class SpanDataMapper {\npublic static final String MS_PROCESSED_BY_METRIC_EXTRACTORS = \"_MS.ProcessedByMetricExtractors\";\nprivate static final Set SQL_DB_SYSTEMS =\nnew HashSet<>(\nasList(\nSemanticAttributes.DbSystemValues.DB2,\nSemanticAttributes.DbSystemValues.DERBY,\nSemanticAttributes.DbSystemValues.MARIADB,\nSemanticAttributes.DbSystemValues.MSSQL,\nSemanticAttributes.DbSystemValues.MYSQL,\nSemanticAttributes.DbSystemValues.ORACLE,\nSemanticAttributes.DbSystemValues.POSTGRESQL,\nSemanticAttributes.DbSystemValues.SQLITE,\nSemanticAttributes.DbSystemValues.OTHER_SQL,\nSemanticAttributes.DbSystemValues.HSQLDB,\nSemanticAttributes.DbSystemValues.H2));\nprivate static final String COSMOS = \"Cosmos\";\nprivate static final Mappings MAPPINGS;\nprivate static final ContextTagKeys AI_DEVICE_OS = ContextTagKeys.fromString(\"ai.device.os\");\nstatic {\nMappingsBuilder mappingsBuilder =\nnew 
MappingsBuilder(SPAN)\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_NAMESPACE.getKey())\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION.getKey())\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME.getKey())\n.ignoreExact(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS.getKey())\n.ignoreExact(AiSemanticAttributes.KAFKA_OFFSET.getKey())\n.exact(\nSemanticAttributes.USER_AGENT_ORIGINAL.getKey(),\n(builder, value) -> {\nif (value instanceof String) {\nbuilder.addTag(\"ai.user.userAgent\", (String) value);\n}\n})\n.ignorePrefix(\"applicationinsights.internal.\")\n.prefix(\n\"http.request.header.\",\n(telemetryBuilder, key, value) -> {\nif (value instanceof List) {\ntelemetryBuilder.addProperty(key, Mappings.join((List) value));\n}\n})\n.prefix(\n\"http.response.header.\",\n(telemetryBuilder, key, value) -> {\nif (value instanceof List) {\ntelemetryBuilder.addProperty(key, Mappings.join((List) value));\n}\n});\napplyCommonTags(mappingsBuilder);\nMAPPINGS = mappingsBuilder.build();\n}\nprivate final boolean captureHttpServer4xxAsError;\nprivate final BiConsumer telemetryInitializer;\nprivate final BiPredicate eventSuppressor;\nprivate final BiPredicate shouldSuppress;\npublic SpanDataMapper(\nboolean captureHttpServer4xxAsError,\nBiConsumer telemetryInitializer,\nBiPredicate eventSuppressor,\nBiPredicate shouldSuppress) {\nthis.captureHttpServer4xxAsError = captureHttpServer4xxAsError;\nthis.telemetryInitializer = telemetryInitializer;\nthis.eventSuppressor = eventSuppressor;\nthis.shouldSuppress = shouldSuppress;\n}\npublic TelemetryItem map(SpanData span) {\nlong itemCount = getItemCount(span);\nreturn map(span, itemCount);\n}\npublic void map(SpanData span, Consumer consumer) {\nlong itemCount = getItemCount(span);\nTelemetryItem telemetryItem = map(span, itemCount);\nconsumer.accept(telemetryItem);\nexportEvents(\nspan,\ntelemetryItem.getTags().get(ContextTagKeys.AI_OPERATION_NAME.toString()),\nitemCount,\nconsumer);\n}\npublic TelemetryItem map(SpanData span, long itemCount) {\nif (RequestChecker.isRequest(span)) {\nreturn exportRequest(span, itemCount);\n} else {\nreturn exportRemoteDependency(span, span.getKind() == SpanKind.INTERNAL, itemCount);\n}\n}\nprivate static boolean checkIsPreAggregatedStandardMetric(SpanData span) {\nBoolean isPreAggregatedStandardMetric =\nspan.getAttributes().get(AiSemanticAttributes.IS_PRE_AGGREGATED);\nreturn isPreAggregatedStandardMetric != null && isPreAggregatedStandardMetric;\n}\nprivate TelemetryItem exportRemoteDependency(SpanData span, boolean inProc, long itemCount) {\nRemoteDependencyTelemetryBuilder telemetryBuilder = RemoteDependencyTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationTags(telemetryBuilder, span);\nsetTime(telemetryBuilder, span.getStartEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(span.getAttributes(), telemetryBuilder);\naddLinks(telemetryBuilder, span.getLinks());\ntelemetryBuilder.setId(span.getSpanId());\ntelemetryBuilder.setName(getDependencyName(span));\ntelemetryBuilder.setDuration(\nFormattedDuration.fromNanos(span.getEndEpochNanos() - span.getStartEpochNanos()));\ntelemetryBuilder.setSuccess(getSuccess(span));\nif (inProc) {\ntelemetryBuilder.setType(\"InProc\");\n} else {\napplySemanticConventions(telemetryBuilder, span);\n}\nif (checkIsPreAggregatedStandardMetric(span)) {\ntelemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, \"True\");\n}\nreturn telemetryBuilder.build();\n}\nprivate static final Set 
DEFAULT_HTTP_SPAN_NAMES =\nnew HashSet<>(\nasList(\"OPTIONS\", \"GET\", \"HEAD\", \"POST\", \"PUT\", \"DELETE\", \"TRACE\", \"CONNECT\", \"PATCH\"));\nprivate static String getDependencyName(SpanData span) {\nString name = span.getName();\nString method = getStableOrOldAttribute(span.getAttributes(), SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);\nif (method == null) {\nreturn name;\n}\nif (!DEFAULT_HTTP_SPAN_NAMES.contains(name)) {\nreturn name;\n}\nString url = getStableOrOldAttribute(span.getAttributes(), SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);\nif (url == null) {\nreturn name;\n}\nString path = UrlParser.getPath(url);\nif (path == null) {\nreturn name;\n}\nreturn path.isEmpty() ? method + \" /\" : method + \" \" + path;\n}\nprivate static void applySemanticConventions(\nRemoteDependencyTelemetryBuilder telemetryBuilder, SpanData span) {\nAttributes attributes = span.getAttributes();\nString httpMethod = getStableOrOldAttribute(attributes, SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);\nif (httpMethod != null) {\napplyHttpClientSpan(telemetryBuilder, attributes);\nreturn;\n}\nString rpcSystem = attributes.get(SemanticAttributes.RPC_SYSTEM);\nif (rpcSystem != null) {\napplyRpcClientSpan(telemetryBuilder, rpcSystem, attributes);\nreturn;\n}\nString dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);\nif (dbSystem == null) {\ndbSystem = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_TYPE);\n}\nif (dbSystem != null) {\napplyDatabaseClientSpan(telemetryBuilder, dbSystem, attributes);\nreturn;\n}\nString messagingSystem = getMessagingSystem(attributes);\nif (messagingSystem != null) {\napplyMessagingClientSpan(telemetryBuilder, span.getKind(), messagingSystem, attributes);\nreturn;\n}\nString target = getTargetOrDefault(attributes, Integer.MAX_VALUE, null);\nif (target != null) {\ntelemetryBuilder.setTarget(target);\nreturn;\n}\ntelemetryBuilder.setType(\"InProc\");\n}\n@Nullable\nprivate static String getMessagingSystem(Attributes attributes) {\nString azureNamespace = attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE);\nif (isAzureSdkMessaging(azureNamespace)) {\nreturn azureNamespace;\n}\nreturn attributes.get(SemanticAttributes.MESSAGING_SYSTEM);\n}\nprivate static void setOperationTags(AbstractTelemetryBuilder telemetryBuilder, SpanData span) {\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getParentSpanContext().getSpanId());\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nprivate static void setOperationId(AbstractTelemetryBuilder telemetryBuilder, String traceId) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), traceId);\n}\nprivate static void setOperationParentId(\nAbstractTelemetryBuilder telemetryBuilder, String parentSpanId) {\nif (SpanId.isValid(parentSpanId)) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);\n}\n}\nprivate static void setOperationName(\nAbstractTelemetryBuilder telemetryBuilder, Attributes attributes) {\nString operationName = attributes.get(AiSemanticAttributes.OPERATION_NAME);\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n}\n}\nprivate static void setOperationName(\nAbstractTelemetryBuilder telemetryBuilder, String operationName) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);\n}\nprivate static void applyHttpClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, 
Attributes attributes) {\nString httpUrl = getStableOrOldAttribute(attributes, SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);\nint defaultPort = getDefaultPortForHttpUrl(httpUrl);\nString target = getTargetOrDefault(attributes, defaultPort, \"Http\");\ntelemetryBuilder.setType(\"Http\");\ntelemetryBuilder.setTarget(target);\nLong httpStatusCode = getStableOrOldAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nif (httpStatusCode != null) {\ntelemetryBuilder.setResultCode(Long.toString(httpStatusCode));\n} else {\ntelemetryBuilder.setResultCode(\"0\");\n}\ntelemetryBuilder.setData(httpUrl);\n}\nprivate static void applyRpcClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, String rpcSystem, Attributes attributes) {\ntelemetryBuilder.setType(rpcSystem);\nString target = getTargetOrDefault(attributes, Integer.MAX_VALUE, rpcSystem);\ntelemetryBuilder.setTarget(target);\n}\nprivate static int getDefaultPortForHttpUrl(@Nullable String httpUrl) {\nif (httpUrl == null) {\nreturn Integer.MAX_VALUE;\n}\nif (httpUrl.startsWith(\"https://\")) {\nreturn 443;\n}\nif (httpUrl.startsWith(\"http://\")) {\nreturn 80;\n}\nreturn Integer.MAX_VALUE;\n}\npublic static String getTargetOrDefault(\nAttributes attributes, int defaultPort, String defaultTarget) {\nString target = getTargetOrNullStableSemconv(attributes, defaultPort);\nif (target != null) {\nreturn target;\n}\ntarget = getTargetOrNullOldSemconv(attributes, defaultPort);\nif (target != null) {\nreturn target;\n}\nreturn defaultTarget;\n}\n@Nullable\nprivate static String getTargetOrNullStableSemconv(Attributes attributes, int defaultPort) {\nString peerService = attributes.get(SemanticAttributes.PEER_SERVICE);\nif (peerService != null) {\nreturn peerService;\n}\nString host = attributes.get(SemanticAttributes.SERVER_ADDRESS);\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.SERVER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nreturn null;\n}\n@Nullable\nprivate static String getTargetOrNullOldSemconv(Attributes attributes, int defaultPort) {\nString peerService = attributes.get(SemanticAttributes.PEER_SERVICE);\nif (peerService != null) {\nreturn peerService;\n}\nString host = attributes.get(SemanticAttributes.NET_PEER_NAME);\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.NET_PEER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nhost = attributes.get(SemanticAttributes.NET_SOCK_PEER_NAME);\nif (host == null) {\nhost = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);\n}\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.NET_SOCK_PEER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nString httpUrl = attributes.get(SemanticAttributes.HTTP_URL);\nif (httpUrl != null) {\nreturn UrlParser.getTarget(httpUrl);\n}\nreturn null;\n}\nprivate static String getTarget(String host, @Nullable Long port, int defaultPort) {\nif (port != null && port != defaultPort) {\nreturn host + \":\" + port;\n} else {\nreturn host;\n}\n}\nprivate static void applyDatabaseClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, String dbSystem, Attributes attributes) {\nString dbStatement = attributes.get(SemanticAttributes.DB_STATEMENT);\nif (dbStatement == null) {\ndbStatement = attributes.get(SemanticAttributes.DB_OPERATION);\n}\nString type;\nif (SQL_DB_SYSTEMS.contains(dbSystem)) {\nif (dbSystem.equals(SemanticAttributes.DbSystemValues.MYSQL)) {\ntype = \"mysql\";\n} else if 
(dbSystem.equals(SemanticAttributes.DbSystemValues.POSTGRESQL)) {\ntype = \"postgresql\";\n} else {\ntype = \"SQL\";\n}\n} else if (dbSystem.equals(COSMOS)) {\ntype = \"Microsoft.DocumentDb\";\n} else {\ntype = dbSystem;\n}\ntelemetryBuilder.setType(type);\ntelemetryBuilder.setData(dbStatement);\nString target;\nString dbName;\nif (dbSystem.equals(COSMOS)) {\nString dbUrl = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_URL);\nif (dbUrl != null) {\ntarget = UrlParser.getTarget(dbUrl);\n} else {\ntarget = null;\n}\ndbName = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_INSTANCE);\n} else {\ntarget = getTargetOrDefault(attributes, getDefaultPortForDbSystem(dbSystem), dbSystem);\ndbName = attributes.get(SemanticAttributes.DB_NAME);\n}\ntarget = nullAwareConcat(target, dbName, \" | \");\nif (target == null) {\ntarget = dbSystem;\n}\ntelemetryBuilder.setTarget(target);\n}\nprivate static void applyMessagingClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder,\nSpanKind spanKind,\nString messagingSystem,\nAttributes attributes) {\nif (spanKind == SpanKind.PRODUCER) {\ntelemetryBuilder.setType(\"Queue Message | \" + messagingSystem);\n} else {\ntelemetryBuilder.setType(messagingSystem);\n}\ntelemetryBuilder.setTarget(getMessagingTargetSource(attributes));\n}\nprivate static int getDefaultPortForDbSystem(String dbSystem) {\nswitch (dbSystem) {\ncase SemanticAttributes.DbSystemValues.MONGODB:\nreturn 27017;\ncase SemanticAttributes.DbSystemValues.CASSANDRA:\nreturn 9042;\ncase SemanticAttributes.DbSystemValues.REDIS:\nreturn 6379;\ncase SemanticAttributes.DbSystemValues.MARIADB:\ncase SemanticAttributes.DbSystemValues.MYSQL:\nreturn 3306;\ncase SemanticAttributes.DbSystemValues.MSSQL:\nreturn 1433;\ncase SemanticAttributes.DbSystemValues.DB2:\nreturn 50000;\ncase SemanticAttributes.DbSystemValues.ORACLE:\nreturn 1521;\ncase SemanticAttributes.DbSystemValues.H2:\nreturn 8082;\ncase SemanticAttributes.DbSystemValues.DERBY:\nreturn 1527;\ncase SemanticAttributes.DbSystemValues.POSTGRESQL:\nreturn 5432;\ndefault:\nreturn Integer.MAX_VALUE;\n}\n}\nprivate TelemetryItem exportRequest(SpanData span, long itemCount) {\nRequestTelemetryBuilder telemetryBuilder = RequestTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nAttributes attributes = span.getAttributes();\nlong startEpochNanos = span.getStartEpochNanos();\ntelemetryBuilder.setId(span.getSpanId());\nsetTime(telemetryBuilder, startEpochNanos);\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(attributes, telemetryBuilder);\naddLinks(telemetryBuilder, span.getLinks());\nString operationName = getOperationName(span);\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());\nString aiLegacyParentId = span.getAttributes().get(AiSemanticAttributes.LEGACY_PARENT_ID);\nif (aiLegacyParentId != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);\n} else if (span.getParentSpanContext().isValid()) {\ntelemetryBuilder.addTag(\nContextTagKeys.AI_OPERATION_PARENT_ID.toString(),\nspan.getParentSpanContext().getSpanId());\n}\nString aiLegacyRootId = span.getAttributes().get(AiSemanticAttributes.LEGACY_ROOT_ID);\nif (aiLegacyRootId != null) {\ntelemetryBuilder.addTag(\"ai_legacyRootID\", aiLegacyRootId);\n}\ntelemetryBuilder.setName(operationName);\ntelemetryBuilder.setDuration(\nFormattedDuration.fromNanos(span.getEndEpochNanos() - 
startEpochNanos));\ntelemetryBuilder.setSuccess(getSuccess(span));\nString httpUrl = getHttpUrlFromServerSpan(attributes);\nif (httpUrl != null) {\ntelemetryBuilder.setUrl(httpUrl);\n}\nLong httpStatusCode = getStableOrOldAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nif (httpStatusCode == null) {\nhttpStatusCode = attributes.get(SemanticAttributes.RPC_GRPC_STATUS_CODE);\n}\nif (httpStatusCode != null) {\ntelemetryBuilder.setResponseCode(Long.toString(httpStatusCode));\n} else {\ntelemetryBuilder.setResponseCode(\"0\");\n}\nString locationIp = getStableOrOldAttribute(attributes, SemanticAttributes.CLIENT_ADDRESS, SemanticAttributes.HTTP_CLIENT_IP);\nif (locationIp == null) {\nlocationIp = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);\n}\nif (locationIp != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_LOCATION_IP.toString(), locationIp);\n}\ntelemetryBuilder.setSource(getSource(attributes));\nString sessionId = attributes.get(AiSemanticAttributes.SESSION_ID);\nif (sessionId != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_SESSION_ID.toString(), sessionId);\n}\nString deviceOs = attributes.get(AiSemanticAttributes.DEVICE_OS);\nif (deviceOs != null) {\ntelemetryBuilder.addTag(AI_DEVICE_OS.toString(), deviceOs);\n}\nString deviceOsVersion = attributes.get(AiSemanticAttributes.DEVICE_OS_VERSION);\nif (deviceOsVersion != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_DEVICE_OS_VERSION.toString(), deviceOsVersion);\n}\nif (checkIsPreAggregatedStandardMetric(span)) {\ntelemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, \"True\");\n}\nLong enqueuedTime = attributes.get(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME);\nif (enqueuedTime != null) {\nlong timeSinceEnqueuedMillis =\nMath.max(\n0L, NANOSECONDS.toMillis(span.getStartEpochNanos()) - SECONDS.toMillis(enqueuedTime));\ntelemetryBuilder.addMeasurement(\"timeSinceEnqueued\", (double) timeSinceEnqueuedMillis);\n}\nLong timeSinceEnqueuedMillis = attributes.get(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS);\nif (timeSinceEnqueuedMillis != null) {\ntelemetryBuilder.addMeasurement(\"timeSinceEnqueued\", (double) timeSinceEnqueuedMillis);\n}\nreturn telemetryBuilder.build();\n}\nprivate boolean getSuccess(SpanData span) {\nswitch (span.getStatus().getStatusCode()) {\ncase ERROR:\nreturn false;\ncase OK:\nreturn true;\ncase UNSET:\nif (captureHttpServer4xxAsError) {\nLong statusCode = getStableOrOldAttribute(span.getAttributes(), SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nreturn statusCode == null || statusCode < 400;\n}\nreturn true;\n}\nreturn true;\n}\n@Nullable\npublic static String getHttpUrlFromServerSpan(Attributes attributes) {\nString httpUrl = getHttpUrlFromServerSpanStableSemconv(attributes);\nif (httpUrl != null) {\nreturn httpUrl;\n}\nreturn getHttpUrlFromServerSpanOldSemconv(attributes);\n}\n@Nullable\nprivate static String getHttpUrlFromServerSpanOldSemconv(Attributes attributes) {\nString httpUrl = attributes.get(SemanticAttributes.HTTP_URL);\nif (httpUrl != null) {\nreturn httpUrl;\n}\nString scheme = attributes.get(SemanticAttributes.HTTP_SCHEME);\nif (scheme == null) {\nreturn null;\n}\nString target = attributes.get(SemanticAttributes.HTTP_TARGET);\nif (target == null) {\nreturn null;\n}\nString host = attributes.get(SemanticAttributes.NET_HOST_NAME);\nLong port = attributes.get(SemanticAttributes.NET_HOST_PORT);\nif (port != null && port > 0) {\nreturn scheme + \"://\" + host + \":\" + port + target;\n}\nreturn scheme + \"://\" + host + target;\n}\n@Nullable\nprivate static String getSource(Attributes attributes) {\nString source = attributes.get(AiSemanticAttributes.SPAN_SOURCE);\nif (source != null) {\nreturn source;\n}\nreturn getMessagingTargetSource(attributes);\n}\n@Nullable\nprivate static String getMessagingTargetSource(Attributes attributes) {\nif (isAzureSdkMessaging(attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE))) {\nString peerAddress = attributes.get(AiSemanticAttributes.AZURE_SDK_PEER_ADDRESS);\nif (peerAddress != null) {\nString destination = attributes.get(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION);\nreturn peerAddress + \"/\" + destination;\n}\n}\nString messagingSystem = getMessagingSystem(attributes);\nif (messagingSystem == null) {\nreturn null;\n}\nString source =\nnullAwareConcat(\ngetTargetOrNullOldSemconv(attributes, Integer.MAX_VALUE),\nattributes.get(SemanticAttributes.MESSAGING_DESTINATION_NAME),\n\"/\");\nif (source != null) {\nreturn source;\n}\nreturn messagingSystem;\n}\nprivate static boolean isAzureSdkMessaging(String messagingSystem) {\nreturn \"Microsoft.EventHub\".equals(messagingSystem)\n|| \"Microsoft.ServiceBus\".equals(messagingSystem);\n}\nprivate static String getOperationName(SpanData span) {\nString operationName = span.getAttributes().get(AiSemanticAttributes.OPERATION_NAME);\nif (operationName != null) {\nreturn operationName;\n}\nreturn span.getName();\n}\nprivate static String nullAwareConcat(\n@Nullable String str1, @Nullable String str2, String separator) {\nif (str1 == null) {\nreturn str2;\n}\nif (str2 == null) {\nreturn str1;\n}\nreturn str1 + separator + str2;\n}\nprivate void exportEvents(\nSpanData span,\n@Nullable String operationName,\nlong itemCount,\nConsumer consumer) {\nfor (EventData event : span.getEvents()) {\nString instrumentationScopeName = span.getInstrumentationScopeInfo().getName();\nif (eventSuppressor.test(event, instrumentationScopeName)) {\ncontinue;\n}\nif (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null\n|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {\nSpanContext parentSpanContext = span.getParentSpanContext();\nif (!parentSpanContext.isValid() || parentSpanContext.isRemote()) {\nString stacktrace = event.getAttributes().get(SemanticAttributes.EXCEPTION_STACKTRACE);\nif (stacktrace != null && !shouldSuppress.test(span, event)) {\nconsumer.accept(\ncreateExceptionTelemetryItem(stacktrace, span, operationName, itemCount));\n}\n}\nreturn;\n}\nMessageTelemetryBuilder telemetryBuilder = MessageTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getSpanId());\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n} else {\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nsetTime(telemetryBuilder, event.getEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(event.getAttributes(), telemetryBuilder);\ntelemetryBuilder.setMessage(event.getName());\nconsumer.accept(telemetryBuilder.build());\n}\n}\nprivate TelemetryItem createExceptionTelemetryItem(\nString errorStack, SpanData span, @Nullable String operationName, long itemCount) {\nExceptionTelemetryBuilder telemetryBuilder = ExceptionTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, 
span.getSpanId());\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n} else {\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nsetTime(telemetryBuilder, span.getEndEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(span.getAttributes(), telemetryBuilder);\ntelemetryBuilder.setExceptions(Exceptions.minimalParse(errorStack));\nreturn telemetryBuilder.build();\n}\npublic static T getStableOrOldAttribute(Attributes attributes, AttributeKey stable, AttributeKey old) {\nT value = attributes.get(stable);\nif (value != null) {\nreturn value;\n}\nreturn attributes.get(old);\n}\nprivate static void setTime(AbstractTelemetryBuilder telemetryBuilder, long epochNanos) {\ntelemetryBuilder.setTime(FormattedTime.offSetDateTimeFromEpochNanos(epochNanos));\n}\nprivate static void setItemCount(AbstractTelemetryBuilder telemetryBuilder, long itemCount) {\nif (itemCount != 1) {\ntelemetryBuilder.setSampleRate(100.0f / itemCount);\n}\n}\nprivate static long getItemCount(SpanData span) {\nLong itemCount = span.getAttributes().get(AiSemanticAttributes.ITEM_COUNT);\nreturn itemCount == null ? 1 : itemCount;\n}\nprivate static void addLinks(AbstractTelemetryBuilder telemetryBuilder, List links) {\nif (links.isEmpty()) {\nreturn;\n}\nStringBuilder sb = new StringBuilder();\nsb.append(\"[\");\nboolean first = true;\nfor (LinkData link : links) {\nif (!first) {\nsb.append(\",\");\n}\nsb.append(\"{\\\"operation_Id\\\":\\\"\");\nsb.append(link.getSpanContext().getTraceId());\nsb.append(\"\\\",\\\"id\\\":\\\"\");\nsb.append(link.getSpanContext().getSpanId());\nsb.append(\"\\\"}\");\nfirst = false;\n}\nsb.append(\"]\");\ntelemetryBuilder.addProperty(\"_MS.links\", sb.toString());\n}\nstatic void applyCommonTags(MappingsBuilder mappingsBuilder) {\nmappingsBuilder\n.exact(\nSemanticAttributes.ENDUSER_ID.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_USER_ID.toString(), (String) value);\n}\n})\n.exact(\nAiSemanticAttributes.PREVIEW_APPLICATION_VERSION.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(\nContextTagKeys.AI_APPLICATION_VER.toString(), (String) value);\n}\n});\napplyConnectionStringAndRoleNameOverrides(mappingsBuilder);\n}\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger connectionStringAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"connectionStringOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger roleNameAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"roleNameOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger roleInstanceAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. 
Please reach out to\"\n+ \" https:\n+ \" case for this.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger instrumentationKeyAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"connectionStringOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nstatic void applyConnectionStringAndRoleNameOverrides(MappingsBuilder mappingsBuilder) {\nmappingsBuilder\n.exact(\nAiSemanticAttributes.INTERNAL_CONNECTION_STRING.getKey(),\n(telemetryBuilder, value) -> {\ntelemetryBuilder.setConnectionString(ConnectionString.parse((String) value));\n})\n.exact(\nAiSemanticAttributes.INTERNAL_ROLE_NAME.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_CLOUD_ROLE.toString(), (String) value);\n}\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey(),\n(telemetryBuilder, value) -> {\nconnectionStringAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey(),\n(telemetryBuilder, value) -> {\nroleNameAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey(),\n(telemetryBuilder, value) -> {\nroleInstanceAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey(),\n(telemetryBuilder, value) -> {\ninstrumentationKeyAttributeNoLongerSupported.recordWarning();\n});\n}\n}" + }, + { + "comment": "@gsmet This condition will pass when OT is enabled at runtime, but disabled at build time. There are steps that need to be done at build time (like creating additional bean you produced)", + "method_body": "public AgroalDataSource doCreateDataSource(String dataSourceName) {\nif (!dataSourceSupport.entries.containsKey(dataSourceName)) {\nthrow new IllegalArgumentException(\"No datasource named '\" + dataSourceName + \"' exists\");\n}\nDataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig = getDataSourceJdbcBuildTimeConfig(dataSourceName);\nDataSourceRuntimeConfig dataSourceRuntimeConfig = getDataSourceRuntimeConfig(dataSourceName);\nDataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig = getDataSourceJdbcRuntimeConfig(dataSourceName);\nDataSourceSupport.Entry matchingSupportEntry = dataSourceSupport.entries.get(dataSourceName);\nif (!dataSourceJdbcRuntimeConfig.url.isPresent()) {\nreturn new UnconfiguredDataSource(\nDataSourceUtil.dataSourcePropertyKey(dataSourceName, \"jdbc.url\") + \" has not been defined\");\n}\nloadDriversInTCCL();\nString resolvedDriverClass = matchingSupportEntry.resolvedDriverClass;\nClass driver;\ntry {\ndriver = Class.forName(resolvedDriverClass, true, Thread.currentThread().getContextClassLoader());\n} catch (ClassNotFoundException e) {\nthrow new RuntimeException(\n\"Unable to load the datasource driver \" + resolvedDriverClass + \" for datasource \" + dataSourceName, e);\n}\nString jdbcUrl = dataSourceJdbcRuntimeConfig.url.get();\nif (dataSourceJdbcBuildTimeConfig.tracing) {\nboolean tracingEnabled = dataSourceJdbcRuntimeConfig.tracing.enabled.orElse(dataSourceJdbcBuildTimeConfig.tracing);\nif (tracingEnabled) {\nString rootTracingUrl = !jdbcUrl.startsWith(JDBC_TRACING_URL_PREFIX)\n? 
jdbcUrl.replace(JDBC_URL_PREFIX, JDBC_TRACING_URL_PREFIX)\n: jdbcUrl;\nStringBuilder tracingURL = new StringBuilder(rootTracingUrl);\nif (dataSourceJdbcRuntimeConfig.tracing.traceWithActiveSpanOnly) {\nif (!tracingURL.toString().contains(\"?\")) {\ntracingURL.append(\"?\");\n}\ntracingURL.append(\"traceWithActiveSpanOnly=true\");\n}\nif (dataSourceJdbcRuntimeConfig.tracing.ignoreForTracing.isPresent()) {\nif (!tracingURL.toString().contains(\"?\")) {\ntracingURL.append(\"?\");\n}\nArrays.stream(dataSourceJdbcRuntimeConfig.tracing.ignoreForTracing.get().split(\";\"))\n.filter(query -> !query.isEmpty())\n.forEach(query -> tracingURL.append(\"ignoreForTracing=\")\n.append(query.replaceAll(\"\\\"\", \"\\\\\\\"\"))\n.append(\";\"));\n}\njdbcUrl = tracingURL.toString();\ndriver = null;\n}\n}\nString resolvedDbKind = matchingSupportEntry.resolvedDbKind;\nAgroalConnectionConfigurer agroalConnectionConfigurer = Arc.container()\n.instance(AgroalConnectionConfigurer.class, new JdbcDriverLiteral(resolvedDbKind))\n.orElse(new UnknownDbAgroalConnectionConfigurer());\nAgroalDataSourceConfigurationSupplier dataSourceConfiguration = new AgroalDataSourceConfigurationSupplier();\nif (!dataSourceJdbcRuntimeConfig.poolingEnabled) {\ndataSourceConfiguration.dataSourceImplementation(DataSourceImplementation.AGROAL_POOLLESS);\n}\nAgroalConnectionPoolConfigurationSupplier poolConfiguration = dataSourceConfiguration.connectionPoolConfiguration();\nAgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration = poolConfiguration\n.connectionFactoryConfiguration();\nboolean mpMetricsPresent = dataSourceSupport.mpMetricsPresent;\napplyNewConfiguration(dataSourceName, dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration,\ndriver, jdbcUrl,\ndataSourceJdbcBuildTimeConfig, dataSourceRuntimeConfig, dataSourceJdbcRuntimeConfig, transactionRuntimeConfig,\nmpMetricsPresent);\nif (dataSourceSupport.disableSslSupport) {\nagroalConnectionConfigurer.disableSslSupport(resolvedDbKind, dataSourceConfiguration);\n}\ntry {\nClass.forName(\"io.netty.util.concurrent.FastThreadLocal\", true, Thread.currentThread().getContextClassLoader());\ndataSourceConfiguration.connectionPoolConfiguration().connectionCache(new QuarkusNettyConnectionCache());\n} catch (ClassNotFoundException e) {\ndataSourceConfiguration.connectionPoolConfiguration().connectionCache(new QuarkusSimpleConnectionCache());\n}\nagroalConnectionConfigurer.setExceptionSorter(resolvedDbKind, dataSourceConfiguration);\nAgroalDataSourceConfiguration agroalConfiguration = dataSourceConfiguration.get();\nAgroalDataSource dataSource = new io.agroal.pool.DataSource(agroalConfiguration,\nnew AgroalEventLoggingListener(dataSourceName,\nagroalConfiguration.connectionPoolConfiguration()\n.transactionRequirement() == TransactionRequirement.WARN));\nlog.debugv(\"Started datasource {0} connected to {1}\", dataSourceName,\nagroalConfiguration.connectionPoolConfiguration().connectionFactoryConfiguration().jdbcUrl());\nCollection interceptorList = agroalPoolInterceptors\n.select(dataSourceName == null || DataSourceUtil.isDefault(dataSourceName)\n? 
Default.Literal.INSTANCE\n: new DataSource.DataSourceLiteral(dataSourceName))\n.stream().collect(Collectors.toList());\nif (!interceptorList.isEmpty()) {\ndataSource.setPoolInterceptors(interceptorList);\n}\nif (dataSourceJdbcRuntimeConfig.telemetry.orElse(dataSourceJdbcBuildTimeConfig.telemetry)\n&& agroalOpenTelemetryWrapper.isResolvable()) {\ndataSource = agroalOpenTelemetryWrapper.get().apply(dataSource);\n}\nreturn dataSource;\n}", + "target_code": "if (dataSourceJdbcRuntimeConfig.telemetry.orElse(dataSourceJdbcBuildTimeConfig.telemetry)", + "method_body_after": "public AgroalDataSource doCreateDataSource(String dataSourceName) {\nif (!dataSourceSupport.entries.containsKey(dataSourceName)) {\nthrow new IllegalArgumentException(\"No datasource named '\" + dataSourceName + \"' exists\");\n}\nDataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig = getDataSourceJdbcBuildTimeConfig(dataSourceName);\nDataSourceRuntimeConfig dataSourceRuntimeConfig = getDataSourceRuntimeConfig(dataSourceName);\nDataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig = getDataSourceJdbcRuntimeConfig(dataSourceName);\nDataSourceSupport.Entry matchingSupportEntry = dataSourceSupport.entries.get(dataSourceName);\nif (!dataSourceJdbcRuntimeConfig.url.isPresent()) {\nreturn new UnconfiguredDataSource(\nDataSourceUtil.dataSourcePropertyKey(dataSourceName, \"jdbc.url\") + \" has not been defined\");\n}\nloadDriversInTCCL();\nString resolvedDriverClass = matchingSupportEntry.resolvedDriverClass;\nClass driver;\ntry {\ndriver = Class.forName(resolvedDriverClass, true, Thread.currentThread().getContextClassLoader());\n} catch (ClassNotFoundException e) {\nthrow new RuntimeException(\n\"Unable to load the datasource driver \" + resolvedDriverClass + \" for datasource \" + dataSourceName, e);\n}\nString jdbcUrl = dataSourceJdbcRuntimeConfig.url.get();\nif (dataSourceJdbcBuildTimeConfig.tracing) {\nboolean tracingEnabled = dataSourceJdbcRuntimeConfig.tracing.enabled.orElse(dataSourceJdbcBuildTimeConfig.tracing);\nif (tracingEnabled) {\nString rootTracingUrl = !jdbcUrl.startsWith(JDBC_TRACING_URL_PREFIX)\n? 
jdbcUrl.replace(JDBC_URL_PREFIX, JDBC_TRACING_URL_PREFIX)\n: jdbcUrl;\nStringBuilder tracingURL = new StringBuilder(rootTracingUrl);\nif (dataSourceJdbcRuntimeConfig.tracing.traceWithActiveSpanOnly) {\nif (!tracingURL.toString().contains(\"?\")) {\ntracingURL.append(\"?\");\n}\ntracingURL.append(\"traceWithActiveSpanOnly=true\");\n}\nif (dataSourceJdbcRuntimeConfig.tracing.ignoreForTracing.isPresent()) {\nif (!tracingURL.toString().contains(\"?\")) {\ntracingURL.append(\"?\");\n}\nArrays.stream(dataSourceJdbcRuntimeConfig.tracing.ignoreForTracing.get().split(\";\"))\n.filter(query -> !query.isEmpty())\n.forEach(query -> tracingURL.append(\"ignoreForTracing=\")\n.append(query.replaceAll(\"\\\"\", \"\\\\\\\"\"))\n.append(\";\"));\n}\njdbcUrl = tracingURL.toString();\ndriver = null;\n}\n}\nString resolvedDbKind = matchingSupportEntry.resolvedDbKind;\nAgroalConnectionConfigurer agroalConnectionConfigurer = Arc.container()\n.instance(AgroalConnectionConfigurer.class, new JdbcDriverLiteral(resolvedDbKind))\n.orElse(new UnknownDbAgroalConnectionConfigurer());\nAgroalDataSourceConfigurationSupplier dataSourceConfiguration = new AgroalDataSourceConfigurationSupplier();\nif (!dataSourceJdbcRuntimeConfig.poolingEnabled) {\ndataSourceConfiguration.dataSourceImplementation(DataSourceImplementation.AGROAL_POOLLESS);\n}\nAgroalConnectionPoolConfigurationSupplier poolConfiguration = dataSourceConfiguration.connectionPoolConfiguration();\nAgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration = poolConfiguration\n.connectionFactoryConfiguration();\nboolean mpMetricsPresent = dataSourceSupport.mpMetricsPresent;\napplyNewConfiguration(dataSourceName, dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration,\ndriver, jdbcUrl,\ndataSourceJdbcBuildTimeConfig, dataSourceRuntimeConfig, dataSourceJdbcRuntimeConfig, transactionRuntimeConfig,\nmpMetricsPresent);\nif (dataSourceSupport.disableSslSupport) {\nagroalConnectionConfigurer.disableSslSupport(resolvedDbKind, dataSourceConfiguration);\n}\ntry {\nClass.forName(\"io.netty.util.concurrent.FastThreadLocal\", true, Thread.currentThread().getContextClassLoader());\ndataSourceConfiguration.connectionPoolConfiguration().connectionCache(new QuarkusNettyConnectionCache());\n} catch (ClassNotFoundException e) {\ndataSourceConfiguration.connectionPoolConfiguration().connectionCache(new QuarkusSimpleConnectionCache());\n}\nagroalConnectionConfigurer.setExceptionSorter(resolvedDbKind, dataSourceConfiguration);\nAgroalDataSourceConfiguration agroalConfiguration = dataSourceConfiguration.get();\nAgroalDataSource dataSource = new io.agroal.pool.DataSource(agroalConfiguration,\nnew AgroalEventLoggingListener(dataSourceName,\nagroalConfiguration.connectionPoolConfiguration()\n.transactionRequirement() == TransactionRequirement.WARN));\nlog.debugv(\"Started datasource {0} connected to {1}\", dataSourceName,\nagroalConfiguration.connectionPoolConfiguration().connectionFactoryConfiguration().jdbcUrl());\nCollection interceptorList = agroalPoolInterceptors\n.select(dataSourceName == null || DataSourceUtil.isDefault(dataSourceName)\n? 
Default.Literal.INSTANCE\n: new DataSource.DataSourceLiteral(dataSourceName))\n.stream().collect(Collectors.toList());\nif (!interceptorList.isEmpty()) {\ndataSource.setPoolInterceptors(interceptorList);\n}\nif (dataSourceJdbcBuildTimeConfig.telemetry && dataSourceJdbcRuntimeConfig.telemetry.orElse(true)) {\ndataSource = agroalOpenTelemetryWrapper.get().apply(dataSource);\n}\nreturn dataSource;\n}", + "context_before": "class DataSources {\nprivate static final Logger log = Logger.getLogger(DataSources.class.getName());\npublic static final String TRACING_DRIVER_CLASSNAME = \"io.opentracing.contrib.jdbc.TracingDriver\";\nprivate static final String JDBC_URL_PREFIX = \"jdbc:\";\nprivate static final String JDBC_TRACING_URL_PREFIX = \"jdbc:tracing:\";\nprivate final DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig;\nprivate final DataSourcesRuntimeConfig dataSourcesRuntimeConfig;\nprivate final DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig;\nprivate final DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig;\nprivate final TransactionManagerConfiguration transactionRuntimeConfig;\nprivate final TransactionManager transactionManager;\nprivate final XAResourceRecoveryRegistry xaResourceRecoveryRegistry;\nprivate final TransactionSynchronizationRegistry transactionSynchronizationRegistry;\nprivate final DataSourceSupport dataSourceSupport;\nprivate final Instance agroalPoolInterceptors;\nprivate final Instance agroalOpenTelemetryWrapper;\nprivate final ConcurrentMap dataSources = new ConcurrentHashMap<>();\npublic DataSources(DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig,\nDataSourcesRuntimeConfig dataSourcesRuntimeConfig, DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig,\nDataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig,\nTransactionManagerConfiguration transactionRuntimeConfig,\nTransactionManager transactionManager,\nXAResourceRecoveryRegistry xaResourceRecoveryRegistry,\nTransactionSynchronizationRegistry transactionSynchronizationRegistry,\nDataSourceSupport dataSourceSupport,\n@Any Instance agroalPoolInterceptors,\nInstance agroalOpenTelemetryWrapper) {\nthis.dataSourcesBuildTimeConfig = dataSourcesBuildTimeConfig;\nthis.dataSourcesRuntimeConfig = dataSourcesRuntimeConfig;\nthis.dataSourcesJdbcBuildTimeConfig = dataSourcesJdbcBuildTimeConfig;\nthis.dataSourcesJdbcRuntimeConfig = dataSourcesJdbcRuntimeConfig;\nthis.transactionRuntimeConfig = transactionRuntimeConfig;\nthis.transactionManager = transactionManager;\nthis.xaResourceRecoveryRegistry = xaResourceRecoveryRegistry;\nthis.transactionSynchronizationRegistry = transactionSynchronizationRegistry;\nthis.dataSourceSupport = dataSourceSupport;\nthis.agroalPoolInterceptors = agroalPoolInterceptors;\nthis.agroalOpenTelemetryWrapper = agroalOpenTelemetryWrapper;\n}\n/**\n* Meant to be used from recorders that create synthetic beans that need access to {@code Datasource}.\n* In such using {@code Arc.container.instance(DataSource.class)} is not possible because\n* {@code Datasource} is itself a synthetic bean.\n*
\n* This method relies on the fact that {@code DataSources} should - given the same input -\n* always return the same {@code AgroalDataSource} no matter how many times it is invoked\n* (which makes sense because {@code DataSource} is a {@code Singleton} bean).\n*
\n* This method is thread-safe\n*/\npublic static AgroalDataSource fromName(String dataSourceName) {\nreturn Arc.container().instance(DataSources.class).get()\n.getDataSource(dataSourceName);\n}\npublic AgroalDataSource getDataSource(String dataSourceName) {\nreturn dataSources.computeIfAbsent(dataSourceName, new Function() {\n@Override\npublic AgroalDataSource apply(String s) {\nreturn doCreateDataSource(s);\n}\n});\n}\n@SuppressWarnings(\"resource\")\nprivate void applyNewConfiguration(String dataSourceName, AgroalDataSourceConfigurationSupplier dataSourceConfiguration,\nAgroalConnectionPoolConfigurationSupplier poolConfiguration,\nAgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration, Class driver, String jdbcUrl,\nDataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig, DataSourceRuntimeConfig dataSourceRuntimeConfig,\nDataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig, TransactionManagerConfiguration transactionRuntimeConfig,\nboolean mpMetricsPresent) {\nconnectionFactoryConfiguration.jdbcUrl(jdbcUrl);\nconnectionFactoryConfiguration.connectionProviderClass(driver);\nconnectionFactoryConfiguration.trackJdbcResources(dataSourceJdbcRuntimeConfig.detectStatementLeaks);\nif (dataSourceJdbcRuntimeConfig.transactionIsolationLevel.isPresent()) {\nconnectionFactoryConfiguration\n.jdbcTransactionIsolation(\ndataSourceJdbcRuntimeConfig.transactionIsolationLevel.get());\n}\nif (dataSourceJdbcBuildTimeConfig.transactions != io.quarkus.agroal.runtime.TransactionIntegration.DISABLED) {\nTransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager,\ntransactionSynchronizationRegistry, null, false,\ndataSourceJdbcBuildTimeConfig.transactions == io.quarkus.agroal.runtime.TransactionIntegration.XA\n&& transactionRuntimeConfig.enableRecovery\n? xaResourceRecoveryRegistry\n: null);\nif (dataSourceJdbcBuildTimeConfig.transactions == io.quarkus.agroal.runtime.TransactionIntegration.XA\n&& !transactionRuntimeConfig.enableRecovery) {\nlog.warnv(\n\"Datasource {0} enables XA but transaction recovery is not enabled. 
Please enable transaction recovery by setting quarkus.transaction-manager.enable-recovery=true, otherwise data may be lost if the application is terminated abruptly\",\ndataSourceName);\n}\npoolConfiguration.transactionIntegration(txIntegration);\n}\nif (dataSourceJdbcRuntimeConfig.newConnectionSql.isPresent()) {\nconnectionFactoryConfiguration.initialSql(dataSourceJdbcRuntimeConfig.newConnectionSql.get());\n}\nif (dataSourceJdbcBuildTimeConfig.enableMetrics.isPresent()) {\ndataSourceConfiguration.metricsEnabled(dataSourceJdbcBuildTimeConfig.enableMetrics.get());\n} else {\ndataSourceConfiguration.metricsEnabled(dataSourcesBuildTimeConfig.metricsEnabled && mpMetricsPresent);\n}\nif (dataSourceRuntimeConfig.username.isPresent()) {\nNamePrincipal username = new NamePrincipal(dataSourceRuntimeConfig.username.get());\nconnectionFactoryConfiguration\n.principal(username).recoveryPrincipal(username);\n}\nif (dataSourceRuntimeConfig.password.isPresent()) {\nSimplePassword password = new SimplePassword(dataSourceRuntimeConfig.password.get());\nconnectionFactoryConfiguration\n.credential(password).recoveryCredential(password);\n}\nif (dataSourceRuntimeConfig.credentialsProvider.isPresent()) {\nString beanName = dataSourceRuntimeConfig.credentialsProviderName.orElse(null);\nCredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName);\nString name = dataSourceRuntimeConfig.credentialsProvider.get();\nconnectionFactoryConfiguration\n.credential(new AgroalVaultCredentialsProviderPassword(name, credentialsProvider));\n}\nfor (Map.Entry entry : dataSourceJdbcRuntimeConfig.additionalJdbcProperties.entrySet()) {\nconnectionFactoryConfiguration.jdbcProperty(entry.getKey(), entry.getValue());\n}\npoolConfiguration.minSize(dataSourceJdbcRuntimeConfig.minSize);\npoolConfiguration.maxSize(dataSourceJdbcRuntimeConfig.maxSize);\nif (dataSourceJdbcRuntimeConfig.initialSize.isPresent() && dataSourceJdbcRuntimeConfig.initialSize.getAsInt() > 0) {\npoolConfiguration.initialSize(dataSourceJdbcRuntimeConfig.initialSize.getAsInt());\n}\npoolConfiguration.connectionValidator(ConnectionValidator.defaultValidator());\nif (dataSourceJdbcRuntimeConfig.acquisitionTimeout.isPresent()) {\npoolConfiguration.acquisitionTimeout(dataSourceJdbcRuntimeConfig.acquisitionTimeout.get());\n}\nif (dataSourceJdbcRuntimeConfig.backgroundValidationInterval.isPresent()) {\npoolConfiguration.validationTimeout(dataSourceJdbcRuntimeConfig.backgroundValidationInterval.get());\n}\nif (dataSourceJdbcRuntimeConfig.foregroundValidationInterval.isPresent()) {\npoolConfiguration.idleValidationTimeout(dataSourceJdbcRuntimeConfig.foregroundValidationInterval.get());\n}\nif (dataSourceJdbcRuntimeConfig.validationQuerySql.isPresent()) {\nString validationQuery = dataSourceJdbcRuntimeConfig.validationQuerySql.get();\npoolConfiguration.connectionValidator(new ConnectionValidator() {\n@Override\npublic boolean isValid(Connection connection) {\ntry (Statement stmt = connection.createStatement()) {\nstmt.execute(validationQuery);\nreturn true;\n} catch (Exception e) {\nlog.warn(\"Connection validation failed\", e);\n}\nreturn false;\n}\n});\n}\nif (dataSourceJdbcRuntimeConfig.idleRemovalInterval.isPresent()) {\npoolConfiguration.reapTimeout(dataSourceJdbcRuntimeConfig.idleRemovalInterval.get());\n}\nif (dataSourceJdbcRuntimeConfig.leakDetectionInterval.isPresent()) {\npoolConfiguration.leakTimeout(dataSourceJdbcRuntimeConfig.leakDetectionInterval.get());\n}\nif (dataSourceJdbcRuntimeConfig.maxLifetime.isPresent()) 
{\npoolConfiguration.maxLifetime(dataSourceJdbcRuntimeConfig.maxLifetime.get());\n}\nif (dataSourceJdbcRuntimeConfig.transactionRequirement.isPresent()) {\npoolConfiguration.transactionRequirement(dataSourceJdbcRuntimeConfig.transactionRequirement.get());\n}\npoolConfiguration.enhancedLeakReport(dataSourceJdbcRuntimeConfig.extendedLeakReport);\npoolConfiguration.flushOnClose(dataSourceJdbcRuntimeConfig.flushOnClose);\n}\npublic DataSourceBuildTimeConfig getDataSourceBuildTimeConfig(String dataSourceName) {\nif (DataSourceUtil.isDefault(dataSourceName)) {\nreturn dataSourcesBuildTimeConfig.defaultDataSource;\n}\nDataSourceBuildTimeConfig namedConfig = dataSourcesBuildTimeConfig.namedDataSources.get(dataSourceName);\nreturn namedConfig != null ? namedConfig : new DataSourceBuildTimeConfig();\n}\npublic DataSourceJdbcBuildTimeConfig getDataSourceJdbcBuildTimeConfig(String dataSourceName) {\nif (DataSourceUtil.isDefault(dataSourceName)) {\nreturn dataSourcesJdbcBuildTimeConfig.jdbc;\n}\nDataSourceJdbcOuterNamedBuildTimeConfig namedOuterConfig = dataSourcesJdbcBuildTimeConfig.namedDataSources\n.get(dataSourceName);\nreturn namedOuterConfig != null ? namedOuterConfig.jdbc : new DataSourceJdbcBuildTimeConfig();\n}\npublic DataSourceRuntimeConfig getDataSourceRuntimeConfig(String dataSourceName) {\nif (DataSourceUtil.isDefault(dataSourceName)) {\nreturn dataSourcesRuntimeConfig.defaultDataSource;\n}\nDataSourceRuntimeConfig namedConfig = dataSourcesRuntimeConfig.namedDataSources.get(dataSourceName);\nreturn namedConfig != null ? namedConfig : new DataSourceRuntimeConfig();\n}\npublic DataSourceJdbcRuntimeConfig getDataSourceJdbcRuntimeConfig(String dataSourceName) {\nif (DataSourceUtil.isDefault(dataSourceName)) {\nreturn dataSourcesJdbcRuntimeConfig.jdbc;\n}\nDataSourceJdbcOuterNamedRuntimeConfig namedOuterConfig = dataSourcesJdbcRuntimeConfig.namedDataSources\n.get(dataSourceName);\nreturn namedOuterConfig != null ? 
namedOuterConfig.jdbc : new DataSourceJdbcRuntimeConfig();\n}\n/**\n* Uses the {@link ServiceLoader\n* of the current {@link Thread\n*/\nprivate static void loadDriversInTCCL() {\nfinal ServiceLoader drivers = ServiceLoader.load(Driver.class);\nfinal Iterator iterator = drivers.iterator();\nwhile (iterator.hasNext()) {\ntry {\niterator.next();\n} catch (Throwable t) {\n}\n}\n}\n@PreDestroy\npublic void stop() {\nfor (AgroalDataSource dataSource : dataSources.values()) {\nif (dataSource != null) {\ndataSource.close();\n}\n}\n}\n}", + "context_after": "class DataSources {\nprivate static final Logger log = Logger.getLogger(DataSources.class.getName());\npublic static final String TRACING_DRIVER_CLASSNAME = \"io.opentracing.contrib.jdbc.TracingDriver\";\nprivate static final String JDBC_URL_PREFIX = \"jdbc:\";\nprivate static final String JDBC_TRACING_URL_PREFIX = \"jdbc:tracing:\";\nprivate final DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig;\nprivate final DataSourcesRuntimeConfig dataSourcesRuntimeConfig;\nprivate final DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig;\nprivate final DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig;\nprivate final TransactionManagerConfiguration transactionRuntimeConfig;\nprivate final TransactionManager transactionManager;\nprivate final XAResourceRecoveryRegistry xaResourceRecoveryRegistry;\nprivate final TransactionSynchronizationRegistry transactionSynchronizationRegistry;\nprivate final DataSourceSupport dataSourceSupport;\nprivate final Instance agroalPoolInterceptors;\nprivate final Instance agroalOpenTelemetryWrapper;\nprivate final ConcurrentMap dataSources = new ConcurrentHashMap<>();\npublic DataSources(DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig,\nDataSourcesRuntimeConfig dataSourcesRuntimeConfig, DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig,\nDataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig,\nTransactionManagerConfiguration transactionRuntimeConfig,\nTransactionManager transactionManager,\nXAResourceRecoveryRegistry xaResourceRecoveryRegistry,\nTransactionSynchronizationRegistry transactionSynchronizationRegistry,\nDataSourceSupport dataSourceSupport,\n@Any Instance agroalPoolInterceptors,\nInstance agroalOpenTelemetryWrapper) {\nthis.dataSourcesBuildTimeConfig = dataSourcesBuildTimeConfig;\nthis.dataSourcesRuntimeConfig = dataSourcesRuntimeConfig;\nthis.dataSourcesJdbcBuildTimeConfig = dataSourcesJdbcBuildTimeConfig;\nthis.dataSourcesJdbcRuntimeConfig = dataSourcesJdbcRuntimeConfig;\nthis.transactionRuntimeConfig = transactionRuntimeConfig;\nthis.transactionManager = transactionManager;\nthis.xaResourceRecoveryRegistry = xaResourceRecoveryRegistry;\nthis.transactionSynchronizationRegistry = transactionSynchronizationRegistry;\nthis.dataSourceSupport = dataSourceSupport;\nthis.agroalPoolInterceptors = agroalPoolInterceptors;\nthis.agroalOpenTelemetryWrapper = agroalOpenTelemetryWrapper;\n}\n/**\n* Meant to be used from recorders that create synthetic beans that need access to {@code Datasource}.\n* In such using {@code Arc.container.instance(DataSource.class)} is not possible because\n* {@code Datasource} is itself a synthetic bean.\n*
\n* This method relies on the fact that {@code DataSources} should - given the same input -\n* always return the same {@code AgroalDataSource} no matter how many times it is invoked\n* (which makes sense because {@code DataSource} is a {@code Singleton} bean).\n*
\n* This method is thread-safe\n*/\npublic static AgroalDataSource fromName(String dataSourceName) {\nreturn Arc.container().instance(DataSources.class).get()\n.getDataSource(dataSourceName);\n}\npublic AgroalDataSource getDataSource(String dataSourceName) {\nreturn dataSources.computeIfAbsent(dataSourceName, new Function() {\n@Override\npublic AgroalDataSource apply(String s) {\nreturn doCreateDataSource(s);\n}\n});\n}\n@SuppressWarnings(\"resource\")\nprivate void applyNewConfiguration(String dataSourceName, AgroalDataSourceConfigurationSupplier dataSourceConfiguration,\nAgroalConnectionPoolConfigurationSupplier poolConfiguration,\nAgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration, Class driver, String jdbcUrl,\nDataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig, DataSourceRuntimeConfig dataSourceRuntimeConfig,\nDataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig, TransactionManagerConfiguration transactionRuntimeConfig,\nboolean mpMetricsPresent) {\nconnectionFactoryConfiguration.jdbcUrl(jdbcUrl);\nconnectionFactoryConfiguration.connectionProviderClass(driver);\nconnectionFactoryConfiguration.trackJdbcResources(dataSourceJdbcRuntimeConfig.detectStatementLeaks);\nif (dataSourceJdbcRuntimeConfig.transactionIsolationLevel.isPresent()) {\nconnectionFactoryConfiguration\n.jdbcTransactionIsolation(\ndataSourceJdbcRuntimeConfig.transactionIsolationLevel.get());\n}\nif (dataSourceJdbcBuildTimeConfig.transactions != io.quarkus.agroal.runtime.TransactionIntegration.DISABLED) {\nTransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager,\ntransactionSynchronizationRegistry, null, false,\ndataSourceJdbcBuildTimeConfig.transactions == io.quarkus.agroal.runtime.TransactionIntegration.XA\n&& transactionRuntimeConfig.enableRecovery\n? xaResourceRecoveryRegistry\n: null);\nif (dataSourceJdbcBuildTimeConfig.transactions == io.quarkus.agroal.runtime.TransactionIntegration.XA\n&& !transactionRuntimeConfig.enableRecovery) {\nlog.warnv(\n\"Datasource {0} enables XA but transaction recovery is not enabled. 
Please enable transaction recovery by setting quarkus.transaction-manager.enable-recovery=true, otherwise data may be lost if the application is terminated abruptly\",\ndataSourceName);\n}\npoolConfiguration.transactionIntegration(txIntegration);\n}\nif (dataSourceJdbcRuntimeConfig.newConnectionSql.isPresent()) {\nconnectionFactoryConfiguration.initialSql(dataSourceJdbcRuntimeConfig.newConnectionSql.get());\n}\nif (dataSourceJdbcBuildTimeConfig.enableMetrics.isPresent()) {\ndataSourceConfiguration.metricsEnabled(dataSourceJdbcBuildTimeConfig.enableMetrics.get());\n} else {\ndataSourceConfiguration.metricsEnabled(dataSourcesBuildTimeConfig.metricsEnabled && mpMetricsPresent);\n}\nif (dataSourceRuntimeConfig.username.isPresent()) {\nNamePrincipal username = new NamePrincipal(dataSourceRuntimeConfig.username.get());\nconnectionFactoryConfiguration\n.principal(username).recoveryPrincipal(username);\n}\nif (dataSourceRuntimeConfig.password.isPresent()) {\nSimplePassword password = new SimplePassword(dataSourceRuntimeConfig.password.get());\nconnectionFactoryConfiguration\n.credential(password).recoveryCredential(password);\n}\nif (dataSourceRuntimeConfig.credentialsProvider.isPresent()) {\nString beanName = dataSourceRuntimeConfig.credentialsProviderName.orElse(null);\nCredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName);\nString name = dataSourceRuntimeConfig.credentialsProvider.get();\nconnectionFactoryConfiguration\n.credential(new AgroalVaultCredentialsProviderPassword(name, credentialsProvider));\n}\nfor (Map.Entry entry : dataSourceJdbcRuntimeConfig.additionalJdbcProperties.entrySet()) {\nconnectionFactoryConfiguration.jdbcProperty(entry.getKey(), entry.getValue());\n}\npoolConfiguration.minSize(dataSourceJdbcRuntimeConfig.minSize);\npoolConfiguration.maxSize(dataSourceJdbcRuntimeConfig.maxSize);\nif (dataSourceJdbcRuntimeConfig.initialSize.isPresent() && dataSourceJdbcRuntimeConfig.initialSize.getAsInt() > 0) {\npoolConfiguration.initialSize(dataSourceJdbcRuntimeConfig.initialSize.getAsInt());\n}\npoolConfiguration.connectionValidator(ConnectionValidator.defaultValidator());\nif (dataSourceJdbcRuntimeConfig.acquisitionTimeout.isPresent()) {\npoolConfiguration.acquisitionTimeout(dataSourceJdbcRuntimeConfig.acquisitionTimeout.get());\n}\nif (dataSourceJdbcRuntimeConfig.backgroundValidationInterval.isPresent()) {\npoolConfiguration.validationTimeout(dataSourceJdbcRuntimeConfig.backgroundValidationInterval.get());\n}\nif (dataSourceJdbcRuntimeConfig.foregroundValidationInterval.isPresent()) {\npoolConfiguration.idleValidationTimeout(dataSourceJdbcRuntimeConfig.foregroundValidationInterval.get());\n}\nif (dataSourceJdbcRuntimeConfig.validationQuerySql.isPresent()) {\nString validationQuery = dataSourceJdbcRuntimeConfig.validationQuerySql.get();\npoolConfiguration.connectionValidator(new ConnectionValidator() {\n@Override\npublic boolean isValid(Connection connection) {\ntry (Statement stmt = connection.createStatement()) {\nstmt.execute(validationQuery);\nreturn true;\n} catch (Exception e) {\nlog.warn(\"Connection validation failed\", e);\n}\nreturn false;\n}\n});\n}\nif (dataSourceJdbcRuntimeConfig.idleRemovalInterval.isPresent()) {\npoolConfiguration.reapTimeout(dataSourceJdbcRuntimeConfig.idleRemovalInterval.get());\n}\nif (dataSourceJdbcRuntimeConfig.leakDetectionInterval.isPresent()) {\npoolConfiguration.leakTimeout(dataSourceJdbcRuntimeConfig.leakDetectionInterval.get());\n}\nif (dataSourceJdbcRuntimeConfig.maxLifetime.isPresent()) 
{\npoolConfiguration.maxLifetime(dataSourceJdbcRuntimeConfig.maxLifetime.get());\n}\nif (dataSourceJdbcRuntimeConfig.transactionRequirement.isPresent()) {\npoolConfiguration.transactionRequirement(dataSourceJdbcRuntimeConfig.transactionRequirement.get());\n}\npoolConfiguration.enhancedLeakReport(dataSourceJdbcRuntimeConfig.extendedLeakReport);\npoolConfiguration.flushOnClose(dataSourceJdbcRuntimeConfig.flushOnClose);\n}\npublic DataSourceBuildTimeConfig getDataSourceBuildTimeConfig(String dataSourceName) {\nif (DataSourceUtil.isDefault(dataSourceName)) {\nreturn dataSourcesBuildTimeConfig.defaultDataSource;\n}\nDataSourceBuildTimeConfig namedConfig = dataSourcesBuildTimeConfig.namedDataSources.get(dataSourceName);\nreturn namedConfig != null ? namedConfig : new DataSourceBuildTimeConfig();\n}\npublic DataSourceJdbcBuildTimeConfig getDataSourceJdbcBuildTimeConfig(String dataSourceName) {\nif (DataSourceUtil.isDefault(dataSourceName)) {\nreturn dataSourcesJdbcBuildTimeConfig.jdbc;\n}\nDataSourceJdbcOuterNamedBuildTimeConfig namedOuterConfig = dataSourcesJdbcBuildTimeConfig.namedDataSources\n.get(dataSourceName);\nreturn namedOuterConfig != null ? namedOuterConfig.jdbc : new DataSourceJdbcBuildTimeConfig();\n}\npublic DataSourceRuntimeConfig getDataSourceRuntimeConfig(String dataSourceName) {\nif (DataSourceUtil.isDefault(dataSourceName)) {\nreturn dataSourcesRuntimeConfig.defaultDataSource;\n}\nDataSourceRuntimeConfig namedConfig = dataSourcesRuntimeConfig.namedDataSources.get(dataSourceName);\nreturn namedConfig != null ? namedConfig : new DataSourceRuntimeConfig();\n}\npublic DataSourceJdbcRuntimeConfig getDataSourceJdbcRuntimeConfig(String dataSourceName) {\nif (DataSourceUtil.isDefault(dataSourceName)) {\nreturn dataSourcesJdbcRuntimeConfig.jdbc;\n}\nDataSourceJdbcOuterNamedRuntimeConfig namedOuterConfig = dataSourcesJdbcRuntimeConfig.namedDataSources\n.get(dataSourceName);\nreturn namedOuterConfig != null ? 
namedOuterConfig.jdbc : new DataSourceJdbcRuntimeConfig();\n}\n/**\n* Uses the {@link ServiceLoader\n* of the current {@link Thread\n*/\nprivate static void loadDriversInTCCL() {\nfinal ServiceLoader drivers = ServiceLoader.load(Driver.class);\nfinal Iterator iterator = drivers.iterator();\nwhile (iterator.hasNext()) {\ntry {\niterator.next();\n} catch (Throwable t) {\n}\n}\n}\n@PreDestroy\npublic void stop() {\nfor (AgroalDataSource dataSource : dataSources.values()) {\nif (dataSource != null) {\ndataSource.close();\n}\n}\n}\n}" + }, + { + "comment": "It was changed recently though with https://github.com/quarkusio/quarkus/pull/22271.", + "method_body": "void testCacheResult() {\nUni uni1 = cachedService.cacheResult1(KEY_1);\nassertEquals(0, cachedService.getCacheResultInvocations());\nUni uni2 = cachedService.cacheResult1(KEY_1);\nassertEquals(0, cachedService.getCacheResultInvocations());\nassertNotSame(uni1, uni2);\nString emittedItem1 = uni1.await().indefinitely();\nassertEquals(1, cachedService.getCacheResultInvocations());\nString emittedItem2 = uni2.await().indefinitely();\nassertEquals(1, cachedService.getCacheResultInvocations());\nassertSame(emittedItem1, emittedItem2);\nString emittedItem3 = cachedService.cacheResult1(\"another-key\").await().indefinitely();\nassertEquals(2, cachedService.getCacheResultInvocations());\nassertNotSame(emittedItem2, emittedItem3);\n}", + "target_code": "", + "method_body_after": "void testCacheResult() {\nUni uni1 = cachedService.cacheResult1(KEY_1);\nassertEquals(0, cachedService.getCacheResultInvocations());\nUni uni2 = cachedService.cacheResult1(KEY_1);\nassertEquals(0, cachedService.getCacheResultInvocations());\nassertNotSame(uni1, uni2);\nString emittedItem1 = uni1.await().indefinitely();\nassertEquals(1, cachedService.getCacheResultInvocations());\nString emittedItem2 = uni2.await().indefinitely();\nassertEquals(1, cachedService.getCacheResultInvocations());\nassertSame(emittedItem1, emittedItem2);\nString emittedItem3 = cachedService.cacheResult1(\"another-key\").await().indefinitely();\nassertEquals(2, cachedService.getCacheResultInvocations());\nassertNotSame(emittedItem2, emittedItem3);\n}", + "context_before": "class UniReturnTypeTest {\nprivate static final String CACHE_NAME_1 = \"test-cache-1\";\nprivate static final String CACHE_NAME_2 = \"test-cache-2\";\nprivate static final String KEY_1 = \"key-1\";\nprivate static final String KEY_2 = \"key-2\";\n@RegisterExtension\nstatic final QuarkusUnitTest TEST = new QuarkusUnitTest().withApplicationRoot((jar) -> jar.addClass(CachedService.class));\n@Inject\nCachedService cachedService;\n@Test\n@Test\nvoid testCacheInvalidate() {\nString value1 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value2 = cachedService.cacheResult2(KEY_1).await().indefinitely();\nObject value3 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nUni invalidateUni = cachedService.cacheInvalidate(KEY_1);\nassertEquals(0, cachedService.getCacheInvalidateInvocations());\nString value4 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value5 = cachedService.cacheResult2(KEY_1).await().indefinitely();\nObject value6 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nassertSame(value1, value4);\nassertSame(value2, value5);\nassertSame(value3, value6);\ninvalidateUni.await().indefinitely();\nassertEquals(1, cachedService.getCacheInvalidateInvocations());\nString value7 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value8 = 
cachedService.cacheResult2(KEY_1).await().indefinitely();\nObject value9 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nassertNotSame(value4, value7);\nassertNotSame(value5, value8);\nassertSame(value6, value9);\n}\n@Test\nvoid testCacheInvalidateAll() {\nString value1 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value2 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nUni invalidateAllUni = cachedService.cacheInvalidateAll();\nassertEquals(0, cachedService.getCacheInvalidateAllInvocations());\nString value3 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value4 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nassertSame(value1, value3);\nassertSame(value2, value4);\ninvalidateAllUni.await().indefinitely();\nassertEquals(1, cachedService.getCacheInvalidateAllInvocations());\nString value5 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value6 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nassertNotSame(value1, value5);\nassertNotSame(value2, value6);\n}\n@ApplicationScoped\nstatic class CachedService {\nprivate volatile int cacheResultInvocations;\nprivate volatile int cacheInvalidateInvocations;\nprivate volatile int cacheInvalidateAllInvocations;\n@CacheResult(cacheName = CACHE_NAME_1)\npublic Uni cacheResult1(String key) {\ncacheResultInvocations++;\nreturn Uni.createFrom().item(() -> new String());\n}\n@CacheResult(cacheName = CACHE_NAME_2)\npublic Uni cacheResult2(String key) {\nreturn Uni.createFrom().item(() -> new Object());\n}\n@CacheInvalidate(cacheName = CACHE_NAME_1)\n@CacheInvalidate(cacheName = CACHE_NAME_2)\npublic Uni cacheInvalidate(String key) {\ncacheInvalidateInvocations++;\nreturn Uni.createFrom().nullItem();\n}\n@CacheInvalidateAll(cacheName = CACHE_NAME_1)\n@CacheInvalidateAll(cacheName = CACHE_NAME_2)\npublic Uni cacheInvalidateAll() {\ncacheInvalidateAllInvocations++;\nreturn Uni.createFrom().nullItem();\n}\npublic int getCacheResultInvocations() {\nreturn cacheResultInvocations;\n}\npublic int getCacheInvalidateInvocations() {\nreturn cacheInvalidateInvocations;\n}\npublic int getCacheInvalidateAllInvocations() {\nreturn cacheInvalidateAllInvocations;\n}\n}\n}", + "context_after": "class UniReturnTypeTest {\nprivate static final String CACHE_NAME_1 = \"test-cache-1\";\nprivate static final String CACHE_NAME_2 = \"test-cache-2\";\nprivate static final String KEY_1 = \"key-1\";\nprivate static final String KEY_2 = \"key-2\";\n@RegisterExtension\nstatic final QuarkusUnitTest TEST = new QuarkusUnitTest().withApplicationRoot((jar) -> jar.addClass(CachedService.class));\n@Inject\nCachedService cachedService;\n@Test\n@Test\nvoid testCacheInvalidate() {\nString value1 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value2 = cachedService.cacheResult2(KEY_1).await().indefinitely();\nObject value3 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nUni invalidateUni = cachedService.cacheInvalidate(KEY_1);\nassertEquals(0, cachedService.getCacheInvalidateInvocations());\nString value4 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value5 = cachedService.cacheResult2(KEY_1).await().indefinitely();\nObject value6 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nassertSame(value1, value4);\nassertSame(value2, value5);\nassertSame(value3, value6);\ninvalidateUni.await().indefinitely();\nassertEquals(1, cachedService.getCacheInvalidateInvocations());\nString value7 = 
cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value8 = cachedService.cacheResult2(KEY_1).await().indefinitely();\nObject value9 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nassertNotSame(value4, value7);\nassertNotSame(value5, value8);\nassertSame(value6, value9);\n}\n@Test\nvoid testCacheInvalidateAll() {\nString value1 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value2 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nUni invalidateAllUni = cachedService.cacheInvalidateAll();\nassertEquals(0, cachedService.getCacheInvalidateAllInvocations());\nString value3 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value4 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nassertSame(value1, value3);\nassertSame(value2, value4);\ninvalidateAllUni.await().indefinitely();\nassertEquals(1, cachedService.getCacheInvalidateAllInvocations());\nString value5 = cachedService.cacheResult1(KEY_1).await().indefinitely();\nObject value6 = cachedService.cacheResult2(KEY_2).await().indefinitely();\nassertNotSame(value1, value5);\nassertNotSame(value2, value6);\n}\n@ApplicationScoped\nstatic class CachedService {\nprivate volatile int cacheResultInvocations;\nprivate volatile int cacheInvalidateInvocations;\nprivate volatile int cacheInvalidateAllInvocations;\n@CacheResult(cacheName = CACHE_NAME_1)\npublic Uni cacheResult1(String key) {\ncacheResultInvocations++;\nreturn Uni.createFrom().item(() -> new String());\n}\n@CacheResult(cacheName = CACHE_NAME_2)\npublic Uni cacheResult2(String key) {\nreturn Uni.createFrom().item(() -> new Object());\n}\n@CacheInvalidate(cacheName = CACHE_NAME_1)\n@CacheInvalidate(cacheName = CACHE_NAME_2)\npublic Uni cacheInvalidate(String key) {\ncacheInvalidateInvocations++;\nreturn Uni.createFrom().nullItem();\n}\n@CacheInvalidateAll(cacheName = CACHE_NAME_1)\n@CacheInvalidateAll(cacheName = CACHE_NAME_2)\npublic Uni cacheInvalidateAll() {\ncacheInvalidateAllInvocations++;\nreturn Uni.createFrom().nullItem();\n}\npublic int getCacheResultInvocations() {\nreturn cacheResultInvocations;\n}\npublic int getCacheInvalidateInvocations() {\nreturn cacheInvalidateInvocations;\n}\npublic int getCacheInvalidateAllInvocations() {\nreturn cacheInvalidateAllInvocations;\n}\n}\n}" + }, + { + "comment": "snapShotPaimonTable", + "method_body": "private boolean checkBaseTablePartitionChange() {\nfor (Pair tablePair : snapshotBaseTables.values()) {\nBaseTableInfo baseTableInfo = tablePair.first;\nTable snapshotTable = tablePair.second;\nDatabase db = baseTableInfo.getDb();\nif (db == null) {\nreturn true;\n}\ndb.readLock();\ntry {\nTable table = baseTableInfo.getTable();\nif (table == null) {\nreturn true;\n}\nif (snapshotTable.isOlapOrCloudNativeTable()) {\nOlapTable snapShotOlapTable = (OlapTable) snapshotTable;\nPartitionInfo snapshotPartitionInfo = snapShotOlapTable.getPartitionInfo();\nif (snapshotPartitionInfo instanceof SinglePartitionInfo) {\nSet partitionNames = ((OlapTable) table).getVisiblePartitionNames();\nif (!snapShotOlapTable.getVisiblePartitionNames().equals(partitionNames)) {\nreturn true;\n}\n} else if (snapshotPartitionInfo instanceof ListPartitionInfo) {\nMap>> snapshotPartitionMap =\nsnapShotOlapTable.getListPartitionMap();\nMap>> currentPartitionMap =\n((OlapTable) table).getListPartitionMap();\nreturn SyncPartitionUtils.hasListPartitionChanged(snapshotPartitionMap, currentPartitionMap);\n} else {\nMap> snapshotPartitionMap =\nsnapShotOlapTable.getRangePartitionMap();\nMap> 
currentPartitionMap =\n((OlapTable) table).getRangePartitionMap();\nreturn SyncPartitionUtils.hasRangePartitionChanged(snapshotPartitionMap, currentPartitionMap);\n}\n} else if (snapshotTable.isHiveTable() || snapshotTable.isHudiTable()) {\nHiveMetaStoreTable snapShotHMSTable = (HiveMetaStoreTable) snapshotTable;\nif (snapShotHMSTable.isUnPartitioned()) {\nif (!((HiveMetaStoreTable) table).isUnPartitioned()) {\nreturn true;\n}\n} else {\nPartitionInfo mvPartitionInfo = materializedView.getPartitionInfo();\nif (!(mvPartitionInfo instanceof ExpressionRangePartitionInfo)) {\nreturn false;\n}\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\nColumn partitionColumn = partitionTableAndColumn.second;\nif (!snapshotTable.equals(partitionTableAndColumn.first)\n|| !snapshotTable.containColumn(partitionColumn.getName())) {\ncontinue;\n}\nMap> snapshotPartitionMap = PartitionUtil.\ngetPartitionKeyRange(snapshotTable, partitionColumn);\nMap> currentPartitionMap = PartitionUtil.\ngetPartitionKeyRange(table, partitionColumn);\nreturn SyncPartitionUtils.hasRangePartitionChanged(snapshotPartitionMap, currentPartitionMap);\n}\n} else if (snapshotTable.isIcebergTable()) {\nIcebergTable snapShotIcebergTable = (IcebergTable) snapshotTable;\nif (snapShotIcebergTable.isUnPartitioned()) {\nif (!table.isUnPartitioned()) {\nreturn true;\n}\n} else {\nPartitionInfo mvPartitionInfo = materializedView.getPartitionInfo();\nif (!(mvPartitionInfo instanceof ExpressionRangePartitionInfo)) {\nreturn false;\n}\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\nColumn partitionColumn = partitionTableAndColumn.second;\nif (!snapshotTable.equals(partitionTableAndColumn.first)\n|| !snapShotIcebergTable.containColumn(partitionColumn.getName())) {\ncontinue;\n}\nMap> snapshotPartitionMap = PartitionUtil.\ngetPartitionKeyRange(snapshotTable, partitionColumn);\nMap> currentPartitionMap = PartitionUtil.\ngetPartitionKeyRange(table, partitionColumn);\nreturn SyncPartitionUtils.hasRangePartitionChanged(snapshotPartitionMap, currentPartitionMap);\n}\n} else if (snapshotTable.isPaimonTable()) {\nPaimonTable snapShotIcebergTable = (PaimonTable) snapshotTable;\nif (snapShotIcebergTable.isUnPartitioned()) {\nif (!table.isUnPartitioned()) {\nreturn true;\n}\n} else {\nPartitionInfo mvPartitionInfo = materializedView.getPartitionInfo();\nif (!(mvPartitionInfo instanceof ExpressionRangePartitionInfo)) {\nreturn false;\n}\nPair partitionTableAndColumn =\ngetRefBaseTableAndPartitionColumn(snapshotBaseTables);\nColumn partitionColumn = partitionTableAndColumn.second;\nif (!snapshotTable.equals(partitionTableAndColumn.first)\n|| !snapShotIcebergTable.containColumn(partitionColumn.getName())) {\ncontinue;\n}\nMap> snapshotPartitionMap = PartitionUtil.\ngetPartitionKeyRange(snapshotTable, partitionColumn);\nMap> currentPartitionMap = PartitionUtil.\ngetPartitionKeyRange(table, partitionColumn);\nreturn SyncPartitionUtils.hasRangePartitionChanged(snapshotPartitionMap, currentPartitionMap);\n}\n}\n} catch (UserException e) {\nLOG.warn(\"Materialized view compute partition change failed\", e);\nreturn true;\n} finally {\ndb.readUnlock();\n}\n}\nreturn false;\n}", + "target_code": "PaimonTable snapShotIcebergTable = (PaimonTable) snapshotTable;", + "method_body_after": "private boolean checkBaseTablePartitionChange() {\nfor (Pair tablePair : snapshotBaseTables.values()) {\nBaseTableInfo baseTableInfo = tablePair.first;\nTable snapshotTable = tablePair.second;\nDatabase db = 
baseTableInfo.getDb();\nif (db == null) {\nreturn true;\n}\ndb.readLock();\ntry {\nTable table = baseTableInfo.getTable();\nif (table == null) {\nreturn true;\n}\nif (snapshotTable.isOlapOrCloudNativeTable()) {\nOlapTable snapShotOlapTable = (OlapTable) snapshotTable;\nPartitionInfo snapshotPartitionInfo = snapShotOlapTable.getPartitionInfo();\nif (snapshotPartitionInfo instanceof SinglePartitionInfo) {\nSet partitionNames = ((OlapTable) table).getVisiblePartitionNames();\nif (!snapShotOlapTable.getVisiblePartitionNames().equals(partitionNames)) {\nreturn true;\n}\n} else if (snapshotPartitionInfo instanceof ListPartitionInfo) {\nMap>> snapshotPartitionMap =\nsnapShotOlapTable.getListPartitionMap();\nMap>> currentPartitionMap =\n((OlapTable) table).getListPartitionMap();\nreturn SyncPartitionUtils.hasListPartitionChanged(snapshotPartitionMap, currentPartitionMap);\n} else {\nMap> snapshotPartitionMap =\nsnapShotOlapTable.getRangePartitionMap();\nMap> currentPartitionMap =\n((OlapTable) table).getRangePartitionMap();\nreturn SyncPartitionUtils.hasRangePartitionChanged(snapshotPartitionMap, currentPartitionMap);\n}\n} else if (snapshotTable.isHiveTable() || snapshotTable.isHudiTable()) {\nHiveMetaStoreTable snapShotHMSTable = (HiveMetaStoreTable) snapshotTable;\nif (snapShotHMSTable.isUnPartitioned()) {\nif (!((HiveMetaStoreTable) table).isUnPartitioned()) {\nreturn true;\n}\n} else {\nPartitionInfo mvPartitionInfo = materializedView.getPartitionInfo();\nif (!(mvPartitionInfo instanceof ExpressionRangePartitionInfo)) {\nreturn false;\n}\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\nColumn partitionColumn = partitionTableAndColumn.second;\nif (!snapshotTable.equals(partitionTableAndColumn.first)\n|| !snapshotTable.containColumn(partitionColumn.getName())) {\ncontinue;\n}\nMap> snapshotPartitionMap = PartitionUtil.\ngetPartitionKeyRange(snapshotTable, partitionColumn);\nMap> currentPartitionMap = PartitionUtil.\ngetPartitionKeyRange(table, partitionColumn);\nreturn SyncPartitionUtils.hasRangePartitionChanged(snapshotPartitionMap, currentPartitionMap);\n}\n} else if (snapshotTable.isIcebergTable()) {\nIcebergTable snapShotIcebergTable = (IcebergTable) snapshotTable;\nif (snapShotIcebergTable.isUnPartitioned()) {\nif (!table.isUnPartitioned()) {\nreturn true;\n}\n} else {\nPartitionInfo mvPartitionInfo = materializedView.getPartitionInfo();\nif (!(mvPartitionInfo instanceof ExpressionRangePartitionInfo)) {\nreturn false;\n}\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\nColumn partitionColumn = partitionTableAndColumn.second;\nif (!snapshotTable.equals(partitionTableAndColumn.first)\n|| !snapShotIcebergTable.containColumn(partitionColumn.getName())) {\ncontinue;\n}\nMap> snapshotPartitionMap = PartitionUtil.\ngetPartitionKeyRange(snapshotTable, partitionColumn);\nMap> currentPartitionMap = PartitionUtil.\ngetPartitionKeyRange(table, partitionColumn);\nreturn SyncPartitionUtils.hasRangePartitionChanged(snapshotPartitionMap, currentPartitionMap);\n}\n} else if (snapshotTable.isJDBCTable()) {\nJDBCTable snapShotJDBCTable = (JDBCTable) snapshotTable;\nif (snapShotJDBCTable.isUnPartitioned()) {\nif (!table.isUnPartitioned()) {\nreturn true;\n}\n} else {\nPartitionInfo mvPartitionInfo = materializedView.getPartitionInfo();\nif (!(mvPartitionInfo instanceof ExpressionRangePartitionInfo)) {\nreturn false;\n}\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\nColumn partitionColumn = 
partitionTableAndColumn.second;\nif (!snapshotTable.equals(partitionTableAndColumn.first)\n|| !snapShotJDBCTable.containColumn(partitionColumn.getName())) {\ncontinue;\n}\nMap> snapshotPartitionMap = PartitionUtil.\ngetPartitionKeyRange(snapshotTable, partitionColumn);\nMap> currentPartitionMap = PartitionUtil.\ngetPartitionKeyRange(table, partitionColumn);\nreturn SyncPartitionUtils.hasRangePartitionChanged(snapshotPartitionMap, currentPartitionMap);\n}\n} else if (snapshotTable.isPaimonTable()) {\nPaimonTable snapShotPaimonTable = (PaimonTable) snapshotTable;\nif (snapShotPaimonTable.isUnPartitioned()) {\nif (!table.isUnPartitioned()) {\nreturn true;\n}\n} else {\nPartitionInfo mvPartitionInfo = materializedView.getPartitionInfo();\nif (!(mvPartitionInfo instanceof ExpressionRangePartitionInfo)) {\nreturn false;\n}\nPair partitionTableAndColumn =\ngetRefBaseTableAndPartitionColumn(snapshotBaseTables);\nColumn partitionColumn = partitionTableAndColumn.second;\nif (!snapshotTable.equals(partitionTableAndColumn.first)\n|| !snapShotPaimonTable.containColumn(partitionColumn.getName())) {\ncontinue;\n}\nMap> snapshotPartitionMap = PartitionUtil.\ngetPartitionKeyRange(snapshotTable, partitionColumn);\nMap> currentPartitionMap = PartitionUtil.\ngetPartitionKeyRange(table, partitionColumn);\nreturn SyncPartitionUtils.hasRangePartitionChanged(snapshotPartitionMap, currentPartitionMap);\n}\n}\n} catch (UserException e) {\nLOG.warn(\"Materialized view compute partition change failed\", e);\nreturn true;\n} finally {\ndb.readUnlock();\n}\n}\nreturn false;\n}", + "context_before": "class PartitionBasedMvRefreshProcessor extends BaseTaskRunProcessor {\nprivate static final Logger LOG = LogManager.getLogger(PartitionBasedMvRefreshProcessor.class);\npublic static final String MV_ID = \"mvId\";\npublic static final String MV_SESSION_ENABLE_SPILL =\nPropertyAnalyzer.PROPERTIES_MATERIALIZED_VIEW_SESSION_PREFIX + SessionVariable.ENABLE_SPILL;\npublic static final String MV_SESSION_TIMEOUT =\nPropertyAnalyzer.PROPERTIES_MATERIALIZED_VIEW_SESSION_PREFIX + SessionVariable.QUERY_TIMEOUT;\nprivate static final int MV_DEFAULT_QUERY_TIMEOUT = 3600;\nprivate static final int MAX_RETRY_NUM = 10;\nprivate Database database;\nprivate MaterializedView materializedView;\nprivate MvTaskRunContext mvContext;\nprivate Map> snapshotBaseTables;\nprivate long oldTransactionVisibleWaitTimeout;\n@VisibleForTesting\npublic MvTaskRunContext getMvContext() {\nreturn mvContext;\n}\n@VisibleForTesting\npublic void setMvContext(MvTaskRunContext mvContext) {\nthis.mvContext = mvContext;\n}\n@Override\npublic void processTaskRun(TaskRunContext context) throws Exception {\nTracers.register();\nprepare(context);\ntry {\ndoMvRefresh(context);\n} finally {\npostProcess();\nTracers.close();\n}\n}\nprivate void doMvRefresh(TaskRunContext context) throws Exception {\nInsertStmt insertStmt = null;\nExecPlan execPlan = null;\nint retryNum = 0;\nboolean checked = false;\nMap> refTableRefreshPartitions = null;\nSet mvToRefreshedPartitions = null;\nwhile (!checked) {\nsyncPartitions();\nrefreshExternalTable(context);\ndatabase.readLock();\ntry {\nif (checkBaseTablePartitionChange()) {\nretryNum++;\nif (retryNum > MAX_RETRY_NUM) {\nthrow new DmlException(\"materialized view:%s refresh task failed\", materializedView.getName());\n}\nLOG.info(\"materialized view:{} base partition has changed. 
retry to sync partitions, retryNum:{}\",\nmaterializedView.getName(), retryNum);\ncontinue;\n}\nchecked = true;\nmvToRefreshedPartitions = getPartitionsToRefreshForMaterializedView(context.getProperties());\nif (mvToRefreshedPartitions.isEmpty()) {\nLOG.info(\"no partitions to refresh for materialized view {}\", materializedView.getName());\nreturn;\n}\nfilterPartitionByRefreshNumber(mvToRefreshedPartitions, materializedView);\nLOG.debug(\"materialized view partitions to refresh:{}\", mvToRefreshedPartitions);\nrefTableRefreshPartitions = getRefTableRefreshPartitions(mvToRefreshedPartitions);\nMap> refTablePartitionNames =\nrefTableRefreshPartitions.entrySet().stream()\n.collect(Collectors.toMap(x -> x.getKey().getName(), Map.Entry::getValue));\nLOG.debug(\"materialized view:{} source partitions :{}\",\nmaterializedView.getName(), refTableRefreshPartitions);\nif (this.getMVTaskRunExtraMessage() != null) {\nMVTaskRunExtraMessage extraMessage = getMVTaskRunExtraMessage();\nextraMessage.setMvPartitionsToRefresh(mvToRefreshedPartitions);\nextraMessage.setRefBasePartitionsToRefreshMap(refTablePartitionNames);\n}\nchangeDefaultConnectContextIfNeeded(mvContext.getCtx());\ninsertStmt = generateInsertStmt(mvToRefreshedPartitions, refTablePartitionNames, materializedView);\nexecPlan = generateRefreshPlan(mvContext.getCtx(), insertStmt);\nTracers.log(Tracers.Module.MV,\nargs -> \"[TRACE QUERY] MV: \" + materializedView.getName() +\n\"\\nMV PartitionsToRefresh: \" + String.join(\",\", (Set) args[0]) +\n\"\\nBase PartitionsToScan:\" + refTablePartitionNames +\n\"\\nInsert Plan:\\n\" +\n((ExecPlan) args[1]).getExplainString(StatementBase.ExplainLevel.VERBOSE),\nmvToRefreshedPartitions, execPlan);\nmvContext.setExecPlan(execPlan);\n} catch (Exception e) {\nLOG.warn(\"Refresh mv {} failed: {}\", materializedView.getName(), e);\nthrow e;\n} finally {\ndatabase.readUnlock();\n}\n}\nrefreshMaterializedView(mvContext, execPlan, insertStmt);\nupdateMeta(mvToRefreshedPartitions, execPlan, refTableRefreshPartitions);\nif (mvContext.hasNextBatchPartition()) {\ngenerateNextTaskRun();\n}\n}\n/**\n* Change default connect context when for mv refresh this is because:\n* - MV Refresh may take much resource to load base tables' data into the final materialized view.\n* - Those changes are set by default and also able to be changed by users for their needs.\n* @param mvConnectCtx\n*/\nprivate void changeDefaultConnectContextIfNeeded(ConnectContext mvConnectCtx) {\nTableProperty mvProperty = materializedView.getTableProperty();\nSessionVariable mvSessionVariable = mvConnectCtx.getSessionVariable();\nif (mvSessionVariable.isEnableResourceGroup()) {\nString rg = mvProperty.getResourceGroup();\nif (rg == null || rg.isEmpty()) {\nrg = ResourceGroup.DEFAULT_MV_RESOURCE_GROUP_NAME;\n}\nmvSessionVariable.setResourceGroup(rg);\n}\nif (Config.enable_materialized_view_spill &&\n!mvSessionVariable.getEnableSpill() &&\n!mvProperty.getProperties().containsKey(MV_SESSION_ENABLE_SPILL)) {\nmvSessionVariable.setEnableSpill(true);\n}\nif (!mvProperty.getProperties().containsKey(MV_SESSION_TIMEOUT)) {\nmvSessionVariable.setQueryTimeoutS(MV_DEFAULT_QUERY_TIMEOUT);\n}\n}\nprivate void postProcess() {\nmvContext.ctx.getSessionVariable().setTransactionVisibleWaitTimeout(oldTransactionVisibleWaitTimeout);\n}\npublic MVTaskRunExtraMessage getMVTaskRunExtraMessage() {\nif (this.mvContext.status == null) {\nreturn null;\n}\nreturn this.mvContext.status.getMvTaskRunExtraMessage();\n}\n@VisibleForTesting\npublic void 
filterPartitionByRefreshNumber(Set partitionsToRefresh, MaterializedView materializedView) {\nint partitionRefreshNumber = materializedView.getTableProperty().getPartitionRefreshNumber();\nif (partitionRefreshNumber <= 0) {\nreturn;\n}\nMap> rangePartitionMap = materializedView.getRangePartitionMap();\nif (partitionRefreshNumber >= rangePartitionMap.size()) {\nreturn;\n}\nMap> mappedPartitionsToRefresh = Maps.newHashMap();\nfor (String partitionName : partitionsToRefresh) {\nmappedPartitionsToRefresh.put(partitionName, rangePartitionMap.get(partitionName));\n}\nLinkedHashMap> sortedPartition = mappedPartitionsToRefresh.entrySet().stream()\n.sorted(Map.Entry.comparingByValue(RangeUtils.RANGE_COMPARATOR))\n.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (e1, e2) -> e1, LinkedHashMap::new));\nIterator partitionNameIter = sortedPartition.keySet().iterator();\nfor (int i = 0; i < partitionRefreshNumber; i++) {\nif (partitionNameIter.hasNext()) {\npartitionNameIter.next();\n}\n}\nString nextPartitionStart = null;\nString endPartitionName = null;\nif (partitionNameIter.hasNext()) {\nString startPartitionName = partitionNameIter.next();\nRange partitionKeyRange = mappedPartitionsToRefresh.get(startPartitionName);\nnextPartitionStart = AnalyzerUtils.parseLiteralExprToDateString(partitionKeyRange.lowerEndpoint(), 0);\nendPartitionName = startPartitionName;\npartitionsToRefresh.remove(endPartitionName);\n}\nwhile (partitionNameIter.hasNext()) {\nendPartitionName = partitionNameIter.next();\npartitionsToRefresh.remove(endPartitionName);\n}\nmvContext.setNextPartitionStart(nextPartitionStart);\nif (endPartitionName != null) {\nPartitionKey upperEndpoint = mappedPartitionsToRefresh.get(endPartitionName).upperEndpoint();\nmvContext.setNextPartitionEnd(AnalyzerUtils.parseLiteralExprToDateString(upperEndpoint, 0));\n} else {\nmvContext.setNextPartitionEnd(null);\n}\n}\nprivate void generateNextTaskRun() {\nTaskManager taskManager = GlobalStateMgr.getCurrentState().getTaskManager();\nMap properties = mvContext.getProperties();\nlong mvId = Long.parseLong(properties.get(MV_ID));\nString taskName = TaskBuilder.getMvTaskName(mvId);\nMap newProperties = Maps.newHashMap();\nfor (Map.Entry proEntry : properties.entrySet()) {\nif (proEntry.getValue() != null) {\nnewProperties.put(proEntry.getKey(), proEntry.getValue());\n}\n}\nnewProperties.put(TaskRun.PARTITION_START, mvContext.getNextPartitionStart());\nnewProperties.put(TaskRun.PARTITION_END, mvContext.getNextPartitionEnd());\nExecuteOption option = new ExecuteOption(Constants.TaskRunPriority.HIGHEST.value(), true, newProperties);\ntaskManager.executeTask(taskName, option);\nLOG.info(\"[MV] Generate a task to refresh next batches of partitions for MV {}-{}, start={}, end={}\",\nmaterializedView.getName(), materializedView.getId(),\nmvContext.getNextPartitionStart(), mvContext.getNextPartitionEnd());\n}\nprivate void refreshExternalTable(TaskRunContext context) {\nfor (Pair tablePair : snapshotBaseTables.values()) {\nBaseTableInfo baseTableInfo = tablePair.first;\nTable table = tablePair.second;\nif (!table.isNativeTableOrMaterializedView() && !table.isHiveView()) {\ncontext.getCtx().getGlobalStateMgr().getMetadataMgr().refreshTable(baseTableInfo.getCatalogName(),\nbaseTableInfo.getDbName(), table, Lists.newArrayList(), true);\n}\n}\n}\n/**\n* After materialized view is refreshed, update materialized view's meta info to record history refreshes.\n*\n* @param refTableAndPartitionNames : refreshed base table and its partition names 
mapping.\n*/\nprivate void updateMeta(Set mvRefreshedPartitions,\nExecPlan execPlan,\nMap> refTableAndPartitionNames) {\nif (!database.writeLockAndCheckExist()) {\nthrow new DmlException(\"update meta failed. database:\" + database.getFullName() + \" not exist\");\n}\ntry {\nTable mv = database.getTable(materializedView.getId());\nif (mv == null) {\nthrow new DmlException(\n\"update meta failed. materialized view:\" + materializedView.getName() + \" not exist\");\n}\nif (mvRefreshedPartitions == null || refTableAndPartitionNames == null) {\nreturn;\n}\nMap> baseTableAndPartitionNames = Maps.newHashMap();\nfor (Map.Entry> e : refTableAndPartitionNames.entrySet()) {\nSet realPartitionNames =\ne.getValue().stream()\n.flatMap(name -> convertMVPartitionNameToRealPartitionName(e.getKey(), name).stream())\n.collect(Collectors.toSet());\nbaseTableAndPartitionNames.put(e.getKey(), realPartitionNames);\n}\nMap> nonRefTableAndPartitionNames = getNonRefTableRefreshPartitions();\nif (!nonRefTableAndPartitionNames.isEmpty()) {\nbaseTableAndPartitionNames.putAll(nonRefTableAndPartitionNames);\n}\nMaterializedView.AsyncRefreshContext refreshContext =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext();\nMap> changedOlapTablePartitionInfos =\ngetSelectedPartitionInfosOfOlapTable(baseTableAndPartitionNames);\nMap> changedExternalTablePartitionInfos\n= getSelectedPartitionInfosOfExternalTable(baseTableAndPartitionNames);\nPreconditions.checkState(changedOlapTablePartitionInfos.size() + changedExternalTablePartitionInfos.size()\n<= baseTableAndPartitionNames.size());\nupdateMetaForOlapTable(refreshContext, changedOlapTablePartitionInfos);\nupdateMetaForExternalTable(refreshContext, changedExternalTablePartitionInfos);\nif (this.getMVTaskRunExtraMessage() != null) {\ntry {\nMVTaskRunExtraMessage extraMessage = getMVTaskRunExtraMessage();\nMap> baseTableRefreshedPartitionsByExecPlan =\ngetBaseTableRefreshedPartitionsByExecPlan(execPlan);\nextraMessage.setBasePartitionsToRefreshMap(baseTableRefreshedPartitionsByExecPlan);\n} catch (Exception e) {\nLOG.warn(\"update task run messages failed:\", e);\n}\n}\n} catch (Exception e) {\nLOG.warn(\"update final meta failed after mv refreshed:\", e);\nthrow e;\n} finally {\ndatabase.writeUnlock();\n}\n}\nprivate void updateMetaForOlapTable(MaterializedView.AsyncRefreshContext refreshContext,\nMap> changedTablePartitionInfos) {\nMap> currentVersionMap =\nrefreshContext.getBaseTableVisibleVersionMap();\nTable partitionTable = null;\nif (mvContext.hasNextBatchPartition()) {\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\npartitionTable = partitionTableAndColumn.first;\n}\nfor (Map.Entry> tableEntry\n: changedTablePartitionInfos.entrySet()) {\nLong tableId = tableEntry.getKey();\nif (partitionTable != null && tableId != partitionTable.getId()) {\ncontinue;\n}\nif (!currentVersionMap.containsKey(tableId)) {\ncurrentVersionMap.put(tableId, Maps.newHashMap());\n}\nMap currentTablePartitionInfo =\ncurrentVersionMap.get(tableId);\nMap partitionInfoMap = tableEntry.getValue();\ncurrentTablePartitionInfo.putAll(partitionInfoMap);\nTable snapshotTable = snapshotBaseTables.get(tableId).second;\nif (snapshotTable.isOlapOrCloudNativeTable()) {\nOlapTable snapshotOlapTable = (OlapTable) snapshotTable;\ncurrentTablePartitionInfo.keySet().removeIf(partitionName ->\n!snapshotOlapTable.getVisiblePartitionNames().contains(partitionName));\n}\n}\nif (!changedTablePartitionInfos.isEmpty()) {\nChangeMaterializedViewRefreshSchemeLog 
changeRefreshSchemeLog =\nnew ChangeMaterializedViewRefreshSchemeLog(materializedView);\nGlobalStateMgr.getCurrentState().getEditLog().logMvChangeRefreshScheme(changeRefreshSchemeLog);\nlong maxChangedTableRefreshTime =\nMvUtils.getMaxTablePartitionInfoRefreshTime(changedTablePartitionInfos.values());\nmaterializedView.getRefreshScheme().setLastRefreshTime(maxChangedTableRefreshTime);\n}\n}\nprivate void updateMetaForExternalTable(\nMaterializedView.AsyncRefreshContext refreshContext,\nMap> changedTablePartitionInfos) {\nMap> currentVersionMap =\nrefreshContext.getBaseTableInfoVisibleVersionMap();\nBaseTableInfo partitionTableInfo = null;\nif (mvContext.hasNextBatchPartition()) {\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\npartitionTableInfo = snapshotBaseTables.get(partitionTableAndColumn.first.getId()).first;\n}\nfor (Map.Entry> tableEntry\n: changedTablePartitionInfos.entrySet()) {\nBaseTableInfo baseTableInfo = tableEntry.getKey();\nif (partitionTableInfo != null && !partitionTableInfo.equals(baseTableInfo)) {\ncontinue;\n}\nif (!currentVersionMap.containsKey(baseTableInfo)) {\ncurrentVersionMap.put(baseTableInfo, Maps.newHashMap());\n}\nMap currentTablePartitionInfo =\ncurrentVersionMap.get(baseTableInfo);\nMap partitionInfoMap = tableEntry.getValue();\ncurrentTablePartitionInfo.putAll(partitionInfoMap);\nSet partitionNames = Sets.newHashSet(PartitionUtil.getPartitionNames(baseTableInfo.getTable()));\ncurrentTablePartitionInfo.keySet().removeIf(partitionName ->\n!partitionNames.contains(partitionName));\n}\nif (!changedTablePartitionInfos.isEmpty()) {\nChangeMaterializedViewRefreshSchemeLog changeRefreshSchemeLog =\nnew ChangeMaterializedViewRefreshSchemeLog(materializedView);\nGlobalStateMgr.getCurrentState().getEditLog().logMvChangeRefreshScheme(changeRefreshSchemeLog);\nlong maxChangedTableRefreshTime =\nMvUtils.getMaxTablePartitionInfoRefreshTime(changedTablePartitionInfos.values());\nmaterializedView.getRefreshScheme().setLastRefreshTime(maxChangedTableRefreshTime);\n}\n}\nprivate void prepare(TaskRunContext context) {\nMap properties = context.getProperties();\nlong mvId = Long.parseLong(properties.get(MV_ID));\ndatabase = GlobalStateMgr.getCurrentState().getDb(context.ctx.getDatabase());\nif (database == null) {\nLOG.warn(\"database {} do not exist when refreshing materialized view:{}\", context.ctx.getDatabase(), mvId);\nthrow new DmlException(\"database \" + context.ctx.getDatabase() + \" do not exist.\");\n}\nTable table = database.getTable(mvId);\nif (table == null) {\nLOG.warn(\"materialized view:{} in database:{} do not exist when refreshing\", mvId,\ncontext.ctx.getDatabase());\nthrow new DmlException(String.format(\"materialized view:%s in database:%s do not exist when refreshing\",\nmvId, context.ctx.getDatabase()));\n}\nmaterializedView = (MaterializedView) table;\nif (!materializedView.isActive()) {\nString errorMsg = String.format(\"Materialized view: %s, id: %d is not active, \" +\n\"skip sync partition and data with base tables\", materializedView.getName(), mvId);\nLOG.warn(errorMsg);\nthrow new DmlException(errorMsg);\n}\noldTransactionVisibleWaitTimeout = context.ctx.getSessionVariable().getTransactionVisibleWaitTimeout();\ncontext.ctx.getSessionVariable().setTransactionVisibleWaitTimeout(Long.MAX_VALUE / 1000);\nmvContext = new MvTaskRunContext(context);\n}\n/**\n* Sync base table's partition infos to be used later.\n*/\nprivate void syncPartitions() {\nsnapshotBaseTables = 
collectBaseTables(materializedView);\nPartitionInfo partitionInfo = materializedView.getPartitionInfo();\nif (!(partitionInfo instanceof SinglePartitionInfo)) {\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\nmvContext.setRefBaseTable(partitionTableAndColumn.first);\nmvContext.setRefBaseTablePartitionColumn(partitionTableAndColumn.second);\n}\nint partitionTTLNumber = materializedView.getTableProperty().getPartitionTTLNumber();\nmvContext.setPartitionTTLNumber(partitionTTLNumber);\nif (partitionInfo instanceof ExpressionRangePartitionInfo) {\nsyncPartitionsForExpr();\n} else if (partitionInfo instanceof ListPartitionInfo) {\nsyncPartitionsForList();\n}\n}\n/**\n* @param tables : base tables of the materialized view\n* @return : return the ref base table and column that materialized view's partition column\n* derives from if it exists, otherwise return null.\n*/\nprivate Pair getRefBaseTableAndPartitionColumn(\nMap> tables) {\nSlotRef slotRef = MaterializedView.getRefBaseTablePartitionSlotRef(materializedView);\nfor (Pair tableInfo : tables.values()) {\nBaseTableInfo baseTableInfo = tableInfo.first;\nTable table = tableInfo.second;\nif (slotRef.getTblNameWithoutAnalyzed().getTbl().equals(baseTableInfo.getTableName())) {\nreturn Pair.create(table, table.getColumn(slotRef.getColumnName()));\n}\n}\nreturn Pair.create(null, null);\n}\nprivate void syncPartitionsForExpr() {\nExpr partitionExpr = materializedView.getFirstPartitionRefTableExpr();\nTable refBaseTable = mvContext.getRefBaseTable();\nColumn refBaseTablePartitionColumn = mvContext.getRefBaseTablePartitionColumn();\nRangePartitionDiff rangePartitionDiff = new RangePartitionDiff();\ndatabase.readLock();\nMap> mvRangePartitionMap = materializedView.getRangePartitionMap();\nMap> refBaseTablePartitionMap;\nMap> refBaseTableMVPartitionMap = Maps.newHashMap();\ntry {\nrefBaseTablePartitionMap = PartitionUtil.getPartitionKeyRange(refBaseTable, refBaseTablePartitionColumn);\nif (!refBaseTable.isNativeTableOrMaterializedView()) {\nrefBaseTableMVPartitionMap = PartitionUtil.getMVPartitionNameMapOfExternalTable(refBaseTable,\nrefBaseTablePartitionColumn, PartitionUtil.getPartitionNames(refBaseTable));\n}\nif (partitionExpr instanceof SlotRef) {\nrangePartitionDiff = SyncPartitionUtils\n.getRangePartitionDiffOfSlotRef(refBaseTablePartitionMap, mvRangePartitionMap);\n} else if (partitionExpr instanceof FunctionCallExpr) {\nFunctionCallExpr functionCallExpr = (FunctionCallExpr) partitionExpr;\nString granularity = ((StringLiteral) functionCallExpr.getChild(0)).getValue().toLowerCase();\nrangePartitionDiff = SyncPartitionUtils.getRangePartitionDiffOfExpr(refBaseTablePartitionMap, mvRangePartitionMap,\ngranularity, refBaseTablePartitionColumn.getPrimitiveType());\n}\n} catch (UserException e) {\nLOG.warn(\"Materialized view compute partition difference with base table failed.\", e);\nreturn;\n} finally {\ndatabase.readUnlock();\n}\nMap> deletes = rangePartitionDiff.getDeletes();\nfor (String mvPartitionName : deletes.keySet()) {\ndropPartition(database, materializedView, mvPartitionName);\n}\nLOG.info(\"The process of synchronizing materialized view [{}] delete partitions range [{}]\",\nmaterializedView.getName(), deletes);\nMap partitionProperties = getPartitionProperties(materializedView);\nDistributionDesc distributionDesc = getDistributionDesc(materializedView);\nMap> adds = rangePartitionDiff.getAdds();\naddRangePartitions(database, materializedView, adds, partitionProperties, distributionDesc);\nfor 
(Map.Entry> addEntry : adds.entrySet()) {\nString mvPartitionName = addEntry.getKey();\nmvRangePartitionMap.put(mvPartitionName, addEntry.getValue());\n}\nLOG.info(\"The process of synchronizing materialized view [{}] add partitions range [{}]\",\nmaterializedView.getName(), adds);\nMap> baseToMvNameRef = SyncPartitionUtils\n.getIntersectedPartitions(refBaseTablePartitionMap, mvRangePartitionMap);\nMap> mvToBaseNameRef = SyncPartitionUtils\n.getIntersectedPartitions(mvRangePartitionMap, refBaseTablePartitionMap);\nmvContext.setRefBaseTableMVIntersectedPartitions(baseToMvNameRef);\nmvContext.setMvRefBaseTableIntersectedPartitions(mvToBaseNameRef);\nmvContext.setRefBaseTableRangePartitionMap(refBaseTablePartitionMap);\nmvContext.setExternalRefBaseTableMVPartitionMap(refBaseTableMVPartitionMap);\n}\nprivate void syncPartitionsForList() {\nTable partitionBaseTable = mvContext.getRefBaseTable();\nColumn partitionColumn = mvContext.getRefBaseTablePartitionColumn();\nListPartitionDiff listPartitionDiff;\nMap>> baseListPartitionMap;\nMap>> listPartitionMap = materializedView.getListPartitionMap();\ndatabase.readLock();\ntry {\nbaseListPartitionMap = PartitionUtil.getPartitionList(partitionBaseTable, partitionColumn);\nlistPartitionDiff = SyncPartitionUtils.getListPartitionDiff(baseListPartitionMap, listPartitionMap);\n} catch (UserException e) {\nLOG.warn(\"Materialized view compute partition difference with base table failed.\", e);\nreturn;\n} finally {\ndatabase.readUnlock();\n}\nMap>> deletes = listPartitionDiff.getDeletes();\nfor (String mvPartitionName : deletes.keySet()) {\ndropPartition(database, materializedView, mvPartitionName);\n}\nLOG.info(\"The process of synchronizing materialized view [{}] delete partitions range [{}]\",\nmaterializedView.getName(), deletes);\nMap partitionProperties = getPartitionProperties(materializedView);\nDistributionDesc distributionDesc = getDistributionDesc(materializedView);\nMap>> adds = listPartitionDiff.getAdds();\naddListPartitions(database, materializedView, adds, partitionProperties, distributionDesc);\nLOG.info(\"The process of synchronizing materialized view [{}] add partitions list [{}]\",\nmaterializedView.getName(), adds);\nMap> baseToMvNameRef = Maps.newHashMap();\nfor (String partitionName : baseListPartitionMap.keySet()) {\nbaseToMvNameRef.put(partitionName, Sets.newHashSet(partitionName));\n}\nmvContext.setRefBaseTableMVIntersectedPartitions(baseToMvNameRef);\nmvContext.setMvRefBaseTableIntersectedPartitions(baseToMvNameRef);\nmvContext.setRefBaseTableListPartitionMap(baseListPartitionMap);\n}\nprivate boolean needToRefreshTable(Table table) {\nreturn CollectionUtils.isNotEmpty(materializedView.getUpdatedPartitionNamesOfTable(table, false));\n}\n/**\n* @param table : Whether this table can be supported for incremental refresh by partition or not.\n*/\nprivate boolean unSupportRefreshByPartition(Table table) {\nreturn !table.isOlapTableOrMaterializedView() && !table.isHiveTable()\n&& !table.isJDBCTable() && !table.isCloudNativeTable();\n}\n/**\n* Whether non-partitioned materialized view needs to be refreshed or not, it needs refresh when:\n* - its base table is not supported refresh by partition.\n* - its base table has updated.\n*/\nprivate boolean isNonPartitionedMVNeedToRefresh() {\nfor (Pair tablePair : snapshotBaseTables.values()) {\nTable snapshotTable = tablePair.second;\nif (unSupportRefreshByPartition(snapshotTable)) {\nreturn true;\n}\nif (needToRefreshTable(snapshotTable)) {\nreturn true;\n}\n}\nreturn false;\n}\n/**\n* Whether 
partitioned materialized view needs to be refreshed or not base on the non-ref base tables, it needs refresh when:\n* - its non-ref base table except un-supported base table has updated.\n*/\nprivate boolean isPartitionedMVNeedToRefreshBaseOnNonRefTables(Table partitionTable) {\nfor (Pair tablePair : snapshotBaseTables.values()) {\nTable snapshotTable = tablePair.second;\nif (snapshotTable.getId() == partitionTable.getId()) {\ncontinue;\n}\nif (unSupportRefreshByPartition(snapshotTable)) {\ncontinue;\n}\nif (needToRefreshTable(snapshotTable)) {\nreturn true;\n}\n}\nreturn false;\n}\n@VisibleForTesting\npublic Set getPartitionsToRefreshForMaterializedView(Map properties)\nthrows AnalysisException {\nString start = properties.get(TaskRun.PARTITION_START);\nString end = properties.get(TaskRun.PARTITION_END);\nboolean force = Boolean.parseBoolean(properties.get(TaskRun.FORCE));\nPartitionInfo partitionInfo = materializedView.getPartitionInfo();\nSet needRefreshMvPartitionNames = getPartitionsToRefreshForMaterializedView(partitionInfo,\nstart, end, force);\nif (this.getMVTaskRunExtraMessage() != null) {\nMVTaskRunExtraMessage extraMessage = this.getMVTaskRunExtraMessage();\nextraMessage.setForceRefresh(force);\nextraMessage.setPartitionStart(start);\nextraMessage.setPartitionEnd(end);\n}\nreturn needRefreshMvPartitionNames;\n}\n/**\n* @param mvPartitionInfo : materialized view's partition info\n* @param start : materialized view's refresh start in this task run\n* @param end : materialized view's refresh end in this task run\n* @param force : whether this task run is force or not\n* @return\n* @throws AnalysisException\n*/\nprivate Set getPartitionsToRefreshForMaterializedView(PartitionInfo mvPartitionInfo,\nString start,\nString end,\nboolean force) throws AnalysisException {\nint partitionTTLNumber = mvContext.getPartitionTTLNumber();\nif (force && start == null && end == null) {\nif (mvPartitionInfo instanceof SinglePartitionInfo) {\nreturn materializedView.getVisiblePartitionNames();\n} else {\nif (mvPartitionInfo instanceof ListPartitionInfo) {\nreturn materializedView.getValidListPartitionMap(partitionTTLNumber).keySet();\n} else {\nreturn materializedView.getValidRangePartitionMap(partitionTTLNumber).keySet();\n}\n}\n}\nSet needRefreshMvPartitionNames = Sets.newHashSet();\nif (mvPartitionInfo instanceof SinglePartitionInfo) {\nif (force || isNonPartitionedMVNeedToRefresh()) {\nreturn materializedView.getVisiblePartitionNames();\n}\n} else if (mvPartitionInfo instanceof ExpressionRangePartitionInfo) {\nExpr partitionExpr = materializedView.getFirstPartitionRefTableExpr();\nTable refBaseTable = mvContext.getRefBaseTable();\nboolean isAutoRefresh = (mvContext.type == Constants.TaskType.PERIODICAL ||\nmvContext.type == Constants.TaskType.EVENT_TRIGGERED);\nSet mvRangePartitionNames = SyncPartitionUtils.getPartitionNamesByRangeWithPartitionLimit(\nmaterializedView, start, end, partitionTTLNumber, isAutoRefresh);\nif (isPartitionedMVNeedToRefreshBaseOnNonRefTables(refBaseTable)) {\nif (start == null && end == null) {\nreturn mvRangePartitionNames;\n} else {\nreturn getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(refBaseTable,\nmvRangePartitionNames, true);\n}\n}\nif (partitionExpr instanceof SlotRef) {\nreturn getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(refBaseTable, mvRangePartitionNames, force);\n} else if (partitionExpr instanceof FunctionCallExpr) {\nneedRefreshMvPartitionNames = 
getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(refBaseTable,\nmvRangePartitionNames, force);\nSet baseChangedPartitionNames = getBasePartitionNamesByMVPartitionNames(needRefreshMvPartitionNames);\nLOG.debug(\"Start calcPotentialRefreshPartition, needRefreshMvPartitionNames: {},\" +\n\" baseChangedPartitionNames: {}\", needRefreshMvPartitionNames, baseChangedPartitionNames);\nSyncPartitionUtils.calcPotentialRefreshPartition(needRefreshMvPartitionNames, baseChangedPartitionNames,\nmvContext.getRefBaseTableMVIntersectedPartitions(), mvContext.getMvRefBaseTableIntersectedPartitions());\nLOG.debug(\"Finish calcPotentialRefreshPartition, needRefreshMvPartitionNames: {},\" +\n\" baseChangedPartitionNames: {}\", needRefreshMvPartitionNames, baseChangedPartitionNames);\n}\n} else if (mvPartitionInfo instanceof ListPartitionInfo) {\nTable partitionTable = mvContext.getRefBaseTable();\nboolean isAutoRefresh = (mvContext.type == Constants.TaskType.PERIODICAL ||\nmvContext.type == Constants.TaskType.EVENT_TRIGGERED);\nSet mvListPartitionNames = SyncPartitionUtils.getPartitionNamesByListWithPartitionLimit(\nmaterializedView, start, end, partitionTTLNumber, isAutoRefresh);\nif (isPartitionedMVNeedToRefreshBaseOnNonRefTables(partitionTable)) {\nif (start == null && end == null) {\nreturn mvListPartitionNames;\n} else {\nreturn getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(partitionTable,\nmvListPartitionNames, true);\n}\n}\nreturn getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(partitionTable, mvListPartitionNames, force);\n} else {\nthrow new DmlException(\"unsupported partition info type:\" + mvPartitionInfo.getClass().getName());\n}\nreturn needRefreshMvPartitionNames;\n}\nprivate Set getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(Table refBaseTable,\nSet mvRangePartitionNames,\nboolean force) {\nif (force || unSupportRefreshByPartition(refBaseTable)) {\nreturn Sets.newHashSet(mvRangePartitionNames);\n}\nSet updatePartitionNames = materializedView.getUpdatedPartitionNamesOfTable(refBaseTable, false);\nif (updatePartitionNames == null) {\nreturn mvRangePartitionNames;\n}\nSet result = getMVPartitionNamesByBasePartitionNames(updatePartitionNames);\nresult.retainAll(mvRangePartitionNames);\nreturn result;\n}\n/**\n*\n* @param basePartitionNames : ref base table partition names to check.\n* @return : Return mv corresponding partition names to the ref base table partition names.\n*/\nprivate Set getMVPartitionNamesByBasePartitionNames(Set basePartitionNames) {\nSet result = Sets.newHashSet();\nMap> refBaseTableMVPartitionMap = mvContext.getRefBaseTableMVIntersectedPartitions();\nfor (String basePartitionName : basePartitionNames) {\nif (refBaseTableMVPartitionMap.containsKey(basePartitionName)) {\nresult.addAll(refBaseTableMVPartitionMap.get(basePartitionName));\n} else {\nLOG.warn(\"Cannot find need refreshed ref base table partition from synced partition info: {}\",\nbasePartitionName);\n}\n}\nreturn result;\n}\n/**\n* @param mvPartitionNames : the need to refresh materialized view partition names\n* @return : the corresponding ref base table partition names to the materialized view partition names\n*/\nprivate Set getBasePartitionNamesByMVPartitionNames(Set mvPartitionNames) {\nSet result = Sets.newHashSet();\nMap> mvRefBaseTablePartitionMap = mvContext.getMvRefBaseTableIntersectedPartitions();\nfor (String mvPartitionName : mvPartitionNames) {\nif (mvRefBaseTablePartitionMap.containsKey(mvPartitionName)) 
{\nresult.addAll(mvRefBaseTablePartitionMap.get(mvPartitionName));\n} else {\nLOG.warn(\"Cannot find need refreshed mv table partition from synced partition info: {}\",\nmvPartitionName);\n}\n}\nreturn result;\n}\nprivate ExecPlan generateRefreshPlan(ConnectContext ctx, InsertStmt insertStmt) throws AnalysisException {\nreturn StatementPlanner.plan(insertStmt, ctx);\n}\n@VisibleForTesting\npublic InsertStmt generateInsertStmt(Set materializedViewPartitions,\nMap> refTableRefreshPartitions,\nMaterializedView materializedView) throws AnalysisException {\nConnectContext ctx = mvContext.getCtx();\nctx.getAuditEventBuilder().reset();\nctx.getAuditEventBuilder()\n.setTimestamp(System.currentTimeMillis())\n.setClientIp(mvContext.getRemoteIp())\n.setUser(ctx.getQualifiedUser())\n.setDb(ctx.getDatabase());\nctx.setThreadLocalInfo();\nctx.getSessionVariable().setEnableMaterializedViewRewrite(false);\nString definition = mvContext.getDefinition();\nInsertStmt insertStmt =\n(InsertStmt) SqlParser.parse(definition, ctx.getSessionVariable()).get(0);\ninsertStmt.setTargetPartitionNames(new PartitionNames(false, new ArrayList<>(materializedViewPartitions)));\ninsertStmt.setSystem(true);\nAnalyzer.analyze(insertStmt, ctx);\nQueryStatement queryStatement = insertStmt.getQueryStatement();\nMultimap tableRelations =\nAnalyzerUtils.collectAllTableRelation(queryStatement);\nfor (Map.Entry nameTableRelationEntry : tableRelations.entries()) {\nif (refTableRefreshPartitions.containsKey(nameTableRelationEntry.getKey())) {\nSet tablePartitionNames = refTableRefreshPartitions.get(nameTableRelationEntry.getKey());\nTableRelation tableRelation = nameTableRelationEntry.getValue();\ntableRelation.setPartitionNames(\nnew PartitionNames(false, new ArrayList<>(tablePartitionNames)));\nExpr partitionPredicates = generatePartitionPredicate(tablePartitionNames, queryStatement,\nmaterializedView.getPartitionInfo());\nif (partitionPredicates != null) {\nList slots = Lists.newArrayList();\npartitionPredicates.collect(SlotRef.class, slots);\nScope tableRelationScope = tableRelation.getScope();\nif (canResolveSlotsInTheScope(slots, tableRelationScope)) {\ntableRelation.setPartitionPredicate(partitionPredicates);\n}\nQueryRelation queryRelation = queryStatement.getQueryRelation();\nif (queryRelation instanceof SelectRelation) {\nSelectRelation selectRelation = ((SelectRelation) queryStatement.getQueryRelation());\nselectRelation.setWhereClause(Expr.compoundAnd(Lists.newArrayList(selectRelation.getWhereClause(),\npartitionPredicates)));\n}\n}\n}\n}\nreturn insertStmt;\n}\n/**\n* Check whether to push down predicate expr with the slot refs into the scope.\n* @param slots : slot refs that are contained in the predicate expr\n* @param scope : scope that try to push down into.\n* @return\n*/\nprivate boolean canResolveSlotsInTheScope(List slots, Scope scope) {\nreturn slots.stream().allMatch(s -> scope.tryResolveField(s).isPresent());\n}\n/**\n* Generate partition predicates for the materialized view's refresh so that it can be refreshed by the incremental partitions.\n*\n* @param tablePartitionNames : the ref base table partition names used to prune the query\n* @param queryStatement : the materialized view's defined query statement\n* @param mvPartitionInfo : the materialized view's partition information\n* @return : the partition predicate expr, or null if it cannot be generated\n* @throws AnalysisException\n*/\nprivate Expr generatePartitionPredicate(Set tablePartitionNames, QueryStatement queryStatement,\nPartitionInfo mvPartitionInfo)\nthrows AnalysisException {\nSlotRef partitionSlot = 
MaterializedView.getRefBaseTablePartitionSlotRef(materializedView);\nList columnOutputNames = queryStatement.getQueryRelation().getColumnOutputNames();\nList outputExpressions = queryStatement.getQueryRelation().getOutputExpression();\nExpr outputPartitionSlot = null;\nfor (int i = 0; i < outputExpressions.size(); ++i) {\nif (columnOutputNames.get(i).equalsIgnoreCase(partitionSlot.getColumnName())) {\noutputPartitionSlot = outputExpressions.get(i);\nbreak;\n} else {\nSlotRef slotRef = outputExpressions.get(i).unwrapSlotRef();\nif (slotRef != null && slotRef.getColumnName().equals(partitionSlot.getColumnName())) {\noutputPartitionSlot = outputExpressions.get(i);\nbreak;\n}\n}\n}\nif (outputPartitionSlot == null) {\nLOG.warn(\"Generate partition predicate failed: \" +\n\"cannot find partition slot ref {} from query relation\", partitionSlot);\nreturn null;\n}\nif (mvPartitionInfo.isRangePartition()) {\nList> sourceTablePartitionRange = Lists.newArrayList();\nMap> refBaseTableRangePartitionMap = mvContext.getRefBaseTableRangePartitionMap();\nfor (String partitionName : tablePartitionNames) {\nsourceTablePartitionRange.add(refBaseTableRangePartitionMap.get(partitionName));\n}\nsourceTablePartitionRange = MvUtils.mergeRanges(sourceTablePartitionRange);\nList partitionPredicates =\nMvUtils.convertRange(outputPartitionSlot, sourceTablePartitionRange);\nOptional> nullRange = sourceTablePartitionRange.stream().\nfilter(range -> range.lowerEndpoint().isMinValue()).findAny();\nif (nullRange.isPresent()) {\nExpr isNullPredicate = new IsNullPredicate(outputPartitionSlot, false);\npartitionPredicates.add(isNullPredicate);\n}\nreturn Expr.compoundOr(partitionPredicates);\n} else if (mvPartitionInfo.getType() == PartitionType.LIST) {\nMap>> baseListPartitionMap = mvContext.getRefBaseTableListPartitionMap();\nType partitionType = mvContext.getRefBaseTablePartitionColumn().getType();\nList sourceTablePartitionList = Lists.newArrayList();\nfor (String tablePartitionName : tablePartitionNames) {\nList> values = baseListPartitionMap.get(tablePartitionName);\nfor (List value : values) {\nLiteralExpr partitionValue = new PartitionValue(value.get(0)).getValue(partitionType);\nsourceTablePartitionList.add(partitionValue);\n}\n}\nList partitionPredicates = MvUtils.convertList(outputPartitionSlot, sourceTablePartitionList);\nreturn Expr.compoundOr(partitionPredicates);\n} else {\nLOG.warn(\"Generate partition predicate failed: \" +\n\"partition slot {} is not supported yet: {}\", partitionSlot, mvPartitionInfo);\nreturn null;\n}\n}\n@VisibleForTesting\npublic void refreshMaterializedView(MvTaskRunContext mvContext, ExecPlan execPlan, InsertStmt insertStmt)\nthrows Exception {\nlong beginTimeInNanoSecond = TimeUtils.getStartTime();\nPreconditions.checkNotNull(execPlan);\nPreconditions.checkNotNull(insertStmt);\nConnectContext ctx = mvContext.getCtx();\nStmtExecutor executor = new StmtExecutor(ctx, insertStmt);\nctx.setExecutor(executor);\nif (ctx.getParent() != null && ctx.getParent().getExecutor() != null) {\nStmtExecutor parentStmtExecutor = ctx.getParent().getExecutor();\nparentStmtExecutor.registerSubStmtExecutor(executor);\n}\nctx.setStmtId(new AtomicInteger().incrementAndGet());\nctx.setExecutionId(UUIDUtil.toTUniqueId(ctx.getQueryId()));\ntry {\nexecutor.handleDMLStmtWithProfile(execPlan, insertStmt, beginTimeInNanoSecond);\n} catch (Exception e) {\nLOG.warn(\"refresh materialized view {} failed: {}\", materializedView.getName(), e);\nthrow e;\n} finally {\nauditAfterExec(mvContext, executor.getParsedStmt(), 
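/* runs in the finally block so that both successful and failed refreshes are audited */ 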
executor.getQueryStatisticsForAuditLog());\n}\n}\n@VisibleForTesting\npublic Map> collectBaseTables(MaterializedView materializedView) {\nMap> tables = Maps.newHashMap();\nList baseTableInfos = materializedView.getBaseTableInfos();\nfor (BaseTableInfo baseTableInfo : baseTableInfos) {\nDatabase db = baseTableInfo.getDb();\nif (db == null) {\nLOG.warn(\"database {} do not exist when refreshing materialized view:{}\",\nbaseTableInfo.getDbInfoStr(), materializedView.getName());\nthrow new DmlException(\"database \" + baseTableInfo.getDbInfoStr() + \" do not exist.\");\n}\nTable table = baseTableInfo.getTable();\nif (table == null) {\nLOG.warn(\"table {} do not exist when refreshing materialized view:{}\",\nbaseTableInfo.getTableInfoStr(), materializedView.getName());\nthrow new DmlException(\"Materialized view base table: %s not exist.\", baseTableInfo.getTableInfoStr());\n}\ndb.readLock();\ntry {\nif (table.isOlapTable()) {\nTable copied = new OlapTable();\nif (!DeepCopy.copy(table, copied, OlapTable.class)) {\nthrow new DmlException(\"Failed to copy olap table: %s\", table.getName());\n}\ntables.put(table.getId(), Pair.create(baseTableInfo, copied));\n} else if (table.isCloudNativeTable()) {\nLakeTable copied = DeepCopy.copyWithGson(table, LakeTable.class);\nif (copied == null) {\nthrow new DmlException(\"Failed to copy lake table: %s\", table.getName());\n}\ntables.put(table.getId(), Pair.create(baseTableInfo, copied));\n} else {\ntables.put(table.getId(), Pair.create(baseTableInfo, table));\n}\n} finally {\ndb.readUnlock();\n}\n}\nreturn tables;\n}\nprivate Map getPartitionProperties(MaterializedView materializedView) {\nMap partitionProperties = new HashMap<>(4);\npartitionProperties.put(\"replication_num\",\nString.valueOf(materializedView.getDefaultReplicationNum()));\npartitionProperties.put(\"storage_medium\", materializedView.getStorageMedium());\nString storageCooldownTime =\nmaterializedView.getTableProperty().getProperties().get(\"storage_cooldown_time\");\nif (storageCooldownTime != null\n&& !storageCooldownTime.equals(String.valueOf(DataProperty.MAX_COOLDOWN_TIME_MS))) {\nString storageCooldownTimeStr = TimeUtils.longToTimeString(Long.parseLong(storageCooldownTime));\npartitionProperties.put(\"storage_cooldown_time\", storageCooldownTimeStr);\n}\nreturn partitionProperties;\n}\nprivate DistributionDesc getDistributionDesc(MaterializedView materializedView) {\nDistributionInfo distributionInfo = materializedView.getDefaultDistributionInfo();\nif (distributionInfo instanceof HashDistributionInfo) {\nList distColumnNames = new ArrayList<>();\nfor (Column distributionColumn : ((HashDistributionInfo) distributionInfo).getDistributionColumns()) {\ndistColumnNames.add(distributionColumn.getName());\n}\nreturn new HashDistributionDesc(distributionInfo.getBucketNum(), distColumnNames);\n} else {\nreturn new RandomDistributionDesc();\n}\n}\nprivate void addRangePartitions(Database database, MaterializedView materializedView,\nMap> adds, Map partitionProperties,\nDistributionDesc distributionDesc) {\nif (adds.isEmpty()) {\nreturn;\n}\nList partitionDescs = Lists.newArrayList();\nfor (Map.Entry> addEntry : adds.entrySet()) {\nString mvPartitionName = addEntry.getKey();\nRange partitionKeyRange = addEntry.getValue();\nString lowerBound = partitionKeyRange.lowerEndpoint().getKeys().get(0).getStringValue();\nString upperBound = partitionKeyRange.upperEndpoint().getKeys().get(0).getStringValue();\nboolean isMaxValue = partitionKeyRange.upperEndpoint().isMaxValue();\nPartitionValue 
upperPartitionValue;\nif (isMaxValue) {\nupperPartitionValue = PartitionValue.MAX_VALUE;\n} else {\nupperPartitionValue = new PartitionValue(upperBound);\n}\nPartitionKeyDesc partitionKeyDesc = new PartitionKeyDesc(\nCollections.singletonList(new PartitionValue(lowerBound)),\nCollections.singletonList(upperPartitionValue));\nSingleRangePartitionDesc singleRangePartitionDesc =\nnew SingleRangePartitionDesc(false, mvPartitionName, partitionKeyDesc, partitionProperties);\npartitionDescs.add(singleRangePartitionDesc);\n}\nRangePartitionDesc rangePartitionDesc =\nnew RangePartitionDesc(materializedView.getPartitionColumnNames(), partitionDescs);\ntry {\nGlobalStateMgr.getCurrentState().addPartitions(\ndatabase, materializedView.getName(),\nnew AddPartitionClause(rangePartitionDesc, distributionDesc,\npartitionProperties, false));\n} catch (Exception e) {\nthrow new DmlException(\"Expression add partition failed: %s, db: %s, table: %s\", e, e.getMessage(),\ndatabase.getFullName(), materializedView.getName());\n}\n}\nprivate void addListPartitions(Database database, MaterializedView materializedView,\nMap>> adds, Map partitionProperties,\nDistributionDesc distributionDesc) {\nif (adds.isEmpty()) {\nreturn;\n}\nfor (Map.Entry>> addEntry : adds.entrySet()) {\nString mvPartitionName = addEntry.getKey();\nList> partitionKeyList = addEntry.getValue();\nMultiItemListPartitionDesc multiItemListPartitionDesc =\nnew MultiItemListPartitionDesc(false, mvPartitionName, partitionKeyList, partitionProperties);\ntry {\nGlobalStateMgr.getCurrentState().addPartitions(\ndatabase, materializedView.getName(), new AddPartitionClause(\nmultiItemListPartitionDesc, distributionDesc,\npartitionProperties, false));\n} catch (Exception e) {\nthrow new DmlException(\"add list partition failed: %s, db: %s, table: %s\", e, e.getMessage(),\ndatabase.getFullName(), materializedView.getName());\n}\n}\n}\nprivate void dropPartition(Database database, MaterializedView materializedView, String mvPartitionName) {\nString dropPartitionName = materializedView.getPartition(mvPartitionName).getName();\nif (!database.writeLockAndCheckExist()) {\nthrow new DmlException(\"drop partition failed. database:\" + database.getFullName() + \" not exist\");\n}\ntry {\nTable mv = database.getTable(materializedView.getId());\nif (mv == null) {\nthrow new DmlException(\"drop partition failed. mv:\" + materializedView.getName() + \" not exist\");\n}\nPartition mvPartition = mv.getPartition(dropPartitionName);\nif (mvPartition == null) {\nthrow new DmlException(\"drop partition failed. 
partition:\" + dropPartitionName + \" not exist\");\n}\nGlobalStateMgr.getCurrentState().dropPartition(\ndatabase, materializedView,\nnew DropPartitionClause(false, dropPartitionName, false, true));\n} catch (Exception e) {\nthrow new DmlException(\"Expression add partition failed: %s, db: %s, table: %s\", e, e.getMessage(),\ndatabase.getFullName(), materializedView.getName());\n} finally {\ndatabase.writeUnlock();\n}\n}\n/**\n* For external table, the partition name is normalized which should convert it into original partition name.\n*\n* For multi-partition columns, `refTableAndPartitionNames` is not fully exact to describe which partitions\n* of ref base table are refreshed, use `getSelectedPartitionInfosOfExternalTable` later if we can solve the multi\n* partition columns problem.\n* eg:\n* partitionName1 : par_col=0/par_date=2020-01-01 => p20200101\n* partitionName2 : par_col=1/par_date=2020-01-01 => p20200101\n*/\nprivate Set convertMVPartitionNameToRealPartitionName(Table table, String mvPartitionName) {\nif (!table.isNativeTableOrMaterializedView()) {\nMap> refBaseTableRangePartitionMap = mvContext.getExternalRefBaseTableMVPartitionMap();\nPreconditions.checkState(refBaseTableRangePartitionMap.containsKey(mvPartitionName));\nreturn refBaseTableRangePartitionMap.get(mvPartitionName);\n} else {\nreturn Sets.newHashSet(mvPartitionName);\n}\n}\n/**\n* @param mvToRefreshedPartitions : to-refreshed materialized view partition names\n* @return : return to-refreshed base table's table name and partition names mapping\n*/\nprivate Map> getRefTableRefreshPartitions(Set mvToRefreshedPartitions) {\nTable refBaseTable = mvContext.getRefBaseTable();\nMap> refTableAndPartitionNames = Maps.newHashMap();\nfor (Pair tablePair : snapshotBaseTables.values()) {\nTable table = tablePair.second;\nif (refBaseTable != null && refBaseTable == table) {\nSet needRefreshTablePartitionNames = Sets.newHashSet();\nMap> mvToBaseNameRef = mvContext.getMvRefBaseTableIntersectedPartitions();\nfor (String mvPartitionName : mvToRefreshedPartitions) {\nneedRefreshTablePartitionNames.addAll(mvToBaseNameRef.get(mvPartitionName));\n}\nrefTableAndPartitionNames.put(table, needRefreshTablePartitionNames);\nreturn refTableAndPartitionNames;\n}\n}\nreturn refTableAndPartitionNames;\n}\n/**\n* Return all non-ref base table and refreshed partitions.\n*/\nprivate Map> getNonRefTableRefreshPartitions() {\nTable partitionTable = mvContext.getRefBaseTable();\nMap> tableNamePartitionNames = Maps.newHashMap();\nfor (Pair tablePair : snapshotBaseTables.values()) {\nTable table = tablePair.second;\nif (partitionTable != null && partitionTable == table) {\n} else {\nif (table.isNativeTableOrMaterializedView()) {\ntableNamePartitionNames.put(table, ((OlapTable) table).getVisiblePartitionNames());\n} else if (table.isHiveTable()) {\ntableNamePartitionNames.put(table, Sets.newHashSet(PartitionUtil.getPartitionNames(table)));\n} else if (table.isView()) {\ncontinue;\n} else {\nLOG.warn(\"Do not support get partition names and columns for\" +\n\" table type {}\", table.getType());\n}\n}\n}\nreturn tableNamePartitionNames;\n}\n/**\n* Collect base olap tables and its partition infos based on refreshed table infos.\n*\n* @param baseTableAndPartitionNames : refreshed base table and its partition names mapping.\n* @return\n*/\nprivate Map> getSelectedPartitionInfosOfOlapTable(\nMap> baseTableAndPartitionNames) {\nMap> changedOlapTablePartitionInfos = Maps.newHashMap();\nfor (Map.Entry> entry : baseTableAndPartitionNames.entrySet()) {\nif 
(entry.getKey().isNativeTableOrMaterializedView()) {\nMap partitionInfos = Maps.newHashMap();\nOlapTable olapTable = (OlapTable) entry.getKey();\nfor (String partitionName : entry.getValue()) {\nPartition partition = olapTable.getPartition(partitionName);\nMaterializedView.BasePartitionInfo basePartitionInfo = new MaterializedView.BasePartitionInfo(\npartition.getId(), partition.getVisibleVersion(), partition.getVisibleVersionTime());\npartitionInfos.put(partition.getName(), basePartitionInfo);\n}\nchangedOlapTablePartitionInfos.put(olapTable.getId(), partitionInfos);\n}\n}\nreturn changedOlapTablePartitionInfos;\n}\n/**\n* Collect base hive tables and its partition infos based on refreshed table infos.\n*\n* @param baseTableAndPartitionNames : refreshed base table and its partition names mapping.\n* @return\n*/\nprivate Map> getSelectedPartitionInfosOfExternalTable(\nMap> baseTableAndPartitionNames) {\nMap> changedOlapTablePartitionInfos = Maps.newHashMap();\nfor (Map.Entry> entry : baseTableAndPartitionNames.entrySet()) {\nif (entry.getKey().isHiveTable()) {\nHiveTable hiveTable = (HiveTable) entry.getKey();\nOptional baseTableInfoOptional = materializedView.getBaseTableInfos().stream().filter(\nbaseTableInfo -> baseTableInfo.getTableIdentifier().equals(hiveTable.getTableIdentifier())).\nfindAny();\nif (!baseTableInfoOptional.isPresent()) {\ncontinue;\n}\nBaseTableInfo baseTableInfo = baseTableInfoOptional.get();\nMap partitionInfos =\ngetSelectedPartitionInfos(hiveTable, Lists.newArrayList(entry.getValue()),\nbaseTableInfo);\nchangedOlapTablePartitionInfos.put(baseTableInfo, partitionInfos);\n} else if (entry.getKey().isJDBCTable()) {\nJDBCTable jdbcTable = (JDBCTable) entry.getKey();\nOptional baseTableInfoOptional = materializedView.getBaseTableInfos().stream().filter(\nbaseTableInfo -> baseTableInfo.getTableIdentifier().equals(jdbcTable.getTableIdentifier())).\nfindAny();\nif (!baseTableInfoOptional.isPresent()) {\ncontinue;\n}\nBaseTableInfo baseTableInfo = baseTableInfoOptional.get();\nMap partitionInfos =\ngetSelectedPartitionInfos(jdbcTable, Lists.newArrayList(entry.getValue()),\nbaseTableInfo);\nchangedOlapTablePartitionInfos.put(baseTableInfo, partitionInfos);\n}\n}\nreturn changedOlapTablePartitionInfos;\n}\n/**\n* @param hiveTable : input hive table to collect refresh partition infos\n* @param selectedPartitionNames : input hive table refreshed partition names\n* @param baseTableInfo : input hive table's base table info\n* @return : return the given hive table's refresh partition infos\n*/\nprivate Map getSelectedPartitionInfos(HiveTable hiveTable,\nList selectedPartitionNames,\nBaseTableInfo baseTableInfo) {\nMap partitionInfos = Maps.newHashMap();\nList hivePartitions = GlobalStateMgr.\ngetCurrentState().getMetadataMgr().getPartitions(baseTableInfo.getCatalogName(), hiveTable,\nselectedPartitionNames);\nfor (int index = 0; index < selectedPartitionNames.size(); ++index) {\nlong modifiedTime = hivePartitions.get(index).getModifiedTime();\npartitionInfos.put(selectedPartitionNames.get(index),\nnew MaterializedView.BasePartitionInfo(-1, modifiedTime, modifiedTime));\n}\nreturn partitionInfos;\n}\n/**\n* @param jdbcTable : input jdbc table to collect refresh partition infos\n* @param selectedPartitionNames : input jdbc table refreshed partition names\n* @param baseTableInfo : input jdbc table's base table info\n* @return : return the given hive table's refresh partition infos\n*/\nprivate Map getSelectedPartitionInfos(JDBCTable jdbcTable,\nList 
selectedPartitionNames,\nBaseTableInfo baseTableInfo) {\nMap partitionInfos = Maps.newHashMap();\nList jdbcPartitions = GlobalStateMgr.\ngetCurrentState().getMetadataMgr().getPartitions(baseTableInfo.getCatalogName(), jdbcTable,\nselectedPartitionNames);\nfor (int index = 0; index < selectedPartitionNames.size(); ++index) {\nlong modifiedTime = jdbcPartitions.get(index).getModifiedTime();\npartitionInfos.put(selectedPartitionNames.get(index),\nnew MaterializedView.BasePartitionInfo(-1, modifiedTime, modifiedTime));\n}\nreturn partitionInfos;\n}\n/**\n* Extract refreshed/scanned base table and its refreshed partition names\n* NOTE: this is used to trace in task_runs.\n*/\nprivate Map> getBaseTableRefreshedPartitionsByExecPlan(\nExecPlan execPlan) {\nMap> baseTableRefreshPartitionNames = Maps.newHashMap();\nList scanNodes = execPlan.getScanNodes();\nfor (ScanNode scanNode : scanNodes) {\nSet selectedPartitionNames = Sets.newHashSet();\nif (scanNode instanceof OlapScanNode) {\nOlapScanNode olapScanNode = (OlapScanNode) scanNode;\nOlapTable olapTable = olapScanNode.getOlapTable();\nif (olapScanNode.getSelectedPartitionNames() != null && !olapScanNode.getSelectedPartitionNames().isEmpty()) {\nbaseTableRefreshPartitionNames.put(olapTable.getName(),\nnew HashSet<>(olapScanNode.getSelectedPartitionNames()));\n} else {\nList selectedPartitionIds = olapScanNode.getSelectedPartitionIds();\nselectedPartitionNames = selectedPartitionIds.stream().map(p -> olapTable.getPartition(p).getName())\n.collect(Collectors.toSet());\nbaseTableRefreshPartitionNames.put(olapTable.getName(), selectedPartitionNames);\n}\n} else if (scanNode instanceof HdfsScanNode) {\nHdfsScanNode hdfsScanNode = (HdfsScanNode) scanNode;\nHiveTable hiveTable = (HiveTable) hdfsScanNode.getHiveTable();\nOptional baseTableInfoOptional = materializedView.getBaseTableInfos().stream().filter(\nbaseTableInfo -> baseTableInfo.getTableIdentifier().equals(hiveTable.getTableIdentifier())).\nfindAny();\nif (!baseTableInfoOptional.isPresent()) {\ncontinue;\n}\nselectedPartitionNames = Sets.newHashSet(getSelectedPartitionNamesOfHiveTable(hiveTable, hdfsScanNode));\nbaseTableRefreshPartitionNames.put(hiveTable.getName(), selectedPartitionNames);\n} else {\n}\n}\nreturn baseTableRefreshPartitionNames;\n}\n/**\n* Extract hive partition names from hdfs scan node.\n*/\nprivate List getSelectedPartitionNamesOfHiveTable(HiveTable hiveTable, HdfsScanNode hdfsScanNode) {\nList partitionColumnNames = hiveTable.getPartitionColumnNames();\nList selectedPartitionNames;\nif (hiveTable.isUnPartitioned()) {\nselectedPartitionNames = Lists.newArrayList(hiveTable.getTableName());\n} else {\nCollection selectedPartitionIds = hdfsScanNode.getScanNodePredicates().getSelectedPartitionIds();\nList selectedPartitionKey = Lists.newArrayList();\nfor (Long selectedPartitionId : selectedPartitionIds) {\nselectedPartitionKey\n.add(hdfsScanNode.getScanNodePredicates().getIdToPartitionKey().get(selectedPartitionId));\n}\nselectedPartitionNames = selectedPartitionKey.stream().map(partitionKey ->\nPartitionUtil.toHivePartitionName(partitionColumnNames, partitionKey)).collect(Collectors.toList());\n}\nreturn selectedPartitionNames;\n}\n}", + "context_after": "class PartitionBasedMvRefreshProcessor extends BaseTaskRunProcessor {\nprivate static final Logger LOG = LogManager.getLogger(PartitionBasedMvRefreshProcessor.class);\npublic static final String MV_ID = \"mvId\";\npublic static final String MV_SESSION_ENABLE_SPILL =\nPropertyAnalyzer.PROPERTIES_MATERIALIZED_VIEW_SESSION_PREFIX 
+ SessionVariable.ENABLE_SPILL;\npublic static final String MV_SESSION_TIMEOUT =\nPropertyAnalyzer.PROPERTIES_MATERIALIZED_VIEW_SESSION_PREFIX + SessionVariable.QUERY_TIMEOUT;\nprivate static final int MV_DEFAULT_QUERY_TIMEOUT = 3600;\nprivate static final int MAX_RETRY_NUM = 10;\nprivate Database database;\nprivate MaterializedView materializedView;\nprivate MvTaskRunContext mvContext;\nprivate Map> snapshotBaseTables;\nprivate long oldTransactionVisibleWaitTimeout;\npublic enum RefreshJobStatus {\nSUCCESS,\nFAILED,\nEMPTY,\nTOTAL\n}\n@VisibleForTesting\npublic MvTaskRunContext getMvContext() {\nreturn mvContext;\n}\n@VisibleForTesting\npublic void setMvContext(MvTaskRunContext mvContext) {\nthis.mvContext = mvContext;\n}\n@Override\npublic void processTaskRun(TaskRunContext context) throws Exception {\nTracers.register();\nprepare(context);\nPreconditions.checkState(materializedView != null);\nMaterializedViewMetricsEntity mvEntity =\nMaterializedViewMetricsRegistry.getInstance().getMetricsEntity(materializedView.getMvId());\nmvEntity.increaseRefreshJobStatus(RefreshJobStatus.TOTAL);\ntry {\nRefreshJobStatus status = doMvRefresh(context, mvEntity);\nmvEntity.increaseRefreshJobStatus(status);\n} catch (Exception e) {\nmvEntity.increaseRefreshJobStatus(RefreshJobStatus.FAILED);\nthrow e;\n} finally {\npostProcess();\nTracers.close();\n}\n}\nprivate RefreshJobStatus doMvRefresh(TaskRunContext context, MaterializedViewMetricsEntity mvEntity) throws Exception {\nInsertStmt insertStmt = null;\nExecPlan execPlan = null;\nint retryNum = 0;\nboolean checked = false;\nMap> refTableRefreshPartitions = null;\nSet mvToRefreshedPartitions = null;\nlong startRefreshTs = System.currentTimeMillis();\nwhile (!checked) {\nsyncPartitions();\nrefreshExternalTable(context);\ndatabase.readLock();\ntry {\nif (checkBaseTablePartitionChange()) {\nretryNum++;\nif (retryNum > MAX_RETRY_NUM) {\nthrow new DmlException(\"materialized view:%s refresh task failed\", materializedView.getName());\n}\nLOG.info(\"materialized view:{} base partition has changed. 
retry to sync partitions, retryNum:{}\",\nmaterializedView.getName(), retryNum);\ncontinue;\n}\nmvEntity.increaseRefreshRetryMetaCount((long) retryNum);\nchecked = true;\nmvToRefreshedPartitions = getPartitionsToRefreshForMaterializedView(context.getProperties());\nif (mvToRefreshedPartitions.isEmpty()) {\nLOG.info(\"no partitions to refresh for materialized view {}\", materializedView.getName());\nreturn RefreshJobStatus.EMPTY;\n}\nfilterPartitionByRefreshNumber(mvToRefreshedPartitions, materializedView);\nLOG.debug(\"materialized view partitions to refresh:{}\", mvToRefreshedPartitions);\nrefTableRefreshPartitions = getRefTableRefreshPartitions(mvToRefreshedPartitions);\nMap> refTablePartitionNames =\nrefTableRefreshPartitions.entrySet().stream()\n.collect(Collectors.toMap(x -> x.getKey().getName(), Map.Entry::getValue));\nLOG.debug(\"materialized view:{} source partitions :{}\",\nmaterializedView.getName(), refTableRefreshPartitions);\nif (this.getMVTaskRunExtraMessage() != null) {\nMVTaskRunExtraMessage extraMessage = getMVTaskRunExtraMessage();\nextraMessage.setMvPartitionsToRefresh(mvToRefreshedPartitions);\nextraMessage.setRefBasePartitionsToRefreshMap(refTablePartitionNames);\n}\nchangeDefaultConnectContextIfNeeded(mvContext.getCtx());\ninsertStmt = generateInsertStmt(mvToRefreshedPartitions, refTablePartitionNames, materializedView);\nexecPlan = generateRefreshPlan(mvContext.getCtx(), insertStmt);\nTracers.log(Tracers.Module.MV,\nargs -> \"[TRACE QUERY] MV: \" + materializedView.getName() +\n\"\\nMV PartitionsToRefresh: \" + String.join(\",\", (Set) args[0]) +\n\"\\nBase PartitionsToScan:\" + refTablePartitionNames +\n\"\\nInsert Plan:\\n\" +\n((ExecPlan) args[1]).getExplainString(StatementBase.ExplainLevel.VERBOSE),\nmvToRefreshedPartitions, execPlan);\nmvContext.setExecPlan(execPlan);\n} catch (Exception e) {\nLOG.warn(\"Refresh mv {} failed: {}\", materializedView.getName(), e);\nthrow e;\n} finally {\ndatabase.readUnlock();\n}\n}\nrefreshMaterializedView(mvContext, execPlan, insertStmt);\nupdateMeta(mvToRefreshedPartitions, execPlan, refTableRefreshPartitions);\nif (mvContext.hasNextBatchPartition()) {\ngenerateNextTaskRun();\n}\n{\nlong refreshDurationMs = System.currentTimeMillis() - startRefreshTs;\nLOG.info(\"Refresh {} success, cost time(s): {}\", materializedView.getName(),\nDebugUtil.DECIMAL_FORMAT_SCALE_3.format(refreshDurationMs / 1000));\nmvEntity.updateRefreshDuration(refreshDurationMs);\n}\nreturn RefreshJobStatus.SUCCESS;\n}\n/**\n* Change default connect context when for mv refresh this is because:\n* - MV Refresh may take much resource to load base tables' data into the final materialized view.\n* - Those changes are set by default and also able to be changed by users for their needs.\n* @param mvConnectCtx\n*/\nprivate void changeDefaultConnectContextIfNeeded(ConnectContext mvConnectCtx) {\nTableProperty mvProperty = materializedView.getTableProperty();\nSessionVariable mvSessionVariable = mvConnectCtx.getSessionVariable();\nif (mvSessionVariable.isEnableResourceGroup()) {\nString rg = mvProperty.getResourceGroup();\nif (rg == null || rg.isEmpty()) {\nrg = ResourceGroup.DEFAULT_MV_RESOURCE_GROUP_NAME;\n}\nmvSessionVariable.setResourceGroup(rg);\n}\nif (Config.enable_materialized_view_spill &&\n!mvSessionVariable.getEnableSpill() &&\n!mvProperty.getProperties().containsKey(MV_SESSION_ENABLE_SPILL)) {\nmvSessionVariable.setEnableSpill(true);\n}\nif (!mvProperty.getProperties().containsKey(MV_SESSION_TIMEOUT)) 
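/* no user-specified mv query timeout: fall back to MV_DEFAULT_QUERY_TIMEOUT (3600s) */ 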
{\nmvSessionVariable.setQueryTimeoutS(MV_DEFAULT_QUERY_TIMEOUT);\n}\n}\nprivate void postProcess() {\nmvContext.ctx.getSessionVariable().setTransactionVisibleWaitTimeout(oldTransactionVisibleWaitTimeout);\n}\npublic MVTaskRunExtraMessage getMVTaskRunExtraMessage() {\nif (this.mvContext.status == null) {\nreturn null;\n}\nreturn this.mvContext.status.getMvTaskRunExtraMessage();\n}\n@VisibleForTesting\npublic void filterPartitionByRefreshNumber(Set partitionsToRefresh, MaterializedView materializedView) {\nint partitionRefreshNumber = materializedView.getTableProperty().getPartitionRefreshNumber();\nif (partitionRefreshNumber <= 0) {\nreturn;\n}\nMap> rangePartitionMap = materializedView.getRangePartitionMap();\nif (partitionRefreshNumber >= rangePartitionMap.size()) {\nreturn;\n}\nMap> mappedPartitionsToRefresh = Maps.newHashMap();\nfor (String partitionName : partitionsToRefresh) {\nmappedPartitionsToRefresh.put(partitionName, rangePartitionMap.get(partitionName));\n}\nLinkedHashMap> sortedPartition = mappedPartitionsToRefresh.entrySet().stream()\n.sorted(Map.Entry.comparingByValue(RangeUtils.RANGE_COMPARATOR))\n.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (e1, e2) -> e1, LinkedHashMap::new));\nIterator partitionNameIter = sortedPartition.keySet().iterator();\nfor (int i = 0; i < partitionRefreshNumber; i++) {\nif (partitionNameIter.hasNext()) {\npartitionNameIter.next();\n}\n}\nString nextPartitionStart = null;\nString endPartitionName = null;\nif (partitionNameIter.hasNext()) {\nString startPartitionName = partitionNameIter.next();\nRange partitionKeyRange = mappedPartitionsToRefresh.get(startPartitionName);\nnextPartitionStart = AnalyzerUtils.parseLiteralExprToDateString(partitionKeyRange.lowerEndpoint(), 0);\nendPartitionName = startPartitionName;\npartitionsToRefresh.remove(endPartitionName);\n}\nwhile (partitionNameIter.hasNext()) {\nendPartitionName = partitionNameIter.next();\npartitionsToRefresh.remove(endPartitionName);\n}\nmvContext.setNextPartitionStart(nextPartitionStart);\nif (endPartitionName != null) {\nPartitionKey upperEndpoint = mappedPartitionsToRefresh.get(endPartitionName).upperEndpoint();\nmvContext.setNextPartitionEnd(AnalyzerUtils.parseLiteralExprToDateString(upperEndpoint, 0));\n} else {\nmvContext.setNextPartitionEnd(null);\n}\n}\nprivate void generateNextTaskRun() {\nTaskManager taskManager = GlobalStateMgr.getCurrentState().getTaskManager();\nMap properties = mvContext.getProperties();\nlong mvId = Long.parseLong(properties.get(MV_ID));\nString taskName = TaskBuilder.getMvTaskName(mvId);\nMap newProperties = Maps.newHashMap();\nfor (Map.Entry proEntry : properties.entrySet()) {\nif (proEntry.getValue() != null) {\nnewProperties.put(proEntry.getKey(), proEntry.getValue());\n}\n}\nnewProperties.put(TaskRun.PARTITION_START, mvContext.getNextPartitionStart());\nnewProperties.put(TaskRun.PARTITION_END, mvContext.getNextPartitionEnd());\nExecuteOption option = new ExecuteOption(Constants.TaskRunPriority.HIGHEST.value(), true, newProperties);\ntaskManager.executeTask(taskName, option);\nLOG.info(\"[MV] Generate a task to refresh next batches of partitions for MV {}-{}, start={}, end={}\",\nmaterializedView.getName(), materializedView.getId(),\nmvContext.getNextPartitionStart(), mvContext.getNextPartitionEnd());\n}\nprivate void refreshExternalTable(TaskRunContext context) {\nfor (Pair tablePair : snapshotBaseTables.values()) {\nBaseTableInfo baseTableInfo = tablePair.first;\nTable table = tablePair.second;\nif 
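/* only external tables (neither native tables nor hive views) need a connector metadata refresh before partition sync */ 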
(!table.isNativeTableOrMaterializedView() && !table.isHiveView()) {\ncontext.getCtx().getGlobalStateMgr().getMetadataMgr().refreshTable(baseTableInfo.getCatalogName(),\nbaseTableInfo.getDbName(), table, Lists.newArrayList(), true);\n}\n}\n}\n/**\n* After materialized view is refreshed, update materialized view's meta info to record history refreshes.\n*\n* @param refTableAndPartitionNames : refreshed base tables and their partition names mapping.\n*/\nprivate void updateMeta(Set mvRefreshedPartitions,\nExecPlan execPlan,\nMap> refTableAndPartitionNames) {\nif (!database.writeLockAndCheckExist()) {\nthrow new DmlException(\"update meta failed. database:\" + database.getFullName() + \" not exist\");\n}\ntry {\nTable mv = database.getTable(materializedView.getId());\nif (mv == null) {\nthrow new DmlException(\n\"update meta failed. materialized view:\" + materializedView.getName() + \" not exist\");\n}\nif (mvRefreshedPartitions == null || refTableAndPartitionNames == null) {\nreturn;\n}\nMap> baseTableAndPartitionNames = Maps.newHashMap();\nfor (Map.Entry> e : refTableAndPartitionNames.entrySet()) {\nSet realPartitionNames =\ne.getValue().stream()\n.flatMap(name -> convertMVPartitionNameToRealPartitionName(e.getKey(), name).stream())\n.collect(Collectors.toSet());\nbaseTableAndPartitionNames.put(e.getKey(), realPartitionNames);\n}\nMap> nonRefTableAndPartitionNames = getNonRefTableRefreshPartitions();\nif (!nonRefTableAndPartitionNames.isEmpty()) {\nbaseTableAndPartitionNames.putAll(nonRefTableAndPartitionNames);\n}\nMaterializedView.AsyncRefreshContext refreshContext =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext();\nMap> changedOlapTablePartitionInfos =\ngetSelectedPartitionInfosOfOlapTable(baseTableAndPartitionNames);\nMap> changedExternalTablePartitionInfos\n= getSelectedPartitionInfosOfExternalTable(baseTableAndPartitionNames);\nPreconditions.checkState(changedOlapTablePartitionInfos.size() + changedExternalTablePartitionInfos.size()\n<= baseTableAndPartitionNames.size());\nupdateMetaForOlapTable(refreshContext, changedOlapTablePartitionInfos);\nupdateMetaForExternalTable(refreshContext, changedExternalTablePartitionInfos);\nif (this.getMVTaskRunExtraMessage() != null) {\ntry {\nMVTaskRunExtraMessage extraMessage = getMVTaskRunExtraMessage();\nMap> baseTableRefreshedPartitionsByExecPlan =\ngetBaseTableRefreshedPartitionsByExecPlan(execPlan);\nextraMessage.setBasePartitionsToRefreshMap(baseTableRefreshedPartitionsByExecPlan);\n} catch (Exception e) {\nLOG.warn(\"update task run messages failed:\", e);\n}\n}\n} catch (Exception e) {\nLOG.warn(\"update final meta failed after mv refreshed:\", e);\nthrow e;\n} finally {\ndatabase.writeUnlock();\n}\n}\nprivate void updateMetaForOlapTable(MaterializedView.AsyncRefreshContext refreshContext,\nMap> changedTablePartitionInfos) {\nMap> currentVersionMap =\nrefreshContext.getBaseTableVisibleVersionMap();\nTable partitionTable = null;\nif (mvContext.hasNextBatchPartition()) {\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\npartitionTable = partitionTableAndColumn.first;\n}\nfor (Map.Entry> tableEntry\n: changedTablePartitionInfos.entrySet()) {\nLong tableId = tableEntry.getKey();\nif (partitionTable != null && tableId != partitionTable.getId()) {\ncontinue;\n}\nif (!currentVersionMap.containsKey(tableId)) {\ncurrentVersionMap.put(tableId, Maps.newHashMap());\n}\nMap currentTablePartitionInfo =\ncurrentVersionMap.get(tableId);\nMap partitionInfoMap = 
tableEntry.getValue();\ncurrentTablePartitionInfo.putAll(partitionInfoMap);\nTable snapshotTable = snapshotBaseTables.get(tableId).second;\nif (snapshotTable.isOlapOrCloudNativeTable()) {\nOlapTable snapshotOlapTable = (OlapTable) snapshotTable;\ncurrentTablePartitionInfo.keySet().removeIf(partitionName ->\n!snapshotOlapTable.getVisiblePartitionNames().contains(partitionName));\n}\n}\nif (!changedTablePartitionInfos.isEmpty()) {\nChangeMaterializedViewRefreshSchemeLog changeRefreshSchemeLog =\nnew ChangeMaterializedViewRefreshSchemeLog(materializedView);\nGlobalStateMgr.getCurrentState().getEditLog().logMvChangeRefreshScheme(changeRefreshSchemeLog);\nlong maxChangedTableRefreshTime =\nMvUtils.getMaxTablePartitionInfoRefreshTime(changedTablePartitionInfos.values());\nmaterializedView.getRefreshScheme().setLastRefreshTime(maxChangedTableRefreshTime);\n}\n}\nprivate void updateMetaForExternalTable(\nMaterializedView.AsyncRefreshContext refreshContext,\nMap> changedTablePartitionInfos) {\nMap> currentVersionMap =\nrefreshContext.getBaseTableInfoVisibleVersionMap();\nBaseTableInfo partitionTableInfo = null;\nif (mvContext.hasNextBatchPartition()) {\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\npartitionTableInfo = snapshotBaseTables.get(partitionTableAndColumn.first.getId()).first;\n}\nfor (Map.Entry> tableEntry\n: changedTablePartitionInfos.entrySet()) {\nBaseTableInfo baseTableInfo = tableEntry.getKey();\nif (partitionTableInfo != null && !partitionTableInfo.equals(baseTableInfo)) {\ncontinue;\n}\nif (!currentVersionMap.containsKey(baseTableInfo)) {\ncurrentVersionMap.put(baseTableInfo, Maps.newHashMap());\n}\nMap currentTablePartitionInfo =\ncurrentVersionMap.get(baseTableInfo);\nMap partitionInfoMap = tableEntry.getValue();\ncurrentTablePartitionInfo.putAll(partitionInfoMap);\nSet partitionNames = Sets.newHashSet(PartitionUtil.getPartitionNames(baseTableInfo.getTable()));\ncurrentTablePartitionInfo.keySet().removeIf(partitionName ->\n!partitionNames.contains(partitionName));\n}\nif (!changedTablePartitionInfos.isEmpty()) {\nChangeMaterializedViewRefreshSchemeLog changeRefreshSchemeLog =\nnew ChangeMaterializedViewRefreshSchemeLog(materializedView);\nGlobalStateMgr.getCurrentState().getEditLog().logMvChangeRefreshScheme(changeRefreshSchemeLog);\nlong maxChangedTableRefreshTime =\nMvUtils.getMaxTablePartitionInfoRefreshTime(changedTablePartitionInfos.values());\nmaterializedView.getRefreshScheme().setLastRefreshTime(maxChangedTableRefreshTime);\n}\n}\nprivate void prepare(TaskRunContext context) {\nMap properties = context.getProperties();\nlong mvId = Long.parseLong(properties.get(MV_ID));\ndatabase = GlobalStateMgr.getCurrentState().getDb(context.ctx.getDatabase());\nif (database == null) {\nLOG.warn(\"database {} do not exist when refreshing materialized view:{}\", context.ctx.getDatabase(), mvId);\nthrow new DmlException(\"database \" + context.ctx.getDatabase() + \" do not exist.\");\n}\nTable table = database.getTable(mvId);\nif (table == null) {\nLOG.warn(\"materialized view:{} in database:{} do not exist when refreshing\", mvId,\ncontext.ctx.getDatabase());\nthrow new DmlException(String.format(\"materialized view:%s in database:%s do not exist when refreshing\",\nmvId, context.ctx.getDatabase()));\n}\nmaterializedView = (MaterializedView) table;\nif (!materializedView.isActive()) {\nString errorMsg = String.format(\"Materialized view: %s, id: %d is not active, \" +\n\"skip sync partition and data with base tables\", 
materializedView.getName(), mvId);\nLOG.warn(errorMsg);\nthrow new DmlException(errorMsg);\n}\noldTransactionVisibleWaitTimeout = context.ctx.getSessionVariable().getTransactionVisibleWaitTimeout();\ncontext.ctx.getSessionVariable().setTransactionVisibleWaitTimeout(Long.MAX_VALUE / 1000);\nmvContext = new MvTaskRunContext(context);\n}\n/**\n* Sync base table's partition infos to be used later.\n*/\nprivate void syncPartitions() {\nsnapshotBaseTables = collectBaseTables(materializedView);\nPartitionInfo partitionInfo = materializedView.getPartitionInfo();\nif (!(partitionInfo instanceof SinglePartitionInfo)) {\nPair partitionTableAndColumn = getRefBaseTableAndPartitionColumn(snapshotBaseTables);\nmvContext.setRefBaseTable(partitionTableAndColumn.first);\nmvContext.setRefBaseTablePartitionColumn(partitionTableAndColumn.second);\n}\nint partitionTTLNumber = materializedView.getTableProperty().getPartitionTTLNumber();\nmvContext.setPartitionTTLNumber(partitionTTLNumber);\nif (partitionInfo instanceof ExpressionRangePartitionInfo) {\nsyncPartitionsForExpr();\n} else if (partitionInfo instanceof ListPartitionInfo) {\nsyncPartitionsForList();\n}\n}\n/**\n* @param tables : base tables of the materialized view\n* @return : return the ref base table and column that materialized view's partition column\n* derives from if it exists, otherwise return null.\n*/\nprivate Pair getRefBaseTableAndPartitionColumn(\nMap> tables) {\nSlotRef slotRef = MaterializedView.getRefBaseTablePartitionSlotRef(materializedView);\nfor (Pair tableInfo : tables.values()) {\nBaseTableInfo baseTableInfo = tableInfo.first;\nTable table = tableInfo.second;\nif (slotRef.getTblNameWithoutAnalyzed().getTbl().equals(baseTableInfo.getTableName())) {\nreturn Pair.create(table, table.getColumn(slotRef.getColumnName()));\n}\n}\nreturn Pair.create(null, null);\n}\nprivate void syncPartitionsForExpr() {\nExpr partitionExpr = materializedView.getFirstPartitionRefTableExpr();\nTable refBaseTable = mvContext.getRefBaseTable();\nColumn refBaseTablePartitionColumn = mvContext.getRefBaseTablePartitionColumn();\nRangePartitionDiff rangePartitionDiff = new RangePartitionDiff();\ndatabase.readLock();\nMap> mvRangePartitionMap = materializedView.getRangePartitionMap();\nMap> refBaseTablePartitionMap;\nMap> refBaseTableMVPartitionMap = Maps.newHashMap();\ntry {\nrefBaseTablePartitionMap = PartitionUtil.getPartitionKeyRange(refBaseTable, refBaseTablePartitionColumn);\nif (!refBaseTable.isNativeTableOrMaterializedView()) {\nrefBaseTableMVPartitionMap = PartitionUtil.getMVPartitionNameMapOfExternalTable(refBaseTable,\nrefBaseTablePartitionColumn, PartitionUtil.getPartitionNames(refBaseTable));\n}\nif (partitionExpr instanceof SlotRef) {\nrangePartitionDiff = SyncPartitionUtils\n.getRangePartitionDiffOfSlotRef(refBaseTablePartitionMap, mvRangePartitionMap);\n} else if (partitionExpr instanceof FunctionCallExpr) {\nFunctionCallExpr functionCallExpr = (FunctionCallExpr) partitionExpr;\nif (functionCallExpr.getFnName().getFunction().equalsIgnoreCase(FunctionSet.DATE_TRUNC) ||\nfunctionCallExpr.getFnName().getFunction().equalsIgnoreCase(FunctionSet.STR2DATE)) {\nrangePartitionDiff = SyncPartitionUtils.getRangePartitionDiffOfExpr(refBaseTablePartitionMap,\nmvRangePartitionMap, functionCallExpr, refBaseTablePartitionColumn.getPrimitiveType());\n} else {\nthrow new SemanticException(\"Materialized view partition function \" +\nfunctionCallExpr.getFnName().getFunction() +\n\" is not supported yet.\", functionCallExpr.getPos());\n}\n}\n} catch (UserException e) 
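/* partition-diff computation is best-effort: on failure, log a warning and skip this sync round */ 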
{\nLOG.warn(\"Materialized view compute partition difference with base table failed.\", e);\nreturn;\n} finally {\ndatabase.readUnlock();\n}\nMap> deletes = rangePartitionDiff.getDeletes();\nfor (String mvPartitionName : deletes.keySet()) {\ndropPartition(database, materializedView, mvPartitionName);\n}\nLOG.info(\"The process of synchronizing materialized view [{}] delete partitions range [{}]\",\nmaterializedView.getName(), deletes);\nMap partitionProperties = getPartitionProperties(materializedView);\nDistributionDesc distributionDesc = getDistributionDesc(materializedView);\nMap> adds = rangePartitionDiff.getAdds();\naddRangePartitions(database, materializedView, adds, partitionProperties, distributionDesc);\nfor (Map.Entry> addEntry : adds.entrySet()) {\nString mvPartitionName = addEntry.getKey();\nmvRangePartitionMap.put(mvPartitionName, addEntry.getValue());\n}\nLOG.info(\"The process of synchronizing materialized view [{}] add partitions range [{}]\",\nmaterializedView.getName(), adds);\nMap> baseToMvNameRef = SyncPartitionUtils\n.getIntersectedPartitions(refBaseTablePartitionMap, mvRangePartitionMap);\nMap> mvToBaseNameRef = SyncPartitionUtils\n.getIntersectedPartitions(mvRangePartitionMap, refBaseTablePartitionMap);\nmvContext.setRefBaseTableMVIntersectedPartitions(baseToMvNameRef);\nmvContext.setMvRefBaseTableIntersectedPartitions(mvToBaseNameRef);\nmvContext.setRefBaseTableRangePartitionMap(refBaseTablePartitionMap);\nmvContext.setExternalRefBaseTableMVPartitionMap(refBaseTableMVPartitionMap);\n}\nprivate void syncPartitionsForList() {\nTable partitionBaseTable = mvContext.getRefBaseTable();\nColumn partitionColumn = mvContext.getRefBaseTablePartitionColumn();\nListPartitionDiff listPartitionDiff;\nMap>> baseListPartitionMap;\nMap>> listPartitionMap = materializedView.getListPartitionMap();\ndatabase.readLock();\ntry {\nbaseListPartitionMap = PartitionUtil.getPartitionList(partitionBaseTable, partitionColumn);\nlistPartitionDiff = SyncPartitionUtils.getListPartitionDiff(baseListPartitionMap, listPartitionMap);\n} catch (UserException e) {\nLOG.warn(\"Materialized view compute partition difference with base table failed.\", e);\nreturn;\n} finally {\ndatabase.readUnlock();\n}\nMap>> deletes = listPartitionDiff.getDeletes();\nfor (String mvPartitionName : deletes.keySet()) {\ndropPartition(database, materializedView, mvPartitionName);\n}\nLOG.info(\"The process of synchronizing materialized view [{}] delete partitions range [{}]\",\nmaterializedView.getName(), deletes);\nMap partitionProperties = getPartitionProperties(materializedView);\nDistributionDesc distributionDesc = getDistributionDesc(materializedView);\nMap>> adds = listPartitionDiff.getAdds();\naddListPartitions(database, materializedView, adds, partitionProperties, distributionDesc);\nLOG.info(\"The process of synchronizing materialized view [{}] add partitions list [{}]\",\nmaterializedView.getName(), adds);\nMap> baseToMvNameRef = Maps.newHashMap();\nfor (String partitionName : baseListPartitionMap.keySet()) {\nbaseToMvNameRef.put(partitionName, Sets.newHashSet(partitionName));\n}\nmvContext.setRefBaseTableMVIntersectedPartitions(baseToMvNameRef);\nmvContext.setMvRefBaseTableIntersectedPartitions(baseToMvNameRef);\nmvContext.setRefBaseTableListPartitionMap(baseListPartitionMap);\n}\nprivate boolean needToRefreshTable(Table table) {\nreturn CollectionUtils.isNotEmpty(materializedView.getUpdatedPartitionNamesOfTable(table, false));\n}\n/**\n* @param table : Whether this table can be supported for incremental refresh 
by partition or not.\n* @return : true if the table does NOT support incremental refresh by partition\n*/\nprivate boolean unSupportRefreshByPartition(Table table) {\nreturn !table.isOlapTableOrMaterializedView() && !table.isHiveTable()\n&& !table.isJDBCTable() && !table.isCloudNativeTable();\n}\n/**\n* Whether non-partitioned materialized view needs to be refreshed or not, it needs refresh when:\n* - any of its base tables does not support refresh by partition.\n* - any of its base tables has been updated.\n*/\nprivate boolean isNonPartitionedMVNeedToRefresh() {\nfor (Pair tablePair : snapshotBaseTables.values()) {\nTable snapshotTable = tablePair.second;\nif (unSupportRefreshByPartition(snapshotTable)) {\nreturn true;\n}\nif (needToRefreshTable(snapshotTable)) {\nreturn true;\n}\n}\nreturn false;\n}\n/**\n* Whether partitioned materialized view needs to be refreshed or not based on the non-ref base tables, it needs refresh when:\n* - any of its non-ref base tables, except unsupported base tables, has been updated.\n*/\nprivate boolean isPartitionedMVNeedToRefreshBaseOnNonRefTables(Table partitionTable) {\nfor (Pair tablePair : snapshotBaseTables.values()) {\nTable snapshotTable = tablePair.second;\nif (snapshotTable.getId() == partitionTable.getId()) {\ncontinue;\n}\nif (unSupportRefreshByPartition(snapshotTable)) {\ncontinue;\n}\nif (needToRefreshTable(snapshotTable)) {\nreturn true;\n}\n}\nreturn false;\n}\n@VisibleForTesting\npublic Set getPartitionsToRefreshForMaterializedView(Map properties)\nthrows AnalysisException {\nString start = properties.get(TaskRun.PARTITION_START);\nString end = properties.get(TaskRun.PARTITION_END);\nboolean force = Boolean.parseBoolean(properties.get(TaskRun.FORCE));\nPartitionInfo partitionInfo = materializedView.getPartitionInfo();\nSet needRefreshMvPartitionNames = getPartitionsToRefreshForMaterializedView(partitionInfo,\nstart, end, force);\nif (this.getMVTaskRunExtraMessage() != null) {\nMVTaskRunExtraMessage extraMessage = this.getMVTaskRunExtraMessage();\nextraMessage.setForceRefresh(force);\nextraMessage.setPartitionStart(start);\nextraMessage.setPartitionEnd(end);\n}\nreturn needRefreshMvPartitionNames;\n}\n/**\n* @param mvPartitionInfo : materialized view's partition info\n* @param start : materialized view's refresh start in this task run\n* @param end : materialized view's refresh end in this task run\n* @param force : whether this task run is forced or not\n* @return : the materialized view partition names that need to be refreshed\n* @throws AnalysisException\n*/\nprivate Set getPartitionsToRefreshForMaterializedView(PartitionInfo mvPartitionInfo,\nString start,\nString end,\nboolean force) throws AnalysisException {\nint partitionTTLNumber = mvContext.getPartitionTTLNumber();\nif (force && start == null && end == null) {\nif (mvPartitionInfo instanceof SinglePartitionInfo) {\nreturn materializedView.getVisiblePartitionNames();\n} else {\nif (mvPartitionInfo instanceof ListPartitionInfo) {\nreturn materializedView.getValidListPartitionMap(partitionTTLNumber).keySet();\n} else {\nreturn materializedView.getValidRangePartitionMap(partitionTTLNumber).keySet();\n}\n}\n}\nSet needRefreshMvPartitionNames = Sets.newHashSet();\nif (mvPartitionInfo instanceof SinglePartitionInfo) {\nif (force || isNonPartitionedMVNeedToRefresh()) {\nreturn materializedView.getVisiblePartitionNames();\n}\n} else if (mvPartitionInfo instanceof ExpressionRangePartitionInfo) {\nExpr partitionExpr = materializedView.getFirstPartitionRefTableExpr();\nTable refBaseTable = mvContext.getRefBaseTable();\nboolean isAutoRefresh = (mvContext.type == Constants.TaskType.PERIODICAL ||\nmvContext.type == Constants.TaskType.EVENT_TRIGGERED);\nSet 
mvRangePartitionNames = SyncPartitionUtils.getPartitionNamesByRangeWithPartitionLimit(\nmaterializedView, start, end, partitionTTLNumber, isAutoRefresh);\nif (isPartitionedMVNeedToRefreshBaseOnNonRefTables(refBaseTable)) {\nif (start == null && end == null) {\nreturn mvRangePartitionNames;\n} else {\nreturn getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(refBaseTable,\nmvRangePartitionNames, true);\n}\n}\nif (partitionExpr instanceof SlotRef) {\nreturn getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(refBaseTable, mvRangePartitionNames, force);\n} else if (partitionExpr instanceof FunctionCallExpr) {\nneedRefreshMvPartitionNames = getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(refBaseTable,\nmvRangePartitionNames, force);\nSet baseChangedPartitionNames = getBasePartitionNamesByMVPartitionNames(needRefreshMvPartitionNames);\nLOG.debug(\"Start calcPotentialRefreshPartition, needRefreshMvPartitionNames: {},\" +\n\" baseChangedPartitionNames: {}\", needRefreshMvPartitionNames, baseChangedPartitionNames);\nSyncPartitionUtils.calcPotentialRefreshPartition(needRefreshMvPartitionNames, baseChangedPartitionNames,\nmvContext.getRefBaseTableMVIntersectedPartitions(), mvContext.getMvRefBaseTableIntersectedPartitions());\nLOG.debug(\"Finish calcPotentialRefreshPartition, needRefreshMvPartitionNames: {},\" +\n\" baseChangedPartitionNames: {}\", needRefreshMvPartitionNames, baseChangedPartitionNames);\n}\n} else if (mvPartitionInfo instanceof ListPartitionInfo) {\nTable partitionTable = mvContext.getRefBaseTable();\nboolean isAutoRefresh = (mvContext.type == Constants.TaskType.PERIODICAL ||\nmvContext.type == Constants.TaskType.EVENT_TRIGGERED);\nSet mvListPartitionNames = SyncPartitionUtils.getPartitionNamesByListWithPartitionLimit(\nmaterializedView, start, end, partitionTTLNumber, isAutoRefresh);\nif (isPartitionedMVNeedToRefreshBaseOnNonRefTables(partitionTable)) {\nif (start == null && end == null) {\nreturn mvListPartitionNames;\n} else {\nreturn getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(partitionTable,\nmvListPartitionNames, true);\n}\n}\nreturn getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(partitionTable, mvListPartitionNames, force);\n} else {\nthrow new DmlException(\"unsupported partition info type:\" + mvPartitionInfo.getClass().getName());\n}\nreturn needRefreshMvPartitionNames;\n}\nprivate Set getMVPartitionNamesToRefreshByRangePartitionNamesAndForce(Table refBaseTable,\nSet mvRangePartitionNames,\nboolean force) {\nif (force || unSupportRefreshByPartition(refBaseTable)) {\nreturn Sets.newHashSet(mvRangePartitionNames);\n}\nSet updatePartitionNames = materializedView.getUpdatedPartitionNamesOfTable(refBaseTable, false);\nif (updatePartitionNames == null) {\nreturn mvRangePartitionNames;\n}\nSet result = getMVPartitionNamesByBasePartitionNames(updatePartitionNames);\nresult.retainAll(mvRangePartitionNames);\nreturn result;\n}\n/**\n*\n* @param basePartitionNames : ref base table partition names to check.\n* @return : Return mv corresponding partition names to the ref base table partition names.\n*/\nprivate Set getMVPartitionNamesByBasePartitionNames(Set basePartitionNames) {\nSet result = Sets.newHashSet();\nMap> refBaseTableMVPartitionMap = mvContext.getRefBaseTableMVIntersectedPartitions();\nfor (String basePartitionName : basePartitionNames) {\nif (refBaseTableMVPartitionMap.containsKey(basePartitionName)) {\nresult.addAll(refBaseTableMVPartitionMap.get(basePartitionName));\n} else {\nLOG.warn(\"Cannot find need refreshed ref base 
table partition from synced partition info: {}\",\nbasePartitionName);\n}\n}\nreturn result;\n}\n/**\n* @param mvPartitionNames : the need to refresh materialized view partition names\n* @return : the corresponding ref base table partition names to the materialized view partition names\n*/\nprivate Set getBasePartitionNamesByMVPartitionNames(Set mvPartitionNames) {\nSet result = Sets.newHashSet();\nMap> mvRefBaseTablePartitionMap = mvContext.getMvRefBaseTableIntersectedPartitions();\nfor (String mvPartitionName : mvPartitionNames) {\nif (mvRefBaseTablePartitionMap.containsKey(mvPartitionName)) {\nresult.addAll(mvRefBaseTablePartitionMap.get(mvPartitionName));\n} else {\nLOG.warn(\"Cannot find need refreshed mv table partition from synced partition info: {}\",\nmvPartitionName);\n}\n}\nreturn result;\n}\nprivate ExecPlan generateRefreshPlan(ConnectContext ctx, InsertStmt insertStmt) throws AnalysisException {\nreturn StatementPlanner.plan(insertStmt, ctx);\n}\n@VisibleForTesting\npublic InsertStmt generateInsertStmt(Set materializedViewPartitions,\nMap> refTableRefreshPartitions,\nMaterializedView materializedView) throws AnalysisException {\nConnectContext ctx = mvContext.getCtx();\nctx.getAuditEventBuilder().reset();\nctx.getAuditEventBuilder()\n.setTimestamp(System.currentTimeMillis())\n.setClientIp(mvContext.getRemoteIp())\n.setUser(ctx.getQualifiedUser())\n.setDb(ctx.getDatabase());\nctx.setThreadLocalInfo();\nctx.getSessionVariable().setEnableMaterializedViewRewrite(false);\nString definition = mvContext.getDefinition();\nInsertStmt insertStmt =\n(InsertStmt) SqlParser.parse(definition, ctx.getSessionVariable()).get(0);\ninsertStmt.setTargetPartitionNames(new PartitionNames(false, new ArrayList<>(materializedViewPartitions)));\ninsertStmt.setSystem(true);\nAnalyzer.analyze(insertStmt, ctx);\nQueryStatement queryStatement = insertStmt.getQueryStatement();\nMultimap tableRelations =\nAnalyzerUtils.collectAllTableRelation(queryStatement);\nfor (Map.Entry nameTableRelationEntry : tableRelations.entries()) {\nif (refTableRefreshPartitions.containsKey(nameTableRelationEntry.getKey())) {\nSet tablePartitionNames = refTableRefreshPartitions.get(nameTableRelationEntry.getKey());\nTableRelation tableRelation = nameTableRelationEntry.getValue();\ntableRelation.setPartitionNames(\nnew PartitionNames(false, new ArrayList<>(tablePartitionNames)));\nExpr partitionPredicates = generatePartitionPredicate(tablePartitionNames, queryStatement,\nmaterializedView.getPartitionInfo());\nif (partitionPredicates != null) {\nList slots = Lists.newArrayList();\npartitionPredicates.collect(SlotRef.class, slots);\nScope tableRelationScope = tableRelation.getScope();\nif (canResolveSlotsInTheScope(slots, tableRelationScope)) {\ntableRelation.setPartitionPredicate(partitionPredicates);\n}\nQueryRelation queryRelation = queryStatement.getQueryRelation();\nif (queryRelation instanceof SelectRelation) {\nSelectRelation selectRelation = ((SelectRelation) queryStatement.getQueryRelation());\nselectRelation.setWhereClause(Expr.compoundAnd(Lists.newArrayList(selectRelation.getWhereClause(),\npartitionPredicates)));\n}\n}\n}\n}\nreturn insertStmt;\n}\n/**\n* Check whether to push down predicate expr with the slot refs into the scope.\n* @param slots : slot refs that are contained in the predicate expr\n* @param scope : scope that try to push down into.\n* @return\n*/\nprivate boolean canResolveSlotsInTheScope(List slots, Scope scope) {\nreturn slots.stream().allMatch(s -> scope.tryResolveField(s).isPresent());\n}\n/**\n* 
Generate partition predicates to refresh the materialized view so can be refreshed by the incremental partitions.\n*\n* @param tablePartitionNames : the need pruned partition tables of the ref base table\n* @param queryStatement : the materialized view's defined query statement\n* @param mvPartitionInfo : the materialized view's partition information\n* @return\n* @throws AnalysisException\n*/\nprivate Expr generatePartitionPredicate(Set tablePartitionNames, QueryStatement queryStatement,\nPartitionInfo mvPartitionInfo)\nthrows AnalysisException {\nSlotRef partitionSlot = MaterializedView.getRefBaseTablePartitionSlotRef(materializedView);\nList columnOutputNames = queryStatement.getQueryRelation().getColumnOutputNames();\nList outputExpressions = queryStatement.getQueryRelation().getOutputExpression();\nExpr outputPartitionSlot = null;\nfor (int i = 0; i < outputExpressions.size(); ++i) {\nif (columnOutputNames.get(i).equalsIgnoreCase(partitionSlot.getColumnName())) {\noutputPartitionSlot = outputExpressions.get(i);\nbreak;\n} else if (outputExpressions.get(i) instanceof FunctionCallExpr) {\nFunctionCallExpr functionCallExpr = (FunctionCallExpr) outputExpressions.get(i);\nif (functionCallExpr.getFnName().getFunction().equalsIgnoreCase(FunctionSet.STR2DATE)) {\noutputPartitionSlot = outputExpressions.get(i).getChild(0);\nbreak;\n}\n} else {\nSlotRef slotRef = outputExpressions.get(i).unwrapSlotRef();\nif (slotRef != null && slotRef.getColumnName().equals(partitionSlot.getColumnName())) {\noutputPartitionSlot = outputExpressions.get(i);\nbreak;\n}\n}\n}\nif (outputPartitionSlot == null) {\nLOG.warn(\"Generate partition predicate failed: \" +\n\"cannot find partition slot ref {} from query relation\", partitionSlot);\nreturn null;\n}\nif (mvPartitionInfo.isRangePartition()) {\nList> sourceTablePartitionRange = Lists.newArrayList();\nMap> refBaseTableRangePartitionMap = mvContext.getRefBaseTableRangePartitionMap();\nfor (String partitionName : tablePartitionNames) {\nsourceTablePartitionRange.add(refBaseTableRangePartitionMap.get(partitionName));\n}\nsourceTablePartitionRange = MvUtils.mergeRanges(sourceTablePartitionRange);\nList partitionPredicates =\nMvUtils.convertRange(outputPartitionSlot, sourceTablePartitionRange);\nOptional> nullRange = sourceTablePartitionRange.stream().\nfilter(range -> range.lowerEndpoint().isMinValue()).findAny();\nif (nullRange.isPresent()) {\nExpr isNullPredicate = new IsNullPredicate(outputPartitionSlot, false);\npartitionPredicates.add(isNullPredicate);\n}\nreturn Expr.compoundOr(partitionPredicates);\n} else if (mvPartitionInfo.getType() == PartitionType.LIST) {\nMap>> baseListPartitionMap = mvContext.getRefBaseTableListPartitionMap();\nType partitionType = mvContext.getRefBaseTablePartitionColumn().getType();\nList sourceTablePartitionList = Lists.newArrayList();\nfor (String tablePartitionName : tablePartitionNames) {\nList> values = baseListPartitionMap.get(tablePartitionName);\nfor (List value : values) {\nLiteralExpr partitionValue = new PartitionValue(value.get(0)).getValue(partitionType);\nsourceTablePartitionList.add(partitionValue);\n}\n}\nList partitionPredicates = MvUtils.convertList(outputPartitionSlot, sourceTablePartitionList);\nreturn Expr.compoundOr(partitionPredicates);\n} else {\nLOG.warn(\"Generate partition predicate failed: \" +\n\"partition slot {} is not supported yet: {}\", partitionSlot, mvPartitionInfo);\nreturn null;\n}\n}\n@VisibleForTesting\npublic void refreshMaterializedView(MvTaskRunContext mvContext, ExecPlan execPlan, 
InsertStmt insertStmt)\nthrows Exception {\nlong beginTimeInNanoSecond = TimeUtils.getStartTime();\nPreconditions.checkNotNull(execPlan);\nPreconditions.checkNotNull(insertStmt);\nConnectContext ctx = mvContext.getCtx();\nStmtExecutor executor = new StmtExecutor(ctx, insertStmt);\nctx.setExecutor(executor);\nif (ctx.getParent() != null && ctx.getParent().getExecutor() != null) {\nStmtExecutor parentStmtExecutor = ctx.getParent().getExecutor();\nparentStmtExecutor.registerSubStmtExecutor(executor);\n}\nctx.setStmtId(new AtomicInteger().incrementAndGet());\nctx.setExecutionId(UUIDUtil.toTUniqueId(ctx.getQueryId()));\ntry {\nexecutor.handleDMLStmtWithProfile(execPlan, insertStmt, beginTimeInNanoSecond);\n} catch (Exception e) {\nLOG.warn(\"refresh materialized view {} failed: {}\", materializedView.getName(), e);\nthrow e;\n} finally {\nauditAfterExec(mvContext, executor.getParsedStmt(), executor.getQueryStatisticsForAuditLog());\n}\n}\n@VisibleForTesting\npublic Map> collectBaseTables(MaterializedView materializedView) {\nMap> tables = Maps.newHashMap();\nList baseTableInfos = materializedView.getBaseTableInfos();\nfor (BaseTableInfo baseTableInfo : baseTableInfos) {\nDatabase db = baseTableInfo.getDb();\nif (db == null) {\nLOG.warn(\"database {} do not exist when refreshing materialized view:{}\",\nbaseTableInfo.getDbInfoStr(), materializedView.getName());\nthrow new DmlException(\"database \" + baseTableInfo.getDbInfoStr() + \" do not exist.\");\n}\nTable table = baseTableInfo.getTable();\nif (table == null) {\nLOG.warn(\"table {} do not exist when refreshing materialized view:{}\",\nbaseTableInfo.getTableInfoStr(), materializedView.getName());\nthrow new DmlException(\"Materialized view base table: %s not exist.\", baseTableInfo.getTableInfoStr());\n}\ndb.readLock();\ntry {\nif (table.isOlapTable()) {\nTable copied = new OlapTable();\nif (!DeepCopy.copy(table, copied, OlapTable.class)) {\nthrow new DmlException(\"Failed to copy olap table: %s\", table.getName());\n}\ntables.put(table.getId(), Pair.create(baseTableInfo, copied));\n} else if (table.isCloudNativeTable()) {\nLakeTable copied = DeepCopy.copyWithGson(table, LakeTable.class);\nif (copied == null) {\nthrow new DmlException(\"Failed to copy lake table: %s\", table.getName());\n}\ntables.put(table.getId(), Pair.create(baseTableInfo, copied));\n} else {\ntables.put(table.getId(), Pair.create(baseTableInfo, table));\n}\n} finally {\ndb.readUnlock();\n}\n}\nreturn tables;\n}\nprivate Map getPartitionProperties(MaterializedView materializedView) {\nMap partitionProperties = new HashMap<>(4);\npartitionProperties.put(\"replication_num\",\nString.valueOf(materializedView.getDefaultReplicationNum()));\npartitionProperties.put(\"storage_medium\", materializedView.getStorageMedium());\nString storageCooldownTime =\nmaterializedView.getTableProperty().getProperties().get(\"storage_cooldown_time\");\nif (storageCooldownTime != null\n&& !storageCooldownTime.equals(String.valueOf(DataProperty.MAX_COOLDOWN_TIME_MS))) {\nString storageCooldownTimeStr = TimeUtils.longToTimeString(Long.parseLong(storageCooldownTime));\npartitionProperties.put(\"storage_cooldown_time\", storageCooldownTimeStr);\n}\nreturn partitionProperties;\n}\nprivate DistributionDesc getDistributionDesc(MaterializedView materializedView) {\nDistributionInfo distributionInfo = materializedView.getDefaultDistributionInfo();\nif (distributionInfo instanceof HashDistributionInfo) {\nList distColumnNames = new ArrayList<>();\nfor (Column distributionColumn : ((HashDistributionInfo) 
distributionInfo).getDistributionColumns()) {\ndistColumnNames.add(distributionColumn.getName());\n}\nreturn new HashDistributionDesc(distributionInfo.getBucketNum(), distColumnNames);\n} else {\nreturn new RandomDistributionDesc();\n}\n}\nprivate void addRangePartitions(Database database, MaterializedView materializedView,\nMap> adds, Map partitionProperties,\nDistributionDesc distributionDesc) {\nif (adds.isEmpty()) {\nreturn;\n}\nList partitionDescs = Lists.newArrayList();\nfor (Map.Entry> addEntry : adds.entrySet()) {\nString mvPartitionName = addEntry.getKey();\nRange partitionKeyRange = addEntry.getValue();\nString lowerBound = partitionKeyRange.lowerEndpoint().getKeys().get(0).getStringValue();\nString upperBound = partitionKeyRange.upperEndpoint().getKeys().get(0).getStringValue();\nboolean isMaxValue = partitionKeyRange.upperEndpoint().isMaxValue();\nPartitionValue upperPartitionValue;\nif (isMaxValue) {\nupperPartitionValue = PartitionValue.MAX_VALUE;\n} else {\nupperPartitionValue = new PartitionValue(upperBound);\n}\nPartitionKeyDesc partitionKeyDesc = new PartitionKeyDesc(\nCollections.singletonList(new PartitionValue(lowerBound)),\nCollections.singletonList(upperPartitionValue));\nSingleRangePartitionDesc singleRangePartitionDesc =\nnew SingleRangePartitionDesc(false, mvPartitionName, partitionKeyDesc, partitionProperties);\npartitionDescs.add(singleRangePartitionDesc);\n}\nRangePartitionDesc rangePartitionDesc =\nnew RangePartitionDesc(materializedView.getPartitionColumnNames(), partitionDescs);\ntry {\nGlobalStateMgr.getCurrentState().addPartitions(\ndatabase, materializedView.getName(),\nnew AddPartitionClause(rangePartitionDesc, distributionDesc,\npartitionProperties, false));\n} catch (Exception e) {\nthrow new DmlException(\"Expression add partition failed: %s, db: %s, table: %s\", e, e.getMessage(),\ndatabase.getFullName(), materializedView.getName());\n}\n}\nprivate void addListPartitions(Database database, MaterializedView materializedView,\nMap>> adds, Map partitionProperties,\nDistributionDesc distributionDesc) {\nif (adds.isEmpty()) {\nreturn;\n}\nfor (Map.Entry>> addEntry : adds.entrySet()) {\nString mvPartitionName = addEntry.getKey();\nList> partitionKeyList = addEntry.getValue();\nMultiItemListPartitionDesc multiItemListPartitionDesc =\nnew MultiItemListPartitionDesc(false, mvPartitionName, partitionKeyList, partitionProperties);\ntry {\nGlobalStateMgr.getCurrentState().addPartitions(\ndatabase, materializedView.getName(), new AddPartitionClause(\nmultiItemListPartitionDesc, distributionDesc,\npartitionProperties, false));\n} catch (Exception e) {\nthrow new DmlException(\"add list partition failed: %s, db: %s, table: %s\", e, e.getMessage(),\ndatabase.getFullName(), materializedView.getName());\n}\n}\n}\nprivate void dropPartition(Database database, MaterializedView materializedView, String mvPartitionName) {\nString dropPartitionName = materializedView.getPartition(mvPartitionName).getName();\nif (!database.writeLockAndCheckExist()) {\nthrow new DmlException(\"drop partition failed. database:\" + database.getFullName() + \" not exist\");\n}\ntry {\nTable mv = database.getTable(materializedView.getId());\nif (mv == null) {\nthrow new DmlException(\"drop partition failed. mv:\" + materializedView.getName() + \" not exist\");\n}\nPartition mvPartition = mv.getPartition(dropPartitionName);\nif (mvPartition == null) {\nthrow new DmlException(\"drop partition failed. 
partition:\" + dropPartitionName + \" not exist\");\n}\nGlobalStateMgr.getCurrentState().dropPartition(\ndatabase, materializedView,\nnew DropPartitionClause(false, dropPartitionName, false, true));\n} catch (Exception e) {\nthrow new DmlException(\"Expression add partition failed: %s, db: %s, table: %s\", e, e.getMessage(),\ndatabase.getFullName(), materializedView.getName());\n} finally {\ndatabase.writeUnlock();\n}\n}\n/**\n* For external table, the partition name is normalized which should convert it into original partition name.\n*\n* For multi-partition columns, `refTableAndPartitionNames` is not fully exact to describe which partitions\n* of ref base table are refreshed, use `getSelectedPartitionInfosOfExternalTable` later if we can solve the multi\n* partition columns problem.\n* eg:\n* partitionName1 : par_col=0/par_date=2020-01-01 => p20200101\n* partitionName2 : par_col=1/par_date=2020-01-01 => p20200101\n*/\nprivate Set convertMVPartitionNameToRealPartitionName(Table table, String mvPartitionName) {\nif (!table.isNativeTableOrMaterializedView()) {\nMap> refBaseTableRangePartitionMap = mvContext.getExternalRefBaseTableMVPartitionMap();\nPreconditions.checkState(refBaseTableRangePartitionMap.containsKey(mvPartitionName));\nreturn refBaseTableRangePartitionMap.get(mvPartitionName);\n} else {\nreturn Sets.newHashSet(mvPartitionName);\n}\n}\n/**\n* @param mvToRefreshedPartitions : to-refreshed materialized view partition names\n* @return : return to-refreshed base table's table name and partition names mapping\n*/\nprivate Map> getRefTableRefreshPartitions(Set mvToRefreshedPartitions) {\nTable refBaseTable = mvContext.getRefBaseTable();\nMap> refTableAndPartitionNames = Maps.newHashMap();\nfor (Pair tablePair : snapshotBaseTables.values()) {\nTable table = tablePair.second;\nif (refBaseTable != null && refBaseTable == table) {\nSet needRefreshTablePartitionNames = Sets.newHashSet();\nMap> mvToBaseNameRef = mvContext.getMvRefBaseTableIntersectedPartitions();\nfor (String mvPartitionName : mvToRefreshedPartitions) {\nneedRefreshTablePartitionNames.addAll(mvToBaseNameRef.get(mvPartitionName));\n}\nrefTableAndPartitionNames.put(table, needRefreshTablePartitionNames);\nreturn refTableAndPartitionNames;\n}\n}\nreturn refTableAndPartitionNames;\n}\n/**\n* Return all non-ref base table and refreshed partitions.\n*/\nprivate Map> getNonRefTableRefreshPartitions() {\nTable partitionTable = mvContext.getRefBaseTable();\nMap> tableNamePartitionNames = Maps.newHashMap();\nfor (Pair tablePair : snapshotBaseTables.values()) {\nTable table = tablePair.second;\nif (partitionTable != null && partitionTable == table) {\n} else {\nif (table.isNativeTableOrMaterializedView()) {\ntableNamePartitionNames.put(table, ((OlapTable) table).getVisiblePartitionNames());\n} else if (table.isHiveTable()) {\ntableNamePartitionNames.put(table, Sets.newHashSet(PartitionUtil.getPartitionNames(table)));\n} else if (table.isView()) {\ncontinue;\n} else {\nLOG.warn(\"Do not support get partition names and columns for\" +\n\" table type {}\", table.getType());\n}\n}\n}\nreturn tableNamePartitionNames;\n}\n/**\n* Collect base olap tables and its partition infos based on refreshed table infos.\n*\n* @param baseTableAndPartitionNames : refreshed base table and its partition names mapping.\n* @return\n*/\nprivate Map> getSelectedPartitionInfosOfOlapTable(\nMap> baseTableAndPartitionNames) {\nMap> changedOlapTablePartitionInfos = Maps.newHashMap();\nfor (Map.Entry> entry : baseTableAndPartitionNames.entrySet()) {\nif 
(entry.getKey().isNativeTableOrMaterializedView()) {\nMap partitionInfos = Maps.newHashMap();\nOlapTable olapTable = (OlapTable) entry.getKey();\nfor (String partitionName : entry.getValue()) {\nPartition partition = olapTable.getPartition(partitionName);\nMaterializedView.BasePartitionInfo basePartitionInfo = new MaterializedView.BasePartitionInfo(\npartition.getId(), partition.getVisibleVersion(), partition.getVisibleVersionTime());\npartitionInfos.put(partition.getName(), basePartitionInfo);\n}\nchangedOlapTablePartitionInfos.put(olapTable.getId(), partitionInfos);\n}\n}\nreturn changedOlapTablePartitionInfos;\n}\n/**\n* Collect base hive tables and its partition infos based on refreshed table infos.\n*\n* @param baseTableAndPartitionNames : refreshed base table and its partition names mapping.\n* @return\n*/\nprivate Map> getSelectedPartitionInfosOfExternalTable(\nMap> baseTableAndPartitionNames) {\nMap> changedOlapTablePartitionInfos = Maps.newHashMap();\nfor (Map.Entry> entry : baseTableAndPartitionNames.entrySet()) {\nif (entry.getKey().isHiveTable()) {\nHiveTable hiveTable = (HiveTable) entry.getKey();\nOptional baseTableInfoOptional = materializedView.getBaseTableInfos().stream().filter(\nbaseTableInfo -> baseTableInfo.getTableIdentifier().equals(hiveTable.getTableIdentifier())).\nfindAny();\nif (!baseTableInfoOptional.isPresent()) {\ncontinue;\n}\nBaseTableInfo baseTableInfo = baseTableInfoOptional.get();\nMap partitionInfos =\ngetSelectedPartitionInfos(hiveTable, Lists.newArrayList(entry.getValue()),\nbaseTableInfo);\nchangedOlapTablePartitionInfos.put(baseTableInfo, partitionInfos);\n} else if (entry.getKey().isJDBCTable()) {\nJDBCTable jdbcTable = (JDBCTable) entry.getKey();\nOptional baseTableInfoOptional = materializedView.getBaseTableInfos().stream().filter(\nbaseTableInfo -> baseTableInfo.getTableIdentifier().equals(jdbcTable.getTableIdentifier())).\nfindAny();\nif (!baseTableInfoOptional.isPresent()) {\ncontinue;\n}\nBaseTableInfo baseTableInfo = baseTableInfoOptional.get();\nMap partitionInfos =\ngetSelectedPartitionInfos(jdbcTable, Lists.newArrayList(entry.getValue()),\nbaseTableInfo);\nchangedOlapTablePartitionInfos.put(baseTableInfo, partitionInfos);\n}\n}\nreturn changedOlapTablePartitionInfos;\n}\n/**\n* @param hiveTable : input hive table to collect refresh partition infos\n* @param selectedPartitionNames : input hive table refreshed partition names\n* @param baseTableInfo : input hive table's base table info\n* @return : return the given hive table's refresh partition infos\n*/\nprivate Map getSelectedPartitionInfos(HiveTable hiveTable,\nList selectedPartitionNames,\nBaseTableInfo baseTableInfo) {\nMap partitionInfos = Maps.newHashMap();\nList hivePartitions = GlobalStateMgr.\ngetCurrentState().getMetadataMgr().getPartitions(baseTableInfo.getCatalogName(), hiveTable,\nselectedPartitionNames);\nfor (int index = 0; index < selectedPartitionNames.size(); ++index) {\nlong modifiedTime = hivePartitions.get(index).getModifiedTime();\npartitionInfos.put(selectedPartitionNames.get(index),\nnew MaterializedView.BasePartitionInfo(-1, modifiedTime, modifiedTime));\n}\nreturn partitionInfos;\n}\n/**\n* @param jdbcTable : input jdbc table to collect refresh partition infos\n* @param selectedPartitionNames : input jdbc table refreshed partition names\n* @param baseTableInfo : input jdbc table's base table info\n* @return : return the given hive table's refresh partition infos\n*/\nprivate Map getSelectedPartitionInfos(JDBCTable jdbcTable,\nList 
selectedPartitionNames,\nBaseTableInfo baseTableInfo) {\nMap partitionInfos = Maps.newHashMap();\nList jdbcPartitions = GlobalStateMgr.\ngetCurrentState().getMetadataMgr().getPartitions(baseTableInfo.getCatalogName(), jdbcTable,\nselectedPartitionNames);\nfor (int index = 0; index < selectedPartitionNames.size(); ++index) {\nlong modifiedTime = jdbcPartitions.get(index).getModifiedTime();\npartitionInfos.put(selectedPartitionNames.get(index),\nnew MaterializedView.BasePartitionInfo(-1, modifiedTime, modifiedTime));\n}\nreturn partitionInfos;\n}\n/**\n* Extract refreshed/scanned base table and its refreshed partition names\n* NOTE: this is used to trace in task_runs.\n*/\nprivate Map> getBaseTableRefreshedPartitionsByExecPlan(\nExecPlan execPlan) {\nMap> baseTableRefreshPartitionNames = Maps.newHashMap();\nList scanNodes = execPlan.getScanNodes();\nfor (ScanNode scanNode : scanNodes) {\nSet selectedPartitionNames = Sets.newHashSet();\nif (scanNode instanceof OlapScanNode) {\nOlapScanNode olapScanNode = (OlapScanNode) scanNode;\nOlapTable olapTable = olapScanNode.getOlapTable();\nif (olapScanNode.getSelectedPartitionNames() != null && !olapScanNode.getSelectedPartitionNames().isEmpty()) {\nbaseTableRefreshPartitionNames.put(olapTable.getName(),\nnew HashSet<>(olapScanNode.getSelectedPartitionNames()));\n} else {\nList selectedPartitionIds = olapScanNode.getSelectedPartitionIds();\nselectedPartitionNames = selectedPartitionIds.stream().map(p -> olapTable.getPartition(p).getName())\n.collect(Collectors.toSet());\nbaseTableRefreshPartitionNames.put(olapTable.getName(), selectedPartitionNames);\n}\n} else if (scanNode instanceof HdfsScanNode) {\nHdfsScanNode hdfsScanNode = (HdfsScanNode) scanNode;\nHiveTable hiveTable = (HiveTable) hdfsScanNode.getHiveTable();\nOptional baseTableInfoOptional = materializedView.getBaseTableInfos().stream().filter(\nbaseTableInfo -> baseTableInfo.getTableIdentifier().equals(hiveTable.getTableIdentifier())).\nfindAny();\nif (!baseTableInfoOptional.isPresent()) {\ncontinue;\n}\nselectedPartitionNames = Sets.newHashSet(getSelectedPartitionNamesOfHiveTable(hiveTable, hdfsScanNode));\nbaseTableRefreshPartitionNames.put(hiveTable.getName(), selectedPartitionNames);\n} else {\n}\n}\nreturn baseTableRefreshPartitionNames;\n}\n/**\n* Extract hive partition names from hdfs scan node.\n*/\nprivate List getSelectedPartitionNamesOfHiveTable(HiveTable hiveTable, HdfsScanNode hdfsScanNode) {\nList partitionColumnNames = hiveTable.getPartitionColumnNames();\nList selectedPartitionNames;\nif (hiveTable.isUnPartitioned()) {\nselectedPartitionNames = Lists.newArrayList(hiveTable.getTableName());\n} else {\nCollection selectedPartitionIds = hdfsScanNode.getScanNodePredicates().getSelectedPartitionIds();\nList selectedPartitionKey = Lists.newArrayList();\nfor (Long selectedPartitionId : selectedPartitionIds) {\nselectedPartitionKey\n.add(hdfsScanNode.getScanNodePredicates().getIdToPartitionKey().get(selectedPartitionId));\n}\nselectedPartitionNames = selectedPartitionKey.stream().map(partitionKey ->\nPartitionUtil.toHivePartitionName(partitionColumnNames, partitionKey)).collect(Collectors.toList());\n}\nreturn selectedPartitionNames;\n}\n}" + }, + { + "comment": "https://github.com/jbossas/protean-shamrock/issues/706", + "method_body": "private void sanitizeOptions() {\nif (className == null) {\nclassName = DEFAULT_CLASS_NAME;\n}\nif (className.endsWith(MojoUtils.JAVA_EXTENSION)) {\nclassName = className.substring(0, className.length() - MojoUtils.JAVA_EXTENSION.length());\n}\nif 
(!className.contains(\".\")) {\nclassName = projectGroupId.replace(\"-\", \".\").replace(\"_\", \".\") + \".\" + className;\n}\nif (StringUtils.isBlank(path)) {\npath = \"/hello\";\n}\nif (!path.startsWith(\"/\")) {\npath = \"/\" + path;\n}\n}", + "target_code": "path = \"/hello\";", + "method_body_after": "private void sanitizeOptions() {\nif (className != null) {\nif (className.endsWith(MojoUtils.JAVA_EXTENSION)) {\nclassName = className.substring(0, className.length() - MojoUtils.JAVA_EXTENSION.length());\n}\nif (!className.contains(\".\")) {\nclassName = projectGroupId.replace(\"-\", \".\").replace(\"_\", \".\") + \".\" + className;\n}\nif (StringUtils.isBlank(path)) {\npath = \"/hello\";\n}\nif (!path.startsWith(\"/\")) {\npath = \"/\" + path;\n}\n}\n}", + "context_before": "class CreateProjectMojo extends AbstractMojo {\npublic static final String PLUGIN_KEY = MojoUtils.getPluginGroupId() + \":\" + MojoUtils.getPluginArtifactId();\n/**\n* FQCN of the generated resources when applied on a project with an existing `pom.xml` file and the user\n* does not pass the `className` parameter.\n*/\nprivate static final String DEFAULT_CLASS_NAME = \"io.jboss.shamrock.sample.HelloResource\";\n@Parameter(defaultValue = \"${project}\")\nprotected MavenProject project;\n@Parameter(property = \"projectGroupId\")\nprivate String projectGroupId;\n@Parameter(property = \"projectArtifactId\")\nprivate String projectArtifactId;\n@Parameter(property = \"projectVersion\")\nprivate String projectVersion;\n@Parameter(property = \"path\")\nprivate String path;\n@Parameter(property = \"className\")\nprivate String className;\n@Parameter(property = \"extensions\")\nprivate List extensions;\n@Parameter(defaultValue = \"${session}\")\nprivate MavenSession session;\n@Component\nprivate Prompter prompter;\n@Override\npublic void execute() throws MojoExecutionException {\nFile projectRoot = new File(\".\");\nFile pom = new File(projectRoot, \"pom.xml\");\nif (pom.isFile()) {\nif (! StringUtils.isBlank(projectGroupId) || ! StringUtils.isBlank(projectArtifactId)\n|| ! StringUtils.isBlank(projectVersion)) {\nthrow new MojoExecutionException(\"Unable to generate the project, the `projectGroupId`, \" +\n\"`projectArtifactId` and `projectVersion` parameters are not supported when applied to an \" +\n\"existing `pom.xml` file\");\n}\nprojectGroupId = project.getGroupId();\nprojectArtifactId = project.getArtifactId();\nprojectVersion = project.getVersion();\n} else {\naskTheUserForMissingValues();\nif (! isDirectoryEmpty(projectRoot)) {\nprojectRoot = new File(projectArtifactId);\nif (projectRoot.exists()) {\nthrow new MojoExecutionException(\"Unable to create the project - the current directory is not empty and\" +\n\" the directory \" + projectArtifactId + \" exists\");\n}\n}\n}\nboolean success;\ntry {\nsanitizeOptions();\nfinal Map context = new HashMap<>();\ncontext.put(\"className\", className);\ncontext.put(\"path\", path);\nsuccess = new CreateProject(projectRoot)\n.groupId(projectGroupId)\n.artifactId(projectArtifactId)\n.version(projectVersion)\n.doCreateProject(context);\nif (success) {\nnew AddExtensions(new File(projectRoot, \"pom.xml\"))\n.addExtensions(extensions);\n}\n} catch (IOException e) {\nthrow new MojoExecutionException(e.getMessage(), e);\n}\nif (success) {\nprintUserInstructions(projectRoot);\n}\n}\nprivate void askTheUserForMissingValues() throws MojoExecutionException {\nif (! 
session.getRequest().isInteractiveMode()) {\nif (StringUtils.isBlank(projectGroupId)) {\nprojectGroupId = \"io.jboss.shamrock.sample\";\n}\nif (StringUtils.isBlank(projectArtifactId)) {\nprojectArtifactId = \"my-shamrock-project\";\n}\nif (StringUtils.isBlank(projectVersion)) {\nprojectVersion = \"1.0-SNAPSHOT\";\n}\nreturn;\n}\ntry {\nif (StringUtils.isBlank(projectGroupId)) {\nprojectGroupId = prompter.promptWithDefaultValue(\"Set the project groupId\",\n\"io.jboss.shamrock.sample\");\n}\nif (StringUtils.isBlank(projectArtifactId)) {\nprojectArtifactId = prompter.promptWithDefaultValue(\"Set the project artifactId\",\n\"my-shamrock-project\");\n}\nif (StringUtils.isBlank(projectVersion)) {\nprojectVersion = prompter.promptWithDefaultValue(\"Set the Shamrock version\",\n\"1.0-SNAPSHOT\");\n}\nif (StringUtils.isBlank(className)) {\nString defaultResourceName = projectGroupId.replace(\"-\", \".\")\n.replace(\"_\", \".\") + \".HelloResource\";\nclassName = prompter.promptWithDefaultValue(\"Set the resource classname\", defaultResourceName);\n}\nif (StringUtils.isBlank(path)) {\npath = prompter.promptWithDefaultValue(\"Set the resource path \", \"/hello\");\n}\n} catch (IOException e) {\nthrow new MojoExecutionException(\"Unable to get user input\", e);\n}\n}\nprivate void printUserInstructions(File root) {\ngetLog().info(\"\");\ngetLog().info(\"========================================================================================\");\ngetLog().info(ansi().a(\"Your new application has been created in \").bold().a(root.getAbsolutePath()).boldOff().toString());\ngetLog().info(ansi().a(\"Navigate into this directory and launch your application with \")\n.bold()\n.fg(Ansi.Color.CYAN)\n.a(\"mvn compile shamrock:dev\")\n.reset()\n.toString());\ngetLog().info(\nansi().a(\"Your application will be accessible on \").bold().fg(Ansi.Color.CYAN).a(\"http:\ngetLog().info(\"========================================================================================\");\ngetLog().info(\"\");\n}\nprivate boolean isDirectoryEmpty(File dir) {\nif (! dir.isDirectory()) {\nthrow new IllegalArgumentException(\"The specified file must be a directory: \" + dir.getAbsolutePath());\n}\nString[] children = dir.list();\nif (children == null) {\nthrow new IllegalArgumentException(\"The specified directory cannot be accessed: \" + dir.getAbsolutePath());\n}\nreturn children.length == 0;\n}\n}", + "context_after": "class CreateProjectMojo extends AbstractMojo {\npublic static final String PLUGIN_KEY = MojoUtils.getPluginGroupId() + \":\" + MojoUtils.getPluginArtifactId();\nprivate static final String DEFAULT_GROUP_ID = \"org.acme.shamrock.sample\";\n@Parameter(defaultValue = \"${project}\")\nprotected MavenProject project;\n@Parameter(property = \"projectGroupId\")\nprivate String projectGroupId;\n@Parameter(property = \"projectArtifactId\")\nprivate String projectArtifactId;\n@Parameter(property = \"projectVersion\")\nprivate String projectVersion;\n@Parameter(property = \"path\")\nprivate String path;\n@Parameter(property = \"className\")\nprivate String className;\n@Parameter(property = \"extensions\")\nprivate List extensions;\n@Parameter(defaultValue = \"${session}\")\nprivate MavenSession session;\n@Component\nprivate Prompter prompter;\n@Override\npublic void execute() throws MojoExecutionException {\nFile projectRoot = new File(\".\");\nFile pom = new File(projectRoot, \"pom.xml\");\nif (pom.isFile()) {\nif (! StringUtils.isBlank(projectGroupId) || ! StringUtils.isBlank(projectArtifactId)\n|| ! 
StringUtils.isBlank(projectVersion)) {\nthrow new MojoExecutionException(\"Unable to generate the project, the `projectGroupId`, \" +\n\"`projectArtifactId` and `projectVersion` parameters are not supported when applied to an \" +\n\"existing `pom.xml` file\");\n}\nprojectGroupId = project.getGroupId();\nprojectArtifactId = project.getArtifactId();\nprojectVersion = project.getVersion();\n} else {\naskTheUserForMissingValues();\nif (! isDirectoryEmpty(projectRoot)) {\nprojectRoot = new File(projectArtifactId);\nif (projectRoot.exists()) {\nthrow new MojoExecutionException(\"Unable to create the project - the current directory is not empty and\" +\n\" the directory \" + projectArtifactId + \" exists\");\n}\n}\n}\nboolean success;\ntry {\nsanitizeOptions();\nfinal Map context = new HashMap<>();\ncontext.put(\"className\", className);\ncontext.put(\"path\", path);\nsuccess = new CreateProject(projectRoot)\n.groupId(projectGroupId)\n.artifactId(projectArtifactId)\n.version(projectVersion)\n.doCreateProject(context);\nif (success) {\nnew AddExtensions(new File(projectRoot, \"pom.xml\"))\n.addExtensions(extensions);\n}\n} catch (IOException e) {\nthrow new MojoExecutionException(e.getMessage(), e);\n}\nif (success) {\nprintUserInstructions(projectRoot);\n}\n}\nprivate void askTheUserForMissingValues() throws MojoExecutionException {\nif (! session.getRequest().isInteractiveMode() || shouldUseDefaults()) {\nif (StringUtils.isBlank(projectGroupId)) {\nprojectGroupId = DEFAULT_GROUP_ID;\n}\nif (StringUtils.isBlank(projectArtifactId)) {\nprojectArtifactId = \"my-shamrock-project\";\n}\nif (StringUtils.isBlank(projectVersion)) {\nprojectVersion = \"1.0-SNAPSHOT\";\n}\nreturn;\n}\ntry {\nif (StringUtils.isBlank(projectGroupId)) {\nprojectGroupId = prompter.promptWithDefaultValue(\"Set the project groupId\",\nDEFAULT_GROUP_ID);\n}\nif (StringUtils.isBlank(projectArtifactId)) {\nprojectArtifactId = prompter.promptWithDefaultValue(\"Set the project artifactId\",\n\"my-shamrock-project\");\n}\nif (StringUtils.isBlank(projectVersion)) {\nprojectVersion = prompter.promptWithDefaultValue(\"Set the Shamrock version\",\n\"1.0-SNAPSHOT\");\n}\nif (StringUtils.isBlank(className)) {\nString answer = prompter.promptWithDefaultValue(\"Do you want to create a REST resource? 
(y/n)\", \"no\");\nif (isTrueOrYes(answer)) {\nString defaultResourceName = projectGroupId.replace(\"-\", \".\")\n.replace(\"_\", \".\") + \".HelloResource\";\nclassName = prompter.promptWithDefaultValue(\"Set the resource classname\", defaultResourceName);\nif (StringUtils.isBlank(path)) {\npath = prompter.promptWithDefaultValue(\"Set the resource path \", \"/hello\");\n}\n} else {\nclassName = null;\npath = null;\n}\n}\n} catch (IOException e) {\nthrow new MojoExecutionException(\"Unable to get user input\", e);\n}\n}\nprivate boolean shouldUseDefaults() {\nreturn projectArtifactId != null;\n}\nprivate boolean isTrueOrYes(String answer) {\nif (answer == null) {\nreturn false;\n}\nString content = answer.trim().toLowerCase();\nreturn \"true\".equalsIgnoreCase(content) || \"yes\".equalsIgnoreCase(content) || \"y\".equalsIgnoreCase(content);\n}\nprivate void printUserInstructions(File root) {\ngetLog().info(\"\");\ngetLog().info(\"========================================================================================\");\ngetLog().info(ansi().a(\"Your new application has been created in \").bold().a(root.getAbsolutePath()).boldOff().toString());\ngetLog().info(ansi().a(\"Navigate into this directory and launch your application with \")\n.bold()\n.fg(Ansi.Color.CYAN)\n.a(\"mvn compile shamrock:dev\")\n.reset()\n.toString());\ngetLog().info(\nansi().a(\"Your application will be accessible on \").bold().fg(Ansi.Color.CYAN).a(\"http:\ngetLog().info(\"========================================================================================\");\ngetLog().info(\"\");\n}\nprivate boolean isDirectoryEmpty(File dir) {\nif (! dir.isDirectory()) {\nthrow new IllegalArgumentException(\"The specified file must be a directory: \" + dir.getAbsolutePath());\n}\nString[] children = dir.list();\nif (children == null) {\nthrow new IllegalArgumentException(\"The specified directory cannot be accessed: \" + dir.getAbsolutePath());\n}\nreturn children.length == 0;\n}\n}" + }, + { + "comment": "Same here.", + "method_body": "public void testParameterConstructorExtractRoleAuthorities() {\nwhen(jwt.getClaim(\"scp\")).thenReturn(null);\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter(\"roles\",\n\"APPROLE_\");\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(2);\n}", + "target_code": "when(jwt.getClaim(\"scp\")).thenReturn(null);", + "method_body_after": "public void testParameterConstructorExtractRoleAuthorities() {\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter(\"roles\",\n\"APPROLE_\");\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(2);\n}", + "context_before": "class AADJwtBearerTokenAuthenticationConverterTest 
{\nprivate Jwt jwt;\nprivate Map claims;\nprivate Map headers;\nprivate JSONArray jsonArray = new JSONArray().appendElement(\"User.read\").appendElement(\"User.write\");\n@BeforeEach\npublic void init() {\njwt = mock(Jwt.class);\nclaims = new HashMap<>();\nheaders = new HashMap<>();\nclaims.put(\"iss\", \"fake-issuer\");\nclaims.put(\"tid\", \"fake-tid\");\nheaders.put(\"kid\", \"kg2LYs2T0CTjIfj4rt6JIynen38\");\nwhen(jwt.getClaim(\"scp\")).thenReturn(\"Order.read Order.write\");\nwhen(jwt.getClaim(\"roles\")).thenReturn(jsonArray);\nwhen(jwt.getTokenValue()).thenReturn(\"fake-token-value\");\nwhen(jwt.getIssuedAt()).thenReturn(Instant.now());\nwhen(jwt.getHeaders()).thenReturn(headers);\nwhen(jwt.getExpiresAt()).thenReturn(Instant.MAX);\nwhen(jwt.getClaims()).thenReturn(claims);\n}\n@Test\npublic void testCreateUserPrincipal() {\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter();\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getClaims()).isNotEmpty();\nassertThat(principal.getIssuer()).isEqualTo(claims.get(\"iss\"));\nassertThat(principal.getTenantId()).isEqualTo(claims.get(\"tid\"));\n}\n@Test\npublic void testNoArgumentsConstructorDefaultScopeAndRoleAuthorities() {\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter();\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(4);\n}\n@Test\npublic void testNoArgumentsConstructorExtractScopeAuthorities() {\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter(\"scp\");\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(2);\n}\n@Test\npublic void testNoArgumentsConstructorExtractRoleAuthorities() {\nwhen(jwt.getClaim(\"scp\")).thenReturn(null);\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter();\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(2);\n}\n@Test\npublic void testConstructorExtractRoleAuthoritiesWithAuthorityPrefixMapParameter() {\nwhen(jwt.getClaim(\"scp\")).thenReturn(null);\nMap 
claimToAuthorityPrefixMap = new HashMap<>();\nclaimToAuthorityPrefixMap.put(\"roles\", \"APPROLE_\");\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter(\"sub\", claimToAuthorityPrefixMap);\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(2);\nAssertions.assertTrue(principal.getAuthorities().contains(new SimpleGrantedAuthority(\"APPROLE_User.read\")));\nAssertions.assertTrue(principal.getAuthorities().contains(new SimpleGrantedAuthority(\"APPROLE_User.write\")));\n}\n@Test\npublic void testParameterConstructorExtractScopeAuthorities() {\nwhen(jwt.getClaim(\"roles\")).thenReturn(null);\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter(\"scp\");\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(2);\n}\n@Test\n}", + "context_after": "class AADJwtBearerTokenAuthenticationConverterTest {\nprivate Jwt jwt = mock(Jwt.class);\nprivate Map claims = new HashMap<>();\nprivate Map headers = new HashMap<>();\nprivate JSONArray jsonArray = new JSONArray().appendElement(\"User.read\").appendElement(\"User.write\");\n@BeforeAll\npublic void init() {\nclaims.put(\"iss\", \"fake-issuer\");\nclaims.put(\"tid\", \"fake-tid\");\nheaders.put(\"kid\", \"kg2LYs2T0CTjIfj4rt6JIynen38\");\nwhen(jwt.getClaim(\"scp\")).thenReturn(\"Order.read Order.write\");\nwhen(jwt.getClaim(\"roles\")).thenReturn(jsonArray);\nwhen(jwt.getTokenValue()).thenReturn(\"fake-token-value\");\nwhen(jwt.getIssuedAt()).thenReturn(Instant.now());\nwhen(jwt.getHeaders()).thenReturn(headers);\nwhen(jwt.getExpiresAt()).thenReturn(Instant.MAX);\nwhen(jwt.getClaims()).thenReturn(claims);\n}\n@Test\npublic void testCreateUserPrincipal() {\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter();\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getClaims()).isNotEmpty();\nassertThat(principal.getIssuer()).isEqualTo(claims.get(\"iss\"));\nassertThat(principal.getTenantId()).isEqualTo(claims.get(\"tid\"));\n}\n@Test\npublic void testNoArgumentsConstructorDefaultScopeAndRoleAuthorities() {\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter();\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) 
authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(4);\n}\n@Test\npublic void testNoArgumentsConstructorExtractScopeAuthorities() {\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter();\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(4);\n}\n@Test\npublic void testParameterConstructorExtractScopeAuthorities() {\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter(\"scp\");\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(2);\n}\n@Test\n@Test\npublic void testConstructorExtractRoleAuthoritiesWithAuthorityPrefixMapParameter() {\nMap claimToAuthorityPrefixMap = new HashMap<>();\nclaimToAuthorityPrefixMap.put(\"roles\", \"APPROLE_\");\nAADJwtBearerTokenAuthenticationConverter converter = new AADJwtBearerTokenAuthenticationConverter(\"scp\", claimToAuthorityPrefixMap);\nAbstractAuthenticationToken authenticationToken = converter.convert(jwt);\nassertThat(authenticationToken.getPrincipal()).isExactlyInstanceOf(AADOAuth2AuthenticatedPrincipal.class);\nAADOAuth2AuthenticatedPrincipal principal = (AADOAuth2AuthenticatedPrincipal) authenticationToken\n.getPrincipal();\nassertThat(principal.getAttributes()).isNotEmpty();\nassertThat(principal.getAttributes()).hasSize(2);\nassertThat(principal.getAuthorities()).hasSize(2);\nAssertions.assertTrue(principal.getAuthorities().contains(new SimpleGrantedAuthority(\"APPROLE_User.read\")));\nAssertions.assertTrue(principal.getAuthorities().contains(new SimpleGrantedAuthority(\"APPROLE_User.write\")));\n}\n}" + }, + { + "comment": "@AHeise nice catch, this looks much cleaner! 
Now the reader will only return a new future when there is no more input from the underlying reader and the future was completed.", + "method_body": "public CompletableFuture isAvailable() {\nif (currentReader != null) {\nreturn availabilityFuture = currentReader.isAvailable();\n}\nreturn availabilityFuture;\n}", + "target_code": "return availabilityFuture = currentReader.isAvailable();", + "method_body_after": "public CompletableFuture isAvailable() {\nreturn availabilityFuture;\n}", + "context_before": "class HybridSourceReader implements SourceReader {\nprivate static final Logger LOG = LoggerFactory.getLogger(HybridSourceReader.class);\nprivate final SourceReaderContext readerContext;\nprivate final Map switchedSources;\nprivate int currentSourceIndex = -1;\nprivate boolean isFinalSource;\nprivate SourceReader currentReader;\nprivate CompletableFuture availabilityFuture = new CompletableFuture<>();\nprivate List restoredSplits = new ArrayList<>();\npublic HybridSourceReader(\nSourceReaderContext readerContext, Map switchedSources) {\nthis.readerContext = readerContext;\nthis.switchedSources = switchedSources;\n}\n@Override\npublic void start() {\nint initialSourceIndex = currentSourceIndex;\nif (!restoredSplits.isEmpty()) {\ninitialSourceIndex = restoredSplits.get(0).sourceIndex() - 1;\n}\nreaderContext.sendSourceEventToCoordinator(\nnew SourceReaderFinishedEvent(initialSourceIndex));\n}\n@Override\npublic InputStatus pollNext(ReaderOutput output) throws Exception {\nif (currentReader == null) {\nreturn InputStatus.NOTHING_AVAILABLE;\n}\nInputStatus status = currentReader.pollNext(output);\nif (status == InputStatus.END_OF_INPUT) {\nLOG.info(\n\"End of input subtask={} sourceIndex={} {}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\ncurrentReader);\nreaderContext.sendSourceEventToCoordinator(\nnew SourceReaderFinishedEvent(currentSourceIndex));\nif (!isFinalSource) {\nif (availabilityFuture != null && availabilityFuture.isDone()) {\navailabilityFuture = new CompletableFuture();\n}\nreturn InputStatus.NOTHING_AVAILABLE;\n}\n}\nreturn status;\n}\n@Override\npublic List snapshotState(long checkpointId) {\nList state =\ncurrentReader != null\n? 
currentReader.snapshotState(checkpointId)\n: Collections.emptyList();\nreturn HybridSourceSplit.wrapSplits(currentSourceIndex, state);\n}\n@Override\n@Override\npublic void addSplits(List splits) {\nLOG.info(\n\"Adding splits subtask={} sourceIndex={} currentReader={} {}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\ncurrentReader,\nsplits);\nif (currentSourceIndex < 0) {\nrestoredSplits.addAll(splits);\n} else {\nList realSplits = new ArrayList<>(splits.size());\nfor (HybridSourceSplit split : splits) {\nPreconditions.checkState(\nsplit.sourceIndex() == currentSourceIndex,\n\"Split %s while current source is %s\",\nsplit,\ncurrentSourceIndex);\nrealSplits.add(split.getWrappedSplit());\n}\ncurrentReader.addSplits((List) realSplits);\n}\n}\n@Override\npublic void notifyNoMoreSplits() {\nif (currentReader != null) {\ncurrentReader.notifyNoMoreSplits();\n}\nLOG.debug(\n\"No more splits for subtask={} sourceIndex={} currentReader={}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\ncurrentReader);\n}\n@Override\npublic void handleSourceEvents(SourceEvent sourceEvent) {\nif (sourceEvent instanceof SwitchSourceEvent) {\nSwitchSourceEvent sse = (SwitchSourceEvent) sourceEvent;\nLOG.info(\n\"Switch source event: subtask={} sourceIndex={} source={}\",\nreaderContext.getIndexOfSubtask(),\nsse.sourceIndex(),\nsse.source());\nswitchedSources.put(sse.sourceIndex(), sse.source());\nsetCurrentReader(sse.sourceIndex());\nisFinalSource = sse.isFinalSource();\nif (availabilityFuture != null && !availabilityFuture.isDone()) {\navailabilityFuture.complete(null);\n}\n} else {\ncurrentReader.handleSourceEvents(sourceEvent);\n}\n}\n@Override\npublic void close() throws Exception {\nif (currentReader != null) {\ncurrentReader.close();\n}\nLOG.debug(\n\"Reader closed: subtask={} sourceIndex={} currentReader={}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\ncurrentReader);\n}\nprivate void setCurrentReader(int index) {\nPreconditions.checkArgument(index != currentSourceIndex);\nif (currentReader != null) {\ntry {\ncurrentReader.close();\n} catch (Exception e) {\nthrow new RuntimeException(\"Failed to close current reader\", e);\n}\nLOG.debug(\n\"Reader closed: subtask={} sourceIndex={} currentReader={}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\ncurrentReader);\n}\nSource source =\nPreconditions.checkNotNull(\nswitchedSources.get(index), \"Source for index=%s not available\", index);\nSourceReader reader;\ntry {\nreader = source.createReader(readerContext);\n} catch (Exception e) {\nthrow new RuntimeException(\"Failed tp create reader\", e);\n}\nreader.start();\ncurrentSourceIndex = index;\ncurrentReader = reader;\nLOG.debug(\n\"Reader started: subtask={} sourceIndex={} {}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\nreader);\nif (!restoredSplits.isEmpty()) {\nList splits = new ArrayList<>(restoredSplits.size());\nIterator it = restoredSplits.iterator();\nwhile (it.hasNext()) {\nHybridSourceSplit hybridSplit = it.next();\nif (hybridSplit.sourceIndex() == index) {\nsplits.add(hybridSplit);\nit.remove();\n}\n}\naddSplits(splits);\n}\n}\n}", + "context_after": "class HybridSourceReader implements SourceReader {\nprivate static final Logger LOG = LoggerFactory.getLogger(HybridSourceReader.class);\nprivate final SourceReaderContext readerContext;\nprivate final Map switchedSources;\nprivate int currentSourceIndex = -1;\nprivate boolean isFinalSource;\nprivate SourceReader currentReader;\nprivate CompletableFuture availabilityFuture = new 
CompletableFuture<>();\nprivate List restoredSplits = new ArrayList<>();\npublic HybridSourceReader(\nSourceReaderContext readerContext, Map switchedSources) {\nthis.readerContext = readerContext;\nthis.switchedSources = switchedSources;\n}\n@Override\npublic void start() {\nint initialSourceIndex = currentSourceIndex;\nif (!restoredSplits.isEmpty()) {\ninitialSourceIndex = restoredSplits.get(0).sourceIndex() - 1;\n}\nreaderContext.sendSourceEventToCoordinator(\nnew SourceReaderFinishedEvent(initialSourceIndex));\n}\n@Override\npublic InputStatus pollNext(ReaderOutput output) throws Exception {\nif (currentReader == null) {\nreturn InputStatus.NOTHING_AVAILABLE;\n}\nInputStatus status = currentReader.pollNext(output);\nif (status == InputStatus.END_OF_INPUT) {\nLOG.info(\n\"End of input subtask={} sourceIndex={} {}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\ncurrentReader);\nreaderContext.sendSourceEventToCoordinator(\nnew SourceReaderFinishedEvent(currentSourceIndex));\nif (!isFinalSource) {\nif (availabilityFuture.isDone()) {\navailabilityFuture = new CompletableFuture();\n}\nreturn InputStatus.NOTHING_AVAILABLE;\n}\n}\nreturn status;\n}\n@Override\npublic List snapshotState(long checkpointId) {\nList state =\ncurrentReader != null\n? currentReader.snapshotState(checkpointId)\n: Collections.emptyList();\nreturn HybridSourceSplit.wrapSplits(currentSourceIndex, state);\n}\n@Override\n@Override\npublic void addSplits(List splits) {\nLOG.info(\n\"Adding splits subtask={} sourceIndex={} currentReader={} {}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\ncurrentReader,\nsplits);\nif (currentSourceIndex < 0) {\nrestoredSplits.addAll(splits);\n} else {\nList realSplits = new ArrayList<>(splits.size());\nfor (HybridSourceSplit split : splits) {\nPreconditions.checkState(\nsplit.sourceIndex() == currentSourceIndex,\n\"Split %s while current source is %s\",\nsplit,\ncurrentSourceIndex);\nrealSplits.add(split.getWrappedSplit());\n}\ncurrentReader.addSplits((List) realSplits);\n}\n}\n@Override\npublic void notifyNoMoreSplits() {\nif (currentReader != null) {\ncurrentReader.notifyNoMoreSplits();\n}\nLOG.debug(\n\"No more splits for subtask={} sourceIndex={} currentReader={}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\ncurrentReader);\n}\n@Override\npublic void handleSourceEvents(SourceEvent sourceEvent) {\nif (sourceEvent instanceof SwitchSourceEvent) {\nSwitchSourceEvent sse = (SwitchSourceEvent) sourceEvent;\nLOG.info(\n\"Switch source event: subtask={} sourceIndex={} source={}\",\nreaderContext.getIndexOfSubtask(),\nsse.sourceIndex(),\nsse.source());\nswitchedSources.put(sse.sourceIndex(), sse.source());\nsetCurrentReader(sse.sourceIndex());\nisFinalSource = sse.isFinalSource();\nif (!availabilityFuture.isDone()) {\navailabilityFuture.complete(null);\n}\n} else {\ncurrentReader.handleSourceEvents(sourceEvent);\n}\n}\n@Override\npublic void close() throws Exception {\nif (currentReader != null) {\ncurrentReader.close();\n}\nLOG.debug(\n\"Reader closed: subtask={} sourceIndex={} currentReader={}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\ncurrentReader);\n}\nprivate void setCurrentReader(int index) {\nPreconditions.checkArgument(index != currentSourceIndex);\nif (currentReader != null) {\ntry {\ncurrentReader.close();\n} catch (Exception e) {\nthrow new RuntimeException(\"Failed to close current reader\", e);\n}\nLOG.debug(\n\"Reader closed: subtask={} sourceIndex={} 
currentReader={}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\ncurrentReader);\n}\nSource source =\nPreconditions.checkNotNull(\nswitchedSources.get(index), \"Source for index=%s not available\", index);\nSourceReader reader;\ntry {\nreader = source.createReader(readerContext);\n} catch (Exception e) {\nthrow new RuntimeException(\"Failed tp create reader\", e);\n}\nreader.start();\ncurrentSourceIndex = index;\ncurrentReader = reader;\ncurrentReader\n.isAvailable()\n.whenComplete(\n(result, ex) -> {\nif (ex == null) {\navailabilityFuture.complete(result);\n} else {\navailabilityFuture.completeExceptionally(ex);\n}\n});\nLOG.debug(\n\"Reader started: subtask={} sourceIndex={} {}\",\nreaderContext.getIndexOfSubtask(),\ncurrentSourceIndex,\nreader);\nif (!restoredSplits.isEmpty()) {\nList splits = new ArrayList<>(restoredSplits.size());\nIterator it = restoredSplits.iterator();\nwhile (it.hasNext()) {\nHybridSourceSplit hybridSplit = it.next();\nif (hybridSplit.sourceIndex() == index) {\nsplits.add(hybridSplit);\nit.remove();\n}\n}\naddSplits(splits);\n}\n}\n}" + }, + { + "comment": "rename `partitionFieldType` to `partitionFieldTypes` ? change the type definition to `LogicalType[]`? since `prunePartitions` method use `Array[LogicalType]` as argument type.", + "method_body": "public void onMatch(RelOptRuleCall call) {\nFilter filter = call.rel(0);\nLogicalTableScan scan = call.rel(1);\nContext context = call.getPlanner().getContext().unwrap(FlinkContext.class);\nTableSourceTable tableSourceTable = scan.getTable().unwrap(TableSourceTable.class);\nDynamicTableSource dynamicTableSource = tableSourceTable.tableSource();\nRelDataType inputFieldType = filter.getInput().getRowType();\nList inputFieldName = inputFieldType.getFieldNames();\nList partitionedFieldNames = tableSourceTable.catalogTable().getPartitionKeys();\nRelBuilder relBuilder = call.builder();\nRexBuilder rexBuilder = relBuilder.getRexBuilder();\nTuple2, Seq> predicate = RexNodeExtractor.extractPartitionPredicateList(\nfilter.getCondition(),\nFlinkRelOptUtil.getMaxCnfNodeCount(scan),\ninputFieldName.toArray(new String[inputFieldName.size()]),\nrexBuilder,\npartitionedFieldNames.toArray(new String[partitionedFieldNames.size()])\n);\nRexNode partitionPredicate = RexUtil.composeConjunction(rexBuilder, JavaConversions.seqAsJavaList(predicate._1));\nif (partitionPredicate.isAlwaysTrue()){\nreturn;\n}\nList partitionFieldType = partitionedFieldNames.stream().map(name -> {\nint index = inputFieldName.indexOf(name);\nif (index < 0) {\nthrow new RuntimeException(String.format(\"Partitioned key '%s' isn't found in input columns. 
\" +\n\"Validator should have checked that.\", name));\n}\nreturn inputFieldType.getFieldList().get(index).getType(); })\n.map(FlinkTypeFactory::toLogicalType).collect(Collectors.toList());\nList> allPartitions = ((SupportsPartitionPushDown) dynamicTableSource).listPartitions().get();\nRexNode finalPartitionPredicate = adjustPartitionPredicate(inputFieldName, partitionedFieldNames, partitionPredicate);\nList> remainingPartitions = PartitionPruner.prunePartitions(\n((FlinkContext) context).getTableConfig(),\npartitionedFieldNames.toArray(new String[partitionedFieldNames.size()]),\npartitionFieldType.toArray(new LogicalType[partitionFieldType.size()]),\nallPartitions,\nfinalPartitionPredicate\n);\n((SupportsPartitionPushDown) dynamicTableSource).applyPartitions(remainingPartitions);\nFlinkStatistic statistic = tableSourceTable.getStatistic();\nString extraDigest = \"source: [partitions=\" +\nString.join(\", \", ((SupportsPartitionPushDown) dynamicTableSource).listPartitions()\n.get()\n.stream()\n.map(partition -> partition.toString())\n.collect(Collectors.toList())\n.toArray(new String[1])) +\n\"]\";\nTableSourceTable newTableSourceTable = tableSourceTable.copy(dynamicTableSource, statistic, new String[]{extraDigest});\nLogicalTableScan newScan = new LogicalTableScan(\nscan.getCluster(), scan.getTraitSet(), scan.getHints(), newTableSourceTable);\nRexNode nonPartitionPredicate = RexUtil.composeConjunction(rexBuilder, JavaConversions.seqAsJavaList(predicate._2()));\nif (nonPartitionPredicate.isAlwaysTrue()) {\ncall.transformTo(newScan);\n} else {\ncall.transformTo(filter.copy(filter.getTraitSet(), newScan, nonPartitionPredicate));\n}\n}", + "target_code": "List partitionFieldType = partitionedFieldNames.stream().map(name -> {", + "method_body_after": "public void onMatch(RelOptRuleCall call) {\nFilter filter = call.rel(0);\nLogicalTableScan scan = call.rel(1);\nTableSourceTable tableSourceTable = scan.getTable().unwrap(TableSourceTable.class);\nRelDataType inputFieldTypes = filter.getInput().getRowType();\nList inputFieldNames = inputFieldTypes.getFieldNames();\nList partitionFieldNames = tableSourceTable.catalogTable().getPartitionKeys();\nRelBuilder relBuilder = call.builder();\nRexBuilder rexBuilder = relBuilder.getRexBuilder();\nTuple2, Seq> allPredicates = RexNodeExtractor.extractPartitionPredicateList(\nfilter.getCondition(),\nFlinkRelOptUtil.getMaxCnfNodeCount(scan),\ninputFieldNames.toArray(new String[0]),\nrexBuilder,\npartitionFieldNames.toArray(new String[0]));\nRexNode partitionPredicate = RexUtil.composeConjunction(rexBuilder, JavaConversions.seqAsJavaList(allPredicates._1));\nif (partitionPredicate.isAlwaysTrue()) {\nreturn;\n}\nLogicalType[] partitionFieldTypes = partitionFieldNames.stream()\n.map(name -> {\nint index = inputFieldNames.indexOf(name);\nif (index < 0) {\nthrow new TableException(String.format(\"Partitioned key '%s' isn't found in input columns. 
\" +\n\"Validator should have checked that.\", name));\n}\nreturn inputFieldTypes.getFieldList().get(index).getType(); })\n.map(FlinkTypeFactory::toLogicalType)\n.toArray(LogicalType[]::new);\nRexNode finalPartitionPredicate = adjustPartitionPredicate(inputFieldNames, partitionFieldNames, partitionPredicate);\nFlinkContext context = call.getPlanner().getContext().unwrap(FlinkContext.class);\nFunction>, List>> defaultPruner = partitions -> PartitionPruner.prunePartitions(\ncontext.getTableConfig(),\npartitionFieldNames.toArray(new String[0]),\npartitionFieldTypes,\npartitions,\nfinalPartitionPredicate);\nOptional>> remainingPartitions =\nreadPartitionsAndPrune(context, tableSourceTable, defaultPruner, allPredicates._1(), inputFieldNames);\nDynamicTableSource dynamicTableSource = tableSourceTable.tableSource().copy();\nremainingPartitions.ifPresent(((SupportsPartitionPushDown) dynamicTableSource)::applyPartitions);\nTableStats newTableStat = null;\nObjectIdentifier identifier = tableSourceTable.tableIdentifier();\nObjectPath tablePath = identifier.toObjectPath();\nOptional catalogOptional = context.getCatalogManager().getCatalog(identifier.getCatalogName());\nOptional partitionStats;\nif (remainingPartitions.isPresent() && catalogOptional.isPresent()) {\nfor (Map partition: remainingPartitions.get()) {\npartitionStats = getPartitionStats(catalogOptional.get(), tablePath, partition);\nif (!partitionStats.isPresent()) {\nnewTableStat = null;\nbreak;\n} else {\nnewTableStat = newTableStat == null ? partitionStats.get() : newTableStat.merge(partitionStats.get());\n}\n}\n}\nFlinkStatistic newStatistic = FlinkStatistic.builder()\n.statistic(tableSourceTable.getStatistic())\n.tableStats(newTableStat)\n.build();\nString extraDigest = remainingPartitions.map(partition -> (\"partitions=[\" +\nString.join(\", \", partition\n.stream()\n.map(Object::toString)\n.toArray(String[]::new)) +\n\"]\")).orElse(\"partitions=[]\");\nTableSourceTable newTableSourceTable = tableSourceTable.copy(dynamicTableSource, newStatistic, new String[]{extraDigest});\nLogicalTableScan newScan = LogicalTableScan.create(scan.getCluster(), newTableSourceTable, scan.getHints());\nRexNode nonPartitionPredicate = RexUtil.composeConjunction(rexBuilder, JavaConversions.seqAsJavaList(allPredicates._2()));\nif (nonPartitionPredicate.isAlwaysTrue()) {\ncall.transformTo(newScan);\n} else {\nFilter newFilter = filter.copy(filter.getTraitSet(), newScan, nonPartitionPredicate);\ncall.transformTo(newFilter);\n}\n}", + "context_before": "class PushPartitionIntoTableSourceScanRule extends RelOptRule {\npublic static final PushPartitionIntoTableSourceScanRule INSTANCE = new PushPartitionIntoTableSourceScanRule();\npublic PushPartitionIntoTableSourceScanRule(){\nsuper(operand(Filter.class,\noperand(LogicalTableScan.class, none())),\n\"PushPartitionTableSourceScanRule\");\n}\n@Override\npublic boolean matches(RelOptRuleCall call) {\nFilter filter = call.rel(0);\nif (filter.getCondition() == null) {\nreturn false;\n}\nTableSourceTable tableSourceTable = call.rel(1).getTable().unwrap(TableSourceTable.class);\nif (tableSourceTable == null){\nreturn false;\n}\nDynamicTableSource dynamicTableSource = tableSourceTable.tableSource();\nif (!(dynamicTableSource instanceof SupportsPartitionPushDown)) {\nreturn false;\n}\nOptional>> partitions = ((SupportsPartitionPushDown) dynamicTableSource).listPartitions();\nreturn partitions.isPresent()\n&& !partitions.get().isEmpty()\n&& !Arrays.stream(tableSourceTable.extraDigests()).anyMatch(digest -> 
digest.startsWith(\"source: [partitions=\"));\n}\n@Override\nprivate RexNode adjustPartitionPredicate(List inputFieldNames, List partitionFieldNames, RexNode partitionPredicate) {\nreturn partitionPredicate.accept(new RexShuttle(){\n@Override\npublic RexNode visitInputRef(RexInputRef inputRef) {\nint index = inputRef.getIndex();\nString fieldName = inputFieldNames.get(index);\nint newIndex = partitionFieldNames.indexOf(fieldName);\nif (newIndex < 0) {\nthrow new RuntimeException(String.format(\"Field name '%s' isn't found in partitioned columns.\" +\n\" Validator should have checked that.\", fieldName));\n}\nif (newIndex == index){\nreturn inputRef;\n} else {\nreturn new RexInputRef(newIndex, inputRef.getType());\n}\n}\n});\n}\n}", + "context_after": "class PushPartitionIntoTableSourceScanRule extends RelOptRule {\npublic static final PushPartitionIntoTableSourceScanRule INSTANCE = new PushPartitionIntoTableSourceScanRule();\npublic PushPartitionIntoTableSourceScanRule() {\nsuper(operand(Filter.class,\noperand(LogicalTableScan.class, none())),\n\"PushPartitionIntoTableSourceScanRule\");\n}\n@Override\npublic boolean matches(RelOptRuleCall call) {\nFilter filter = call.rel(0);\nif (filter.getCondition() == null) {\nreturn false;\n}\nTableSourceTable tableSourceTable = call.rel(1).getTable().unwrap(TableSourceTable.class);\nif (tableSourceTable == null) {\nreturn false;\n}\nDynamicTableSource dynamicTableSource = tableSourceTable.tableSource();\nif (!(dynamicTableSource instanceof SupportsPartitionPushDown)) {\nreturn false;\n}\nCatalogTable catalogTable = tableSourceTable.catalogTable();\nif (!catalogTable.isPartitioned() || catalogTable.getPartitionKeys().isEmpty()) {\nreturn false;\n}\nreturn Arrays.stream(tableSourceTable.extraDigests()).noneMatch(digest -> digest.startsWith(\"partitions=[\"));\n}\n@Override\n/**\n* adjust the partition field reference index to evaluate the partition values.\n* e.g. the original input fields is: a, b, c, p, and p is partition field. the partition values\n* are: List(Map(\"p\"->\"1\"), Map(\"p\" -> \"2\"), Map(\"p\" -> \"3\")). If the original partition\n* predicate is $3 > 1. 
after adjusting, the new predicate is ($0 > 1).\n* and use ($0 > 1) to evaluate partition values (row(1), row(2), row(3)).\n*/\nprivate RexNode adjustPartitionPredicate(List inputFieldNames, List partitionFieldNames, RexNode partitionPredicate) {\nreturn partitionPredicate.accept(new RexShuttle() {\n@Override\npublic RexNode visitInputRef(RexInputRef inputRef) {\nint index = inputRef.getIndex();\nString fieldName = inputFieldNames.get(index);\nint newIndex = partitionFieldNames.indexOf(fieldName);\nif (newIndex < 0) {\nthrow new TableException(String.format(\"Field name '%s' isn't found in partitioned columns.\" +\n\" Validator should have checked that.\", fieldName));\n}\nif (newIndex == index) {\nreturn inputRef;\n} else {\nreturn new RexInputRef(newIndex, inputRef.getType());\n}\n}\n});\n}\nprivate Optional>> readPartitionsAndPrune(\nFlinkContext context,\nTableSourceTable tableSourceTable,\nFunction>, List>> pruner,\nSeq partitionPredicate,\nList inputFieldNames) {\nOptional catalogOptional = context.getCatalogManager().getCatalog(tableSourceTable.tableIdentifier().getCatalogName());\nList> remainingPartitions;\nOptional>> optionalPartitions;\nDynamicTableSource dynamicTableSource = tableSourceTable.tableSource();\nObjectIdentifier identifier = tableSourceTable.tableIdentifier();\ntry {\noptionalPartitions = ((SupportsPartitionPushDown) dynamicTableSource).listPartitions();\nif (optionalPartitions.isPresent() && !optionalPartitions.get().isEmpty()) {\nremainingPartitions = pruner.apply(optionalPartitions.get());\nreturn remainingPartitions != null ? Optional.of(remainingPartitions) : Optional.empty();\n} else {\nreturn Optional.empty();\n}\n} catch (UnsupportedOperationException e) {\nif (!catalogOptional.isPresent()) {\nthrow new TableException(\nString.format(\"Table %s must from a catalog, but %s is not a catalog\",\nidentifier.asSummaryString(), identifier.getCatalogName()), e);\n}\ntry {\nreturn readPartitionFromCatalogAndPrune(\ncontext,\ncatalogOptional.get(),\nidentifier,\ninputFieldNames,\npartitionPredicate,\npruner);\n} catch (TableNotExistException tableNotExistException) {\nthrow new TableException(String.format(\"Table %s is not found in catalog.\", identifier.asSummaryString()), e);\n} catch (TableNotPartitionedException tableNotPartitionedException) {\nthrow new TableException(\nString.format(\"Table %s is not a partitionable source. 
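A minimal sketch of the index remapping that the `adjustPartitionPredicate` javadoc describes: a field reference into the full input row (`a, b, c, p`) is rewritten to point into the partition-only row, so `$3` becomes `$0`. Plain `List` lookups stand in for Calcite's `RexInputRef`/`RexShuttle`, and `remap` is a hypothetical helper, not the actual rule code:

```java
import java.util.Arrays;
import java.util.List;

public class PartitionIndexRemapSketch {
    // Rewrites a field index from the full input row to the partition-only
    // row by resolving the field name, as the RexShuttle above does.
    static int remap(int inputIndex, List<String> inputFieldNames, List<String> partitionFieldNames) {
        String fieldName = inputFieldNames.get(inputIndex);
        int newIndex = partitionFieldNames.indexOf(fieldName);
        if (newIndex < 0) {
            throw new IllegalStateException(String.format(
                    "Field name '%s' isn't found in partitioned columns.", fieldName));
        }
        return newIndex;
    }

    public static void main(String[] args) {
        List<String> inputFieldNames = Arrays.asList("a", "b", "c", "p");
        List<String> partitionFieldNames = Arrays.asList("p");
        System.out.println(remap(3, inputFieldNames, partitionFieldNames)); // 0
    }
}
```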
Validator should have checked it.\", identifier.asSummaryString()),\ntableNotPartitionedException);\n}\n}\n}\nprivate Optional>> readPartitionFromCatalogAndPrune(\nFlinkContext context,\nCatalog catalog,\nObjectIdentifier tableIdentifier,\nList allFieldNames,\nSeq partitionPredicate,\nFunction>, List>> pruner)\nthrows TableNotExistException, TableNotPartitionedException {\nObjectPath tablePath = tableIdentifier.toObjectPath();\nRexNodeToExpressionConverter converter = new RexNodeToExpressionConverter(\nallFieldNames.toArray(new String[0]),\ncontext.getFunctionCatalog(),\ncontext.getCatalogManager(),\nTimeZone.getTimeZone(context.getTableConfig().getLocalTimeZone()));\nArrayList partitionFilters = new ArrayList<>();\nOption subExpr;\nfor (RexNode node: JavaConversions.seqAsJavaList(partitionPredicate)) {\nsubExpr = node.accept(converter);\nif (!subExpr.isEmpty()) {\npartitionFilters.add(subExpr.get());\n} else {\nreturn readPartitionFromCatalogWithoutFilterAndPrune(catalog, tablePath, pruner);\n}\n}\ntry {\nList> remainingPartitions = catalog.listPartitionsByFilter(tablePath, partitionFilters)\n.stream()\n.map(CatalogPartitionSpec::getPartitionSpec)\n.collect(Collectors.toList());\nreturn Optional.of(remainingPartitions);\n} catch (UnsupportedOperationException e) {\nreturn readPartitionFromCatalogWithoutFilterAndPrune(catalog, tablePath, pruner);\n}\n}\nprivate Optional>> readPartitionFromCatalogWithoutFilterAndPrune(\nCatalog catalog,\nObjectPath tablePath,\nFunction>, List>> pruner)\nthrows TableNotExistException, CatalogException, TableNotPartitionedException {\nList> allPartitions, remainingPartitions;\nallPartitions = catalog.listPartitions(tablePath)\n.stream()\n.map(CatalogPartitionSpec::getPartitionSpec)\n.collect(Collectors.toList());\nif (allPartitions.size() > 0) {\nremainingPartitions = pruner.apply(allPartitions);\nreturn remainingPartitions != null ? Optional.of(remainingPartitions) : Optional.empty();\n} else {\nreturn Optional.empty();\n}\n}\nprivate Optional getPartitionStats(Catalog catalog, ObjectPath tablePath, Map partition) {\ntry {\nCatalogPartitionSpec spec = new CatalogPartitionSpec(partition);\nCatalogTableStatistics partitionStat = catalog.getPartitionStatistics(tablePath, spec);\nCatalogColumnStatistics partitionColStat = catalog.getPartitionColumnStatistics(tablePath, spec);\nTableStats stats = CatalogTableStatisticsConverter.convertToTableStats(partitionStat, partitionColStat);\nreturn Optional.of(stats);\n} catch (PartitionNotExistException e) {\nreturn Optional.empty();\n}\n}\n}" + }, + { + "comment": "Shouldn't this be corrected as follows? Otherwise returns `true` even when the `annotationKeyword` is missing. 
```suggestion && (semicolonToken.isMissing() || cursor < semicolonToken.textRange().endOffset()); ```", + "method_body": "public boolean onPreValidation(BallerinaCompletionContext context, AnnotationDeclarationNode node) {\nToken annotationKeyword = node.annotationKeyword();\nToken semicolonToken = node.semicolonToken();\nint cursor = context.getCursorPositionInTree();\nreturn !annotationKeyword.isMissing() && cursor >= annotationKeyword.textRange().endOffset()\n&& (semicolonToken.isMissing()) || cursor < semicolonToken.textRange().endOffset();\n}", + "target_code": "&& (semicolonToken.isMissing()) || cursor < semicolonToken.textRange().endOffset();", + "method_body_after": "public boolean onPreValidation(BallerinaCompletionContext context, AnnotationDeclarationNode node) {\nToken annotationKeyword = node.annotationKeyword();\nToken semicolonToken = node.semicolonToken();\nint cursor = context.getCursorPositionInTree();\nreturn !annotationKeyword.isMissing() && cursor >= annotationKeyword.textRange().endOffset()\n&& (semicolonToken.isMissing() || cursor < semicolonToken.textRange().endOffset());\n}", + "context_before": "class AnnotationDeclarationNodeContext extends AbstractCompletionProvider {\npublic AnnotationDeclarationNodeContext() {\nsuper(AnnotationDeclarationNode.class);\n}\n@Override\npublic List getCompletions(BallerinaCompletionContext context, AnnotationDeclarationNode node) {\nList completionItemList = new ArrayList<>();\nif (this.onTypeDescriptorContext(context, node)) {\nPredicate predicate = symbol -> symbol.kind() == SymbolKind.TYPE_DEFINITION\n&& this.isValidTypeDescForAnnotations((TypeDefinitionSymbol) symbol);\nif (QNameReferenceUtil.onQualifiedNameIdentifier(context, context.getNodeAtCursor())) {\nQualifiedNameReferenceNode qNameRef = (QualifiedNameReferenceNode) context.getNodeAtCursor();\nList filteredSymbols = QNameReferenceUtil.getModuleContent(context, qNameRef, predicate);\ncompletionItemList.addAll(this.getCompletionItemList(filteredSymbols, context));\n} else {\nList filteredSymbols = context.visibleSymbols(context.getCursorPosition()).stream()\n.filter(predicate)\n.collect(Collectors.toList());\ncompletionItemList.addAll(this.getCompletionItemList(filteredSymbols, context));\ncompletionItemList.addAll(this.getModuleCompletionItems(context));\ncompletionItemList.add(new SnippetCompletionItem(context, Snippet.DEF_RECORD_TYPE_DESC.get()));\ncompletionItemList.add(new SnippetCompletionItem(context, Snippet.DEF_CLOSED_RECORD_TYPE_DESC.get()));\n}\n} else if (this.onSuggestOnKeyword(context, node)) {\ncompletionItemList.add(new SnippetCompletionItem(context, Snippet.KW_ON.get()));\n} else if (this.onSuggestAttachmentPoints(context, node)) {\ncompletionItemList.addAll(this.getAnnotationAttachmentPoints(context, node));\n}\nthis.sort(context, node, completionItemList);\nreturn completionItemList;\n}\n@Override\nprivate boolean onTypeDescriptorContext(BallerinaCompletionContext context, AnnotationDeclarationNode node) {\nOptional typeDesc = node.typeDescriptor();\nToken annotationTag = node.annotationTag();\nint cursor = context.getCursorPositionInTree();\nreturn (typeDesc.isEmpty() && annotationTag.isMissing())\n|| (typeDesc.isEmpty() && !annotationTag.isMissing()\n&& cursor <= annotationTag.textRange().endOffset())\n|| (typeDesc.isPresent() && cursor >= typeDesc.get().textRange().startOffset()\n&& cursor <= typeDesc.get().textRange().endOffset());\n}\nprivate boolean onSuggestOnKeyword(BallerinaCompletionContext context, AnnotationDeclarationNode node) {\nToken 
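The suggested fix hinges on Java operator precedence: `&&` binds tighter than `||`, so without the enclosing parentheses the condition evaluates to `true` whenever the cursor is before the semicolon, even if the annotation keyword is missing. A tiny demo with hypothetical boolean stand-ins for the token checks:

```java
public class PrecedenceSketch {
    public static void main(String[] args) {
        boolean keywordPresent = false;  // stands in for !annotationKeyword.isMissing()
        boolean semicolonMissing = true; // stands in for semicolonToken.isMissing()
        boolean cursorBeforeEnd = true;  // stands in for cursor < semicolonToken...endOffset()

        // Buggy grouping: && binds tighter than ||, so this parses as
        // (keywordPresent && semicolonMissing) || cursorBeforeEnd
        // and is true even though the keyword is absent.
        boolean buggy = keywordPresent && (semicolonMissing) || cursorBeforeEnd;

        // Intended grouping: the || must live inside the parentheses.
        boolean fixed = keywordPresent && (semicolonMissing || cursorBeforeEnd);

        System.out.println(buggy); // true
        System.out.println(fixed); // false
    }
}
```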
annotationTag = node.annotationTag();\nSeparatedNodeList attachPoints = node.attachPoints();\nint cursor = context.getCursorPositionInTree();\nreturn !annotationTag.isMissing() && attachPoints.isEmpty() && node.onKeyword().isEmpty()\n&& annotationTag.textRange().endOffset() + 1 <= cursor;\n}\nprivate boolean onSuggestAttachmentPoints(BallerinaCompletionContext context, AnnotationDeclarationNode node) {\nOptional onKeyword = node.onKeyword();\nint cursor = context.getCursorPositionInTree();\nreturn onKeyword.isPresent() && cursor >= onKeyword.get().textRange().endOffset() + 1;\n}\nprivate boolean isValidTypeDescForAnnotations(TypeDefinitionSymbol typeDefinitionSymbol) {\nTypeSymbol typeSymbol = typeDefinitionSymbol.typeDescriptor();\nTypeSymbol rawType = CommonUtil.getRawType(typeSymbol);\nrawType = rawType.typeKind() == TypeDescKind.ARRAY\n? CommonUtil.getRawType(((ArrayTypeSymbol) rawType).memberTypeDescriptor()) : rawType;\nreturn rawType.typeKind() == TypeDescKind.MAP || rawType.typeKind() == TypeDescKind.RECORD;\n}\nprivate List getAnnotationAttachmentPoints(BallerinaCompletionContext context,\nAnnotationDeclarationNode node) {\nAttachmentPointContext attachmentPointContext = getAttachmentPointContext(context, node);\nList itemSnippets = new ArrayList<>();\nswitch (attachmentPointContext) {\ncase ANY:\nitemSnippets.addAll(anyAttachmentPoints());\nbreak;\ncase SOURCE:\nitemSnippets.addAll(dualAttachmentPoints());\nitemSnippets.addAll(sourceOnlyAttachmentPoints());\nbreak;\ncase OBJECT:\nitemSnippets.addAll(Arrays.asList(Snippet.KW_FUNCTION, Snippet.KW_FIELD));\nbreak;\ncase RECORD:\nitemSnippets.addAll(Collections.singletonList(Snippet.KW_FIELD));\nbreak;\ncase REMOTE:\nitemSnippets.addAll(Collections.singletonList(Snippet.KW_FUNCTION));\nbreak;\ncase SERVICE:\nitemSnippets.addAll(Arrays.asList(Snippet.KW_REMOTE, Snippet.KW_REMOTE_FUNCTION));\nbreak;\ndefault:\nbreak;\n}\nreturn itemSnippets.stream()\n.map(snippet -> new SnippetCompletionItem(context, snippet.get()))\n.collect(Collectors.toList());\n}\nprivate List anyAttachmentPoints() {\nreturn Arrays.asList(Snippet.KW_SOURCE, Snippet.KW_TYPE, Snippet.KW_CLASS, Snippet.KW_FUNCTION,\nSnippet.KW_OBJ_FUNCTION, Snippet.KW_SERVICE_REMOTE_FUNCTION, Snippet.KW_PARAMETER,\nSnippet.KW_RETURN, Snippet.KW_SERVICE, Snippet.KW_OBJECT, Snippet.KW_RECORD, Snippet.KW_OBJECT_FIELD,\nSnippet.KW_RECORD_FIELD, Snippet.KW_FIELD, Snippet.KW_SOURCE_ANNOTATION, Snippet.KW_SOURCE_EXTERNAL,\nSnippet.KW_SOURCE_VAR, Snippet.KW_SOURCE_CONST, Snippet.KW_SOURCE_LISTENER, Snippet.KW_SOURCE_WORKER);\n}\nprivate List dualAttachmentPoints() {\nreturn Arrays.asList(Snippet.KW_TYPE, Snippet.KW_CLASS, Snippet.KW_OBJ_FUNCTION,\nSnippet.KW_SERVICE_REMOTE_FUNCTION, Snippet.KW_PARAMETER, Snippet.KW_RETURN, Snippet.KW_SERVICE,\nSnippet.KW_OBJECT_FIELD, Snippet.KW_RECORD_FIELD, Snippet.KW_FIELD, Snippet.KW_FUNCTION);\n}\nprivate List sourceOnlyAttachmentPoints() {\nreturn Arrays.asList(Snippet.KW_ANNOTATION, Snippet.KW_EXTERNAL, Snippet.KW_VAR,\nSnippet.KW_CONST, Snippet.KW_LISTENER, Snippet.KW_WORKER);\n}\nprivate AttachmentPointContext getAttachmentPointContext(BallerinaCompletionContext context,\nAnnotationDeclarationNode node) {\nSeparatedNodeList attachmentPoints = node.attachPoints();\nOptional attachmentPointAtCursor =\nthis.attachmentPointAtCursor(context, attachmentPoints);\nif (attachmentPointAtCursor.isEmpty()) {\nreturn AttachmentPointContext.ANY;\n}\nNodeList identifiers = attachmentPointAtCursor.get().identifiers();\nint cursor = 
context.getCursorPositionInTree();\nOptional immediatePreviousToken = Optional.empty();\nfor (int i = identifiers.size() - 1; i >= 0; i--) {\nToken token = identifiers.get(i);\nif (token.isMissing()) {\ncontinue;\n}\nif (cursor > token.textRange().endOffset()) {\nimmediatePreviousToken = Optional.of(token);\nbreak;\n}\n}\nOptional sourceKeyword = attachmentPointAtCursor.get().sourceKeyword();\nif (sourceKeyword.isPresent() && sourceKeyword.get().textRange().endOffset() < cursor\n&& (immediatePreviousToken.isEmpty()\n|| immediatePreviousToken.get().textRange().endOffset() > cursor)) {\nreturn AttachmentPointContext.SOURCE;\n}\nif (immediatePreviousToken.isEmpty()) {\nreturn AttachmentPointContext.ANY;\n}\nSyntaxKind immediatePreviousTokenKind = immediatePreviousToken.get().kind();\nif (immediatePreviousTokenKind == SyntaxKind.OBJECT_KEYWORD) {\nreturn AttachmentPointContext.OBJECT;\n}\nif (immediatePreviousTokenKind == SyntaxKind.SERVICE_KEYWORD) {\nreturn AttachmentPointContext.SERVICE;\n}\nif (immediatePreviousTokenKind == SyntaxKind.REMOTE_KEYWORD) {\nreturn AttachmentPointContext.REMOTE;\n}\nif (immediatePreviousTokenKind == SyntaxKind.RECORD_KEYWORD) {\nreturn AttachmentPointContext.RECORD;\n}\nreturn AttachmentPointContext.NONE;\n}\nprivate Optional attachmentPointAtCursor(BallerinaCompletionContext context,\nSeparatedNodeList nodes) {\nif (nodes.isEmpty()) {\nreturn Optional.empty();\n}\nint cursor = context.getCursorPositionInTree();\nint separatorIndex = -1;\nfor (int i = nodes.separatorSize(); i > 0; i--) {\nToken separator = nodes.getSeparator(i - 1);\nif (separator.textRange().endOffset() <= cursor) {\nseparatorIndex = i - 1;\nbreak;\n}\n}\nint nodeIndex = separatorIndex + 1;\nif (nodeIndex > nodes.size() || nodes.get(nodeIndex).isMissing()) {\nreturn Optional.empty();\n}\nreturn Optional.of((AnnotationAttachPointNode) nodes.get(nodeIndex));\n}\nprivate enum AttachmentPointContext {\nANY,\nSOURCE,\nDUAL,\nOBJECT,\nSERVICE,\nREMOTE,\nRECORD,\nNONE\n}\n}", + "context_after": "class AnnotationDeclarationNodeContext extends AbstractCompletionProvider {\npublic AnnotationDeclarationNodeContext() {\nsuper(AnnotationDeclarationNode.class);\n}\n@Override\npublic List getCompletions(BallerinaCompletionContext context, AnnotationDeclarationNode node) {\nList completionItemList = new ArrayList<>();\nif (this.onTypeDescriptorContext(context, node)) {\nPredicate predicate = symbol -> symbol.kind() == SymbolKind.TYPE_DEFINITION\n&& this.isValidTypeDescForAnnotations((TypeDefinitionSymbol) symbol);\nif (QNameReferenceUtil.onQualifiedNameIdentifier(context, context.getNodeAtCursor())) {\nQualifiedNameReferenceNode qNameRef = (QualifiedNameReferenceNode) context.getNodeAtCursor();\nList filteredSymbols = QNameReferenceUtil.getModuleContent(context, qNameRef, predicate);\ncompletionItemList.addAll(this.getCompletionItemList(filteredSymbols, context));\n} else {\nList filteredSymbols = context.visibleSymbols(context.getCursorPosition()).stream()\n.filter(predicate)\n.collect(Collectors.toList());\ncompletionItemList.addAll(this.getCompletionItemList(filteredSymbols, context));\ncompletionItemList.addAll(this.getModuleCompletionItems(context));\ncompletionItemList.add(new SnippetCompletionItem(context, Snippet.DEF_RECORD_TYPE_DESC.get()));\ncompletionItemList.add(new SnippetCompletionItem(context, Snippet.DEF_CLOSED_RECORD_TYPE_DESC.get()));\n}\n} else if (this.onSuggestOnKeyword(context, node)) {\ncompletionItemList.add(new SnippetCompletionItem(context, Snippet.KW_ON.get()));\n} else if 
(this.onSuggestAttachmentPoints(context, node)) {\ncompletionItemList.addAll(this.getAnnotationAttachmentPoints(context, node));\n}\nthis.sort(context, node, completionItemList);\nreturn completionItemList;\n}\n@Override\nprivate boolean onTypeDescriptorContext(BallerinaCompletionContext context, AnnotationDeclarationNode node) {\nOptional typeDesc = node.typeDescriptor();\nToken annotationTag = node.annotationTag();\nint cursor = context.getCursorPositionInTree();\nreturn (typeDesc.isEmpty() && annotationTag.isMissing())\n|| (typeDesc.isEmpty() && !annotationTag.isMissing()\n&& cursor <= annotationTag.textRange().endOffset())\n|| (typeDesc.isPresent() && cursor >= typeDesc.get().textRange().startOffset()\n&& cursor <= typeDesc.get().textRange().endOffset());\n}\nprivate boolean onSuggestOnKeyword(BallerinaCompletionContext context, AnnotationDeclarationNode node) {\nToken annotationTag = node.annotationTag();\nSeparatedNodeList attachPoints = node.attachPoints();\nint cursor = context.getCursorPositionInTree();\nreturn !annotationTag.isMissing() && attachPoints.isEmpty() && node.onKeyword().isEmpty()\n&& annotationTag.textRange().endOffset() + 1 <= cursor;\n}\nprivate boolean onSuggestAttachmentPoints(BallerinaCompletionContext context, AnnotationDeclarationNode node) {\nOptional onKeyword = node.onKeyword();\nint cursor = context.getCursorPositionInTree();\nreturn onKeyword.isPresent() && cursor >= onKeyword.get().textRange().endOffset() + 1;\n}\nprivate boolean isValidTypeDescForAnnotations(TypeDefinitionSymbol typeDefinitionSymbol) {\nTypeSymbol typeSymbol = typeDefinitionSymbol.typeDescriptor();\nTypeSymbol rawType = CommonUtil.getRawType(typeSymbol);\nrawType = rawType.typeKind() == TypeDescKind.ARRAY\n? CommonUtil.getRawType(((ArrayTypeSymbol) rawType).memberTypeDescriptor()) : rawType;\nreturn rawType.typeKind() == TypeDescKind.MAP || rawType.typeKind() == TypeDescKind.RECORD;\n}\nprivate List getAnnotationAttachmentPoints(BallerinaCompletionContext context,\nAnnotationDeclarationNode node) {\nAttachmentPointContext attachmentPointContext = getAttachmentPointContext(context, node);\nList itemSnippets = new ArrayList<>();\nswitch (attachmentPointContext) {\ncase ANY:\nitemSnippets.addAll(anyAttachmentPoints());\nbreak;\ncase SOURCE:\nitemSnippets.addAll(dualAttachmentPoints());\nitemSnippets.addAll(sourceOnlyAttachmentPoints());\nbreak;\ncase OBJECT:\nitemSnippets.addAll(Arrays.asList(Snippet.KW_FUNCTION, Snippet.KW_FIELD));\nbreak;\ncase RECORD:\nitemSnippets.addAll(Collections.singletonList(Snippet.KW_FIELD));\nbreak;\ncase REMOTE:\nitemSnippets.addAll(Collections.singletonList(Snippet.KW_FUNCTION));\nbreak;\ncase SERVICE:\nitemSnippets.addAll(Arrays.asList(Snippet.KW_REMOTE, Snippet.KW_REMOTE_FUNCTION));\nbreak;\ndefault:\nbreak;\n}\nreturn itemSnippets.stream()\n.map(snippet -> new SnippetCompletionItem(context, snippet.get()))\n.collect(Collectors.toList());\n}\nprivate List anyAttachmentPoints() {\nreturn Arrays.asList(Snippet.KW_SOURCE, Snippet.KW_TYPE, Snippet.KW_CLASS, Snippet.KW_FUNCTION,\nSnippet.KW_OBJ_FUNCTION, Snippet.KW_SERVICE_REMOTE_FUNCTION, Snippet.KW_PARAMETER,\nSnippet.KW_RETURN, Snippet.KW_SERVICE, Snippet.KW_OBJECT, Snippet.KW_RECORD, Snippet.KW_OBJECT_FIELD,\nSnippet.KW_RECORD_FIELD, Snippet.KW_FIELD, Snippet.KW_SOURCE_ANNOTATION, Snippet.KW_SOURCE_EXTERNAL,\nSnippet.KW_SOURCE_VAR, Snippet.KW_SOURCE_CONST, Snippet.KW_SOURCE_LISTENER, Snippet.KW_SOURCE_WORKER);\n}\nprivate List dualAttachmentPoints() {\nreturn Arrays.asList(Snippet.KW_TYPE, Snippet.KW_CLASS, 
Snippet.KW_OBJ_FUNCTION,\nSnippet.KW_SERVICE_REMOTE_FUNCTION, Snippet.KW_PARAMETER, Snippet.KW_RETURN, Snippet.KW_SERVICE,\nSnippet.KW_OBJECT_FIELD, Snippet.KW_RECORD_FIELD, Snippet.KW_FIELD, Snippet.KW_FUNCTION);\n}\nprivate List sourceOnlyAttachmentPoints() {\nreturn Arrays.asList(Snippet.KW_ANNOTATION, Snippet.KW_EXTERNAL, Snippet.KW_VAR,\nSnippet.KW_CONST, Snippet.KW_LISTENER, Snippet.KW_WORKER);\n}\nprivate AttachmentPointContext getAttachmentPointContext(BallerinaCompletionContext context,\nAnnotationDeclarationNode node) {\nSeparatedNodeList attachmentPoints = node.attachPoints();\nOptional attachmentPointAtCursor =\nthis.attachmentPointAtCursor(context, attachmentPoints);\nif (attachmentPointAtCursor.isEmpty()) {\nreturn AttachmentPointContext.ANY;\n}\nNodeList identifiers = attachmentPointAtCursor.get().identifiers();\nint cursor = context.getCursorPositionInTree();\nOptional immediatePreviousToken = Optional.empty();\nfor (int i = identifiers.size() - 1; i >= 0; i--) {\nToken token = identifiers.get(i);\nif (token.isMissing()) {\ncontinue;\n}\nif (cursor > token.textRange().endOffset()) {\nimmediatePreviousToken = Optional.of(token);\nbreak;\n}\n}\nOptional sourceKeyword = attachmentPointAtCursor.get().sourceKeyword();\nif (sourceKeyword.isPresent() && sourceKeyword.get().textRange().endOffset() < cursor\n&& (immediatePreviousToken.isEmpty()\n|| immediatePreviousToken.get().textRange().endOffset() > cursor)) {\nreturn AttachmentPointContext.SOURCE;\n}\nif (immediatePreviousToken.isEmpty()) {\nreturn AttachmentPointContext.ANY;\n}\nSyntaxKind immediatePreviousTokenKind = immediatePreviousToken.get().kind();\nif (immediatePreviousTokenKind == SyntaxKind.OBJECT_KEYWORD) {\nreturn AttachmentPointContext.OBJECT;\n}\nif (immediatePreviousTokenKind == SyntaxKind.SERVICE_KEYWORD) {\nreturn AttachmentPointContext.SERVICE;\n}\nif (immediatePreviousTokenKind == SyntaxKind.REMOTE_KEYWORD) {\nreturn AttachmentPointContext.REMOTE;\n}\nif (immediatePreviousTokenKind == SyntaxKind.RECORD_KEYWORD) {\nreturn AttachmentPointContext.RECORD;\n}\nreturn AttachmentPointContext.NONE;\n}\nprivate Optional attachmentPointAtCursor(BallerinaCompletionContext context,\nSeparatedNodeList nodes) {\nif (nodes.isEmpty()) {\nreturn Optional.empty();\n}\nint cursor = context.getCursorPositionInTree();\nint separatorIndex = -1;\nfor (int i = nodes.separatorSize(); i > 0; i--) {\nToken separator = nodes.getSeparator(i - 1);\nif (separator.textRange().endOffset() <= cursor) {\nseparatorIndex = i - 1;\nbreak;\n}\n}\nint nodeIndex = separatorIndex + 1;\nif (nodeIndex > nodes.size() || nodes.get(nodeIndex).isMissing()) {\nreturn Optional.empty();\n}\nreturn Optional.of((AnnotationAttachPointNode) nodes.get(nodeIndex));\n}\nprivate enum AttachmentPointContext {\nANY,\nSOURCE,\nDUAL,\nOBJECT,\nSERVICE,\nREMOTE,\nRECORD,\nNONE\n}\n}" + }, + { + "comment": "Sorry, I missed this test.", + "method_body": "private Object[] generateValues(LogicalType type) {\nRandom rnd = new Random();\nint seedNum = RECORD_NUM / 5;\nObject[] seeds = new Object[seedNum];\nseeds[0] = null;\nseeds[1] = value1(type, rnd);\nseeds[2] = value2(type, rnd);\nseeds[3] = value3(type, rnd);\nfor (int i = 4; i < seeds.length; i++) {\nswitch (type.getTypeRoot()) {\ncase BOOLEAN:\nseeds[i] = rnd.nextBoolean();\nbreak;\ncase TINYINT:\nseeds[i] = (byte) rnd.nextLong();\nbreak;\ncase SMALLINT:\nseeds[i] = (short) rnd.nextLong();\nbreak;\ncase INTEGER:\nseeds[i] = rnd.nextInt();\nbreak;\ncase BIGINT:\nseeds[i] = rnd.nextLong();\nbreak;\ncase FLOAT:\nseeds[i] = 
rnd.nextFloat() * rnd.nextLong();\nbreak;\ncase DOUBLE:\nseeds[i] = rnd.nextDouble() * rnd.nextLong();\nbreak;\ncase VARCHAR:\nseeds[i] = BinaryString.fromString(RandomStringUtils.random(rnd.nextInt(20)));\nbreak;\ncase DECIMAL:\nDecimalType decimalType = (DecimalType) type;\nBigDecimal decimal = new BigDecimal(\nrnd.nextInt()).divide(\nnew BigDecimal(ThreadLocalRandom.current().nextInt(1, 256)),\nThreadLocalRandom.current().nextInt(1, 30), BigDecimal.ROUND_HALF_EVEN);\nseeds[i] = Decimal.fromBigDecimal(decimal, decimalType.getPrecision(), decimalType.getScale());\nbreak;\ncase ARRAY:\ncase VARBINARY:\nbyte[] bytes = new byte[rnd.nextInt(16) + 1];\nrnd.nextBytes(bytes);\nseeds[i] = type instanceof VarBinaryType ? bytes : BinaryArray.fromPrimitiveArray(bytes);\nbreak;\ncase ROW:\nRowType rowType = (RowType) type;\nif (rowType.getFields().get(0).getType().getTypeRoot() == INTEGER) {\nseeds[i] = GenericRow.of(rnd.nextInt());\n} else {\nseeds[i] = GenericRow.of(GenericRow.of(rnd.nextInt()));\n}\nbreak;\ncase ANY:\nseeds[i] = new BinaryGeneric<>(rnd.nextInt());\nbreak;\ndefault:\nthrow new RuntimeException(\"Not support!\");\n}\n}\nObject[] results = new Object[RECORD_NUM];\nfor (int i = 0; i < RECORD_NUM; i++) {\nresults[i] = seeds[rnd.nextInt(seedNum)];\n}\nreturn results;\n}", + "target_code": "throw new RuntimeException(\"Not support!\");", + "method_body_after": "private Object[] generateValues(LogicalType type) {\nRandom rnd = new Random();\nint seedNum = RECORD_NUM / 5;\nObject[] seeds = new Object[seedNum];\nseeds[0] = null;\nseeds[1] = value1(type, rnd);\nseeds[2] = value2(type, rnd);\nseeds[3] = value3(type, rnd);\nfor (int i = 4; i < seeds.length; i++) {\nswitch (type.getTypeRoot()) {\ncase BOOLEAN:\nseeds[i] = rnd.nextBoolean();\nbreak;\ncase TINYINT:\nseeds[i] = (byte) rnd.nextLong();\nbreak;\ncase SMALLINT:\nseeds[i] = (short) rnd.nextLong();\nbreak;\ncase INTEGER:\nseeds[i] = rnd.nextInt();\nbreak;\ncase BIGINT:\nseeds[i] = rnd.nextLong();\nbreak;\ncase FLOAT:\nseeds[i] = rnd.nextFloat() * rnd.nextLong();\nbreak;\ncase DOUBLE:\nseeds[i] = rnd.nextDouble() * rnd.nextLong();\nbreak;\ncase VARCHAR:\nseeds[i] = BinaryString.fromString(RandomStringUtils.random(rnd.nextInt(20)));\nbreak;\ncase DECIMAL:\nDecimalType decimalType = (DecimalType) type;\nBigDecimal decimal = new BigDecimal(\nrnd.nextInt()).divide(\nnew BigDecimal(ThreadLocalRandom.current().nextInt(1, 256)),\nThreadLocalRandom.current().nextInt(1, 30), BigDecimal.ROUND_HALF_EVEN);\nseeds[i] = Decimal.fromBigDecimal(decimal, decimalType.getPrecision(), decimalType.getScale());\nbreak;\ncase ARRAY:\ncase VARBINARY:\nbyte[] bytes = new byte[rnd.nextInt(16) + 1];\nrnd.nextBytes(bytes);\nseeds[i] = type instanceof VarBinaryType ? 
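`generateValues` above follows a seed-pool pattern: a small pool of interesting seeds (including `null` and boundary values) is sampled repeatedly, so the generated records contain many duplicates and edge cases that exercise the sort's equality and null-handling paths. A minimal sketch of the same idea for plain integers; all names here are hypothetical:

```java
import java.util.Random;

public class SeedPoolSketch {
    public static void main(String[] args) {
        final int recordNum = 20;
        Random rnd = new Random(42); // fixed seed keeps the demo deterministic
        // A handful of "interesting" seeds: null plus boundary and random values.
        Integer[] seeds = {null, Integer.MIN_VALUE, 0, Integer.MAX_VALUE, rnd.nextInt()};
        // Sample records from the pool so duplicates occur frequently.
        Integer[] records = new Integer[recordNum];
        for (int i = 0; i < recordNum; i++) {
            records[i] = seeds[rnd.nextInt(seeds.length)];
        }
        for (Integer r : records) {
            System.out.println(r);
        }
    }
}
```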
bytes : BinaryArray.fromPrimitiveArray(bytes);\nbreak;\ncase ROW:\nRowType rowType = (RowType) type;\nif (rowType.getFields().get(0).getType().getTypeRoot() == INTEGER) {\nseeds[i] = GenericRow.of(rnd.nextInt());\n} else {\nseeds[i] = GenericRow.of(GenericRow.of(rnd.nextInt()));\n}\nbreak;\ncase ANY:\nseeds[i] = new BinaryGeneric<>(rnd.nextInt());\nbreak;\ndefault:\nthrow new RuntimeException(\"Not support!\");\n}\n}\nObject[] results = new Object[RECORD_NUM];\nfor (int i = 0; i < RECORD_NUM; i++) {\nresults[i] = seeds[rnd.nextInt(seedNum)];\n}\nreturn results;\n}", + "context_before": "class SortCodeGeneratorTest {\nprivate static final int RECORD_NUM = 3000;\nprivate final LogicalType[] types = new LogicalType[]{\nnew BooleanType(),\nnew TinyIntType(),\nnew SmallIntType(),\nnew IntType(),\nnew BigIntType(),\nnew FloatType(),\nnew DoubleType(),\nnew VarCharType(VarCharType.MAX_LENGTH),\nnew DecimalType(18, 2),\nnew DecimalType(38, 18),\nnew VarBinaryType(VarBinaryType.MAX_LENGTH),\nnew ArrayType(new TinyIntType()),\nRowType.of(new IntType()),\nRowType.of(RowType.of(new IntType())),\nnew TypeInformationAnyType<>(Types.INT)\n};\nprivate int[] fields;\nprivate int[] keys;\nprivate boolean[] orders;\nprivate boolean[] nullsIsLast;\nprivate static final DataType INT_ROW_TYPE = DataTypes.ROW(DataTypes.FIELD(\"f0\", DataTypes.INT())).bridgedTo(Row.class);\nprivate static final DataFormatConverters.DataFormatConverter INT_ROW_CONV =\nDataFormatConverters.getConverterForDataType(INT_ROW_TYPE);\nprivate static final TypeComparator INT_ROW_COMP = new RowTypeInfo(Types.INT).createComparator(\nnew int[] {0}, new boolean[] {true}, 0, new ExecutionConfig());\nprivate static final DataFormatConverters.DataFormatConverter NEST_ROW_CONV =\nDataFormatConverters.getConverterForDataType(\nDataTypes.ROW(DataTypes.FIELD(\"f0\", INT_ROW_TYPE)).bridgedTo(Row.class));\nprivate static final TypeComparator NEST_ROW_COMP = new RowTypeInfo(new RowTypeInfo(Types.INT)).createComparator(\nnew int[] {0}, new boolean[] {true}, 0, new ExecutionConfig());\n@Test\npublic void testMultiKeys() throws Exception {\nfor (int i = 0; i < 100; i++) {\nrandomKeysAndOrders();\ntestInner();\n}\n}\n@Test\npublic void testOneKey() throws Exception {\nfor (int time = 0; time < 100; time++) {\nRandom rnd = new Random();\nfields = new int[rnd.nextInt(9) + 1];\nfor (int i = 0; i < fields.length; i++) {\nfields[i] = rnd.nextInt(types.length);\n}\nkeys = new int[] {0};\norders = new boolean[] {rnd.nextBoolean()};\nnullsIsLast = SortUtil.getNullDefaultOrders(orders);\ntestInner();\n}\n}\nprivate void randomKeysAndOrders() {\nRandom rnd = new Random();\nfields = new int[rnd.nextInt(9) + 1];\nfor (int i = 0; i < fields.length; i++) {\nfields[i] = rnd.nextInt(types.length);\n}\nkeys = new int[rnd.nextInt(fields.length) + 1];\nLinkedList indexQueue = new LinkedList<>();\nfor (int i = 0; i < fields.length; i++) {\nindexQueue.add(i);\n}\nCollections.shuffle(indexQueue);\norders = new boolean[keys.length];\nfor (int i = 0; i < keys.length; i++) {\nkeys[i] = indexQueue.poll();\norders[i] = rnd.nextBoolean();\n}\nnullsIsLast = SortUtil.getNullDefaultOrders(orders);\n}\nprivate Object[] shuffle(Object[] objects) {\nCollections.shuffle(Arrays.asList(objects));\nreturn objects;\n}\nprivate BinaryRow row(int i, Object[][] values) {\nBinaryRow row = new BinaryRow(fields.length);\nBinaryRowWriter writer = new BinaryRowWriter(row);\nfor (int j = 0; j < fields.length; j++) {\nObject value = values[j][i];\nif (value == null) {\nwriter.setNullAt(j);\n} else 
{\nBinaryWriter.write(writer, j, value, types[fields[j]],\nInternalSerializers.create(types[fields[j]], new ExecutionConfig()));\n}\n}\nwriter.complete();\nreturn row;\n}\nprivate BinaryRow[] getTestData() {\nBinaryRow[] result = new BinaryRow[RECORD_NUM];\nObject[][] values = new Object[fields.length][];\nfor (int i = 0; i < fields.length; i++) {\nvalues[i] = shuffle(generateValues(types[fields[i]]));\n}\nfor (int i = 0; i < RECORD_NUM; i++) {\nresult[i] = row(i, values);\n}\nreturn result;\n}\nprivate Object value1(LogicalType type, Random rnd) {\nswitch (type.getTypeRoot()) {\ncase BOOLEAN:\nreturn false;\ncase TINYINT:\nreturn Byte.MIN_VALUE;\ncase SMALLINT:\nreturn Short.MIN_VALUE;\ncase INTEGER:\nreturn Integer.MIN_VALUE;\ncase BIGINT:\nreturn Long.MIN_VALUE;\ncase FLOAT:\nreturn Float.MIN_VALUE;\ncase DOUBLE:\nreturn Double.MIN_VALUE;\ncase VARCHAR:\nreturn BinaryString.fromString(\"\");\ncase DECIMAL:\nDecimalType decimalType = (DecimalType) type;\nreturn Decimal.fromBigDecimal(new BigDecimal(Integer.MIN_VALUE),\ndecimalType.getPrecision(), decimalType.getScale());\ncase ARRAY:\nbyte[] bytes = new byte[rnd.nextInt(7) + 1];\nrnd.nextBytes(bytes);\nBinaryArray array = BinaryArray.fromPrimitiveArray(bytes);\nfor (int i = 0; i < bytes.length; i++) {\narray.setNullByte(i);\n}\nreturn array;\ncase VARBINARY:\nbyte[] bytes2 = new byte[rnd.nextInt(7) + 1];\nrnd.nextBytes(bytes2);\nreturn bytes2;\ncase ROW:\nreturn GenericRow.of(new Object[]{null});\ncase ANY:\nreturn new BinaryGeneric<>(rnd.nextInt());\ndefault:\nthrow new RuntimeException(\"Not support!\");\n}\n}\nprivate Object value2(LogicalType type, Random rnd) {\nswitch (type.getTypeRoot()) {\ncase BOOLEAN:\nreturn false;\ncase TINYINT:\nreturn (byte) 0;\ncase SMALLINT:\nreturn (short) 0;\ncase INTEGER:\nreturn 0;\ncase BIGINT:\nreturn 0L;\ncase FLOAT:\nreturn 0f;\ncase DOUBLE:\nreturn 0d;\ncase VARCHAR:\nreturn BinaryString.fromString(\"0\");\ncase DECIMAL:\nDecimalType decimalType = (DecimalType) type;\nreturn Decimal.fromBigDecimal(new BigDecimal(0),\ndecimalType.getPrecision(), decimalType.getScale());\ncase ARRAY:\ncase VARBINARY:\nbyte[] bytes = new byte[rnd.nextInt(7) + 10];\nrnd.nextBytes(bytes);\nreturn type instanceof VarBinaryType ? bytes : BinaryArray.fromPrimitiveArray(bytes);\ncase ROW:\nRowType rowType = (RowType) type;\nif (rowType.getFields().get(0).getType().getTypeRoot() == INTEGER) {\nreturn GenericRow.of(rnd.nextInt());\n} else {\nreturn GenericRow.of(GenericRow.of(new Object[]{null}));\n}\ncase ANY:\nreturn new BinaryGeneric<>(rnd.nextInt());\ndefault:\nthrow new RuntimeException(\"Not support!\");\n}\n}\nprivate Object value3(LogicalType type, Random rnd) {\nswitch (type.getTypeRoot()) {\ncase BOOLEAN:\nreturn true;\ncase TINYINT:\nreturn Byte.MAX_VALUE;\ncase SMALLINT:\nreturn Short.MAX_VALUE;\ncase INTEGER:\nreturn Integer.MAX_VALUE;\ncase BIGINT:\nreturn Long.MAX_VALUE;\ncase FLOAT:\nreturn Float.MAX_VALUE;\ncase DOUBLE:\nreturn Double.MAX_VALUE;\ncase VARCHAR:\nreturn BinaryString.fromString(RandomStringUtils.random(100));\ncase DECIMAL:\nDecimalType decimalType = (DecimalType) type;\nreturn Decimal.fromBigDecimal(new BigDecimal(Integer.MAX_VALUE),\ndecimalType.getPrecision(), decimalType.getScale());\ncase ARRAY:\ncase VARBINARY:\nbyte[] bytes = new byte[rnd.nextInt(100) + 100];\nrnd.nextBytes(bytes);\nreturn type instanceof VarBinaryType ? 
bytes : BinaryArray.fromPrimitiveArray(bytes);\ncase ROW:\nRowType rowType = (RowType) type;\nif (rowType.getFields().get(0).getType().getTypeRoot() == INTEGER) {\nreturn GenericRow.of(rnd.nextInt());\n} else {\nreturn GenericRow.of(GenericRow.of(rnd.nextInt()));\n}\ncase ANY:\nreturn new BinaryGeneric<>(rnd.nextInt());\ndefault:\nthrow new RuntimeException(\"Not support!\");\n}\n}\nprivate LogicalType[] getFieldTypes() {\nLogicalType[] result = new LogicalType[fields.length];\nfor (int i = 0; i < fields.length; i++) {\nresult[i] = types[fields[i]];\n}\nreturn result;\n}\nprivate LogicalType[] getKeyTypes() {\nLogicalType[] result = new LogicalType[keys.length];\nfor (int i = 0; i < keys.length; i++) {\nresult[i] = types[fields[keys[i]]];\n}\nreturn result;\n}\nprivate void testInner() throws Exception {\nList segments = new ArrayList<>();\nfor (int i = 0; i < 100; i++) {\nsegments.add(MemorySegmentFactory.wrap(new byte[32768]));\n}\nLogicalType[] fieldTypes = getFieldTypes();\nLogicalType[] keyTypes = getKeyTypes();\nTuple2 tuple2 = getSortBaseWithNulls(\nthis.getClass().getSimpleName(), keyTypes, keys, orders, nullsIsLast);\nBinaryRowSerializer serializer = new BinaryRowSerializer(fieldTypes.length);\nBinaryInMemorySortBuffer sortBuffer = BinaryInMemorySortBuffer.createBuffer(\ntuple2.f0, (AbstractRowSerializer) serializer, serializer,\ntuple2.f1, segments);\nBinaryRow[] dataArray = getTestData();\nList data = Arrays.asList(dataArray.clone());\nList binaryRows = Arrays.asList(dataArray.clone());\nCollections.shuffle(binaryRows);\nfor (BinaryRow row : binaryRows) {\nif (!sortBuffer.write(row)) {\nthrow new RuntimeException();\n}\n}\nnew QuickSort().sort(sortBuffer);\nMutableObjectIterator iter = sortBuffer.getIterator();\nList result = new ArrayList<>();\nBinaryRow row = serializer.createInstance();\nwhile ((row = iter.next(row)) != null) {\nresult.add(row.copy());\n}\ndata.sort((o1, o2) -> {\nfor (int i = 0; i < keys.length; i++) {\nLogicalType t = types[fields[keys[i]]];\nboolean order = orders[i];\nObject first = null;\nObject second = null;\nif (!o1.isNullAt(keys[i])) {\nfirst = TypeGetterSetters.get(o1, keys[i], keyTypes[i]);\n}\nif (!o2.isNullAt(keys[i])) {\nsecond = TypeGetterSetters.get(o2, keys[i], keyTypes[i]);\n}\nif (first != null || second != null) {\nif (first == null) {\nreturn order ? -1 : 1;\n}\nif (second == null) {\nreturn order ? 1 : -1;\n}\nif (first instanceof Comparable) {\nint ret = ((Comparable) first).compareTo(second);\nif (ret != 0) {\nreturn order ? ret : -ret;\n}\n} else if (t.getTypeRoot() == LogicalTypeRoot.ARRAY) {\nBinaryArray leftArray = (BinaryArray) first;\nBinaryArray rightArray = (BinaryArray) second;\nint minLength = Math.min(leftArray.numElements(), rightArray.numElements());\nfor (int j = 0; j < minLength; j++) {\nboolean isNullLeft = leftArray.isNullAt(j);\nboolean isNullRight = rightArray.isNullAt(j);\nif (isNullLeft && isNullRight) {\n} else if (isNullLeft) {\nreturn order ? -1 : 1;\n} else if (isNullRight) {\nreturn order ? 1 : -1;\n} else {\nint comp = Byte.compare(leftArray.getByte(j), rightArray.getByte(j));\nif (comp != 0) {\nreturn order ? comp : -comp;\n}\n}\n}\nif (leftArray.numElements() < rightArray.numElements()) {\nreturn order ? -1 : 1;\n} else if (leftArray.numElements() > rightArray.numElements()) {\nreturn order ? 
1 : -1;\n}\n} else if (t.getTypeRoot() == LogicalTypeRoot.VARBINARY) {\nint comp = org.apache.flink.table.runtime.operators.sort.SortUtil.compareBinary(\n(byte[]) first, (byte[]) second);\nif (comp != 0) {\nreturn order ? comp : -comp;\n}\n} else if (t.getTypeRoot() == LogicalTypeRoot.ROW) {\nRowType rowType = (RowType) t;\nint comp;\nif (rowType.getFields().get(0).getType() instanceof IntType) {\ncomp = INT_ROW_COMP.compare(INT_ROW_CONV.toExternal(first),\nINT_ROW_CONV.toExternal(second));\n} else {\ncomp = NEST_ROW_COMP.compare(NEST_ROW_CONV.toExternal(first),\nNEST_ROW_CONV.toExternal(second));\n}\nif (comp != 0) {\nreturn order ? comp : -comp;\n}\n} else if (t.getTypeRoot() == LogicalTypeRoot.ANY) {\nInteger i1 = BinaryGeneric.getJavaObjectFromBinaryGeneric((BinaryGeneric) first, IntSerializer.INSTANCE);\nInteger i2 = BinaryGeneric.getJavaObjectFromBinaryGeneric((BinaryGeneric) second, IntSerializer.INSTANCE);\nint comp = Integer.compare(i1, i2);\nif (comp != 0) {\nreturn order ? comp : -comp;\n}\n} else {\nthrow new RuntimeException();\n}\n}\n}\nreturn 0;\n});\nStringBuilder builder = new StringBuilder();\nfor (int i = 0; i < data.size(); i++) {\nbuilder.append(\"\\n\")\n.append(\"expect: \")\n.append(data.get(i).toOriginString(fieldTypes))\n.append(\"; actual: \")\n.append(result.get(i).toOriginString(fieldTypes));\n}\nbuilder.append(\"\\n\").append(\"types: \").append(Arrays.asList(fieldTypes));\nbuilder.append(\"\\n\").append(\"keys: \").append(Arrays.toString(keys));\nString msg = builder.toString();\nfor (int i = 0; i < data.size(); i++) {\nfor (int j = 0; j < keys.length; j++) {\nboolean isNull1 = data.get(i).isNullAt(keys[j]);\nboolean isNull2 = result.get(i).isNullAt(keys[j]);\nAssert.assertEquals(msg, isNull1, isNull2);\nif (!isNull1 || !isNull2) {\nObject o1 = TypeGetterSetters.get(data.get(i), keys[j], keyTypes[j]);\nObject o2 = TypeGetterSetters.get(result.get(i), keys[j], keyTypes[j]);\nif (keyTypes[j] instanceof VarBinaryType) {\nAssert.assertArrayEquals(msg, (byte[]) o1, (byte[]) o2);\n} else {\nAssert.assertEquals(msg, o1, o2);\n}\n}\n}\n}\n}\npublic static Tuple2 getSortBaseWithNulls(\nString namePrefix, LogicalType[] keyTypes, int[] keys, boolean[] orders, boolean[] nullsIsLast)\nthrows IllegalAccessException, InstantiationException {\nSortCodeGenerator generator = new SortCodeGenerator(\nnew TableConfig(), keys, keyTypes, orders, nullsIsLast);\nGeneratedNormalizedKeyComputer computer = generator.generateNormalizedKeyComputer(namePrefix + \"Computer\");\nGeneratedRecordComparator comparator = generator.generateRecordComparator(namePrefix + \"Comparator\");\nClassLoader cl = Thread.currentThread().getContextClassLoader();\nreturn new Tuple2<>(computer.newInstance(cl), comparator.newInstance(cl));\n}\n}", + "context_after": "class SortCodeGeneratorTest {\nprivate static final int RECORD_NUM = 3000;\nprivate final LogicalType[] types = new LogicalType[]{\nnew BooleanType(),\nnew TinyIntType(),\nnew SmallIntType(),\nnew IntType(),\nnew BigIntType(),\nnew FloatType(),\nnew DoubleType(),\nnew VarCharType(VarCharType.MAX_LENGTH),\nnew DecimalType(18, 2),\nnew DecimalType(38, 18),\nnew VarBinaryType(VarBinaryType.MAX_LENGTH),\nnew ArrayType(new TinyIntType()),\nRowType.of(new IntType()),\nRowType.of(RowType.of(new IntType())),\nnew TypeInformationAnyType<>(Types.INT)\n};\nprivate int[] fields;\nprivate int[] keys;\nprivate boolean[] orders;\nprivate boolean[] nullsIsLast;\nprivate static final DataType INT_ROW_TYPE = DataTypes.ROW(DataTypes.FIELD(\"f0\", 
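The expected-order comparator in `testInner` compares array values lexicographically: element by element over the common prefix, then falling back to comparing lengths. A self-contained sketch of that comparison for raw `byte[]` values — a hypothetical helper, not the Flink comparator itself:

```java
public class LexicographicArrayCompareSketch {
    // Compare element by element over the common prefix, then by length,
    // mirroring the BinaryArray branch of the expected-order comparator.
    static int compareByteArrays(byte[] left, byte[] right) {
        int minLength = Math.min(left.length, right.length);
        for (int i = 0; i < minLength; i++) {
            int comp = Byte.compare(left[i], right[i]);
            if (comp != 0) {
                return comp;
            }
        }
        return Integer.compare(left.length, right.length);
    }

    public static void main(String[] args) {
        System.out.println(compareByteArrays(new byte[]{1, 2}, new byte[]{1, 3}) < 0);    // true
        System.out.println(compareByteArrays(new byte[]{1, 2}, new byte[]{1, 2, 0}) < 0); // true: shorter prefix sorts first
    }
}
```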
DataTypes.INT())).bridgedTo(Row.class);\nprivate static final DataFormatConverters.DataFormatConverter INT_ROW_CONV =\nDataFormatConverters.getConverterForDataType(INT_ROW_TYPE);\nprivate static final TypeComparator INT_ROW_COMP = new RowTypeInfo(Types.INT).createComparator(\nnew int[] {0}, new boolean[] {true}, 0, new ExecutionConfig());\nprivate static final DataFormatConverters.DataFormatConverter NEST_ROW_CONV =\nDataFormatConverters.getConverterForDataType(\nDataTypes.ROW(DataTypes.FIELD(\"f0\", INT_ROW_TYPE)).bridgedTo(Row.class));\nprivate static final TypeComparator NEST_ROW_COMP = new RowTypeInfo(new RowTypeInfo(Types.INT)).createComparator(\nnew int[] {0}, new boolean[] {true}, 0, new ExecutionConfig());\n@Test\npublic void testMultiKeys() throws Exception {\nfor (int i = 0; i < 100; i++) {\nrandomKeysAndOrders();\ntestInner();\n}\n}\n@Test\npublic void testOneKey() throws Exception {\nfor (int time = 0; time < 100; time++) {\nRandom rnd = new Random();\nfields = new int[rnd.nextInt(9) + 1];\nfor (int i = 0; i < fields.length; i++) {\nfields[i] = rnd.nextInt(types.length);\n}\nkeys = new int[] {0};\norders = new boolean[] {rnd.nextBoolean()};\nnullsIsLast = SortUtil.getNullDefaultOrders(orders);\ntestInner();\n}\n}\nprivate void randomKeysAndOrders() {\nRandom rnd = new Random();\nfields = new int[rnd.nextInt(9) + 1];\nfor (int i = 0; i < fields.length; i++) {\nfields[i] = rnd.nextInt(types.length);\n}\nkeys = new int[rnd.nextInt(fields.length) + 1];\nLinkedList indexQueue = new LinkedList<>();\nfor (int i = 0; i < fields.length; i++) {\nindexQueue.add(i);\n}\nCollections.shuffle(indexQueue);\norders = new boolean[keys.length];\nfor (int i = 0; i < keys.length; i++) {\nkeys[i] = indexQueue.poll();\norders[i] = rnd.nextBoolean();\n}\nnullsIsLast = SortUtil.getNullDefaultOrders(orders);\n}\nprivate Object[] shuffle(Object[] objects) {\nCollections.shuffle(Arrays.asList(objects));\nreturn objects;\n}\nprivate BinaryRow row(int i, Object[][] values) {\nBinaryRow row = new BinaryRow(fields.length);\nBinaryRowWriter writer = new BinaryRowWriter(row);\nfor (int j = 0; j < fields.length; j++) {\nObject value = values[j][i];\nif (value == null) {\nwriter.setNullAt(j);\n} else {\nBinaryWriter.write(writer, j, value, types[fields[j]],\nInternalSerializers.create(types[fields[j]], new ExecutionConfig()));\n}\n}\nwriter.complete();\nreturn row;\n}\nprivate BinaryRow[] getTestData() {\nBinaryRow[] result = new BinaryRow[RECORD_NUM];\nObject[][] values = new Object[fields.length][];\nfor (int i = 0; i < fields.length; i++) {\nvalues[i] = shuffle(generateValues(types[fields[i]]));\n}\nfor (int i = 0; i < RECORD_NUM; i++) {\nresult[i] = row(i, values);\n}\nreturn result;\n}\nprivate Object value1(LogicalType type, Random rnd) {\nswitch (type.getTypeRoot()) {\ncase BOOLEAN:\nreturn false;\ncase TINYINT:\nreturn Byte.MIN_VALUE;\ncase SMALLINT:\nreturn Short.MIN_VALUE;\ncase INTEGER:\nreturn Integer.MIN_VALUE;\ncase BIGINT:\nreturn Long.MIN_VALUE;\ncase FLOAT:\nreturn Float.MIN_VALUE;\ncase DOUBLE:\nreturn Double.MIN_VALUE;\ncase VARCHAR:\nreturn BinaryString.fromString(\"\");\ncase DECIMAL:\nDecimalType decimalType = (DecimalType) type;\nreturn Decimal.fromBigDecimal(new BigDecimal(Integer.MIN_VALUE),\ndecimalType.getPrecision(), decimalType.getScale());\ncase ARRAY:\nbyte[] bytes = new byte[rnd.nextInt(7) + 1];\nrnd.nextBytes(bytes);\nBinaryArray array = BinaryArray.fromPrimitiveArray(bytes);\nfor (int i = 0; i < bytes.length; i++) {\narray.setNullByte(i);\n}\nreturn array;\ncase 
VARBINARY:\nbyte[] bytes2 = new byte[rnd.nextInt(7) + 1];\nrnd.nextBytes(bytes2);\nreturn bytes2;\ncase ROW:\nreturn GenericRow.of(new Object[]{null});\ncase ANY:\nreturn new BinaryGeneric<>(rnd.nextInt());\ndefault:\nthrow new RuntimeException(\"Not support!\");\n}\n}\nprivate Object value2(LogicalType type, Random rnd) {\nswitch (type.getTypeRoot()) {\ncase BOOLEAN:\nreturn false;\ncase TINYINT:\nreturn (byte) 0;\ncase SMALLINT:\nreturn (short) 0;\ncase INTEGER:\nreturn 0;\ncase BIGINT:\nreturn 0L;\ncase FLOAT:\nreturn 0f;\ncase DOUBLE:\nreturn 0d;\ncase VARCHAR:\nreturn BinaryString.fromString(\"0\");\ncase DECIMAL:\nDecimalType decimalType = (DecimalType) type;\nreturn Decimal.fromBigDecimal(new BigDecimal(0),\ndecimalType.getPrecision(), decimalType.getScale());\ncase ARRAY:\ncase VARBINARY:\nbyte[] bytes = new byte[rnd.nextInt(7) + 10];\nrnd.nextBytes(bytes);\nreturn type instanceof VarBinaryType ? bytes : BinaryArray.fromPrimitiveArray(bytes);\ncase ROW:\nRowType rowType = (RowType) type;\nif (rowType.getFields().get(0).getType().getTypeRoot() == INTEGER) {\nreturn GenericRow.of(rnd.nextInt());\n} else {\nreturn GenericRow.of(GenericRow.of(new Object[]{null}));\n}\ncase ANY:\nreturn new BinaryGeneric<>(rnd.nextInt());\ndefault:\nthrow new RuntimeException(\"Not support!\");\n}\n}\nprivate Object value3(LogicalType type, Random rnd) {\nswitch (type.getTypeRoot()) {\ncase BOOLEAN:\nreturn true;\ncase TINYINT:\nreturn Byte.MAX_VALUE;\ncase SMALLINT:\nreturn Short.MAX_VALUE;\ncase INTEGER:\nreturn Integer.MAX_VALUE;\ncase BIGINT:\nreturn Long.MAX_VALUE;\ncase FLOAT:\nreturn Float.MAX_VALUE;\ncase DOUBLE:\nreturn Double.MAX_VALUE;\ncase VARCHAR:\nreturn BinaryString.fromString(RandomStringUtils.random(100));\ncase DECIMAL:\nDecimalType decimalType = (DecimalType) type;\nreturn Decimal.fromBigDecimal(new BigDecimal(Integer.MAX_VALUE),\ndecimalType.getPrecision(), decimalType.getScale());\ncase ARRAY:\ncase VARBINARY:\nbyte[] bytes = new byte[rnd.nextInt(100) + 100];\nrnd.nextBytes(bytes);\nreturn type instanceof VarBinaryType ? 
bytes : BinaryArray.fromPrimitiveArray(bytes);\ncase ROW:\nRowType rowType = (RowType) type;\nif (rowType.getFields().get(0).getType().getTypeRoot() == INTEGER) {\nreturn GenericRow.of(rnd.nextInt());\n} else {\nreturn GenericRow.of(GenericRow.of(rnd.nextInt()));\n}\ncase ANY:\nreturn new BinaryGeneric<>(rnd.nextInt());\ndefault:\nthrow new RuntimeException(\"Not support!\");\n}\n}\nprivate LogicalType[] getFieldTypes() {\nLogicalType[] result = new LogicalType[fields.length];\nfor (int i = 0; i < fields.length; i++) {\nresult[i] = types[fields[i]];\n}\nreturn result;\n}\nprivate LogicalType[] getKeyTypes() {\nLogicalType[] result = new LogicalType[keys.length];\nfor (int i = 0; i < keys.length; i++) {\nresult[i] = types[fields[keys[i]]];\n}\nreturn result;\n}\nprivate void testInner() throws Exception {\nList segments = new ArrayList<>();\nfor (int i = 0; i < 100; i++) {\nsegments.add(MemorySegmentFactory.wrap(new byte[32768]));\n}\nLogicalType[] fieldTypes = getFieldTypes();\nLogicalType[] keyTypes = getKeyTypes();\nTuple2 tuple2 = getSortBaseWithNulls(\nthis.getClass().getSimpleName(), keyTypes, keys, orders, nullsIsLast);\nBinaryRowSerializer serializer = new BinaryRowSerializer(fieldTypes.length);\nBinaryInMemorySortBuffer sortBuffer = BinaryInMemorySortBuffer.createBuffer(\ntuple2.f0, (AbstractRowSerializer) serializer, serializer,\ntuple2.f1, segments);\nBinaryRow[] dataArray = getTestData();\nList data = Arrays.asList(dataArray.clone());\nList binaryRows = Arrays.asList(dataArray.clone());\nCollections.shuffle(binaryRows);\nfor (BinaryRow row : binaryRows) {\nif (!sortBuffer.write(row)) {\nthrow new RuntimeException();\n}\n}\nnew QuickSort().sort(sortBuffer);\nMutableObjectIterator iter = sortBuffer.getIterator();\nList result = new ArrayList<>();\nBinaryRow row = serializer.createInstance();\nwhile ((row = iter.next(row)) != null) {\nresult.add(row.copy());\n}\ndata.sort((o1, o2) -> {\nfor (int i = 0; i < keys.length; i++) {\nLogicalType t = types[fields[keys[i]]];\nboolean order = orders[i];\nObject first = null;\nObject second = null;\nif (!o1.isNullAt(keys[i])) {\nfirst = TypeGetterSetters.get(o1, keys[i], keyTypes[i]);\n}\nif (!o2.isNullAt(keys[i])) {\nsecond = TypeGetterSetters.get(o2, keys[i], keyTypes[i]);\n}\nif (first != null || second != null) {\nif (first == null) {\nreturn order ? -1 : 1;\n}\nif (second == null) {\nreturn order ? 1 : -1;\n}\nif (first instanceof Comparable) {\nint ret = ((Comparable) first).compareTo(second);\nif (ret != 0) {\nreturn order ? ret : -ret;\n}\n} else if (t.getTypeRoot() == LogicalTypeRoot.ARRAY) {\nBinaryArray leftArray = (BinaryArray) first;\nBinaryArray rightArray = (BinaryArray) second;\nint minLength = Math.min(leftArray.numElements(), rightArray.numElements());\nfor (int j = 0; j < minLength; j++) {\nboolean isNullLeft = leftArray.isNullAt(j);\nboolean isNullRight = rightArray.isNullAt(j);\nif (isNullLeft && isNullRight) {\n} else if (isNullLeft) {\nreturn order ? -1 : 1;\n} else if (isNullRight) {\nreturn order ? 1 : -1;\n} else {\nint comp = Byte.compare(leftArray.getByte(j), rightArray.getByte(j));\nif (comp != 0) {\nreturn order ? comp : -comp;\n}\n}\n}\nif (leftArray.numElements() < rightArray.numElements()) {\nreturn order ? -1 : 1;\n} else if (leftArray.numElements() > rightArray.numElements()) {\nreturn order ? 
1 : -1;\n}\n} else if (t.getTypeRoot() == LogicalTypeRoot.VARBINARY) {\nint comp = org.apache.flink.table.runtime.operators.sort.SortUtil.compareBinary(\n(byte[]) first, (byte[]) second);\nif (comp != 0) {\nreturn order ? comp : -comp;\n}\n} else if (t.getTypeRoot() == LogicalTypeRoot.ROW) {\nRowType rowType = (RowType) t;\nint comp;\nif (rowType.getFields().get(0).getType() instanceof IntType) {\ncomp = INT_ROW_COMP.compare(INT_ROW_CONV.toExternal(first),\nINT_ROW_CONV.toExternal(second));\n} else {\ncomp = NEST_ROW_COMP.compare(NEST_ROW_CONV.toExternal(first),\nNEST_ROW_CONV.toExternal(second));\n}\nif (comp != 0) {\nreturn order ? comp : -comp;\n}\n} else if (t.getTypeRoot() == LogicalTypeRoot.ANY) {\nInteger i1 = BinaryGeneric.getJavaObjectFromBinaryGeneric((BinaryGeneric) first, IntSerializer.INSTANCE);\nInteger i2 = BinaryGeneric.getJavaObjectFromBinaryGeneric((BinaryGeneric) second, IntSerializer.INSTANCE);\nint comp = Integer.compare(i1, i2);\nif (comp != 0) {\nreturn order ? comp : -comp;\n}\n} else {\nthrow new RuntimeException();\n}\n}\n}\nreturn 0;\n});\nStringBuilder builder = new StringBuilder();\nfor (int i = 0; i < data.size(); i++) {\nbuilder.append(\"\\n\")\n.append(\"expect: \")\n.append(data.get(i).toOriginString(fieldTypes))\n.append(\"; actual: \")\n.append(result.get(i).toOriginString(fieldTypes));\n}\nbuilder.append(\"\\n\").append(\"types: \").append(Arrays.asList(fieldTypes));\nbuilder.append(\"\\n\").append(\"keys: \").append(Arrays.toString(keys));\nString msg = builder.toString();\nfor (int i = 0; i < data.size(); i++) {\nfor (int j = 0; j < keys.length; j++) {\nboolean isNull1 = data.get(i).isNullAt(keys[j]);\nboolean isNull2 = result.get(i).isNullAt(keys[j]);\nAssert.assertEquals(msg, isNull1, isNull2);\nif (!isNull1 || !isNull2) {\nObject o1 = TypeGetterSetters.get(data.get(i), keys[j], keyTypes[j]);\nObject o2 = TypeGetterSetters.get(result.get(i), keys[j], keyTypes[j]);\nif (keyTypes[j] instanceof VarBinaryType) {\nAssert.assertArrayEquals(msg, (byte[]) o1, (byte[]) o2);\n} else if (keyTypes[j] instanceof TypeInformationAnyType) {\nassertThat(\nmsg,\n(BinaryGeneric) o1,\nequivalent((BinaryGeneric) o2, new BinaryGenericSerializer<>(IntSerializer.INSTANCE)));\n} else {\nAssert.assertEquals(msg, o1, o2);\n}\n}\n}\n}\n}\npublic static Tuple2 getSortBaseWithNulls(\nString namePrefix, LogicalType[] keyTypes, int[] keys, boolean[] orders, boolean[] nullsIsLast)\nthrows IllegalAccessException, InstantiationException {\nSortCodeGenerator generator = new SortCodeGenerator(\nnew TableConfig(), keys, keyTypes, orders, nullsIsLast);\nGeneratedNormalizedKeyComputer computer = generator.generateNormalizedKeyComputer(namePrefix + \"Computer\");\nGeneratedRecordComparator comparator = generator.generateRecordComparator(namePrefix + \"Comparator\");\nClassLoader cl = Thread.currentThread().getContextClassLoader();\nreturn new Tuple2<>(computer.newInstance(cl), comparator.newInstance(cl));\n}\n}" + }, + { + "comment": "I think it's better to move `if enableIncrementalCheckpointing` check here from `verifyRocksDBStateUploaderClosed` so that the uploader isn't created unnecessarily. 
We could also use `RocksDBOptions.CHECKPOINT_TRANSFER_THREAD_NUM.defaultValue()` instead of `4` here.", + "method_body": "public void setupRocksKeyedStateBackend() throws Exception {\nblocker = new OneShotLatch();\nwaiter = new OneShotLatch();\ntestStreamFactory = new BlockerCheckpointStreamFactory(1024 * 1024);\ntestStreamFactory.setBlockerLatch(blocker);\ntestStreamFactory.setWaiterLatch(waiter);\ntestStreamFactory.setAfterNumberInvocations(10);\nrocksDBStateUploader = spy(new RocksDBStateUploader(4));\nprepareRocksDB();\nkeyedStateBackend =\nRocksDBTestUtils.builderForTestDB(\nTEMP_FOLDER\n.newFolder(),\nIntSerializer.INSTANCE,\nspy(db),\ndefaultCFHandle,\noptionsContainer.getColumnOptions())\n.setEnableIncrementalCheckpointing(enableIncrementalCheckpointing)\n.setRocksDBStateUploader(rocksDBStateUploader)\n.build();\ntestState1 =\nkeyedStateBackend.getPartitionedState(\nVoidNamespace.INSTANCE,\nVoidNamespaceSerializer.INSTANCE,\nnew ValueStateDescriptor<>(\"TestState-1\", Integer.class, 0));\ntestState2 =\nkeyedStateBackend.getPartitionedState(\nVoidNamespace.INSTANCE,\nVoidNamespaceSerializer.INSTANCE,\nnew ValueStateDescriptor<>(\"TestState-2\", String.class, \"\"));\nallCreatedCloseables = new ArrayList<>();\ndoAnswer(\nnew Answer() {\n@Override\npublic Object answer(InvocationOnMock invocationOnMock)\nthrows Throwable {\nRocksIterator rocksIterator =\nspy((RocksIterator) invocationOnMock.callRealMethod());\nallCreatedCloseables.add(rocksIterator);\nreturn rocksIterator;\n}\n})\n.when(keyedStateBackend.db)\n.newIterator(any(ColumnFamilyHandle.class), any(ReadOptions.class));\ndoAnswer(\nnew Answer() {\n@Override\npublic Object answer(InvocationOnMock invocationOnMock)\nthrows Throwable {\nSnapshot snapshot =\nspy((Snapshot) invocationOnMock.callRealMethod());\nallCreatedCloseables.add(snapshot);\nreturn snapshot;\n}\n})\n.when(keyedStateBackend.db)\n.getSnapshot();\ndoAnswer(\nnew Answer() {\n@Override\npublic Object answer(InvocationOnMock invocationOnMock)\nthrows Throwable {\nColumnFamilyHandle snapshot =\nspy((ColumnFamilyHandle) invocationOnMock.callRealMethod());\nallCreatedCloseables.add(snapshot);\nreturn snapshot;\n}\n})\n.when(keyedStateBackend.db)\n.createColumnFamily(any(ColumnFamilyDescriptor.class));\nfor (int i = 0; i < 100; ++i) {\nkeyedStateBackend.setCurrentKey(i);\ntestState1.update(4200 + i);\ntestState2.update(\"S-\" + (4200 + i));\n}\n}", + "target_code": "rocksDBStateUploader = spy(new RocksDBStateUploader(4));", + "method_body_after": "public void setupRocksKeyedStateBackend() throws Exception {\nblocker = new OneShotLatch();\nwaiter = new OneShotLatch();\ntestStreamFactory = new BlockerCheckpointStreamFactory(1024 * 1024);\ntestStreamFactory.setBlockerLatch(blocker);\ntestStreamFactory.setWaiterLatch(waiter);\ntestStreamFactory.setAfterNumberInvocations(10);\nprepareRocksDB();\nRocksDBKeyedStateBackendBuilder keyedStateBackendBuilder =\nRocksDBTestUtils.builderForTestDB(\nTEMP_FOLDER\n.newFolder(),\nIntSerializer.INSTANCE,\nspy(db),\ndefaultCFHandle,\noptionsContainer.getColumnOptions())\n.setEnableIncrementalCheckpointing(enableIncrementalCheckpointing);\nif (enableIncrementalCheckpointing) {\nrocksDBStateUploader =\nspy(\nnew RocksDBStateUploader(\nRocksDBOptions.CHECKPOINT_TRANSFER_THREAD_NUM.defaultValue()));\nkeyedStateBackendBuilder.setRocksDBStateUploader(rocksDBStateUploader);\n}\nkeyedStateBackend = keyedStateBackendBuilder.build();\ntestState1 =\nkeyedStateBackend.getPartitionedState(\nVoidNamespace.INSTANCE,\nVoidNamespaceSerializer.INSTANCE,\nnew 
ValueStateDescriptor<>(\"TestState-1\", Integer.class, 0));\ntestState2 =\nkeyedStateBackend.getPartitionedState(\nVoidNamespace.INSTANCE,\nVoidNamespaceSerializer.INSTANCE,\nnew ValueStateDescriptor<>(\"TestState-2\", String.class, \"\"));\nallCreatedCloseables = new ArrayList<>();\ndoAnswer(\nnew Answer() {\n@Override\npublic Object answer(InvocationOnMock invocationOnMock)\nthrows Throwable {\nRocksIterator rocksIterator =\nspy((RocksIterator) invocationOnMock.callRealMethod());\nallCreatedCloseables.add(rocksIterator);\nreturn rocksIterator;\n}\n})\n.when(keyedStateBackend.db)\n.newIterator(any(ColumnFamilyHandle.class), any(ReadOptions.class));\ndoAnswer(\nnew Answer() {\n@Override\npublic Object answer(InvocationOnMock invocationOnMock)\nthrows Throwable {\nSnapshot snapshot =\nspy((Snapshot) invocationOnMock.callRealMethod());\nallCreatedCloseables.add(snapshot);\nreturn snapshot;\n}\n})\n.when(keyedStateBackend.db)\n.getSnapshot();\ndoAnswer(\nnew Answer() {\n@Override\npublic Object answer(InvocationOnMock invocationOnMock)\nthrows Throwable {\nColumnFamilyHandle snapshot =\nspy((ColumnFamilyHandle) invocationOnMock.callRealMethod());\nallCreatedCloseables.add(snapshot);\nreturn snapshot;\n}\n})\n.when(keyedStateBackend.db)\n.createColumnFamily(any(ColumnFamilyDescriptor.class));\nfor (int i = 0; i < 100; ++i) {\nkeyedStateBackend.setCurrentKey(i);\ntestState1.update(4200 + i);\ntestState2.update(\"S-\" + (4200 + i));\n}\n}", + "context_before": "class EmbeddedRocksDBStateBackendTest\nextends StateBackendTestBase {\nprivate OneShotLatch blocker;\nprivate OneShotLatch waiter;\nprivate BlockerCheckpointStreamFactory testStreamFactory;\nprivate RocksDBKeyedStateBackend keyedStateBackend;\nprivate List allCreatedCloseables;\nprivate ValueState testState1;\nprivate ValueState testState2;\n@ClassRule public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder();\n@Parameterized.Parameters\npublic static List modes() {\nreturn Arrays.asList(\nnew Object[][] {\n{\ntrue,\n(SupplierWithException)\nJobManagerCheckpointStorage::new\n},\n{\nfalse,\n(SupplierWithException)\n() -> {\nString checkpointPath =\nTEMP_FOLDER.newFolder().toURI().toString();\nreturn new FileSystemCheckpointStorage(checkpointPath);\n}\n}\n});\n}\n@Parameterized.Parameter(value = 0)\npublic boolean enableIncrementalCheckpointing;\n@Parameterized.Parameter(value = 1)\npublic SupplierWithException storageSupplier;\nprivate String dbPath;\nprivate RocksDB db = null;\nprivate ColumnFamilyHandle defaultCFHandle = null;\nprivate RocksDBStateUploader rocksDBStateUploader = null;\nprivate final RocksDBResourceContainer optionsContainer = new RocksDBResourceContainer();\npublic void prepareRocksDB() throws Exception {\nString dbPath = new File(TEMP_FOLDER.newFolder(), DB_INSTANCE_DIR_STRING).getAbsolutePath();\nColumnFamilyOptions columnOptions = optionsContainer.getColumnOptions();\nArrayList columnFamilyHandles = new ArrayList<>(1);\ndb =\nRocksDBOperationUtils.openDB(\ndbPath,\nCollections.emptyList(),\ncolumnFamilyHandles,\ncolumnOptions,\noptionsContainer.getDbOptions());\ndefaultCFHandle = columnFamilyHandles.remove(0);\n}\n@Override\nprotected ConfigurableStateBackend getStateBackend() throws IOException {\ndbPath = TEMP_FOLDER.newFolder().getAbsolutePath();\nEmbeddedRocksDBStateBackend backend =\nnew EmbeddedRocksDBStateBackend(enableIncrementalCheckpointing);\nConfiguration configuration = new 
Configuration();\nconfiguration.set(\nRocksDBOptions.TIMER_SERVICE_FACTORY,\nEmbeddedRocksDBStateBackend.PriorityQueueStateType.ROCKSDB);\nbackend = backend.configure(configuration, Thread.currentThread().getContextClassLoader());\nbackend.setDbStoragePath(dbPath);\nreturn backend;\n}\n@Override\nprotected CheckpointStorage getCheckpointStorage() throws Exception {\nreturn storageSupplier.get();\n}\n@Override\nprotected boolean isSerializerPresenceRequiredOnRestore() {\nreturn false;\n}\n@Override\nprotected boolean supportsAsynchronousSnapshots() {\nreturn true;\n}\n@After\npublic void cleanupRocksDB() {\nif (keyedStateBackend != null) {\nIOUtils.closeQuietly(keyedStateBackend);\nkeyedStateBackend.dispose();\n}\nIOUtils.closeQuietly(defaultCFHandle);\nIOUtils.closeQuietly(db);\nIOUtils.closeQuietly(optionsContainer);\nif (allCreatedCloseables != null) {\nfor (RocksObject rocksCloseable : allCreatedCloseables) {\nverify(rocksCloseable, times(1)).close();\n}\nallCreatedCloseables = null;\n}\n}\n@Test\npublic void testCorrectMergeOperatorSet() throws Exception {\nprepareRocksDB();\nfinal ColumnFamilyOptions columnFamilyOptions = spy(new ColumnFamilyOptions());\nRocksDBKeyedStateBackend test = null;\ntry {\ntest =\nRocksDBTestUtils.builderForTestDB(\nTEMP_FOLDER.newFolder(),\nIntSerializer.INSTANCE,\ndb,\ndefaultCFHandle,\ncolumnFamilyOptions)\n.setEnableIncrementalCheckpointing(enableIncrementalCheckpointing)\n.build();\nValueStateDescriptor stubState1 =\nnew ValueStateDescriptor<>(\"StubState-1\", StringSerializer.INSTANCE);\ntest.createInternalState(StringSerializer.INSTANCE, stubState1);\nValueStateDescriptor stubState2 =\nnew ValueStateDescriptor<>(\"StubState-2\", StringSerializer.INSTANCE);\ntest.createInternalState(StringSerializer.INSTANCE, stubState2);\nverify(columnFamilyOptions, Mockito.times(2))\n.setMergeOperatorName(RocksDBKeyedStateBackend.MERGE_OPERATOR_NAME);\n} finally {\nif (test != null) {\nIOUtils.closeQuietly(test);\ntest.dispose();\n}\ncolumnFamilyOptions.close();\n}\n}\n@Test\npublic void testReleasingSnapshotAfterBackendClosed() throws Exception {\nsetupRocksKeyedStateBackend();\ntry {\nRunnableFuture> snapshot =\nkeyedStateBackend.snapshot(\n0L,\n0L,\ntestStreamFactory,\nCheckpointOptions.forCheckpointWithDefaultLocation());\nRocksDB spyDB = keyedStateBackend.db;\nif (!enableIncrementalCheckpointing) {\nverify(spyDB, times(1)).getSnapshot();\nverify(spyDB, times(0)).releaseSnapshot(any(Snapshot.class));\n}\nfor (RocksObject rocksCloseable : allCreatedCloseables) {\nverify(rocksCloseable, times(0)).close();\n}\nsnapshot.cancel(true);\nthis.keyedStateBackend.dispose();\nverify(spyDB, times(1)).close();\nassertEquals(true, keyedStateBackend.isDisposed());\nfor (RocksObject rocksCloseable : allCreatedCloseables) {\nverify(rocksCloseable, times(1)).close();\n}\n} finally {\nkeyedStateBackend.dispose();\nkeyedStateBackend = null;\n}\nverifyRocksDBStateUploaderClosed();\n}\n@Test\npublic void testDismissingSnapshot() throws Exception {\nsetupRocksKeyedStateBackend();\ntry {\nRunnableFuture> snapshot =\nkeyedStateBackend.snapshot(\n0L,\n0L,\ntestStreamFactory,\nCheckpointOptions.forCheckpointWithDefaultLocation());\nsnapshot.cancel(true);\nverifyRocksObjectsReleased();\n} finally {\nthis.keyedStateBackend.dispose();\nthis.keyedStateBackend = null;\n}\nverifyRocksDBStateUploaderClosed();\n}\n@Test\npublic void testDismissingSnapshotNotRunnable() throws Exception {\nsetupRocksKeyedStateBackend();\ntry {\nRunnableFuture> snapshot 
=\nkeyedStateBackend.snapshot(\n0L,\n0L,\ntestStreamFactory,\nCheckpointOptions.forCheckpointWithDefaultLocation());\nsnapshot.cancel(true);\nThread asyncSnapshotThread = new Thread(snapshot);\nasyncSnapshotThread.start();\ntry {\nsnapshot.get();\nfail();\n} catch (Exception ignored) {\n}\nasyncSnapshotThread.join();\nverifyRocksObjectsReleased();\n} finally {\nthis.keyedStateBackend.dispose();\nthis.keyedStateBackend = null;\n}\nverifyRocksDBStateUploaderClosed();\n}\n@Test\npublic void testCompletingSnapshot() throws Exception {\nsetupRocksKeyedStateBackend();\ntry {\nRunnableFuture> snapshot =\nkeyedStateBackend.snapshot(\n0L,\n0L,\ntestStreamFactory,\nCheckpointOptions.forCheckpointWithDefaultLocation());\nThread asyncSnapshotThread = new Thread(snapshot);\nasyncSnapshotThread.start();\nwaiter.await();\nwaiter.reset();\nrunStateUpdates();\nblocker.trigger();\nwaiter.await();\nSnapshotResult snapshotResult = snapshot.get();\nKeyedStateHandle keyedStateHandle = snapshotResult.getJobManagerOwnedSnapshot();\nassertNotNull(keyedStateHandle);\nassertTrue(keyedStateHandle.getStateSize() > 0);\nassertEquals(2, keyedStateHandle.getKeyGroupRange().getNumberOfKeyGroups());\nfor (BlockingCheckpointOutputStream stream : testStreamFactory.getAllCreatedStreams()) {\nassertTrue(stream.isClosed());\n}\nasyncSnapshotThread.join();\nverifyRocksObjectsReleased();\n} finally {\nthis.keyedStateBackend.dispose();\nthis.keyedStateBackend = null;\n}\nverifyRocksDBStateUploaderClosed();\n}\n@Test\npublic void testCancelRunningSnapshot() throws Exception {\nsetupRocksKeyedStateBackend();\ntry {\nRunnableFuture> snapshot =\nkeyedStateBackend.snapshot(\n0L,\n0L,\ntestStreamFactory,\nCheckpointOptions.forCheckpointWithDefaultLocation());\nThread asyncSnapshotThread = new Thread(snapshot);\nasyncSnapshotThread.start();\nwaiter.await();\nwaiter.reset();\nrunStateUpdates();\nsnapshot.cancel(true);\nblocker.trigger();\nfor (BlockingCheckpointOutputStream stream : testStreamFactory.getAllCreatedStreams()) {\nassertTrue(stream.isClosed());\n}\nwaiter.await();\ntry {\nsnapshot.get();\nfail();\n} catch (Exception ignored) {\n}\nasyncSnapshotThread.join();\nverifyRocksObjectsReleased();\n} finally {\nthis.keyedStateBackend.dispose();\nthis.keyedStateBackend = null;\n}\nverifyRocksDBStateUploaderClosed();\n}\n@Test\npublic void testDisposeDeletesAllDirectories() throws Exception {\nCheckpointableKeyedStateBackend backend =\ncreateKeyedBackend(IntSerializer.INSTANCE);\nCollection allFilesInDbDir =\nFileUtils.listFilesAndDirs(\nnew File(dbPath), new AcceptAllFilter(), new AcceptAllFilter());\ntry {\nValueStateDescriptor kvId =\nnew ValueStateDescriptor<>(\"id\", String.class, null);\nkvId.initializeSerializerUnlessSet(new ExecutionConfig());\nValueState state =\nbackend.getPartitionedState(\nVoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);\nbackend.setCurrentKey(1);\nstate.update(\"Hello\");\nassertTrue(allFilesInDbDir.size() > 1);\n} finally {\nIOUtils.closeQuietly(backend);\nbackend.dispose();\n}\nallFilesInDbDir =\nFileUtils.listFilesAndDirs(\nnew File(dbPath), new AcceptAllFilter(), new AcceptAllFilter());\nassertEquals(1, allFilesInDbDir.size());\n}\n@Test\npublic void testSharedIncrementalStateDeRegistration() throws Exception {\nif (enableIncrementalCheckpointing) {\nCheckpointableKeyedStateBackend backend =\ncreateKeyedBackend(IntSerializer.INSTANCE);\ntry {\nValueStateDescriptor kvId =\nnew ValueStateDescriptor<>(\"id\", String.class, null);\nkvId.initializeSerializerUnlessSet(new 
ExecutionConfig());\nValueState state =\nbackend.getPartitionedState(\nVoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);\nQueue previousStateHandles = new LinkedList<>();\nSharedStateRegistry sharedStateRegistry = spy(new SharedStateRegistry());\nfor (int checkpointId = 0; checkpointId < 3; ++checkpointId) {\nreset(sharedStateRegistry);\nbackend.setCurrentKey(checkpointId);\nstate.update(\"Hello-\" + checkpointId);\nRunnableFuture> snapshot =\nbackend.snapshot(\ncheckpointId,\ncheckpointId,\ncreateStreamFactory(),\nCheckpointOptions.forCheckpointWithDefaultLocation());\nsnapshot.run();\nSnapshotResult snapshotResult = snapshot.get();\nIncrementalRemoteKeyedStateHandle stateHandle =\n(IncrementalRemoteKeyedStateHandle)\nsnapshotResult.getJobManagerOwnedSnapshot();\nMap sharedState =\nnew HashMap<>(stateHandle.getSharedState());\nstateHandle.registerSharedStates(sharedStateRegistry);\nfor (Map.Entry e : sharedState.entrySet()) {\nverify(sharedStateRegistry)\n.registerReference(\nstateHandle.createSharedStateRegistryKeyFromFileName(\ne.getKey()),\ne.getValue());\n}\npreviousStateHandles.add(stateHandle);\n((CheckpointListener) backend).notifyCheckpointComplete(checkpointId);\nif (previousStateHandles.size() > 1) {\ncheckRemove(previousStateHandles.remove(), sharedStateRegistry);\n}\n}\nwhile (!previousStateHandles.isEmpty()) {\nreset(sharedStateRegistry);\ncheckRemove(previousStateHandles.remove(), sharedStateRegistry);\n}\n} finally {\nIOUtils.closeQuietly(backend);\nbackend.dispose();\n}\n}\n}\nprivate void checkRemove(IncrementalRemoteKeyedStateHandle remove, SharedStateRegistry registry)\nthrows Exception {\nfor (StateHandleID id : remove.getSharedState().keySet()) {\nverify(registry, times(0))\n.unregisterReference(remove.createSharedStateRegistryKeyFromFileName(id));\n}\nremove.discardState();\nfor (StateHandleID id : remove.getSharedState().keySet()) {\nverify(registry)\n.unregisterReference(remove.createSharedStateRegistryKeyFromFileName(id));\n}\n}\nprivate void runStateUpdates() throws Exception {\nfor (int i = 50; i < 150; ++i) {\nif (i % 10 == 0) {\nThread.sleep(1);\n}\nkeyedStateBackend.setCurrentKey(i);\ntestState1.update(4200 + i);\ntestState2.update(\"S-\" + (4200 + i));\n}\n}\nprivate void verifyRocksObjectsReleased() {\nfor (RocksObject rocksCloseable : allCreatedCloseables) {\nverify(rocksCloseable, times(1)).close();\n}\nassertNotNull(null, keyedStateBackend.db);\nRocksDB spyDB = keyedStateBackend.db;\nif (!enableIncrementalCheckpointing) {\nverify(spyDB, times(1)).getSnapshot();\nverify(spyDB, times(1)).releaseSnapshot(any(Snapshot.class));\n}\nkeyedStateBackend.dispose();\nverify(spyDB, times(1)).close();\nassertEquals(true, keyedStateBackend.isDisposed());\n}\nprivate void verifyRocksDBStateUploaderClosed() {\nif (enableIncrementalCheckpointing) {\nverify(rocksDBStateUploader, times(1)).close();\n}\n}\nprivate static class AcceptAllFilter implements IOFileFilter {\n@Override\npublic boolean accept(File file) {\nreturn true;\n}\n@Override\npublic boolean accept(File file, String s) {\nreturn true;\n}\n}\n}", + "context_after": "class EmbeddedRocksDBStateBackendTest\nextends StateBackendTestBase {\nprivate OneShotLatch blocker;\nprivate OneShotLatch waiter;\nprivate BlockerCheckpointStreamFactory testStreamFactory;\nprivate RocksDBKeyedStateBackend keyedStateBackend;\nprivate List allCreatedCloseables;\nprivate ValueState testState1;\nprivate ValueState testState2;\n@ClassRule public static final TemporaryFolder TEMP_FOLDER = new 
TemporaryFolder();\n@Parameterized.Parameters\npublic static List modes() {\nreturn Arrays.asList(\nnew Object[][] {\n{\ntrue,\n(SupplierWithException)\nJobManagerCheckpointStorage::new\n},\n{\nfalse,\n(SupplierWithException)\n() -> {\nString checkpointPath =\nTEMP_FOLDER.newFolder().toURI().toString();\nreturn new FileSystemCheckpointStorage(checkpointPath);\n}\n}\n});\n}\n@Parameterized.Parameter(value = 0)\npublic boolean enableIncrementalCheckpointing;\n@Parameterized.Parameter(value = 1)\npublic SupplierWithException storageSupplier;\nprivate String dbPath;\nprivate RocksDB db = null;\nprivate ColumnFamilyHandle defaultCFHandle = null;\nprivate RocksDBStateUploader rocksDBStateUploader = null;\nprivate final RocksDBResourceContainer optionsContainer = new RocksDBResourceContainer();\npublic void prepareRocksDB() throws Exception {\nString dbPath = new File(TEMP_FOLDER.newFolder(), DB_INSTANCE_DIR_STRING).getAbsolutePath();\nColumnFamilyOptions columnOptions = optionsContainer.getColumnOptions();\nArrayList columnFamilyHandles = new ArrayList<>(1);\ndb =\nRocksDBOperationUtils.openDB(\ndbPath,\nCollections.emptyList(),\ncolumnFamilyHandles,\ncolumnOptions,\noptionsContainer.getDbOptions());\ndefaultCFHandle = columnFamilyHandles.remove(0);\n}\n@Override\nprotected ConfigurableStateBackend getStateBackend() throws IOException {\ndbPath = TEMP_FOLDER.newFolder().getAbsolutePath();\nEmbeddedRocksDBStateBackend backend =\nnew EmbeddedRocksDBStateBackend(enableIncrementalCheckpointing);\nConfiguration configuration = new Configuration();\nconfiguration.set(\nRocksDBOptions.TIMER_SERVICE_FACTORY,\nEmbeddedRocksDBStateBackend.PriorityQueueStateType.ROCKSDB);\nbackend = backend.configure(configuration, Thread.currentThread().getContextClassLoader());\nbackend.setDbStoragePath(dbPath);\nreturn backend;\n}\n@Override\nprotected CheckpointStorage getCheckpointStorage() throws Exception {\nreturn storageSupplier.get();\n}\n@Override\nprotected boolean isSerializerPresenceRequiredOnRestore() {\nreturn false;\n}\n@Override\nprotected boolean supportsAsynchronousSnapshots() {\nreturn true;\n}\n@After\npublic void cleanupRocksDB() {\nif (keyedStateBackend != null) {\nIOUtils.closeQuietly(keyedStateBackend);\nkeyedStateBackend.dispose();\n}\nIOUtils.closeQuietly(defaultCFHandle);\nIOUtils.closeQuietly(db);\nIOUtils.closeQuietly(optionsContainer);\nif (allCreatedCloseables != null) {\nfor (RocksObject rocksCloseable : allCreatedCloseables) {\nverify(rocksCloseable, times(1)).close();\n}\nallCreatedCloseables = null;\n}\n}\n@Test\npublic void testCorrectMergeOperatorSet() throws Exception {\nprepareRocksDB();\nfinal ColumnFamilyOptions columnFamilyOptions = spy(new ColumnFamilyOptions());\nRocksDBKeyedStateBackend test = null;\ntry {\ntest =\nRocksDBTestUtils.builderForTestDB(\nTEMP_FOLDER.newFolder(),\nIntSerializer.INSTANCE,\ndb,\ndefaultCFHandle,\ncolumnFamilyOptions)\n.setEnableIncrementalCheckpointing(enableIncrementalCheckpointing)\n.build();\nValueStateDescriptor stubState1 =\nnew ValueStateDescriptor<>(\"StubState-1\", StringSerializer.INSTANCE);\ntest.createInternalState(StringSerializer.INSTANCE, stubState1);\nValueStateDescriptor stubState2 =\nnew ValueStateDescriptor<>(\"StubState-2\", StringSerializer.INSTANCE);\ntest.createInternalState(StringSerializer.INSTANCE, stubState2);\nverify(columnFamilyOptions, Mockito.times(2))\n.setMergeOperatorName(RocksDBKeyedStateBackend.MERGE_OPERATOR_NAME);\n} finally {\nif (test != null) 
{\nIOUtils.closeQuietly(test);\ntest.dispose();\n}\ncolumnFamilyOptions.close();\n}\n}\n@Test\npublic void testReleasingSnapshotAfterBackendClosed() throws Exception {\nsetupRocksKeyedStateBackend();\ntry {\nRunnableFuture> snapshot =\nkeyedStateBackend.snapshot(\n0L,\n0L,\ntestStreamFactory,\nCheckpointOptions.forCheckpointWithDefaultLocation());\nRocksDB spyDB = keyedStateBackend.db;\nif (!enableIncrementalCheckpointing) {\nverify(spyDB, times(1)).getSnapshot();\nverify(spyDB, times(0)).releaseSnapshot(any(Snapshot.class));\n}\nfor (RocksObject rocksCloseable : allCreatedCloseables) {\nverify(rocksCloseable, times(0)).close();\n}\nsnapshot.cancel(true);\nthis.keyedStateBackend.dispose();\nverify(spyDB, times(1)).close();\nassertEquals(true, keyedStateBackend.isDisposed());\nfor (RocksObject rocksCloseable : allCreatedCloseables) {\nverify(rocksCloseable, times(1)).close();\n}\n} finally {\nkeyedStateBackend.dispose();\nkeyedStateBackend = null;\n}\nverifyRocksDBStateUploaderClosed();\n}\n@Test\npublic void testDismissingSnapshot() throws Exception {\nsetupRocksKeyedStateBackend();\ntry {\nRunnableFuture> snapshot =\nkeyedStateBackend.snapshot(\n0L,\n0L,\ntestStreamFactory,\nCheckpointOptions.forCheckpointWithDefaultLocation());\nsnapshot.cancel(true);\nverifyRocksObjectsReleased();\n} finally {\nthis.keyedStateBackend.dispose();\nthis.keyedStateBackend = null;\n}\nverifyRocksDBStateUploaderClosed();\n}\n@Test\npublic void testDismissingSnapshotNotRunnable() throws Exception {\nsetupRocksKeyedStateBackend();\ntry {\nRunnableFuture> snapshot =\nkeyedStateBackend.snapshot(\n0L,\n0L,\ntestStreamFactory,\nCheckpointOptions.forCheckpointWithDefaultLocation());\nsnapshot.cancel(true);\nThread asyncSnapshotThread = new Thread(snapshot);\nasyncSnapshotThread.start();\ntry {\nsnapshot.get();\nfail();\n} catch (Exception ignored) {\n}\nasyncSnapshotThread.join();\nverifyRocksObjectsReleased();\n} finally {\nthis.keyedStateBackend.dispose();\nthis.keyedStateBackend = null;\n}\nverifyRocksDBStateUploaderClosed();\n}\n@Test\npublic void testCompletingSnapshot() throws Exception {\nsetupRocksKeyedStateBackend();\ntry {\nRunnableFuture> snapshot =\nkeyedStateBackend.snapshot(\n0L,\n0L,\ntestStreamFactory,\nCheckpointOptions.forCheckpointWithDefaultLocation());\nThread asyncSnapshotThread = new Thread(snapshot);\nasyncSnapshotThread.start();\nwaiter.await();\nwaiter.reset();\nrunStateUpdates();\nblocker.trigger();\nwaiter.await();\nSnapshotResult snapshotResult = snapshot.get();\nKeyedStateHandle keyedStateHandle = snapshotResult.getJobManagerOwnedSnapshot();\nassertNotNull(keyedStateHandle);\nassertTrue(keyedStateHandle.getStateSize() > 0);\nassertEquals(2, keyedStateHandle.getKeyGroupRange().getNumberOfKeyGroups());\nfor (BlockingCheckpointOutputStream stream : testStreamFactory.getAllCreatedStreams()) {\nassertTrue(stream.isClosed());\n}\nasyncSnapshotThread.join();\nverifyRocksObjectsReleased();\n} finally {\nthis.keyedStateBackend.dispose();\nthis.keyedStateBackend = null;\n}\nverifyRocksDBStateUploaderClosed();\n}\n@Test\npublic void testCancelRunningSnapshot() throws Exception {\nsetupRocksKeyedStateBackend();\ntry {\nRunnableFuture> snapshot =\nkeyedStateBackend.snapshot(\n0L,\n0L,\ntestStreamFactory,\nCheckpointOptions.forCheckpointWithDefaultLocation());\nThread asyncSnapshotThread = new Thread(snapshot);\nasyncSnapshotThread.start();\nwaiter.await();\nwaiter.reset();\nrunStateUpdates();\nsnapshot.cancel(true);\nblocker.trigger();\nfor (BlockingCheckpointOutputStream stream : 
testStreamFactory.getAllCreatedStreams()) {\nassertTrue(stream.isClosed());\n}\nwaiter.await();\ntry {\nsnapshot.get();\nfail();\n} catch (Exception ignored) {\n}\nasyncSnapshotThread.join();\nverifyRocksObjectsReleased();\n} finally {\nthis.keyedStateBackend.dispose();\nthis.keyedStateBackend = null;\n}\nverifyRocksDBStateUploaderClosed();\n}\n@Test\npublic void testDisposeDeletesAllDirectories() throws Exception {\nCheckpointableKeyedStateBackend backend =\ncreateKeyedBackend(IntSerializer.INSTANCE);\nCollection allFilesInDbDir =\nFileUtils.listFilesAndDirs(\nnew File(dbPath), new AcceptAllFilter(), new AcceptAllFilter());\ntry {\nValueStateDescriptor kvId =\nnew ValueStateDescriptor<>(\"id\", String.class, null);\nkvId.initializeSerializerUnlessSet(new ExecutionConfig());\nValueState state =\nbackend.getPartitionedState(\nVoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);\nbackend.setCurrentKey(1);\nstate.update(\"Hello\");\nassertTrue(allFilesInDbDir.size() > 1);\n} finally {\nIOUtils.closeQuietly(backend);\nbackend.dispose();\n}\nallFilesInDbDir =\nFileUtils.listFilesAndDirs(\nnew File(dbPath), new AcceptAllFilter(), new AcceptAllFilter());\nassertEquals(1, allFilesInDbDir.size());\n}\n@Test\npublic void testSharedIncrementalStateDeRegistration() throws Exception {\nif (enableIncrementalCheckpointing) {\nCheckpointableKeyedStateBackend backend =\ncreateKeyedBackend(IntSerializer.INSTANCE);\ntry {\nValueStateDescriptor kvId =\nnew ValueStateDescriptor<>(\"id\", String.class, null);\nkvId.initializeSerializerUnlessSet(new ExecutionConfig());\nValueState state =\nbackend.getPartitionedState(\nVoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);\nQueue previousStateHandles = new LinkedList<>();\nSharedStateRegistry sharedStateRegistry = spy(new SharedStateRegistry());\nfor (int checkpointId = 0; checkpointId < 3; ++checkpointId) {\nreset(sharedStateRegistry);\nbackend.setCurrentKey(checkpointId);\nstate.update(\"Hello-\" + checkpointId);\nRunnableFuture> snapshot =\nbackend.snapshot(\ncheckpointId,\ncheckpointId,\ncreateStreamFactory(),\nCheckpointOptions.forCheckpointWithDefaultLocation());\nsnapshot.run();\nSnapshotResult snapshotResult = snapshot.get();\nIncrementalRemoteKeyedStateHandle stateHandle =\n(IncrementalRemoteKeyedStateHandle)\nsnapshotResult.getJobManagerOwnedSnapshot();\nMap sharedState =\nnew HashMap<>(stateHandle.getSharedState());\nstateHandle.registerSharedStates(sharedStateRegistry);\nfor (Map.Entry e : sharedState.entrySet()) {\nverify(sharedStateRegistry)\n.registerReference(\nstateHandle.createSharedStateRegistryKeyFromFileName(\ne.getKey()),\ne.getValue());\n}\npreviousStateHandles.add(stateHandle);\n((CheckpointListener) backend).notifyCheckpointComplete(checkpointId);\nif (previousStateHandles.size() > 1) {\ncheckRemove(previousStateHandles.remove(), sharedStateRegistry);\n}\n}\nwhile (!previousStateHandles.isEmpty()) {\nreset(sharedStateRegistry);\ncheckRemove(previousStateHandles.remove(), sharedStateRegistry);\n}\n} finally {\nIOUtils.closeQuietly(backend);\nbackend.dispose();\n}\n}\n}\nprivate void checkRemove(IncrementalRemoteKeyedStateHandle remove, SharedStateRegistry registry)\nthrows Exception {\nfor (StateHandleID id : remove.getSharedState().keySet()) {\nverify(registry, times(0))\n.unregisterReference(remove.createSharedStateRegistryKeyFromFileName(id));\n}\nremove.discardState();\nfor (StateHandleID id : remove.getSharedState().keySet()) 
{\nverify(registry)\n.unregisterReference(remove.createSharedStateRegistryKeyFromFileName(id));\n}\n}\nprivate void runStateUpdates() throws Exception {\nfor (int i = 50; i < 150; ++i) {\nif (i % 10 == 0) {\nThread.sleep(1);\n}\nkeyedStateBackend.setCurrentKey(i);\ntestState1.update(4200 + i);\ntestState2.update(\"S-\" + (4200 + i));\n}\n}\nprivate void verifyRocksObjectsReleased() {\nfor (RocksObject rocksCloseable : allCreatedCloseables) {\nverify(rocksCloseable, times(1)).close();\n}\nassertNotNull(null, keyedStateBackend.db);\nRocksDB spyDB = keyedStateBackend.db;\nif (!enableIncrementalCheckpointing) {\nverify(spyDB, times(1)).getSnapshot();\nverify(spyDB, times(1)).releaseSnapshot(any(Snapshot.class));\n}\nkeyedStateBackend.dispose();\nverify(spyDB, times(1)).close();\nassertEquals(true, keyedStateBackend.isDisposed());\n}\nprivate void verifyRocksDBStateUploaderClosed() {\nif (enableIncrementalCheckpointing) {\nverify(rocksDBStateUploader, times(1)).close();\n}\n}\nprivate static class AcceptAllFilter implements IOFileFilter {\n@Override\npublic boolean accept(File file) {\nreturn true;\n}\n@Override\npublic boolean accept(File file, String s) {\nreturn true;\n}\n}\n}" + }, + { + "comment": "Never tried to grab the binary from the instance, I will have a look.", + "method_body": "private static void s2iBuild(OpenShiftClient client, BuildConfig buildConfig, File binaryFile, S2iConfig s2iConfig) {\nBuild build;\ntry {\nbuild = client.buildConfigs().withName(buildConfig.getMetadata().getName()).instantiateBinary()\n.withTimeoutInMillis(s2iConfig.buildTimeout.toMillis()).fromFile(binaryFile);\n} catch (Exception e) {\nif (e.getCause() instanceof StreamResetException) {\nLOG.warn(\"Stream was reset while building. Falling back to building with the 'oc' binary.\");\nif (!ExecUtil.exec(\"oc\", \"start-build\", buildConfig.getMetadata().getName(), \"--from-archive\",\nbinaryFile.toPath().toAbsolutePath().toString())) {\nthrow s2iException(e);\n}\nreturn;\n} else {\nthrow s2iException(e);\n}\n}\ntry (BufferedReader reader = new BufferedReader(\nclient.builds().withName(build.getMetadata().getName()).getLogReader())) {\nfor (String line = reader.readLine(); line != null; line = reader.readLine()) {\nSystem.out.println(line);\n}\n} catch (IOException e) {\nthrow s2iException(e);\n}\n}", + "target_code": "if (!ExecUtil.exec(\"oc\", \"start-build\", buildConfig.getMetadata().getName(), \"--from-archive\",", + "method_body_after": "private static void s2iBuild(OpenShiftClient client, BuildConfig buildConfig, File binaryFile, S2iConfig s2iConfig) {\nBuild build;\ntry {\nbuild = client.buildConfigs().withName(buildConfig.getMetadata().getName()).instantiateBinary()\n.withTimeoutInMillis(s2iConfig.buildTimeout.toMillis()).fromFile(binaryFile);\n} catch (Exception e) {\nif (e.getCause() instanceof StreamResetException) {\nLOG.warn(\"Stream was reset while building. 
Falling back to building with the 'oc' binary.\");\nif (!ExecUtil.exec(\"oc\", \"start-build\", buildConfig.getMetadata().getName(), \"--from-archive\",\nbinaryFile.toPath().toAbsolutePath().toString())) {\nthrow s2iException(e);\n}\nreturn;\n} else {\nthrow s2iException(e);\n}\n}\ntry (BufferedReader reader = new BufferedReader(\nclient.builds().withName(build.getMetadata().getName()).getLogReader())) {\nfor (String line = reader.readLine(); line != null; line = reader.readLine()) {\nSystem.out.println(line);\n}\n} catch (IOException e) {\nthrow s2iException(e);\n}\n}", + "context_before": "class })\npublic void s2iBuildFromNative(S2iConfig s2iConfig, ContainerImageConfig containerImageConfig,\nKubernetesClientBuildItem kubernetesClient,\nContainerImageInfoBuildItem containerImage,\nArchiveRootBuildItem archiveRoot, OutputTargetBuildItem out, PackageConfig packageConfig,\nList generatedResources,\nOptional buildRequest,\nOptional pushRequest,\nBuildProducer artifactResultProducer,\nBuildProducer containerImageResultProducer,\nNativeImageBuildItem nativeImageBuildItem) {\nif (!containerImageConfig.build && !containerImageConfig.push && !buildRequest.isPresent()\n&& !pushRequest.isPresent()) {\nreturn;\n}\nString namespace = Optional.ofNullable(kubernetesClient.getClient().getNamespace()).orElse(\"default\");\nLOG.info(\"Performing s2i binary build with native image on server: \" + kubernetesClient.getClient().getMasterUrl()\n+ \" in namespace:\" + namespace + \".\");\nString image = containerImage.getImage();\nGeneratedFileSystemResourceBuildItem openshiftYml = generatedResources\n.stream()\n.filter(r -> r.getName().endsWith(\"kubernetes/openshift.yml\"))\n.findFirst().orElseThrow(() -> new IllegalStateException(\"Could not find kubernetes/openshift.yml\"));\nPath artifactPath = out.getOutputDirectory()\n.resolve(String.format(NATIVE_ARTIFACT_FORMAT, out.getBaseName(), packageConfig.runnerSuffix));\ncreateContainerImage(kubernetesClient, openshiftYml, s2iConfig, out.getOutputDirectory(), artifactPath);\nartifactResultProducer.produce(new ArtifactResultBuildItem(null, \"native-container\", Collections.emptyMap()));\ncontainerImageResultProducer.produce(\nnew ContainerImageResultBuildItem(null, ImageUtil.getRepository(image), ImageUtil.getTag(image)));\n}", + "context_after": "class })\npublic void s2iBuildFromNative(S2iConfig s2iConfig, ContainerImageConfig containerImageConfig,\nKubernetesClientBuildItem kubernetesClient,\nContainerImageInfoBuildItem containerImage,\nArchiveRootBuildItem archiveRoot, OutputTargetBuildItem out, PackageConfig packageConfig,\nList generatedResources,\nOptional buildRequest,\nOptional pushRequest,\nBuildProducer artifactResultProducer,\nBuildProducer containerImageResultProducer,\nNativeImageBuildItem nativeImageBuildItem) {\nif (!containerImageConfig.build && !containerImageConfig.push && !buildRequest.isPresent()\n&& !pushRequest.isPresent()) {\nreturn;\n}\nString namespace = Optional.ofNullable(kubernetesClient.getClient().getNamespace()).orElse(\"default\");\nLOG.info(\"Performing s2i binary build with native image on server: \" + kubernetesClient.getClient().getMasterUrl()\n+ \" in namespace:\" + namespace + \".\");\nString image = containerImage.getImage();\nGeneratedFileSystemResourceBuildItem openshiftYml = generatedResources\n.stream()\n.filter(r -> r.getName().endsWith(\"kubernetes/openshift.yml\"))\n.findFirst().orElseThrow(() -> new IllegalStateException(\"Could not find kubernetes/openshift.yml\"));\nPath artifactPath = 
out.getOutputDirectory()\n.resolve(String.format(NATIVE_ARTIFACT_FORMAT, out.getBaseName(), packageConfig.runnerSuffix));\nPath applicationImagePath = out.getOutputDirectory()\n.resolve(String.format(NATIVE_ARTIFACT_FORMAT, \"application\", packageConfig.runnerSuffix));\ntry {\nFiles.copy(artifactPath, applicationImagePath, StandardCopyOption.REPLACE_EXISTING);\n} catch (Exception e) {\nthrow new RuntimeException(\"Error preparing the s2i build archive.\", e);\n}\ncreateContainerImage(kubernetesClient, openshiftYml, s2iConfig, out.getOutputDirectory(), applicationImagePath);\nartifactResultProducer.produce(new ArtifactResultBuildItem(null, \"native-container\", Collections.emptyMap()));\ncontainerImageResultProducer.produce(\nnew ContainerImageResultBuildItem(null, ImageUtil.getRepository(image), ImageUtil.getTag(image)));\n}" + }, + { + "comment": "I'm not sure why `H2Database 1.x` is `2`. At least `4` is reasonable according to the processing logic of `H2Database 2.x`, I don't know what I overlooked.", + "method_body": "public void assertAddOnDuplicateKey() throws SQLException {\nint itemId = 1;\nint userId1 = 101;\nint userId2 = 102;\nint orderId = 200;\nString status = \"init\";\nString updatedStatus = \"updated on duplicate key\";\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_ON_DUPLICATE_KEY_SQL);\nPreparedStatement queryStatement = connection.prepareStatement(SELECT_SQL_WITH_PARAMETER_MARKER_RETURN_STATUS)) {\npreparedStatement.setInt(1, itemId);\npreparedStatement.setInt(2, orderId);\npreparedStatement.setInt(3, userId1);\npreparedStatement.setString(4, status);\npreparedStatement.setInt(5, itemId);\npreparedStatement.setInt(6, orderId);\npreparedStatement.setInt(7, userId2);\npreparedStatement.setString(8, status);\npreparedStatement.setString(9, updatedStatus);\nint result = preparedStatement.executeUpdate();\nassertThat(result, is(2));\nqueryStatement.setInt(1, orderId);\nqueryStatement.setInt(2, userId1);\ntry (ResultSet resultSet = queryStatement.executeQuery()) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(2), is(userId1));\nassertThat(resultSet.getString(3), is(status));\n}\nqueryStatement.setInt(1, orderId);\nqueryStatement.setInt(2, userId2);\ntry (ResultSet resultSet = queryStatement.executeQuery()) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(2), is(userId2));\nassertThat(resultSet.getString(3), is(status));\n}\n}\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_ON_DUPLICATE_KEY_SQL);\nPreparedStatement queryStatement = connection.prepareStatement(SELECT_SQL_WITH_PARAMETER_MARKER_RETURN_STATUS)) {\npreparedStatement.setInt(1, itemId);\npreparedStatement.setInt(2, orderId);\npreparedStatement.setInt(3, userId1);\npreparedStatement.setString(4, status);\npreparedStatement.setInt(5, itemId);\npreparedStatement.setInt(6, orderId);\npreparedStatement.setInt(7, userId2);\npreparedStatement.setString(8, status);\npreparedStatement.setString(9, updatedStatus);\nint result = preparedStatement.executeUpdate();\nassertThat(result, is(4));\nqueryStatement.setInt(1, orderId);\nqueryStatement.setInt(2, userId1);\ntry (ResultSet resultSet = queryStatement.executeQuery()) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(2), is(userId1));\nassertThat(resultSet.getString(3), is(updatedStatus));\n}\nqueryStatement.setInt(1, 
orderId);\nqueryStatement.setInt(2, userId2);\ntry (ResultSet resultSet = queryStatement.executeQuery()) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(2), is(userId2));\nassertThat(resultSet.getString(3), is(updatedStatus));\n}\n}\n}", + "target_code": "assertThat(result, is(4));", + "method_body_after": "public void assertAddOnDuplicateKey() throws SQLException {\nint itemId = 1;\nint userId1 = 101;\nint userId2 = 102;\nint orderId = 200;\nString status = \"init\";\nString updatedStatus = \"updated on duplicate key\";\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_ON_DUPLICATE_KEY_SQL);\nPreparedStatement queryStatement = connection.prepareStatement(SELECT_SQL_WITH_PARAMETER_MARKER_RETURN_STATUS)) {\npreparedStatement.setInt(1, itemId);\npreparedStatement.setInt(2, orderId);\npreparedStatement.setInt(3, userId1);\npreparedStatement.setString(4, status);\npreparedStatement.setInt(5, itemId);\npreparedStatement.setInt(6, orderId);\npreparedStatement.setInt(7, userId2);\npreparedStatement.setString(8, status);\npreparedStatement.setString(9, updatedStatus);\nint result = preparedStatement.executeUpdate();\nassertThat(result, is(2));\nqueryStatement.setInt(1, orderId);\nqueryStatement.setInt(2, userId1);\ntry (ResultSet resultSet = queryStatement.executeQuery()) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(2), is(userId1));\nassertThat(resultSet.getString(3), is(status));\n}\nqueryStatement.setInt(1, orderId);\nqueryStatement.setInt(2, userId2);\ntry (ResultSet resultSet = queryStatement.executeQuery()) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(2), is(userId2));\nassertThat(resultSet.getString(3), is(status));\n}\n}\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_ON_DUPLICATE_KEY_SQL);\nPreparedStatement queryStatement = connection.prepareStatement(SELECT_SQL_WITH_PARAMETER_MARKER_RETURN_STATUS)) {\npreparedStatement.setInt(1, itemId);\npreparedStatement.setInt(2, orderId);\npreparedStatement.setInt(3, userId1);\npreparedStatement.setString(4, status);\npreparedStatement.setInt(5, itemId);\npreparedStatement.setInt(6, orderId);\npreparedStatement.setInt(7, userId2);\npreparedStatement.setString(8, status);\npreparedStatement.setString(9, updatedStatus);\nint result = preparedStatement.executeUpdate();\nassertThat(result, is(4));\nqueryStatement.setInt(1, orderId);\nqueryStatement.setInt(2, userId1);\ntry (ResultSet resultSet = queryStatement.executeQuery()) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(2), is(userId1));\nassertThat(resultSet.getString(3), is(updatedStatus));\n}\nqueryStatement.setInt(1, orderId);\nqueryStatement.setInt(2, userId2);\ntry (ResultSet resultSet = queryStatement.executeQuery()) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(2), is(userId2));\nassertThat(resultSet.getString(3), is(updatedStatus));\n}\n}\n}", + "context_before": "class ShardingSpherePreparedStatementTest extends AbstractShardingSphereDataSourceForShardingTest {\nprivate static final String INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL = \"INSERT INTO t_user (name) VALUES (?),(?),(?),(?)\";\nprivate static final String SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL = \"SELECT name FROM t_user WHERE id=%dL\";\nprivate static final String INSERT_WITH_GENERATE_KEY_SQL = \"INSERT INTO 
t_order_item (item_id, order_id, user_id, status) VALUES (?, ?, ?, ?)\";\nprivate static final String INSERT_WITHOUT_GENERATE_KEY_SQL = \"INSERT INTO t_order_item (order_id, user_id, status) VALUES (?, ?, ?)\";\nprivate static final String INSERT_WITH_GENERATE_KEY_SQL_WITH_MULTI_VALUES = \"INSERT INTO t_order_item (item_id, order_id, user_id, status) VALUES (1, ?, ?, ?), (2, ?, ?, ?)\";\nprivate static final String INSERT_ON_DUPLICATE_KEY_SQL = \"INSERT INTO t_order_item (item_id, order_id, user_id, status) VALUES (?, ?, ?, ?), (?, ?, ?, ?) ON DUPLICATE KEY UPDATE status = ?\";\nprivate static final String INSERT_SINGLE_TABLE_SQL = \"INSERT INTO t_role (user_name) VALUES (?)\";\nprivate static final String SELECT_SQL_WITHOUT_PARAMETER_MARKER = \"SELECT item_id FROM t_order_item WHERE user_id = %d AND order_id= %s AND status = 'BATCH'\";\nprivate static final String SELECT_SQL_WITH_PARAMETER_MARKER = \"SELECT item_id FROM t_order_item WHERE user_id = ? AND order_id= ? AND status = 'BATCH'\";\nprivate static final String SELECT_SQL_WITH_PARAMETER_MARKER_RETURN_STATUS = \"SELECT item_id, user_id, status FROM t_order_item WHERE order_id= ? AND user_id = ?\";\nprivate static final String SELECT_WITH_ORDER_BY = \"SELECT order_id, user_id, status FROM t_order ORDER BY order_id\";\nprivate static final String SELECT_AUTO_SQL = \"SELECT item_id, order_id, status FROM t_order_item_auto WHERE order_id >= ?\";\nprivate static final String SELECT_SQL_COLUMN_WITH_PARAMETER_MARKER = \"SELECT ?, order_id, status FROM t_order_item_auto\";\nprivate static final String UPDATE_SQL = \"UPDATE t_order SET status = ? WHERE user_id = ? AND order_id = ?\";\nprivate static final String UPDATE_AUTO_SQL = \"UPDATE t_order_auto SET status = ? WHERE order_id = ?\";\nprivate static final String UPDATE_BATCH_SQL = \"UPDATE t_order SET status=? 
WHERE status=?\";\nprivate static final String UPDATE_WITH_ERROR_COLUMN = \"UPDATE t_order SET error_column=?\";\n@Test\npublic void assertExecuteBatchWithoutAddBatch() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL)) {\nint[] actual = preparedStatement.executeBatch();\nassertThat(actual, is(new int[0]));\n}\n}\n@Test\npublic void assertAddBatch() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL)) {\npreparedStatement.setInt(1, 3101);\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 3102);\npreparedStatement.setInt(2, 12);\npreparedStatement.setInt(3, 12);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 3111);\npreparedStatement.setInt(2, 21);\npreparedStatement.setInt(3, 21);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 3112);\npreparedStatement.setInt(2, 22);\npreparedStatement.setInt(3, 22);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\nint[] result = preparedStatement.executeBatch();\nfor (int each : result) {\nassertThat(each, is(1));\n}\n}\n}\n@Test\npublic void assertMultiValuesWithGenerateShardingKeyColumn() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, Statement.RETURN_GENERATED_KEYS);\nStatement queryStatement = connection.createStatement()) {\nResetIncrementKeyGenerateAlgorithmFixture.getCOUNT().set(0);\npreparedStatement.setString(1, \"BATCH1\");\npreparedStatement.setString(2, \"BATCH2\");\npreparedStatement.setString(3, \"BATCH3\");\npreparedStatement.setString(4, \"BATCH4\");\nint result = preparedStatement.executeUpdate();\nassertThat(result, is(4));\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(1L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(2L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(3L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(4L));\nassertFalse(generateKeyResultSet.next());\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 1L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH1\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 2L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH2\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 3L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH3\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 4L))) 
{\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH4\"));\n}\n}\n}\n@Test\npublic void assertAddBatchMultiValuesWithGenerateShardingKeyColumn() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, Statement.RETURN_GENERATED_KEYS);\nStatement queryStatement = connection.createStatement()) {\nResetIncrementKeyGenerateAlgorithmFixture.getCOUNT().set(10);\npreparedStatement.setString(1, \"BATCH1\");\npreparedStatement.setString(2, \"BATCH2\");\npreparedStatement.setString(3, \"BATCH3\");\npreparedStatement.setString(4, \"BATCH4\");\npreparedStatement.addBatch();\npreparedStatement.setString(1, \"BATCH5\");\npreparedStatement.setString(2, \"BATCH6\");\npreparedStatement.setString(3, \"BATCH7\");\npreparedStatement.setString(4, \"BATCH8\");\npreparedStatement.addBatch();\nint[] result = preparedStatement.executeBatch();\nfor (int each : result) {\nassertThat(each, is(4));\n}\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(11L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(12L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(13L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(14L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(15L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(16L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(17L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(18L));\nassertFalse(generateKeyResultSet.next());\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 11L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH1\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 12L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH2\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 13L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH3\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 14L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH4\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 15L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH5\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 16L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH6\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 17L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), 
is(\"BATCH7\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 18L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH8\"));\n}\n}\n}\n@Test\npublic void assertAddBatchWithoutGenerateKeyColumn() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITHOUT_GENERATE_KEY_SQL, Statement.RETURN_GENERATED_KEYS);\nStatement queryStatement = connection.createStatement()) {\npreparedStatement.setInt(1, 11);\npreparedStatement.setInt(2, 11);\npreparedStatement.setString(3, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 12);\npreparedStatement.setInt(2, 12);\npreparedStatement.setString(3, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 21);\npreparedStatement.setInt(2, 21);\npreparedStatement.setString(3, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 22);\npreparedStatement.setInt(2, 22);\npreparedStatement.setString(3, \"BATCH\");\npreparedStatement.addBatch();\nint[] result = preparedStatement.executeBatch();\nfor (int each : result) {\nassertThat(each, is(1));\n}\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(1L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(2L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(3L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(4L));\nassertFalse(generateKeyResultSet.next());\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 11, 11))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(1));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 12, 12))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(2));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 21, 21))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(3));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 22, 22))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(4));\n}\n}\n}\n@Test\npublic void assertAddBatchWithGenerateKeyColumn() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL, Statement.RETURN_GENERATED_KEYS);\nStatement queryStatement = connection.createStatement()) {\npreparedStatement.setInt(1, 1);\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 2);\npreparedStatement.setInt(2, 12);\npreparedStatement.setInt(3, 12);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 3);\npreparedStatement.setInt(2, 21);\npreparedStatement.setInt(3, 21);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 4);\npreparedStatement.setInt(2, 22);\npreparedStatement.setInt(3, 
22);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\nint[] result = preparedStatement.executeBatch();\nfor (int each : result) {\nassertThat(each, is(1));\n}\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(1L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(2L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(3L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(4L));\nassertFalse(generateKeyResultSet.next());\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 11, 11))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(1));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 12, 12))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(2));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 21, 21))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(3));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 22, 22))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(4));\n}\n}\n}\n@Test\npublic void assertGeneratedKeysForBatchInsert() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL_WITH_MULTI_VALUES, Statement.RETURN_GENERATED_KEYS)) {\npreparedStatement.setInt(1, 11);\npreparedStatement.setInt(2, 11);\npreparedStatement.setString(3, \"MULTI\");\npreparedStatement.setInt(4, 12);\npreparedStatement.setInt(5, 12);\npreparedStatement.setString(6, \"MULTI\");\nint result = preparedStatement.executeUpdate();\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertThat(result, is(2));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getInt(1), is(1));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getInt(1), is(2));\n}\n}\n@Test\npublic void assertAddGetGeneratedKeysForNoGeneratedValues() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_SINGLE_TABLE_SQL, Statement.RETURN_GENERATED_KEYS)) {\npreparedStatement.setString(1, \"admin\");\npreparedStatement.execute();\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getInt(1), is(1));\n}\n}\n@Test\n@Test\npublic void assertUpdateBatch() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(UPDATE_BATCH_SQL)) {\npreparedStatement.setString(1, \"batch\");\npreparedStatement.setString(2, \"init\");\npreparedStatement.addBatch();\npreparedStatement.setString(1, \"batch\");\npreparedStatement.setString(2, \"init\");\npreparedStatement.addBatch();\npreparedStatement.setString(1, \"init\");\npreparedStatement.setString(2, \"batch\");\npreparedStatement.addBatch();\nint[] result = preparedStatement.executeBatch();\nassertThat(result.length, 
is(3));\nassertThat(result[0], is(4));\nassertThat(result[1], is(0));\nassertThat(result[2], is(4));\n}\n}\n@Test\npublic void assertExecuteGetResultSet() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(UPDATE_SQL)) {\npreparedStatement.setString(1, \"OK\");\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.execute();\nassertNull(preparedStatement.getResultSet());\n}\n}\n@Test\npublic void assertExecuteUpdateGetResultSet() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(UPDATE_SQL)) {\npreparedStatement.setString(1, \"OK\");\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.executeUpdate();\nassertNull(preparedStatement.getResultSet());\n}\n}\n@Test\npublic void assertExecuteUpdateAutoTableGetResultSet() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(UPDATE_AUTO_SQL)) {\npreparedStatement.setString(1, \"OK\");\npreparedStatement.setInt(2, 10);\npreparedStatement.executeUpdate();\nassertNull(preparedStatement.getResultSet());\n}\n}\n@Test\npublic void assertExecuteSelectColumnGetResultSet() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(SELECT_SQL_COLUMN_WITH_PARAMETER_MARKER)) {\npreparedStatement.setString(1, \"item_id\");\npreparedStatement.executeQuery();\n}\n}\n@Test\npublic void assertExecuteSelectAutoTableGetResultSet() throws SQLException {\nCollection result = Arrays.asList(1001, 1100, 1101);\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(SELECT_AUTO_SQL)) {\npreparedStatement.setInt(1, 1001);\nint count = 0;\ntry (ResultSet resultSet = preparedStatement.executeQuery()) {\nwhile (resultSet.next()) {\nassertTrue(result.contains(resultSet.getInt(2)));\ncount++;\n}\n}\nassertThat(result.size(), is(count));\n}\n}\n@Test\npublic void assertExecuteSelectWithOrderByAndExecuteGetResultSet() throws SQLException {\nCollection result = Arrays.asList(1000, 1001, 1100, 1101);\nint count = 0;\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(SELECT_WITH_ORDER_BY)) {\npreparedStatement.executeQuery();\ntry (ResultSet resultSet = preparedStatement.getResultSet()) {\nwhile (resultSet.next()) {\ncount++;\n}\n}\n}\nassertThat(count, is(result.size()));\n}\n@Test\npublic void assertClearBatch() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL)) {\npreparedStatement.setInt(1, 3101);\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.clearBatch();\nint[] result = preparedStatement.executeBatch();\nassertThat(result.length, is(0));\n}\n}\n@Test\npublic void assertExecuteBatchRepeatedly() throws SQLException {\ntry (Connection connection = getShardingSphereDataSource().getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL)) {\npreparedStatement.setInt(1, 3101);\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.setString(4, 
\"BATCH\");\npreparedStatement.addBatch();\nassertThat(preparedStatement.executeBatch().length, is(1));\npreparedStatement.setInt(1, 3103);\npreparedStatement.setInt(2, 13);\npreparedStatement.setInt(3, 13);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\nassertThat(preparedStatement.executeBatch().length, is(1));\n}\n}\n@Test\npublic void assertInitPreparedStatementExecutorWithReplayMethod() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(SELECT_SQL_WITH_PARAMETER_MARKER)) {\npreparedStatement.setQueryTimeout(1);\npreparedStatement.setInt(1, 11);\npreparedStatement.setInt(2, 11);\npreparedStatement.executeQuery();\nassertThat(preparedStatement.getQueryTimeout(), is(1));\n}\n}\n@Test(expected = SQLException.class)\npublic void assertQueryWithNull() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(null)) {\npreparedStatement.executeQuery();\n}\n}\n@Test(expected = SQLException.class)\npublic void assertQueryWithEmptyString() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(\"\")) {\npreparedStatement.executeQuery();\n}\n}\n@Test\npublic void assertGetParameterMetaData() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(SELECT_SQL_WITH_PARAMETER_MARKER)) {\nassertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(2));\n}\n}\n@Test(expected = SQLException.class)\npublic void assertColumnNotFoundException() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(UPDATE_WITH_ERROR_COLUMN)) {\npreparedStatement.setString(1, \"OK\");\npreparedStatement.executeUpdate();\n}\n}\n}", + "context_after": "class ShardingSpherePreparedStatementTest extends AbstractShardingSphereDataSourceForShardingTest {\nprivate static final String INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL = \"INSERT INTO t_user (name) VALUES (?),(?),(?),(?)\";\nprivate static final String SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL = \"SELECT name FROM t_user WHERE id=%dL\";\nprivate static final String INSERT_WITH_GENERATE_KEY_SQL = \"INSERT INTO t_order_item (item_id, order_id, user_id, status) VALUES (?, ?, ?, ?)\";\nprivate static final String INSERT_WITHOUT_GENERATE_KEY_SQL = \"INSERT INTO t_order_item (order_id, user_id, status) VALUES (?, ?, ?)\";\nprivate static final String INSERT_WITH_GENERATE_KEY_SQL_WITH_MULTI_VALUES = \"INSERT INTO t_order_item (item_id, order_id, user_id, status) VALUES (1, ?, ?, ?), (2, ?, ?, ?)\";\nprivate static final String INSERT_ON_DUPLICATE_KEY_SQL = \"INSERT INTO t_order_item (item_id, order_id, user_id, status) VALUES (?, ?, ?, ?), (?, ?, ?, ?) ON DUPLICATE KEY UPDATE status = ?\";\nprivate static final String INSERT_SINGLE_TABLE_SQL = \"INSERT INTO t_role (user_name) VALUES (?)\";\nprivate static final String SELECT_SQL_WITHOUT_PARAMETER_MARKER = \"SELECT item_id FROM t_order_item WHERE user_id = %d AND order_id= %s AND status = 'BATCH'\";\nprivate static final String SELECT_SQL_WITH_PARAMETER_MARKER = \"SELECT item_id FROM t_order_item WHERE user_id = ? AND order_id= ? 
AND status = 'BATCH'\";\nprivate static final String SELECT_SQL_WITH_PARAMETER_MARKER_RETURN_STATUS = \"SELECT item_id, user_id, status FROM t_order_item WHERE order_id= ? AND user_id = ?\";\nprivate static final String SELECT_WITH_ORDER_BY = \"SELECT order_id, user_id, status FROM t_order ORDER BY order_id\";\nprivate static final String SELECT_AUTO_SQL = \"SELECT item_id, order_id, status FROM t_order_item_auto WHERE order_id >= ?\";\nprivate static final String SELECT_SQL_COLUMN_WITH_PARAMETER_MARKER = \"SELECT ?, order_id, status FROM t_order_item_auto\";\nprivate static final String UPDATE_SQL = \"UPDATE t_order SET status = ? WHERE user_id = ? AND order_id = ?\";\nprivate static final String UPDATE_AUTO_SQL = \"UPDATE t_order_auto SET status = ? WHERE order_id = ?\";\nprivate static final String UPDATE_BATCH_SQL = \"UPDATE t_order SET status=? WHERE status=?\";\nprivate static final String UPDATE_WITH_ERROR_COLUMN = \"UPDATE t_order SET error_column=?\";\n@Test\npublic void assertExecuteBatchWithoutAddBatch() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL)) {\nint[] actual = preparedStatement.executeBatch();\nassertThat(actual, is(new int[0]));\n}\n}\n@Test\npublic void assertAddBatch() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL)) {\npreparedStatement.setInt(1, 3101);\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 3102);\npreparedStatement.setInt(2, 12);\npreparedStatement.setInt(3, 12);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 3111);\npreparedStatement.setInt(2, 21);\npreparedStatement.setInt(3, 21);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 3112);\npreparedStatement.setInt(2, 22);\npreparedStatement.setInt(3, 22);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\nint[] result = preparedStatement.executeBatch();\nfor (int each : result) {\nassertThat(each, is(1));\n}\n}\n}\n@Test\npublic void assertMultiValuesWithGenerateShardingKeyColumn() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, Statement.RETURN_GENERATED_KEYS);\nStatement queryStatement = connection.createStatement()) {\nResetIncrementKeyGenerateAlgorithmFixture.getCOUNT().set(0);\npreparedStatement.setString(1, \"BATCH1\");\npreparedStatement.setString(2, \"BATCH2\");\npreparedStatement.setString(3, \"BATCH3\");\npreparedStatement.setString(4, \"BATCH4\");\nint result = preparedStatement.executeUpdate();\nassertThat(result, is(4));\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(1L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(2L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(3L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), 
is(4L));\nassertFalse(generateKeyResultSet.next());\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 1L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH1\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 2L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH2\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 3L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH3\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 4L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH4\"));\n}\n}\n}\n@Test\npublic void assertAddBatchMultiValuesWithGenerateShardingKeyColumn() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, Statement.RETURN_GENERATED_KEYS);\nStatement queryStatement = connection.createStatement()) {\nResetIncrementKeyGenerateAlgorithmFixture.getCOUNT().set(10);\npreparedStatement.setString(1, \"BATCH1\");\npreparedStatement.setString(2, \"BATCH2\");\npreparedStatement.setString(3, \"BATCH3\");\npreparedStatement.setString(4, \"BATCH4\");\npreparedStatement.addBatch();\npreparedStatement.setString(1, \"BATCH5\");\npreparedStatement.setString(2, \"BATCH6\");\npreparedStatement.setString(3, \"BATCH7\");\npreparedStatement.setString(4, \"BATCH8\");\npreparedStatement.addBatch();\nint[] result = preparedStatement.executeBatch();\nfor (int each : result) {\nassertThat(each, is(4));\n}\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(11L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(12L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(13L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(14L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(15L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(16L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(17L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(18L));\nassertFalse(generateKeyResultSet.next());\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 11L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH1\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 12L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH2\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 13L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH3\"));\n}\ntry (ResultSet 
resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 14L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH4\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 15L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH5\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 16L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH6\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 17L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH7\"));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL, 18L))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getString(1), is(\"BATCH8\"));\n}\n}\n}\n@Test\npublic void assertAddBatchWithoutGenerateKeyColumn() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITHOUT_GENERATE_KEY_SQL, Statement.RETURN_GENERATED_KEYS);\nStatement queryStatement = connection.createStatement()) {\npreparedStatement.setInt(1, 11);\npreparedStatement.setInt(2, 11);\npreparedStatement.setString(3, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 12);\npreparedStatement.setInt(2, 12);\npreparedStatement.setString(3, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 21);\npreparedStatement.setInt(2, 21);\npreparedStatement.setString(3, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 22);\npreparedStatement.setInt(2, 22);\npreparedStatement.setString(3, \"BATCH\");\npreparedStatement.addBatch();\nint[] result = preparedStatement.executeBatch();\nfor (int each : result) {\nassertThat(each, is(1));\n}\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(1L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(2L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(3L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(4L));\nassertFalse(generateKeyResultSet.next());\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 11, 11))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(1));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 12, 12))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(2));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 21, 21))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(3));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 22, 22))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(4));\n}\n}\n}\n@Test\npublic void assertAddBatchWithGenerateKeyColumn() throws SQLException {\ntry (\nConnection 
connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL, Statement.RETURN_GENERATED_KEYS);\nStatement queryStatement = connection.createStatement()) {\npreparedStatement.setInt(1, 1);\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 2);\npreparedStatement.setInt(2, 12);\npreparedStatement.setInt(3, 12);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 3);\npreparedStatement.setInt(2, 21);\npreparedStatement.setInt(3, 21);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.setInt(1, 4);\npreparedStatement.setInt(2, 22);\npreparedStatement.setInt(3, 22);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\nint[] result = preparedStatement.executeBatch();\nfor (int each : result) {\nassertThat(each, is(1));\n}\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(1L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(2L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(3L));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getLong(1), is(4L));\nassertFalse(generateKeyResultSet.next());\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 11, 11))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(1));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 12, 12))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(2));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 21, 21))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(3));\n}\ntry (ResultSet resultSet = queryStatement.executeQuery(String.format(SELECT_SQL_WITHOUT_PARAMETER_MARKER, 22, 22))) {\nassertTrue(resultSet.next());\nassertThat(resultSet.getInt(1), is(4));\n}\n}\n}\n@Test\npublic void assertGeneratedKeysForBatchInsert() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL_WITH_MULTI_VALUES, Statement.RETURN_GENERATED_KEYS)) {\npreparedStatement.setInt(1, 11);\npreparedStatement.setInt(2, 11);\npreparedStatement.setString(3, \"MULTI\");\npreparedStatement.setInt(4, 12);\npreparedStatement.setInt(5, 12);\npreparedStatement.setString(6, \"MULTI\");\nint result = preparedStatement.executeUpdate();\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertThat(result, is(2));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getInt(1), is(1));\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getInt(1), is(2));\n}\n}\n@Test\npublic void assertAddGetGeneratedKeysForNoGeneratedValues() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_SINGLE_TABLE_SQL, Statement.RETURN_GENERATED_KEYS)) {\npreparedStatement.setString(1, 
\"admin\");\npreparedStatement.execute();\nResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys();\nassertTrue(generateKeyResultSet.next());\nassertThat(generateKeyResultSet.getInt(1), is(1));\n}\n}\n@Test\n@Test\npublic void assertUpdateBatch() throws SQLException {\ntry (\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(UPDATE_BATCH_SQL)) {\npreparedStatement.setString(1, \"batch\");\npreparedStatement.setString(2, \"init\");\npreparedStatement.addBatch();\npreparedStatement.setString(1, \"batch\");\npreparedStatement.setString(2, \"init\");\npreparedStatement.addBatch();\npreparedStatement.setString(1, \"init\");\npreparedStatement.setString(2, \"batch\");\npreparedStatement.addBatch();\nint[] result = preparedStatement.executeBatch();\nassertThat(result.length, is(3));\nassertThat(result[0], is(4));\nassertThat(result[1], is(0));\nassertThat(result[2], is(4));\n}\n}\n@Test\npublic void assertExecuteGetResultSet() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(UPDATE_SQL)) {\npreparedStatement.setString(1, \"OK\");\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.execute();\nassertNull(preparedStatement.getResultSet());\n}\n}\n@Test\npublic void assertExecuteUpdateGetResultSet() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(UPDATE_SQL)) {\npreparedStatement.setString(1, \"OK\");\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.executeUpdate();\nassertNull(preparedStatement.getResultSet());\n}\n}\n@Test\npublic void assertExecuteUpdateAutoTableGetResultSet() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(UPDATE_AUTO_SQL)) {\npreparedStatement.setString(1, \"OK\");\npreparedStatement.setInt(2, 10);\npreparedStatement.executeUpdate();\nassertNull(preparedStatement.getResultSet());\n}\n}\n@Test\npublic void assertExecuteSelectColumnGetResultSet() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(SELECT_SQL_COLUMN_WITH_PARAMETER_MARKER)) {\npreparedStatement.setString(1, \"item_id\");\npreparedStatement.executeQuery();\n}\n}\n@Test\npublic void assertExecuteSelectAutoTableGetResultSet() throws SQLException {\nCollection result = Arrays.asList(1001, 1100, 1101);\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(SELECT_AUTO_SQL)) {\npreparedStatement.setInt(1, 1001);\nint count = 0;\ntry (ResultSet resultSet = preparedStatement.executeQuery()) {\nwhile (resultSet.next()) {\nassertTrue(result.contains(resultSet.getInt(2)));\ncount++;\n}\n}\nassertThat(result.size(), is(count));\n}\n}\n@Test\npublic void assertExecuteSelectWithOrderByAndExecuteGetResultSet() throws SQLException {\nCollection result = Arrays.asList(1000, 1001, 1100, 1101);\nint count = 0;\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(SELECT_WITH_ORDER_BY)) {\npreparedStatement.executeQuery();\ntry (ResultSet resultSet = preparedStatement.getResultSet()) {\nwhile (resultSet.next()) {\ncount++;\n}\n}\n}\nassertThat(count, is(result.size()));\n}\n@Test\npublic void assertClearBatch() throws SQLException {\ntry 
(\nConnection connection = getShardingSphereDataSource().getConnection();\nPreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL)) {\npreparedStatement.setInt(1, 3101);\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\npreparedStatement.clearBatch();\nint[] result = preparedStatement.executeBatch();\nassertThat(result.length, is(0));\n}\n}\n@Test\npublic void assertExecuteBatchRepeatedly() throws SQLException {\ntry (Connection connection = getShardingSphereDataSource().getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(INSERT_WITH_GENERATE_KEY_SQL)) {\npreparedStatement.setInt(1, 3101);\npreparedStatement.setInt(2, 11);\npreparedStatement.setInt(3, 11);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\nassertThat(preparedStatement.executeBatch().length, is(1));\npreparedStatement.setInt(1, 3103);\npreparedStatement.setInt(2, 13);\npreparedStatement.setInt(3, 13);\npreparedStatement.setString(4, \"BATCH\");\npreparedStatement.addBatch();\nassertThat(preparedStatement.executeBatch().length, is(1));\n}\n}\n@Test\npublic void assertInitPreparedStatementExecutorWithReplayMethod() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(SELECT_SQL_WITH_PARAMETER_MARKER)) {\npreparedStatement.setQueryTimeout(1);\npreparedStatement.setInt(1, 11);\npreparedStatement.setInt(2, 11);\npreparedStatement.executeQuery();\nassertThat(preparedStatement.getQueryTimeout(), is(1));\n}\n}\n@Test(expected = SQLException.class)\npublic void assertQueryWithNull() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(null)) {\npreparedStatement.executeQuery();\n}\n}\n@Test(expected = SQLException.class)\npublic void assertQueryWithEmptyString() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(\"\")) {\npreparedStatement.executeQuery();\n}\n}\n@Test\npublic void assertGetParameterMetaData() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(SELECT_SQL_WITH_PARAMETER_MARKER)) {\nassertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(2));\n}\n}\n@Test(expected = SQLException.class)\npublic void assertColumnNotFoundException() throws SQLException {\ntry (PreparedStatement preparedStatement = getShardingSphereDataSource().getConnection().prepareStatement(UPDATE_WITH_ERROR_COLUMN)) {\npreparedStatement.setString(1, \"OK\");\npreparedStatement.executeUpdate();\n}\n}\n}" + }, + { + "comment": "Maybe delete the ``` catch (Exception e) { \t\t\te.printStackTrace(); \t\t\tfail(e.getMessage()); \t\t} ``` block here as well", + "method_body": "public void tryLocalExecution() {\ntry {\nPackagedProgram packagedProgramMock = mock(PackagedProgram.class);\nwhen(packagedProgramMock.isUsingInteractiveMode()).thenReturn(true);\ndoAnswer(new Answer() {\n@Override\npublic Void answer(InvocationOnMock invocation) throws Throwable {\nExecutionEnvironment.createLocalEnvironment();\nreturn null;\n}\n}).when(packagedProgramMock).invokeInteractiveModeForExecution();\ntry {\nfinal ClusterClient client = new MiniClusterClient(new Configuration(), 
MINI_CLUSTER_RESOURCE.getMiniCluster());\nclient.setDetached(true);\nclient.run(packagedProgramMock, 1);\nfail(\"Creating the local execution environment should not be possible\");\n}\ncatch (InvalidProgramException e) {\n}\n}\ncatch (Exception e) {\ne.printStackTrace();\nfail(e.getMessage());\n}\n}", + "target_code": "try {", + "method_body_after": "public void tryLocalExecution() throws ProgramInvocationException, ProgramMissingJobException {\nPackagedProgram packagedProgramMock = mock(PackagedProgram.class);\nwhen(packagedProgramMock.isUsingInteractiveMode()).thenReturn(true);\ndoAnswer(new Answer() {\n@Override\npublic Void answer(InvocationOnMock invocation) throws Throwable {\nExecutionEnvironment.createLocalEnvironment();\nreturn null;\n}\n}).when(packagedProgramMock).invokeInteractiveModeForExecution();\ntry {\nfinal ClusterClient client = new MiniClusterClient(new Configuration(), MINI_CLUSTER_RESOURCE.getMiniCluster());\nclient.setDetached(true);\nclient.run(packagedProgramMock, 1);\nfail(\"Creating the local execution environment should not be possible\");\n}\ncatch (InvalidProgramException e) {\n}\n}", + "context_before": "class ClientTest extends TestLogger {\n@ClassRule\npublic static final MiniClusterResource MINI_CLUSTER_RESOURCE =\nnew MiniClusterResource(new MiniClusterResourceConfiguration.Builder().build());\nprivate PackagedProgram program;\nprivate Configuration config;\nprivate static final String ACCUMULATOR_NAME = \"test_accumulator\";\nprivate static final String FAIL_MESSAGE = \"Invalid program should have thrown ProgramInvocationException.\";\n@Before\npublic void setUp() throws Exception {\nExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();\nenv.generateSequence(1, 1000).output(new DiscardingOutputFormat());\nPlan plan = env.createProgramPlan();\nJobWithJars jobWithJars = new JobWithJars(plan, Collections.emptyList(), Collections.emptyList());\nprogram = mock(PackagedProgram.class);\nwhen(program.getPlanWithJars()).thenReturn(jobWithJars);\nfinal int freePort = NetUtils.getAvailablePort();\nconfig = new Configuration();\nconfig.setString(JobManagerOptions.ADDRESS, \"localhost\");\nconfig.setInteger(JobManagerOptions.PORT, freePort);\nconfig.setString(AkkaOptions.ASK_TIMEOUT, AkkaOptions.ASK_TIMEOUT.defaultValue());\n}\n/**\n* Tests that invalid detached mode programs fail.\n*/\n@Test\npublic void testDetachedMode() throws Exception{\nfinal ClusterClient out = new MiniClusterClient(new Configuration(), MINI_CLUSTER_RESOURCE.getMiniCluster());\nout.setDetached(true);\ntry {\nPackagedProgram prg = new PackagedProgram(TestExecuteTwice.class);\nout.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.EXECUTE_TWICE_MESSAGE,\ne.getCause().getMessage());\n}\ntry {\nPackagedProgram prg = new PackagedProgram(TestEager.class);\nout.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.JOB_RESULT_MESSAGE + DetachedJobExecutionResult.EAGER_FUNCTION_MESSAGE,\ne.getCause().getMessage());\n}\ntry {\nPackagedProgram prg = new PackagedProgram(TestGetRuntime.class);\nout.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.JOB_RESULT_MESSAGE,\ne.getCause().getMessage());\n}\ntry {\nPackagedProgram prg = new 
PackagedProgram(TestGetJobID.class);\nout.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.JOB_RESULT_MESSAGE,\ne.getCause().getMessage());\n}\ntry {\nPackagedProgram prg = new PackagedProgram(TestGetAccumulator.class);\nout.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.JOB_RESULT_MESSAGE + DetachedJobExecutionResult.EAGER_FUNCTION_MESSAGE,\ne.getCause().getMessage());\n}\ntry {\nPackagedProgram prg = new PackagedProgram(TestGetAllAccumulator.class);\nout.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.JOB_RESULT_MESSAGE,\ne.getCause().getMessage());\n}\n}\n/**\n* This test verifies correct job submission messaging logic and plan translation calls.\n*/\n@Test\npublic void shouldSubmitToJobClient() throws Exception {\nfinal ClusterClient out = new MiniClusterClient(new Configuration(), MINI_CLUSTER_RESOURCE.getMiniCluster());\nout.setDetached(true);\nJobSubmissionResult result = out.run(program.getPlanWithJars(), 1);\nassertNotNull(result);\nprogram.deleteExtractedLibraries();\n}\n/**\n* This test verifies that the local execution environment cannot be created when\n* the program is submitted through a client.\n*/\n@Test\npublic void testGetExecutionPlan() {\ntry {\nPackagedProgram prg = new PackagedProgram(TestOptimizerPlan.class, \"/dev/random\", \"/tmp\");\nassertNotNull(prg.getPreviewPlan());\nOptimizer optimizer = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);\nOptimizedPlan op = (OptimizedPlan) ClusterClient.getOptimizedPlan(optimizer, prg, 1);\nassertNotNull(op);\nPlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();\nassertNotNull(dumper.getOptimizerPlanAsJSON(op));\nPlanJSONDumpGenerator dumper2 = new PlanJSONDumpGenerator();\ndumper2.setEncodeForHTML(true);\nString htmlEscaped = dumper2.getOptimizerPlanAsJSON(op);\nassertEquals(-1, htmlEscaped.indexOf('\\\\'));\n}\ncatch (Exception e) {\ne.printStackTrace();\nfail(e.getMessage());\n}\n}\n/**\n* A test job.\n*/\npublic static class TestOptimizerPlan implements ProgramDescription {\n@SuppressWarnings(\"serial\")\npublic static void main(String[] args) throws Exception {\nif (args.length < 2) {\nSystem.err.println(\"Usage: TestOptimizerPlan <input-file-path> <output-file-path>\");\nreturn;\n}\nExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nDataSet<Tuple2<Long, Long>> input = env.readCsvFile(args[0])\n.fieldDelimiter(\"\\t\").types(Long.class, Long.class);\nDataSet<Tuple2<Long, Long>> result = input.map(\nnew MapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>>() {\npublic Tuple2<Long, Long> map(Tuple2<Long, Long> value){\nreturn new Tuple2<Long, Long>(value.f0, value.f1 + 1);\n}\n});\nresult.writeAsCsv(args[1], \"\\n\", \"\\t\");\nenv.execute();\n}\n@Override\npublic String getDescription() {\nreturn \"TestOptimizerPlan <input-file-path> <output-file-path>\";\n}\n}\n/**\n* Test job that calls {@link ExecutionEnvironment\n*/\npublic static final class TestExecuteTwice {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).output(new DiscardingOutputFormat());\nenv.execute();\nenv.fromElements(1, 2).collect();\n}\n}\n/**\n* Test job that uses an eager sink.\n*/\npublic static final class TestEager {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env 
= ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).collect();\n}\n}\n/**\n* Test job that retrieves the net runtime from the {@link JobExecutionResult}.\n*/\npublic static final class TestGetRuntime {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).output(new DiscardingOutputFormat());\nenv.execute().getNetRuntime();\n}\n}\n/**\n* Test job that retrieves the job ID from the {@link JobExecutionResult}.\n*/\npublic static final class TestGetJobID {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).output(new DiscardingOutputFormat());\nenv.execute().getJobID();\n}\n}\n/**\n* Test job that retrieves an accumulator from the {@link JobExecutionResult}.\n*/\npublic static final class TestGetAccumulator {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).output(new DiscardingOutputFormat());\nenv.execute().getAccumulatorResult(ACCUMULATOR_NAME);\n}\n}\n/**\n* Test job that retrieves all accumulators from the {@link JobExecutionResult}.\n*/\npublic static final class TestGetAllAccumulator {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).output(new DiscardingOutputFormat());\nenv.execute().getAllAccumulatorResults();\n}\n}\n}", + "context_after": "class ClientTest extends TestLogger {\n@ClassRule\npublic static final MiniClusterResource MINI_CLUSTER_RESOURCE =\nnew MiniClusterResource(new MiniClusterResourceConfiguration.Builder().build());\nprivate PackagedProgram program;\nprivate Configuration config;\nprivate static final String ACCUMULATOR_NAME = \"test_accumulator\";\nprivate static final String FAIL_MESSAGE = \"Invalid program should have thrown ProgramInvocationException.\";\n@Before\npublic void setUp() throws Exception {\nExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();\nenv.generateSequence(1, 1000).output(new DiscardingOutputFormat());\nPlan plan = env.createProgramPlan();\nJobWithJars jobWithJars = new JobWithJars(plan, Collections.emptyList(), Collections.emptyList());\nprogram = mock(PackagedProgram.class);\nwhen(program.getPlanWithJars()).thenReturn(jobWithJars);\nfinal int freePort = NetUtils.getAvailablePort();\nconfig = new Configuration();\nconfig.setString(JobManagerOptions.ADDRESS, \"localhost\");\nconfig.setInteger(JobManagerOptions.PORT, freePort);\nconfig.setString(AkkaOptions.ASK_TIMEOUT, AkkaOptions.ASK_TIMEOUT.defaultValue());\n}\n/**\n* Tests that invalid detached mode programs fail.\n*/\n@Test\npublic void testDetachedMode() throws Exception{\nfinal ClusterClient clusterClient = new MiniClusterClient(new Configuration(), MINI_CLUSTER_RESOURCE.getMiniCluster());\nclusterClient.setDetached(true);\ntry {\nPackagedProgram prg = new PackagedProgram(TestExecuteTwice.class);\nclusterClient.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.EXECUTE_TWICE_MESSAGE,\ne.getCause().getMessage());\n}\ntry {\nPackagedProgram prg = new PackagedProgram(TestEager.class);\nclusterClient.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) 
{\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.JOB_RESULT_MESSAGE + DetachedJobExecutionResult.EAGER_FUNCTION_MESSAGE,\ne.getCause().getMessage());\n}\ntry {\nPackagedProgram prg = new PackagedProgram(TestGetRuntime.class);\nclusterClient.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.JOB_RESULT_MESSAGE,\ne.getCause().getMessage());\n}\ntry {\nPackagedProgram prg = new PackagedProgram(TestGetJobID.class);\nclusterClient.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.JOB_RESULT_MESSAGE,\ne.getCause().getMessage());\n}\ntry {\nPackagedProgram prg = new PackagedProgram(TestGetAccumulator.class);\nclusterClient.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.JOB_RESULT_MESSAGE + DetachedJobExecutionResult.EAGER_FUNCTION_MESSAGE,\ne.getCause().getMessage());\n}\ntry {\nPackagedProgram prg = new PackagedProgram(TestGetAllAccumulator.class);\nclusterClient.run(prg, 1);\nfail(FAIL_MESSAGE);\n} catch (ProgramInvocationException e) {\nassertEquals(\nDetachedJobExecutionResult.DETACHED_MESSAGE + DetachedJobExecutionResult.JOB_RESULT_MESSAGE,\ne.getCause().getMessage());\n}\n}\n/**\n* This test verifies correct job submission messaging logic and plan translation calls.\n*/\n@Test\npublic void shouldSubmitToJobClient() throws Exception {\nfinal ClusterClient clusterClient = new MiniClusterClient(new Configuration(), MINI_CLUSTER_RESOURCE.getMiniCluster());\nclusterClient.setDetached(true);\nJobSubmissionResult result = clusterClient.run(program.getPlanWithJars(), 1);\nassertNotNull(result);\nprogram.deleteExtractedLibraries();\n}\n/**\n* This test verifies that the local execution environment cannot be created when\n* the program is submitted through a client.\n*/\n@Test\npublic void testGetExecutionPlan() throws ProgramInvocationException {\nPackagedProgram prg = new PackagedProgram(TestOptimizerPlan.class, \"/dev/random\", \"/tmp\");\nassertNotNull(prg.getPreviewPlan());\nOptimizer optimizer = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);\nOptimizedPlan op = (OptimizedPlan) ClusterClient.getOptimizedPlan(optimizer, prg, 1);\nassertNotNull(op);\nPlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();\nassertNotNull(dumper.getOptimizerPlanAsJSON(op));\nPlanJSONDumpGenerator dumper2 = new PlanJSONDumpGenerator();\ndumper2.setEncodeForHTML(true);\nString htmlEscaped = dumper2.getOptimizerPlanAsJSON(op);\nassertEquals(-1, htmlEscaped.indexOf('\\\\'));\n}\n/**\n* A test job.\n*/\npublic static class TestOptimizerPlan implements ProgramDescription {\n@SuppressWarnings(\"serial\")\npublic static void main(String[] args) throws Exception {\nif (args.length < 2) {\nSystem.err.println(\"Usage: TestOptimizerPlan <input-file-path> <output-file-path>\");\nreturn;\n}\nExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nDataSet<Tuple2<Long, Long>> input = env.readCsvFile(args[0])\n.fieldDelimiter(\"\\t\").types(Long.class, Long.class);\nDataSet<Tuple2<Long, Long>> result = input.map(\nnew MapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>>() {\npublic Tuple2<Long, Long> map(Tuple2<Long, Long> value){\nreturn new Tuple2<Long, Long>(value.f0, value.f1 + 1);\n}\n});\nresult.writeAsCsv(args[1], \"\\n\", \"\\t\");\nenv.execute();\n}\n@Override\npublic String getDescription() {\nreturn \"TestOptimizerPlan <input-file-path> <output-file-path>
\";\n}\n}\n/**\n* Test job that calls {@link ExecutionEnvironment\n*/\npublic static final class TestExecuteTwice {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).output(new DiscardingOutputFormat());\nenv.execute();\nenv.fromElements(1, 2).collect();\n}\n}\n/**\n* Test job that uses an eager sink.\n*/\npublic static final class TestEager {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).collect();\n}\n}\n/**\n* Test job that retrieves the net runtime from the {@link JobExecutionResult}.\n*/\npublic static final class TestGetRuntime {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).output(new DiscardingOutputFormat());\nenv.execute().getNetRuntime();\n}\n}\n/**\n* Test job that retrieves the job ID from the {@link JobExecutionResult}.\n*/\npublic static final class TestGetJobID {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).output(new DiscardingOutputFormat());\nenv.execute().getJobID();\n}\n}\n/**\n* Test job that retrieves an accumulator from the {@link JobExecutionResult}.\n*/\npublic static final class TestGetAccumulator {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).output(new DiscardingOutputFormat());\nenv.execute().getAccumulatorResult(ACCUMULATOR_NAME);\n}\n}\n/**\n* Test job that retrieves all accumulators from the {@link JobExecutionResult}.\n*/\npublic static final class TestGetAllAccumulator {\npublic static void main(String[] args) throws Exception {\nfinal ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();\nenv.fromElements(1, 2).output(new DiscardingOutputFormat());\nenv.execute().getAllAccumulatorResults();\n}\n}\n}" + }, + { + "comment": "Remove unused args", + "method_body": "public void testIterateMirrorTable() throws Exception {\nBValue[] args = {};\nBValue[] returns = BRunUtil.invokeFunction(resultMirror, \"testIterateMirrorTable\", connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), \"([{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, \"\n+ \"name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], [{id:1, \"\n+ \"name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, \"\n+ \"name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}])\");\n}", + "target_code": "BValue[] returns = BRunUtil.invokeFunction(resultMirror, \"testIterateMirrorTable\", connectionArgs);", + "method_body_after": "public void testIterateMirrorTable() throws Exception {\nBValue[] returns = BRunUtil.invokeFunction(resultMirror, \"testIterateMirrorTable\", connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), \"([{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, \"\n+ \"name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], [{id:1, \"\n+ \"name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, name:\\\"Devni\\\", address:\\\"Sri 
Lanka\\\"}, {id:3, \"\n+ \"name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}])\");\n}", + "context_before": "class SQLActionsTest {\nprivate static final double DELTA = 0.01;\nprivate CompileResult result;\nprivate CompileResult resultNegative;\nprivate CompileResult resultMirror;\nprivate static final String DB_NAME = \"TEST_SQL_CONNECTOR\";\nprivate DBType dbType;\nprivate MySQLContainer mysql;\nprivate PostgreSQLContainer postgres;\nprivate BValue[] connectionArgs = new BValue[3];\n@Parameters({\"dataClientTestDBType\"})\npublic SQLActionsTest(DBType dataClientTestDBType) {\nthis.dbType = dataClientTestDBType;\n}\n@BeforeClass\npublic void setup() {\nswitch (dbType) {\ncase MYSQL:\nmysql = new MySQLContainer();\nmysql.start();\nString jdbcURL = mysql.getJdbcUrl();\nString username = mysql.getUsername();\nString password = mysql.getPassword();\nSQLDBUtils.initMySQLDatabase(jdbcURL, username, password, \"datafiles/sql/SQLConnectorMYSQLDataFile.sql\");\nconnectionArgs[0] = new BString(jdbcURL);\nconnectionArgs[1] = new BString(username);\nconnectionArgs[2] = new BString(password);\nbreak;\ncase POSTGRES:\npostgres = new PostgreSQLContainer();\npostgres.start();\njdbcURL = postgres.getJdbcUrl();\nusername = postgres.getUsername();\npassword = postgres.getPassword();\nSQLDBUtils.initMySQLDatabase(jdbcURL, username, password, \"datafiles/sql/SQLConnectorPostgresDataFile.sql\");\nconnectionArgs[0] = new BString(jdbcURL);\nconnectionArgs[1] = new BString(username);\nconnectionArgs[2] = new BString(password);\nbreak;\ndefault:\nSQLDBUtils.deleteFiles(new File(SQLDBUtils.DB_DIRECTORY), DB_NAME);\nSQLDBUtils.initHSQLDBDatabase(SQLDBUtils.DB_DIRECTORY, DB_NAME, \"datafiles/sql/SQLConnectorDataFile.sql\");\nconnectionArgs[0] = new BString(\"jdbc:hsqldb:file:./target/tempdb/TEST_SQL_CONNECTOR\");\nconnectionArgs[1] = new BString(\"SA\");\nconnectionArgs[2] = new BString(\"\");\nbreak;\n}\nresult = BCompileUtil.compile(\"test-src/connectors/sql/sql_actions_test.bal\");\nresultNegative = BCompileUtil.compile(\"test-src/connectors/sql/sql_actions_negative_test.bal\");\nresultMirror = BCompileUtil.compile(\"test-src/connectors/sql/sql_mirror_table_test.bal\");\n}\n@Test(groups = \"ConnectorTest\")\npublic void testInsertTableData() {\nBValue[] returns = BRunUtil.invoke(result, \"testInsertTableData\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testCreateTable() {\nBValue[] returns = BRunUtil.invoke(result, \"testCreateTable\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 0);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testUpdateTableData() {\nBValue[] returns = BRunUtil.invoke(result, \"testUpdateTableData\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testGeneratedKeyOnInsert() {\nBValue[] returns = BRunUtil.invoke(result, \"testGeneratedKeyOnInsert\", connectionArgs);\nBString retValue = (BString) returns[0];\nAssert.assertTrue(Integer.parseInt(retValue.stringValue()) > 0);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testGeneratedKeyWithColumn() {\nBValue[] returns = BRunUtil.invoke(result, \"testGeneratedKeyWithColumn\", connectionArgs);\nBString retValue = (BString) returns[0];\nAssert.assertTrue(Integer.parseInt(retValue.stringValue()) > 0);\n}\n@Test(groups = \"ConnectorTest\")\npublic void 
testSelectData() {\nBValue[] returns = BRunUtil.invoke(result, \"testSelectData\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = \"ConnectorTest for int float types\")\npublic void testSelectIntFloatData() {\nBValue[] returns = BRunUtil.invoke(result, \"testSelectIntFloatData\", connectionArgs);\nAssert.assertEquals(returns.length, 4);\nAssert.assertSame(returns[0].getClass(), BInteger.class);\nAssert.assertSame(returns[1].getClass(), BInteger.class);\nAssert.assertSame(returns[2].getClass(), BFloat.class);\nAssert.assertSame(returns[3].getClass(), BFloat.class);\nBInteger intVal = (BInteger) returns[0];\nBInteger longVal = (BInteger) returns[1];\nBFloat floatVal = (BFloat) returns[2];\nBFloat doubleVal = (BFloat) returns[3];\nlong intExpected = 10;\nlong longExpected = 9223372036854774807L;\ndouble floatExpected = 123.34;\ndouble doubleExpected = 2139095039;\nAssert.assertEquals(intVal.intValue(), intExpected);\nAssert.assertEquals(longVal.intValue(), longExpected);\nAssert.assertEquals(floatVal.floatValue(), floatExpected, DELTA);\nAssert.assertEquals(doubleVal.floatValue(), doubleExpected);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testCallProcedure() {\nBValue[] returns = BRunUtil.invoke(result, \"testCallProcedure\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"James\";\nAssert.assertEquals(retValue.stringValue(), expected);\nif (dbType == POSTGRES) {\nAssert.assertEquals(returns[1].stringValue(), \"table\");\n} else {\nAssert.assertEquals(returns[1].stringValue(), \"nil\");\n}\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\", \"PostgresNotSupported\"})\npublic void testCallProcedureWithResultSet() {\nBValue[] returns = BRunUtil.invokeFunction(result, \"testCallProcedureWithResultSet\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\", \"HSQLDBNotSupported\"})\npublic void testCallFunctionWithRefCursor() {\nBValue[] returns = BRunUtil.invokeFunction(result, \"testCallFunctionWithReturningRefcursor\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\", \"PostgresNotSupported\"})\npublic void testCallProcedureWithMultipleResultSets() {\nBValue[] returns = BRunUtil.invoke(result, \"testCallProcedureWithMultipleResultSets\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nBString retValue2 = (BString) returns[1];\nfinal String expected2 = \"John\";\nBString retValue3 = (BString) returns[2];\nfinal String expected3 = \"Watson\";\nAssert.assertEquals(retValue.stringValue(), expected);\nAssert.assertEquals(retValue2.stringValue(), expected2);\nAssert.assertEquals(retValue3.stringValue(), expected3);\n}\n@Test(groups = {\"ConnectorTest\", \"PostgresNotSupported\"})\npublic void testCallProcedureWithMultipleResultSetsAndLowerConstraintCount() {\nBValue[] returns = BRunUtil\n.invoke(resultNegative, \"testCallProcedureWithMultipleResultSetsAndLowerConstraintCount\",\nconnectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"message:\\\"execute stored procedure failed: Mismatching \"\n+ \"record type count: 1 and returned result set 
count: 2 from the stored procedure\\\"\"));\n}\n@Test(groups = {\"ConnectorTest\", \"PostgresNotSupported\"})\npublic void testCallProcedureWithMultipleResultSetsAndNilConstraintCount() {\nBValue[] returns = BRunUtil\n.invoke(resultNegative, \"testCallProcedureWithMultipleResultSetsAndNilConstraintCount\", connectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"message:\\\"execute stored procedure failed: Mismatching \"\n+ \"record type count: 0 and returned result set count: 2 from the stored procedure\\\"\"));\n}\n@Test(groups = {\"ConnectorTest\", \"PostgresNotSupported\"})\npublic void testCallProcedureWithMultipleResultSetsAndHigherConstraintCount() {\nBValue[] returns = BRunUtil\n.invoke(resultNegative, \"testCallProcedureWithMultipleResultSetsAndHigherConstraintCount\",\nconnectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"message:\\\"execute stored procedure failed: Mismatching \"\n+ \"record type count: 3 and returned result set count: 2 from the stored procedure\\\"\"));\n}\n@Test(groups = \"ConnectorTest\")\npublic void testQueryParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testQueryParameters\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testQueryParameters2() {\nBValue[] returns = BRunUtil.invoke(result, \"testQueryParameters2\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testInsertTableDataWithParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testInsertTableDataWithParameters\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testInsertTableDataWithParameters2() {\nBValue[] returns = BRunUtil.invoke(result, \"testInsertTableDataWithParameters2\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testInsertTableDataWithParameters3() {\nBValue[] returns = BRunUtil.invoke(result, \"testInsertTableDataWithParameters3\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testArrayofQueryParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testArrayofQueryParameters\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testBoolArrayofQueryParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testBoolArrayofQueryParameters\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 10);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testOutParameters\", connectionArgs);\nAssert.assertEquals(returns.length, 14);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 10);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 9223372036854774807L);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 123.34D, DELTA);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 
2139095039D);\nAssert.assertEquals(((BBoolean) returns[4]).booleanValue(), true);\nAssert.assertEquals(returns[5].stringValue(), \"Hello\");\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 1234.567D, DELTA);\nAssert.assertEquals(((BInteger) returns[9]).intValue(), 1);\nAssert.assertEquals(((BInteger) returns[10]).intValue(), 5555);\nAssert.assertEquals(returns[11].stringValue(), \"very long text\");\nif (dbType == POSTGRES) {\nAssert.assertEquals(returns[12].stringValue(), \"wso2 ballerina blob test.\");\n} else {\nAssert.assertEquals(returns[12].stringValue(), \"d3NvMiBiYWxsZXJpbmEgYmxvYiB0ZXN0Lg==\");\n}\nAssert.assertEquals(returns[13].stringValue(), \"wso2 ballerina binary test.\");\n}\n@Test(groups = {\"ConnectorTest\"})\npublic void testNullOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testNullOutParameters\", connectionArgs);\nAssert.assertEquals(returns.length, 14);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 0);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 0);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 0.0D);\nAssert.assertEquals(((BBoolean) returns[4]).booleanValue(), false);\nAssert.assertEquals(returns[5].stringValue(), null);\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 0.0D);\nAssert.assertEquals(((BInteger) returns[9]).intValue(), 0);\nAssert.assertEquals(((BInteger) returns[10]).intValue(), 0);\nAssert.assertEquals(returns[11].stringValue(), null);\nAssert.assertEquals(returns[12].stringValue(), null);\nAssert.assertEquals(returns[13].stringValue(), null);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testINParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testINParameters\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testINParametersWithDirectValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testINParametersWithDirectValues\", connectionArgs);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 1);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 9223372036854774807L);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 123.34D, DELTA);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 2139095039.1D);\nAssert.assertEquals(returns[5].stringValue(), \"Hello\");\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 1234.567D, DELTA);\nif (dbType == POSTGRES) {\nAssert.assertNull(returns[9]);\n} else {\nAssert.assertTrue(returns[9].stringValue().equals(returns[10].stringValue()));\n}\n}\n@Test(groups = \"ConnectorTest\")\npublic void testINParametersWithDirectVariables() {\nBValue[] returns = BRunUtil.invoke(result, \"testINParametersWithDirectVariables\", connectionArgs);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 1);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 9223372036854774807L);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 123.34D, DELTA);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 
2139095039.1D);\nAssert.assertEquals(returns[5].stringValue(), \"Hello\");\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 1234.567D, DELTA);\nif (dbType == POSTGRES) {\nAssert.assertNull(returns[9]);\n} else {\nAssert.assertTrue(returns[9].stringValue().equals(returns[10].stringValue()));\n}\n}\n@Test(groups = \"ConnectorTest\")\npublic void testNullINParameterValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testNullINParameterValues\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testINOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testINOutParameters\", connectionArgs);\nAssert.assertEquals(returns.length, 14);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 10);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 9223372036854774807L);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 123.34D, DELTA);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 2139095039D);\nAssert.assertEquals(((BBoolean) returns[4]).booleanValue(), true);\nAssert.assertEquals(returns[5].stringValue(), \"Hello\");\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 1234.567D, DELTA);\nAssert.assertEquals(((BInteger) returns[9]).intValue(), 1);\nAssert.assertEquals(((BInteger) returns[10]).intValue(), 5555);\nAssert.assertEquals(returns[11].stringValue(), \"very long text\");\nAssert.assertEquals(returns[13].stringValue(), \"wso2 ballerina binary test.\");\nif (dbType == POSTGRES) {\nAssert.assertEquals(returns[12].stringValue(), \"wso2 ballerina blob test.\");\n} else {\nAssert.assertEquals(returns[12].stringValue(), \"YmxvYiBkYXRh\");\n}\n}\n@Test(groups = \"ConnectorTest\")\npublic void testNullINOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testNullINOutParameters\", connectionArgs);\nAssert.assertEquals(returns.length, 14);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 0);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 0);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 0.0D);\nAssert.assertEquals(((BBoolean) returns[4]).booleanValue(), false);\nAssert.assertEquals(returns[5].stringValue(), null);\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 0.0D);\nAssert.assertEquals(((BInteger) returns[9]).intValue(), 0);\nAssert.assertEquals(((BInteger) returns[10]).intValue(), 0);\nAssert.assertEquals(returns[11].stringValue(), null);\nAssert.assertEquals(returns[12].stringValue(), null);\nAssert.assertEquals(returns[13].stringValue(), null);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testEmptySQLType() {\nBValue[] returns = BRunUtil.invoke(result, \"testEmptySQLType\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\"})\npublic void testArrayInParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testArrayInParameters\", connectionArgs);\nBInteger retValue = (BInteger) 
returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\nAssert.assertTrue(returns[1] instanceof BIntArray);\nBIntArray intArray = (BIntArray) returns[1];\nAssert.assertEquals(intArray.get(0), 1);\nAssert.assertTrue(returns[2] instanceof BIntArray);\nBIntArray longArray = (BIntArray) returns[2];\nAssert.assertEquals(longArray.get(0), 1503383034226L);\nAssert.assertEquals(longArray.get(1), 1503383034224L);\nAssert.assertEquals(longArray.get(2), 1503383034225L);\nAssert.assertTrue(returns[3] instanceof BFloatArray);\nBFloatArray doubleArray = (BFloatArray) returns[3];\nAssert.assertEquals(doubleArray.get(0), 1503383034226.23D);\nAssert.assertEquals(doubleArray.get(1), 1503383034224.43D);\nAssert.assertEquals(doubleArray.get(2), 1503383034225.123D);\nAssert.assertTrue(returns[4] instanceof BStringArray);\nBStringArray stringArray = (BStringArray) returns[4];\nAssert.assertEquals(stringArray.get(0), \"Hello\");\nAssert.assertEquals(stringArray.get(1), \"Ballerina\");\nAssert.assertTrue(returns[5] instanceof BBooleanArray);\nBBooleanArray booleanArray = (BBooleanArray) returns[5];\nAssert.assertEquals(booleanArray.get(0), 1);\nAssert.assertEquals(booleanArray.get(1), 0);\nAssert.assertEquals(booleanArray.get(2), 1);\nAssert.assertTrue(returns[6] instanceof BFloatArray);\nBFloatArray floatArray = (BFloatArray) returns[6];\nAssert.assertEquals(floatArray.get(0), 245.23);\nAssert.assertEquals(floatArray.get(1), 5559.49);\nAssert.assertEquals(floatArray.get(2), 8796.123);\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\"})\npublic void testArrayOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testArrayOutParameters\", connectionArgs);\nAssert.assertEquals(returns[0].stringValue(), \"[1,2,3]\");\nAssert.assertEquals(returns[1].stringValue(), \"[100000000,200000000,300000000]\");\nAssert.assertEquals(returns[2].stringValue(), \"[245.23,5559.49,8796.123]\");\nAssert.assertEquals(returns[3].stringValue(), \"[245.23,5559.49,8796.123]\");\nAssert.assertEquals(returns[4].stringValue(), \"[true,false,true]\");\nAssert.assertEquals(returns[5].stringValue(), \"[Hello,Ballerina]\");\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\"})\npublic void testArrayInOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testArrayInOutParameters\", connectionArgs);\nAssert.assertEquals(returns[0].stringValue(), \"1\");\nAssert.assertEquals(returns[1].stringValue(), \"[1,2,3]\");\nAssert.assertEquals(returns[2].stringValue(), \"[100000000,200000000,300000000]\");\nAssert.assertEquals(returns[3].stringValue(), \"[245.23,5559.49,8796.123]\");\nAssert.assertEquals(returns[4].stringValue(), \"[245.23,5559.49,8796.123]\");\nAssert.assertEquals(returns[5].stringValue(), \"[true,false,true]\");\nAssert.assertEquals(returns[6].stringValue(), \"[Hello,Ballerina]\");\n}\n@Test(groups = \"ConnectorTest\")\npublic void testBatchUpdate() {\nBValue[] returns = BRunUtil.invoke(result, \"testBatchUpdate\", connectionArgs);\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals(retValue.get(0), 1);\nAssert.assertEquals(retValue.get(1), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testBatchUpdateWithValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testBatchUpdateWithValues\", connectionArgs);\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals(retValue.get(0), 1);\nAssert.assertEquals(retValue.get(1), 1);\n}\n@Test(groups = \"ConnectorTest\", description = \"Test batch update operation with variable parameters\")\npublic void 
testBatchUpdateWithVariables() {\nBValue[] returns = BRunUtil.invoke(result, \"testBatchUpdateWithVariables\", connectionArgs);\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals(retValue.get(0), 1);\nAssert.assertEquals(retValue.get(1), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testBatchUpdateWithFailure() {\nBValue[] returns = BRunUtil.invoke(result, \"testBatchUpdateWithFailure\", connectionArgs);\nint[] expectedResult = {1, 1, -3, -3};\nint totalUpdatedCount = 2;\nif (dbType == MYSQL) {\nexpectedResult[3] = 1;\ntotalUpdatedCount = 3;\n} else if (dbType == POSTGRES) {\nexpectedResult[0] = -3;\nexpectedResult[1] = -3;\ntotalUpdatedCount = 0;\n}\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals(retValue.get(0), expectedResult[0]);\nAssert.assertEquals(retValue.get(1), expectedResult[1]);\nAssert.assertEquals(retValue.get(2), expectedResult[2]);\nAssert.assertEquals(retValue.get(3), expectedResult[3]);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), totalUpdatedCount);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testBatchUpdateWithNullParam() {\nBValue[] returns = BRunUtil.invoke(result, \"testBatchUpdateWithNullParam\", connectionArgs);\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals(retValue.get(0), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testInsertTimeData() {\nBValue[] returns = BRunUtil.invoke(result, \"testDateTimeInParameters\", connectionArgs);\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals((int) retValue.get(0), 1);\nAssert.assertEquals((int) retValue.get(1), 1);\nAssert.assertEquals((int) retValue.get(2), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testDateTimeOutParams() {\nBValue[] args = new BValue[6];\nCalendar cal = Calendar.getInstance();\ncal.clear();\ncal.set(Calendar.HOUR, 14);\ncal.set(Calendar.MINUTE, 15);\ncal.set(Calendar.SECOND, 23);\nlong time = cal.getTimeInMillis();\nargs[0] = new BInteger(time);\ncal.clear();\ncal.set(Calendar.YEAR, 2017);\ncal.set(Calendar.MONTH, 5);\ncal.set(Calendar.DAY_OF_MONTH, 23);\nlong date = cal.getTimeInMillis();\nargs[1] = new BInteger(date);\ncal.clear();\ncal.set(Calendar.HOUR, 16);\ncal.set(Calendar.MINUTE, 33);\ncal.set(Calendar.SECOND, 55);\ncal.set(Calendar.YEAR, 2017);\ncal.set(Calendar.MONTH, 1);\ncal.set(Calendar.DAY_OF_MONTH, 25);\nlong timestamp = cal.getTimeInMillis();\nargs[2] = new BInteger(timestamp);\nargs[3] = connectionArgs[0];\nargs[4] = connectionArgs[1];\nargs[5] = connectionArgs[2];\nBValue[] returns = BRunUtil.invoke(result, \"testDateTimeOutParams\", args);\nAssert.assertEquals(returns.length, 1);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\", description = \"Check date time null in values\")\npublic void testDateTimeNullInValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testDateTimeNullInValues\", connectionArgs);\nAssert.assertEquals(returns.length, 1);\nAssert.assertEquals((returns[0]).stringValue(), \"[{\\\"DATE_TYPE\\\":null,\\\"TIME_TYPE\\\":null,\"\n+ \"\\\"TIMESTAMP_TYPE\\\":null,\\\"DATETIME_TYPE\\\":null}]\");\n}\n@Test(groups = \"ConnectorTest\", description = \"Check date time null out values\")\npublic void testDateTimeNullOutValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testDateTimeNullOutValues\", connectionArgs);\nAssert.assertEquals(returns.length, 1);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\", description = \"Check date time null inout 
values\")\npublic void testDateTimeNullInOutValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testDateTimeNullInOutValues\", connectionArgs);\nAssert.assertEquals(returns.length, 4);\nAssert.assertNull(returns[0].stringValue());\nAssert.assertNull(returns[1].stringValue());\nAssert.assertNull(returns[2].stringValue());\nAssert.assertNull(returns[3].stringValue());\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\"})\npublic void testStructOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testStructOutParameters\", connectionArgs);\nBString retValue = (BString) returns[0];\nString expected = \"10\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(dependsOnGroups = \"ConnectorTest\")\npublic void testCloseConnectionPool() {\nBValue connectionCountQuery = new BString(\"SELECT COUNT(*) as countVal FROM INFORMATION_SCHEMA\"\n+ \".SYSTEM_SESSIONS\");\nif (dbType == MYSQL) {\nconnectionCountQuery = new BString(\"SELECT COUNT(*) FROM information_schema.PROCESSLIST\");\n}\nBValue[] args = new BValue[4];\nfor (int i = 0; i < 3; i++) {\nargs[i] = connectionArgs[i];\n}\nargs[3] = connectionCountQuery;\nBValue[] returns = BRunUtil.invoke(result, \"testCloseConnectionPool\", args);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\", description = \"Check blob binary and clob types types.\")\npublic void testComplexTypeRetrieval() {\nBValue[] returns = BRunUtil.invoke(result, \"testComplexTypeRetrieval\", connectionArgs);\nString expected0 = \"110\"\n+ \"9223372036854774807123.34\"\n+ \"2.139095039E9true\"\n+ \"Hello1234.567\"\n+ \"1234.5671234.5671\"\n+ \"5555very long text\"\n+ \"d3NvMiBiYWxsZXJpbmEgYmxvYiB0ZXN0Lg==\"\n+ \"d3NvMiBiYWxsZXJpbmEgYmluYXJ5IHRlc3Qu\";\nString expected1 = \"1\"\n+ \"2017-02-0311:35:45\"\n+ \"2017-02-03 11:53:00.000000\"\n+ \"2017-02-03 11:53:00.000000\";\nString expected2 = \"[{\\\"ROW_ID\\\":1,\\\"INT_TYPE\\\":10,\"\n+ \"\\\"LONG_TYPE\\\":9223372036854774807,\\\"FLOAT_TYPE\\\":123.34,\\\"DOUBLE_TYPE\\\":2.139095039E9,\"\n+ \"\\\"BOOLEAN_TYPE\\\":true,\\\"STRING_TYPE\\\":\\\"Hello\\\",\\\"NUMERIC_TYPE\\\":1234.567,\\\"DECIMAL_TYPE\\\":1234.567,\"\n+ \"\\\"REAL_TYPE\\\":1234.567,\\\"TINYINT_TYPE\\\":1,\\\"SMALLINT_TYPE\\\":5555,\\\"CLOB_TYPE\\\":\\\"very long text\\\",\"\n+ \"\\\"BLOB_TYPE\\\":\\\"d3NvMiBiYWxsZXJpbmEgYmxvYiB0ZXN0Lg==\\\",\"\n+ \"\\\"BINARY_TYPE\\\":\\\"d3NvMiBiYWxsZXJpbmEgYmluYXJ5IHRlc3Qu\\\"}]\";\nString expected3 = \"[{\\\"ROW_ID\\\":1,\\\"DATE_TYPE\\\":\\\"2017-02-03\\\",\"\n+ \"\\\"TIME_TYPE\\\":\\\"11:35:45\\\",\\\"DATETIME_TYPE\\\":\\\"2017-02-03 11:53:00.000000\\\",\"\n+ \"\\\"TIMESTAMP_TYPE\\\":\\\"2017-02-03 11:53:00.000000\\\"}]\";\nif (dbType == MYSQL) {\nexpected0 = \"1109223372036854774807\"\n+ \"123.342.139095039E9\"\n+ \"trueHello\"\n+ \"1234.5671234.567\"\n+ \"1234.5671\"\n+ \"5555very long text\"\n+ \"d3NvMiBiYWxsZXJpbmEgYmxvYiB0ZXN0Lg==\"\n+ \"d3NvMiBiYWxsZXJpbmEgYmluYXJ5IHRlc3Qu\";\nexpected1 = \"1\"\n+ \"2017-02-0311:35:45\"\n+ \"2017-02-03 11:53:00.0\"\n+ \"2017-02-03 11:53:00.0\";\nexpected2 = \"[{\\\"row_id\\\":1,\\\"int_type\\\":10,\\\"long_type\\\":9223372036854774807,\\\"float_type\\\":123.34,\"\n+ \"\\\"double_type\\\":2.139095039E9,\\\"boolean_type\\\":true,\\\"string_type\\\":\\\"Hello\\\",\"\n+ \"\\\"numeric_type\\\":1234.567,\\\"decimal_type\\\":1234.567,\\\"real_type\\\":1234.567,\\\"tinyint_type\\\":1,\"\n+ \"\\\"smallint_type\\\":5555,\\\"clob_type\\\":\\\"very long text\\\",\"\n+ 
\"\\\"blob_type\\\":\\\"d3NvMiBiYWxsZXJpbmEgYmxvYiB0ZXN0Lg==\\\",\"\n+ \"\\\"binary_type\\\":\\\"d3NvMiBiYWxsZXJpbmEgYmluYXJ5IHRlc3Qu\\\"}]\";\nexpected3 = \"[{\\\"row_id\\\":1,\\\"date_type\\\":\\\"2017-02-03\\\",\\\"time_type\\\":\\\"11:35:45\\\",\"\n+ \"\\\"datetime_type\\\":\\\"2017-02-03 11:53:00.0\\\",\\\"timestamp_type\\\":\\\"2017-02-03 11:53:00.0\\\"}]\";\n}\nAssert.assertEquals(returns[0].stringValue(), expected0);\nAssert.assertEquals(returns[1].stringValue(), expected1);\nAssert.assertEquals(returns[2].stringValue(), expected2);\nAssert.assertEquals(returns[3].stringValue(), expected3);\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failed select query\")\npublic void testFailedSelect() {\nBValue[] returns = BRunUtil.invoke(resultNegative, \"testSelectData\", connectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"execute query failed:\"));\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failed update with generated id action\")\npublic void testFailedGeneratedKeyOnInsert() {\nBValue[] returns = BRunUtil.invoke(resultNegative, \"testGeneratedKeyOnInsert\", connectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"execute update with generated keys failed:\"));\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failed call procedure\")\npublic void testFailedCallProcedure() {\nBValue[] returns = BRunUtil.invoke(resultNegative, \"testCallProcedure\", connectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"execute stored procedure failed:\"));\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failed batch update\")\npublic void testFailedBatchUpdate() {\nBValue[] returns = BRunUtil.invoke(resultNegative, \"testBatchUpdate\", connectionArgs);\nif (dbType == HSQLDB) {\nAssert.assertTrue(returns[0].stringValue().contains(\"execute batch update failed:\"));\n} else {\nAssert.assertTrue(returns[0].stringValue().contains(\"failure\"));\n}\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failed parameter array update\")\npublic void testInvalidArrayofQueryParameters() {\nBValue[] returns = BRunUtil.invoke(resultNegative, \"testInvalidArrayofQueryParameters\", connectionArgs);\nAssert.assertTrue(returns[0].stringValue()\n.contains(\"execute query failed: unsupported array type for parameter index 0\"));\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failure scenario in adding data to mirrored table\")\npublic void testAddToMirrorTableNegative() throws Exception {\nBValue[] returns = BRunUtil.invoke(resultMirror, \"testAddToMirrorTableNegative\", connectionArgs);\nString errorMessage = \"{message:\\\"execute update failed: integrity constraint \"\n+ \"violation: unique constraint or index violation; SYS_PK_10179 table: EMPLOYEEADDNEGATIVE\\\", \"\n+ \"cause:null}\";\nif (dbType == MYSQL) {\nerrorMessage = \"{message:\\\"execute update failed: Duplicate entry '1' for key 'PRIMARY'\\\", cause:null}\";\n} else if (dbType == POSTGRES) {\nerrorMessage = \"{message:\\\"execute update failed: ERROR: duplicate key value violates unique constraint \"\n+ \"\\\"employeeaddnegative_pkey\\\"\\n\"\n+ \" Detail: Key (id)=(1) already exists.\\\", cause:null}\";\n}\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), errorMessage);\n}\n@Test(groups = \"ConnectorTest\", description = \"Test adding data to mirrored table\")\npublic void testAddToMirrorTable() throws Exception {\nBValue[] returns = BRunUtil.invoke(resultMirror, \"testAddToMirrorTable\", 
connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), \"{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}\");\nAssert.assertEquals(returns[1].stringValue(), \"{id:2, name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}\");\n}\n@Test(groups = \"ConnectorTest\", description = \"Test deleting data from mirrored table\")\npublic void testDeleteFromMirrorTable() throws Exception {\nBValue[] returns = BRunUtil.invoke(resultMirror, \"testDeleteFromMirrorTable\", connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(((BBoolean) returns[0]).booleanValue(), false);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 2);\n}\n@Test(groups = \"ConnectorTest\", description = \"Test iterating data of a mirrored table after closing\")\npublic void testIterateMirrorTableAfterClose() throws Exception {\nBValue[] args = {};\nBValue[] returns = BRunUtil.invokeFunction(resultMirror, \"testIterateMirrorTableAfterClose\", connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), \"([{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, \"\n+ \"name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], [{id:1, \"\n+ \"name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, \"\n+ \"name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], {message:\\\"Trying to perform hasNext operation over a \"\n+ \"closed table\\\", cause:null})\");\n}\n@Test(groups = \"ConnectorTest\", description = \"Test iterating data of a table loaded to memory multiple times\")\npublic void testSelectLoadToMemory() throws Exception {\nBValue[] args = {};\nBValue[] returns = BRunUtil.invokeFunction(result, \"testSelectLoadToMemory\", connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), \"([{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, \"\n+ \"name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], [{id:1, \"\n+ \"name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, \"\n+ \"name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], [{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2,\"\n+ \" name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}])\");\n}\n@Test(groups = \"ConnectorTest\", description = \"Test iterating data of a table loaded to memory after closing\")\npublic void testLoadToMemorySelectAfterTableClose() throws Exception {\nBValue[] args = {};\nBValue[] returns = BRunUtil.invokeFunction(result, \"testLoadToMemorySelectAfterTableClose\", connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), \"([{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, \"\n+ \"name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], [{id:1, \"\n+ \"name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, \"\n+ \"name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], {message:\\\"Trying to perform hasNext operation over a \"\n+ \"closed table\\\", cause:null})\");\n}\n@AfterSuite\npublic void cleanup() {\nswitch (dbType) {\ncase HSQLDB:\nSQLDBUtils.deleteDirectory(new 
File(SQLDBUtils.DB_DIRECTORY));\nbreak;\ncase MYSQL:\nif (mysql != null) {\nmysql.stop();\n}\nbreak;\ncase POSTGRES:\nif (postgres != null) {\npostgres.stop();\n}\nbreak;\n}\n}\nenum DBType {\nMYSQL, ORACLE, POSTGRES, HSQLDB\n}\n}", + "context_after": "class SQLActionsTest {\nprivate static final double DELTA = 0.01;\nprivate CompileResult result;\nprivate CompileResult resultNegative;\nprivate CompileResult resultMirror;\nprivate static final String DB_NAME = \"TEST_SQL_CONNECTOR\";\nprivate DBType dbType;\nprivate MySQLContainer mysql;\nprivate PostgreSQLContainer postgres;\nprivate BValue[] connectionArgs = new BValue[3];\n@Parameters({\"dataClientTestDBType\"})\npublic SQLActionsTest(@Optional(\"HSQLDB\") DBType dataClientTestDBType) {\nthis.dbType = dataClientTestDBType;\n}\n@BeforeClass\npublic void setup() {\nString jdbcURL, username, password;\nswitch (dbType) {\ncase MYSQL:\nmysql = new MySQLContainer();\nmysql.start();\njdbcURL = mysql.getJdbcUrl();\nusername = mysql.getUsername();\npassword = mysql.getPassword();\nSQLDBUtils.initDatabase(jdbcURL, username, password, \"datafiles/sql/SQLConnectorMYSQLDataFile.sql\");\nbreak;\ncase POSTGRES:\npostgres = new PostgreSQLContainer();\npostgres.start();\njdbcURL = postgres.getJdbcUrl();\nusername = postgres.getUsername();\npassword = postgres.getPassword();\nSQLDBUtils.initDatabase(jdbcURL, username, password, \"datafiles/sql/SQLConnectorPostgresDataFile.sql\");\nbreak;\ndefault:\nSQLDBUtils.deleteFiles(new File(SQLDBUtils.DB_DIRECTORY), DB_NAME);\njdbcURL = \"jdbc:hsqldb:file:./target/tempdb/TEST_SQL_CONNECTOR\";\nusername = \"SA\";\npassword = \"\";\nSQLDBUtils.initDatabase(jdbcURL, username, password, \"datafiles/sql/SQLConnectorDataFile.sql\");\nbreak;\n}\nconnectionArgs[0] = new BString(jdbcURL);\nconnectionArgs[1] = new BString(username);\nconnectionArgs[2] = new BString(password);\nresult = BCompileUtil.compile(\"test-src/connectors/sql/sql_actions_test.bal\");\nresultNegative = BCompileUtil.compile(\"test-src/connectors/sql/sql_actions_negative_test.bal\");\nresultMirror = BCompileUtil.compile(\"test-src/connectors/sql/sql_mirror_table_test.bal\");\n}\n@Test(groups = \"ConnectorTest\")\npublic void testInsertTableData() {\nBValue[] returns = BRunUtil.invoke(result, \"testInsertTableData\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testCreateTable() {\nBValue[] returns = BRunUtil.invoke(result, \"testCreateTable\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 0);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testUpdateTableData() {\nBValue[] returns = BRunUtil.invoke(result, \"testUpdateTableData\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testGeneratedKeyOnInsert() {\nBValue[] returns = BRunUtil.invoke(result, \"testGeneratedKeyOnInsert\", connectionArgs);\nBString retValue = (BString) returns[0];\nAssert.assertTrue(Integer.parseInt(retValue.stringValue()) > 0);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testGeneratedKeyWithColumn() {\nBValue[] returns = BRunUtil.invoke(result, \"testGeneratedKeyWithColumn\", connectionArgs);\nBString retValue = (BString) returns[0];\nAssert.assertTrue(Integer.parseInt(retValue.stringValue()) > 0);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testSelectData() {\nBValue[] returns = 
BRunUtil.invoke(result, \"testSelectData\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = \"ConnectorTest for int float types\")\npublic void testSelectIntFloatData() {\nBValue[] returns = BRunUtil.invoke(result, \"testSelectIntFloatData\", connectionArgs);\nAssert.assertEquals(returns.length, 4);\nAssert.assertSame(returns[0].getClass(), BInteger.class);\nAssert.assertSame(returns[1].getClass(), BInteger.class);\nAssert.assertSame(returns[2].getClass(), BFloat.class);\nAssert.assertSame(returns[3].getClass(), BFloat.class);\nBInteger intVal = (BInteger) returns[0];\nBInteger longVal = (BInteger) returns[1];\nBFloat floatVal = (BFloat) returns[2];\nBFloat doubleVal = (BFloat) returns[3];\nlong intExpected = 10;\nlong longExpected = 9223372036854774807L;\ndouble floatExpected = 123.34;\ndouble doubleExpected = 2139095039;\nAssert.assertEquals(intVal.intValue(), intExpected);\nAssert.assertEquals(longVal.intValue(), longExpected);\nAssert.assertEquals(floatVal.floatValue(), floatExpected, DELTA);\nAssert.assertEquals(doubleVal.floatValue(), doubleExpected);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testCallProcedure() {\nBValue[] returns = BRunUtil.invoke(result, \"testCallProcedure\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"James\";\nAssert.assertEquals(retValue.stringValue(), expected);\nif (dbType == POSTGRES) {\nAssert.assertEquals(returns[1].stringValue(), \"table\");\n} else {\nAssert.assertEquals(returns[1].stringValue(), \"nil\");\n}\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\", \"PostgresNotSupported\"})\npublic void testCallProcedureWithResultSet() {\nBValue[] returns = BRunUtil.invokeFunction(result, \"testCallProcedureWithResultSet\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\", \"HSQLDBNotSupported\"})\npublic void testCallFunctionWithRefCursor() {\nBValue[] returns = BRunUtil.invokeFunction(result, \"testCallFunctionWithReturningRefcursor\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\", \"PostgresNotSupported\"})\npublic void testCallProcedureWithMultipleResultSets() {\nBValue[] returns = BRunUtil.invoke(result, \"testCallProcedureWithMultipleResultSets\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nBString retValue2 = (BString) returns[1];\nfinal String expected2 = \"John\";\nBString retValue3 = (BString) returns[2];\nfinal String expected3 = \"Watson\";\nAssert.assertEquals(retValue.stringValue(), expected);\nAssert.assertEquals(retValue2.stringValue(), expected2);\nAssert.assertEquals(retValue3.stringValue(), expected3);\n}\n@Test(groups = {\"ConnectorTest\", \"PostgresNotSupported\"})\npublic void testCallProcedureWithMultipleResultSetsAndLowerConstraintCount() {\nBValue[] returns = BRunUtil\n.invoke(resultNegative, \"testCallProcedureWithMultipleResultSetsAndLowerConstraintCount\",\nconnectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"message:\\\"execute stored procedure failed: Mismatching \"\n+ \"record type count: 1 and returned result set count: 2 from the stored 
procedure\\\"\"));\n}\n@Test(groups = {\"ConnectorTest\", \"PostgresNotSupported\"})\npublic void testCallProcedureWithMultipleResultSetsAndNilConstraintCount() {\nBValue[] returns = BRunUtil\n.invoke(resultNegative, \"testCallProcedureWithMultipleResultSetsAndNilConstraintCount\", connectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"message:\\\"execute stored procedure failed: Mismatching \"\n+ \"record type count: 0 and returned result set count: 2 from the stored procedure\\\"\"));\n}\n@Test(groups = {\"ConnectorTest\", \"PostgresNotSupported\"})\npublic void testCallProcedureWithMultipleResultSetsAndHigherConstraintCount() {\nBValue[] returns = BRunUtil\n.invoke(resultNegative, \"testCallProcedureWithMultipleResultSetsAndHigherConstraintCount\",\nconnectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"message:\\\"execute stored procedure failed: Mismatching \"\n+ \"record type count: 3 and returned result set count: 2 from the stored procedure\\\"\"));\n}\n@Test(groups = \"ConnectorTest\")\npublic void testQueryParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testQueryParameters\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testQueryParameters2() {\nBValue[] returns = BRunUtil.invoke(result, \"testQueryParameters2\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testInsertTableDataWithParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testInsertTableDataWithParameters\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testInsertTableDataWithParameters2() {\nBValue[] returns = BRunUtil.invoke(result, \"testInsertTableDataWithParameters2\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testInsertTableDataWithParameters3() {\nBValue[] returns = BRunUtil.invoke(result, \"testInsertTableDataWithParameters3\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testArrayofQueryParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testArrayofQueryParameters\", connectionArgs);\nBString retValue = (BString) returns[0];\nfinal String expected = \"Peter\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testBoolArrayofQueryParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testBoolArrayofQueryParameters\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 10);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testOutParameters\", connectionArgs);\nAssert.assertEquals(returns.length, 14);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 10);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 9223372036854774807L);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 123.34D, DELTA);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 2139095039D);\nAssert.assertEquals(((BBoolean) 
returns[4]).booleanValue(), true);\nAssert.assertEquals(returns[5].stringValue(), \"Hello\");\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 1234.567D, DELTA);\nAssert.assertEquals(((BInteger) returns[9]).intValue(), 1);\nAssert.assertEquals(((BInteger) returns[10]).intValue(), 5555);\nAssert.assertEquals(returns[11].stringValue(), \"very long text\");\nAssert.assertEquals(returns[12].stringValue(), \"d3NvMiBiYWxsZXJpbmEgYmxvYiB0ZXN0Lg==\");\nAssert.assertEquals(returns[13].stringValue(), \"wso2 ballerina binary test.\");\n}\n@Test(groups = {\"ConnectorTest\"})\npublic void testNullOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testNullOutParameters\", connectionArgs);\nAssert.assertEquals(returns.length, 14);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 0);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 0);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 0.0D);\nAssert.assertEquals(((BBoolean) returns[4]).booleanValue(), false);\nAssert.assertEquals(returns[5].stringValue(), null);\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 0.0D);\nAssert.assertEquals(((BInteger) returns[9]).intValue(), 0);\nAssert.assertEquals(((BInteger) returns[10]).intValue(), 0);\nAssert.assertEquals(returns[11].stringValue(), null);\nAssert.assertEquals(returns[12].stringValue(), null);\nAssert.assertEquals(returns[13].stringValue(), null);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testINParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testINParameters\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testINParametersWithDirectValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testINParametersWithDirectValues\", connectionArgs);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 1);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 9223372036854774807L);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 123.34D, DELTA);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 2139095039.1D);\nAssert.assertEquals(returns[5].stringValue(), \"Hello\");\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 1234.567D, DELTA);\nAssert.assertTrue(returns[9].stringValue().equals(returns[10].stringValue()));\n}\n@Test(groups = \"ConnectorTest\")\npublic void testINParametersWithDirectVariables() {\nBValue[] returns = BRunUtil.invoke(result, \"testINParametersWithDirectVariables\", connectionArgs);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 1);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 9223372036854774807L);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 123.34D, DELTA);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 2139095039.1D);\nAssert.assertEquals(returns[5].stringValue(), \"Hello\");\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 
1234.567D, DELTA);\nAssert.assertTrue(returns[9].stringValue().equals(returns[10].stringValue()));\n}\n@Test(groups = \"ConnectorTest\")\npublic void testNullINParameterValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testNullINParameterValues\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testINOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testINOutParameters\", connectionArgs);\nAssert.assertEquals(returns.length, 14);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 10);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 9223372036854774807L);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 123.34D, DELTA);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 2139095039D);\nAssert.assertEquals(((BBoolean) returns[4]).booleanValue(), true);\nAssert.assertEquals(returns[5].stringValue(), \"Hello\");\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 1234.567D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 1234.567D, DELTA);\nAssert.assertEquals(((BInteger) returns[9]).intValue(), 1);\nAssert.assertEquals(((BInteger) returns[10]).intValue(), 5555);\nAssert.assertEquals(returns[11].stringValue(), \"very long text\");\nAssert.assertEquals(returns[12].stringValue(), \"YmxvYiBkYXRh\");\nAssert.assertEquals(returns[13].stringValue(), \"wso2 ballerina binary test.\");\n}\n@Test(groups = \"ConnectorTest\")\npublic void testNullINOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testNullINOutParameters\", connectionArgs);\nAssert.assertEquals(returns.length, 14);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 0);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 0);\nAssert.assertEquals(((BFloat) returns[2]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[3]).floatValue(), 0.0D);\nAssert.assertEquals(((BBoolean) returns[4]).booleanValue(), false);\nAssert.assertEquals(returns[5].stringValue(), null);\nAssert.assertEquals(((BFloat) returns[6]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[7]).floatValue(), 0.0D);\nAssert.assertEquals(((BFloat) returns[8]).floatValue(), 0.0D);\nAssert.assertEquals(((BInteger) returns[9]).intValue(), 0);\nAssert.assertEquals(((BInteger) returns[10]).intValue(), 0);\nAssert.assertEquals(returns[11].stringValue(), null);\nAssert.assertEquals(returns[12].stringValue(), null);\nAssert.assertEquals(returns[13].stringValue(), null);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testEmptySQLType() {\nBValue[] returns = BRunUtil.invoke(result, \"testEmptySQLType\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\"})\npublic void testArrayInParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testArrayInParameters\", connectionArgs);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\nAssert.assertTrue(returns[1] instanceof BIntArray);\nBIntArray intArray = (BIntArray) returns[1];\nAssert.assertEquals(intArray.get(0), 1);\nAssert.assertTrue(returns[2] instanceof BIntArray);\nBIntArray longArray = (BIntArray) returns[2];\nAssert.assertEquals(longArray.get(0), 1503383034226L);\nAssert.assertEquals(longArray.get(1), 1503383034224L);\nAssert.assertEquals(longArray.get(2), 
1503383034225L);\nAssert.assertTrue(returns[3] instanceof BFloatArray);\nBFloatArray doubleArray = (BFloatArray) returns[3];\nAssert.assertEquals(doubleArray.get(0), 1503383034226.23D);\nAssert.assertEquals(doubleArray.get(1), 1503383034224.43D);\nAssert.assertEquals(doubleArray.get(2), 1503383034225.123D);\nAssert.assertTrue(returns[4] instanceof BStringArray);\nBStringArray stringArray = (BStringArray) returns[4];\nAssert.assertEquals(stringArray.get(0), \"Hello\");\nAssert.assertEquals(stringArray.get(1), \"Ballerina\");\nAssert.assertTrue(returns[5] instanceof BBooleanArray);\nBBooleanArray booleanArray = (BBooleanArray) returns[5];\nAssert.assertEquals(booleanArray.get(0), 1);\nAssert.assertEquals(booleanArray.get(1), 0);\nAssert.assertEquals(booleanArray.get(2), 1);\nAssert.assertTrue(returns[6] instanceof BFloatArray);\nBFloatArray floatArray = (BFloatArray) returns[6];\nAssert.assertEquals(floatArray.get(0), 245.23);\nAssert.assertEquals(floatArray.get(1), 5559.49);\nAssert.assertEquals(floatArray.get(2), 8796.123);\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\"})\npublic void testArrayOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testArrayOutParameters\", connectionArgs);\nAssert.assertEquals(returns[0].stringValue(), \"[1,2,3]\");\nAssert.assertEquals(returns[1].stringValue(), \"[100000000,200000000,300000000]\");\nAssert.assertEquals(returns[2].stringValue(), \"[245.23,5559.49,8796.123]\");\nAssert.assertEquals(returns[3].stringValue(), \"[245.23,5559.49,8796.123]\");\nAssert.assertEquals(returns[4].stringValue(), \"[true,false,true]\");\nAssert.assertEquals(returns[5].stringValue(), \"[Hello,Ballerina]\");\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\"})\npublic void testArrayInOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testArrayInOutParameters\", connectionArgs);\nAssert.assertEquals(returns[0].stringValue(), \"1\");\nAssert.assertEquals(returns[1].stringValue(), \"[1,2,3]\");\nAssert.assertEquals(returns[2].stringValue(), \"[100000000,200000000,300000000]\");\nAssert.assertEquals(returns[3].stringValue(), \"[245.23,5559.49,8796.123]\");\nAssert.assertEquals(returns[4].stringValue(), \"[245.23,5559.49,8796.123]\");\nAssert.assertEquals(returns[5].stringValue(), \"[true,false,true]\");\nAssert.assertEquals(returns[6].stringValue(), \"[Hello,Ballerina]\");\n}\n@Test(groups = \"ConnectorTest\")\npublic void testBatchUpdate() {\nBValue[] returns = BRunUtil.invoke(result, \"testBatchUpdate\", connectionArgs);\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals(retValue.get(0), 1);\nAssert.assertEquals(retValue.get(1), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testBatchUpdateWithValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testBatchUpdateWithValues\", connectionArgs);\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals(retValue.get(0), 1);\nAssert.assertEquals(retValue.get(1), 1);\n}\n@Test(groups = \"ConnectorTest\", description = \"Test batch update operation with variable parameters\")\npublic void testBatchUpdateWithVariables() {\nBValue[] returns = BRunUtil.invoke(result, \"testBatchUpdateWithVariables\", connectionArgs);\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals(retValue.get(0), 1);\nAssert.assertEquals(retValue.get(1), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testBatchUpdateWithFailure() {\nBValue[] returns = BRunUtil.invoke(result, \"testBatchUpdateWithFailure\", connectionArgs);\nint[] expectedResult = {1, 1, -3, 
-3};\nif (dbType == MYSQL) {\nexpectedResult[3] = 1;\n} else if (dbType == POSTGRES) {\nexpectedResult[0] = -3;\nexpectedResult[1] = -3;\n}\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals(retValue.get(0), expectedResult[0]);\nAssert.assertEquals(retValue.get(1), expectedResult[1]);\nAssert.assertEquals(retValue.get(2), expectedResult[2]);\nAssert.assertEquals(retValue.get(3), expectedResult[3]);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 0);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testBatchUpdateWithNullParam() {\nBValue[] returns = BRunUtil.invoke(result, \"testBatchUpdateWithNullParam\", connectionArgs);\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals(retValue.get(0), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testInsertTimeData() {\nBValue[] returns = BRunUtil.invoke(result, \"testDateTimeInParameters\", connectionArgs);\nBIntArray retValue = (BIntArray) returns[0];\nAssert.assertEquals((int) retValue.get(0), 1);\nAssert.assertEquals((int) retValue.get(1), 1);\nAssert.assertEquals((int) retValue.get(2), 1);\n}\n@Test(groups = \"ConnectorTest\")\npublic void testDateTimeOutParams() {\nBValue[] args = new BValue[6];\nCalendar cal = Calendar.getInstance();\ncal.clear();\ncal.set(Calendar.HOUR, 14);\ncal.set(Calendar.MINUTE, 15);\ncal.set(Calendar.SECOND, 23);\nlong time = cal.getTimeInMillis();\nargs[0] = new BInteger(time);\ncal.clear();\ncal.set(Calendar.YEAR, 2017);\ncal.set(Calendar.MONTH, 5);\ncal.set(Calendar.DAY_OF_MONTH, 23);\nlong date = cal.getTimeInMillis();\nargs[1] = new BInteger(date);\ncal.clear();\ncal.set(Calendar.HOUR, 16);\ncal.set(Calendar.MINUTE, 33);\ncal.set(Calendar.SECOND, 55);\ncal.set(Calendar.YEAR, 2017);\ncal.set(Calendar.MONTH, 1);\ncal.set(Calendar.DAY_OF_MONTH, 25);\nlong timestamp = cal.getTimeInMillis();\nargs[2] = new BInteger(timestamp);\nargs[3] = connectionArgs[0];\nargs[4] = connectionArgs[1];\nargs[5] = connectionArgs[2];\nBValue[] returns = BRunUtil.invoke(result, \"testDateTimeOutParams\", args);\nAssert.assertEquals(returns.length, 1);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\", description = \"Check date time null in values\")\npublic void testDateTimeNullInValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testDateTimeNullInValues\", connectionArgs);\nAssert.assertEquals(returns.length, 1);\nAssert.assertEquals((returns[0]).stringValue(), \"[{\\\"DATE_TYPE\\\":null,\\\"TIME_TYPE\\\":null,\"\n+ \"\\\"TIMESTAMP_TYPE\\\":null,\\\"DATETIME_TYPE\\\":null}]\");\n}\n@Test(groups = \"ConnectorTest\", description = \"Check date time null out values\")\npublic void testDateTimeNullOutValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testDateTimeNullOutValues\", connectionArgs);\nAssert.assertEquals(returns.length, 1);\nAssert.assertEquals(((BInteger) returns[0]).intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\", description = \"Check date time null inout values\")\npublic void testDateTimeNullInOutValues() {\nBValue[] returns = BRunUtil.invoke(result, \"testDateTimeNullInOutValues\", connectionArgs);\nAssert.assertEquals(returns.length, 4);\nAssert.assertNull(returns[0].stringValue());\nAssert.assertNull(returns[1].stringValue());\nAssert.assertNull(returns[2].stringValue());\nAssert.assertNull(returns[3].stringValue());\n}\n@Test(groups = {\"ConnectorTest\", \"MySQLNotSupported\"})\npublic void testStructOutParameters() {\nBValue[] returns = BRunUtil.invoke(result, \"testStructOutParameters\", 
connectionArgs);\nBString retValue = (BString) returns[0];\nString expected = \"10\";\nAssert.assertEquals(retValue.stringValue(), expected);\n}\n@Test(dependsOnGroups = \"ConnectorTest\")\npublic void testCloseConnectionPool() {\nBValue connectionCountQuery;\nif (dbType == MYSQL) {\nconnectionCountQuery = new BString(\"SELECT COUNT(*) FROM information_schema.PROCESSLIST\");\n} else {\nconnectionCountQuery = new BString(\"SELECT COUNT(*) as countVal FROM INFORMATION_SCHEMA\"\n+ \".SYSTEM_SESSIONS\");\n}\nBValue[] args = new BValue[4];\nSystem.arraycopy(connectionArgs, 0, args, 0, 3);\nargs[3] = connectionCountQuery;\nBValue[] returns = BRunUtil.invoke(result, \"testCloseConnectionPool\", args);\nBInteger retValue = (BInteger) returns[0];\nAssert.assertEquals(retValue.intValue(), 1);\n}\n@Test(groups = \"ConnectorTest\", description = \"Check blob binary and clob types types.\")\npublic void testComplexTypeRetrieval() {\nBValue[] returns = BRunUtil.invoke(result, \"testComplexTypeRetrieval\", connectionArgs);\nString expected0, expected1, expected2, expected3;\nif (dbType == MYSQL) {\nexpected0 = \"1109223372036854774807\"\n+ \"123.342.139095039E9\"\n+ \"trueHello\"\n+ \"1234.5671234.567\"\n+ \"1234.5671\"\n+ \"5555very long text\"\n+ \"d3NvMiBiYWxsZXJpbmEgYmxvYiB0ZXN0Lg==\"\n+ \"d3NvMiBiYWxsZXJpbmEgYmluYXJ5IHRlc3Qu\";\nexpected1 = \"1\"\n+ \"2017-02-0311:35:45\"\n+ \"2017-02-03 11:53:00.0\"\n+ \"2017-02-03 11:53:00.0\";\nexpected2 = \"[{\\\"row_id\\\":1,\\\"int_type\\\":10,\\\"long_type\\\":9223372036854774807,\\\"float_type\\\":123.34,\"\n+ \"\\\"double_type\\\":2.139095039E9,\\\"boolean_type\\\":true,\\\"string_type\\\":\\\"Hello\\\",\"\n+ \"\\\"numeric_type\\\":1234.567,\\\"decimal_type\\\":1234.567,\\\"real_type\\\":1234.567,\\\"tinyint_type\\\":1,\"\n+ \"\\\"smallint_type\\\":5555,\\\"clob_type\\\":\\\"very long text\\\",\"\n+ \"\\\"blob_type\\\":\\\"d3NvMiBiYWxsZXJpbmEgYmxvYiB0ZXN0Lg==\\\",\"\n+ \"\\\"binary_type\\\":\\\"d3NvMiBiYWxsZXJpbmEgYmluYXJ5IHRlc3Qu\\\"}]\";\nexpected3 = \"[{\\\"row_id\\\":1,\\\"date_type\\\":\\\"2017-02-03\\\",\\\"time_type\\\":\\\"11:35:45\\\",\"\n+ \"\\\"datetime_type\\\":\\\"2017-02-03 11:53:00.0\\\",\\\"timestamp_type\\\":\\\"2017-02-03 11:53:00.0\\\"}]\";\n} else {\nexpected0 = \"110\"\n+ \"9223372036854774807123.34\"\n+ \"2.139095039E9true\"\n+ \"Hello1234.567\"\n+ \"1234.5671234.5671\"\n+ \"5555very long text\"\n+ \"d3NvMiBiYWxsZXJpbmEgYmxvYiB0ZXN0Lg==\"\n+ \"d3NvMiBiYWxsZXJpbmEgYmluYXJ5IHRlc3Qu\";\nexpected1 = \"1\"\n+ \"2017-02-0311:35:45\"\n+ \"2017-02-03 11:53:00.000000\"\n+ \"2017-02-03 11:53:00.000000\";\nexpected2 = \"[{\\\"ROW_ID\\\":1,\\\"INT_TYPE\\\":10,\"\n+ \"\\\"LONG_TYPE\\\":9223372036854774807,\\\"FLOAT_TYPE\\\":123.34,\\\"DOUBLE_TYPE\\\":2.139095039E9,\"\n+ \"\\\"BOOLEAN_TYPE\\\":true,\\\"STRING_TYPE\\\":\\\"Hello\\\",\\\"NUMERIC_TYPE\\\":1234.567,\"\n+ \"\\\"DECIMAL_TYPE\\\":1234.567,\\\"REAL_TYPE\\\":1234.567,\\\"TINYINT_TYPE\\\":1,\\\"SMALLINT_TYPE\\\":5555,\"\n+ \"\\\"CLOB_TYPE\\\":\\\"very long text\\\",\\\"BLOB_TYPE\\\":\\\"d3NvMiBiYWxsZXJpbmEgYmxvYiB0ZXN0Lg==\\\",\"\n+ \"\\\"BINARY_TYPE\\\":\\\"d3NvMiBiYWxsZXJpbmEgYmluYXJ5IHRlc3Qu\\\"}]\";\nexpected3 = \"[{\\\"ROW_ID\\\":1,\\\"DATE_TYPE\\\":\\\"2017-02-03\\\",\"\n+ \"\\\"TIME_TYPE\\\":\\\"11:35:45\\\",\\\"DATETIME_TYPE\\\":\\\"2017-02-03 11:53:00.000000\\\",\"\n+ \"\\\"TIMESTAMP_TYPE\\\":\\\"2017-02-03 11:53:00.000000\\\"}]\";\n}\nAssert.assertEquals(returns[0].stringValue(), expected0);\nAssert.assertEquals(returns[1].stringValue(), 
expected1);\nAssert.assertEquals(returns[2].stringValue(), expected2);\nAssert.assertEquals(returns[3].stringValue(), expected3);\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failed select query\")\npublic void testFailedSelect() {\nBValue[] returns = BRunUtil.invoke(resultNegative, \"testSelectData\", connectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"execute query failed:\"));\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failed update with generated id action\")\npublic void testFailedGeneratedKeyOnInsert() {\nBValue[] returns = BRunUtil.invoke(resultNegative, \"testGeneratedKeyOnInsert\", connectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"execute update with generated keys failed:\"));\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failed call procedure\")\npublic void testFailedCallProcedure() {\nBValue[] returns = BRunUtil.invoke(resultNegative, \"testCallProcedure\", connectionArgs);\nAssert.assertTrue(returns[0].stringValue().contains(\"execute stored procedure failed:\"));\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failed batch update\")\npublic void testFailedBatchUpdate() {\nBValue[] returns = BRunUtil.invoke(resultNegative, \"testBatchUpdate\", connectionArgs);\nif (dbType == HSQLDB) {\nAssert.assertTrue(returns[0].stringValue().contains(\"execute batch update failed:\"));\n} else {\nAssert.assertTrue(returns[0].stringValue().contains(\"failure\"));\n}\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failed parameter array update\")\npublic void testInvalidArrayofQueryParameters() {\nBValue[] returns = BRunUtil.invoke(resultNegative, \"testInvalidArrayofQueryParameters\", connectionArgs);\nAssert.assertTrue(returns[0].stringValue()\n.contains(\"execute query failed: unsupported array type for parameter index 0\"));\n}\n@Test(groups = \"ConnectorTest\", description = \"Test failure scenario in adding data to mirrored table\")\npublic void testAddToMirrorTableNegative() throws Exception {\nBValue[] returns = BRunUtil.invoke(resultMirror, \"testAddToMirrorTableNegative\", connectionArgs);\nString errorMessage;\nif (dbType == MYSQL) {\nerrorMessage = \"execute update failed: Duplicate entry '1' for key 'PRIMARY'\";\n} else if (dbType == POSTGRES) {\nerrorMessage = \"{message:\\\"execute update failed: ERROR: duplicate key value violates unique constraint \"\n+ \"\\\"employeeaddnegative_pkey\\\"\\n\"\n+ \" Detail: Key (id)=(1) already exists.\\\", cause:null}\";\n} else {\nerrorMessage = \"execute update failed: integrity constraint violation: unique constraint or index \"\n+ \"violation\";\n}\nAssert.assertNotNull(returns);\nAssert.assertTrue(returns[0].stringValue().contains(errorMessage));\n}\n@Test(groups = \"ConnectorTest\", description = \"Test adding data to mirrored table\")\npublic void testAddToMirrorTable() throws Exception {\nBValue[] returns = BRunUtil.invoke(resultMirror, \"testAddToMirrorTable\", connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), \"{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}\");\nAssert.assertEquals(returns[1].stringValue(), \"{id:2, name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}\");\n}\n@Test(groups = \"ConnectorTest\", description = \"Test deleting data from mirrored table\")\npublic void testDeleteFromMirrorTable() throws Exception {\nBValue[] returns = BRunUtil.invoke(resultMirror, \"testDeleteFromMirrorTable\", 
connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(((BBoolean) returns[0]).booleanValue(), false);\nAssert.assertEquals(((BInteger) returns[1]).intValue(), 2);\n}\n@Test(groups = \"ConnectorTest\", description = \"Test iterating data of a mirrored table after closing\")\npublic void testIterateMirrorTableAfterClose() throws Exception {\nBValue[] returns = BRunUtil.invokeFunction(resultMirror, \"testIterateMirrorTableAfterClose\", connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), \"([{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, \"\n+ \"name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], [{id:1, \"\n+ \"name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, \"\n+ \"name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], {message:\\\"Trying to perform hasNext operation over a \"\n+ \"closed table\\\", cause:null})\");\n}\n@Test(groups = \"ConnectorTest\", description = \"Test iterating data of a table loaded to memory multiple times\")\npublic void testSelectLoadToMemory() throws Exception {\nBValue[] returns = BRunUtil.invokeFunction(result, \"testSelectLoadToMemory\", connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), \"([{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, \"\n+ \"name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], [{id:1, \"\n+ \"name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, \"\n+ \"name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], [{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2,\"\n+ \" name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}])\");\n}\n@Test(groups = \"ConnectorTest\", description = \"Test iterating data of a table loaded to memory after closing\")\npublic void testLoadToMemorySelectAfterTableClose() throws Exception {\nBValue[] returns = BRunUtil.invokeFunction(result, \"testLoadToMemorySelectAfterTableClose\", connectionArgs);\nAssert.assertNotNull(returns);\nAssert.assertEquals(returns[0].stringValue(), \"([{id:1, name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, \"\n+ \"name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], [{id:1, \"\n+ \"name:\\\"Manuri\\\", address:\\\"Sri Lanka\\\"}, {id:2, name:\\\"Devni\\\", address:\\\"Sri Lanka\\\"}, {id:3, \"\n+ \"name:\\\"Thurani\\\", address:\\\"Sri Lanka\\\"}], {message:\\\"Trying to perform hasNext operation over a \"\n+ \"closed table\\\", cause:null})\");\n}\n@AfterSuite\npublic void cleanup() {\nswitch (dbType) {\ncase HSQLDB:\nSQLDBUtils.deleteDirectory(new File(SQLDBUtils.DB_DIRECTORY));\nbreak;\ncase MYSQL:\nif (mysql != null) {\nmysql.stop();\n}\nbreak;\ncase POSTGRES:\nif (postgres != null) {\npostgres.stop();\n}\nbreak;\n}\n}\nenum DBType {\nMYSQL, ORACLE, POSTGRES, HSQLDB\n}\n}",
},
{
"comment": "It's only used here, I don't see the point. 
It's in the JDK that this interface belongs :(", + "method_body": "public void close() {\n}", + "target_code": "}", + "method_body_after": "public void close() {\n}", + "context_before": "class CommonPanacheQueryImpl {\nprivate static final Pattern SELECT_PATTERN = Pattern.compile(\"^\\\\s*SELECT\\\\s+((?:DISTINCT\\\\s+)?[^\\\\s]+)\\\\s+([^\\\\s]+.*)$\",\nPattern.CASE_INSENSITIVE);\nprivate interface NonThrowingCloseable extends AutoCloseable {\n@Override\nvoid close();\n}\nprivate static final NonThrowingCloseable NO_FILTERS = new NonThrowingCloseable() {\n@Override\n};\nprivate Object paramsArrayOrMap;\nprivate String query;\nprotected String countQuery;\nprivate String orderBy;\nprivate EntityManager em;\nprivate Page page;\nprivate Long count;\nprivate Range range;\nprivate LockModeType lockModeType;\nprivate Map hints;\nprivate Map> filters;\npublic CommonPanacheQueryImpl(EntityManager em, String query, String orderBy, Object paramsArrayOrMap) {\nthis.em = em;\nthis.query = query;\nthis.orderBy = orderBy;\nthis.paramsArrayOrMap = paramsArrayOrMap;\n}\nprivate CommonPanacheQueryImpl(CommonPanacheQueryImpl previousQuery, String newQueryString, String countQuery) {\nthis.em = previousQuery.em;\nthis.query = newQueryString;\nthis.countQuery = countQuery;\nthis.orderBy = previousQuery.orderBy;\nthis.paramsArrayOrMap = previousQuery.paramsArrayOrMap;\nthis.page = previousQuery.page;\nthis.count = previousQuery.count;\nthis.range = previousQuery.range;\nthis.lockModeType = previousQuery.lockModeType;\nthis.hints = previousQuery.hints;\nthis.filters = previousQuery.filters;\n}\npublic CommonPanacheQueryImpl project(Class type) {\nif (AbstractJpaOperations.isNamedQuery(query)) {\nthrow new PanacheQueryException(\"Unable to perform a projection on a named query\");\n}\nConstructor constructor = type.getDeclaredConstructors()[0];\nStringBuilder select = new StringBuilder(\"SELECT new \").append(type.getName()).append(\" (\");\nint selectInitialLength = select.length();\nfor (Parameter parameter : constructor.getParameters()) {\nif (!parameter.isNamePresent()) {\nthrow new PanacheQueryException(\n\"Your application must be built with parameter names, this should be the default if\" +\n\" using Quarkus artifacts. 
Check the maven or gradle compiler configuration to include '-parameters'.\");\n}\nif (select.length() > selectInitialLength) {\nselect.append(\", \");\n}\nselect.append(parameter.getName());\n}\nselect.append(\") \");\nreturn new CommonPanacheQueryImpl<>(this, select.toString() + query, \"select count(*) \" + query);\n}\npublic void filter(String filterName, Map parameters) {\nif (filters == null)\nfilters = new HashMap<>();\nfilters.put(filterName, parameters);\n}\npublic void page(Page page) {\nthis.page = page;\nthis.range = null;\n}\npublic void page(int pageIndex, int pageSize) {\npage(Page.of(pageIndex, pageSize));\n}\npublic void nextPage() {\ncheckPagination();\npage(page.next());\n}\npublic void previousPage() {\ncheckPagination();\npage(page.previous());\n}\npublic void firstPage() {\ncheckPagination();\npage(page.first());\n}\npublic void lastPage() {\ncheckPagination();\npage(page.index(pageCount() - 1));\n}\npublic boolean hasNextPage() {\ncheckPagination();\nreturn page.index < (pageCount() - 1);\n}\npublic boolean hasPreviousPage() {\ncheckPagination();\nreturn page.index > 0;\n}\npublic int pageCount() {\ncheckPagination();\nlong count = count();\nif (count == 0)\nreturn 1;\nreturn (int) Math.ceil((double) count / (double) page.size);\n}\npublic Page page() {\ncheckPagination();\nreturn page;\n}\nprivate void checkPagination() {\nif (page == null) {\nthrow new UnsupportedOperationException(\"Cannot call a page related method, \" +\n\"call page(Page) or page(int, int) to initiate pagination first\");\n}\nif (range != null) {\nthrow new UnsupportedOperationException(\"Cannot call a page related method in a ranged query, \" +\n\"call page(Page) or page(int, int) to initiate pagination first\");\n}\n}\npublic void range(int startIndex, int lastIndex) {\nthis.range = Range.of(startIndex, lastIndex);\nthis.page = null;\n}\npublic void withLock(LockModeType lockModeType) {\nthis.lockModeType = lockModeType;\n}\npublic void withHint(String hintName, Object value) {\nif (hints == null) {\nhints = new HashMap<>();\n}\nhints.put(hintName, value);\n}\n@SuppressWarnings(\"unchecked\")\npublic long count() {\nif (AbstractJpaOperations.isNamedQuery(query)) {\nthrow new PanacheQueryException(\"Unable to perform a count operation on a named query\");\n}\nif (count == null) {\nQuery countQuery = em.createQuery(countQuery());\nif (paramsArrayOrMap instanceof Map)\nAbstractJpaOperations.bindParameters(countQuery, (Map) paramsArrayOrMap);\nelse\nAbstractJpaOperations.bindParameters(countQuery, (Object[]) paramsArrayOrMap);\ntry (NonThrowingCloseable c = applyFilters()) {\ncount = (Long) countQuery.getSingleResult();\n}\n}\nreturn count;\n}\nprivate String countQuery() {\nif (countQuery != null) {\nreturn countQuery;\n}\nMatcher selectMatcher = SELECT_PATTERN.matcher(query);\nString countQuery;\nif (selectMatcher.matches()) {\ncountQuery = \"SELECT COUNT(\" + selectMatcher.group(1) + \") \" + selectMatcher.group(2);\n} else {\ncountQuery = \"SELECT COUNT(*) \" + query;\n}\nString lcQuery = countQuery.toLowerCase();\nint orderByIndex = lcQuery.lastIndexOf(\" order by \");\nif (orderByIndex != -1) {\ncountQuery = countQuery.substring(0, orderByIndex);\n}\nreturn countQuery;\n}\n@SuppressWarnings(\"unchecked\")\npublic List list() {\nQuery jpaQuery = createQuery();\ntry (NonThrowingCloseable c = applyFilters()) {\nreturn jpaQuery.getResultList();\n}\n}\n@SuppressWarnings(\"unchecked\")\npublic Stream stream() {\nQuery jpaQuery = createQuery();\ntry (NonThrowingCloseable c = applyFilters()) 
{\nreturn jpaQuery.getResultStream();\n}\n}\npublic T firstResult() {\nQuery jpaQuery = createQuery(1);\ntry (NonThrowingCloseable c = applyFilters()) {\n@SuppressWarnings(\"unchecked\")\nList list = jpaQuery.getResultList();\nreturn list.isEmpty() ? null : list.get(0);\n}\n}\npublic Optional firstResultOptional() {\nreturn Optional.ofNullable(firstResult());\n}\n@SuppressWarnings(\"unchecked\")\npublic T singleResult() {\nQuery jpaQuery = createQuery();\ntry (NonThrowingCloseable c = applyFilters()) {\nreturn (T) jpaQuery.getSingleResult();\n}\n}\n@SuppressWarnings(\"unchecked\")\npublic Optional singleResultOptional() {\nQuery jpaQuery = createQuery(2);\ntry (NonThrowingCloseable c = applyFilters()) {\nList list = jpaQuery.getResultList();\nif (list.size() > 1) {\nthrow new NonUniqueResultException();\n}\nreturn list.isEmpty() ? Optional.empty() : Optional.of(list.get(0));\n}\n}\nprivate Query createQuery() {\nQuery jpaQuery = createBaseQuery();\nif (range != null) {\njpaQuery.setFirstResult(range.getStartIndex());\njpaQuery.setMaxResults(range.getLastIndex() - range.getStartIndex() + 1);\n} else if (page != null) {\njpaQuery.setFirstResult(page.index * page.size);\njpaQuery.setMaxResults(page.size);\n} else {\n@SuppressWarnings(\"deprecation\")\nRowSelection options = jpaQuery.unwrap(org.hibernate.query.Query.class).getQueryOptions();\noptions.setFirstRow(null);\noptions.setMaxRows(null);\n}\nreturn jpaQuery;\n}\nprivate Query createQuery(int maxResults) {\nQuery jpaQuery = createBaseQuery();\nif (range != null) {\njpaQuery.setFirstResult(range.getStartIndex());\n} else if (page != null) {\njpaQuery.setFirstResult(page.index * page.size);\n} else {\n@SuppressWarnings(\"deprecation\")\nRowSelection options = jpaQuery.unwrap(org.hibernate.query.Query.class).getQueryOptions();\noptions.setFirstRow(null);\n}\njpaQuery.setMaxResults(maxResults);\nreturn jpaQuery;\n}\n@SuppressWarnings(\"unchecked\")\nprivate Query createBaseQuery() {\nQuery jpaQuery;\nif (AbstractJpaOperations.isNamedQuery(query)) {\nString namedQuery = query.substring(1);\njpaQuery = em.createNamedQuery(namedQuery);\n} else {\njpaQuery = em.createQuery(orderBy != null ? 
query + orderBy : query);\n}\nif (paramsArrayOrMap instanceof Map) {\nAbstractJpaOperations.bindParameters(jpaQuery, (Map) paramsArrayOrMap);\n} else {\nAbstractJpaOperations.bindParameters(jpaQuery, (Object[]) paramsArrayOrMap);\n}\nif (this.lockModeType != null) {\njpaQuery.setLockMode(lockModeType);\n}\nif (hints != null) {\nfor (Map.Entry hint : hints.entrySet()) {\njpaQuery.setHint(hint.getKey(), hint.getValue());\n}\n}\nreturn jpaQuery;\n}\nprivate NonThrowingCloseable applyFilters() {\nif (filters == null)\nreturn NO_FILTERS;\nSession session = em.unwrap(Session.class);\nfor (Entry> entry : filters.entrySet()) {\nFilter filter = session.enableFilter(entry.getKey());\nfor (Entry paramEntry : entry.getValue().entrySet()) {\nfilter.setParameter(paramEntry.getKey(), paramEntry.getValue());\n}\nfilter.validate();\n}\nreturn new NonThrowingCloseable() {\n@Override\npublic void close() {\nfor (Entry> entry : filters.entrySet()) {\nsession.disableFilter(entry.getKey());\n}\n}\n};\n}\n}", + "context_after": "class CommonPanacheQueryImpl {\nstatic final Pattern SELECT_PATTERN = Pattern.compile(\n\"^\\\\s*SELECT\\\\s+((?:DISTINCT\\\\s+)?\\\\w+(?:\\\\.\\\\w+)*)(?:\\\\s+AS\\\\s+\\\\w+)?(\\\\s*,\\\\s*\\\\w+(?:\\\\.\\\\w+)*(?:\\\\s+AS\\\\s+\\\\w+)?)*\\\\s+(.*)\",\nPattern.CASE_INSENSITIVE);\nstatic final Pattern FROM_PATTERN = Pattern.compile(\"^\\\\s*FROM\\\\s+.*\",\nPattern.CASE_INSENSITIVE);\nprivate interface NonThrowingCloseable extends AutoCloseable {\n@Override\nvoid close();\n}\nprivate static final NonThrowingCloseable NO_FILTERS = new NonThrowingCloseable() {\n@Override\n};\nprivate Object paramsArrayOrMap;\nprivate String query;\nprotected String countQuery;\nprivate String orderBy;\nprivate EntityManager em;\nprivate Page page;\nprivate Long count;\nprivate Range range;\nprivate LockModeType lockModeType;\nprivate Map hints;\nprivate Map> filters;\npublic CommonPanacheQueryImpl(EntityManager em, String query, String orderBy, Object paramsArrayOrMap) {\nthis.em = em;\nthis.query = query;\nthis.orderBy = orderBy;\nthis.paramsArrayOrMap = paramsArrayOrMap;\n}\nprivate CommonPanacheQueryImpl(CommonPanacheQueryImpl previousQuery, String newQueryString, String countQuery) {\nthis.em = previousQuery.em;\nthis.query = newQueryString;\nthis.countQuery = countQuery;\nthis.orderBy = previousQuery.orderBy;\nthis.paramsArrayOrMap = previousQuery.paramsArrayOrMap;\nthis.page = previousQuery.page;\nthis.count = previousQuery.count;\nthis.range = previousQuery.range;\nthis.lockModeType = previousQuery.lockModeType;\nthis.hints = previousQuery.hints;\nthis.filters = previousQuery.filters;\n}\npublic CommonPanacheQueryImpl project(Class type) {\nif (AbstractJpaOperations.isNamedQuery(query)) {\nthrow new PanacheQueryException(\"Unable to perform a projection on a named query\");\n}\nConstructor constructor = type.getDeclaredConstructors()[0];\nStringBuilder select = new StringBuilder(\"SELECT new \").append(type.getName()).append(\" (\");\nint selectInitialLength = select.length();\nfor (Parameter parameter : constructor.getParameters()) {\nif (!parameter.isNamePresent()) {\nthrow new PanacheQueryException(\n\"Your application must be built with parameter names, this should be the default if\" +\n\" using Quarkus artifacts. 
Check the maven or gradle compiler configuration to include '-parameters'.\");\n}\nif (select.length() > selectInitialLength) {\nselect.append(\", \");\n}\nselect.append(parameter.getName());\n}\nselect.append(\") \");\nreturn new CommonPanacheQueryImpl<>(this, select.toString() + query, \"select count(*) \" + query);\n}\npublic void filter(String filterName, Map parameters) {\nif (filters == null)\nfilters = new HashMap<>();\nfilters.put(filterName, parameters);\n}\npublic void page(Page page) {\nthis.page = page;\nthis.range = null;\n}\npublic void page(int pageIndex, int pageSize) {\npage(Page.of(pageIndex, pageSize));\n}\npublic void nextPage() {\ncheckPagination();\npage(page.next());\n}\npublic void previousPage() {\ncheckPagination();\npage(page.previous());\n}\npublic void firstPage() {\ncheckPagination();\npage(page.first());\n}\npublic void lastPage() {\ncheckPagination();\npage(page.index(pageCount() - 1));\n}\npublic boolean hasNextPage() {\ncheckPagination();\nreturn page.index < (pageCount() - 1);\n}\npublic boolean hasPreviousPage() {\ncheckPagination();\nreturn page.index > 0;\n}\npublic int pageCount() {\ncheckPagination();\nlong count = count();\nif (count == 0)\nreturn 1;\nreturn (int) Math.ceil((double) count / (double) page.size);\n}\npublic Page page() {\ncheckPagination();\nreturn page;\n}\nprivate void checkPagination() {\nif (page == null) {\nthrow new UnsupportedOperationException(\"Cannot call a page related method, \" +\n\"call page(Page) or page(int, int) to initiate pagination first\");\n}\nif (range != null) {\nthrow new UnsupportedOperationException(\"Cannot call a page related method in a ranged query, \" +\n\"call page(Page) or page(int, int) to initiate pagination first\");\n}\n}\npublic void range(int startIndex, int lastIndex) {\nthis.range = Range.of(startIndex, lastIndex);\nthis.page = null;\n}\npublic void withLock(LockModeType lockModeType) {\nthis.lockModeType = lockModeType;\n}\npublic void withHint(String hintName, Object value) {\nif (hints == null) {\nhints = new HashMap<>();\n}\nhints.put(hintName, value);\n}\n@SuppressWarnings(\"unchecked\")\npublic long count() {\nif (AbstractJpaOperations.isNamedQuery(query)) {\nthrow new PanacheQueryException(\"Unable to perform a count operation on a named query\");\n}\nif (count == null) {\nQuery countQuery = em.createQuery(countQuery());\nif (paramsArrayOrMap instanceof Map)\nAbstractJpaOperations.bindParameters(countQuery, (Map) paramsArrayOrMap);\nelse\nAbstractJpaOperations.bindParameters(countQuery, (Object[]) paramsArrayOrMap);\ntry (NonThrowingCloseable c = applyFilters()) {\ncount = (Long) countQuery.getSingleResult();\n}\n}\nreturn count;\n}\nprivate String countQuery() {\nif (countQuery != null) {\nreturn countQuery;\n}\nMatcher selectMatcher = SELECT_PATTERN.matcher(query);\nString countQuery;\nif (selectMatcher.matches()) {\nString firstSelection = selectMatcher.group(1).trim();\nif (firstSelection.toLowerCase().startsWith(\"distinct \")) {\nString secondSelection = selectMatcher.group(2);\nif (secondSelection != null && !secondSelection.trim().isEmpty()) {\nthrow new PanacheQueryException(\"Count query not supported for select query: \" + query);\n}\ncountQuery = \"SELECT COUNT(\" + firstSelection + \") \" + selectMatcher.group(3);\n} else {\ncountQuery = \"SELECT COUNT(*) \" + selectMatcher.group(3);\n}\n} else if (FROM_PATTERN.matcher(query).matches()) {\ncountQuery = \"SELECT COUNT(*) \" + query;\n} else {\nthrow new PanacheQueryException(\"Count query not supported for select query: \" + 
query);\n}\nString lcQuery = countQuery.toLowerCase();\nint orderByIndex = lcQuery.lastIndexOf(\" order by \");\nif (orderByIndex != -1) {\ncountQuery = countQuery.substring(0, orderByIndex);\n}\nreturn countQuery;\n}\n@SuppressWarnings(\"unchecked\")\npublic List list() {\nQuery jpaQuery = createQuery();\ntry (NonThrowingCloseable c = applyFilters()) {\nreturn jpaQuery.getResultList();\n}\n}\n@SuppressWarnings(\"unchecked\")\npublic Stream stream() {\nQuery jpaQuery = createQuery();\ntry (NonThrowingCloseable c = applyFilters()) {\nreturn jpaQuery.getResultStream();\n}\n}\npublic T firstResult() {\nQuery jpaQuery = createQuery(1);\ntry (NonThrowingCloseable c = applyFilters()) {\n@SuppressWarnings(\"unchecked\")\nList list = jpaQuery.getResultList();\nreturn list.isEmpty() ? null : list.get(0);\n}\n}\npublic Optional firstResultOptional() {\nreturn Optional.ofNullable(firstResult());\n}\n@SuppressWarnings(\"unchecked\")\npublic T singleResult() {\nQuery jpaQuery = createQuery();\ntry (NonThrowingCloseable c = applyFilters()) {\nreturn (T) jpaQuery.getSingleResult();\n}\n}\n@SuppressWarnings(\"unchecked\")\npublic Optional singleResultOptional() {\nQuery jpaQuery = createQuery(2);\ntry (NonThrowingCloseable c = applyFilters()) {\nList list = jpaQuery.getResultList();\nif (list.size() > 1) {\nthrow new NonUniqueResultException();\n}\nreturn list.isEmpty() ? Optional.empty() : Optional.of(list.get(0));\n}\n}\nprivate Query createQuery() {\nQuery jpaQuery = createBaseQuery();\nif (range != null) {\njpaQuery.setFirstResult(range.getStartIndex());\njpaQuery.setMaxResults(range.getLastIndex() - range.getStartIndex() + 1);\n} else if (page != null) {\njpaQuery.setFirstResult(page.index * page.size);\njpaQuery.setMaxResults(page.size);\n} else {\n@SuppressWarnings(\"deprecation\")\nRowSelection options = jpaQuery.unwrap(org.hibernate.query.Query.class).getQueryOptions();\noptions.setFirstRow(null);\noptions.setMaxRows(null);\n}\nreturn jpaQuery;\n}\nprivate Query createQuery(int maxResults) {\nQuery jpaQuery = createBaseQuery();\nif (range != null) {\njpaQuery.setFirstResult(range.getStartIndex());\n} else if (page != null) {\njpaQuery.setFirstResult(page.index * page.size);\n} else {\n@SuppressWarnings(\"deprecation\")\nRowSelection options = jpaQuery.unwrap(org.hibernate.query.Query.class).getQueryOptions();\noptions.setFirstRow(null);\n}\njpaQuery.setMaxResults(maxResults);\nreturn jpaQuery;\n}\n@SuppressWarnings(\"unchecked\")\nprivate Query createBaseQuery() {\nQuery jpaQuery;\nif (AbstractJpaOperations.isNamedQuery(query)) {\nString namedQuery = query.substring(1);\njpaQuery = em.createNamedQuery(namedQuery);\n} else {\njpaQuery = em.createQuery(orderBy != null ? 
query + orderBy : query);\n}\nif (paramsArrayOrMap instanceof Map) {\nAbstractJpaOperations.bindParameters(jpaQuery, (Map) paramsArrayOrMap);\n} else {\nAbstractJpaOperations.bindParameters(jpaQuery, (Object[]) paramsArrayOrMap);\n}\nif (this.lockModeType != null) {\njpaQuery.setLockMode(lockModeType);\n}\nif (hints != null) {\nfor (Map.Entry hint : hints.entrySet()) {\njpaQuery.setHint(hint.getKey(), hint.getValue());\n}\n}\nreturn jpaQuery;\n}\nprivate NonThrowingCloseable applyFilters() {\nif (filters == null)\nreturn NO_FILTERS;\nSession session = em.unwrap(Session.class);\nfor (Entry> entry : filters.entrySet()) {\nFilter filter = session.enableFilter(entry.getKey());\nfor (Entry paramEntry : entry.getValue().entrySet()) {\nfilter.setParameter(paramEntry.getKey(), paramEntry.getValue());\n}\nfilter.validate();\n}\nreturn new NonThrowingCloseable() {\n@Override\npublic void close() {\nfor (Entry> entry : filters.entrySet()) {\nsession.disableFilter(entry.getKey());\n}\n}\n};\n}\n}" + }, + { + "comment": "Do we need to do this in Gradle as well?", + "method_body": "public void execute() throws MojoFailureException, MojoExecutionException {\nif (project.getPackaging().equals(ArtifactCoords.TYPE_POM)) {\ngetLog().info(\"Type of the artifact is POM, skipping dev goal\");\nreturn;\n}\nmavenVersionEnforcer.ensureMavenVersion(getLog(), session);\ninitToolchain();\nString bootstrapId = handleAutoCompile();\nif (enforceBuildGoal) {\nfinal PluginDescriptor pluginDescr = getPluginDescriptor();\nfinal Plugin pluginDef = getConfiguredPluginOrNull(pluginDescr.getGroupId(), pluginDescr.getArtifactId());\nif (!isGoalConfigured(pluginDef, \"build\")) {\nif (warnIfBuildGoalMissing) {\nvar currentGoal = getCurrentGoal();\ngetLog().warn(\n\"Skipping \" + currentGoal + \" as this is assumed to be a support library.\" +\n\" To disable this warning set warnIfBuildGoalMissing parameter to false.\"\n+ System.lineSeparator() +\n\"To enable \" + currentGoal +\n\" for this module, make sure the quarkus-maven-plugin configuration includes the build goal\"\n+\n\" or disable the enforceBuildGoal flag (via plugin configuration or via\" +\n\" -Dquarkus.enforceBuildGoal=false).\");\n}\nreturn;\n}\n}\nsaveTerminalState();\nanalyticsProvider.buildAnalyticsUserInput((String prompt) -> {\nSystem.out.print(prompt);\ntry (Scanner scanner = new Scanner(new FilterInputStream(System.in) {\n@Override\npublic void close() throws IOException {\n}\n})) {\nreturn scanner.nextLine();\n} catch (Exception e) {\ngetLog().warn(\"Failed to collect user input for analytics\", e);\nreturn \"\";\n}\n});\ntry {\nDevModeRunner runner = new DevModeRunner(bootstrapId);\nMap pomFiles = readPomFileTimestamps(runner);\nrunner.run();\nlong nextCheck = System.currentTimeMillis() + 100;\nfor (;;) {\nsuspend = \"n\";\nlong sleep = Math.max(0, nextCheck - System.currentTimeMillis()) + 1;\nThread.sleep(sleep);\nif (System.currentTimeMillis() > nextCheck) {\nnextCheck = System.currentTimeMillis() + 100;\nif (!runner.alive()) {\nrestoreTerminalState();\nif (!runner.isExpectedExitValue()) {\nthrow new MojoExecutionException(\"Dev mode process did not complete successfully\");\n}\nreturn;\n}\nfinal Set changed = new HashSet<>();\nfor (Map.Entry e : pomFiles.entrySet()) {\nlong t = Files.getLastModifiedTime(e.getKey()).toMillis();\nif (t > e.getValue()) {\nchanged.add(e.getKey());\npomFiles.put(e.getKey(), t);\n}\n}\nif (!changed.isEmpty()) {\ngetLog().info(\"Changes detected to \" + changed + \", restarting dev mode\");\nrunner.stop();\nfinal DevModeRunner 
newRunner;\ntry {\nbootstrapId = handleAutoCompile();\nnewRunner = new DevModeRunner(runner.launcher.getActualDebugPort(), bootstrapId);\n} catch (Exception e) {\ngetLog().info(\"Could not load changed pom.xml file, changes not applied\", e);\ncontinue;\n}\nnewRunner.run();\nrunner = newRunner;\n}\n}\n}\n} catch (Exception e) {\nthrow new MojoFailureException(\"Failed to run\", e);\n}\n}", + "target_code": "runner.stop();", + "method_body_after": "public void execute() throws MojoFailureException, MojoExecutionException {\nif (project.getPackaging().equals(ArtifactCoords.TYPE_POM)) {\ngetLog().info(\"Type of the artifact is POM, skipping dev goal\");\nreturn;\n}\nmavenVersionEnforcer.ensureMavenVersion(getLog(), session);\ninitToolchain();\nString bootstrapId = handleAutoCompile();\nif (enforceBuildGoal) {\nfinal PluginDescriptor pluginDescr = getPluginDescriptor();\nfinal Plugin pluginDef = getConfiguredPluginOrNull(pluginDescr.getGroupId(), pluginDescr.getArtifactId());\nif (!isGoalConfigured(pluginDef, \"build\")) {\nif (warnIfBuildGoalMissing) {\nvar currentGoal = getCurrentGoal();\ngetLog().warn(\n\"Skipping \" + currentGoal + \" as this is assumed to be a support library.\" +\n\" To disable this warning set warnIfBuildGoalMissing parameter to false.\"\n+ System.lineSeparator() +\n\"To enable \" + currentGoal +\n\" for this module, make sure the quarkus-maven-plugin configuration includes the build goal\"\n+\n\" or disable the enforceBuildGoal flag (via plugin configuration or via\" +\n\" -Dquarkus.enforceBuildGoal=false).\");\n}\nreturn;\n}\n}\nsaveTerminalState();\nanalyticsProvider.buildAnalyticsUserInput((String prompt) -> {\nSystem.out.print(prompt);\ntry (Scanner scanner = new Scanner(new FilterInputStream(System.in) {\n@Override\npublic void close() throws IOException {\n}\n})) {\nreturn scanner.nextLine();\n} catch (Exception e) {\ngetLog().warn(\"Failed to collect user input for analytics\", e);\nreturn \"\";\n}\n});\ntry {\nDevModeRunner runner = new DevModeRunner(bootstrapId);\nMap pomFiles = readPomFileTimestamps(runner);\nrunner.run();\nlong nextCheck = System.currentTimeMillis() + 100;\nfor (;;) {\nsuspend = \"n\";\nlong sleep = Math.max(0, nextCheck - System.currentTimeMillis()) + 1;\nThread.sleep(sleep);\nif (System.currentTimeMillis() > nextCheck) {\nnextCheck = System.currentTimeMillis() + 100;\nif (!runner.alive()) {\nrestoreTerminalState();\nif (!runner.isExpectedExitValue()) {\nthrow new MojoExecutionException(\"Dev mode process did not complete successfully\");\n}\nreturn;\n}\nfinal Set changed = new HashSet<>();\nfor (Map.Entry e : pomFiles.entrySet()) {\nlong t = Files.getLastModifiedTime(e.getKey()).toMillis();\nif (t > e.getValue()) {\nchanged.add(e.getKey());\npomFiles.put(e.getKey(), t);\n}\n}\nif (!changed.isEmpty()) {\ngetLog().info(\"Changes detected to \" + changed + \", restarting dev mode\");\nrunner.stop();\nfinal DevModeRunner newRunner;\ntry {\nbootstrapId = handleAutoCompile();\nnewRunner = new DevModeRunner(runner.launcher.getActualDebugPort(), bootstrapId);\n} catch (Exception e) {\ngetLog().info(\"Could not load changed pom.xml file, changes not applied\", e);\ncontinue;\n}\nnewRunner.run();\nrunner = newRunner;\n}\n}\n}\n} catch (Exception e) {\nthrow new MojoFailureException(\"Failed to run\", e);\n}\n}", + "context_before": "class DevMojo extends AbstractMojo {\nprivate static final Set IGNORED_PHASES = Set.of(\n\"pre-clean\", \"clean\", \"post-clean\");\nprivate static final List PRE_DEV_MODE_PHASES = 
List.of(\n\"validate\",\n\"initialize\",\n\"generate-sources\",\n\"process-sources\",\n\"generate-resources\",\n\"process-resources\",\n\"compile\",\n\"process-classes\",\n\"generate-test-sources\",\n\"process-test-sources\",\n\"generate-test-resources\",\n\"process-test-resources\",\n\"test-compile\");\nprivate static final String IO_QUARKUS = \"io.quarkus\";\nprivate static final String QUARKUS_GENERATE_CODE_GOAL = \"generate-code\";\nprivate static final String QUARKUS_GENERATE_CODE_TESTS_GOAL = \"generate-code-tests\";\nprivate static final String ORG_APACHE_MAVEN_PLUGINS = \"org.apache.maven.plugins\";\nprivate static final String MAVEN_COMPILER_PLUGIN = \"maven-compiler-plugin\";\nprivate static final String MAVEN_RESOURCES_PLUGIN = \"maven-resources-plugin\";\nprivate static final String MAVEN_TOOLCHAINS_PLUGIN = \"maven-toolchains-plugin\";\nprivate static final String ORG_JETBRAINS_KOTLIN = \"org.jetbrains.kotlin\";\nprivate static final String KOTLIN_MAVEN_PLUGIN = \"kotlin-maven-plugin\";\nprivate static final String BOOTSTRAP_ID = \"DevMojo\";\n/**\n* The directory for compiled classes.\n*/\n@Parameter(readonly = true, required = true, defaultValue = \"${project.build.outputDirectory}\")\nprivate File outputDirectory;\n@Parameter(defaultValue = \"${project}\", readonly = true, required = true)\nprotected MavenProject project;\n/**\n* If this server should be started in debug mode. The default is to start in debug mode and listen on\n* port 5005. Whether the JVM is suspended waiting for a debugger to be attached,\n* depends on the value of {@link #suspend}.\n*

\n* {@code debug} supports the following options:\n*

\n* false - The JVM is not started in debug mode\n* true - The JVM is started in debug mode and will be listening on {@code debugHost}:{@code debugPort}\n* client - The JVM is started in client mode, and will attempt to connect to {@code debugHost}:{@code debugPort}\n* {port} - The JVM is started in debug mode and will be listening on {@code debugHost}:{port}.
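\n* For example (hypothetical values, not taken from this build): {@code mvn quarkus:dev -Ddebug=5006 -Dsuspend=y}\n* would start the application JVM listening for a debugger on {@code localhost:5006} and suspend it until one attaches.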
\n* By default, {@code debugHost} has the value \"localhost\", and {@code debugPort} is 5005.\n*/\n@Parameter(defaultValue = \"${debug}\")\nprivate String debug;\n@Parameter(defaultValue = \"${open-lang-package}\")\nprivate boolean openJavaLang;\n/**\n* Allows configuring the modules to add to the application.\n* The listed modules will be added using: {@code --add-modules m1,m2...}.\n*/\n@Parameter(defaultValue = \"${add-modules}\")\nprivate List modules;\n/**\n* Whether the JVM launch, in debug mode, should be suspended. This parameter is only\n* relevant when the JVM is launched in {@link #debug debug} mode. It supports the\n* following values (all the allowed values are case-insensitive):\n* y or true - The debug mode JVM launch is suspended\n* n or false - The debug mode JVM is started without suspending
\n*/\n@Parameter(defaultValue = \"${suspend}\")\nprivate String suspend;\n@Parameter(defaultValue = \"${debugHost}\")\nprivate String debugHost;\n@Parameter(defaultValue = \"${debugPort}\")\nprivate String debugPort;\n@Parameter(defaultValue = \"${project.build.directory}\")\nprivate File buildDir;\n@Parameter(defaultValue = \"${project.build.sourceDirectory}\")\nprivate File sourceDir;\n@Parameter\nprivate File workingDir;\n@Parameter(defaultValue = \"${jvm.args}\")\nprivate String jvmArgs;\n@Parameter(defaultValue = \"${quarkus.args}\")\nprivate String argsString;\n@Parameter\nprivate Map environmentVariables = Map.of();\n@Parameter\nprivate Map systemProperties = Map.of();\n@Parameter(defaultValue = \"${session}\")\nprivate MavenSession session;\n@Parameter(defaultValue = \"TRUE\")\nprivate boolean deleteDevJar;\n@Component\nprivate MavenVersionEnforcer mavenVersionEnforcer;\n@Component\nprivate ArtifactHandlerManager artifactHandlerManager;\n@Component\nprivate RepositorySystem repoSystem;\n@Component\nQuarkusWorkspaceProvider workspaceProvider;\n@Parameter(defaultValue = \"${repositorySystemSession}\", readonly = true)\nprivate RepositorySystemSession repoSession;\n@Parameter(defaultValue = \"${project.remoteProjectRepositories}\", readonly = true, required = true)\nprivate List repos;\n@Parameter(defaultValue = \"${project.remotePluginRepositories}\", readonly = true, required = true)\nprivate List pluginRepos;\n/**\n* This value is intended to be set to true when some generated bytecode\n* is erroneous causing the JVM to crash when the verify:none option is set (which is on by default)\n*/\n@Parameter(defaultValue = \"${preventnoverify}\")\nprivate boolean preventnoverify = false;\n/**\n* Whether changes in the projects that appear to be dependencies of the project containing the application to be launched\n* should trigger hot-reload. 
By default, they do.\n*/\n@Parameter(defaultValue = \"${noDeps}\")\nprivate boolean noDeps = false;\n/**\n* Additional parameters to pass to javac when recompiling changed\n* source files.\n*/\n@Parameter\nprivate List compilerArgs;\n/**\n* Additional compiler arguments\n*/\n@Parameter\nprivate List compilerOptions;\n/**\n* The --release argument to javac.\n*/\n@Parameter(defaultValue = \"${maven.compiler.release}\")\nprivate String release;\n/**\n* The -source argument to javac.\n*/\n@Parameter(defaultValue = \"${maven.compiler.source}\")\nprivate String source;\n/**\n* The -target argument to javac.\n*/\n@Parameter(defaultValue = \"${maven.compiler.target}\")\nprivate String target;\n/**\n* Whether to enforce the quarkus-maven-plugin build goal to be configured.\n* By default, a missing build goal is considered an inconsistency (although the build goal is not required\n* technically).\n* In this case a warning will be logged and the application will not be started.\n*/\n@Parameter(defaultValue = \"${quarkus.enforceBuildGoal}\")\nprivate boolean enforceBuildGoal = true;\n@Parameter(property = \"quarkus.warnIfBuildGoalMissing\")\nboolean warnIfBuildGoalMissing = true;\n@Component\nprivate WorkspaceReader wsReader;\n@Component\nprivate BuildPluginManager pluginManager;\n@Component\nprivate ToolchainManager toolchainManager;\nprivate Map pluginMap;\n@Component\nprotected QuarkusBootstrapProvider bootstrapProvider;\n@Parameter(defaultValue = \"${mojoExecution}\", readonly = true, required = true)\nMojoExecution mojoExecution;\n@Component\nBuildAnalyticsProvider analyticsProvider;\n/**\n* A comma-separated list of Maven plugin keys in {@code groupId:artifactId} format\n* (for example {@code org.codehaus.mojo:flatten-maven-plugin} and/or goal prefixes,\n* (for example {@code flatten}) that should be skipped when {@code quarkus:dev} identifies\n* Maven plugin goals that should be executed before the application is launched in dev mode.\n*
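\n* For example (a hypothetical configuration, not taken from this build): setting\n* {@code <skipPlugins>flatten,org.example:noisy-maven-plugin</skipPlugins>} would skip executions of the\n* flatten plugin (matched by its goal prefix) and of the plugin with the given groupId:artifactId key.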

\n* Only the {@code flatten} Maven plugin is skipped by default.\n*/\n@Parameter(defaultValue = \"org.codehaus.mojo:flatten-maven-plugin\")\nSet skipPlugins;\n/**\n* console attributes, used to restore the console state\n*/\nprivate Attributes attributes;\nprivate int windowsAttributes;\nprivate boolean windowsAttributesSet;\nprivate Pty pty;\nprivate boolean windowsColorSupport;\n/**\n* Indicates for which launch mode the dependencies should be resolved.\n*\n* @return launch mode for which the dependencies should be resolved\n*/\nprotected LaunchMode getLaunchModeClasspath() {\nreturn LaunchMode.DEVELOPMENT;\n}\n@Override\npublic void setLog(Log log) {\nsuper.setLog(log);\nMojoLogger.delegate = log;\n}\n@Override\n/**\n* if the process is forcibly killed then the terminal may be left in raw mode, which\n* messes everything up. This attempts to fix that by saving the state so it can be restored\n*/\nprivate void saveTerminalState() {\ntry {\nif (windowsAttributes > 0) {\nlong hConsole = Kernel32.GetStdHandle(Kernel32.STD_INPUT_HANDLE);\nif (hConsole != (long) Kernel32.INVALID_HANDLE_VALUE) {\nint[] mode = new int[1];\nwindowsAttributes = Kernel32.GetConsoleMode(hConsole, mode) == 0 ? -1 : mode[0];\nwindowsAttributesSet = true;\nfinal int VIRTUAL_TERMINAL_PROCESSING = 0x0004;\nif (Kernel32.SetConsoleMode(hConsole, windowsAttributes | VIRTUAL_TERMINAL_PROCESSING) != 0) {\nwindowsColorSupport = true;\n}\n}\n}\n} catch (Throwable t) {\ntry {\nPty pty = ExecPty.current();\nattributes = pty.getAttr();\nDevMojo.this.pty = pty;\n} catch (Exception e) {\ngetLog().debug(\"Failed to get a local tty\", e);\n}\n}\n}\nprivate void restoreTerminalState() {\nif (windowsAttributesSet) {\nlong hConsole = Kernel32.GetStdHandle(Kernel32.STD_INPUT_HANDLE);\nif (hConsole != (long) Kernel32.INVALID_HANDLE_VALUE) {\nKernel32.SetConsoleMode(hConsole, windowsAttributes);\n}\n} else {\nif (attributes == null || pty == null) {\nreturn;\n}\nPty finalPty = pty;\ntry (finalPty) {\nfinalPty.setAttr(attributes);\nint height = finalPty.getSize().getHeight();\nString sb = ANSI.MAIN_BUFFER +\nANSI.CURSOR_SHOW +\n\"\\u001B[0m\" +\n\"\\033[\" + height + \";0H\";\nfinalPty.getSlaveOutput().write(sb.getBytes(StandardCharsets.UTF_8));\n} catch (IOException e) {\ngetLog().error(\"Error restoring console state\", e);\n}\n}\n}\nprivate String handleAutoCompile() throws MojoExecutionException {\nList goals = session.getGoals();\nif (goals.isEmpty() && !StringUtils.isEmpty(project.getDefaultGoal())) {\ngoals = List.of(StringUtils.split(project.getDefaultGoal()));\n}\nfinal String currentGoal = getCurrentGoal();\nint latestHandledPhaseIndex = -1;\nfor (String goal : goals) {\nif (goal.endsWith(currentGoal)) {\nbreak;\n}\nif (goal.indexOf(':') >= 0 || IGNORED_PHASES.contains(goal)) {\ncontinue;\n}\nvar i = PRE_DEV_MODE_PHASES.indexOf(goal);\nif (i < 0 || i == PRE_DEV_MODE_PHASES.size() - 1) {\nreturn null;\n}\nif (i > latestHandledPhaseIndex) {\nlatestHandledPhaseIndex = i;\n}\n}\nfinal String quarkusPluginId = getPluginDescriptor().getId();\nfinal Map> phaseExecutions = new HashMap<>();\nfinal Map pluginPrefixes = new HashMap<>();\nString bootstrapId = BOOTSTRAP_ID;\nfor (Plugin p : project.getBuildPlugins()) {\nif (p.getExecutions().isEmpty()) {\ncontinue;\n}\nif (skipPlugins.contains(p.getKey())) {\nif (getLog().isDebugEnabled()) {\ngetLog().debug(\"Skipping \" + p.getId() + \" execution according to skipPlugins value\");\n}\ncontinue;\n}\nfor (PluginExecution e : p.getExecutions()) {\nif (e.getPhase() != null && 
!PRE_DEV_MODE_PHASES.contains(e.getPhase())) {\nif (getLog().isDebugEnabled()) {\ngetLog().debug(\"Skipping \" + e.getId() + \" of \" + p.getId());\n}\ncontinue;\n}\nString goalPrefix = null;\nif (!e.getGoals().isEmpty()) {\ngoalPrefix = getMojoDescriptor(p, e.getGoals().get(0)).getPluginDescriptor().getGoalPrefix();\nif (skipPlugins.contains(goalPrefix)) {\nif (getLog().isDebugEnabled()) {\ngetLog().debug(\"Skipping \" + goalPrefix + \" execution according to skipPlugins value\");\ncontinue;\n}\ncontinue;\n}\npluginPrefixes.put(goalPrefix, p);\npluginPrefixes.put(p.getId(), p);\n}\nif (e.getPhase() != null) {\nphaseExecutions.computeIfAbsent(e.getPhase(), k -> new ArrayList<>()).add(new PluginExec(p, goalPrefix, e));\n} else {\nfor (String goal : e.getGoals()) {\nif (goal.equals(QUARKUS_GENERATE_CODE_GOAL) && p.getId().equals(quarkusPluginId)) {\nvar clone = e.clone();\nclone.setGoals(List.of(QUARKUS_GENERATE_CODE_GOAL));\nphaseExecutions.computeIfAbsent(\"compile\", k -> new ArrayList<>())\n.add(0, new PluginExec(p, goalPrefix, clone));\nbootstrapId = e.getId();\n} else if (goal.equals(QUARKUS_GENERATE_CODE_TESTS_GOAL) && p.getId().equals(quarkusPluginId)) {\nvar clone = e.clone();\nclone.setGoals(List.of(QUARKUS_GENERATE_CODE_TESTS_GOAL));\nphaseExecutions.computeIfAbsent(\"test-compile\", k -> new ArrayList<>())\n.add(0, new PluginExec(p, goalPrefix, clone));\n} else {\nvar mojoDescr = getMojoDescriptor(p, goal);\nif (mojoDescr.getPhase() != null) {\nphaseExecutions.computeIfAbsent(mojoDescr.getPhase(), k -> new ArrayList<>())\n.add(new PluginExec(p, goalPrefix, e));\n} else {\ngetLog().warn(\"Failed to determine the lifecycle phase for \" + p.getId() + \":\" + goal);\n}\n}\n}\n}\n}\n}\nfinal Map> executedPluginGoals = new HashMap<>();\nfor (String goal : goals) {\nif (goal.endsWith(currentGoal)) {\nbreak;\n}\nvar colon = goal.lastIndexOf(':');\nif (colon >= 0) {\nvar plugin = pluginPrefixes.get(goal.substring(0, colon));\nif (plugin == null) {\ngetLog().warn(\"Failed to locate plugin for \" + goal);\n} else {\nexecutedPluginGoals.computeIfAbsent(plugin.getId(), k -> new ArrayList<>()).add(goal.substring(colon + 1));\n}\n}\n}\nfinal Map quarkusGoalParams = Map.of(\n\"mode\", LaunchMode.DEVELOPMENT.name(),\nQuarkusBootstrapMojo.CLOSE_BOOTSTRAPPED_APP, \"false\",\n\"bootstrapId\", bootstrapId);\nfor (int phaseIndex = latestHandledPhaseIndex + 1; phaseIndex < PRE_DEV_MODE_PHASES.size(); ++phaseIndex) {\nvar executions = phaseExecutions.get(PRE_DEV_MODE_PHASES.get(phaseIndex));\nif (executions == null) {\ncontinue;\n}\nfor (PluginExec pe : executions) {\nvar executedGoals = executedPluginGoals.getOrDefault(pe.plugin.getId(), List.of());\nfor (String goal : pe.execution.getGoals()) {\nif (!executedGoals.contains(goal)) {\ntry {\nexecuteGoal(pe, goal,\npe.getPluginId().equals(quarkusPluginId) ? 
quarkusGoalParams : Map.of());\n} catch (Throwable t) {\nif (goal.equals(\"testCompile\")) {\ngetLog().error(\n\"Test compile failed, you will need to fix your tests before you can use continuous testing\",\nt);\n} else {\nthrow t;\n}\n}\n}\n}\n}\n}\nreturn bootstrapId;\n}\nprivate String getCurrentGoal() {\nreturn mojoExecution.getMojoDescriptor().getPluginDescriptor().getGoalPrefix() + \":\"\n+ mojoExecution.getGoal();\n}\nprivate PluginDescriptor getPluginDescriptor() {\nreturn mojoExecution.getMojoDescriptor().getPluginDescriptor();\n}\nprivate void initToolchain() throws MojoExecutionException {\nfinal Plugin plugin = getConfiguredPluginOrNull(ORG_APACHE_MAVEN_PLUGINS, MAVEN_TOOLCHAINS_PLUGIN);\nif (!isGoalConfigured(plugin, \"toolchain\")) {\nreturn;\n}\nexecuteGoal(new PluginExec(plugin, null, null), \"toolchain\", Map.of());\n}\nprivate void executeGoal(PluginExec pluginExec, String goal, Map params)\nthrows MojoExecutionException {\nvar msg = new StringBuilder();\nmsg.append(\"Invoking \")\n.append(pluginExec.getPrefix()).append(\":\")\n.append(pluginExec.plugin.getVersion()).append(\":\")\n.append(goal);\nif (pluginExec.getExecutionId() != null) {\nmsg.append(\" (\").append(pluginExec.getExecutionId()).append(\")\");\n}\nmsg.append(\" @ \").append(project.getArtifactId());\ngetLog().info(msg.toString());\nexecuteMojo(\nplugin(\ngroupId(pluginExec.plugin.getGroupId()),\nartifactId(pluginExec.plugin.getArtifactId()),\nversion(pluginExec.plugin.getVersion()),\npluginExec.plugin.getDependencies()),\ngoal(goal),\ngetPluginConfig(pluginExec.plugin, pluginExec.getExecutionId(), goal, params),\nexecutionEnvironment(\nproject,\nsession,\npluginManager));\n}\nprivate List readAnnotationProcessors(Xpp3Dom pluginConfig) {\nif (pluginConfig == null) {\nreturn List.of();\n}\nXpp3Dom annotationProcessors = pluginConfig.getChild(\"annotationProcessors\");\nif (annotationProcessors == null) {\nreturn List.of();\n}\nXpp3Dom[] processors = annotationProcessors.getChildren(\"annotationProcessor\");\nif (processors.length == 0) {\nreturn List.of();\n}\nList ret = new ArrayList<>(processors.length);\nfor (Xpp3Dom processor : processors) {\nret.add(processor.getValue());\n}\nreturn ret;\n}\nprivate Set readAnnotationProcessorPaths(Xpp3Dom pluginConfig) throws MojoExecutionException {\nif (pluginConfig == null) {\nreturn Set.of();\n}\nXpp3Dom annotationProcessorPaths = pluginConfig.getChild(\"annotationProcessorPaths\");\nif (annotationProcessorPaths == null) {\nreturn Set.of();\n}\nvar versionConstraints = getAnnotationProcessorPathsDepMgmt(pluginConfig);\nXpp3Dom[] paths = annotationProcessorPaths.getChildren(\"path\");\nSet elements = new LinkedHashSet<>();\ntry {\nList dependencies = convertToDependencies(paths);\nCollectRequest collectRequest = new CollectRequest(dependencies, versionConstraints,\nproject.getRemoteProjectRepositories());\nDependencyRequest dependencyRequest = new DependencyRequest();\ndependencyRequest.setCollectRequest(collectRequest);\nDependencyResult dependencyResult = repoSystem.resolveDependencies(session.getRepositorySession(),\ndependencyRequest);\nfor (ArtifactResult resolved : dependencyResult.getArtifactResults()) {\nelements.add(resolved.getArtifact().getFile());\n}\nreturn elements;\n} catch (Exception e) {\nthrow new MojoExecutionException(\n\"Resolution of annotationProcessorPath dependencies failed: \" + e.getLocalizedMessage(), e);\n}\n}\nprivate List getAnnotationProcessorPathsDepMgmt(Xpp3Dom pluginConfig) {\nfinal Xpp3Dom useDepMgmt = 
pluginConfig.getChild(\"annotationProcessorPathsUseDepMgmt\");\nif (useDepMgmt == null || !Boolean.parseBoolean(useDepMgmt.getValue())) {\nreturn List.of();\n}\nvar dm = project.getDependencyManagement();\nif (dm == null) {\nreturn List.of();\n}\nreturn getProjectAetherDependencyManagement();\n}\nprivate List convertToDependencies(Xpp3Dom[] paths) throws MojoExecutionException {\nList dependencies = new ArrayList<>();\nfor (Xpp3Dom path : paths) {\nString type = getValue(path, \"type\", \"jar\");\nArtifactHandler handler = artifactHandlerManager.getArtifactHandler(type);\norg.eclipse.aether.artifact.Artifact artifact = new DefaultArtifact(\ngetValue(path, \"groupId\", null),\ngetValue(path, \"artifactId\", null),\ngetValue(path, \"classifier\", null),\nhandler.getExtension(),\ngetValue(path, \"version\", null));\nif (toNullIfEmpty(artifact.getVersion()) == null) {\nartifact = artifact.setVersion(getAnnotationProcessorPathVersion(artifact));\n}\nSet exclusions = convertToAetherExclusions(path.getChild(\"exclusions\"));\ndependencies.add(new org.eclipse.aether.graph.Dependency(artifact, JavaScopes.RUNTIME, false, exclusions));\n}\nreturn dependencies;\n}\nprivate String getAnnotationProcessorPathVersion(org.eclipse.aether.artifact.Artifact annotationProcessorPath)\nthrows MojoExecutionException {\nList managedDependencies = getProjectManagedDependencies();\nreturn findManagedVersion(annotationProcessorPath, managedDependencies)\n.orElseThrow(() -> new MojoExecutionException(String.format(\n\"Cannot find version for annotation processor path '%s'. The version needs to be either\"\n+ \" provided directly in the plugin configuration or via dependency management.\",\nannotationProcessorPath)));\n}\nprivate Optional findManagedVersion(\norg.eclipse.aether.artifact.Artifact artifact, List managedDependencies) {\nreturn managedDependencies.stream()\n.filter(dep -> Objects.equals(dep.getGroupId(), artifact.getGroupId())\n&& Objects.equals(dep.getArtifactId(), artifact.getArtifactId())\n&& Objects.equals(dep.getClassifier(), toNullIfEmpty(artifact.getClassifier()))\n&& Objects.equals(dep.getType(), toNullIfEmpty(artifact.getExtension())))\n.findAny()\n.map(org.apache.maven.model.Dependency::getVersion);\n}\nprivate String toNullIfEmpty(String value) {\nif (value != null && value.isBlank())\nreturn null;\nreturn value;\n}\nprivate List getProjectManagedDependencies() {\nDependencyManagement dependencyManagement = project.getDependencyManagement();\nif (dependencyManagement == null || dependencyManagement.getDependencies() == null) {\nreturn List.of();\n}\nreturn dependencyManagement.getDependencies();\n}\nprivate String getValue(Xpp3Dom path, String element, String defaultValue) {\nXpp3Dom child = path.getChild(element);\nif (child == null) {\nreturn defaultValue;\n}\nreturn child.getValue();\n}\nprivate Set convertToAetherExclusions(Xpp3Dom exclusions) {\nif (exclusions == null) {\nreturn Set.of();\n}\nSet aetherExclusions = new HashSet<>();\nfor (Xpp3Dom exclusion : exclusions.getChildren(\"exclusion\")) {\nExclusion aetherExclusion = new Exclusion(\ngetValue(exclusion, \"groupId\", null),\ngetValue(exclusion, \"artifactId\", null),\ngetValue(exclusion, \"classifier\", null),\ngetValue(exclusion, \"extension\", \"jar\"));\naetherExclusions.add(aetherExclusion);\n}\nreturn aetherExclusions;\n}\nprivate boolean isGoalConfigured(Plugin plugin, String goal) {\nif (plugin == null) {\nreturn false;\n}\nfor (PluginExecution pluginExecution : plugin.getExecutions()) {\nif 
(pluginExecution.getGoals().contains(goal)) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate Xpp3Dom getPluginConfig(Plugin plugin, String executionId, String goal, Map params)\nthrows MojoExecutionException {\nXpp3Dom mergedConfig = null;\nif (!plugin.getExecutions().isEmpty()) {\nfor (PluginExecution exec : plugin.getExecutions()) {\nif (exec.getConfiguration() != null && exec.getGoals().contains(goal)\n&& matchesExecution(executionId, exec.getId())) {\nmergedConfig = mergedConfig == null ? (Xpp3Dom) exec.getConfiguration()\n: Xpp3Dom.mergeXpp3Dom(mergedConfig, (Xpp3Dom) exec.getConfiguration(), true);\n}\n}\n}\nif ((Xpp3Dom) plugin.getConfiguration() != null) {\nmergedConfig = mergedConfig == null ? (Xpp3Dom) plugin.getConfiguration()\n: Xpp3Dom.mergeXpp3Dom(mergedConfig, (Xpp3Dom) plugin.getConfiguration(), true);\n}\nfinal Xpp3Dom configuration = configuration();\nif (mergedConfig != null) {\nSet supportedParams = null;\nfor (Xpp3Dom child : mergedConfig.getChildren()) {\nif (child.getName().startsWith(\"test\")) {\ncontinue;\n}\nif (supportedParams == null) {\nsupportedParams = getMojoDescriptor(plugin, goal).getParameterMap().keySet();\n}\nif (supportedParams.contains(child.getName())) {\nconfiguration.addChild(child);\n}\n}\n}\nfor (Map.Entry param : params.entrySet()) {\nfinal Xpp3Dom p = new Xpp3Dom(param.getKey());\np.setValue(param.getValue());\nconfiguration.addChild(p);\n}\nreturn configuration;\n}\n/**\n* Check if the currentExecutionId matches the provided executionId.\n*

\n* This method will return true if:\n* - the current execution id is undefined, or\n* - the execution id is undefined, or\n* - both are equal (ignoring case).
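\n* For example, {@code matchesExecution(null, \"default-compile\")} returns {@code true},\n* {@code matchesExecution(\"default-compile\", \"DEFAULT-COMPILE\")} returns {@code true}, and\n* {@code matchesExecution(\"default-compile\", \"other\")} returns {@code false}.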
\n*\n* @param currentExecutionId current execution id (if defined)\n* @param executionId execution id to test matching (if defined)\n* @return true if executions ids do match.\n*/\nprivate static boolean matchesExecution(String currentExecutionId, String executionId) {\nif (currentExecutionId == null) {\nreturn true;\n}\nreturn executionId == null || currentExecutionId.equalsIgnoreCase(executionId);\n}\nprivate MojoDescriptor getMojoDescriptor(Plugin plugin, String goal) throws MojoExecutionException {\ntry {\nreturn pluginManager.getMojoDescriptor(plugin, goal, pluginRepos, repoSession);\n} catch (Exception e) {\nthrow new MojoExecutionException(\n\"Failed to obtain descriptor for Maven plugin \" + plugin.getId() + \" goal \" + goal, e);\n}\n}\nprivate Plugin getConfiguredPluginOrNull(String groupId, String artifactId) {\nif (pluginMap == null) {\npluginMap = new HashMap<>();\nfor (Plugin p : project.getBuildPlugins()) {\npluginMap.put(ArtifactKey.ga(p.getGroupId(), p.getArtifactId()), p);\n}\n}\nreturn pluginMap.get(ArtifactKey.ga(groupId, artifactId));\n}\nprivate Map readPomFileTimestamps(DevModeRunner runner) throws IOException {\nMap ret = new HashMap<>();\nfor (Path i : runner.pomFiles()) {\nret.put(i, Files.getLastModifiedTime(i).toMillis());\n}\nreturn ret;\n}\nprivate String getSourceEncoding() {\nObject sourceEncodingProperty = project.getProperties().get(\"project.build.sourceEncoding\");\nif (sourceEncodingProperty != null) {\nreturn (String) sourceEncodingProperty;\n}\nreturn null;\n}\nprivate void addProject(MavenDevModeLauncher.Builder builder, ResolvedDependency module, boolean root) throws Exception {\nif (!module.isJar()) {\nreturn;\n}\nString projectDirectory;\nSet sourcePaths;\nString classesPath = null;\nString generatedSourcesPath = null;\nSet resourcePaths;\nSet testSourcePaths;\nString testClassesPath = null;\nSet testResourcePaths;\nList activeProfiles = List.of();\nMavenProject mavenProject = null;\nif (module.getClassifier().isEmpty()) {\nfor (MavenProject p : session.getAllProjects()) {\nif (module.getArtifactId().equals(p.getArtifactId())\n&& module.getGroupId().equals(p.getGroupId())\n&& module.getVersion().equals(p.getVersion())) {\nmavenProject = p;\nbreak;\n}\n}\n}\nfinal ArtifactSources sources = module.getSources();\nif (mavenProject == null) {\nif (sources == null) {\ngetLog().debug(\"Local dependency \" + module.toCompactCoords() + \" does not appear to have any sources\");\nreturn;\n}\nprojectDirectory = module.getWorkspaceModule().getModuleDir().getAbsolutePath();\nsourcePaths = new LinkedHashSet<>();\nfor (SourceDir src : sources.getSourceDirs()) {\nfor (Path p : src.getSourceTree().getRoots()) {\nsourcePaths.add(p.toAbsolutePath());\n}\n}\ntestSourcePaths = new LinkedHashSet<>();\nArtifactSources testSources = module.getWorkspaceModule().getTestSources();\nif (testSources != null) {\nfor (SourceDir src : testSources.getSourceDirs()) {\nfor (Path p : src.getSourceTree().getRoots()) {\ntestSourcePaths.add(p.toAbsolutePath());\n}\n}\n}\n} else {\nprojectDirectory = mavenProject.getBasedir().getPath();\nsourcePaths = mavenProject.getCompileSourceRoots().stream()\n.map(Path::of)\n.map(Path::toAbsolutePath)\n.collect(Collectors.toCollection(LinkedHashSet::new));\ntestSourcePaths = mavenProject.getTestCompileSourceRoots().stream()\n.map(Path::of)\n.map(Path::toAbsolutePath)\n.collect(Collectors.toCollection(LinkedHashSet::new));\nactiveProfiles = mavenProject.getActiveProfiles();\n}\nfinal Path sourceParent;\nif (sourcePaths.isEmpty()) {\nif (sources == 
null || sources.getResourceDirs() == null) {\nthrow new MojoExecutionException(\n\"Local dependency \" + module.toCompactCoords() + \" does not appear to have any sources\");\n}\nsourceParent = sources.getResourceDirs().iterator().next().getDir().toAbsolutePath().getParent();\n} else {\nsourceParent = sourcePaths.iterator().next().toAbsolutePath().getParent();\n}\nPath classesDir = null;\nresourcePaths = new LinkedHashSet<>();\nif (sources != null) {\nSourceDir firstSourceDir = sources.getSourceDirs().iterator().next();\nclassesDir = firstSourceDir.getOutputDir().toAbsolutePath();\nif (firstSourceDir.getAptSourcesDir() != null) {\ngeneratedSourcesPath = firstSourceDir.getAptSourcesDir().toAbsolutePath().toString();\n}\nif (Files.isDirectory(classesDir)) {\nclassesPath = classesDir.toString();\n}\nfor (SourceDir src : sources.getResourceDirs()) {\nfor (Path p : src.getSourceTree().getRoots()) {\nresourcePaths.add(p.toAbsolutePath());\n}\n}\n}\nif (module.getWorkspaceModule().hasTestSources()) {\nPath testClassesDir = module.getWorkspaceModule().getTestSources().getSourceDirs().iterator().next().getOutputDir()\n.toAbsolutePath();\ntestClassesPath = testClassesDir.toString();\n}\ntestResourcePaths = new LinkedHashSet<>();\nArtifactSources testSources = module.getWorkspaceModule().getTestSources();\nif (testSources != null) {\nfor (SourceDir src : testSources.getResourceDirs()) {\nfor (Path p : src.getSourceTree().getRoots()) {\ntestResourcePaths.add(p.toAbsolutePath());\n}\n}\n}\nfor (Profile profile : activeProfiles) {\nfinal BuildBase build = profile.getBuild();\nif (build != null) {\nresourcePaths.addAll(\nbuild.getResources().stream()\n.map(Resource::getDirectory)\n.map(Path::of)\n.map(Path::toAbsolutePath)\n.collect(Collectors.toList()));\ntestResourcePaths.addAll(\nbuild.getTestResources().stream()\n.map(Resource::getDirectory)\n.map(Path::of)\n.map(Path::toAbsolutePath)\n.collect(Collectors.toList()));\n}\n}\nif (classesPath == null && (!sourcePaths.isEmpty() || !resourcePaths.isEmpty())) {\nthrow new MojoExecutionException(\"Hot reloadable dependency \" + module.getWorkspaceModule().getId()\n+ \" has not been compiled yet (the classes directory \" + (classesDir == null ? 
\"\" : classesDir)\n+ \" does not exist)\");\n}\nPath targetDir = Path.of(project.getBuild().getDirectory());\nif (generatedSourcesPath != null && Files.notExists(Path.of(generatedSourcesPath))) {\nFiles.createDirectories(Path.of(generatedSourcesPath));\n}\nDevModeContext.ModuleInfo moduleInfo = new DevModeContext.ModuleInfo.Builder()\n.setArtifactKey(module.getKey())\n.setProjectDirectory(projectDirectory)\n.setSourcePaths(PathList.from(sourcePaths))\n.setClassesPath(classesPath)\n.setGeneratedSourcesPath(generatedSourcesPath)\n.setResourcesOutputPath(classesPath)\n.setResourcePaths(PathList.from(resourcePaths))\n.setSourceParents(PathList.of(sourceParent.toAbsolutePath()))\n.setPreBuildOutputDir(targetDir.resolve(\"generated-sources\").toAbsolutePath().toString())\n.setTargetDir(targetDir.toAbsolutePath().toString())\n.setTestSourcePaths(PathList.from(testSourcePaths))\n.setTestClassesPath(testClassesPath)\n.setTestResourcesOutputPath(testClassesPath)\n.setTestResourcePaths(PathList.from(testResourcePaths))\n.build();\nif (root) {\nbuilder.mainModule(moduleInfo);\n} else {\nbuilder.dependency(moduleInfo);\n}\n}\nprivate class DevModeRunner {\nfinal QuarkusDevModeLauncher launcher;\nprivate Process process;\nprivate DevModeRunner(String bootstrapId) throws Exception {\nlauncher = newLauncher(null, bootstrapId);\n}\nprivate DevModeRunner(String actualDebugPort, String bootstrapId) throws Exception {\nlauncher = newLauncher(actualDebugPort, bootstrapId);\n}\nCollection pomFiles() {\nreturn launcher.watchedBuildFiles();\n}\nboolean alive() {\nreturn process != null && process.isAlive();\n}\nint exitValue() {\nreturn process == null ? -1 : process.exitValue();\n}\nboolean isExpectedExitValue() {\nreturn exitValue() == 0 || exitValue() == 130;\n}\nvoid run() throws Exception {\nif (getLog().isDebugEnabled()) {\ngetLog().debug(\"Launching JVM with command line: \" + String.join(\" \", launcher.args()));\n}\nfinal ProcessBuilder processBuilder = new ProcessBuilder(launcher.args())\n.redirectErrorStream(true)\n.inheritIO()\n.directory(workingDir == null ? 
project.getBasedir() : workingDir);\nif (!environmentVariables.isEmpty()) {\nprocessBuilder.environment().putAll(environmentVariables);\n}\nprocess = processBuilder.start();\nRuntime.getRuntime().addShutdownHook(new Thread(new Runnable() {\n@Override\npublic void run() {\nprocess.destroy();\ntry {\nprocess.waitFor();\n} catch (InterruptedException e) {\ngetLog().warn(\"Unable to properly wait for dev-mode end\", e);\n}\n}\n}, \"Development Mode Shutdown Hook\"));\n}\nvoid stop() throws InterruptedException {\nprocess.destroy();\nprocess.waitFor();\n}\n}\nprivate QuarkusDevModeLauncher newLauncher(String actualDebugPort, String bootstrapId) throws Exception {\nString java = null;\nif (toolchainManager != null) {\nToolchain toolchain = toolchainManager.getToolchainFromBuildContext(\"jdk\", session);\nif (toolchain != null) {\njava = toolchain.findTool(\"java\");\ngetLog().info(\"JVM from toolchain: \" + java);\n}\n}\nfinal MavenDevModeLauncher.Builder builder = MavenDevModeLauncher.builder(java, getLog())\n.preventnoverify(preventnoverify)\n.buildDir(buildDir)\n.outputDir(outputDirectory)\n.suspend(suspend)\n.debug(debug)\n.debugHost(debugHost)\n.debugPort(actualDebugPort)\n.deleteDevJar(deleteDevJar);\nsetJvmArgs(builder);\nif (windowsColorSupport) {\nbuilder.jvmArgs(\"-Dio.quarkus.force-color-support=true\");\n}\nif (openJavaLang) {\nbuilder.jvmArgs(\"--add-opens\");\nbuilder.jvmArgs(\"java.base/java.lang=ALL-UNNAMED\");\n}\nif (modules != null && !modules.isEmpty()) {\nString mods = String.join(\",\", this.modules);\nbuilder.jvmArgs(\"--add-modules\");\nbuilder.jvmArgs(mods);\n}\nbuilder.projectDir(project.getFile().getParentFile());\nProperties projectProperties = project.getProperties();\nMap effectiveProperties = new HashMap<>();\nfor (String name : projectProperties.stringPropertyNames()) {\nif (name.startsWith(\"quarkus.\")) {\neffectiveProperties.put(name, projectProperties.getProperty(name));\n}\n}\nList effectivePropertyValues = new ArrayList<>(effectiveProperties.values());\nfor (String value : effectivePropertyValues) {\nfor (String reference : Expression.compile(value, LENIENT_SYNTAX, NO_TRIM).getReferencedStrings()) {\nString referenceValue = session.getUserProperties().getProperty(reference);\nif (referenceValue != null) {\neffectiveProperties.put(reference, referenceValue);\ncontinue;\n}\nreferenceValue = projectProperties.getProperty(reference);\nif (referenceValue != null) {\neffectiveProperties.put(reference, referenceValue);\n}\n}\n}\nbuilder.buildSystemProperties(effectiveProperties);\nbuilder.applicationName(project.getArtifactId());\nbuilder.applicationVersion(project.getVersion());\nbuilder.sourceEncoding(getSourceEncoding());\nif (compilerOptions != null) {\nfor (CompilerOptions compilerOption : compilerOptions) {\nbuilder.compilerOptions(compilerOption.getName(), compilerOption.getArgs());\n}\n}\nfinal Optional compilerPluginConfiguration = findCompilerPluginConfiguration();\nif (compilerArgs != null) {\nbuilder.compilerOptions(\"java\", compilerArgs);\n} else if (compilerPluginConfiguration.isPresent()) {\nfinal Xpp3Dom compilerPluginArgsConfiguration = compilerPluginConfiguration.get().getChild(\"compilerArgs\");\nif (compilerPluginArgsConfiguration != null) {\nList compilerPluginArgs = new ArrayList<>();\nfor (Xpp3Dom argConfiguration : compilerPluginArgsConfiguration.getChildren()) {\ncompilerPluginArgs.add(argConfiguration.getValue());\n}\nif (compilerPluginArgsConfiguration.getValue() != null\n&& !compilerPluginArgsConfiguration.getValue().isEmpty()) 
{\ncompilerPluginArgs.add(compilerPluginArgsConfiguration.getValue().trim());\n}\nbuilder.compilerOptions(\"java\", compilerPluginArgs);\n}\n}\nif (release != null) {\nbuilder.releaseJavaVersion(release);\n} else if (compilerPluginConfiguration.isPresent()) {\napplyCompilerFlag(compilerPluginConfiguration, \"release\", builder::releaseJavaVersion);\n}\nif (source != null) {\nbuilder.sourceJavaVersion(source);\n} else if (compilerPluginConfiguration.isPresent()) {\napplyCompilerFlag(compilerPluginConfiguration, \"source\", builder::sourceJavaVersion);\n}\nif (target != null) {\nbuilder.targetJavaVersion(target);\n} else if (compilerPluginConfiguration.isPresent()) {\napplyCompilerFlag(compilerPluginConfiguration, \"target\", builder::targetJavaVersion);\n}\nsetKotlinSpecificFlags(builder);\nsetAnnotationProcessorFlags(builder);\nfinal Path appModelLocation = resolveSerializedModelLocation();\nApplicationModel appModel = bootstrapProvider.getResolvedApplicationModel(\nQuarkusBootstrapProvider.getProjectId(project), getLaunchModeClasspath(), bootstrapId);\nif (appModel != null) {\nbootstrapProvider.close();\n} else {\nfinal BootstrapMavenContextConfig mvnConfig = BootstrapMavenContext.config()\n.setUserSettings(session.getRequest().getUserSettingsFile())\n.setRemoteRepositories(repos)\n.setWorkspaceDiscovery(true)\n.setPreferPomsFromWorkspace(true)\n.setCurrentProject(project.getBasedir().toString());\nfinal BootstrapMavenContext mvnCtx = workspaceProvider.createMavenContext(mvnConfig);\nappModel = new BootstrapAppModelResolver(new MavenArtifactResolver(mvnCtx))\n.setDevMode(true)\n.setTest(LaunchMode.TEST.equals(getLaunchModeClasspath()))\n.setCollectReloadableDependencies(!noDeps)\n.setIncubatingModelResolver(!IncubatingApplicationModelResolver\n.isIncubatingModelResolverProperty(project.getProperties(), \"false\"))\n.resolveModel(mvnCtx.getCurrentProject().getAppArtifact());\n}\nBootstrapUtils.serializeAppModel(appModel, appModelLocation);\nbuilder.jvmArgs(\"-D\" + BootstrapConstants.SERIALIZED_APP_MODEL + \"=\" + appModelLocation);\nif (noDeps) {\naddProject(builder, appModel.getAppArtifact(), true);\nappModel.getApplicationModule().getBuildFiles().forEach(builder::watchedBuildFile);\nbuilder.localArtifact(\nArtifactKey.of(project.getGroupId(), project.getArtifactId(), null, ArtifactCoords.TYPE_JAR));\n} else {\nfor (ResolvedDependency project : DependenciesFilter.getReloadableModules(appModel)) {\naddProject(builder, project, project == appModel.getAppArtifact());\nproject.getWorkspaceModule().getBuildFiles().forEach(builder::watchedBuildFile);\nbuilder.localArtifact(project.getKey());\n}\n}\naddQuarkusDevModeDeps(builder, appModel);\nSet resourceDirs = new HashSet<>();\nfor (Resource resource : project.getResources()) {\nString dir = resource.getDirectory();\nPath path = Paths.get(dir);\nresourceDirs.add(path);\n}\nfinal Collection configuredParentFirst = ConfiguredClassLoading.builder()\n.setApplicationModel(appModel)\n.setApplicationRoot(PathsCollection.from(resourceDirs))\n.setMode(QuarkusBootstrap.Mode.DEV)\n.build().getParentFirstArtifacts();\nfor (Artifact appDep : project.getArtifacts()) {\nfinal ArtifactKey key = ArtifactKey.of(appDep.getGroupId(), appDep.getArtifactId(),\nappDep.getClassifier(), appDep.getArtifactHandler().getExtension());\nif (!builder.isLocal(key) && configuredParentFirst.contains(key)) {\nbuilder.classpathEntry(key, appDep.getFile());\n}\n}\nbuilder.baseName(project.getBuild().getFinalName());\nmodifyDevModeContext(builder);\nif (argsString != null) 
{\nbuilder.applicationArgs(argsString);\n}\nanalyticsProvider.sendAnalytics(DEV_MODE, appModel, emptyMap(), buildDir);\nreturn builder.build();\n}\nprivate void setJvmArgs(Builder builder) throws Exception {\nString jvmArgs = this.jvmArgs;\nif (!systemProperties.isEmpty()) {\nfinal StringBuilder buf = new StringBuilder();\nif (jvmArgs != null) {\nbuf.append(jvmArgs);\n}\nfor (Map.Entry prop : systemProperties.entrySet()) {\nbuf.append(\" -D\").append(prop.getKey()).append(\"=\\\"\").append(prop.getValue()).append(\"\\\"\");\n}\njvmArgs = buf.toString();\n}\nif (jvmArgs != null) {\nbuilder.jvmArgs(Arrays.asList(CommandLineUtils.translateCommandline(jvmArgs)));\n}\n}\nprivate void applyCompilerFlag(Optional compilerPluginConfiguration, String flagName,\nConsumer builderCall) {\ncompilerPluginConfiguration\n.map(cfg -> cfg.getChild(flagName))\n.map(Xpp3Dom::getValue)\n.map(String::trim)\n.filter(not(String::isEmpty))\n.ifPresent(builderCall);\n}\nprivate void addQuarkusDevModeDeps(MavenDevModeLauncher.Builder builder, ApplicationModel appModel)\nthrows MojoExecutionException, DependencyResolutionException {\nResolvedDependency coreDeployment = null;\nfor (ResolvedDependency d : appModel.getDependencies()) {\nif (d.isDeploymentCp() && d.getArtifactId().equals(\"quarkus-core-deployment\")\n&& d.getGroupId().equals(IO_QUARKUS)) {\ncoreDeployment = d;\nbreak;\n}\n}\nif (coreDeployment == null) {\nthrow new MojoExecutionException(\n\"Failed to locate io.quarkus:quarkus-core-deployment on the application build classpath\");\n}\nfinal String pomPropsPath = \"META-INF/maven/io.quarkus/quarkus-bootstrap-maven-resolver/pom.properties\";\nfinal InputStream devModePomPropsIs = DevModeMain.class.getClassLoader().getResourceAsStream(pomPropsPath);\nif (devModePomPropsIs == null) {\nthrow new MojoExecutionException(\"Failed to locate \" + pomPropsPath + \" on the classpath\");\n}\nfinal Properties devModeProps = new Properties();\ntry (InputStream is = devModePomPropsIs) {\ndevModeProps.load(is);\n} catch (IOException e) {\nthrow new MojoExecutionException(\"Failed to load \" + pomPropsPath + \" from the classpath\", e);\n}\nfinal String devModeGroupId = devModeProps.getProperty(\"groupId\");\nif (devModeGroupId == null) {\nthrow new MojoExecutionException(\"Classpath resource \" + pomPropsPath + \" is missing groupId\");\n}\nfinal String devModeArtifactId = devModeProps.getProperty(\"artifactId\");\nif (devModeArtifactId == null) {\nthrow new MojoExecutionException(\"Classpath resource \" + pomPropsPath + \" is missing artifactId\");\n}\nfinal String devModeVersion = devModeProps.getProperty(\"version\");\nif (devModeVersion == null) {\nthrow new MojoExecutionException(\"Classpath resource \" + pomPropsPath + \" is missing version\");\n}\nfinal DefaultArtifact devModeJar = new DefaultArtifact(devModeGroupId, devModeArtifactId, ArtifactCoords.TYPE_JAR,\ndevModeVersion);\nfinal DependencyResult cpRes = repoSystem.resolveDependencies(repoSession,\nnew DependencyRequest()\n.setCollectRequest(\nnew CollectRequest()\n.setRootArtifact(new DefaultArtifact(IO_QUARKUS, \"quarkus-devmode-alias\",\nArtifactCoords.TYPE_JAR, \"1.0\"))\n.setManagedDependencies(getProjectAetherDependencyManagement())\n.setDependencies(List.of(\nnew org.eclipse.aether.graph.Dependency(devModeJar, JavaScopes.RUNTIME),\nnew org.eclipse.aether.graph.Dependency(new DefaultArtifact(\ncoreDeployment.getGroupId(), coreDeployment.getArtifactId(),\ncoreDeployment.getClassifier(), coreDeployment.getType(),\ncoreDeployment.getVersion()), 
JavaScopes.RUNTIME)))\n.setRepositories(repos)));\nfor (ArtifactResult appDep : cpRes.getArtifactResults()) {\nfinal org.eclipse.aether.artifact.Artifact a = appDep.getArtifact();\nif (!(a.getArtifactId().equals(\"quarkus-ide-launcher\")\n&& a.getGroupId().equals(IO_QUARKUS))) {\nif (a.getArtifactId().equals(\"quarkus-class-change-agent\")\n&& a.getGroupId().equals(IO_QUARKUS)) {\nbuilder.jvmArgs(\"-javaagent:\" + a.getFile().getAbsolutePath());\n} else {\nbuilder.classpathEntry(\nArtifactKey.of(a.getGroupId(), a.getArtifactId(), a.getClassifier(), a.getExtension()),\na.getFile());\n}\n}\n}\n}\nprivate List getProjectAetherDependencyManagement() {\nfinal List managed = new ArrayList<>(\nproject.getDependencyManagement().getDependencies().size());\nproject.getDependencyManagement().getDependencies().forEach(d -> {\nfinal List exclusions;\nif (!d.getExclusions().isEmpty()) {\nexclusions = new ArrayList<>(d.getExclusions().size());\nd.getExclusions().forEach(e -> exclusions.add(new Exclusion(e.getGroupId(), e.getArtifactId(), \"*\", \"*\")));\n} else {\nexclusions = List.of();\n}\nmanaged.add(new org.eclipse.aether.graph.Dependency(\nnew DefaultArtifact(d.getGroupId(), d.getArtifactId(), d.getClassifier(), d.getType(), d.getVersion()),\nd.getScope(), d.isOptional(), exclusions));\n});\nreturn managed;\n}\nprivate void setKotlinSpecificFlags(MavenDevModeLauncher.Builder builder) {\nPlugin kotlinMavenPlugin = null;\nfor (Plugin plugin : project.getBuildPlugins()) {\nif (plugin.getArtifactId().equals(KOTLIN_MAVEN_PLUGIN) && plugin.getGroupId().equals(ORG_JETBRAINS_KOTLIN)) {\nkotlinMavenPlugin = plugin;\nbreak;\n}\n}\nif (kotlinMavenPlugin == null) {\nreturn;\n}\ngetLog().debug(\"Kotlin Maven plugin detected\");\nList compilerPluginArtifacts = new ArrayList<>();\nList dependencies = kotlinMavenPlugin.getDependencies();\nfor (Dependency dependency : dependencies) {\ntry {\nArtifactResult resolvedArtifact = repoSystem.resolveArtifact(repoSession,\nnew ArtifactRequest()\n.setArtifact(new DefaultArtifact(dependency.getGroupId(), dependency.getArtifactId(),\ndependency.getClassifier(), dependency.getType(), dependency.getVersion()))\n.setRepositories(repos));\ncompilerPluginArtifacts.add(resolvedArtifact.getArtifact().getFile().toPath().toAbsolutePath().toString());\n} catch (ArtifactResolutionException e) {\ngetLog().warn(\"Unable to properly setup dev-mode for Kotlin\", e);\nreturn;\n}\n}\nbuilder.compilerPluginArtifacts(compilerPluginArtifacts);\nList options = new ArrayList<>();\nXpp3Dom compilerPluginConfiguration = (Xpp3Dom) kotlinMavenPlugin.getConfiguration();\nif (compilerPluginConfiguration != null) {\nXpp3Dom compilerPluginArgsConfiguration = compilerPluginConfiguration.getChild(\"pluginOptions\");\nif (compilerPluginArgsConfiguration != null) {\nfor (Xpp3Dom argConfiguration : compilerPluginArgsConfiguration.getChildren()) {\noptions.add(argConfiguration.getValue());\n}\n}\n}\nbuilder.compilerPluginOptions(options);\n}\nprivate void setAnnotationProcessorFlags(MavenDevModeLauncher.Builder builder) {\nPlugin compilerMavenPlugin = null;\nfor (Plugin plugin : project.getBuildPlugins()) {\nif (plugin.getArtifactId().equals(\"maven-compiler-plugin\")\n&& plugin.getGroupId().equals(\"org.apache.maven.plugins\")) {\ncompilerMavenPlugin = plugin;\nbreak;\n}\n}\nif (compilerMavenPlugin == null) {\nreturn;\n}\ngetLog().debug(\"Maven compiler plugin found, looking for annotation processors\");\nList options = new ArrayList<>();\nXpp3Dom compilerPluginConfiguration = (Xpp3Dom) 
compilerMavenPlugin.getConfiguration();\ntry {\nSet processorPaths = this.readAnnotationProcessorPaths(compilerPluginConfiguration);\ngetLog().debug(\"Found processor paths: \" + processorPaths);\nif (!processorPaths.isEmpty()) {\nbuilder.annotationProcessorPaths(processorPaths);\n}\n} catch (MojoExecutionException e) {\nthrow new RuntimeException(e);\n}\nList processors = this.readAnnotationProcessors(compilerPluginConfiguration);\ngetLog().debug(\"Found processors: \" + processors);\nif (!processors.isEmpty()) {\nbuilder.annotationProcessors(processors);\n}\nbuilder.compilerPluginOptions(options);\n}\nprotected void modifyDevModeContext(MavenDevModeLauncher.Builder builder) {\n}\nprivate Optional findCompilerPluginConfiguration() {\nfor (final Plugin plugin : project.getBuildPlugins()) {\nif (plugin.getArtifactId().equals(MAVEN_COMPILER_PLUGIN) && plugin.getGroupId().equals(ORG_APACHE_MAVEN_PLUGINS)) {\nfinal Xpp3Dom compilerPluginConfiguration = (Xpp3Dom) plugin.getConfiguration();\nif (compilerPluginConfiguration != null) {\nreturn Optional.of(compilerPluginConfiguration);\n}\n}\n}\nreturn Optional.empty();\n}\nprivate Path resolveSerializedModelLocation() {\nfinal Path p = BootstrapUtils.resolveSerializedAppModelPath(Paths.get(project.getBuild().getDirectory()));\np.toFile().deleteOnExit();\nreturn p;\n}\nprivate static final class PluginExec {\nfinal Plugin plugin;\nfinal String prefix;\nfinal PluginExecution execution;\nPluginExec(Plugin plugin, String prefix, PluginExecution execution) {\nthis.plugin = plugin;\nthis.prefix = prefix;\nthis.execution = execution;\n}\nString getPluginId() {\nreturn plugin.getId();\n}\nString getPrefix() {\nreturn prefix == null ? plugin.getId() : prefix;\n}\nString getExecutionId() {\nreturn execution == null ? null : execution.getId();\n}\n}\n}", + "context_after": "class DevMojo extends AbstractMojo {\nprivate static final Set IGNORED_PHASES = Set.of(\n\"pre-clean\", \"clean\", \"post-clean\");\nprivate static final List PRE_DEV_MODE_PHASES = List.of(\n\"validate\",\n\"initialize\",\n\"generate-sources\",\n\"process-sources\",\n\"generate-resources\",\n\"process-resources\",\n\"compile\",\n\"process-classes\",\n\"generate-test-sources\",\n\"process-test-sources\",\n\"generate-test-resources\",\n\"process-test-resources\",\n\"test-compile\");\nprivate static final String IO_QUARKUS = \"io.quarkus\";\nprivate static final String QUARKUS_GENERATE_CODE_GOAL = \"generate-code\";\nprivate static final String QUARKUS_GENERATE_CODE_TESTS_GOAL = \"generate-code-tests\";\nprivate static final String ORG_APACHE_MAVEN_PLUGINS = \"org.apache.maven.plugins\";\nprivate static final String MAVEN_COMPILER_PLUGIN = \"maven-compiler-plugin\";\nprivate static final String MAVEN_RESOURCES_PLUGIN = \"maven-resources-plugin\";\nprivate static final String MAVEN_TOOLCHAINS_PLUGIN = \"maven-toolchains-plugin\";\nprivate static final String ORG_JETBRAINS_KOTLIN = \"org.jetbrains.kotlin\";\nprivate static final String KOTLIN_MAVEN_PLUGIN = \"kotlin-maven-plugin\";\nprivate static final String BOOTSTRAP_ID = \"DevMojo\";\n/**\n* The directory for compiled classes.\n*/\n@Parameter(readonly = true, required = true, defaultValue = \"${project.build.outputDirectory}\")\nprivate File outputDirectory;\n@Parameter(defaultValue = \"${project}\", readonly = true, required = true)\nprotected MavenProject project;\n/**\n* If this server should be started in debug mode. The default is to start in debug mode and listen on\n* port 5005. 
Whether the JVM is suspended waiting for a debugger to be attached,\n* depends on the value of {@link #suspend}.\n* {@code debug} supports the following options:\n* - false: The JVM is not started in debug mode\n* - true: The JVM is started in debug mode and will be listening on {@code debugHost}:{@code debugPort}\n* - client: The JVM is started in client mode, and will attempt to connect to {@code debugHost}:{@code debugPort}\n* - {port}: The JVM is started in debug mode and will be listening on {@code debugHost}:{port}.
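Editor's note: a minimal sketch of how the four {@code debug} values above could map onto JDWP agent arguments. The class and method names, the default host/port handling, and the suspend parameter are illustrative assumptions, not the plugin's actual implementation.

```java
// Hypothetical illustration of the debug-value semantics described above;
// not the real quarkus-maven-plugin code.
public final class JdwpArgsSketch {
    static String toJvmArg(String debug, String host, String port, boolean suspend) {
        if (debug == null || "false".equalsIgnoreCase(debug)) {
            return null; // no debug agent is attached
        }
        // "client" connects out to an already-listening debugger; other values listen
        boolean server = !"client".equalsIgnoreCase(debug);
        // a bare numeric value such as "5006" overrides the configured port
        if (debug.chars().allMatch(Character::isDigit)) {
            port = debug;
        }
        return "-agentlib:jdwp=transport=dt_socket"
                + ",server=" + (server ? "y" : "n")
                + ",suspend=" + (suspend ? "y" : "n")
                + ",address=" + host + ":" + port;
    }

    public static void main(String[] args) {
        // prints: -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=localhost:5005
        System.out.println(toJvmArg("true", "localhost", "5005", false));
    }
}
```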
\n* By default, {@code debugHost} has the value \"localhost\", and {@code debugPort} is 5005.\n*/\n@Parameter(defaultValue = \"${debug}\")\nprivate String debug;\n@Parameter(defaultValue = \"${open-lang-package}\")\nprivate boolean openJavaLang;\n/**\n* Allows configuring the modules to add to the application.\n* The listed modules will be added using: {@code --add-modules m1,m2...}.\n*/\n@Parameter(defaultValue = \"${add-modules}\")\nprivate List modules;\n/**\n* Whether the JVM launch, in debug mode, should be suspended. This parameter is only\n* relevant when the JVM is launched in {@link #debug debug mode}. It supports the\n* following values (all the allowed values are case-insensitive):\n* - y or true: The debug mode JVM launch is suspended\n* - n or false: The debug mode JVM is started without suspending
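Editor's note: the list above implies a simple case-insensitive parse of the suspend value; a minimal sketch under that assumption (class and method names hypothetical):

```java
// Hypothetical parser for the suspend values listed above ("y"/"true" vs "n"/"false").
public final class SuspendFlagSketch {
    static boolean parseSuspend(String suspend) {
        if (suspend == null) {
            return false; // assumption: an absent value means "do not suspend"
        }
        String s = suspend.trim().toLowerCase(java.util.Locale.ROOT);
        // anything other than y/true, including "n" and "false", means no suspension
        return s.equals("y") || s.equals("true");
    }

    public static void main(String[] args) {
        System.out.println(parseSuspend("Y"));     // true
        System.out.println(parseSuspend("false")); // false
    }
}
```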
\n*/\n@Parameter(defaultValue = \"${suspend}\")\nprivate String suspend;\n@Parameter(defaultValue = \"${debugHost}\")\nprivate String debugHost;\n@Parameter(defaultValue = \"${debugPort}\")\nprivate String debugPort;\n@Parameter(defaultValue = \"${project.build.directory}\")\nprivate File buildDir;\n@Parameter(defaultValue = \"${project.build.sourceDirectory}\")\nprivate File sourceDir;\n@Parameter\nprivate File workingDir;\n@Parameter(defaultValue = \"${jvm.args}\")\nprivate String jvmArgs;\n@Parameter(defaultValue = \"${quarkus.args}\")\nprivate String argsString;\n@Parameter\nprivate Map environmentVariables = Map.of();\n@Parameter\nprivate Map systemProperties = Map.of();\n@Parameter(defaultValue = \"${session}\")\nprivate MavenSession session;\n@Parameter(defaultValue = \"TRUE\")\nprivate boolean deleteDevJar;\n@Component\nprivate MavenVersionEnforcer mavenVersionEnforcer;\n@Component\nprivate ArtifactHandlerManager artifactHandlerManager;\n@Component\nprivate RepositorySystem repoSystem;\n@Component\nQuarkusWorkspaceProvider workspaceProvider;\n@Parameter(defaultValue = \"${repositorySystemSession}\", readonly = true)\nprivate RepositorySystemSession repoSession;\n@Parameter(defaultValue = \"${project.remoteProjectRepositories}\", readonly = true, required = true)\nprivate List repos;\n@Parameter(defaultValue = \"${project.remotePluginRepositories}\", readonly = true, required = true)\nprivate List pluginRepos;\n/**\n* This value is intended to be set to true when some generated bytecode\n* is erroneous causing the JVM to crash when the verify:none option is set (which is on by default)\n*/\n@Parameter(defaultValue = \"${preventnoverify}\")\nprivate boolean preventnoverify = false;\n/**\n* Whether changes in the projects that appear to be dependencies of the project containing the application to be launched\n* should trigger hot-reload. 
By default, they do.\n*/\n@Parameter(defaultValue = \"${noDeps}\")\nprivate boolean noDeps = false;\n/**\n* Additional parameters to pass to javac when recompiling changed\n* source files.\n*/\n@Parameter\nprivate List compilerArgs;\n/**\n* Additional compiler arguments\n*/\n@Parameter\nprivate List compilerOptions;\n/**\n* The --release argument to javac.\n*/\n@Parameter(defaultValue = \"${maven.compiler.release}\")\nprivate String release;\n/**\n* The -source argument to javac.\n*/\n@Parameter(defaultValue = \"${maven.compiler.source}\")\nprivate String source;\n/**\n* The -target argument to javac.\n*/\n@Parameter(defaultValue = \"${maven.compiler.target}\")\nprivate String target;\n/**\n* Whether to enforce the quarkus-maven-plugin build goal to be configured.\n* By default, a missing build goal is considered an inconsistency (although the build goal is not required\n* technically).\n* In this case a warning will be logged and the application will not be started.\n*/\n@Parameter(defaultValue = \"${quarkus.enforceBuildGoal}\")\nprivate boolean enforceBuildGoal = true;\n@Parameter(property = \"quarkus.warnIfBuildGoalMissing\")\nboolean warnIfBuildGoalMissing = true;\n@Component\nprivate WorkspaceReader wsReader;\n@Component\nprivate BuildPluginManager pluginManager;\n@Component\nprivate ToolchainManager toolchainManager;\nprivate Map pluginMap;\n@Component\nprotected QuarkusBootstrapProvider bootstrapProvider;\n@Parameter(defaultValue = \"${mojoExecution}\", readonly = true, required = true)\nMojoExecution mojoExecution;\n@Component\nBuildAnalyticsProvider analyticsProvider;\n/**\n* A comma-separated list of Maven plugin keys in {@code groupId:artifactId} format\n* (for example {@code org.codehaus.mojo:flatten-maven-plugin} and/or goal prefixes,\n* (for example {@code flatten}) that should be skipped when {@code quarkus:dev} identifies\n* Maven plugin goals that should be executed before the application is launched in dev mode.\n*
\n* Only the {@code flatten} Maven plugin is skipped by default.\n*/\n@Parameter(defaultValue = \"org.codehaus.mojo:flatten-maven-plugin\")\nSet skipPlugins;\n/**\n* console attributes, used to restore the console state\n*/\nprivate Attributes attributes;\nprivate int windowsAttributes;\nprivate boolean windowsAttributesSet;\nprivate Pty pty;\nprivate boolean windowsColorSupport;\n/**\n* Indicates for which launch mode the dependencies should be resolved.\n*\n* @return launch mode for which the dependencies should be resolved\n*/\nprotected LaunchMode getLaunchModeClasspath() {\nreturn LaunchMode.DEVELOPMENT;\n}\n@Override\npublic void setLog(Log log) {\nsuper.setLog(log);\nMojoLogger.delegate = log;\n}\n@Override\n/**\n* if the process is forcibly killed then the terminal may be left in raw mode, which\n* messes everything up. This attempts to fix that by saving the state so it can be restored\n*/\nprivate void saveTerminalState() {\ntry {\nif (windowsAttributes > 0) {\nlong hConsole = Kernel32.GetStdHandle(Kernel32.STD_INPUT_HANDLE);\nif (hConsole != (long) Kernel32.INVALID_HANDLE_VALUE) {\nint[] mode = new int[1];\nwindowsAttributes = Kernel32.GetConsoleMode(hConsole, mode) == 0 ? -1 : mode[0];\nwindowsAttributesSet = true;\nfinal int VIRTUAL_TERMINAL_PROCESSING = 0x0004;\nif (Kernel32.SetConsoleMode(hConsole, windowsAttributes | VIRTUAL_TERMINAL_PROCESSING) != 0) {\nwindowsColorSupport = true;\n}\n}\n}\n} catch (Throwable t) {\ntry {\nPty pty = ExecPty.current();\nattributes = pty.getAttr();\nDevMojo.this.pty = pty;\n} catch (Exception e) {\ngetLog().debug(\"Failed to get a local tty\", e);\n}\n}\n}\nprivate void restoreTerminalState() {\nif (windowsAttributesSet) {\nlong hConsole = Kernel32.GetStdHandle(Kernel32.STD_INPUT_HANDLE);\nif (hConsole != (long) Kernel32.INVALID_HANDLE_VALUE) {\nKernel32.SetConsoleMode(hConsole, windowsAttributes);\n}\n} else {\nif (attributes == null || pty == null) {\nreturn;\n}\nPty finalPty = pty;\ntry (finalPty) {\nfinalPty.setAttr(attributes);\nint height = finalPty.getSize().getHeight();\nString sb = ANSI.MAIN_BUFFER +\nANSI.CURSOR_SHOW +\n\"\\u001B[0m\" +\n\"\\033[\" + height + \";0H\";\nfinalPty.getSlaveOutput().write(sb.getBytes(StandardCharsets.UTF_8));\n} catch (IOException e) {\ngetLog().error(\"Error restoring console state\", e);\n}\n}\n}\nprivate String handleAutoCompile() throws MojoExecutionException {\nList goals = session.getGoals();\nif (goals.isEmpty() && !StringUtils.isEmpty(project.getDefaultGoal())) {\ngoals = List.of(StringUtils.split(project.getDefaultGoal()));\n}\nfinal String currentGoal = getCurrentGoal();\nint latestHandledPhaseIndex = -1;\nfor (String goal : goals) {\nif (goal.endsWith(currentGoal)) {\nbreak;\n}\nif (goal.indexOf(':') >= 0 || IGNORED_PHASES.contains(goal)) {\ncontinue;\n}\nvar i = PRE_DEV_MODE_PHASES.indexOf(goal);\nif (i < 0 || i == PRE_DEV_MODE_PHASES.size() - 1) {\nreturn null;\n}\nif (i > latestHandledPhaseIndex) {\nlatestHandledPhaseIndex = i;\n}\n}\nfinal String quarkusPluginId = getPluginDescriptor().getId();\nfinal Map> phaseExecutions = new HashMap<>();\nfinal Map pluginPrefixes = new HashMap<>();\nString bootstrapId = BOOTSTRAP_ID;\nfor (Plugin p : project.getBuildPlugins()) {\nif (p.getExecutions().isEmpty()) {\ncontinue;\n}\nif (skipPlugins.contains(p.getKey())) {\nif (getLog().isDebugEnabled()) {\ngetLog().debug(\"Skipping \" + p.getId() + \" execution according to skipPlugins value\");\n}\ncontinue;\n}\nfor (PluginExecution e : p.getExecutions()) {\nif (e.getPhase() != null && 
!PRE_DEV_MODE_PHASES.contains(e.getPhase())) {\nif (getLog().isDebugEnabled()) {\ngetLog().debug(\"Skipping \" + e.getId() + \" of \" + p.getId());\n}\ncontinue;\n}\nString goalPrefix = null;\nif (!e.getGoals().isEmpty()) {\ngoalPrefix = getMojoDescriptor(p, e.getGoals().get(0)).getPluginDescriptor().getGoalPrefix();\nif (skipPlugins.contains(goalPrefix)) {\nif (getLog().isDebugEnabled()) {\ngetLog().debug(\"Skipping \" + goalPrefix + \" execution according to skipPlugins value\");\ncontinue;\n}\ncontinue;\n}\npluginPrefixes.put(goalPrefix, p);\npluginPrefixes.put(p.getId(), p);\n}\nif (e.getPhase() != null) {\nphaseExecutions.computeIfAbsent(e.getPhase(), k -> new ArrayList<>()).add(new PluginExec(p, goalPrefix, e));\n} else {\nfor (String goal : e.getGoals()) {\nif (goal.equals(QUARKUS_GENERATE_CODE_GOAL) && p.getId().equals(quarkusPluginId)) {\nvar clone = e.clone();\nclone.setGoals(List.of(QUARKUS_GENERATE_CODE_GOAL));\nphaseExecutions.computeIfAbsent(\"compile\", k -> new ArrayList<>())\n.add(0, new PluginExec(p, goalPrefix, clone));\nbootstrapId = e.getId();\n} else if (goal.equals(QUARKUS_GENERATE_CODE_TESTS_GOAL) && p.getId().equals(quarkusPluginId)) {\nvar clone = e.clone();\nclone.setGoals(List.of(QUARKUS_GENERATE_CODE_TESTS_GOAL));\nphaseExecutions.computeIfAbsent(\"test-compile\", k -> new ArrayList<>())\n.add(0, new PluginExec(p, goalPrefix, clone));\n} else {\nvar mojoDescr = getMojoDescriptor(p, goal);\nif (mojoDescr.getPhase() != null) {\nphaseExecutions.computeIfAbsent(mojoDescr.getPhase(), k -> new ArrayList<>())\n.add(new PluginExec(p, goalPrefix, e));\n} else {\ngetLog().warn(\"Failed to determine the lifecycle phase for \" + p.getId() + \":\" + goal);\n}\n}\n}\n}\n}\n}\nfinal Map> executedPluginGoals = new HashMap<>();\nfor (String goal : goals) {\nif (goal.endsWith(currentGoal)) {\nbreak;\n}\nvar colon = goal.lastIndexOf(':');\nif (colon >= 0) {\nvar plugin = pluginPrefixes.get(goal.substring(0, colon));\nif (plugin == null) {\ngetLog().warn(\"Failed to locate plugin for \" + goal);\n} else {\nexecutedPluginGoals.computeIfAbsent(plugin.getId(), k -> new ArrayList<>()).add(goal.substring(colon + 1));\n}\n}\n}\nfinal Map quarkusGoalParams = Map.of(\n\"mode\", LaunchMode.DEVELOPMENT.name(),\nQuarkusBootstrapMojo.CLOSE_BOOTSTRAPPED_APP, \"false\",\n\"bootstrapId\", bootstrapId);\nfor (int phaseIndex = latestHandledPhaseIndex + 1; phaseIndex < PRE_DEV_MODE_PHASES.size(); ++phaseIndex) {\nvar executions = phaseExecutions.get(PRE_DEV_MODE_PHASES.get(phaseIndex));\nif (executions == null) {\ncontinue;\n}\nfor (PluginExec pe : executions) {\nvar executedGoals = executedPluginGoals.getOrDefault(pe.plugin.getId(), List.of());\nfor (String goal : pe.execution.getGoals()) {\nif (!executedGoals.contains(goal)) {\ntry {\nexecuteGoal(pe, goal,\npe.getPluginId().equals(quarkusPluginId) ? 
quarkusGoalParams : Map.of());\n} catch (Throwable t) {\nif (goal.equals(\"testCompile\")) {\ngetLog().error(\n\"Test compile failed, you will need to fix your tests before you can use continuous testing\",\nt);\n} else {\nthrow t;\n}\n}\n}\n}\n}\n}\nreturn bootstrapId;\n}\nprivate String getCurrentGoal() {\nreturn mojoExecution.getMojoDescriptor().getPluginDescriptor().getGoalPrefix() + \":\"\n+ mojoExecution.getGoal();\n}\nprivate PluginDescriptor getPluginDescriptor() {\nreturn mojoExecution.getMojoDescriptor().getPluginDescriptor();\n}\nprivate void initToolchain() throws MojoExecutionException {\nfinal Plugin plugin = getConfiguredPluginOrNull(ORG_APACHE_MAVEN_PLUGINS, MAVEN_TOOLCHAINS_PLUGIN);\nif (!isGoalConfigured(plugin, \"toolchain\")) {\nreturn;\n}\nexecuteGoal(new PluginExec(plugin, null, null), \"toolchain\", Map.of());\n}\nprivate void executeGoal(PluginExec pluginExec, String goal, Map params)\nthrows MojoExecutionException {\nvar msg = new StringBuilder();\nmsg.append(\"Invoking \")\n.append(pluginExec.getPrefix()).append(\":\")\n.append(pluginExec.plugin.getVersion()).append(\":\")\n.append(goal);\nif (pluginExec.getExecutionId() != null) {\nmsg.append(\" (\").append(pluginExec.getExecutionId()).append(\")\");\n}\nmsg.append(\" @ \").append(project.getArtifactId());\ngetLog().info(msg.toString());\nexecuteMojo(\nplugin(\ngroupId(pluginExec.plugin.getGroupId()),\nartifactId(pluginExec.plugin.getArtifactId()),\nversion(pluginExec.plugin.getVersion()),\npluginExec.plugin.getDependencies()),\ngoal(goal),\ngetPluginConfig(pluginExec.plugin, pluginExec.getExecutionId(), goal, params),\nexecutionEnvironment(\nproject,\nsession,\npluginManager));\n}\nprivate List readAnnotationProcessors(Xpp3Dom pluginConfig) {\nif (pluginConfig == null) {\nreturn List.of();\n}\nXpp3Dom annotationProcessors = pluginConfig.getChild(\"annotationProcessors\");\nif (annotationProcessors == null) {\nreturn List.of();\n}\nXpp3Dom[] processors = annotationProcessors.getChildren(\"annotationProcessor\");\nif (processors.length == 0) {\nreturn List.of();\n}\nList ret = new ArrayList<>(processors.length);\nfor (Xpp3Dom processor : processors) {\nret.add(processor.getValue());\n}\nreturn ret;\n}\nprivate Set readAnnotationProcessorPaths(Xpp3Dom pluginConfig) throws MojoExecutionException {\nif (pluginConfig == null) {\nreturn Set.of();\n}\nXpp3Dom annotationProcessorPaths = pluginConfig.getChild(\"annotationProcessorPaths\");\nif (annotationProcessorPaths == null) {\nreturn Set.of();\n}\nvar versionConstraints = getAnnotationProcessorPathsDepMgmt(pluginConfig);\nXpp3Dom[] paths = annotationProcessorPaths.getChildren(\"path\");\nSet elements = new LinkedHashSet<>();\ntry {\nList dependencies = convertToDependencies(paths);\nCollectRequest collectRequest = new CollectRequest(dependencies, versionConstraints,\nproject.getRemoteProjectRepositories());\nDependencyRequest dependencyRequest = new DependencyRequest();\ndependencyRequest.setCollectRequest(collectRequest);\nDependencyResult dependencyResult = repoSystem.resolveDependencies(session.getRepositorySession(),\ndependencyRequest);\nfor (ArtifactResult resolved : dependencyResult.getArtifactResults()) {\nelements.add(resolved.getArtifact().getFile());\n}\nreturn elements;\n} catch (Exception e) {\nthrow new MojoExecutionException(\n\"Resolution of annotationProcessorPath dependencies failed: \" + e.getLocalizedMessage(), e);\n}\n}\nprivate List getAnnotationProcessorPathsDepMgmt(Xpp3Dom pluginConfig) {\nfinal Xpp3Dom useDepMgmt = 
pluginConfig.getChild(\"annotationProcessorPathsUseDepMgmt\");\nif (useDepMgmt == null || !Boolean.parseBoolean(useDepMgmt.getValue())) {\nreturn List.of();\n}\nvar dm = project.getDependencyManagement();\nif (dm == null) {\nreturn List.of();\n}\nreturn getProjectAetherDependencyManagement();\n}\nprivate List convertToDependencies(Xpp3Dom[] paths) throws MojoExecutionException {\nList dependencies = new ArrayList<>();\nfor (Xpp3Dom path : paths) {\nString type = getValue(path, \"type\", \"jar\");\nArtifactHandler handler = artifactHandlerManager.getArtifactHandler(type);\norg.eclipse.aether.artifact.Artifact artifact = new DefaultArtifact(\ngetValue(path, \"groupId\", null),\ngetValue(path, \"artifactId\", null),\ngetValue(path, \"classifier\", null),\nhandler.getExtension(),\ngetValue(path, \"version\", null));\nif (toNullIfEmpty(artifact.getVersion()) == null) {\nartifact = artifact.setVersion(getAnnotationProcessorPathVersion(artifact));\n}\nSet exclusions = convertToAetherExclusions(path.getChild(\"exclusions\"));\ndependencies.add(new org.eclipse.aether.graph.Dependency(artifact, JavaScopes.RUNTIME, false, exclusions));\n}\nreturn dependencies;\n}\nprivate String getAnnotationProcessorPathVersion(org.eclipse.aether.artifact.Artifact annotationProcessorPath)\nthrows MojoExecutionException {\nList managedDependencies = getProjectManagedDependencies();\nreturn findManagedVersion(annotationProcessorPath, managedDependencies)\n.orElseThrow(() -> new MojoExecutionException(String.format(\n\"Cannot find version for annotation processor path '%s'. The version needs to be either\"\n+ \" provided directly in the plugin configuration or via dependency management.\",\nannotationProcessorPath)));\n}\nprivate Optional findManagedVersion(\norg.eclipse.aether.artifact.Artifact artifact, List managedDependencies) {\nreturn managedDependencies.stream()\n.filter(dep -> Objects.equals(dep.getGroupId(), artifact.getGroupId())\n&& Objects.equals(dep.getArtifactId(), artifact.getArtifactId())\n&& Objects.equals(dep.getClassifier(), toNullIfEmpty(artifact.getClassifier()))\n&& Objects.equals(dep.getType(), toNullIfEmpty(artifact.getExtension())))\n.findAny()\n.map(org.apache.maven.model.Dependency::getVersion);\n}\nprivate String toNullIfEmpty(String value) {\nif (value != null && value.isBlank())\nreturn null;\nreturn value;\n}\nprivate List getProjectManagedDependencies() {\nDependencyManagement dependencyManagement = project.getDependencyManagement();\nif (dependencyManagement == null || dependencyManagement.getDependencies() == null) {\nreturn List.of();\n}\nreturn dependencyManagement.getDependencies();\n}\nprivate String getValue(Xpp3Dom path, String element, String defaultValue) {\nXpp3Dom child = path.getChild(element);\nif (child == null) {\nreturn defaultValue;\n}\nreturn child.getValue();\n}\nprivate Set convertToAetherExclusions(Xpp3Dom exclusions) {\nif (exclusions == null) {\nreturn Set.of();\n}\nSet aetherExclusions = new HashSet<>();\nfor (Xpp3Dom exclusion : exclusions.getChildren(\"exclusion\")) {\nExclusion aetherExclusion = new Exclusion(\ngetValue(exclusion, \"groupId\", null),\ngetValue(exclusion, \"artifactId\", null),\ngetValue(exclusion, \"classifier\", null),\ngetValue(exclusion, \"extension\", \"jar\"));\naetherExclusions.add(aetherExclusion);\n}\nreturn aetherExclusions;\n}\nprivate boolean isGoalConfigured(Plugin plugin, String goal) {\nif (plugin == null) {\nreturn false;\n}\nfor (PluginExecution pluginExecution : plugin.getExecutions()) {\nif 
(pluginExecution.getGoals().contains(goal)) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate Xpp3Dom getPluginConfig(Plugin plugin, String executionId, String goal, Map params)\nthrows MojoExecutionException {\nXpp3Dom mergedConfig = null;\nif (!plugin.getExecutions().isEmpty()) {\nfor (PluginExecution exec : plugin.getExecutions()) {\nif (exec.getConfiguration() != null && exec.getGoals().contains(goal)\n&& matchesExecution(executionId, exec.getId())) {\nmergedConfig = mergedConfig == null ? (Xpp3Dom) exec.getConfiguration()\n: Xpp3Dom.mergeXpp3Dom(mergedConfig, (Xpp3Dom) exec.getConfiguration(), true);\n}\n}\n}\nif ((Xpp3Dom) plugin.getConfiguration() != null) {\nmergedConfig = mergedConfig == null ? (Xpp3Dom) plugin.getConfiguration()\n: Xpp3Dom.mergeXpp3Dom(mergedConfig, (Xpp3Dom) plugin.getConfiguration(), true);\n}\nfinal Xpp3Dom configuration = configuration();\nif (mergedConfig != null) {\nSet supportedParams = null;\nfor (Xpp3Dom child : mergedConfig.getChildren()) {\nif (child.getName().startsWith(\"test\")) {\ncontinue;\n}\nif (supportedParams == null) {\nsupportedParams = getMojoDescriptor(plugin, goal).getParameterMap().keySet();\n}\nif (supportedParams.contains(child.getName())) {\nconfiguration.addChild(child);\n}\n}\n}\nfor (Map.Entry param : params.entrySet()) {\nfinal Xpp3Dom p = new Xpp3Dom(param.getKey());\np.setValue(param.getValue());\nconfiguration.addChild(p);\n}\nreturn configuration;\n}\n/**\n* Check if the currentExecutionId matches the provided executionId.\n*
\n* This method will return true if:\n* - the current execution id is undefined\n* - the execution id is undefined\n* - both are equal (ignoring case)
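Editor's note: the three conditions above in executable form; this mirrors the matchesExecution implementation shown later in this entry, with a few illustrative calls.

```java
// Mirrors the matchesExecution logic from this entry, plus sample expectations.
public final class MatchesExecutionDemo {
    static boolean matchesExecution(String currentExecutionId, String executionId) {
        if (currentExecutionId == null) {
            return true; // current execution id is undefined
        }
        // execution id is undefined, or both are equal ignoring case
        return executionId == null || currentExecutionId.equalsIgnoreCase(executionId);
    }

    public static void main(String[] args) {
        System.out.println(matchesExecution(null, "any"));          // true
        System.out.println(matchesExecution("default", null));      // true
        System.out.println(matchesExecution("Default", "DEFAULT")); // true
        System.out.println(matchesExecution("a", "b"));             // false
    }
}
```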
\n*\n* @param currentExecutionId current execution id (if defined)\n* @param executionId execution id to test matching (if defined)\n* @return true if executions ids do match.\n*/\nprivate static boolean matchesExecution(String currentExecutionId, String executionId) {\nif (currentExecutionId == null) {\nreturn true;\n}\nreturn executionId == null || currentExecutionId.equalsIgnoreCase(executionId);\n}\nprivate MojoDescriptor getMojoDescriptor(Plugin plugin, String goal) throws MojoExecutionException {\ntry {\nreturn pluginManager.getMojoDescriptor(plugin, goal, pluginRepos, repoSession);\n} catch (Exception e) {\nthrow new MojoExecutionException(\n\"Failed to obtain descriptor for Maven plugin \" + plugin.getId() + \" goal \" + goal, e);\n}\n}\nprivate Plugin getConfiguredPluginOrNull(String groupId, String artifactId) {\nif (pluginMap == null) {\npluginMap = new HashMap<>();\nfor (Plugin p : project.getBuildPlugins()) {\npluginMap.put(ArtifactKey.ga(p.getGroupId(), p.getArtifactId()), p);\n}\n}\nreturn pluginMap.get(ArtifactKey.ga(groupId, artifactId));\n}\nprivate Map readPomFileTimestamps(DevModeRunner runner) throws IOException {\nMap ret = new HashMap<>();\nfor (Path i : runner.pomFiles()) {\nret.put(i, Files.getLastModifiedTime(i).toMillis());\n}\nreturn ret;\n}\nprivate String getSourceEncoding() {\nObject sourceEncodingProperty = project.getProperties().get(\"project.build.sourceEncoding\");\nif (sourceEncodingProperty != null) {\nreturn (String) sourceEncodingProperty;\n}\nreturn null;\n}\nprivate void addProject(MavenDevModeLauncher.Builder builder, ResolvedDependency module, boolean root) throws Exception {\nif (!module.isJar()) {\nreturn;\n}\nString projectDirectory;\nSet sourcePaths;\nString classesPath = null;\nString generatedSourcesPath = null;\nSet resourcePaths;\nSet testSourcePaths;\nString testClassesPath = null;\nSet testResourcePaths;\nList activeProfiles = List.of();\nMavenProject mavenProject = null;\nif (module.getClassifier().isEmpty()) {\nfor (MavenProject p : session.getAllProjects()) {\nif (module.getArtifactId().equals(p.getArtifactId())\n&& module.getGroupId().equals(p.getGroupId())\n&& module.getVersion().equals(p.getVersion())) {\nmavenProject = p;\nbreak;\n}\n}\n}\nfinal ArtifactSources sources = module.getSources();\nif (mavenProject == null) {\nif (sources == null) {\ngetLog().debug(\"Local dependency \" + module.toCompactCoords() + \" does not appear to have any sources\");\nreturn;\n}\nprojectDirectory = module.getWorkspaceModule().getModuleDir().getAbsolutePath();\nsourcePaths = new LinkedHashSet<>();\nfor (SourceDir src : sources.getSourceDirs()) {\nfor (Path p : src.getSourceTree().getRoots()) {\nsourcePaths.add(p.toAbsolutePath());\n}\n}\ntestSourcePaths = new LinkedHashSet<>();\nArtifactSources testSources = module.getWorkspaceModule().getTestSources();\nif (testSources != null) {\nfor (SourceDir src : testSources.getSourceDirs()) {\nfor (Path p : src.getSourceTree().getRoots()) {\ntestSourcePaths.add(p.toAbsolutePath());\n}\n}\n}\n} else {\nprojectDirectory = mavenProject.getBasedir().getPath();\nsourcePaths = mavenProject.getCompileSourceRoots().stream()\n.map(Path::of)\n.map(Path::toAbsolutePath)\n.collect(Collectors.toCollection(LinkedHashSet::new));\ntestSourcePaths = mavenProject.getTestCompileSourceRoots().stream()\n.map(Path::of)\n.map(Path::toAbsolutePath)\n.collect(Collectors.toCollection(LinkedHashSet::new));\nactiveProfiles = mavenProject.getActiveProfiles();\n}\nfinal Path sourceParent;\nif (sourcePaths.isEmpty()) {\nif (sources == 
null || sources.getResourceDirs() == null) {\nthrow new MojoExecutionException(\n\"Local dependency \" + module.toCompactCoords() + \" does not appear to have any sources\");\n}\nsourceParent = sources.getResourceDirs().iterator().next().getDir().toAbsolutePath().getParent();\n} else {\nsourceParent = sourcePaths.iterator().next().toAbsolutePath().getParent();\n}\nPath classesDir = null;\nresourcePaths = new LinkedHashSet<>();\nif (sources != null) {\nSourceDir firstSourceDir = sources.getSourceDirs().iterator().next();\nclassesDir = firstSourceDir.getOutputDir().toAbsolutePath();\nif (firstSourceDir.getAptSourcesDir() != null) {\ngeneratedSourcesPath = firstSourceDir.getAptSourcesDir().toAbsolutePath().toString();\n}\nif (Files.isDirectory(classesDir)) {\nclassesPath = classesDir.toString();\n}\nfor (SourceDir src : sources.getResourceDirs()) {\nfor (Path p : src.getSourceTree().getRoots()) {\nresourcePaths.add(p.toAbsolutePath());\n}\n}\n}\nif (module.getWorkspaceModule().hasTestSources()) {\nPath testClassesDir = module.getWorkspaceModule().getTestSources().getSourceDirs().iterator().next().getOutputDir()\n.toAbsolutePath();\ntestClassesPath = testClassesDir.toString();\n}\ntestResourcePaths = new LinkedHashSet<>();\nArtifactSources testSources = module.getWorkspaceModule().getTestSources();\nif (testSources != null) {\nfor (SourceDir src : testSources.getResourceDirs()) {\nfor (Path p : src.getSourceTree().getRoots()) {\ntestResourcePaths.add(p.toAbsolutePath());\n}\n}\n}\nfor (Profile profile : activeProfiles) {\nfinal BuildBase build = profile.getBuild();\nif (build != null) {\nresourcePaths.addAll(\nbuild.getResources().stream()\n.map(Resource::getDirectory)\n.map(Path::of)\n.map(Path::toAbsolutePath)\n.collect(Collectors.toList()));\ntestResourcePaths.addAll(\nbuild.getTestResources().stream()\n.map(Resource::getDirectory)\n.map(Path::of)\n.map(Path::toAbsolutePath)\n.collect(Collectors.toList()));\n}\n}\nif (classesPath == null && (!sourcePaths.isEmpty() || !resourcePaths.isEmpty())) {\nthrow new MojoExecutionException(\"Hot reloadable dependency \" + module.getWorkspaceModule().getId()\n+ \" has not been compiled yet (the classes directory \" + (classesDir == null ? 
\"\" : classesDir)\n+ \" does not exist)\");\n}\nPath targetDir = Path.of(project.getBuild().getDirectory());\nif (generatedSourcesPath != null && Files.notExists(Path.of(generatedSourcesPath))) {\nFiles.createDirectories(Path.of(generatedSourcesPath));\n}\nDevModeContext.ModuleInfo moduleInfo = new DevModeContext.ModuleInfo.Builder()\n.setArtifactKey(module.getKey())\n.setProjectDirectory(projectDirectory)\n.setSourcePaths(PathList.from(sourcePaths))\n.setClassesPath(classesPath)\n.setGeneratedSourcesPath(generatedSourcesPath)\n.setResourcesOutputPath(classesPath)\n.setResourcePaths(PathList.from(resourcePaths))\n.setSourceParents(PathList.of(sourceParent.toAbsolutePath()))\n.setPreBuildOutputDir(targetDir.resolve(\"generated-sources\").toAbsolutePath().toString())\n.setTargetDir(targetDir.toAbsolutePath().toString())\n.setTestSourcePaths(PathList.from(testSourcePaths))\n.setTestClassesPath(testClassesPath)\n.setTestResourcesOutputPath(testClassesPath)\n.setTestResourcePaths(PathList.from(testResourcePaths))\n.build();\nif (root) {\nbuilder.mainModule(moduleInfo);\n} else {\nbuilder.dependency(moduleInfo);\n}\n}\nprivate class DevModeRunner {\nfinal QuarkusDevModeLauncher launcher;\nprivate Process process;\nprivate DevModeRunner(String bootstrapId) throws Exception {\nlauncher = newLauncher(null, bootstrapId);\n}\nprivate DevModeRunner(String actualDebugPort, String bootstrapId) throws Exception {\nlauncher = newLauncher(actualDebugPort, bootstrapId);\n}\nCollection pomFiles() {\nreturn launcher.watchedBuildFiles();\n}\nboolean alive() {\nreturn process != null && process.isAlive();\n}\nint exitValue() {\nreturn process == null ? -1 : process.exitValue();\n}\nboolean isExpectedExitValue() {\nreturn exitValue() == 0 || exitValue() == 130;\n}\nvoid run() throws Exception {\nif (getLog().isDebugEnabled()) {\ngetLog().debug(\"Launching JVM with command line: \" + String.join(\" \", launcher.args()));\n}\nfinal ProcessBuilder processBuilder = new ProcessBuilder(launcher.args())\n.redirectErrorStream(true)\n.inheritIO()\n.directory(workingDir == null ? 
project.getBasedir() : workingDir);\nif (!environmentVariables.isEmpty()) {\nprocessBuilder.environment().putAll(environmentVariables);\n}\nprocess = processBuilder.start();\nRuntime.getRuntime().addShutdownHook(new Thread(new Runnable() {\n@Override\npublic void run() {\nprocess.destroy();\ntry {\nprocess.waitFor();\n} catch (InterruptedException e) {\ngetLog().warn(\"Unable to properly wait for dev-mode end\", e);\n}\n}\n}, \"Development Mode Shutdown Hook\"));\n}\nvoid stop() throws InterruptedException {\nprocess.destroy();\nprocess.waitFor();\n}\n}\nprivate QuarkusDevModeLauncher newLauncher(String actualDebugPort, String bootstrapId) throws Exception {\nString java = null;\nif (toolchainManager != null) {\nToolchain toolchain = toolchainManager.getToolchainFromBuildContext(\"jdk\", session);\nif (toolchain != null) {\njava = toolchain.findTool(\"java\");\ngetLog().info(\"JVM from toolchain: \" + java);\n}\n}\nfinal MavenDevModeLauncher.Builder builder = MavenDevModeLauncher.builder(java, getLog())\n.preventnoverify(preventnoverify)\n.buildDir(buildDir)\n.outputDir(outputDirectory)\n.suspend(suspend)\n.debug(debug)\n.debugHost(debugHost)\n.debugPort(actualDebugPort)\n.deleteDevJar(deleteDevJar);\nsetJvmArgs(builder);\nif (windowsColorSupport) {\nbuilder.jvmArgs(\"-Dio.quarkus.force-color-support=true\");\n}\nif (openJavaLang) {\nbuilder.jvmArgs(\"--add-opens\");\nbuilder.jvmArgs(\"java.base/java.lang=ALL-UNNAMED\");\n}\nif (modules != null && !modules.isEmpty()) {\nString mods = String.join(\",\", this.modules);\nbuilder.jvmArgs(\"--add-modules\");\nbuilder.jvmArgs(mods);\n}\nbuilder.projectDir(project.getFile().getParentFile());\nProperties projectProperties = project.getProperties();\nMap effectiveProperties = new HashMap<>();\nfor (String name : projectProperties.stringPropertyNames()) {\nif (name.startsWith(\"quarkus.\")) {\neffectiveProperties.put(name, projectProperties.getProperty(name));\n}\n}\nList effectivePropertyValues = new ArrayList<>(effectiveProperties.values());\nfor (String value : effectivePropertyValues) {\nfor (String reference : Expression.compile(value, LENIENT_SYNTAX, NO_TRIM).getReferencedStrings()) {\nString referenceValue = session.getUserProperties().getProperty(reference);\nif (referenceValue != null) {\neffectiveProperties.put(reference, referenceValue);\ncontinue;\n}\nreferenceValue = projectProperties.getProperty(reference);\nif (referenceValue != null) {\neffectiveProperties.put(reference, referenceValue);\n}\n}\n}\nbuilder.buildSystemProperties(effectiveProperties);\nbuilder.applicationName(project.getArtifactId());\nbuilder.applicationVersion(project.getVersion());\nbuilder.sourceEncoding(getSourceEncoding());\nif (compilerOptions != null) {\nfor (CompilerOptions compilerOption : compilerOptions) {\nbuilder.compilerOptions(compilerOption.getName(), compilerOption.getArgs());\n}\n}\nfinal Optional compilerPluginConfiguration = findCompilerPluginConfiguration();\nif (compilerArgs != null) {\nbuilder.compilerOptions(\"java\", compilerArgs);\n} else if (compilerPluginConfiguration.isPresent()) {\nfinal Xpp3Dom compilerPluginArgsConfiguration = compilerPluginConfiguration.get().getChild(\"compilerArgs\");\nif (compilerPluginArgsConfiguration != null) {\nList compilerPluginArgs = new ArrayList<>();\nfor (Xpp3Dom argConfiguration : compilerPluginArgsConfiguration.getChildren()) {\ncompilerPluginArgs.add(argConfiguration.getValue());\n}\nif (compilerPluginArgsConfiguration.getValue() != null\n&& !compilerPluginArgsConfiguration.getValue().isEmpty()) 
{\ncompilerPluginArgs.add(compilerPluginArgsConfiguration.getValue().trim());\n}\nbuilder.compilerOptions(\"java\", compilerPluginArgs);\n}\n}\nif (release != null) {\nbuilder.releaseJavaVersion(release);\n} else if (compilerPluginConfiguration.isPresent()) {\napplyCompilerFlag(compilerPluginConfiguration, \"release\", builder::releaseJavaVersion);\n}\nif (source != null) {\nbuilder.sourceJavaVersion(source);\n} else if (compilerPluginConfiguration.isPresent()) {\napplyCompilerFlag(compilerPluginConfiguration, \"source\", builder::sourceJavaVersion);\n}\nif (target != null) {\nbuilder.targetJavaVersion(target);\n} else if (compilerPluginConfiguration.isPresent()) {\napplyCompilerFlag(compilerPluginConfiguration, \"target\", builder::targetJavaVersion);\n}\nsetKotlinSpecificFlags(builder);\nsetAnnotationProcessorFlags(builder);\nfinal Path appModelLocation = resolveSerializedModelLocation();\nApplicationModel appModel = bootstrapProvider.getResolvedApplicationModel(\nQuarkusBootstrapProvider.getProjectId(project), getLaunchModeClasspath(), bootstrapId);\nif (appModel != null) {\nbootstrapProvider.close();\n} else {\nfinal BootstrapMavenContextConfig mvnConfig = BootstrapMavenContext.config()\n.setUserSettings(session.getRequest().getUserSettingsFile())\n.setRemoteRepositories(repos)\n.setWorkspaceDiscovery(true)\n.setPreferPomsFromWorkspace(true)\n.setCurrentProject(project.getBasedir().toString());\nfinal BootstrapMavenContext mvnCtx = workspaceProvider.createMavenContext(mvnConfig);\nappModel = new BootstrapAppModelResolver(new MavenArtifactResolver(mvnCtx))\n.setDevMode(true)\n.setTest(LaunchMode.TEST.equals(getLaunchModeClasspath()))\n.setCollectReloadableDependencies(!noDeps)\n.setIncubatingModelResolver(!IncubatingApplicationModelResolver\n.isIncubatingModelResolverProperty(project.getProperties(), \"false\"))\n.resolveModel(mvnCtx.getCurrentProject().getAppArtifact());\n}\nBootstrapUtils.serializeAppModel(appModel, appModelLocation);\nbuilder.jvmArgs(\"-D\" + BootstrapConstants.SERIALIZED_APP_MODEL + \"=\" + appModelLocation);\nif (noDeps) {\naddProject(builder, appModel.getAppArtifact(), true);\nappModel.getApplicationModule().getBuildFiles().forEach(builder::watchedBuildFile);\nbuilder.localArtifact(\nArtifactKey.of(project.getGroupId(), project.getArtifactId(), null, ArtifactCoords.TYPE_JAR));\n} else {\nfor (ResolvedDependency project : DependenciesFilter.getReloadableModules(appModel)) {\naddProject(builder, project, project == appModel.getAppArtifact());\nproject.getWorkspaceModule().getBuildFiles().forEach(builder::watchedBuildFile);\nbuilder.localArtifact(project.getKey());\n}\n}\naddQuarkusDevModeDeps(builder, appModel);\nSet resourceDirs = new HashSet<>();\nfor (Resource resource : project.getResources()) {\nString dir = resource.getDirectory();\nPath path = Paths.get(dir);\nresourceDirs.add(path);\n}\nfinal Collection configuredParentFirst = ConfiguredClassLoading.builder()\n.setApplicationModel(appModel)\n.setApplicationRoot(PathsCollection.from(resourceDirs))\n.setMode(QuarkusBootstrap.Mode.DEV)\n.build().getParentFirstArtifacts();\nfor (Artifact appDep : project.getArtifacts()) {\nfinal ArtifactKey key = ArtifactKey.of(appDep.getGroupId(), appDep.getArtifactId(),\nappDep.getClassifier(), appDep.getArtifactHandler().getExtension());\nif (!builder.isLocal(key) && configuredParentFirst.contains(key)) {\nbuilder.classpathEntry(key, appDep.getFile());\n}\n}\nbuilder.baseName(project.getBuild().getFinalName());\nmodifyDevModeContext(builder);\nif (argsString != null) 
{\nbuilder.applicationArgs(argsString);\n}\nanalyticsProvider.sendAnalytics(DEV_MODE, appModel, emptyMap(), buildDir);\nreturn builder.build();\n}\nprivate void setJvmArgs(Builder builder) throws Exception {\nString jvmArgs = this.jvmArgs;\nif (!systemProperties.isEmpty()) {\nfinal StringBuilder buf = new StringBuilder();\nif (jvmArgs != null) {\nbuf.append(jvmArgs);\n}\nfor (Map.Entry prop : systemProperties.entrySet()) {\nbuf.append(\" -D\").append(prop.getKey()).append(\"=\\\"\").append(prop.getValue()).append(\"\\\"\");\n}\njvmArgs = buf.toString();\n}\nif (jvmArgs != null) {\nbuilder.jvmArgs(Arrays.asList(CommandLineUtils.translateCommandline(jvmArgs)));\n}\n}\nprivate void applyCompilerFlag(Optional compilerPluginConfiguration, String flagName,\nConsumer builderCall) {\ncompilerPluginConfiguration\n.map(cfg -> cfg.getChild(flagName))\n.map(Xpp3Dom::getValue)\n.map(String::trim)\n.filter(not(String::isEmpty))\n.ifPresent(builderCall);\n}\nprivate void addQuarkusDevModeDeps(MavenDevModeLauncher.Builder builder, ApplicationModel appModel)\nthrows MojoExecutionException, DependencyResolutionException {\nResolvedDependency coreDeployment = null;\nfor (ResolvedDependency d : appModel.getDependencies()) {\nif (d.isDeploymentCp() && d.getArtifactId().equals(\"quarkus-core-deployment\")\n&& d.getGroupId().equals(IO_QUARKUS)) {\ncoreDeployment = d;\nbreak;\n}\n}\nif (coreDeployment == null) {\nthrow new MojoExecutionException(\n\"Failed to locate io.quarkus:quarkus-core-deployment on the application build classpath\");\n}\nfinal String pomPropsPath = \"META-INF/maven/io.quarkus/quarkus-bootstrap-maven-resolver/pom.properties\";\nfinal InputStream devModePomPropsIs = DevModeMain.class.getClassLoader().getResourceAsStream(pomPropsPath);\nif (devModePomPropsIs == null) {\nthrow new MojoExecutionException(\"Failed to locate \" + pomPropsPath + \" on the classpath\");\n}\nfinal Properties devModeProps = new Properties();\ntry (InputStream is = devModePomPropsIs) {\ndevModeProps.load(is);\n} catch (IOException e) {\nthrow new MojoExecutionException(\"Failed to load \" + pomPropsPath + \" from the classpath\", e);\n}\nfinal String devModeGroupId = devModeProps.getProperty(\"groupId\");\nif (devModeGroupId == null) {\nthrow new MojoExecutionException(\"Classpath resource \" + pomPropsPath + \" is missing groupId\");\n}\nfinal String devModeArtifactId = devModeProps.getProperty(\"artifactId\");\nif (devModeArtifactId == null) {\nthrow new MojoExecutionException(\"Classpath resource \" + pomPropsPath + \" is missing artifactId\");\n}\nfinal String devModeVersion = devModeProps.getProperty(\"version\");\nif (devModeVersion == null) {\nthrow new MojoExecutionException(\"Classpath resource \" + pomPropsPath + \" is missing version\");\n}\nfinal DefaultArtifact devModeJar = new DefaultArtifact(devModeGroupId, devModeArtifactId, ArtifactCoords.TYPE_JAR,\ndevModeVersion);\nfinal DependencyResult cpRes = repoSystem.resolveDependencies(repoSession,\nnew DependencyRequest()\n.setCollectRequest(\nnew CollectRequest()\n.setRootArtifact(new DefaultArtifact(IO_QUARKUS, \"quarkus-devmode-alias\",\nArtifactCoords.TYPE_JAR, \"1.0\"))\n.setManagedDependencies(getProjectAetherDependencyManagement())\n.setDependencies(List.of(\nnew org.eclipse.aether.graph.Dependency(devModeJar, JavaScopes.RUNTIME),\nnew org.eclipse.aether.graph.Dependency(new DefaultArtifact(\ncoreDeployment.getGroupId(), coreDeployment.getArtifactId(),\ncoreDeployment.getClassifier(), coreDeployment.getType(),\ncoreDeployment.getVersion()), 
JavaScopes.RUNTIME)))\n.setRepositories(repos)));\nfor (ArtifactResult appDep : cpRes.getArtifactResults()) {\nfinal org.eclipse.aether.artifact.Artifact a = appDep.getArtifact();\nif (!(a.getArtifactId().equals(\"quarkus-ide-launcher\")\n&& a.getGroupId().equals(IO_QUARKUS))) {\nif (a.getArtifactId().equals(\"quarkus-class-change-agent\")\n&& a.getGroupId().equals(IO_QUARKUS)) {\nbuilder.jvmArgs(\"-javaagent:\" + a.getFile().getAbsolutePath());\n} else {\nbuilder.classpathEntry(\nArtifactKey.of(a.getGroupId(), a.getArtifactId(), a.getClassifier(), a.getExtension()),\na.getFile());\n}\n}\n}\n}\nprivate List getProjectAetherDependencyManagement() {\nfinal List managed = new ArrayList<>(\nproject.getDependencyManagement().getDependencies().size());\nproject.getDependencyManagement().getDependencies().forEach(d -> {\nfinal List exclusions;\nif (!d.getExclusions().isEmpty()) {\nexclusions = new ArrayList<>(d.getExclusions().size());\nd.getExclusions().forEach(e -> exclusions.add(new Exclusion(e.getGroupId(), e.getArtifactId(), \"*\", \"*\")));\n} else {\nexclusions = List.of();\n}\nmanaged.add(new org.eclipse.aether.graph.Dependency(\nnew DefaultArtifact(d.getGroupId(), d.getArtifactId(), d.getClassifier(), d.getType(), d.getVersion()),\nd.getScope(), d.isOptional(), exclusions));\n});\nreturn managed;\n}\nprivate void setKotlinSpecificFlags(MavenDevModeLauncher.Builder builder) {\nPlugin kotlinMavenPlugin = null;\nfor (Plugin plugin : project.getBuildPlugins()) {\nif (plugin.getArtifactId().equals(KOTLIN_MAVEN_PLUGIN) && plugin.getGroupId().equals(ORG_JETBRAINS_KOTLIN)) {\nkotlinMavenPlugin = plugin;\nbreak;\n}\n}\nif (kotlinMavenPlugin == null) {\nreturn;\n}\ngetLog().debug(\"Kotlin Maven plugin detected\");\nList compilerPluginArtifacts = new ArrayList<>();\nList dependencies = kotlinMavenPlugin.getDependencies();\nfor (Dependency dependency : dependencies) {\ntry {\nArtifactResult resolvedArtifact = repoSystem.resolveArtifact(repoSession,\nnew ArtifactRequest()\n.setArtifact(new DefaultArtifact(dependency.getGroupId(), dependency.getArtifactId(),\ndependency.getClassifier(), dependency.getType(), dependency.getVersion()))\n.setRepositories(repos));\ncompilerPluginArtifacts.add(resolvedArtifact.getArtifact().getFile().toPath().toAbsolutePath().toString());\n} catch (ArtifactResolutionException e) {\ngetLog().warn(\"Unable to properly setup dev-mode for Kotlin\", e);\nreturn;\n}\n}\nbuilder.compilerPluginArtifacts(compilerPluginArtifacts);\nList options = new ArrayList<>();\nXpp3Dom compilerPluginConfiguration = (Xpp3Dom) kotlinMavenPlugin.getConfiguration();\nif (compilerPluginConfiguration != null) {\nXpp3Dom compilerPluginArgsConfiguration = compilerPluginConfiguration.getChild(\"pluginOptions\");\nif (compilerPluginArgsConfiguration != null) {\nfor (Xpp3Dom argConfiguration : compilerPluginArgsConfiguration.getChildren()) {\noptions.add(argConfiguration.getValue());\n}\n}\n}\nbuilder.compilerPluginOptions(options);\n}\nprivate void setAnnotationProcessorFlags(MavenDevModeLauncher.Builder builder) {\nPlugin compilerMavenPlugin = null;\nfor (Plugin plugin : project.getBuildPlugins()) {\nif (plugin.getArtifactId().equals(\"maven-compiler-plugin\")\n&& plugin.getGroupId().equals(\"org.apache.maven.plugins\")) {\ncompilerMavenPlugin = plugin;\nbreak;\n}\n}\nif (compilerMavenPlugin == null) {\nreturn;\n}\ngetLog().debug(\"Maven compiler plugin found, looking for annotation processors\");\nList options = new ArrayList<>();\nXpp3Dom compilerPluginConfiguration = (Xpp3Dom) 
compilerMavenPlugin.getConfiguration();\ntry {\nSet processorPaths = this.readAnnotationProcessorPaths(compilerPluginConfiguration);\ngetLog().debug(\"Found processor paths: \" + processorPaths);\nif (!processorPaths.isEmpty()) {\nbuilder.annotationProcessorPaths(processorPaths);\n}\n} catch (MojoExecutionException e) {\nthrow new RuntimeException(e);\n}\nList processors = this.readAnnotationProcessors(compilerPluginConfiguration);\ngetLog().debug(\"Found processors: \" + processors);\nif (!processors.isEmpty()) {\nbuilder.annotationProcessors(processors);\n}\nbuilder.compilerPluginOptions(options);\n}\nprotected void modifyDevModeContext(MavenDevModeLauncher.Builder builder) {\n}\nprivate Optional findCompilerPluginConfiguration() {\nfor (final Plugin plugin : project.getBuildPlugins()) {\nif (plugin.getArtifactId().equals(MAVEN_COMPILER_PLUGIN) && plugin.getGroupId().equals(ORG_APACHE_MAVEN_PLUGINS)) {\nfinal Xpp3Dom compilerPluginConfiguration = (Xpp3Dom) plugin.getConfiguration();\nif (compilerPluginConfiguration != null) {\nreturn Optional.of(compilerPluginConfiguration);\n}\n}\n}\nreturn Optional.empty();\n}\nprivate Path resolveSerializedModelLocation() {\nfinal Path p = BootstrapUtils.resolveSerializedAppModelPath(Paths.get(project.getBuild().getDirectory()));\np.toFile().deleteOnExit();\nreturn p;\n}\nprivate static final class PluginExec {\nfinal Plugin plugin;\nfinal String prefix;\nfinal PluginExecution execution;\nPluginExec(Plugin plugin, String prefix, PluginExecution execution) {\nthis.plugin = plugin;\nthis.prefix = prefix;\nthis.execution = execution;\n}\nString getPluginId() {\nreturn plugin.getId();\n}\nString getPrefix() {\nreturn prefix == null ? plugin.getId() : prefix;\n}\nString getExecutionId() {\nreturn execution == null ? null : execution.getId();\n}\n}\n}"
  },
  {
    "comment": "I don't have an environment to test the case with the principal of the hostname (FQDN), so I think it's better to keep the comment as a reminder. 
", + "method_body": "private static String preparePrincipal(String originalPrincipal) throws UnknownHostException {\nString finalPrincipal = originalPrincipal;\nString[] components = originalPrincipal.split(\"[/@]\");\nif (components != null && components.length == 3) {\nif (components[1].equals(\"_HOST\")) {\nfinalPrincipal = components[0] + \"/\" +\nStringUtils.toLowerCase(InetAddress.getLocalHost().getCanonicalHostName())\n+ \"@\" + components[2];\n} else if (components[1].equals(\"_IP\")) {\nfinalPrincipal = components[0] + \"/\" +\nInetAddress.getByName(InetAddress.getLocalHost().getCanonicalHostName()).getHostAddress()\n+ \"@\" + components[2];\n}\n}\nreturn finalPrincipal;\n}", + "target_code": "", + "method_body_after": "private static String preparePrincipal(String originalPrincipal) throws UnknownHostException {\nString finalPrincipal = originalPrincipal;\nString[] components = originalPrincipal.split(\"[/@]\");\nif (components != null && components.length == 3) {\nif (components[1].equals(\"_HOST\")) {\nfinalPrincipal = components[0] + \"/\" +\nStringUtils.toLowerCase(InetAddress.getLocalHost().getCanonicalHostName())\n+ \"@\" + components[2];\n} else if (components[1].equals(\"_IP\")) {\nfinalPrincipal = components[0] + \"/\" +\nInetAddress.getByName(InetAddress.getLocalHost().getCanonicalHostName()).getHostAddress()\n+ \"@\" + components[2];\n}\n}\nreturn finalPrincipal;\n}", + "context_before": "class FileSystemManager {\nprivate static Logger logger = Logger\n.getLogger(FileSystemManager.class.getName());\nprivate static final String HDFS_SCHEME = \"hdfs:\nprivate static final String HDFS_UGI_CONF = \"hadoop.job.ugi\";\nprivate static final String USER_NAME_KEY = \"username\";\nprivate static final String PASSWORD_KEY = \"password\";\nprivate static final String AUTHENTICATION_SIMPLE = \"simple\";\nprivate static final String AUTHENTICATION_KERBEROS = \"kerberos\";\nprivate static final String KERBEROS_PRINCIPAL = \"kerberos_principal\";\nprivate static final String KERBEROS_KEYTAB = \"kerberos_keytab\";\nprivate static final String KERBEROS_KEYTAB_CONTENT = \"kerberos_keytab_content\";\nprivate static final String DFS_NAMESERVICES_KEY = \"dfs.nameservices\";\nprivate static final String DFS_HA_NAMENODES_PREFIX = \"dfs.ha.namenodes.\";\nprivate static final String DFS_HA_NAMENODE_RPC_ADDRESS_PREFIX = \"dfs.namenode.rpc-address.\";\nprivate static final String DFS_CLIENT_FAILOVER_PROXY_PROVIDER_PREFIX =\n\"dfs.client.failover.proxy.provider.\";\nprivate static final String DEFAULT_DFS_CLIENT_FAILOVER_PROXY_PROVIDER =\n\"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider\";\nprivate static final String FS_DEFAULTFS_KEY = \"fs.defaultFS\";\nprivate ScheduledExecutorService handleManagementPool = Executors.newScheduledThreadPool(2);\nprivate int readBufferSize = 128 << 10;\nprivate int writeBufferSize = 128 << 10;\nprivate ConcurrentHashMap cachedFileSystem;\nprivate ClientContextManager clientContextManager;\npublic FileSystemManager() {\ncachedFileSystem = new ConcurrentHashMap<>();\nclientContextManager = new ClientContextManager(handleManagementPool);\nreadBufferSize = BrokerConfig.hdfs_read_buffer_size_kb << 10;\nwriteBufferSize = BrokerConfig.hdfs_write_buffer_size_kb << 10;\nhandleManagementPool.schedule(new FileSystemExpirationChecker(), 0, TimeUnit.SECONDS);\n}\n/**\n* visible for test\n*\n* file system handle is cached, the identity is host + username_password\n* it will have safety problem if only hostname is used because one user may specify 
username and password\n* and then access hdfs, another user may not specify username and password but could also access data\n* @param path\n* @param properties\n* @return\n* @throws URISyntaxException\n* @throws Exception\n*/\npublic BrokerFileSystem getFileSystem(String path, Map properties) {\nURI pathUri;\ntry {\npathUri = new URI(path);\n} catch (URISyntaxException e) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_INPUT_FILE_PATH, e);\n}\nString host = HDFS_SCHEME + pathUri.getAuthority();\nif (Strings.isNullOrEmpty(pathUri.getAuthority())) {\nif (properties.containsKey(FS_DEFAULTFS_KEY)) {\nhost = properties.get(FS_DEFAULTFS_KEY);\nlogger.info(\"no schema and authority in path. use fs.defaultFs\");\n} else {\nlogger.warn(\"invalid hdfs path. authority is null,path:\" + path);\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"invalid hdfs path. authority is null\");\n}\n}\nString username = properties.containsKey(USER_NAME_KEY) ? properties.get(USER_NAME_KEY) : \"\";\nString password = properties.containsKey(PASSWORD_KEY) ? properties.get(PASSWORD_KEY) : \"\";\nString dfsNameServices =\nproperties.containsKey(DFS_NAMESERVICES_KEY) ? properties.get(DFS_NAMESERVICES_KEY) : \"\";\nString authentication = AUTHENTICATION_SIMPLE;\nif (properties.containsKey(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION)) {\nauthentication = properties.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION);\nif (Strings.isNullOrEmpty(authentication)\n|| (!authentication.equals(AUTHENTICATION_SIMPLE)\n&& !authentication.equals(AUTHENTICATION_KERBEROS))) {\nlogger.warn(\"invalid authentication:\" + authentication);\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"invalid authentication:\" + authentication);\n}\n}\nString hdfsUgi = username + \",\" + password;\nFileSystemIdentity fileSystemIdentity = null;\nBrokerFileSystem fileSystem = null;\nif (authentication.equals(AUTHENTICATION_SIMPLE)) {\nfileSystemIdentity = new FileSystemIdentity(host, hdfsUgi);\n} else {\nString kerberosContent = \"\";\nif (properties.containsKey(KERBEROS_KEYTAB)) {\nkerberosContent = properties.get(KERBEROS_KEYTAB);\n} else if (properties.containsKey(KERBEROS_KEYTAB_CONTENT)) {\nkerberosContent = properties.get(KERBEROS_KEYTAB_CONTENT);\n} else {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"keytab is required for kerberos authentication\");\n}\nif (!properties.containsKey(KERBEROS_PRINCIPAL)) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"principal is required for kerberos authentication\");\n} else {\nkerberosContent = kerberosContent + properties.get(KERBEROS_PRINCIPAL);\n}\ntry {\nMessageDigest digest = MessageDigest.getInstance(\"md5\");\nbyte[] result = digest.digest(kerberosContent.getBytes());\nString kerberosUgi = new String(result);\nfileSystemIdentity = new FileSystemIdentity(host, kerberosUgi);\n} catch (NoSuchAlgorithmException e) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\ne.getMessage());\n}\n}\ncachedFileSystem.putIfAbsent(fileSystemIdentity, new BrokerFileSystem(fileSystemIdentity));\nfileSystem = cachedFileSystem.get(fileSystemIdentity);\nif (fileSystem == null) {\nreturn null;\n}\nfileSystem.getLock().lock();\ntry {\nif (!cachedFileSystem.containsKey(fileSystemIdentity)) {\nreturn null;\n}\nif (fileSystem.getDFSFileSystem() == null) {\nlogger.info(\"could not find file system for path \" + path + \" create a new one\");\nConfiguration 
conf = new Configuration();\nString tmpFilePath = null;\nif (authentication.equals(AUTHENTICATION_SIMPLE)) {\nconf.set(HDFS_UGI_CONF, hdfsUgi);\n} else if (authentication.equals(AUTHENTICATION_KERBEROS)){\nconf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,\nAUTHENTICATION_KERBEROS);\nString principal = preparePrincipal(properties.get(KERBEROS_PRINCIPAL));\nString keytab = \"\";\nif (properties.containsKey(KERBEROS_KEYTAB)) {\nkeytab = properties.get(KERBEROS_KEYTAB);\n} else if (properties.containsKey(KERBEROS_KEYTAB_CONTENT)) {\nString keytab_content = properties.get(KERBEROS_KEYTAB_CONTENT);\nbyte[] base64decodedBytes = Base64.getDecoder().decode(keytab_content);\nlong currentTime = System.currentTimeMillis();\nRandom random = new Random(currentTime);\nint randNumber = random.nextInt(10000);\ntmpFilePath = \"/tmp/.\" + Long.toString(currentTime) + \"_\" + Integer.toString(randNumber);\nFileOutputStream fileOutputStream = new FileOutputStream(tmpFilePath);\nfileOutputStream.write(base64decodedBytes);\nfileOutputStream.close();\nkeytab = tmpFilePath;\n} else {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"keytab is required for kerberos authentication\");\n}\nUserGroupInformation.setConfiguration(conf);\nUserGroupInformation.loginUserFromKeytab(principal, keytab);\nif (properties.containsKey(KERBEROS_KEYTAB_CONTENT)) {\ntry {\nFile file = new File(tmpFilePath);\nif(!file.delete()){\nlogger.warn(\"delete tmp file:\" + tmpFilePath + \" failed\");\n}\n} catch (Exception e) {\nthrow new BrokerException(TBrokerOperationStatusCode.FILE_NOT_FOUND,\ne.getMessage());\n}\n}\n} else {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"invalid authentication.\");\n}\nif (!Strings.isNullOrEmpty(dfsNameServices)) {\nfinal String dfsHaNameNodesKey = DFS_HA_NAMENODES_PREFIX + dfsNameServices;\nif (!properties.containsKey(dfsHaNameNodesKey)) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"load request missed necessary arguments for ha mode\");\n}\nString dfsHaNameNodes = properties.get(dfsHaNameNodesKey);\nconf.set(DFS_NAMESERVICES_KEY, dfsNameServices);\nconf.set(dfsHaNameNodesKey, dfsHaNameNodes);\nString[] nameNodes = dfsHaNameNodes.split(\",\");\nif (nameNodes == null) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"invalid \" + dfsHaNameNodesKey + \" configuration\");\n} else {\nfor (String nameNode : nameNodes) {\nString nameNodeRpcAddress =\nDFS_HA_NAMENODE_RPC_ADDRESS_PREFIX + dfsNameServices + \".\" + nameNode;\nif (!properties.containsKey(nameNodeRpcAddress)) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"missed \" + nameNodeRpcAddress + \" configuration\");\n} else {\nconf.set(nameNodeRpcAddress, properties.get(nameNodeRpcAddress));\n}\n}\n}\nfinal String dfsClientFailoverProxyProviderKey =\nDFS_CLIENT_FAILOVER_PROXY_PROVIDER_PREFIX + dfsNameServices;\nif (properties.containsKey(dfsClientFailoverProxyProviderKey)) {\nconf.set(dfsClientFailoverProxyProviderKey,\nproperties.get(dfsClientFailoverProxyProviderKey));\n} else {\nconf.set(dfsClientFailoverProxyProviderKey,\nDEFAULT_DFS_CLIENT_FAILOVER_PROXY_PROVIDER);\n}\nif (properties.containsKey(FS_DEFAULTFS_KEY)) {\nconf.set(FS_DEFAULTFS_KEY, properties.get(FS_DEFAULTFS_KEY));\n}\n}\nFileSystem dfsFileSystem = FileSystem.get(URI.create(host), conf);\nfileSystem.setFileSystem(dfsFileSystem);\n}\nreturn fileSystem;\n} catch (Exception e) {\nlogger.error(\"errors while connect to \" + path, 
e);\nthrow new BrokerException(TBrokerOperationStatusCode.NOT_AUTHORIZED, e);\n} finally {\nfileSystem.getLock().unlock();\n}\n}\npublic List listPath(String path, Map properties) {\nList resultFileStatus = null;\nURI pathUri = getUriFromPath(path);\nBrokerFileSystem fileSystem = getFileSystem(path, properties);\nPath pathPattern = new Path(pathUri.getPath());\ntry {\nFileStatus[] files = fileSystem.getDFSFileSystem().globStatus(pathPattern);\nif (files == null) {\nresultFileStatus = new ArrayList<>(0);\nreturn resultFileStatus;\n}\nresultFileStatus = new ArrayList<>(files.length);\nfor (FileStatus fileStatus : files) {\nTBrokerFileStatus brokerFileStatus = new TBrokerFileStatus();\nbrokerFileStatus.setIsDir(fileStatus.isDir());\nif (fileStatus.isDir()) {\nbrokerFileStatus.setIsSplitable(false);\nbrokerFileStatus.setSize(-1);\n} else {\nbrokerFileStatus.setSize(fileStatus.getLen());\nbrokerFileStatus.setIsSplitable(true);\n}\nbrokerFileStatus.setPath(fileStatus.getPath().toString());\nresultFileStatus.add(brokerFileStatus);\n}\n} catch (Exception e) {\nlogger.error(\"errors while get file status \", e);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"unknown error when get file status\");\n}\nreturn resultFileStatus;\n}\npublic void deletePath(String path, Map properties) {\nURI pathUri = getUriFromPath(path);\nBrokerFileSystem fileSystem = getFileSystem(path, properties);\nPath filePath = new Path(pathUri.getPath());\ntry {\nfileSystem.getDFSFileSystem().delete(filePath, true);\n} catch (IOException e) {\nlogger.error(\"errors while delete path \" + path);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"delete path {} error\", path);\n}\n}\npublic void renamePath(String srcPath, String destPath, Map properties) {\nURI srcPathUri = getUriFromPath(srcPath);\nURI destPathUri = getUriFromPath(destPath);\nif (!srcPathUri.getAuthority().trim().equals(destPathUri.getAuthority().trim())) {\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\n\"only allow rename in same file system\");\n}\nBrokerFileSystem fileSystem = getFileSystem(srcPath, properties);\nPath srcfilePath = new Path(srcPathUri.getPath());\nPath destfilePath = new Path(destPathUri.getPath());\ntry {\nboolean isRenameSuccess = fileSystem.getDFSFileSystem().rename(srcfilePath, destfilePath);\nif (!isRenameSuccess) {\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\n\"failed to rename path from {} to {}\", srcPath, destPath);\n}\n} catch (IOException e) {\nlogger.error(\"errors while rename path from \" + srcPath + \" to \" + destPath);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"errors while rename {} to {}\", srcPath, destPath);\n}\n}\npublic boolean checkPathExist(String path, Map properties) {\nURI pathUri = getUriFromPath(path);\nBrokerFileSystem fileSystem = getFileSystem(path, properties);\nPath filePath = new Path(pathUri.getPath());\ntry {\nboolean isPathExist = fileSystem.getDFSFileSystem().exists(filePath);\nreturn isPathExist;\n} catch (IOException e) {\nlogger.error(\"errors while check path exist: \" + path);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"errors while check if path {} exist\", path);\n}\n}\npublic TBrokerFD openReader(String clientId, String path, 
long startOffset, Map properties) {\nURI pathUri = getUriFromPath(path);\nPath inputFilePath = new Path(pathUri.getPath());\nBrokerFileSystem fileSystem = getFileSystem(path, properties);\ntry {\nFSDataInputStream fsDataInputStream = fileSystem.getDFSFileSystem().open(inputFilePath, readBufferSize);\nfsDataInputStream.seek(startOffset);\nUUID uuid = UUID.randomUUID();\nTBrokerFD fd = parseUUIDToFD(uuid);\nclientContextManager.putNewInputStream(clientId, fd, fsDataInputStream, fileSystem);\nreturn fd;\n} catch (IOException e) {\nlogger.error(\"errors while open path\", e);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"could not open file {}\", path);\n}\n}\npublic ByteBuffer pread(TBrokerFD fd, long offset, long length) {\nFSDataInputStream fsDataInputStream = clientContextManager.getFsDataInputStream(fd);\nsynchronized (fsDataInputStream) {\nlong currentStreamOffset;\ntry {\ncurrentStreamOffset = fsDataInputStream.getPos();\n} catch (IOException e) {\nlogger.error(\"errors while get file pos from output stream\", e);\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\n\"errors while get file pos from output stream\");\n}\nif (currentStreamOffset != offset) {\nlogger.warn(\"invalid offset, current read offset is \"\n+ currentStreamOffset + \" is not equal to request offset \"\n+ offset + \" seek to it\");\ntry {\nfsDataInputStream.seek(offset);\n} catch (IOException e) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_INPUT_OFFSET,\ne, \"current read offset {} is not equal to {}, and could not seek to it\",\ncurrentStreamOffset, offset);\n}\n}\nbyte[] buf;\nif (length > readBufferSize) {\nbuf = new byte[readBufferSize];\n} else {\nbuf = new byte[(int) length];\n}\ntry {\nint readLength = fsDataInputStream.read(buf);\nif (readLength < 0) {\nthrow new BrokerException(TBrokerOperationStatusCode.END_OF_FILE,\n\"end of file reached\");\n}\nreturn ByteBuffer.wrap(buf, 0, readLength);\n} catch (IOException e) {\nlogger.error(\"errors while read data from stream\", e);\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"errors while write data to output stream\");\n}\n}\n}\npublic void seek(TBrokerFD fd, long offset) {\nthrow new BrokerException(TBrokerOperationStatusCode.OPERATION_NOT_SUPPORTED,\n\"seek this method is not supported\");\n}\npublic void closeReader(TBrokerFD fd) {\nFSDataInputStream fsDataInputStream = clientContextManager.getFsDataInputStream(fd);\nsynchronized (fsDataInputStream) {\ntry {\nfsDataInputStream.close();\n} catch (IOException e) {\nlogger.error(\"errors while close file input stream\", e);\n} finally {\nclientContextManager.removeInputStream(fd);\n}\n}\n}\npublic TBrokerFD openWriter(String clientId, String path, Map properties) {\nURI pathUri = getUriFromPath(path);\nPath inputFilePath = new Path(pathUri.getPath());\nBrokerFileSystem fileSystem = getFileSystem(path, properties);\ntry {\nFSDataOutputStream fsDataOutputStream = fileSystem.getDFSFileSystem().create(inputFilePath,\ntrue, writeBufferSize);\nUUID uuid = UUID.randomUUID();\nTBrokerFD fd = parseUUIDToFD(uuid);\nclientContextManager.putNewOutputStream(clientId, fd, fsDataOutputStream, fileSystem);\nreturn fd;\n} catch (IOException e) {\nlogger.error(\"errors while open path\", e);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"could not open file {}\", path);\n}\n}\npublic void 
pwrite(TBrokerFD fd, long offset, byte[] data) {\nFSDataOutputStream fsDataOutputStream = clientContextManager.getFsDataOutputStream(fd);\nsynchronized (fsDataOutputStream) {\nlong currentStreamOffset;\ntry {\ncurrentStreamOffset = fsDataOutputStream.getPos();\n} catch (IOException e) {\nlogger.error(\"errors while get file pos from output stream\", e);\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\n\"errors while get file pos from output stream\");\n}\nif (currentStreamOffset != offset) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_INPUT_OFFSET,\n\"current outputstream offset is {} not equal to request {}\",\ncurrentStreamOffset, offset);\n}\ntry {\nfsDataOutputStream.write(data);\n} catch (IOException e) {\nlogger.error(\"errors while write data to output stream\", e);\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"errors while write data to output stream\");\n}\n}\n}\npublic void closeWriter(TBrokerFD fd) {\nFSDataOutputStream fsDataOutputStream = clientContextManager.getFsDataOutputStream(fd);\nsynchronized (fsDataOutputStream) {\ntry {\nfsDataOutputStream.close();\n} catch (IOException e) {\nlogger.error(\"errors while close file output stream\", e);\n} finally {\nclientContextManager.removeOutputStream(fd);\n}\n}\n}\npublic void ping(String clientId) {\nclientContextManager.onPing(clientId);\n}\nprivate URI getUriFromPath(String path) {\nURI pathUri;\ntry {\npathUri = new URI(path);\npathUri = pathUri.normalize();\n} catch (URISyntaxException e) {\nlogger.error(\"invalid input path \" + path);\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_INPUT_FILE_PATH,\ne, \"invalid input path {} \", path);\n}\nreturn pathUri;\n}\nprivate static TBrokerFD parseUUIDToFD(UUID uuid) {\nreturn new TBrokerFD(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());\n}\nclass FileSystemExpirationChecker implements Runnable {\n@Override\npublic void run() {\ntry {\nfor (BrokerFileSystem fileSystem : cachedFileSystem.values()) {\nif (fileSystem.isExpired(BrokerConfig.client_expire_seconds)) {\nlogger.info(\"file system \" + fileSystem + \" is expired, close and remove it\");\nfileSystem.getLock().lock();\ntry {\nfileSystem.closeFileSystem();\n} catch (Throwable t) {\nlogger.error(\"errors while close file system\", t);\n} finally {\ncachedFileSystem.remove(fileSystem.getIdentity());\nfileSystem.getLock().unlock();\n}\n}\n}\n} finally {\nFileSystemManager.this.handleManagementPool.schedule(this, 60, TimeUnit.SECONDS);\n}\n}\n}\n}", + "context_after": "class FileSystemManager {\nprivate static Logger logger = Logger\n.getLogger(FileSystemManager.class.getName());\nprivate static final String HDFS_SCHEME = \"hdfs:\nprivate static final String HDFS_UGI_CONF = \"hadoop.job.ugi\";\nprivate static final String USER_NAME_KEY = \"username\";\nprivate static final String PASSWORD_KEY = \"password\";\nprivate static final String AUTHENTICATION_SIMPLE = \"simple\";\nprivate static final String AUTHENTICATION_KERBEROS = \"kerberos\";\nprivate static final String KERBEROS_PRINCIPAL = \"kerberos_principal\";\nprivate static final String KERBEROS_KEYTAB = \"kerberos_keytab\";\nprivate static final String KERBEROS_KEYTAB_CONTENT = \"kerberos_keytab_content\";\nprivate static final String DFS_NAMESERVICES_KEY = \"dfs.nameservices\";\nprivate static final String DFS_HA_NAMENODES_PREFIX = \"dfs.ha.namenodes.\";\nprivate static final String DFS_HA_NAMENODE_RPC_ADDRESS_PREFIX = 
\"dfs.namenode.rpc-address.\";\nprivate static final String DFS_CLIENT_FAILOVER_PROXY_PROVIDER_PREFIX =\n\"dfs.client.failover.proxy.provider.\";\nprivate static final String DEFAULT_DFS_CLIENT_FAILOVER_PROXY_PROVIDER =\n\"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider\";\nprivate static final String FS_DEFAULTFS_KEY = \"fs.defaultFS\";\nprivate ScheduledExecutorService handleManagementPool = Executors.newScheduledThreadPool(2);\nprivate int readBufferSize = 128 << 10;\nprivate int writeBufferSize = 128 << 10;\nprivate ConcurrentHashMap cachedFileSystem;\nprivate ClientContextManager clientContextManager;\npublic FileSystemManager() {\ncachedFileSystem = new ConcurrentHashMap<>();\nclientContextManager = new ClientContextManager(handleManagementPool);\nreadBufferSize = BrokerConfig.hdfs_read_buffer_size_kb << 10;\nwriteBufferSize = BrokerConfig.hdfs_write_buffer_size_kb << 10;\nhandleManagementPool.schedule(new FileSystemExpirationChecker(), 0, TimeUnit.SECONDS);\n}\n/**\n* visible for test\n*\n* file system handle is cached, the identity is host + username_password\n* it will have safety problem if only hostname is used because one user may specify username and password\n* and then access hdfs, another user may not specify username and password but could also access data\n* @param path\n* @param properties\n* @return\n* @throws URISyntaxException\n* @throws Exception\n*/\npublic BrokerFileSystem getFileSystem(String path, Map properties) {\nURI pathUri;\ntry {\npathUri = new URI(path);\n} catch (URISyntaxException e) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_INPUT_FILE_PATH, e);\n}\nString host = HDFS_SCHEME + pathUri.getAuthority();\nif (Strings.isNullOrEmpty(pathUri.getAuthority())) {\nif (properties.containsKey(FS_DEFAULTFS_KEY)) {\nhost = properties.get(FS_DEFAULTFS_KEY);\nlogger.info(\"no schema and authority in path. use fs.defaultFs\");\n} else {\nlogger.warn(\"invalid hdfs path. authority is null,path:\" + path);\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"invalid hdfs path. authority is null\");\n}\n}\nString username = properties.containsKey(USER_NAME_KEY) ? properties.get(USER_NAME_KEY) : \"\";\nString password = properties.containsKey(PASSWORD_KEY) ? properties.get(PASSWORD_KEY) : \"\";\nString dfsNameServices =\nproperties.containsKey(DFS_NAMESERVICES_KEY) ? 
properties.get(DFS_NAMESERVICES_KEY) : \"\";\nString authentication = AUTHENTICATION_SIMPLE;\nif (properties.containsKey(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION)) {\nauthentication = properties.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION);\nif (Strings.isNullOrEmpty(authentication)\n|| (!authentication.equals(AUTHENTICATION_SIMPLE)\n&& !authentication.equals(AUTHENTICATION_KERBEROS))) {\nlogger.warn(\"invalid authentication:\" + authentication);\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"invalid authentication:\" + authentication);\n}\n}\nString hdfsUgi = username + \",\" + password;\nFileSystemIdentity fileSystemIdentity = null;\nBrokerFileSystem fileSystem = null;\nif (authentication.equals(AUTHENTICATION_SIMPLE)) {\nfileSystemIdentity = new FileSystemIdentity(host, hdfsUgi);\n} else {\nString kerberosContent = \"\";\nif (properties.containsKey(KERBEROS_KEYTAB)) {\nkerberosContent = properties.get(KERBEROS_KEYTAB);\n} else if (properties.containsKey(KERBEROS_KEYTAB_CONTENT)) {\nkerberosContent = properties.get(KERBEROS_KEYTAB_CONTENT);\n} else {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"keytab is required for kerberos authentication\");\n}\nif (!properties.containsKey(KERBEROS_PRINCIPAL)) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"principal is required for kerberos authentication\");\n} else {\nkerberosContent = kerberosContent + properties.get(KERBEROS_PRINCIPAL);\n}\ntry {\nMessageDigest digest = MessageDigest.getInstance(\"md5\");\nbyte[] result = digest.digest(kerberosContent.getBytes());\nString kerberosUgi = new String(result);\nfileSystemIdentity = new FileSystemIdentity(host, kerberosUgi);\n} catch (NoSuchAlgorithmException e) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\ne.getMessage());\n}\n}\ncachedFileSystem.putIfAbsent(fileSystemIdentity, new BrokerFileSystem(fileSystemIdentity));\nfileSystem = cachedFileSystem.get(fileSystemIdentity);\nif (fileSystem == null) {\nreturn null;\n}\nfileSystem.getLock().lock();\ntry {\nif (!cachedFileSystem.containsKey(fileSystemIdentity)) {\nreturn null;\n}\nif (fileSystem.getDFSFileSystem() == null) {\nlogger.info(\"could not find file system for path \" + path + \" create a new one\");\nConfiguration conf = new Configuration();\nString tmpFilePath = null;\nif (authentication.equals(AUTHENTICATION_SIMPLE)) {\nconf.set(HDFS_UGI_CONF, hdfsUgi);\n} else if (authentication.equals(AUTHENTICATION_KERBEROS)){\nconf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,\nAUTHENTICATION_KERBEROS);\nString principal = preparePrincipal(properties.get(KERBEROS_PRINCIPAL));\nString keytab = \"\";\nif (properties.containsKey(KERBEROS_KEYTAB)) {\nkeytab = properties.get(KERBEROS_KEYTAB);\n} else if (properties.containsKey(KERBEROS_KEYTAB_CONTENT)) {\nString keytab_content = properties.get(KERBEROS_KEYTAB_CONTENT);\nbyte[] base64decodedBytes = Base64.getDecoder().decode(keytab_content);\nlong currentTime = System.currentTimeMillis();\nRandom random = new Random(currentTime);\nint randNumber = random.nextInt(10000);\ntmpFilePath = \"/tmp/.\" + Long.toString(currentTime) + \"_\" + Integer.toString(randNumber);\nFileOutputStream fileOutputStream = new FileOutputStream(tmpFilePath);\nfileOutputStream.write(base64decodedBytes);\nfileOutputStream.close();\nkeytab = tmpFilePath;\n} else {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"keytab is required for 
kerberos authentication\");\n}\nUserGroupInformation.setConfiguration(conf);\nUserGroupInformation.loginUserFromKeytab(principal, keytab);\nif (properties.containsKey(KERBEROS_KEYTAB_CONTENT)) {\ntry {\nFile file = new File(tmpFilePath);\nif(!file.delete()){\nlogger.warn(\"delete tmp file:\" + tmpFilePath + \" failed\");\n}\n} catch (Exception e) {\nthrow new BrokerException(TBrokerOperationStatusCode.FILE_NOT_FOUND,\ne.getMessage());\n}\n}\n} else {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"invalid authentication.\");\n}\nif (!Strings.isNullOrEmpty(dfsNameServices)) {\nfinal String dfsHaNameNodesKey = DFS_HA_NAMENODES_PREFIX + dfsNameServices;\nif (!properties.containsKey(dfsHaNameNodesKey)) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"load request missed necessary arguments for ha mode\");\n}\nString dfsHaNameNodes = properties.get(dfsHaNameNodesKey);\nconf.set(DFS_NAMESERVICES_KEY, dfsNameServices);\nconf.set(dfsHaNameNodesKey, dfsHaNameNodes);\nString[] nameNodes = dfsHaNameNodes.split(\",\");\nif (nameNodes == null) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"invalid \" + dfsHaNameNodesKey + \" configuration\");\n} else {\nfor (String nameNode : nameNodes) {\nString nameNodeRpcAddress =\nDFS_HA_NAMENODE_RPC_ADDRESS_PREFIX + dfsNameServices + \".\" + nameNode;\nif (!properties.containsKey(nameNodeRpcAddress)) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_ARGUMENT,\n\"missed \" + nameNodeRpcAddress + \" configuration\");\n} else {\nconf.set(nameNodeRpcAddress, properties.get(nameNodeRpcAddress));\n}\n}\n}\nfinal String dfsClientFailoverProxyProviderKey =\nDFS_CLIENT_FAILOVER_PROXY_PROVIDER_PREFIX + dfsNameServices;\nif (properties.containsKey(dfsClientFailoverProxyProviderKey)) {\nconf.set(dfsClientFailoverProxyProviderKey,\nproperties.get(dfsClientFailoverProxyProviderKey));\n} else {\nconf.set(dfsClientFailoverProxyProviderKey,\nDEFAULT_DFS_CLIENT_FAILOVER_PROXY_PROVIDER);\n}\nif (properties.containsKey(FS_DEFAULTFS_KEY)) {\nconf.set(FS_DEFAULTFS_KEY, properties.get(FS_DEFAULTFS_KEY));\n}\n}\nFileSystem dfsFileSystem = FileSystem.get(URI.create(host), conf);\nfileSystem.setFileSystem(dfsFileSystem);\n}\nreturn fileSystem;\n} catch (Exception e) {\nlogger.error(\"errors while connect to \" + path, e);\nthrow new BrokerException(TBrokerOperationStatusCode.NOT_AUTHORIZED, e);\n} finally {\nfileSystem.getLock().unlock();\n}\n}\npublic List listPath(String path, Map properties) {\nList resultFileStatus = null;\nURI pathUri = getUriFromPath(path);\nBrokerFileSystem fileSystem = getFileSystem(path, properties);\nPath pathPattern = new Path(pathUri.getPath());\ntry {\nFileStatus[] files = fileSystem.getDFSFileSystem().globStatus(pathPattern);\nif (files == null) {\nresultFileStatus = new ArrayList<>(0);\nreturn resultFileStatus;\n}\nresultFileStatus = new ArrayList<>(files.length);\nfor (FileStatus fileStatus : files) {\nTBrokerFileStatus brokerFileStatus = new TBrokerFileStatus();\nbrokerFileStatus.setIsDir(fileStatus.isDir());\nif (fileStatus.isDir()) {\nbrokerFileStatus.setIsSplitable(false);\nbrokerFileStatus.setSize(-1);\n} else {\nbrokerFileStatus.setSize(fileStatus.getLen());\nbrokerFileStatus.setIsSplitable(true);\n}\nbrokerFileStatus.setPath(fileStatus.getPath().toString());\nresultFileStatus.add(brokerFileStatus);\n}\n} catch (Exception e) {\nlogger.error(\"errors while get file status \", e);\nfileSystem.closeFileSystem();\nthrow new 
BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"unknown error when get file status\");\n}\nreturn resultFileStatus;\n}\npublic void deletePath(String path, Map properties) {\nURI pathUri = getUriFromPath(path);\nBrokerFileSystem fileSystem = getFileSystem(path, properties);\nPath filePath = new Path(pathUri.getPath());\ntry {\nfileSystem.getDFSFileSystem().delete(filePath, true);\n} catch (IOException e) {\nlogger.error(\"errors while delete path \" + path);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"delete path {} error\", path);\n}\n}\npublic void renamePath(String srcPath, String destPath, Map properties) {\nURI srcPathUri = getUriFromPath(srcPath);\nURI destPathUri = getUriFromPath(destPath);\nif (!srcPathUri.getAuthority().trim().equals(destPathUri.getAuthority().trim())) {\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\n\"only allow rename in same file system\");\n}\nBrokerFileSystem fileSystem = getFileSystem(srcPath, properties);\nPath srcfilePath = new Path(srcPathUri.getPath());\nPath destfilePath = new Path(destPathUri.getPath());\ntry {\nboolean isRenameSuccess = fileSystem.getDFSFileSystem().rename(srcfilePath, destfilePath);\nif (!isRenameSuccess) {\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\n\"failed to rename path from {} to {}\", srcPath, destPath);\n}\n} catch (IOException e) {\nlogger.error(\"errors while rename path from \" + srcPath + \" to \" + destPath);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"errors while rename {} to {}\", srcPath, destPath);\n}\n}\npublic boolean checkPathExist(String path, Map properties) {\nURI pathUri = getUriFromPath(path);\nBrokerFileSystem fileSystem = getFileSystem(path, properties);\nPath filePath = new Path(pathUri.getPath());\ntry {\nboolean isPathExist = fileSystem.getDFSFileSystem().exists(filePath);\nreturn isPathExist;\n} catch (IOException e) {\nlogger.error(\"errors while check path exist: \" + path);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"errors while check if path {} exist\", path);\n}\n}\npublic TBrokerFD openReader(String clientId, String path, long startOffset, Map properties) {\nURI pathUri = getUriFromPath(path);\nPath inputFilePath = new Path(pathUri.getPath());\nBrokerFileSystem fileSystem = getFileSystem(path, properties);\ntry {\nFSDataInputStream fsDataInputStream = fileSystem.getDFSFileSystem().open(inputFilePath, readBufferSize);\nfsDataInputStream.seek(startOffset);\nUUID uuid = UUID.randomUUID();\nTBrokerFD fd = parseUUIDToFD(uuid);\nclientContextManager.putNewInputStream(clientId, fd, fsDataInputStream, fileSystem);\nreturn fd;\n} catch (IOException e) {\nlogger.error(\"errors while open path\", e);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"could not open file {}\", path);\n}\n}\npublic ByteBuffer pread(TBrokerFD fd, long offset, long length) {\nFSDataInputStream fsDataInputStream = clientContextManager.getFsDataInputStream(fd);\nsynchronized (fsDataInputStream) {\nlong currentStreamOffset;\ntry {\ncurrentStreamOffset = fsDataInputStream.getPos();\n} catch (IOException e) {\nlogger.error(\"errors while get file pos from output stream\", e);\nthrow new 
BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\n\"errors while get file pos from output stream\");\n}\nif (currentStreamOffset != offset) {\nlogger.warn(\"invalid offset, current read offset is \"\n+ currentStreamOffset + \" is not equal to request offset \"\n+ offset + \" seek to it\");\ntry {\nfsDataInputStream.seek(offset);\n} catch (IOException e) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_INPUT_OFFSET,\ne, \"current read offset {} is not equal to {}, and could not seek to it\",\ncurrentStreamOffset, offset);\n}\n}\nbyte[] buf;\nif (length > readBufferSize) {\nbuf = new byte[readBufferSize];\n} else {\nbuf = new byte[(int) length];\n}\ntry {\nint readLength = fsDataInputStream.read(buf);\nif (readLength < 0) {\nthrow new BrokerException(TBrokerOperationStatusCode.END_OF_FILE,\n\"end of file reached\");\n}\nreturn ByteBuffer.wrap(buf, 0, readLength);\n} catch (IOException e) {\nlogger.error(\"errors while read data from stream\", e);\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"errors while write data to output stream\");\n}\n}\n}\npublic void seek(TBrokerFD fd, long offset) {\nthrow new BrokerException(TBrokerOperationStatusCode.OPERATION_NOT_SUPPORTED,\n\"seek this method is not supported\");\n}\npublic void closeReader(TBrokerFD fd) {\nFSDataInputStream fsDataInputStream = clientContextManager.getFsDataInputStream(fd);\nsynchronized (fsDataInputStream) {\ntry {\nfsDataInputStream.close();\n} catch (IOException e) {\nlogger.error(\"errors while close file input stream\", e);\n} finally {\nclientContextManager.removeInputStream(fd);\n}\n}\n}\npublic TBrokerFD openWriter(String clientId, String path, Map properties) {\nURI pathUri = getUriFromPath(path);\nPath inputFilePath = new Path(pathUri.getPath());\nBrokerFileSystem fileSystem = getFileSystem(path, properties);\ntry {\nFSDataOutputStream fsDataOutputStream = fileSystem.getDFSFileSystem().create(inputFilePath,\ntrue, writeBufferSize);\nUUID uuid = UUID.randomUUID();\nTBrokerFD fd = parseUUIDToFD(uuid);\nclientContextManager.putNewOutputStream(clientId, fd, fsDataOutputStream, fileSystem);\nreturn fd;\n} catch (IOException e) {\nlogger.error(\"errors while open path\", e);\nfileSystem.closeFileSystem();\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"could not open file {}\", path);\n}\n}\npublic void pwrite(TBrokerFD fd, long offset, byte[] data) {\nFSDataOutputStream fsDataOutputStream = clientContextManager.getFsDataOutputStream(fd);\nsynchronized (fsDataOutputStream) {\nlong currentStreamOffset;\ntry {\ncurrentStreamOffset = fsDataOutputStream.getPos();\n} catch (IOException e) {\nlogger.error(\"errors while get file pos from output stream\", e);\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\n\"errors while get file pos from output stream\");\n}\nif (currentStreamOffset != offset) {\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_INPUT_OFFSET,\n\"current outputstream offset is {} not equal to request {}\",\ncurrentStreamOffset, offset);\n}\ntry {\nfsDataOutputStream.write(data);\n} catch (IOException e) {\nlogger.error(\"errors while write data to output stream\", e);\nthrow new BrokerException(TBrokerOperationStatusCode.TARGET_STORAGE_SERVICE_ERROR,\ne, \"errors while write data to output stream\");\n}\n}\n}\npublic void closeWriter(TBrokerFD fd) {\nFSDataOutputStream fsDataOutputStream = 
clientContextManager.getFsDataOutputStream(fd);\nsynchronized (fsDataOutputStream) {\ntry {\nfsDataOutputStream.close();\n} catch (IOException e) {\nlogger.error(\"errors while close file output stream\", e);\n} finally {\nclientContextManager.removeOutputStream(fd);\n}\n}\n}\npublic void ping(String clientId) {\nclientContextManager.onPing(clientId);\n}\nprivate URI getUriFromPath(String path) {\nURI pathUri;\ntry {\npathUri = new URI(path);\npathUri = pathUri.normalize();\n} catch (URISyntaxException e) {\nlogger.error(\"invalid input path \" + path);\nthrow new BrokerException(TBrokerOperationStatusCode.INVALID_INPUT_FILE_PATH,\ne, \"invalid input path {} \", path);\n}\nreturn pathUri;\n}\nprivate static TBrokerFD parseUUIDToFD(UUID uuid) {\nreturn new TBrokerFD(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());\n}\nclass FileSystemExpirationChecker implements Runnable {\n@Override\npublic void run() {\ntry {\nfor (BrokerFileSystem fileSystem : cachedFileSystem.values()) {\nif (fileSystem.isExpired(BrokerConfig.client_expire_seconds)) {\nlogger.info(\"file system \" + fileSystem + \" is expired, close and remove it\");\nfileSystem.getLock().lock();\ntry {\nfileSystem.closeFileSystem();\n} catch (Throwable t) {\nlogger.error(\"errors while close file system\", t);\n} finally {\ncachedFileSystem.remove(fileSystem.getIdentity());\nfileSystem.getLock().unlock();\n}\n}\n}\n} finally {\nFileSystemManager.this.handleManagementPool.schedule(this, 60, TimeUnit.SECONDS);\n}\n}\n}\n}" + }, + { + "comment": "it is better to extract a private createRow method here", + "method_body": "protected MergedResult createMergedResult() {\nboolean masterOnly = HintManager.isMasterRouteOnly();\nboolean databaseOnly = HintManager.isDatabaseShardingOnly();\nHintShardingType shardingType = databaseOnly ? HintShardingType.DATABASES_ONLY : HintShardingType.DATABASES_TABLES;\nList row = new ArrayList<>(2);\nrow.add(String.valueOf(masterOnly).toLowerCase());\nrow.add(String.valueOf(shardingType).toLowerCase());\nreturn new ShowShardingCTLMergedResult(Collections.singletonList(row));\n}", + "target_code": "return new ShowShardingCTLMergedResult(Collections.singletonList(row));", + "method_body_after": "protected MergedResult createMergedResult() {\nHintShardingType shardingType = HintManager.isDatabaseShardingOnly() ? 
HintShardingType.DATABASES_ONLY : HintShardingType.DATABASES_TABLES;\nList row = createRow(HintManager.isMasterRouteOnly(), shardingType);\nreturn new ShowShardingCTLMergedResult(Collections.singletonList(row));\n}", + "context_before": "class HintShowStatusExecutor extends AbstractHintQueryExecutor {\nprotected List createQueryHeaders() {\nList queryHeaders = new ArrayList<>(2);\nqueryHeaders.add(new QueryHeader(\"\", \"\", \"master_only\", \"\", 5, Types.CHAR, 0));\nqueryHeaders.add(new QueryHeader(\"\", \"\", \"sharding_type\", \"\", 255, Types.CHAR, 0));\nreturn queryHeaders;\n}\n}", + "context_after": "class HintShowStatusExecutor extends AbstractHintQueryExecutor {\n@Override\nprotected List createQueryHeaders() {\nList queryHeaders = new ArrayList<>(2);\nqueryHeaders.add(new QueryHeader(\"\", \"\", \"master_only\", \"\", 5, Types.CHAR, 0));\nqueryHeaders.add(new QueryHeader(\"\", \"\", \"sharding_type\", \"\", 255, Types.CHAR, 0));\nreturn queryHeaders;\n}\nprivate List createRow(final boolean masterOnly, final HintShardingType shardingType) {\nList row = new ArrayList<>(2);\nrow.add(String.valueOf(masterOnly).toLowerCase());\nrow.add(String.valueOf(shardingType).toLowerCase());\nreturn row;\n}\n}" + }, + { + "comment": "out of curiosity, would you mind briefly explaining the change?", + "method_body": "public void testThreadsAreAddedOnlyAsNeededWithContention() throws Exception {\nUnboundedScheduledExecutorService executorService = new UnboundedScheduledExecutorService();\nCountDownLatch start = new CountDownLatch(100);\nThreadPoolExecutor executor =\nnew ThreadPoolExecutor(100, 100, Long.MAX_VALUE, MILLISECONDS, new SynchronousQueue<>());\nfor (int i = 0; i < 100; ++i) {\nexecutor.execute(\n() -> {\nstart.countDown();\ntry {\nstart.await();\n} catch (InterruptedException e) {\nthrow new RuntimeException(e);\n}\nfor (int j = 0; j < 1000; ++j) {\ntry {\nexecutorService\n.submit(\n() -> {\ntry {\nThread.sleep(1);\n} catch (InterruptedException e) {\nthrow new RuntimeException(e);\n}\n})\n.get();\n} catch (InterruptedException | ExecutionException e) {\n}\n}\n});\n}\nexecutor.shutdown();\nexecutor.awaitTermination(3, MINUTES);\nint largestPool = executorService.threadPoolExecutor.getLargestPoolSize();\nLOG.info(\"Created {} threads to execute at most 100 parallel tasks\", largestPool);\nassertTrue(largestPool <= 110);\nexecutorService.shutdown();\n}", + "target_code": "assertTrue(largestPool <= 110);", + "method_body_after": "public void testThreadsAreAddedOnlyAsNeededWithContention() throws Exception {\nUnboundedScheduledExecutorService executorService = new UnboundedScheduledExecutorService();\nCountDownLatch start = new CountDownLatch(100);\nThreadPoolExecutor executor =\nnew ThreadPoolExecutor(100, 100, Long.MAX_VALUE, MILLISECONDS, new SynchronousQueue<>());\nfor (int i = 0; i < 100; ++i) {\nexecutor.execute(\n() -> {\nstart.countDown();\ntry {\nstart.await();\n} catch (InterruptedException e) {\nthrow new RuntimeException(e);\n}\nfor (int j = 0; j < 1000; ++j) {\ntry {\nexecutorService\n.submit(\n() -> {\ntry {\nThread.sleep(1);\n} catch (InterruptedException e) {\nthrow new RuntimeException(e);\n}\n})\n.get();\n} catch (InterruptedException | ExecutionException e) {\n}\n}\n});\n}\nexecutor.shutdown();\nexecutor.awaitTermination(3, MINUTES);\nint largestPool = executorService.threadPoolExecutor.getLargestPoolSize();\nLOG.info(\"Created {} threads to execute at most 100 parallel tasks\", largestPool);\nassertTrue(largestPool <= 110);\nexecutorService.shutdown();\n}", +
"context_before": "class UnboundedScheduledExecutorServiceTest {\nprivate static final Logger LOG =\nLoggerFactory.getLogger(UnboundedScheduledExecutorServiceTest.class);\nprivate static final Runnable RUNNABLE =\n() -> {\n};\nprivate static final Callable CALLABLE = () -> \"A\";\nprivate static final Callable FAILING_CALLABLE =\n() -> {\nthrow new Exception(\"Test\");\n};\n@Test\npublic void testScheduleMethodErrorChecking() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nUnboundedScheduledExecutorService shutdownExecutorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nshutdownExecutorService.shutdown();\nassertThrows(\nNullPointerException.class, () -> executorService.schedule((Runnable) null, 10, SECONDS));\nassertThrows(NullPointerException.class, () -> executorService.schedule(RUNNABLE, 10, null));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.schedule(RUNNABLE, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.schedule((Callable) null, 10, SECONDS));\nassertThrows(NullPointerException.class, () -> executorService.schedule(CALLABLE, 10, null));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.schedule(CALLABLE, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.scheduleAtFixedRate(null, 10, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.scheduleAtFixedRate(RUNNABLE, 10, 10, null));\nassertThrows(\nIllegalArgumentException.class,\n() -> executorService.scheduleAtFixedRate(RUNNABLE, 10, -10, SECONDS));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.scheduleAtFixedRate(RUNNABLE, 10, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.scheduleWithFixedDelay((Runnable) null, 10, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.scheduleWithFixedDelay(RUNNABLE, 10, 10, null));\nassertThrows(\nIllegalArgumentException.class,\n() -> executorService.scheduleWithFixedDelay(RUNNABLE, 10, -10, SECONDS));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.scheduleWithFixedDelay(RUNNABLE, 10, 10, SECONDS));\nassertThat(executorService.shutdownNow(), empty());\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testSubmitMethodErrorChecking() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nUnboundedScheduledExecutorService shutdownExecutorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nshutdownExecutorService.shutdown();\nassertThrows(NullPointerException.class, () -> executorService.submit(null, \"result\"));\nassertThrows(\nRejectedExecutionException.class, () -> shutdownExecutorService.submit(RUNNABLE, \"result\"));\nassertThrows(NullPointerException.class, () -> executorService.submit((Runnable) null));\nassertThrows(RejectedExecutionException.class, () -> shutdownExecutorService.submit(RUNNABLE));\nassertThrows(NullPointerException.class, () -> executorService.submit((Callable) null));\nassertThrows(RejectedExecutionException.class, () -> 
shutdownExecutorService.submit(CALLABLE));\nassertThat(executorService.shutdownNow(), empty());\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testInvokeMethodErrorChecking() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nUnboundedScheduledExecutorService shutdownExecutorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nshutdownExecutorService.shutdown();\nassertThrows(NullPointerException.class, () -> executorService.invokeAll(null));\nassertThrows(\nNullPointerException.class, () -> executorService.invokeAll(Collections.singleton(null)));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.invokeAll(Collections.singleton(CALLABLE)));\nassertThrows(NullPointerException.class, () -> executorService.invokeAll(null, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.invokeAll(Collections.singleton(null), 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.invokeAll(Collections.singleton(CALLABLE), 10, null));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.invokeAll(Collections.singleton(CALLABLE), 10, SECONDS));\nassertThrows(NullPointerException.class, () -> executorService.invokeAny(null));\nassertThrows(\nNullPointerException.class, () -> executorService.invokeAny(Collections.singleton(null)));\nassertThrows(\nIllegalArgumentException.class, () -> executorService.invokeAny(Collections.emptyList()));\nassertThrows(\nExecutionException.class,\n() -> executorService.invokeAny(Arrays.asList(FAILING_CALLABLE, FAILING_CALLABLE)));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.invokeAny(Collections.singleton(CALLABLE)));\nassertThrows(NullPointerException.class, () -> executorService.invokeAny(null, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.invokeAny(Collections.singleton(null), 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.invokeAny(Collections.singleton(CALLABLE), 10, null));\nassertThrows(\nIllegalArgumentException.class,\n() -> executorService.invokeAny(Collections.emptyList(), 10, SECONDS));\nassertThrows(\nExecutionException.class,\n() ->\nexecutorService.invokeAny(\nArrays.asList(FAILING_CALLABLE, FAILING_CALLABLE), 10, SECONDS));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.invokeAny(Collections.singleton(CALLABLE), 10, SECONDS));\nassertThat(executorService.shutdownNow(), empty());\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testExecuteMethodErrorChecking() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nUnboundedScheduledExecutorService shutdownExecutorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nshutdownExecutorService.shutdown();\nassertThrows(NullPointerException.class, () -> executorService.execute(null));\nassertThrows(RejectedExecutionException.class, () -> shutdownExecutorService.execute(RUNNABLE));\nassertThat(executorService.shutdownNow(), empty());\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void 
testAllMethodsReturnScheduledFutures() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nassertThat(executorService.submit(RUNNABLE), instanceOf(ScheduledFutureTask.class));\nassertThat(executorService.submit(CALLABLE), instanceOf(ScheduledFutureTask.class));\nassertThat(executorService.submit(RUNNABLE, \"Answer\"), instanceOf(ScheduledFutureTask.class));\nassertThat(\nexecutorService.schedule(RUNNABLE, 10, SECONDS), instanceOf(ScheduledFutureTask.class));\nassertThat(\nexecutorService.schedule(CALLABLE, 10, SECONDS), instanceOf(ScheduledFutureTask.class));\nassertThat(\nexecutorService.scheduleAtFixedRate(RUNNABLE, 10, 10, SECONDS),\ninstanceOf(ScheduledFutureTask.class));\nassertThat(\nexecutorService.scheduleWithFixedDelay(RUNNABLE, 10, 10, SECONDS),\ninstanceOf(ScheduledFutureTask.class));\nassertThat(\nexecutorService.invokeAll(Arrays.asList(CALLABLE, CALLABLE)),\nIsIterableContainingInOrder.contains(\ninstanceOf(ScheduledFutureTask.class), instanceOf(ScheduledFutureTask.class)));\nassertThat(\nexecutorService.invokeAll(Arrays.asList(CALLABLE, CALLABLE), 10, SECONDS),\nIsIterableContainingInOrder.contains(\ninstanceOf(ScheduledFutureTask.class), instanceOf(ScheduledFutureTask.class)));\nexecutorService.shutdownNow();\n}\n@Test\npublic void testShutdown() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nRunnable runnable1 = Mockito.mock(Runnable.class);\nRunnable runnable2 = Mockito.mock(Runnable.class);\nRunnable runnable3 = Mockito.mock(Runnable.class);\nCallable callable1 = Mockito.mock(Callable.class);\nFuture rFuture1 = executorService.schedule(runnable1, 10, SECONDS);\nFuture cFuture1 = executorService.schedule(callable1, 10, SECONDS);\nFuture rFuture2 = executorService.scheduleAtFixedRate(runnable2, 10, 10, SECONDS);\nFuture rFuture3 = executorService.scheduleWithFixedDelay(runnable3, 10, 10, SECONDS);\nassertThat(\nexecutorService.shutdownNow(),\nIsIterableContaining.hasItems(\n(Runnable) rFuture1, (Runnable) rFuture2, (Runnable) rFuture3, (Runnable) cFuture1));\nverifyNoInteractions(runnable1, runnable2, runnable3, callable1);\nassertTrue(executorService.isShutdown());\nassertTrue(executorService.awaitTermination(10, SECONDS));\nassertTrue(executorService.isTerminated());\n}\n@Test\npublic void testExecute() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nAtomicInteger callCount = new AtomicInteger();\nCountDownLatch countDownLatch = new CountDownLatch(1);\nexecutorService.execute(\n() -> {\ncallCount.incrementAndGet();\ncountDownLatch.countDown();\n});\ncountDownLatch.await();\nassertEquals(1, callCount.get());\n}\n@Test\npublic void testSubmit() throws Exception {\nList callCounts = new ArrayList<>();\nList> futures = new ArrayList<>();\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\ncallCounts.add(new AtomicInteger());\nfutures.add(\n(ScheduledFutureTask)\nexecutorService.submit(\n(Runnable) 
callCounts.get(callCounts.size() - 1)::incrementAndGet));\ncallCounts.add(new AtomicInteger());\nfutures.add(\n(ScheduledFutureTask)\nexecutorService.submit(\ncallCounts.get(callCounts.size() - 1)::incrementAndGet, \"Result\"));\ncallCounts.add(new AtomicInteger());\nfutures.add(\n(ScheduledFutureTask)\nexecutorService.submit(callCounts.get(callCounts.size() - 1)::incrementAndGet));\nassertNull(futures.get(0).get());\nassertEquals(\"Result\", futures.get(1).get());\nassertEquals(1, futures.get(2).get());\nfor (int i = 0; i < callCounts.size(); ++i) {\nassertFalse(futures.get(i).isPeriodic());\nassertEquals(1, callCounts.get(i).get());\n}\n}\n@Test\npublic void testSchedule() throws Exception {\nList callCounts = new ArrayList<>();\nList> futures = new ArrayList<>();\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\ncallCounts.add(new AtomicInteger());\nfutures.add(\n(ScheduledFutureTask)\nexecutorService.schedule(\n(Runnable) callCounts.get(callCounts.size() - 1)::incrementAndGet,\n100,\nMILLISECONDS));\ncallCounts.add(new AtomicInteger());\nfutures.add(\n(ScheduledFutureTask)\nexecutorService.schedule(\ncallCounts.get(callCounts.size() - 1)::incrementAndGet, 100, MILLISECONDS));\nwakeUpAndCheckTasks(executorService);\nfor (int i = 0; i < callCounts.size(); ++i) {\nassertEquals(0, callCounts.get(i).get());\n}\nfastNanoClockAndSleeper.sleep(99);\nwakeUpAndCheckTasks(executorService);\nfor (int i = 0; i < callCounts.size(); ++i) {\nassertEquals(0, callCounts.get(i).get());\n}\nfastNanoClockAndSleeper.sleep(1);\nwakeUpAndCheckTasks(executorService);\nassertNull(futures.get(0).get());\nassertEquals(1, futures.get(1).get());\nfor (int i = 0; i < callCounts.size(); ++i) {\nassertFalse(futures.get(i).isPeriodic());\nassertEquals(1, callCounts.get(i).get());\n}\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testSchedulePeriodicWithFixedDelay() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nAtomicInteger callCount = new AtomicInteger();\nCountDownLatch latch = new CountDownLatch(1);\nScheduledFutureTask future =\n(ScheduledFutureTask)\nexecutorService.scheduleWithFixedDelay(\n() -> {\ncallCount.incrementAndGet();\nlatch.countDown();\n},\n100,\n50,\nMILLISECONDS);\nwakeUpAndCheckTasks(executorService);\nassertEquals(0, callCount.get());\nfastNanoClockAndSleeper.sleep(99);\nwakeUpAndCheckTasks(executorService);\nassertEquals(0, callCount.get());\nfastNanoClockAndSleeper.sleep(10);\nwakeUpAndCheckTasks(executorService);\nlatch.await();\nassertEquals(1, callCount.get());\nfor (; ; ) {\nsynchronized (executorService.tasks) {\nScheduledFutureTask task = executorService.tasks.peek();\nif (task != null) {\nassertEquals(50, task.getDelay(MILLISECONDS));\nbreak;\n}\n}\nThread.sleep(1);\n}\nassertTrue(future.isPeriodic());\nassertFalse(future.isDone());\nfuture.cancel(true);\nassertTrue(future.isCancelled());\nassertTrue(future.isDone());\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testSchedulePeriodicWithFixedRate() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew 
UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nAtomicInteger callCount = new AtomicInteger();\nCountDownLatch latch = new CountDownLatch(1);\nScheduledFutureTask future =\n(ScheduledFutureTask)\nexecutorService.scheduleAtFixedRate(\n() -> {\ncallCount.incrementAndGet();\nlatch.countDown();\n},\n100,\n50,\nMILLISECONDS);\nwakeUpAndCheckTasks(executorService);\nassertEquals(0, callCount.get());\nfastNanoClockAndSleeper.sleep(99);\nwakeUpAndCheckTasks(executorService);\nassertEquals(0, callCount.get());\nfastNanoClockAndSleeper.sleep(10);\nwakeUpAndCheckTasks(executorService);\nlatch.await();\nassertEquals(1, callCount.get());\nfor (; ; ) {\nsynchronized (executorService.tasks) {\nScheduledFutureTask task = executorService.tasks.peek();\nif (task != null) {\nassertEquals(41, task.getDelay(MILLISECONDS));\nbreak;\n}\n}\nThread.sleep(1);\n}\nassertTrue(future.isPeriodic());\nassertFalse(future.isDone());\nfuture.cancel(true);\nassertTrue(future.isCancelled());\nassertTrue(future.isDone());\nassertThat(executorService.shutdownNow(), empty());\n}\nvoid wakeUpAndCheckTasks(UnboundedScheduledExecutorService executorService) throws Exception {\nsynchronized (executorService.tasks) {\nexecutorService.tasks.notify();\n}\nThread.sleep(100);\n}\n@Test\n}", + "context_after": "class UnboundedScheduledExecutorServiceTest {\nprivate static final Logger LOG =\nLoggerFactory.getLogger(UnboundedScheduledExecutorServiceTest.class);\nprivate static final Runnable RUNNABLE =\n() -> {\n};\nprivate static final Callable CALLABLE = () -> \"A\";\nprivate static final Callable FAILING_CALLABLE =\n() -> {\nthrow new Exception(\"Test\");\n};\n@Test\npublic void testScheduleMethodErrorChecking() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nUnboundedScheduledExecutorService shutdownExecutorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nshutdownExecutorService.shutdown();\nassertThrows(\nNullPointerException.class, () -> executorService.schedule((Runnable) null, 10, SECONDS));\nassertThrows(NullPointerException.class, () -> executorService.schedule(RUNNABLE, 10, null));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.schedule(RUNNABLE, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.schedule((Callable) null, 10, SECONDS));\nassertThrows(NullPointerException.class, () -> executorService.schedule(CALLABLE, 10, null));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.schedule(CALLABLE, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.scheduleAtFixedRate(null, 10, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.scheduleAtFixedRate(RUNNABLE, 10, 10, null));\nassertThrows(\nIllegalArgumentException.class,\n() -> executorService.scheduleAtFixedRate(RUNNABLE, 10, -10, SECONDS));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.scheduleAtFixedRate(RUNNABLE, 10, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.scheduleWithFixedDelay((Runnable) null, 10, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.scheduleWithFixedDelay(RUNNABLE, 10, 10, null));\nassertThrows(\nIllegalArgumentException.class,\n() -> executorService.scheduleWithFixedDelay(RUNNABLE, 10, 
-10, SECONDS));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.scheduleWithFixedDelay(RUNNABLE, 10, 10, SECONDS));\nassertThat(executorService.shutdownNow(), empty());\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testSubmitMethodErrorChecking() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nUnboundedScheduledExecutorService shutdownExecutorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nshutdownExecutorService.shutdown();\nassertThrows(NullPointerException.class, () -> executorService.submit(null, \"result\"));\nassertThrows(\nRejectedExecutionException.class, () -> shutdownExecutorService.submit(RUNNABLE, \"result\"));\nassertThrows(NullPointerException.class, () -> executorService.submit((Runnable) null));\nassertThrows(RejectedExecutionException.class, () -> shutdownExecutorService.submit(RUNNABLE));\nassertThrows(NullPointerException.class, () -> executorService.submit((Callable) null));\nassertThrows(RejectedExecutionException.class, () -> shutdownExecutorService.submit(CALLABLE));\nassertThat(executorService.shutdownNow(), empty());\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testInvokeMethodErrorChecking() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nUnboundedScheduledExecutorService shutdownExecutorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nshutdownExecutorService.shutdown();\nassertThrows(NullPointerException.class, () -> executorService.invokeAll(null));\nassertThrows(\nNullPointerException.class, () -> executorService.invokeAll(Collections.singleton(null)));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.invokeAll(Collections.singleton(CALLABLE)));\nassertThrows(NullPointerException.class, () -> executorService.invokeAll(null, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.invokeAll(Collections.singleton(null), 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.invokeAll(Collections.singleton(CALLABLE), 10, null));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.invokeAll(Collections.singleton(CALLABLE), 10, SECONDS));\nassertThrows(NullPointerException.class, () -> executorService.invokeAny(null));\nassertThrows(\nNullPointerException.class, () -> executorService.invokeAny(Collections.singleton(null)));\nassertThrows(\nIllegalArgumentException.class, () -> executorService.invokeAny(Collections.emptyList()));\nassertThrows(\nExecutionException.class,\n() -> executorService.invokeAny(Arrays.asList(FAILING_CALLABLE, FAILING_CALLABLE)));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.invokeAny(Collections.singleton(CALLABLE)));\nassertThrows(NullPointerException.class, () -> executorService.invokeAny(null, 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.invokeAny(Collections.singleton(null), 10, SECONDS));\nassertThrows(\nNullPointerException.class,\n() -> executorService.invokeAny(Collections.singleton(CALLABLE), 10, null));\nassertThrows(\nIllegalArgumentException.class,\n() -> 
executorService.invokeAny(Collections.emptyList(), 10, SECONDS));\nassertThrows(\nExecutionException.class,\n() ->\nexecutorService.invokeAny(\nArrays.asList(FAILING_CALLABLE, FAILING_CALLABLE), 10, SECONDS));\nassertThrows(\nRejectedExecutionException.class,\n() -> shutdownExecutorService.invokeAny(Collections.singleton(CALLABLE), 10, SECONDS));\nassertThat(executorService.shutdownNow(), empty());\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testExecuteMethodErrorChecking() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nUnboundedScheduledExecutorService shutdownExecutorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nshutdownExecutorService.shutdown();\nassertThrows(NullPointerException.class, () -> executorService.execute(null));\nassertThrows(RejectedExecutionException.class, () -> shutdownExecutorService.execute(RUNNABLE));\nassertThat(executorService.shutdownNow(), empty());\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testAllMethodsReturnScheduledFutures() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nassertThat(executorService.submit(RUNNABLE), instanceOf(ScheduledFutureTask.class));\nassertThat(executorService.submit(CALLABLE), instanceOf(ScheduledFutureTask.class));\nassertThat(executorService.submit(RUNNABLE, \"Answer\"), instanceOf(ScheduledFutureTask.class));\nassertThat(\nexecutorService.schedule(RUNNABLE, 10, SECONDS), instanceOf(ScheduledFutureTask.class));\nassertThat(\nexecutorService.schedule(CALLABLE, 10, SECONDS), instanceOf(ScheduledFutureTask.class));\nassertThat(\nexecutorService.scheduleAtFixedRate(RUNNABLE, 10, 10, SECONDS),\ninstanceOf(ScheduledFutureTask.class));\nassertThat(\nexecutorService.scheduleWithFixedDelay(RUNNABLE, 10, 10, SECONDS),\ninstanceOf(ScheduledFutureTask.class));\nassertThat(\nexecutorService.invokeAll(Arrays.asList(CALLABLE, CALLABLE)),\nIsIterableContainingInOrder.contains(\ninstanceOf(ScheduledFutureTask.class), instanceOf(ScheduledFutureTask.class)));\nassertThat(\nexecutorService.invokeAll(Arrays.asList(CALLABLE, CALLABLE), 10, SECONDS),\nIsIterableContainingInOrder.contains(\ninstanceOf(ScheduledFutureTask.class), instanceOf(ScheduledFutureTask.class)));\nexecutorService.shutdownNow();\n}\n@Test\npublic void testShutdown() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nRunnable runnable1 = Mockito.mock(Runnable.class);\nRunnable runnable2 = Mockito.mock(Runnable.class);\nRunnable runnable3 = Mockito.mock(Runnable.class);\nCallable callable1 = Mockito.mock(Callable.class);\nFuture rFuture1 = executorService.schedule(runnable1, 10, SECONDS);\nFuture cFuture1 = executorService.schedule(callable1, 10, SECONDS);\nFuture rFuture2 = executorService.scheduleAtFixedRate(runnable2, 10, 10, SECONDS);\nFuture rFuture3 = executorService.scheduleWithFixedDelay(runnable3, 10, 10, SECONDS);\nassertThat(\nexecutorService.shutdownNow(),\nIsIterableContaining.hasItems(\n(Runnable) rFuture1, (Runnable) rFuture2, (Runnable) rFuture3, (Runnable) 
cFuture1));\nverifyNoInteractions(runnable1, runnable2, runnable3, callable1);\nassertTrue(executorService.isShutdown());\nassertTrue(executorService.awaitTermination(10, SECONDS));\nassertTrue(executorService.isTerminated());\n}\n@Test\npublic void testExecute() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nAtomicInteger callCount = new AtomicInteger();\nCountDownLatch countDownLatch = new CountDownLatch(1);\nexecutorService.execute(\n() -> {\ncallCount.incrementAndGet();\ncountDownLatch.countDown();\n});\ncountDownLatch.await();\nassertEquals(1, callCount.get());\n}\n@Test\npublic void testSubmit() throws Exception {\nList callCounts = new ArrayList<>();\nList> futures = new ArrayList<>();\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\ncallCounts.add(new AtomicInteger());\nfutures.add(\n(ScheduledFutureTask)\nexecutorService.submit(\n(Runnable) callCounts.get(callCounts.size() - 1)::incrementAndGet));\ncallCounts.add(new AtomicInteger());\nfutures.add(\n(ScheduledFutureTask)\nexecutorService.submit(\ncallCounts.get(callCounts.size() - 1)::incrementAndGet, \"Result\"));\ncallCounts.add(new AtomicInteger());\nfutures.add(\n(ScheduledFutureTask)\nexecutorService.submit(callCounts.get(callCounts.size() - 1)::incrementAndGet));\nassertNull(futures.get(0).get());\nassertEquals(\"Result\", futures.get(1).get());\nassertEquals(1, futures.get(2).get());\nfor (int i = 0; i < callCounts.size(); ++i) {\nassertFalse(futures.get(i).isPeriodic());\nassertEquals(1, callCounts.get(i).get());\n}\n}\n@Test\npublic void testSchedule() throws Exception {\nList callCounts = new ArrayList<>();\nList> futures = new ArrayList<>();\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\ncallCounts.add(new AtomicInteger());\nfutures.add(\n(ScheduledFutureTask)\nexecutorService.schedule(\n(Runnable) callCounts.get(callCounts.size() - 1)::incrementAndGet,\n100,\nMILLISECONDS));\ncallCounts.add(new AtomicInteger());\nfutures.add(\n(ScheduledFutureTask)\nexecutorService.schedule(\ncallCounts.get(callCounts.size() - 1)::incrementAndGet, 100, MILLISECONDS));\nwakeUpAndCheckTasks(executorService);\nfor (int i = 0; i < callCounts.size(); ++i) {\nassertEquals(0, callCounts.get(i).get());\n}\nfastNanoClockAndSleeper.sleep(99);\nwakeUpAndCheckTasks(executorService);\nfor (int i = 0; i < callCounts.size(); ++i) {\nassertEquals(0, callCounts.get(i).get());\n}\nfastNanoClockAndSleeper.sleep(1);\nwakeUpAndCheckTasks(executorService);\nassertNull(futures.get(0).get());\nassertEquals(1, futures.get(1).get());\nfor (int i = 0; i < callCounts.size(); ++i) {\nassertFalse(futures.get(i).isPeriodic());\nassertEquals(1, callCounts.get(i).get());\n}\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testSchedulePeriodicWithFixedDelay() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nAtomicInteger callCount = new AtomicInteger();\nCountDownLatch latch = new 
CountDownLatch(1);\nScheduledFutureTask future =\n(ScheduledFutureTask)\nexecutorService.scheduleWithFixedDelay(\n() -> {\ncallCount.incrementAndGet();\nlatch.countDown();\n},\n100,\n50,\nMILLISECONDS);\nwakeUpAndCheckTasks(executorService);\nassertEquals(0, callCount.get());\nfastNanoClockAndSleeper.sleep(99);\nwakeUpAndCheckTasks(executorService);\nassertEquals(0, callCount.get());\nfastNanoClockAndSleeper.sleep(10);\nwakeUpAndCheckTasks(executorService);\nlatch.await();\nassertEquals(1, callCount.get());\nfor (; ; ) {\nsynchronized (executorService.tasks) {\nScheduledFutureTask task = executorService.tasks.peek();\nif (task != null) {\nassertEquals(50, task.getDelay(MILLISECONDS));\nbreak;\n}\n}\nThread.sleep(1);\n}\nassertTrue(future.isPeriodic());\nassertFalse(future.isDone());\nfuture.cancel(true);\nassertTrue(future.isCancelled());\nassertTrue(future.isDone());\nassertThat(executorService.shutdownNow(), empty());\n}\n@Test\npublic void testSchedulePeriodicWithFixedRate() throws Exception {\nFastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();\nUnboundedScheduledExecutorService executorService =\nnew UnboundedScheduledExecutorService(fastNanoClockAndSleeper);\nAtomicInteger callCount = new AtomicInteger();\nCountDownLatch latch = new CountDownLatch(1);\nScheduledFutureTask future =\n(ScheduledFutureTask)\nexecutorService.scheduleAtFixedRate(\n() -> {\ncallCount.incrementAndGet();\nlatch.countDown();\n},\n100,\n50,\nMILLISECONDS);\nwakeUpAndCheckTasks(executorService);\nassertEquals(0, callCount.get());\nfastNanoClockAndSleeper.sleep(99);\nwakeUpAndCheckTasks(executorService);\nassertEquals(0, callCount.get());\nfastNanoClockAndSleeper.sleep(10);\nwakeUpAndCheckTasks(executorService);\nlatch.await();\nassertEquals(1, callCount.get());\nfor (; ; ) {\nsynchronized (executorService.tasks) {\nScheduledFutureTask task = executorService.tasks.peek();\nif (task != null) {\nassertEquals(41, task.getDelay(MILLISECONDS));\nbreak;\n}\n}\nThread.sleep(1);\n}\nassertTrue(future.isPeriodic());\nassertFalse(future.isDone());\nfuture.cancel(true);\nassertTrue(future.isCancelled());\nassertTrue(future.isDone());\nassertThat(executorService.shutdownNow(), empty());\n}\nvoid wakeUpAndCheckTasks(UnboundedScheduledExecutorService executorService) throws Exception {\nsynchronized (executorService.tasks) {\nexecutorService.tasks.notify();\n}\nThread.sleep(100);\n}\n@Test\n}" + }, + { + "comment": "Update: I think we should indent `case` since it increases readability.", + "method_body": "public void generate(GenType type, String definitionPath, String outPath) throws IOException {\nOpenAPI api = new OpenAPIV3Parser().read(definitionPath);\nOpenApiWrapper context = new OpenApiWrapper().buildFromOpenAPI(api).apiPackage(apiPackage);\nif (outPath == null || \"\".equals(outPath)) {\nString fileName = api.getInfo().getTitle().replaceAll(\" \", \"\") + \".bal\";\noutPath = definitionPath.substring(0, definitionPath.lastIndexOf(File.separator) + 1);\noutPath += fileName;\n}\nswitch (type) {\ncase SKELETON:\nwriteBallerina(context, GeneratorConstants.DEFAULT_SKELETON_DIR, GeneratorConstants.SKELETON_TEMPLATE_NAME,\noutPath);\nbreak;\ncase CONNECTOR:\nwriteBallerina(context, GeneratorConstants.DEFAULT_CONNECTOR_DIR,\nGeneratorConstants.CONNECTOR_TEMPLATE_NAME, outPath);\nbreak;\ncase MOCK:\nwriteBallerina(context, GeneratorConstants.DEFAULT_MOCK_DIR, GeneratorConstants.MOCK_TEMPLATE_NAME,\noutPath);\nbreak;\ndefault:\nreturn;\n}\n}", + "target_code": "case SKELETON:", + 
"method_body_after": "public void generate(GenType type, String definitionPath, String outPath) throws IOException {\nOpenAPI api = new OpenAPIV3Parser().read(definitionPath);\nOpenApiWrapper context = new OpenApiWrapper().buildFromOpenAPI(api).apiPackage(apiPackage);\nif (outPath == null || outPath.isEmpty()) {\nString fileName = api.getInfo().getTitle().replaceAll(\" \", \"\") + \".bal\";\noutPath = definitionPath.substring(0, definitionPath.lastIndexOf(File.separator) + 1);\noutPath += fileName;\n}\nswitch (type) {\ncase SKELETON:\nwriteBallerina(context, GeneratorConstants.DEFAULT_SKELETON_DIR,\nGeneratorConstants.SKELETON_TEMPLATE_NAME, outPath);\nbreak;\ncase CONNECTOR:\nwriteBallerina(context, GeneratorConstants.DEFAULT_CONNECTOR_DIR,\nGeneratorConstants.CONNECTOR_TEMPLATE_NAME, outPath);\nbreak;\ncase MOCK:\nwriteBallerina(context, GeneratorConstants.DEFAULT_MOCK_DIR, GeneratorConstants.MOCK_TEMPLATE_NAME,\noutPath);\nbreak;\ndefault:\nreturn;\n}\n}", + "context_before": "class CodeGenerator {\nprivate String apiPackage;\n/**\n* Generates ballerina source for provided Open API Definition in definitionPath\n*
<p>Method can be used for generating Ballerina service skeletons, mock services and connectors</p>\n*\n* @param type Output type. Following types are supported\n* <ul>\n* <li>skeleton</li>\n* <li>mock</li>\n* <li>connector</li>\n* </ul>
\n* @param definitionPath Input Open Api Definition file path\n* @param outPath Destination file path to save generated source files. If not provided\n* destinationPath will be used as the default destination path\n* @throws IOException when file operations fail\n*/\n/**\n* Write ballerina definition of a object to a file as described by template.\n*\n* @param object Context object to be used by the template parser\n* @param templateDir Directory with all the templates required for generating the source file\n* @param templateName Name of the parent template to be used\n* @param outPath Destination path for writing the resulting source file\n* @throws IOException when file operations fail\n*/\npublic void writeBallerina(Object object, String templateDir, String templateName, String outPath)\nthrows IOException {\nPrintWriter writer = null;\ntry {\nTemplate template = compileTemplate(templateDir, templateName);\nContext context = Context.newBuilder(object).resolver(FieldValueResolver.INSTANCE).build();\nwriter = new PrintWriter(outPath, \"UTF-8\");\nwriter.println(template.apply(context));\n} finally {\nif (writer != null) {\nwriter.close();\n}\n}\n}\nprivate Template compileTemplate(String defaultTemplateDir, String templateName) throws IOException {\nString templatesDirPath = System.getProperty(GeneratorConstants.TEMPLATES_DIR_PATH_KEY, defaultTemplateDir);\nClassPathTemplateLoader cpTemplateLoader = new ClassPathTemplateLoader((templatesDirPath));\nFileTemplateLoader fileTemplateLoader = new FileTemplateLoader(templatesDirPath);\ncpTemplateLoader.setSuffix(GeneratorConstants.TEMPLATES_SUFFIX);\nfileTemplateLoader.setSuffix(GeneratorConstants.TEMPLATES_SUFFIX);\nHandlebars handlebars = new Handlebars().with(cpTemplateLoader, fileTemplateLoader);\nhandlebars.registerHelpers(StringHelpers.class);\nhandlebars.registerHelper(\"equals\", (object, options) -> {\nCharSequence result;\nObject param0 = options.param(0);\nif (param0 == null) {\nthrow new IllegalArgumentException(\"found 'null', expected 'string'\");\n}\nif (object != null && object.toString().equals(param0.toString())) {\nresult = options.fn(options.context);\n} else {\nresult = null;\n}\nreturn result;\n});\nreturn handlebars.compile(templateName);\n}\npublic String getApiPackage() {\nreturn apiPackage;\n}\npublic void setApiPackage(String apiPackage) {\nthis.apiPackage = apiPackage;\n}\n}", + "context_after": "class CodeGenerator {\nprivate String apiPackage;\n/**\n* Generates ballerina source for provided Open API Definition in definitionPath\n*
<p>Method can be used for generating Ballerina service skeletons, mock services and connectors</p>\n*\n* @param type Output type. Following types are supported\n* <ul>\n* <li>skeleton</li>\n* <li>mock</li>\n* <li>connector</li>\n* </ul>
\n* @param definitionPath Input Open Api Definition file path\n* @param outPath Destination file path to save generated source files. If not provided\n* destinationPath will be used as the default destination path\n* @throws IOException when file operations fail\n*/\n/**\n* Write ballerina definition of a object to a file as described by template.\n*\n* @param object Context object to be used by the template parser\n* @param templateDir Directory with all the templates required for generating the source file\n* @param templateName Name of the parent template to be used\n* @param outPath Destination path for writing the resulting source file\n* @throws IOException when file operations fail\n*/\npublic void writeBallerina(Object object, String templateDir, String templateName, String outPath)\nthrows IOException {\nPrintWriter writer = null;\ntry {\nTemplate template = compileTemplate(templateDir, templateName);\nContext context = Context.newBuilder(object).resolver(FieldValueResolver.INSTANCE).build();\nwriter = new PrintWriter(outPath, \"UTF-8\");\nwriter.println(template.apply(context));\n} finally {\nif (writer != null) {\nwriter.close();\n}\n}\n}\nprivate Template compileTemplate(String defaultTemplateDir, String templateName) throws IOException {\nString templatesDirPath = System.getProperty(GeneratorConstants.TEMPLATES_DIR_PATH_KEY, defaultTemplateDir);\nClassPathTemplateLoader cpTemplateLoader = new ClassPathTemplateLoader((templatesDirPath));\nFileTemplateLoader fileTemplateLoader = new FileTemplateLoader(templatesDirPath);\ncpTemplateLoader.setSuffix(GeneratorConstants.TEMPLATES_SUFFIX);\nfileTemplateLoader.setSuffix(GeneratorConstants.TEMPLATES_SUFFIX);\nHandlebars handlebars = new Handlebars().with(cpTemplateLoader, fileTemplateLoader);\nhandlebars.registerHelpers(StringHelpers.class);\nhandlebars.registerHelper(\"equals\", (object, options) -> {\nCharSequence result;\nObject param0 = options.param(0);\nif (param0 == null) {\nthrow new IllegalArgumentException(\"found 'null', expected 'string'\");\n}\nif (object != null && object.toString().equals(param0.toString())) {\nresult = options.fn(options.context);\n} else {\nresult = null;\n}\nreturn result;\n});\nreturn handlebars.compile(templateName);\n}\npublic String getApiPackage() {\nreturn apiPackage;\n}\npublic void setApiPackage(String apiPackage) {\nthis.apiPackage = apiPackage;\n}\n}" + }, + { + "comment": "would be better to inject a clock to this and the same one into StreamingDataflowWorker instead of relying on sleep timing Can you take a look at if that would be not too bad to add? 
Somewhat existing but would be nice to cleanup while modifying", + "method_body": "GetDataResponse getData(GetDataRequest request) {\nboolean isActiveWorkRefresh = true;\nfor (ComputationGetDataRequest computationRequest : request.getRequestsList()) {\nif (!computationRequest.getComputationId().equals(DEFAULT_COMPUTATION_ID)) {\nisActiveWorkRefresh = false;\ncontinue;\n}\nfor (KeyedGetDataRequest keyedRequest : computationRequest.getRequestsList()) {\nif (keyedRequest.getWorkToken() == 0\n|| keyedRequest.getShardingKey() != DEFAULT_SHARDING_KEY\n|| keyedRequest.getValuesToFetchCount() != 0\n|| keyedRequest.getBagsToFetchCount() != 0\n|| keyedRequest.getTagValuePrefixesToFetchCount() != 0\n|| keyedRequest.getWatermarkHoldsToFetchCount() != 0) {\nisActiveWorkRefresh = false;\ncontinue;\n}\nfor (LatencyAttribution la : keyedRequest.getLatencyAttributionList()) {\nEnumMap durations =\ntotalDurations.computeIfAbsent(\nkeyedRequest.getWorkToken(),\n(Long workToken) ->\nnew EnumMap(\nLatencyAttribution.State.class));\nDuration old = durations.get(la.getState());\nDuration cur = Duration.millis(la.getTotalDurationMillis());\nif (old == null || old.isShorterThan(cur)) {\ndurations.put(la.getState(), cur);\n}\n}\n}\n}\nif (!isActiveWorkRefresh) {\nUninterruptibles.sleepUninterruptibly(2000, TimeUnit.MILLISECONDS);\n}\nreturn EMPTY_DATA_RESPONDER.apply(request);\n}", + "target_code": "|| keyedRequest.getValuesToFetchCount() != 0", + "method_body_after": "GetDataResponse getData(GetDataRequest request) {\nif (!isActiveWorkRefresh(request)) {\nreturn responder.apply(request);\n}\nfor (ComputationGetDataRequest computationRequest : request.getRequestsList()) {\nfor (KeyedGetDataRequest keyedRequest : computationRequest.getRequestsList()) {\nfor (LatencyAttribution la : keyedRequest.getLatencyAttributionList()) {\nEnumMap durations =\ntotalDurations.computeIfAbsent(\nkeyedRequest.getWorkToken(),\n(Long workToken) ->\nnew EnumMap(\nLatencyAttribution.State.class));\nDuration cur = Duration.millis(la.getTotalDurationMillis());\ndurations.compute(la.getState(), (s, d) -> d == null || d.isShorterThan(cur) ? cur : d);\n}\n}\n}\nreturn EMPTY_DATA_RESPONDER.apply(request);\n}", + "context_before": "class ActiveWorkRefreshSink {\nMap> totalDurations = new HashMap<>();\nDuration getLatencyAttributionDuration(long workToken, LatencyAttribution.State state) {\nEnumMap durations = totalDurations.get(workToken);\nif (durations == null) {\nreturn Duration.ZERO;\n}\nDuration d = durations.get(state);\nif (d == null) {\nreturn Duration.ZERO;\n}\nreturn d;\n}\n}", + "context_after": "class ActiveWorkRefreshSink {\nprivate final Function responder;\nprivate final Map> totalDurations =\nnew HashMap<>();\nActiveWorkRefreshSink(Function responder) {\nthis.responder = responder;\n}\nDuration getLatencyAttributionDuration(long workToken, LatencyAttribution.State state) {\nEnumMap durations = totalDurations.get(workToken);\nreturn durations == null ? 
Duration.ZERO : durations.getOrDefault(state, Duration.ZERO);\n}\nboolean isActiveWorkRefresh(GetDataRequest request) {\nfor (ComputationGetDataRequest computationRequest : request.getRequestsList()) {\nif (!computationRequest.getComputationId().equals(DEFAULT_COMPUTATION_ID)) {\nreturn false;\n}\nfor (KeyedGetDataRequest keyedRequest : computationRequest.getRequestsList()) {\nif (keyedRequest.getWorkToken() == 0\n|| keyedRequest.getShardingKey() != DEFAULT_SHARDING_KEY\n|| keyedRequest.getValuesToFetchCount() != 0\n|| keyedRequest.getBagsToFetchCount() != 0\n|| keyedRequest.getTagValuePrefixesToFetchCount() != 0\n|| keyedRequest.getWatermarkHoldsToFetchCount() != 0) {\nreturn false;\n}\n}\n}\nreturn true;\n}\n}" + }, + { + "comment": "```suggestion return Files.walk(packagePath).anyMatch(path -> path.toString().endsWith(ProjectConstants.BLANG_SOURCE_EXT)); ```", + "method_body": "public static boolean balFilesExists(Path packagePath) throws IOException {\nif (Files.walk(packagePath).anyMatch(path -> path.toString().endsWith(ProjectConstants.BLANG_SOURCE_EXT))) {\nreturn true;\n}\nreturn false;\n}", + "target_code": "return false;", + "method_body_after": "public static boolean balFilesExists(Path packagePath) throws IOException {\nreturn Files.list(packagePath).anyMatch(path -> path.toString().endsWith(ProjectConstants.BLANG_SOURCE_EXT));\n}", + "context_before": "class CommandUtil {\npublic static final String ORG_NAME = \"ORG_NAME\";\npublic static final String PKG_NAME = \"PKG_NAME\";\npublic static final String DIST_VERSION = \"DIST_VERSION\";\npublic static final String GITIGNORE = \"gitignore\";\npublic static final String DEVCONTAINER = \"devcontainer\";\npublic static final String NEW_CMD_DEFAULTS = \"new_cmd_defaults\";\npublic static final String CREATE_CMD_TEMPLATES = \"create_cmd_templates\";\npublic static final String DEFAULT_TEMPLATE = \"default\";\npublic static final String MAIN_TEMPLATE = \"main\";\npublic static final String FILE_STRING_SEPARATOR = \", \";\nprivate static FileSystem jarFs;\nprivate static Map env;\nprivate static PrintStream errStream;\nprivate static PrintStream outStream;\nprivate static Path homeCache;\nprivate static boolean exitWhenFinish;\nstatic void setPrintStream(PrintStream errStream) {\nCommandUtil.errStream = errStream;\n}\npublic static void initJarFs() {\nURI uri = null;\ntry {\nuri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI();\nif (uri.toString().contains(\"!\")) {\nfinal String[] array = uri.toString().split(\"!\");\nif (null == jarFs) {\nenv = new HashMap<>();\njarFs = FileSystems.newFileSystem(URI.create(array[0]), env);\n}\n}\n} catch (URISyntaxException | IOException e) {\nthrow new AssertionError();\n}\n}\n/**\n* Print command errors with a standard format.\n*\n* @param stream error will be sent to this stream\n* @param error error message\n* @param usage usage if any\n* @param help if the help message should be printed\n*/\npublic static void printError(PrintStream stream, String error, String usage, boolean help) {\nstream.println(\"ballerina: \" + error);\nif (null != usage) {\nstream.println();\nstream.println(\"USAGE:\");\nstream.println(\" \" + usage);\n}\nif (help) {\nstream.println();\nstream.println(\"For more information try --help\");\n}\n}\n/**\n* Exit with error code 1.\n*\n* @param exit Whether to exit or not.\n*/\npublic static void exitError(boolean exit) {\nif (exit) {\nRuntime.getRuntime().exit(1);\n}\n}\nstatic void applyTemplate(String orgName, String templatePkgName, String 
version, String packageName,\nPath projectPath, Path balaCache) {\nPath balaPath = balaCache.resolve(\nProjectUtils.getRelativeBalaPath(orgName, templatePkgName, version, null));\nString platform = findPlatform(balaPath);\nbalaPath = balaCache.resolve(\nProjectUtils.getRelativeBalaPath(orgName, templatePkgName, version, platform));\nif (!Files.exists(balaPath)) {\nCommandUtil.printError(errStream,\n\"unable to find the bala: \" + balaPath,\nnull,\nfalse);\nCommandUtil.exitError(exitWhenFinish);\n}\ntry {\naddModules(balaPath, projectPath, packageName, platform);\n} catch (IOException e) {\nProjectUtils.deleteDirectory(projectPath);\nCommandUtil.printError(errStream,\n\"error occurred while creating the package: \" + e.getMessage(),\nnull,\nfalse);\nCommandUtil.exitError(exitWhenFinish);\n}\n}\nprivate static void addModules(Path balaPath, Path projectPath, String packageName, String platform)\nthrows IOException {\nGson gson = new Gson();\nPath packageJsonPath = balaPath.resolve(PACKAGE_JSON);\nPath dependencyGraphJsonPath = balaPath.resolve(DEPENDENCY_GRAPH_JSON);\nPackageJson templatePackageJson = null;\nDependencyGraphJson templateDependencyGraphJson = null;\ntry (InputStream inputStream = new FileInputStream(String.valueOf(packageJsonPath))) {\nReader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);\ntemplatePackageJson = gson.fromJson(fileReader, PackageJson.class);\n} catch (IOException e) {\nprintError(errStream,\n\"Error while reading the package json file: \" + e.getMessage(),\nnull,\nfalse);\ngetRuntime().exit(1);\n}\nif (dependencyGraphJsonPath.toFile().exists()) {\ntry (InputStream inputStream = new FileInputStream(String.valueOf(dependencyGraphJsonPath))) {\nReader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);\ntemplateDependencyGraphJson = gson.fromJson(fileReader, DependencyGraphJson.class);\n} catch (IOException e) {\nprintError(errStream,\n\"Error while reading the dependency graph json file: \" + e.getMessage(),\nnull,\nfalse);\ngetRuntime().exit(1);\n}\n}\nif (!templatePackageJson.getTemplate()) {\nthrow createLauncherException(\"unable to create the package: \" +\n\"specified package is not a template\");\n}\nPath ballerinaToml = projectPath.resolve(ProjectConstants.BALLERINA_TOML);\nFiles.createDirectories(projectPath);\nFiles.createFile(ballerinaToml);\nwriteBallerinaToml(ballerinaToml, templatePackageJson, packageName, platform);\nif (dependencyGraphJsonPath.toFile().exists()) {\nPath dependenciesToml = projectPath.resolve(DEPENDENCIES_TOML);\nFiles.createFile(dependenciesToml);\nwriteDependenciesToml(projectPath, templateDependencyGraphJson, templatePackageJson);\n}\nPath packageMDFilePath = balaPath.resolve(\"docs\")\n.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME);\nPath toPackageMdPath = projectPath.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME);\nif (Files.exists(packageMDFilePath)) {\nFiles.copy(packageMDFilePath, toPackageMdPath, StandardCopyOption.REPLACE_EXISTING);\n}\ncreateDefaultGitignore(projectPath);\ncreateDefaultDevContainer(projectPath);\nString templatePkgName = templatePackageJson.getName();\nPath modulesRoot = balaPath.resolve(ProjectConstants.MODULES_ROOT);\nPath moduleMdDirRoot = balaPath.resolve(\"docs\").resolve(ProjectConstants.MODULES_ROOT);\nList modulesList;\ntry (Stream pathStream = Files.list(modulesRoot)) {\nmodulesList = pathStream.collect(Collectors.toList());\n}\nfor (Path moduleRoot : modulesList) {\nPath moduleDir = Optional.of(moduleRoot.getFileName()).get();\nPath destDir;\nif 
(moduleDir.toString().equals(templatePkgName)) {\ndestDir = projectPath;\n} else {\nString moduleDirName = moduleDir.toString().split(templatePkgName + ProjectConstants.DOT, 2)[1];\ndestDir = projectPath.resolve(ProjectConstants.MODULES_ROOT).resolve(moduleDirName);\nFiles.createDirectories(destDir);\n}\nFiles.walkFileTree(moduleRoot, new FileUtils.Copy(moduleRoot, destDir));\nPath moduleMdSource = moduleMdDirRoot.resolve(moduleDir).resolve(ProjectConstants.MODULE_MD_FILE_NAME);\nif (Files.exists(moduleMdSource)) {\nFiles.copy(moduleMdSource, destDir.resolve(ProjectConstants.MODULE_MD_FILE_NAME),\nStandardCopyOption.REPLACE_EXISTING);\n}\n}\ncopyIcon(balaPath, projectPath);\ncopyPlatformLibraries(balaPath, projectPath, platform);\ncopyIncludeFiles(balaPath, projectPath, templatePackageJson);\n}\nprivate static void copyIcon(Path balaPath, Path projectPath) {\nPath docsPath = balaPath.resolve(ProjectConstants.BALA_DOCS_DIR);\ntry (Stream pathStream = Files.walk(docsPath, 1)) {\nList icon = pathStream\n.filter(FileSystems.getDefault().getPathMatcher(\"glob:**.png\")::matches)\n.collect(Collectors.toList());\nif (!icon.isEmpty()) {\nPath projectDocsDir = projectPath.resolve(ProjectConstants.BALA_DOCS_DIR);\nFiles.createDirectory(projectDocsDir);\nPath projectIconPath = projectDocsDir.resolve(Optional.of(icon.get(0).getFileName()).get());\nFiles.copy(icon.get(0), projectIconPath, StandardCopyOption.REPLACE_EXISTING);\n}\n} catch (IOException e) {\nprintError(errStream,\n\"Error while retrieving the icon: \" + e.getMessage(),\nnull,\nfalse);\ngetRuntime().exit(1);\n}\n}\nprivate static void copyPlatformLibraries(Path balaPath, Path projectPath, String platform) throws IOException {\nPath platformLibPath = balaPath.resolve(\"platform\").resolve(platform);\nif (Files.exists(platformLibPath)) {\nPath libs = projectPath.resolve(\"libs\");\nFiles.createDirectories(libs);\nFiles.walkFileTree(platformLibPath, new FileUtils.Copy(platformLibPath, libs));\n}\n}\nprivate static void copyIncludeFiles(Path balaPath, Path projectPath, PackageJson templatePackageJson)\nthrows IOException {\nif (templatePackageJson.getInclude() != null) {\nString templatePkgName = templatePackageJson.getName();\nList includePaths = ProjectUtils.getPathsMatchingIncludePatterns(\ntemplatePackageJson.getInclude(), balaPath);\nfor (Path includePath : includePaths) {\nPath moduleNameUpdatedIncludePath = updateModuleDirectoryNaming(includePath, balaPath, templatePkgName);\nPath fromIncludeFilePath = balaPath.resolve(includePath);\nPath toIncludeFilePath = projectPath.resolve(moduleNameUpdatedIncludePath);\nif (Files.notExists(toIncludeFilePath)) {\nFiles.createDirectories(toIncludeFilePath);\nFiles.walkFileTree(fromIncludeFilePath, new FileUtils.Copy(fromIncludeFilePath, toIncludeFilePath));\n}\n}\n}\n}\nprivate static Path updateModuleDirectoryNaming(Path includePath, Path balaPath, String templatePkgName) {\nPath modulesDirPath = balaPath.resolve(ProjectConstants.MODULES_ROOT);\nPath absoluteIncludePath = balaPath.resolve(includePath);\nif (absoluteIncludePath.startsWith(modulesDirPath)) {\nPath moduleRootPath = modulesDirPath.relativize(absoluteIncludePath).subpath(0, 1);\nString moduleDirName = Optional.of(moduleRootPath.getFileName()).get().toString();\nString destinationDirName = moduleDirName.split(templatePkgName + ProjectConstants.DOT, 2)[1];\nPath includePathRelativeToModuleRoot = modulesDirPath.resolve(moduleRootPath)\n.relativize(absoluteIncludePath);\nPath updatedIncludePath = 
Paths.get(ProjectConstants.MODULES_ROOT).resolve(destinationDirName)\n.resolve(includePathRelativeToModuleRoot);\nreturn updatedIncludePath;\n}\nreturn includePath;\n}\n/**\n* Find the bala path for a given template.\n*\n* @param template template name\n*/\nstatic Path findBalaTemplate(String template, Path balaCache) {\nString packageName = findPkgName(template);\nString orgName = findOrg(template);\nString version = findPkgVersion(template);\nif (version != null) {\nPath balaPath = balaCache.resolve(\nProjectUtils.getRelativeBalaPath(orgName, packageName, version, null));\nString platform = findPlatform(balaPath);\nbalaPath = balaCache.resolve(\nProjectUtils.getRelativeBalaPath(orgName, packageName, version, platform));\nif (Files.exists(balaPath)) {\nreturn balaPath;\n} else {\nreturn null;\n}\n} else {\nreturn null;\n}\n}\npublic static void initPackageFromCentral(Path balaCache, Path projectPath, String packageName, String template) {\nSystem.setProperty(CentralClientConstants.ENABLE_OUTPUT_STREAM, \"true\");\nString templatePackageName = findPkgName(template);\nString orgName = findOrg(template);\nString version = findPkgVersion(template);\nPath pkgCacheParent = balaCache.resolve(orgName).resolve(templatePackageName);\ntry {\npullPackageFromRemote(orgName, templatePackageName, version, pkgCacheParent);\n} catch (PackageAlreadyExistsException e) {\nif (version == null) {\nList packageVersions = getPackageVersions(pkgCacheParent);\nPackageVersion latest = findLatest(packageVersions);\nif (latest == null) {\nthrow createLauncherException(\"unable to find package in the filesystem cache.\" +\n\" This is an unexpected error : \" + e.getMessage());\n}\nversion = latest.toString();\n}\n} catch (CentralClientException e) {\nerrStream.println(\"Warning: Unable to pull the package from Ballerina Central: \" + e.getMessage());\nif (findBalaTemplate(template, balaCache) == null) {\nList packageVersions = getPackageVersions(pkgCacheParent);\nPackageVersion latest = findLatest(packageVersions);\nif (latest == null) {\nthrow createLauncherException(\"template not found in filesystem cache.\");\n}\nversion = latest.toString();\n}\n}\nif (version == null) {\nList packageVersions = getPackageVersions(pkgCacheParent);\nPackageVersion latest = findLatest(packageVersions);\nversion = Objects.requireNonNull(latest).toString();\n}\napplyTemplate(orgName, templatePackageName, version, packageName, projectPath, balaCache);\n}\nprivate static void pullPackageFromRemote(String orgName, String packageName, String version, Path destination)\nthrows CentralClientException {\nfor (String supportedPlatform : SUPPORTED_PLATFORMS) {\nSettings settings;\ntry {\nsettings = readSettings();\n} catch (SettingsTomlException e) {\nsettings = Settings.from();\n}\nCentralAPIClient client = new CentralAPIClient(RepoUtils.getRemoteRepoURL(),\ninitializeProxy(settings.getProxy()),\ngetAccessTokenOfCLI(settings));\nclient.pullPackage(orgName, packageName, version, destination, supportedPlatform,\nRepoUtils.getBallerinaVersion(), false);\n}\n}\npublic static void writeBallerinaToml(Path balTomlPath, PackageJson packageJson,\nString packageName, String platform)\nthrows IOException {\nFiles.writeString(balTomlPath, \"[package]\", StandardOpenOption.APPEND);\nFiles.writeString(balTomlPath, \"\\norg = \\\"\" + packageJson.getOrganization() + \"\\\"\",\nStandardOpenOption.APPEND);\nFiles.writeString(balTomlPath, \"\\nname = \\\"\" + packageName + \"\\\"\", StandardOpenOption.APPEND);\nFiles.writeString(balTomlPath, \"\\nversion = 
\\\"\" + packageJson.getVersion() + \"\\\"\",\nStandardOpenOption.APPEND);\nList newModuleNames = packageJson.getExport().stream().map(module ->\nmodule.replaceFirst(packageJson.getName(), packageName)).collect(Collectors.toList());\nStringJoiner stringJoiner = new StringJoiner(\",\");\nfor (String newModuleName : newModuleNames) {\nstringJoiner.add(\"\\\"\" + newModuleName + \"\\\"\");\n}\nFiles.writeString(balTomlPath, \"\\nexport = [\" + stringJoiner + \"]\"\n.replaceFirst(packageJson.getName(), packageName), StandardOpenOption.APPEND);\nFiles.writeString(balTomlPath, \"\\ndistribution = \\\"\" + packageJson.getBallerinaVersion()\n+ \"\\\"\", StandardOpenOption.APPEND);\nwritePackageAttributeArray(balTomlPath, packageJson.getLicenses(), \"license\");\nwritePackageAttributeArray(balTomlPath, packageJson.getAuthors(), \"authors\");\nwritePackageAttributeArray(balTomlPath, packageJson.getKeywords(), \"keywords\");\nwritePackageAttributeValue(balTomlPath, packageJson.getSourceRepository(), \"repository\");\nwritePackageAttributeValue(balTomlPath, packageJson.getVisibility(), \"visibility\");\nwritePackageAttributeValue(balTomlPath, packageJson.getIcon(), \"icon\");\nFiles.writeString(balTomlPath, \"\\n\\n[build-options]\", StandardOpenOption.APPEND);\nFiles.writeString(balTomlPath, \"\\nobservabilityIncluded = true\\n\", StandardOpenOption.APPEND);\nJsonArray platformLibraries = packageJson.getPlatformDependencies();\nif (platformLibraries == null) {\nreturn;\n}\nFiles.writeString(balTomlPath, \"\\n[[platform.\" + platform + \".dependency]]\", StandardOpenOption.APPEND);\nfor (Object dependencies : platformLibraries) {\nJsonObject dependenciesObj = (JsonObject) dependencies;\nString libPath = dependenciesObj.get(\"path\").getAsString();\nPath libName = Optional.of(Paths.get(libPath).getFileName()).get();\nPath libRelPath = Paths.get(\"libs\", libName.toString());\nFiles.writeString(balTomlPath, \"\\npath = \\\"\" + libRelPath + \"\\\"\", StandardOpenOption.APPEND);\nif (dependenciesObj.get(\"artifactId\") != null) {\nString artifactId = dependenciesObj.get(\"artifactId\").getAsString();\nFiles.writeString(balTomlPath, \"\\nartifactId = \\\"\" + artifactId + \"\\\"\",\nStandardOpenOption.APPEND);\n}\nif (dependenciesObj.get(\"groupId\") != null) {\nString groupId = dependenciesObj.get(\"groupId\").getAsString();\nFiles.writeString(balTomlPath, \"\\ngroupId = \\\"\" + groupId + \"\\\"\", StandardOpenOption.APPEND);\n}\nif (dependenciesObj.get(\"version\") != null) {\nString dependencyVersion = dependenciesObj.get(\"version\").getAsString();\nFiles.writeString(balTomlPath, \"\\nversion = \\\"\" + dependencyVersion + \"\\\"\\n\",\nStandardOpenOption.APPEND);\n}\n}\n}\npublic static void writeDependenciesToml(Path projectPath, DependencyGraphJson templateDependencyGraphJson,\nPackageJson templatePackageJson)\nthrows IOException {\nPath depsTomlPath = projectPath.resolve(DEPENDENCIES_TOML);\nString autoGenCode = \"\n\"\\n\" +\n\"\n\"\n\"\\n\";\nFiles.writeString(depsTomlPath, autoGenCode, StandardOpenOption.APPEND);\nString balTomlVersion = \"[ballerina]\\n\" +\n\"dependencies-toml-version = \\\"\" + ProjectConstants.DEPENDENCIES_TOML_VERSION + \"\\\"\\n\" +\n\"\\n\";\nFiles.writeString(depsTomlPath, balTomlVersion, StandardOpenOption.APPEND);\nList currentPkgModules = new ArrayList<>();\nfor (ModuleDependency module : templateDependencyGraphJson.getModuleDependencies()) {\nif (module.getOrg().equals(templatePackageJson.getOrganization())\n&& 
module.getPackageName().equals(templatePackageJson.getName())) {\nList currentPkgModuleDeps = module.getDependencies();\ncurrentPkgModules.addAll(currentPkgModuleDeps);\n}\n}\nStringBuilder pkgDesc = new StringBuilder();\nfor (Dependency packageDependency : templateDependencyGraphJson.getPackageDependencyGraph()) {\nif (templatePackageJson.getOrganization().equals(packageDependency.getOrg())\n&& templatePackageJson.getName().equals(packageDependency.getName())) {\npkgDesc.append(\"[[package]]\\n\")\n.append(\"org = \\\"\").append(packageDependency.getOrg()).append(\"\\\"\\n\")\n.append(\"name = \\\"\").append(ProjectUtils.defaultName(projectPath)).append(\"\\\"\\n\")\n.append(\"version = \\\"\").append(packageDependency.getVersion()).append(\"\\\"\\n\");\npkgDesc.append(getDependenciesArrayContent(packageDependency));\npkgDesc.append(getDependencyModulesArrayContent(\ntemplateDependencyGraphJson.getModuleDependencies(), true, projectPath));\n} else {\npkgDesc.append(\"[[package]]\\n\")\n.append(\"org = \\\"\").append(packageDependency.getOrg()).append(\"\\\"\\n\")\n.append(\"name = \\\"\").append(packageDependency.getName()).append(\"\\\"\\n\")\n.append(\"version = \\\"\").append(packageDependency.getVersion()).append(\"\\\"\\n\");\npkgDesc.append(getDependenciesArrayContent(packageDependency));\nList packageDependencyModules = new ArrayList<>();\nfor (ModuleDependency module : currentPkgModules) {\nif (packageDependency.getOrg().equals(module.getOrg())\n&& packageDependency.getName().equals(module.getPackageName())) {\npackageDependencyModules.add(module);\n}\n}\nif (!packageDependencyModules.isEmpty()) {\npkgDesc.append(getDependencyModulesArrayContent(packageDependencyModules, false, projectPath));\n}\n}\npkgDesc.append(\"\\n\");\n}\nFiles.writeString(depsTomlPath, pkgDesc.toString(), StandardOpenOption.APPEND);\n}\n/**\n* Get formatted dependencies array content for Dependencies.toml dependency.\n*\n* @param packageDependency package dependency\n* @return formatted dependencies array content\n*/\nprivate static String getDependenciesArrayContent(Dependency packageDependency) {\nStringBuilder dependenciesContent = new StringBuilder();\nif (!packageDependency.getDependencies().isEmpty()) {\nfor (Dependency dependency : packageDependency.getDependencies()) {\ndependenciesContent.append(\"\\t{org = \\\"\").append(dependency.getOrg())\n.append(\"\\\", name = \\\"\").append(dependency.getName())\n.append(\"\\\"},\\n\");\n}\nString dependenciesPart = dependenciesContent.toString();\ndependenciesPart = removeLastCharacter(trimStartingWhitespaces(dependenciesPart));\nreturn \"dependencies = [\\n\"\n+ dependenciesPart\n+ \"\\n]\\n\";\n}\nreturn \"\";\n}\n/**\n* Get formatted modules array content for Dependencies.toml dependency.\n* \n* modules = [\n* {org = \"ballerinax\", packageName = \"redis\", moduleName = \"redis\"}\n* ]\n* \n*\n* @param dependencyModules modules of the given dependency package\n* @param isCurrentPackage is modules array generating for current package\n* @param projectPath project path\n* @return formatted modules array content\n*/\nprivate static String getDependencyModulesArrayContent(List dependencyModules,\nboolean isCurrentPackage, Path projectPath) {\nStringBuilder modulesContent = new StringBuilder();\nif (isCurrentPackage) {\nfor (ModuleDependency module : dependencyModules) {\nString currentPkgName = ProjectUtils.defaultName(projectPath).value();\nString modulePkgPart = module.getModuleName().split(\"\\\\.\")[0];\nString currentPkgModuleName = 
module.getModuleName().replaceFirst(modulePkgPart, currentPkgName);\nmodulesContent.append(\"\\t{org = \\\"\").append(module.getOrg())\n.append(\"\\\", packageName = \\\"\").append(currentPkgName)\n.append(\"\\\", moduleName = \\\"\").append(currentPkgModuleName)\n.append(\"\\\"},\\n\");\n}\n} else {\nfor (ModuleDependency module : dependencyModules) {\nmodulesContent.append(\"\\t{org = \\\"\").append(module.getOrg())\n.append(\"\\\", packageName = \\\"\").append(module.getPackageName())\n.append(\"\\\", moduleName = \\\"\").append(module.getModuleName())\n.append(\"\\\"},\\n\");\n}\n}\nString modulesPart = modulesContent.toString();\nmodulesPart = removeLastCharacter(trimStartingWhitespaces(modulesPart));\nreturn \"modules = [\\n\" + modulesPart + \"\\n]\\n\";\n}\n/**\n* Write Ballerina.toml package attribute array from template package.json to new project Ballerina.toml.\n*\n* @param balTomlPath Ballerina.toml path of the new project\n* @param attributeArray package attribute values array\n* @param attributeName package attribute name\n* @throws IOException when error occurs writing to the Ballerina.toml\n*/\nprivate static void writePackageAttributeArray(Path balTomlPath, List attributeArray, String attributeName)\nthrows IOException {\nif (attributeArray != null && !attributeArray.isEmpty()) {\nStringJoiner stringJoiner = new StringJoiner(\",\");\nfor (String attributeElement : attributeArray) {\nstringJoiner.add(\"\\\"\" + attributeElement + \"\\\"\");\n}\nFiles.writeString(balTomlPath, \"\\n\" + attributeName + \" = [\" + stringJoiner + \"]\",\nStandardOpenOption.APPEND);\n}\n}\n/**\n* Write Ballerina.toml package attribute from template package.json to new project Ballerina.toml.\n*\n* @param balTomlPath Ballerina.toml path of the new project\n* @param attributeValue package attribute value\n* @param attributeName package attribute name\n* @throws IOException when error occurs writing to the Ballerina.toml\n*/\nprivate static void writePackageAttributeValue(Path balTomlPath, String attributeValue, String attributeName)\nthrows IOException {\nif (attributeValue != null && !attributeValue.isEmpty()) {\nFiles.writeString(balTomlPath, \"\\n\" + attributeName + \" = \\\"\" + attributeValue + \"\\\"\",\nStandardOpenOption.APPEND);\n}\n}\n/**\n* Find the package name for a given template.\n*\n* @param template template name\n* @return packageName - package name of the module\n*/\npublic static String findPkgName(String template) {\nString[] orgSplit = template.split(\"/\");\nString packageName = \"\";\nString packagePart = (orgSplit.length > 1) ? orgSplit[1] : \"\";\nString[] pkgSplit = packagePart.split(\":\");\npackageName = pkgSplit[0].trim();\nreturn packageName;\n}\n/**\n* Find the organization for a given template.\n*\n* @param template template name\n* @return orgName - org of the module\n*/\npublic static String findOrg(String template) {\nString[] orgSplit = template.split(\"/\");\nreturn orgSplit[0].trim();\n}\n/**\n* Find the package version for a given template.\n*\n* @param template template name\n* @return version - version of the module\n*/\npublic static String findPkgVersion(String template) {\nString[] orgSplit = template.split(\"/\");\nString packagePart = (orgSplit.length > 1) ? 
orgSplit[1] : \"\";\nString[] pkgSplit = packagePart.split(\":\");\nif (pkgSplit.length > 1) {\nreturn pkgSplit[1].trim();\n} else {\nreturn null;\n}\n}\n/**\n* Find the platform of the module for a given template.\n*\n* @param balaPath path to the module\n* @return platform - platform of the module\n*/\npublic static String findPlatform(Path balaPath) {\nString platform = \"\";\nif (!Files.exists(balaPath)) {\nplatform = JvmTarget.JAVA_11.code();\n} else {\nplatform = ANY_PLATFORM;\n}\nreturn platform;\n}\n/**\n* Initialize a new ballerina project in the given path.\n*\n* @param path project path\n* @param packageName name of the package\n* @param template package template\n* @throws IOException If any IO exception occurred\n* @throws URISyntaxException If any URISyntaxException occurred\n*/\npublic static void initPackageByTemplate(Path path, String packageName, String template) throws IOException,\nURISyntaxException {\napplyTemplate(path, template);\nif (template.equalsIgnoreCase(\"lib\")) {\ninitLibPackage(path, packageName);\nPath source = path.resolve(\"lib.bal\");\nFiles.move(source, source.resolveSibling(guessPkgName(packageName, template) + \".bal\"),\nStandardCopyOption.REPLACE_EXISTING);\n} else {\ninitPackage(path, packageName);\n}\ncreateDefaultGitignore(path);\ncreateDefaultDevContainer(path);\n}\nprivate static void createDefaultGitignore(Path path) throws IOException {\nPath gitignore = path.resolve(ProjectConstants.GITIGNORE_FILE_NAME);\nif (Files.notExists(gitignore)) {\nFiles.createFile(gitignore);\n}\nString defaultGitignore = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + \"/\" + GITIGNORE);\nFiles.write(gitignore, defaultGitignore.getBytes(StandardCharsets.UTF_8));\n}\nprivate static void createDefaultDevContainer(Path path) throws IOException {\nPath devContainer = path.resolve(ProjectConstants.DEVCONTAINER);\nif (Files.notExists(devContainer)) {\nFiles.createFile(devContainer);\n}\nString defaultDevContainer = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + \"/\" + DEVCONTAINER);\ndefaultDevContainer = defaultDevContainer.replace(\"latest\", RepoUtils.getBallerinaVersion());\nFiles.write(devContainer, defaultDevContainer.getBytes(StandardCharsets.UTF_8));\n}\n/**\n* Get the list of templates.\n*\n* @return list of templates\n*/\npublic static List getTemplates() {\ntry {\nPath templateDir = getTemplatePath();\nStream walk = Files.walk(templateDir, 1);\nList templates = walk.filter(Files::isDirectory)\n.filter(directory -> !templateDir.equals(directory))\n.filter(directory -> directory.getFileName() != null)\n.map(directory -> directory.getFileName())\n.map(fileName -> fileName.toString())\n.collect(Collectors.toList());\nif (null != jarFs) {\nreturn templates.stream().map(t -> t\n.replace(jarFs.getSeparator(), \"\"))\n.collect(Collectors.toList());\n} else {\nreturn templates;\n}\n} catch (IOException | URISyntaxException e) {\nreturn new ArrayList();\n}\n}\n/**\n* Get the path to the given template.\n*\n* @return path of the given template\n* @throws URISyntaxException if any URISyntaxException occured\n*/\nprivate static Path getTemplatePath() throws URISyntaxException {\nURI uri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI();\nif (uri.toString().contains(\"!\")) {\nfinal String[] array = uri.toString().split(\"!\");\nreturn jarFs.getPath(array[1]);\n} else {\nreturn Paths.get(uri);\n}\n}\n/**\n* Apply the template to the created module.\n*\n* @param modulePath path to the module\n* @param template template name\n* @throws IOException 
if any IOException occurred\n* @throws URISyntaxException if any URISyntaxException occurred\n*/\npublic static void applyTemplate(Path modulePath, String template) throws IOException, URISyntaxException {\nPath templateDir = getTemplatePath().resolve(template);\nif (template.equalsIgnoreCase(MAIN_TEMPLATE)) {\ntemplateDir = getTemplatePath().resolve(DEFAULT_TEMPLATE);\nPath tempDirTest = getTemplatePath().resolve(MAIN_TEMPLATE);\nFiles.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));\nFiles.walkFileTree(tempDirTest, new FileUtils.Copy(tempDirTest, modulePath));\n} else {\nFiles.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));\n}\n}\n/**\n* Initialize a new ballerina project in the given path.\n*\n* @param path Project path\n* @throws IOException If any IO exception occurred\n*/\npublic static void initPackage(Path path, String packageName) throws IOException {\nPath ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);\nFiles.createFile(ballerinaToml);\nString defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + \"/\" + \"manifest-app.toml\");\ndefaultManifest = defaultManifest\n.replaceAll(ORG_NAME, ProjectUtils.guessOrgName())\n.replaceAll(PKG_NAME, guessPkgName(packageName, \"app\"))\n.replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());\nFiles.write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));\n}\nprivate static void initLibPackage(Path path, String packageName) throws IOException {\nPath ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);\nFiles.createFile(ballerinaToml);\nString defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + \"/\" + \"manifest-lib.toml\");\ndefaultManifest = defaultManifest.replaceAll(ORG_NAME, ProjectUtils.guessOrgName())\n.replaceAll(PKG_NAME, guessPkgName(packageName, \"lib\"))\n.replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());\nwrite(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));\nString packageMd = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + \"/Package.md\");\nwrite(path.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME), packageMd.getBytes(StandardCharsets.UTF_8));\n}\nprotected static PackageVersion findLatest(List packageVersions) {\nif (packageVersions.isEmpty()) {\nreturn null;\n}\nPackageVersion latestVersion = packageVersions.get(0);\nfor (PackageVersion pkgVersion : packageVersions) {\nlatestVersion = getLatest(latestVersion, pkgVersion);\n}\nreturn latestVersion;\n}\nprotected static PackageVersion getLatest(PackageVersion v1, PackageVersion v2) {\nSemanticVersion semVer1 = v1.value();\nSemanticVersion semVer2 = v2.value();\nboolean isV1PreReleaseVersion = semVer1.isPreReleaseVersion();\nboolean isV2PreReleaseVersion = semVer2.isPreReleaseVersion();\nif (isV1PreReleaseVersion ^ isV2PreReleaseVersion) {\nreturn isV1PreReleaseVersion ? v2 : v1;\n} else {\nreturn semVer1.greaterThanOrEqualTo(semVer2) ? 
v1 : v2;\n}\n}\npublic static List getPackageVersions(Path balaPackagePath) {\nList versions = new ArrayList<>();\nif (Files.exists(balaPackagePath)) {\nStream collectVersions;\ntry {\ncollectVersions = Files.list(balaPackagePath);\n} catch (IOException e) {\nthrow new RuntimeException(\"Error while accessing Distribution cache: \" + e.getMessage());\n}\nversions.addAll(collectVersions.collect(Collectors.toList()));\n}\nreturn pathToVersions(versions);\n}\nprotected static List pathToVersions(List versions) {\nList availableVersions = new ArrayList<>();\nversions.stream().map(path -> Optional.ofNullable(path)\n.map(Path::getFileName)\n.map(Path::toString)\n.orElse(\"0.0.0\")).forEach(version -> {\ntry {\navailableVersions.add(PackageVersion.from(version));\n} catch (ProjectException ignored) {\n}\n});\nreturn availableVersions;\n}\n/**\n* Remove starting whitespaces of a string.\n*\n* @param str given string\n* @return starting whitespaces removed string\n*/\nprivate static String trimStartingWhitespaces(String str) {\nreturn str.replaceFirst(\"\\\\s++$\", \"\");\n}\n/**\n* Remove last character of a string.\n*\n* @param str given string\n* @return last character removed string\n*/\nprivate static String removeLastCharacter(String str) {\nreturn str.substring(0, str.length() - 1);\n}\n/**\n* Check if files of the given template exist in a given path.\n*\n* @param template given string\n* @param packagePath given path\n* @throws URISyntaxException if URI syntax exception occurred\n* @throws IOException if IO exception occurred\n*/\npublic static String checkTemplateFilesExists(String template, Path packagePath) throws URISyntaxException,\nIOException {\nPath templateDir = getTemplatePath().resolve(template);\nif (template.equalsIgnoreCase(MAIN_TEMPLATE)) {\ntemplateDir = getTemplatePath().resolve(DEFAULT_TEMPLATE);\nPath tempDirTest = getTemplatePath().resolve(MAIN_TEMPLATE);\ncheckFilesExists(packagePath, tempDirTest);\n}\nreturn checkFilesExists(packagePath, templateDir);\n}\n/**\n* Check if a set of files in one location exists in another.\n*\n* @param packagePath given path\n* @param templatesPath given path\n* @throws IOException if IO exception occurred\n*/\nprivate static String checkFilesExists(Path packagePath, Path templatesPath) throws IOException {\nStream paths = Files.list(templatesPath);\nList templateFilePathList = paths.collect(Collectors.toList());\nString existingFiles = \"\";\nfor (Path path : templateFilePathList) {\nString fileName = path.getFileName().toString();\nif (Files.exists(packagePath.resolve(fileName))) {\nexistingFiles += fileName + FILE_STRING_SEPARATOR;\n}\n}\nreturn existingFiles;\n}\n/**\n* Check if common files of a package exist in a given path.\n*\n* @param packagePath given path\n*/\npublic static String checkPackageFilesExists(Path packagePath) {\nString[] packageFiles = {DEPENDENCIES_TOML, ProjectConstants.PACKAGE_MD_FILE_NAME,\nProjectConstants.MODULE_MD_FILE_NAME, ProjectConstants.MODULES_ROOT, ProjectConstants.TEST_DIR_NAME,\nProjectConstants.GITIGNORE_FILE_NAME, ProjectConstants.DEVCONTAINER};\nString existingFiles = \"\";\nfor (String file : packageFiles) {\nif (Files.exists(packagePath.resolve(file))) {\nexistingFiles += file + FILE_STRING_SEPARATOR;\n}\n}\nreturn existingFiles;\n}\n/**\n* Check if .bal files exist in a given path.\n*\n* @param packagePath given path\n* @return error message if files exists\n*/\n}", + "context_after": "class CommandUtil {\npublic static final String ORG_NAME = \"ORG_NAME\";\npublic static final String 
PKG_NAME = \"PKG_NAME\";\npublic static final String DIST_VERSION = \"DIST_VERSION\";\npublic static final String GITIGNORE = \"gitignore\";\npublic static final String DEVCONTAINER = \"devcontainer\";\npublic static final String NEW_CMD_DEFAULTS = \"new_cmd_defaults\";\npublic static final String CREATE_CMD_TEMPLATES = \"create_cmd_templates\";\npublic static final String DEFAULT_TEMPLATE = \"default\";\npublic static final String MAIN_TEMPLATE = \"main\";\npublic static final String FILE_STRING_SEPARATOR = \", \";\nprivate static FileSystem jarFs;\nprivate static Map env;\nprivate static PrintStream errStream;\nprivate static PrintStream outStream;\nprivate static Path homeCache;\nprivate static boolean exitWhenFinish;\nstatic void setPrintStream(PrintStream errStream) {\nCommandUtil.errStream = errStream;\n}\npublic static void initJarFs() {\nURI uri = null;\ntry {\nuri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI();\nif (uri.toString().contains(\"!\")) {\nfinal String[] array = uri.toString().split(\"!\");\nif (null == jarFs) {\nenv = new HashMap<>();\njarFs = FileSystems.newFileSystem(URI.create(array[0]), env);\n}\n}\n} catch (URISyntaxException | IOException e) {\nthrow new AssertionError();\n}\n}\n/**\n* Print command errors with a standard format.\n*\n* @param stream error will be sent to this stream\n* @param error error message\n* @param usage usage if any\n* @param help if the help message should be printed\n*/\npublic static void printError(PrintStream stream, String error, String usage, boolean help) {\nstream.println(\"ballerina: \" + error);\nif (null != usage) {\nstream.println();\nstream.println(\"USAGE:\");\nstream.println(\" \" + usage);\n}\nif (help) {\nstream.println();\nstream.println(\"For more information try --help\");\n}\n}\n/**\n* Exit with error code 1.\n*\n* @param exit Whether to exit or not.\n*/\npublic static void exitError(boolean exit) {\nif (exit) {\nRuntime.getRuntime().exit(1);\n}\n}\nstatic void applyTemplate(String orgName, String templatePkgName, String version, String packageName,\nPath projectPath, Path balaCache) {\nPath balaPath = balaCache.resolve(\nProjectUtils.getRelativeBalaPath(orgName, templatePkgName, version, null));\nString platform = findPlatform(balaPath);\nbalaPath = balaCache.resolve(\nProjectUtils.getRelativeBalaPath(orgName, templatePkgName, version, platform));\nif (!Files.exists(balaPath)) {\nCommandUtil.printError(errStream,\n\"unable to find the bala: \" + balaPath,\nnull,\nfalse);\nCommandUtil.exitError(exitWhenFinish);\n}\ntry {\naddModules(balaPath, projectPath, packageName, platform);\n} catch (IOException e) {\nProjectUtils.deleteDirectory(projectPath);\nCommandUtil.printError(errStream,\n\"error occurred while creating the package: \" + e.getMessage(),\nnull,\nfalse);\nCommandUtil.exitError(exitWhenFinish);\n}\n}\nprivate static void addModules(Path balaPath, Path projectPath, String packageName, String platform)\nthrows IOException {\nGson gson = new Gson();\nPath packageJsonPath = balaPath.resolve(PACKAGE_JSON);\nPath dependencyGraphJsonPath = balaPath.resolve(DEPENDENCY_GRAPH_JSON);\nPackageJson templatePackageJson = null;\nDependencyGraphJson templateDependencyGraphJson = null;\ntry (InputStream inputStream = new FileInputStream(String.valueOf(packageJsonPath))) {\nReader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);\ntemplatePackageJson = gson.fromJson(fileReader, PackageJson.class);\n} catch (IOException e) {\nprintError(errStream,\n\"Error while reading 
the package json file: \" + e.getMessage(),\nnull,\nfalse);\ngetRuntime().exit(1);\n}\nif (dependencyGraphJsonPath.toFile().exists()) {\ntry (InputStream inputStream = new FileInputStream(String.valueOf(dependencyGraphJsonPath))) {\nReader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);\ntemplateDependencyGraphJson = gson.fromJson(fileReader, DependencyGraphJson.class);\n} catch (IOException e) {\nprintError(errStream,\n\"Error while reading the dependency graph json file: \" + e.getMessage(),\nnull,\nfalse);\ngetRuntime().exit(1);\n}\n}\nif (!templatePackageJson.getTemplate()) {\nthrow createLauncherException(\"unable to create the package: \" +\n\"specified package is not a template\");\n}\nPath ballerinaToml = projectPath.resolve(ProjectConstants.BALLERINA_TOML);\nFiles.createDirectories(projectPath);\nFiles.createFile(ballerinaToml);\nwriteBallerinaToml(ballerinaToml, templatePackageJson, packageName, platform);\nif (dependencyGraphJsonPath.toFile().exists()) {\nPath dependenciesToml = projectPath.resolve(DEPENDENCIES_TOML);\nFiles.createFile(dependenciesToml);\nwriteDependenciesToml(projectPath, templateDependencyGraphJson, templatePackageJson);\n}\nPath packageMDFilePath = balaPath.resolve(\"docs\")\n.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME);\nPath toPackageMdPath = projectPath.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME);\nif (Files.exists(packageMDFilePath)) {\nFiles.copy(packageMDFilePath, toPackageMdPath, StandardCopyOption.REPLACE_EXISTING);\n}\ncreateDefaultGitignore(projectPath);\ncreateDefaultDevContainer(projectPath);\nString templatePkgName = templatePackageJson.getName();\nPath modulesRoot = balaPath.resolve(ProjectConstants.MODULES_ROOT);\nPath moduleMdDirRoot = balaPath.resolve(\"docs\").resolve(ProjectConstants.MODULES_ROOT);\nList modulesList;\ntry (Stream pathStream = Files.list(modulesRoot)) {\nmodulesList = pathStream.collect(Collectors.toList());\n}\nfor (Path moduleRoot : modulesList) {\nPath moduleDir = Optional.of(moduleRoot.getFileName()).get();\nPath destDir;\nif (moduleDir.toString().equals(templatePkgName)) {\ndestDir = projectPath;\n} else {\nString moduleDirName = moduleDir.toString().split(templatePkgName + ProjectConstants.DOT, 2)[1];\ndestDir = projectPath.resolve(ProjectConstants.MODULES_ROOT).resolve(moduleDirName);\nFiles.createDirectories(destDir);\n}\nFiles.walkFileTree(moduleRoot, new FileUtils.Copy(moduleRoot, destDir));\nPath moduleMdSource = moduleMdDirRoot.resolve(moduleDir).resolve(ProjectConstants.MODULE_MD_FILE_NAME);\nif (Files.exists(moduleMdSource)) {\nFiles.copy(moduleMdSource, destDir.resolve(ProjectConstants.MODULE_MD_FILE_NAME),\nStandardCopyOption.REPLACE_EXISTING);\n}\n}\ncopyIcon(balaPath, projectPath);\ncopyPlatformLibraries(balaPath, projectPath, platform);\ncopyIncludeFiles(balaPath, projectPath, templatePackageJson);\n}\nprivate static void copyIcon(Path balaPath, Path projectPath) {\nPath docsPath = balaPath.resolve(ProjectConstants.BALA_DOCS_DIR);\ntry (Stream pathStream = Files.walk(docsPath, 1)) {\nList icon = pathStream\n.filter(FileSystems.getDefault().getPathMatcher(\"glob:**.png\")::matches)\n.collect(Collectors.toList());\nif (!icon.isEmpty()) {\nPath projectDocsDir = projectPath.resolve(ProjectConstants.BALA_DOCS_DIR);\nFiles.createDirectory(projectDocsDir);\nPath projectIconPath = projectDocsDir.resolve(Optional.of(icon.get(0).getFileName()).get());\nFiles.copy(icon.get(0), projectIconPath, StandardCopyOption.REPLACE_EXISTING);\n}\n} catch (IOException e) 
{\nprintError(errStream,\n\"Error while retrieving the icon: \" + e.getMessage(),\nnull,\nfalse);\ngetRuntime().exit(1);\n}\n}\nprivate static void copyPlatformLibraries(Path balaPath, Path projectPath, String platform) throws IOException {\nPath platformLibPath = balaPath.resolve(\"platform\").resolve(platform);\nif (Files.exists(platformLibPath)) {\nPath libs = projectPath.resolve(\"libs\");\nFiles.createDirectories(libs);\nFiles.walkFileTree(platformLibPath, new FileUtils.Copy(platformLibPath, libs));\n}\n}\nprivate static void copyIncludeFiles(Path balaPath, Path projectPath, PackageJson templatePackageJson)\nthrows IOException {\nif (templatePackageJson.getInclude() != null) {\nString templatePkgName = templatePackageJson.getName();\nList includePaths = ProjectUtils.getPathsMatchingIncludePatterns(\ntemplatePackageJson.getInclude(), balaPath);\nfor (Path includePath : includePaths) {\nPath moduleNameUpdatedIncludePath = updateModuleDirectoryNaming(includePath, balaPath, templatePkgName);\nPath fromIncludeFilePath = balaPath.resolve(includePath);\nPath toIncludeFilePath = projectPath.resolve(moduleNameUpdatedIncludePath);\nif (Files.notExists(toIncludeFilePath)) {\nFiles.createDirectories(toIncludeFilePath);\nFiles.walkFileTree(fromIncludeFilePath, new FileUtils.Copy(fromIncludeFilePath, toIncludeFilePath));\n}\n}\n}\n}\nprivate static Path updateModuleDirectoryNaming(Path includePath, Path balaPath, String templatePkgName) {\nPath modulesDirPath = balaPath.resolve(ProjectConstants.MODULES_ROOT);\nPath absoluteIncludePath = balaPath.resolve(includePath);\nif (absoluteIncludePath.startsWith(modulesDirPath)) {\nPath moduleRootPath = modulesDirPath.relativize(absoluteIncludePath).subpath(0, 1);\nString moduleDirName = Optional.of(moduleRootPath.getFileName()).get().toString();\nString destinationDirName = moduleDirName.split(templatePkgName + ProjectConstants.DOT, 2)[1];\nPath includePathRelativeToModuleRoot = modulesDirPath.resolve(moduleRootPath)\n.relativize(absoluteIncludePath);\nPath updatedIncludePath = Paths.get(ProjectConstants.MODULES_ROOT).resolve(destinationDirName)\n.resolve(includePathRelativeToModuleRoot);\nreturn updatedIncludePath;\n}\nreturn includePath;\n}\n/**\n* Find the bala path for a given template.\n*\n* @param template template name\n*/\nstatic Path findBalaTemplate(String template, Path balaCache) {\nString packageName = findPkgName(template);\nString orgName = findOrg(template);\nString version = findPkgVersion(template);\nif (version != null) {\nPath balaPath = balaCache.resolve(\nProjectUtils.getRelativeBalaPath(orgName, packageName, version, null));\nString platform = findPlatform(balaPath);\nbalaPath = balaCache.resolve(\nProjectUtils.getRelativeBalaPath(orgName, packageName, version, platform));\nif (Files.exists(balaPath)) {\nreturn balaPath;\n} else {\nreturn null;\n}\n} else {\nreturn null;\n}\n}\npublic static void initPackageFromCentral(Path balaCache, Path projectPath, String packageName, String template) {\nSystem.setProperty(CentralClientConstants.ENABLE_OUTPUT_STREAM, \"true\");\nString templatePackageName = findPkgName(template);\nString orgName = findOrg(template);\nString version = findPkgVersion(template);\nPath pkgCacheParent = balaCache.resolve(orgName).resolve(templatePackageName);\ntry {\npullPackageFromRemote(orgName, templatePackageName, version, pkgCacheParent);\n} catch (PackageAlreadyExistsException e) {\nif (version == null) {\nList packageVersions = getPackageVersions(pkgCacheParent);\nPackageVersion latest = 
findLatest(packageVersions);\nif (latest == null) {\nthrow createLauncherException(\"unable to find package in the filesystem cache.\" +\n\" This is an unexpected error : \" + e.getMessage());\n}\nversion = latest.toString();\n}\n} catch (CentralClientException e) {\nerrStream.println(\"Warning: Unable to pull the package from Ballerina Central: \" + e.getMessage());\nif (findBalaTemplate(template, balaCache) == null) {\nList packageVersions = getPackageVersions(pkgCacheParent);\nPackageVersion latest = findLatest(packageVersions);\nif (latest == null) {\nthrow createLauncherException(\"template not found in filesystem cache.\");\n}\nversion = latest.toString();\n}\n}\nif (version == null) {\nList packageVersions = getPackageVersions(pkgCacheParent);\nPackageVersion latest = findLatest(packageVersions);\nversion = Objects.requireNonNull(latest).toString();\n}\napplyTemplate(orgName, templatePackageName, version, packageName, projectPath, balaCache);\n}\nprivate static void pullPackageFromRemote(String orgName, String packageName, String version, Path destination)\nthrows CentralClientException {\nfor (String supportedPlatform : SUPPORTED_PLATFORMS) {\nSettings settings;\ntry {\nsettings = readSettings();\n} catch (SettingsTomlException e) {\nsettings = Settings.from();\n}\nCentralAPIClient client = new CentralAPIClient(RepoUtils.getRemoteRepoURL(),\ninitializeProxy(settings.getProxy()),\ngetAccessTokenOfCLI(settings));\nclient.pullPackage(orgName, packageName, version, destination, supportedPlatform,\nRepoUtils.getBallerinaVersion(), false);\n}\n}\npublic static void writeBallerinaToml(Path balTomlPath, PackageJson packageJson,\nString packageName, String platform)\nthrows IOException {\nFiles.writeString(balTomlPath, \"[package]\", StandardOpenOption.APPEND);\nFiles.writeString(balTomlPath, \"\\norg = \\\"\" + packageJson.getOrganization() + \"\\\"\",\nStandardOpenOption.APPEND);\nFiles.writeString(balTomlPath, \"\\nname = \\\"\" + packageName + \"\\\"\", StandardOpenOption.APPEND);\nFiles.writeString(balTomlPath, \"\\nversion = \\\"\" + packageJson.getVersion() + \"\\\"\",\nStandardOpenOption.APPEND);\nList newModuleNames = packageJson.getExport().stream().map(module ->\nmodule.replaceFirst(packageJson.getName(), packageName)).collect(Collectors.toList());\nStringJoiner stringJoiner = new StringJoiner(\",\");\nfor (String newModuleName : newModuleNames) {\nstringJoiner.add(\"\\\"\" + newModuleName + \"\\\"\");\n}\nFiles.writeString(balTomlPath, \"\\nexport = [\" + stringJoiner + \"]\"\n.replaceFirst(packageJson.getName(), packageName), StandardOpenOption.APPEND);\nFiles.writeString(balTomlPath, \"\\ndistribution = \\\"\" + packageJson.getBallerinaVersion()\n+ \"\\\"\", StandardOpenOption.APPEND);\nwritePackageAttributeArray(balTomlPath, packageJson.getLicenses(), \"license\");\nwritePackageAttributeArray(balTomlPath, packageJson.getAuthors(), \"authors\");\nwritePackageAttributeArray(balTomlPath, packageJson.getKeywords(), \"keywords\");\nwritePackageAttributeValue(balTomlPath, packageJson.getSourceRepository(), \"repository\");\nwritePackageAttributeValue(balTomlPath, packageJson.getVisibility(), \"visibility\");\nwritePackageAttributeValue(balTomlPath, packageJson.getIcon(), \"icon\");\nFiles.writeString(balTomlPath, \"\\n\\n[build-options]\", StandardOpenOption.APPEND);\nFiles.writeString(balTomlPath, \"\\nobservabilityIncluded = true\\n\", StandardOpenOption.APPEND);\nJsonArray platformLibraries = packageJson.getPlatformDependencies();\nif (platformLibraries == null) 
{\nreturn;\n}\nFiles.writeString(balTomlPath, \"\\n[[platform.\" + platform + \".dependency]]\", StandardOpenOption.APPEND);\nfor (Object dependencies : platformLibraries) {\nJsonObject dependenciesObj = (JsonObject) dependencies;\nString libPath = dependenciesObj.get(\"path\").getAsString();\nPath libName = Optional.of(Paths.get(libPath).getFileName()).get();\nPath libRelPath = Paths.get(\"libs\", libName.toString());\nFiles.writeString(balTomlPath, \"\\npath = \\\"\" + libRelPath + \"\\\"\", StandardOpenOption.APPEND);\nif (dependenciesObj.get(\"artifactId\") != null) {\nString artifactId = dependenciesObj.get(\"artifactId\").getAsString();\nFiles.writeString(balTomlPath, \"\\nartifactId = \\\"\" + artifactId + \"\\\"\",\nStandardOpenOption.APPEND);\n}\nif (dependenciesObj.get(\"groupId\") != null) {\nString groupId = dependenciesObj.get(\"groupId\").getAsString();\nFiles.writeString(balTomlPath, \"\\ngroupId = \\\"\" + groupId + \"\\\"\", StandardOpenOption.APPEND);\n}\nif (dependenciesObj.get(\"version\") != null) {\nString dependencyVersion = dependenciesObj.get(\"version\").getAsString();\nFiles.writeString(balTomlPath, \"\\nversion = \\\"\" + dependencyVersion + \"\\\"\\n\",\nStandardOpenOption.APPEND);\n}\n}\n}\npublic static void writeDependenciesToml(Path projectPath, DependencyGraphJson templateDependencyGraphJson,\nPackageJson templatePackageJson)\nthrows IOException {\nPath depsTomlPath = projectPath.resolve(DEPENDENCIES_TOML);\nString autoGenCode = \"# AUTO-GENERATED FILE. DO NOT MODIFY.\\n\" +\n\"\\n\" +\n\"# This file is auto-generated by Ballerina for managing dependency versions.\\n\" +\n\"# It should not be modified by hand.\\n\" +\n\"\\n\";\nFiles.writeString(depsTomlPath, autoGenCode, StandardOpenOption.APPEND);\nString balTomlVersion = \"[ballerina]\\n\" +\n\"dependencies-toml-version = \\\"\" + ProjectConstants.DEPENDENCIES_TOML_VERSION + \"\\\"\\n\" +\n\"\\n\";\nFiles.writeString(depsTomlPath, balTomlVersion, StandardOpenOption.APPEND);\nList<ModuleDependency> currentPkgModules = new ArrayList<>();\nfor (ModuleDependency module : templateDependencyGraphJson.getModuleDependencies()) {\nif (module.getOrg().equals(templatePackageJson.getOrganization())\n&& module.getPackageName().equals(templatePackageJson.getName())) {\nList<ModuleDependency> currentPkgModuleDeps = module.getDependencies();\ncurrentPkgModules.addAll(currentPkgModuleDeps);\n}\n}\nStringBuilder pkgDesc = new StringBuilder();\nfor (Dependency packageDependency : templateDependencyGraphJson.getPackageDependencyGraph()) {\nif (templatePackageJson.getOrganization().equals(packageDependency.getOrg())\n&& templatePackageJson.getName().equals(packageDependency.getName())) {\npkgDesc.append(\"[[package]]\\n\")\n.append(\"org = \\\"\").append(packageDependency.getOrg()).append(\"\\\"\\n\")\n.append(\"name = \\\"\").append(ProjectUtils.defaultName(projectPath)).append(\"\\\"\\n\")\n.append(\"version = \\\"\").append(packageDependency.getVersion()).append(\"\\\"\\n\");\npkgDesc.append(getDependenciesArrayContent(packageDependency));\npkgDesc.append(getDependencyModulesArrayContent(\ntemplateDependencyGraphJson.getModuleDependencies(), true, projectPath));\n} else {\npkgDesc.append(\"[[package]]\\n\")\n.append(\"org = \\\"\").append(packageDependency.getOrg()).append(\"\\\"\\n\")\n.append(\"name = \\\"\").append(packageDependency.getName()).append(\"\\\"\\n\")\n.append(\"version = \\\"\").append(packageDependency.getVersion()).append(\"\\\"\\n\");\npkgDesc.append(getDependenciesArrayContent(packageDependency));\nList<ModuleDependency> packageDependencyModules = new ArrayList<>();\nfor (ModuleDependency module : currentPkgModules) {\nif (packageDependency.getOrg().equals(module.getOrg())\n&&
packageDependency.getName().equals(module.getPackageName())) {\npackageDependencyModules.add(module);\n}\n}\nif (!packageDependencyModules.isEmpty()) {\npkgDesc.append(getDependencyModulesArrayContent(packageDependencyModules, false, projectPath));\n}\n}\npkgDesc.append(\"\\n\");\n}\nFiles.writeString(depsTomlPath, pkgDesc.toString(), StandardOpenOption.APPEND);\n}\n/**\n* Get formatted dependencies array content for Dependencies.toml dependency.\n*\n* @param packageDependency package dependency\n* @return formatted dependencies array content\n*/\nprivate static String getDependenciesArrayContent(Dependency packageDependency) {\nStringBuilder dependenciesContent = new StringBuilder();\nif (!packageDependency.getDependencies().isEmpty()) {\nfor (Dependency dependency : packageDependency.getDependencies()) {\ndependenciesContent.append(\"\\t{org = \\\"\").append(dependency.getOrg())\n.append(\"\\\", name = \\\"\").append(dependency.getName())\n.append(\"\\\"},\\n\");\n}\nString dependenciesPart = dependenciesContent.toString();\ndependenciesPart = removeLastCharacter(trimStartingWhitespaces(dependenciesPart));\nreturn \"dependencies = [\\n\"\n+ dependenciesPart\n+ \"\\n]\\n\";\n}\nreturn \"\";\n}\n/**\n* Get formatted modules array content for Dependencies.toml dependency.\n* \n* modules = [\n* {org = \"ballerinax\", packageName = \"redis\", moduleName = \"redis\"}\n* ]\n* \n*\n* @param dependencyModules modules of the given dependency package\n* @param isCurrentPackage is modules array generating for current package\n* @param projectPath project path\n* @return formatted modules array content\n*/\nprivate static String getDependencyModulesArrayContent(List dependencyModules,\nboolean isCurrentPackage, Path projectPath) {\nStringBuilder modulesContent = new StringBuilder();\nif (isCurrentPackage) {\nfor (ModuleDependency module : dependencyModules) {\nString currentPkgName = ProjectUtils.defaultName(projectPath).value();\nString modulePkgPart = module.getModuleName().split(\"\\\\.\")[0];\nString currentPkgModuleName = module.getModuleName().replaceFirst(modulePkgPart, currentPkgName);\nmodulesContent.append(\"\\t{org = \\\"\").append(module.getOrg())\n.append(\"\\\", packageName = \\\"\").append(currentPkgName)\n.append(\"\\\", moduleName = \\\"\").append(currentPkgModuleName)\n.append(\"\\\"},\\n\");\n}\n} else {\nfor (ModuleDependency module : dependencyModules) {\nmodulesContent.append(\"\\t{org = \\\"\").append(module.getOrg())\n.append(\"\\\", packageName = \\\"\").append(module.getPackageName())\n.append(\"\\\", moduleName = \\\"\").append(module.getModuleName())\n.append(\"\\\"},\\n\");\n}\n}\nString modulesPart = modulesContent.toString();\nmodulesPart = removeLastCharacter(trimStartingWhitespaces(modulesPart));\nreturn \"modules = [\\n\" + modulesPart + \"\\n]\\n\";\n}\n/**\n* Write Ballerina.toml package attribute array from template package.json to new project Ballerina.toml.\n*\n* @param balTomlPath Ballerina.toml path of the new project\n* @param attributeArray package attribute values array\n* @param attributeName package attribute name\n* @throws IOException when error occurs writing to the Ballerina.toml\n*/\nprivate static void writePackageAttributeArray(Path balTomlPath, List attributeArray, String attributeName)\nthrows IOException {\nif (attributeArray != null && !attributeArray.isEmpty()) {\nStringJoiner stringJoiner = new StringJoiner(\",\");\nfor (String attributeElement : attributeArray) {\nstringJoiner.add(\"\\\"\" + attributeElement + 
\"\\\"\");\n}\nFiles.writeString(balTomlPath, \"\\n\" + attributeName + \" = [\" + stringJoiner + \"]\",\nStandardOpenOption.APPEND);\n}\n}\n/**\n* Write Ballerina.toml package attribute from template package.json to new project Ballerina.toml.\n*\n* @param balTomlPath Ballerina.toml path of the new project\n* @param attributeValue package attribute value\n* @param attributeName package attribute name\n* @throws IOException when error occurs writing to the Ballerina.toml\n*/\nprivate static void writePackageAttributeValue(Path balTomlPath, String attributeValue, String attributeName)\nthrows IOException {\nif (attributeValue != null && !attributeValue.isEmpty()) {\nFiles.writeString(balTomlPath, \"\\n\" + attributeName + \" = \\\"\" + attributeValue + \"\\\"\",\nStandardOpenOption.APPEND);\n}\n}\n/**\n* Find the package name for a given template.\n*\n* @param template template name\n* @return packageName - package name of the module\n*/\npublic static String findPkgName(String template) {\nString[] orgSplit = template.split(\"/\");\nString packageName = \"\";\nString packagePart = (orgSplit.length > 1) ? orgSplit[1] : \"\";\nString[] pkgSplit = packagePart.split(\":\");\npackageName = pkgSplit[0].trim();\nreturn packageName;\n}\n/**\n* Find the organization for a given template.\n*\n* @param template template name\n* @return orgName - org of the module\n*/\npublic static String findOrg(String template) {\nString[] orgSplit = template.split(\"/\");\nreturn orgSplit[0].trim();\n}\n/**\n* Find the package version for a given template.\n*\n* @param template template name\n* @return version - version of the module\n*/\npublic static String findPkgVersion(String template) {\nString[] orgSplit = template.split(\"/\");\nString packagePart = (orgSplit.length > 1) ? 
orgSplit[1] : \"\";\nString[] pkgSplit = packagePart.split(\":\");\nif (pkgSplit.length > 1) {\nreturn pkgSplit[1].trim();\n} else {\nreturn null;\n}\n}\n/**\n* Find the platform of the module for a given template.\n*\n* @param balaPath path to the module\n* @return platform - platform of the module\n*/\npublic static String findPlatform(Path balaPath) {\nString platform = \"\";\nif (!Files.exists(balaPath)) {\nplatform = JvmTarget.JAVA_11.code();\n} else {\nplatform = ANY_PLATFORM;\n}\nreturn platform;\n}\n/**\n* Initialize a new ballerina project in the given path.\n*\n* @param path project path\n* @param packageName name of the package\n* @param template package template\n* @param balFilesExist if bal files exist in the project\n* @throws IOException If any IO exception occurred\n* @throws URISyntaxException If any URISyntaxException occurred\n*/\npublic static void initPackageByTemplate(Path path, String packageName, String template, boolean balFilesExist)\nthrows IOException, URISyntaxException {\napplyTemplate(path, template, balFilesExist);\nif (template.equalsIgnoreCase(\"lib\")) {\ninitLibPackage(path, packageName);\nPath source = path.resolve(\"lib.bal\");\nFiles.move(source, source.resolveSibling(guessPkgName(packageName, template) + \".bal\"),\nStandardCopyOption.REPLACE_EXISTING);\n} else {\ninitPackage(path, packageName);\n}\ncreateDefaultGitignore(path);\ncreateDefaultDevContainer(path);\n}\nprivate static void createDefaultGitignore(Path path) throws IOException {\nPath gitignore = path.resolve(ProjectConstants.GITIGNORE_FILE_NAME);\nif (Files.notExists(gitignore)) {\nFiles.createFile(gitignore);\n}\nString defaultGitignore = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + \"/\" + GITIGNORE);\nFiles.write(gitignore, defaultGitignore.getBytes(StandardCharsets.UTF_8));\n}\nprivate static void createDefaultDevContainer(Path path) throws IOException {\nPath devContainer = path.resolve(ProjectConstants.DEVCONTAINER);\nif (Files.notExists(devContainer)) {\nFiles.createFile(devContainer);\n}\nString defaultDevContainer = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + \"/\" + DEVCONTAINER);\ndefaultDevContainer = defaultDevContainer.replace(\"latest\", RepoUtils.getBallerinaVersion());\nFiles.write(devContainer, defaultDevContainer.getBytes(StandardCharsets.UTF_8));\n}\n/**\n* Get the list of templates.\n*\n* @return list of templates\n*/\npublic static List getTemplates() {\ntry {\nPath templateDir = getTemplatePath();\nStream walk = Files.walk(templateDir, 1);\nList templates = walk.filter(Files::isDirectory)\n.filter(directory -> !templateDir.equals(directory))\n.filter(directory -> directory.getFileName() != null)\n.map(directory -> directory.getFileName())\n.map(fileName -> fileName.toString())\n.collect(Collectors.toList());\nif (null != jarFs) {\nreturn templates.stream().map(t -> t\n.replace(jarFs.getSeparator(), \"\"))\n.collect(Collectors.toList());\n} else {\nreturn templates;\n}\n} catch (IOException | URISyntaxException e) {\nreturn new ArrayList();\n}\n}\n/**\n* Get the path to the given template.\n*\n* @return path of the given template\n* @throws URISyntaxException if any URISyntaxException occured\n*/\nprivate static Path getTemplatePath() throws URISyntaxException {\nURI uri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI();\nif (uri.toString().contains(\"!\")) {\nfinal String[] array = uri.toString().split(\"!\");\nreturn jarFs.getPath(array[1]);\n} else {\nreturn Paths.get(uri);\n}\n}\n/**\n* Apply the template to the created 
module.\n*\n* @param modulePath path to the module\n* @param template template name\n* @param balFilesExist if bal files exist in the project\n* @throws IOException if any IOException occurred\n* @throws URISyntaxException if any URISyntaxException occurred\n*/\npublic static void applyTemplate(Path modulePath, String template, boolean balFilesExist)\nthrows IOException, URISyntaxException {\nPath templateDir = getTemplatePath().resolve(template);\nif (template.equalsIgnoreCase(MAIN_TEMPLATE)) {\ntemplateDir = getTemplatePath().resolve(DEFAULT_TEMPLATE);\nPath tempDirTest = getTemplatePath().resolve(MAIN_TEMPLATE);\nFiles.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));\nFiles.walkFileTree(tempDirTest, new FileUtils.Copy(tempDirTest, modulePath));\n} else if (template.equalsIgnoreCase(DEFAULT_TEMPLATE)) {\nif (!balFilesExist) {\nFiles.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));\n}\n} else {\nFiles.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));\n}\n}\n/**\n* Initialize a new ballerina project in the given path.\n*\n* @param path Project path\n* @throws IOException If any IO exception occurred\n*/\npublic static void initPackage(Path path, String packageName) throws IOException {\nPath ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);\nFiles.createFile(ballerinaToml);\nString defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + \"/\" + \"manifest-app.toml\");\ndefaultManifest = defaultManifest\n.replaceAll(ORG_NAME, ProjectUtils.guessOrgName())\n.replaceAll(PKG_NAME, guessPkgName(packageName, \"app\"))\n.replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());\nFiles.write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));\n}\nprivate static void initLibPackage(Path path, String packageName) throws IOException {\nPath ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);\nFiles.createFile(ballerinaToml);\nString defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + \"/\" + \"manifest-lib.toml\");\ndefaultManifest = defaultManifest.replaceAll(ORG_NAME, ProjectUtils.guessOrgName())\n.replaceAll(PKG_NAME, guessPkgName(packageName, \"lib\"))\n.replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());\nwrite(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));\nString packageMd = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + \"/Package.md\");\nwrite(path.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME), packageMd.getBytes(StandardCharsets.UTF_8));\n}\nprotected static PackageVersion findLatest(List packageVersions) {\nif (packageVersions.isEmpty()) {\nreturn null;\n}\nPackageVersion latestVersion = packageVersions.get(0);\nfor (PackageVersion pkgVersion : packageVersions) {\nlatestVersion = getLatest(latestVersion, pkgVersion);\n}\nreturn latestVersion;\n}\nprotected static PackageVersion getLatest(PackageVersion v1, PackageVersion v2) {\nSemanticVersion semVer1 = v1.value();\nSemanticVersion semVer2 = v2.value();\nboolean isV1PreReleaseVersion = semVer1.isPreReleaseVersion();\nboolean isV2PreReleaseVersion = semVer2.isPreReleaseVersion();\nif (isV1PreReleaseVersion ^ isV2PreReleaseVersion) {\nreturn isV1PreReleaseVersion ? v2 : v1;\n} else {\nreturn semVer1.greaterThanOrEqualTo(semVer2) ? 
v1 : v2;\n}\n}\npublic static List getPackageVersions(Path balaPackagePath) {\nList versions = new ArrayList<>();\nif (Files.exists(balaPackagePath)) {\nStream collectVersions;\ntry {\ncollectVersions = Files.list(balaPackagePath);\n} catch (IOException e) {\nthrow new RuntimeException(\"Error while accessing Distribution cache: \" + e.getMessage());\n}\nversions.addAll(collectVersions.collect(Collectors.toList()));\n}\nreturn pathToVersions(versions);\n}\nprotected static List pathToVersions(List versions) {\nList availableVersions = new ArrayList<>();\nversions.stream().map(path -> Optional.ofNullable(path)\n.map(Path::getFileName)\n.map(Path::toString)\n.orElse(\"0.0.0\")).forEach(version -> {\ntry {\navailableVersions.add(PackageVersion.from(version));\n} catch (ProjectException ignored) {\n}\n});\nreturn availableVersions;\n}\n/**\n* Remove starting whitespaces of a string.\n*\n* @param str given string\n* @return starting whitespaces removed string\n*/\nprivate static String trimStartingWhitespaces(String str) {\nreturn str.replaceFirst(\"\\\\s++$\", \"\");\n}\n/**\n* Remove last character of a string.\n*\n* @param str given string\n* @return last character removed string\n*/\nprivate static String removeLastCharacter(String str) {\nreturn str.substring(0, str.length() - 1);\n}\n/**\n* Check if files of the given template exist in a given path.\n*\n* @param template given string\n* @param packagePath given path\n* @throws URISyntaxException if URI syntax exception occurred\n* @throws IOException if IO exception occurred\n*/\npublic static String checkTemplateFilesExists(String template, Path packagePath) throws URISyntaxException,\nIOException {\nPath templateDir = getTemplatePath().resolve(template);\nStream paths = Files.list(templateDir);\nList templateFilePathList = paths.collect(Collectors.toList());\nString existingFiles = \"\";\nfor (Path path : templateFilePathList) {\nString fileName = path.getFileName().toString();\nif (!fileName.endsWith(ProjectConstants.BLANG_SOURCE_EXT) && Files.exists(packagePath.resolve(fileName))) {\nexistingFiles += fileName + FILE_STRING_SEPARATOR;\n}\n}\nreturn existingFiles;\n}\n/**\n* Check if common files of a package exist in a given path.\n*\n* @param packagePath given path\n*/\npublic static String checkPackageFilesExists(Path packagePath) {\nString[] packageFiles = {DEPENDENCIES_TOML, ProjectConstants.PACKAGE_MD_FILE_NAME,\nProjectConstants.MODULE_MD_FILE_NAME, ProjectConstants.MODULES_ROOT, ProjectConstants.TEST_DIR_NAME,\nProjectConstants.GITIGNORE_FILE_NAME, ProjectConstants.DEVCONTAINER};\nString existingFiles = \"\";\nfor (String file : packageFiles) {\nif (Files.exists(packagePath.resolve(file))) {\nexistingFiles += file + FILE_STRING_SEPARATOR;\n}\n}\nreturn existingFiles;\n}\n/**\n* Check if .bal files exist in a given path.\n*\n* @param packagePath given path\n* @return error message if files exists\n*/\n}" + }, + { + "comment": "due to the PUT is `current` https://github.com/azure/azure-rest-api-specs/blob/216e7ae3aafefd2ea16a5b64a49281d21da6cb1a/specification/synapse/resource-manager/Microsoft.Synapse/preview/2019-06-01-preview/sqlPool.json#L1745", + "method_body": "protected Observable getInnerAsync() {\nSqlPoolSensitivityLabelsInner client = this.manager().inner().sqlPoolSensitivityLabels();\nreturn client.getAsync(this.resourceGroupName, this.workspaceName, this.sqlPoolName, this.schemaName, this.tableName, this.columnName, SensitivityLabelSource.CURRENT);\n}", + "target_code": "return client.getAsync(this.resourceGroupName, 
this.workspaceName, this.sqlPoolName, this.schemaName, this.tableName, this.columnName, SensitivityLabelSource.CURRENT);", + "method_body_after": "protected Observable getInnerAsync() {\nSqlPoolSensitivityLabelsInner client = this.manager().inner().sqlPoolSensitivityLabels();\nreturn client.getAsync(this.resourceGroupName, this.workspaceName, this.sqlPoolName, this.schemaName, this.tableName, this.columnName, this.sensitivityLabelSource);\n}", + "context_before": "class SensitivityLabelsImpl extends CreatableUpdatableImpl implements SensitivityLabels, SensitivityLabels.Definition, SensitivityLabels.Update {\nprivate final SynapseManager manager;\nprivate String resourceGroupName;\nprivate String workspaceName;\nprivate String sqlPoolName;\nprivate String schemaName;\nprivate String tableName;\nprivate String columnName;\nSensitivityLabelsImpl(String name, SynapseManager manager) {\nsuper(name, new SensitivityLabelInner());\nthis.manager = manager;\nthis.columnName = name;\n}\nSensitivityLabelsImpl(SensitivityLabelInner inner, SynapseManager manager) {\nsuper(inner.name(), inner);\nthis.manager = manager;\nthis.columnName = inner.name();\nthis.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), \"resourceGroups\");\nthis.workspaceName = IdParsingUtils.getValueFromIdByName(inner.id(), \"workspaces\");\nthis.sqlPoolName = IdParsingUtils.getValueFromIdByName(inner.id(), \"sqlPools\");\nthis.schemaName = IdParsingUtils.getValueFromIdByName(inner.id(), \"schemas\");\nthis.tableName = IdParsingUtils.getValueFromIdByName(inner.id(), \"tables\");\nthis.columnName = IdParsingUtils.getValueFromIdByName(inner.id(), \"columns\");\n}\n@Override\npublic SynapseManager manager() {\nreturn this.manager;\n}\n@Override\npublic Observable createResourceAsync() {\nSqlPoolSensitivityLabelsInner client = this.manager().inner().sqlPoolSensitivityLabels();\nreturn client.createOrUpdateAsync(this.resourceGroupName, this.workspaceName, this.sqlPoolName, this.schemaName, this.tableName, this.columnName, this.inner())\n.map(innerToFluentMap(this));\n}\n@Override\npublic Observable updateResourceAsync() {\nSqlPoolSensitivityLabelsInner client = this.manager().inner().sqlPoolSensitivityLabels();\nreturn client.createOrUpdateAsync(this.resourceGroupName, this.workspaceName, this.sqlPoolName, this.schemaName, this.tableName, this.columnName, this.inner())\n.map(innerToFluentMap(this));\n}\n@Override\n@Override\npublic boolean isInCreateMode() {\nreturn this.inner().id() == null;\n}\n@Override\npublic String id() {\nreturn this.inner().id();\n}\n@Override\npublic String informationType() {\nreturn this.inner().informationType();\n}\n@Override\npublic String informationTypeId() {\nreturn this.inner().informationTypeId();\n}\n@Override\npublic Boolean isDisabled() {\nreturn this.inner().isDisabled();\n}\n@Override\npublic String labelId() {\nreturn this.inner().labelId();\n}\n@Override\npublic String labelName() {\nreturn this.inner().labelName();\n}\n@Override\npublic String name() {\nreturn this.inner().name();\n}\n@Override\npublic String type() {\nreturn this.inner().type();\n}\n@Override\npublic SensitivityLabelsImpl withExistingColumn(String resourceGroupName, String workspaceName, String sqlPoolName, String schemaName, String tableName, String columnName) {\nthis.resourceGroupName = resourceGroupName;\nthis.workspaceName = workspaceName;\nthis.sqlPoolName = sqlPoolName;\nthis.schemaName = schemaName;\nthis.tableName = tableName;\nthis.columnName = columnName;\nreturn this;\n}\n@Override\npublic 
SensitivityLabelsImpl withInformationType(String informationType) {\nthis.inner().withInformationType(informationType);\nreturn this;\n}\n@Override\npublic SensitivityLabelsImpl withInformationTypeId(String informationTypeId) {\nthis.inner().withInformationTypeId(informationTypeId);\nreturn this;\n}\n@Override\npublic SensitivityLabelsImpl withLabelId(String labelId) {\nthis.inner().withLabelId(labelId);\nreturn this;\n}\n@Override\npublic SensitivityLabelsImpl withLabelName(String labelName) {\nthis.inner().withLabelName(labelName);\nreturn this;\n}\n}", + "context_after": "class SensitivityLabelsImpl extends CreatableUpdatableImpl implements SensitivityLabels, SensitivityLabels.Definition, SensitivityLabels.Update {\nprivate final SynapseManager manager;\nprivate String resourceGroupName;\nprivate String workspaceName;\nprivate String sqlPoolName;\nprivate String schemaName;\nprivate String tableName;\nprivate String columnName;\nprivate SensitivityLabelSource sensitivityLabelSource;\nSensitivityLabelsImpl(String name, SynapseManager manager) {\nsuper(name, new SensitivityLabelInner());\nthis.manager = manager;\nthis.columnName = name;\n}\nSensitivityLabelsImpl(SensitivityLabelInner inner, SynapseManager manager) {\nsuper(inner.name(), inner);\nthis.manager = manager;\nthis.columnName = inner.name();\nthis.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), \"resourceGroups\");\nthis.workspaceName = IdParsingUtils.getValueFromIdByName(inner.id(), \"workspaces\");\nthis.sqlPoolName = IdParsingUtils.getValueFromIdByName(inner.id(), \"sqlPools\");\nthis.schemaName = IdParsingUtils.getValueFromIdByName(inner.id(), \"schemas\");\nthis.tableName = IdParsingUtils.getValueFromIdByName(inner.id(), \"tables\");\nthis.columnName = IdParsingUtils.getValueFromIdByName(inner.id(), \"columns\");\nthis.sensitivityLabelSource = SensitivityLabelSource.fromString(IdParsingUtils.getValueFromIdByName(inner.id(), \"sensitivityLabels\"));\n}\n@Override\npublic SynapseManager manager() {\nreturn this.manager;\n}\n@Override\npublic Observable createResourceAsync() {\nSqlPoolSensitivityLabelsInner client = this.manager().inner().sqlPoolSensitivityLabels();\nreturn client.createOrUpdateAsync(this.resourceGroupName, this.workspaceName, this.sqlPoolName, this.schemaName, this.tableName, this.columnName, this.inner())\n.map(innerToFluentMap(this));\n}\n@Override\npublic Observable updateResourceAsync() {\nSqlPoolSensitivityLabelsInner client = this.manager().inner().sqlPoolSensitivityLabels();\nreturn client.createOrUpdateAsync(this.resourceGroupName, this.workspaceName, this.sqlPoolName, this.schemaName, this.tableName, this.columnName, this.inner())\n.map(innerToFluentMap(this));\n}\n@Override\n@Override\npublic boolean isInCreateMode() {\nreturn this.inner().id() == null;\n}\n@Override\npublic String id() {\nreturn this.inner().id();\n}\n@Override\npublic String informationType() {\nreturn this.inner().informationType();\n}\n@Override\npublic String informationTypeId() {\nreturn this.inner().informationTypeId();\n}\n@Override\npublic Boolean isDisabled() {\nreturn this.inner().isDisabled();\n}\n@Override\npublic String labelId() {\nreturn this.inner().labelId();\n}\n@Override\npublic String labelName() {\nreturn this.inner().labelName();\n}\n@Override\npublic String name() {\nreturn this.inner().name();\n}\n@Override\npublic String type() {\nreturn this.inner().type();\n}\n@Override\npublic SensitivityLabelsImpl withExistingColumn(String resourceGroupName, String workspaceName, String sqlPoolName, 
String schemaName, String tableName, String columnName) {\nthis.resourceGroupName = resourceGroupName;\nthis.workspaceName = workspaceName;\nthis.sqlPoolName = sqlPoolName;\nthis.schemaName = schemaName;\nthis.tableName = tableName;\nthis.columnName = columnName;\nreturn this;\n}\n@Override\npublic SensitivityLabelsImpl withInformationType(String informationType) {\nthis.inner().withInformationType(informationType);\nreturn this;\n}\n@Override\npublic SensitivityLabelsImpl withInformationTypeId(String informationTypeId) {\nthis.inner().withInformationTypeId(informationTypeId);\nreturn this;\n}\n@Override\npublic SensitivityLabelsImpl withLabelId(String labelId) {\nthis.inner().withLabelId(labelId);\nreturn this;\n}\n@Override\npublic SensitivityLabelsImpl withLabelName(String labelName) {\nthis.inner().withLabelName(labelName);\nreturn this;\n}\n}" + }, + { + "comment": "Equality test should be done on unescaped paths (segment lists) because the escaped form is not canonical. Consider the following test cases, that would not pass with the current code: ``` ... WHERE `a` > 1 ORDER BY a --> ORDER BY a, __name__ ... WHERE `a` > 1 AND a < 3 --> ORDER BY a, __name__ ... ORDER BY `__name__` --> ORDER BY `__name__` ... WHERE `__name__` > \".../foo/bar\" --> ORDER BY __name__ ```", + "method_body": "static List getImplicitOrderBy(StructuredQuery query) {\nList expectedImplicitOrders = new ArrayList<>();\nif (query.hasWhere()) {\nfillInequalityFields(query.getWhere(), expectedImplicitOrders);\n}\nif (!expectedImplicitOrders.contains(\"__name__\")) {\nexpectedImplicitOrders.add(\"__name__\");\n}\nfor (Order order : query.getOrderByList()) {\nString orderField = order.getField().getFieldPath();\nexpectedImplicitOrders.remove(orderField);\n}\nList additionalOrders = new ArrayList<>();\nif (!expectedImplicitOrders.isEmpty()) {\nDirection lastDirection =\nquery.getOrderByCount() == 0\n? Direction.ASCENDING\n: query.getOrderByList().get(query.getOrderByCount() - 1).getDirection();\nfor (String field : expectedImplicitOrders) {\nadditionalOrders.add(\nOrder.newBuilder()\n.setDirection(lastDirection)\n.setField(FieldReference.newBuilder().setFieldPath(field).build())\n.build());\n}\n}\nreturn additionalOrders;\n}", + "target_code": "for (String field : expectedImplicitOrders) {", + "method_body_after": "static List getImplicitOrderBy(StructuredQuery query) {\nList expectedImplicitOrders = new ArrayList<>();\nif (query.hasWhere()) {\nfillInequalityFields(query.getWhere(), expectedImplicitOrders);\n}\nCollections.sort(expectedImplicitOrders);\nif (expectedImplicitOrders.stream().noneMatch(OrderByFieldPath::isDocumentName)) {\nexpectedImplicitOrders.add(OrderByFieldPath.fromString(\"__name__\"));\n}\nfor (Order order : query.getOrderByList()) {\nOrderByFieldPath orderField = OrderByFieldPath.fromString(order.getField().getFieldPath());\nexpectedImplicitOrders.remove(orderField);\n}\nList additionalOrders = new ArrayList<>();\nif (!expectedImplicitOrders.isEmpty()) {\nDirection lastDirection =\nquery.getOrderByCount() == 0\n? 
Direction.ASCENDING\n: query.getOrderByList().get(query.getOrderByCount() - 1).getDirection();\nfor (OrderByFieldPath field : expectedImplicitOrders) {\nadditionalOrders.add(\nOrder.newBuilder()\n.setDirection(lastDirection)\n.setField(\nFieldReference.newBuilder().setFieldPath(field.getOriginalString()).build())\n.build());\n}\n}\nreturn additionalOrders;\n}", + "context_before": "class QueryUtils {\nprivate static final ImmutableSet INEQUALITY_FIELD_FILTER_OPS =\nImmutableSet.of(\nFieldFilter.Operator.LESS_THAN,\nFieldFilter.Operator.LESS_THAN_OR_EQUAL,\nFieldFilter.Operator.GREATER_THAN,\nFieldFilter.Operator.GREATER_THAN_OR_EQUAL,\nFieldFilter.Operator.NOT_EQUAL,\nFieldFilter.Operator.NOT_IN);\nprivate static final ImmutableSet INEQUALITY_UNARY_FILTER_OPS =\nImmutableSet.of(UnaryFilter.Operator.IS_NOT_NAN, UnaryFilter.Operator.IS_NOT_NULL);\nprivate static final String UNQUOTED_NAME_REGEX_STRING = \"([a-zA-Z_][a-zA-Z_0-9]*)\";\nprivate static final String QUOTED_NAME_REGEX_STRING = \"(`(?:[^`\\\\\\\\]|(?:\\\\\\\\.))+`)\";\nprivate static final Pattern FIELD_PATH_SEGMENT_REGEX =\nPattern.compile(\nString.format(\"(?:%s|%s)(\\\\..+|$)\", UNQUOTED_NAME_REGEX_STRING, QUOTED_NAME_REGEX_STRING),\nPattern.DOTALL);\nprivate static void fillInequalityFields(Filter filter, List result) {\nswitch (filter.getFilterTypeCase()) {\ncase FIELD_FILTER:\nif (INEQUALITY_FIELD_FILTER_OPS.contains(filter.getFieldFilter().getOp())) {\nString fieldPath = filter.getFieldFilter().getField().getFieldPath();\nif (!result.contains(fieldPath)) {\nresult.add(fieldPath);\n}\n}\nbreak;\ncase COMPOSITE_FILTER:\nfilter.getCompositeFilter().getFiltersList().forEach(f -> fillInequalityFields(f, result));\nbreak;\ncase UNARY_FILTER:\nif (INEQUALITY_UNARY_FILTER_OPS.contains(filter.getUnaryFilter().getOp())) {\nString fieldPath = filter.getUnaryFilter().getField().getFieldPath();\nif (!result.contains(fieldPath)) {\nresult.add(fieldPath);\n}\n}\nbreak;\ndefault:\nbreak;\n}\n}\n@Nullable\nstatic Value lookupDocumentValue(List segments, Map valueMap) {\nif (segments.isEmpty()) {\nreturn null;\n}\nString field = segments.remove(0);\nValue value = valueMap.get(field);\nif (segments.isEmpty()) {\nreturn value;\n}\nif (value == null || !value.getValueTypeCase().equals(ValueTypeCase.MAP_VALUE)) {\nreturn null;\n}\nreturn lookupDocumentValue(segments, value.getMapValue().getFieldsMap());\n}\nstatic List resolveOrderByFieldPath(String fieldPath) {\nif (fieldPath.isEmpty()) {\nthrow new IllegalArgumentException(\"Could not resolve empty field path\");\n}\nList segments = new ArrayList<>();\nwhile (!fieldPath.isEmpty()) {\nMatcher segmentMatcher = FIELD_PATH_SEGMENT_REGEX.matcher(fieldPath);\nboolean foundMatch = segmentMatcher.lookingAt();\nif (!foundMatch) {\nthrow new IllegalArgumentException(\"OrderBy field path was malformed\");\n}\nString fieldName = segmentMatcher.group(1);\nif (fieldName == null) {\nfieldName = segmentMatcher.group(2);\nString escaped = escapeFieldName(fieldName.substring(1, fieldName.length() - 1));\nsegments.add(escaped);\n} else {\nsegments.add(fieldName);\n}\nfieldPath = fieldPath.substring(fieldName.length());\nif (fieldPath.startsWith(\".\")) {\nfieldPath = fieldPath.substring(1);\n}\n}\nreturn segments;\n}\nprivate static String escapeFieldName(String fieldName) {\nif (fieldName.isEmpty()) {\nthrow new IllegalArgumentException(\"quoted identifier cannot be empty\");\n}\nStringBuilder buf = new StringBuilder();\nfor (int i = 0; i < fieldName.length(); i++) {\nchar c = fieldName.charAt(i);\nif (c == '`') 
{\nthrow new IllegalArgumentException(\"quoted identifier cannot contain unescaped quote\");\n} else if (c == '\\r') {\nbuf.append('\\n');\nif (i + 1 < fieldName.length() && fieldName.charAt(i + 1) == '\\n') {\ni++;\n}\n} else if (c != '\\\\') {\nbuf.append(c);\n} else if (i + 1 >= fieldName.length()) {\nthrow new IllegalArgumentException(\"illegal trailing backslash\");\n} else {\ni++;\nswitch (fieldName.charAt(i)) {\ncase 'a':\nbuf.appendCodePoint(Ascii.BEL);\nbreak;\ncase 'b':\nbuf.append('\\b');\nbreak;\ncase 'f':\nbuf.append('\\f');\nbreak;\ncase 'n':\nbuf.append('\\n');\nbreak;\ncase 'r':\nbuf.append('\\r');\nbreak;\ncase 't':\nbuf.append('\\t');\nbreak;\ncase 'v':\nbuf.appendCodePoint(Ascii.VT);\nbreak;\ncase '?':\nbuf.append('?');\nbreak;\ncase '\\\\':\nbuf.append('\\\\');\nbreak;\ncase '\\'':\nbuf.append('\\'');\nbreak;\ncase '\"':\nbuf.append('\\\"');\nbreak;\ncase '`':\nbuf.append('`');\nbreak;\ncase '0':\ncase '1':\ncase '2':\ncase '3':\nif (i + 3 > fieldName.length()) {\nthrow new IllegalArgumentException(\"illegal octal escape sequence\");\n}\nbuf.appendCodePoint(unescapeOctal(fieldName.substring(i, i + 3)));\ni += 3;\nbreak;\ncase 'x':\ncase 'X':\ni++;\nif (i + 2 > fieldName.length()) {\nthrow new IllegalArgumentException(\"illegal hex escape sequence\");\n}\nbuf.appendCodePoint(unescapeHex(fieldName.substring(i, i + 2)));\ni += 2;\nbreak;\ncase 'u':\ni++;\nif (i + 4 > fieldName.length()) {\nthrow new IllegalArgumentException(\"illegal unicode escape sequence\");\n}\nbuf.appendCodePoint(unescapeHex(fieldName.substring(i, i + 4)));\ni += 4;\nbreak;\ncase 'U':\ni++;\nif (i + 8 > fieldName.length()) {\nthrow new IllegalArgumentException(\"illegal unicode escape sequence\");\n}\nbuf.appendCodePoint(unescapeHex(fieldName.substring(i, i + 8)));\ni += 8;\nbreak;\ndefault:\nthrow new IllegalArgumentException(\"illegal escape\");\n}\n}\n}\nreturn buf.toString();\n}\nprivate static int unescapeOctal(String str) {\nint ch = 0;\nfor (int i = 0; i < str.length(); i++) {\nch = 8 * ch + octalValue(str.charAt(i));\n}\nif (!Character.isValidCodePoint(ch)) {\nthrow new IllegalArgumentException(\"illegal codepoint\");\n}\nreturn ch;\n}\nprivate static int unescapeHex(String str) {\nint ch = 0;\nfor (int i = 0; i < str.length(); i++) {\nch = 16 * ch + hexValue(str.charAt(i));\n}\nif (!Character.isValidCodePoint(ch)) {\nthrow new IllegalArgumentException(\"illegal codepoint\");\n}\nreturn ch;\n}\nprivate static int octalValue(char d) {\nif (d >= '0' && d <= '7') {\nreturn d - '0';\n} else {\nthrow new IllegalArgumentException(\"illegal octal digit\");\n}\n}\nprivate static int hexValue(char d) {\nif (d >= '0' && d <= '9') {\nreturn d - '0';\n} else if (d >= 'a' && d <= 'f') {\nreturn 10 + d - 'a';\n} else if (d >= 'A' && d <= 'F') {\nreturn 10 + d - 'A';\n} else {\nthrow new IllegalArgumentException(\"illegal hex digit\");\n}\n}\n}", + "context_after": "class QueryUtils {\nprivate static final ImmutableSet INEQUALITY_FIELD_FILTER_OPS =\nImmutableSet.of(\nFieldFilter.Operator.LESS_THAN,\nFieldFilter.Operator.LESS_THAN_OR_EQUAL,\nFieldFilter.Operator.GREATER_THAN,\nFieldFilter.Operator.GREATER_THAN_OR_EQUAL,\nFieldFilter.Operator.NOT_EQUAL,\nFieldFilter.Operator.NOT_IN);\nprivate static final ImmutableSet INEQUALITY_UNARY_FILTER_OPS =\nImmutableSet.of(UnaryFilter.Operator.IS_NOT_NAN, UnaryFilter.Operator.IS_NOT_NULL);\n/**\n* Populates implicit orderBy of a query in accordance with our documentation. * Required\n* inequality fields are appended in field name order. 
* __name__ is appended if not specified.\n* See the corresponding ordering logic in the Firestore client libraries.\n*/\nprivate static void fillInequalityFields(Filter filter, List<OrderByFieldPath> result) {\nswitch (filter.getFilterTypeCase()) {\ncase FIELD_FILTER:\nif (INEQUALITY_FIELD_FILTER_OPS.contains(filter.getFieldFilter().getOp())) {\nOrderByFieldPath fieldPath =\nOrderByFieldPath.fromString(filter.getFieldFilter().getField().getFieldPath());\nif (!result.contains(fieldPath)) {\nresult.add(fieldPath);\n}\n}\nbreak;\ncase COMPOSITE_FILTER:\nfilter.getCompositeFilter().getFiltersList().forEach(f -> fillInequalityFields(f, result));\nbreak;\ncase UNARY_FILTER:\nif (INEQUALITY_UNARY_FILTER_OPS.contains(filter.getUnaryFilter().getOp())) {\nOrderByFieldPath fieldPath =\nOrderByFieldPath.fromString(filter.getUnaryFilter().getField().getFieldPath());\nif (!result.contains(fieldPath)) {\nresult.add(fieldPath);\n}\n}\nbreak;\ndefault:\nbreak;\n}\n}\nstatic @Nullable Value lookupDocumentValue(Document document, String fieldPath) {\nOrderByFieldPath resolvedPath = OrderByFieldPath.fromString(fieldPath);\nif (resolvedPath.isDocumentName()) {\nreturn Value.newBuilder().setReferenceValue(document.getName()).build();\n}\nreturn findMapValue(new ArrayList<>(resolvedPath.getSegments()), document.getFieldsMap());\n}\nprivate static @Nullable Value findMapValue(List<String> segments, Map<String, Value> valueMap) {\nif (segments.isEmpty()) {\nreturn null;\n}\nString field = segments.remove(0);\nValue value = valueMap.get(field);\nif (segments.isEmpty()) {\nreturn value;\n}\nif (value == null || !value.getValueTypeCase().equals(ValueTypeCase.MAP_VALUE)) {\nreturn null;\n}\nreturn findMapValue(segments, value.getMapValue().getFieldsMap());\n}\nprivate static class OrderByFieldPath implements Comparable<OrderByFieldPath> {\nprivate static final String UNQUOTED_NAME_REGEX_STRING = \"([a-zA-Z_][a-zA-Z_0-9]*)\";\nprivate static final String QUOTED_NAME_REGEX_STRING = \"(`(?:[^`\\\\\\\\]|(?:\\\\\\\\.))+`)\";\nprivate static final Pattern FIELD_PATH_SEGMENT_REGEX =\nPattern.compile(\nString.format(\n\"(?:%s|%s)(\\\\..+|$)\", UNQUOTED_NAME_REGEX_STRING, QUOTED_NAME_REGEX_STRING),\nPattern.DOTALL);\npublic static OrderByFieldPath fromString(String fieldPath) {\nif (fieldPath.isEmpty()) {\nthrow new IllegalArgumentException(\"Could not resolve empty field path\");\n}\nString originalString = fieldPath;\nList<String> segments = new ArrayList<>();\nwhile (!fieldPath.isEmpty()) {\nMatcher segmentMatcher = FIELD_PATH_SEGMENT_REGEX.matcher(fieldPath);\nboolean foundMatch = segmentMatcher.lookingAt();\nif (!foundMatch) {\nthrow new IllegalArgumentException(\"OrderBy field path was malformed\");\n}\nString fieldName;\nif ((fieldName = segmentMatcher.group(1)) != null) {\nsegments.add(fieldName);\n} else if ((fieldName = segmentMatcher.group(2)) != null) {\nString unescaped = unescapeFieldName(fieldName.substring(1, fieldName.length() - 1));\nsegments.add(unescaped);\n} else {\nthrow new IllegalArgumentException(\"OrderBy field path was malformed\");\n}\nfieldPath = fieldPath.substring(fieldName.length());\nif (fieldPath.startsWith(\".\")) {\nfieldPath = fieldPath.substring(1);\n}\n}\nreturn new OrderByFieldPath(originalString, ImmutableList.copyOf(segments));\n}\nprivate final String originalString;\nprivate final ImmutableList<String> segments;\nprivate OrderByFieldPath(String originalString, ImmutableList<String> segments) {\nthis.originalString = originalString;\nthis.segments = segments;\n}\npublic String getOriginalString() {\nreturn originalString;\n}\npublic boolean isDocumentName() {\nreturn segments.size() == 1 && \"__name__\".equals(segments.get(0));\n}\npublic ImmutableList<String> getSegments() {\nreturn
segments;\n}\n@Override\npublic boolean equals(@Nullable Object other) {\nif (other instanceof OrderByFieldPath) {\nreturn this.segments.equals(((OrderByFieldPath) other).getSegments());\n}\nreturn super.equals(other);\n}\n@Override\npublic int hashCode() {\nreturn Objects.hash(segments);\n}\n@Override\npublic int compareTo(OrderByFieldPath other) {\nint length = Math.min(this.getSegments().size(), other.getSegments().size());\nfor (int i = 0; i < length; i++) {\nbyte[] thisField = this.getSegments().get(i).getBytes(StandardCharsets.UTF_8);\nbyte[] otherField = other.getSegments().get(i).getBytes(StandardCharsets.UTF_8);\nint cmp = UnsignedBytes.lexicographicalComparator().compare(thisField, otherField);\nif (cmp != 0) {\nreturn cmp;\n}\n}\nreturn Integer.compare(this.getSegments().size(), other.getSegments().size());\n}\nprivate static String unescapeFieldName(String fieldName) {\nif (fieldName.isEmpty()) {\nthrow new IllegalArgumentException(\"quoted identifier cannot be empty\");\n}\nStringBuilder buf = new StringBuilder();\nfor (int i = 0; i < fieldName.length(); i++) {\nchar c = fieldName.charAt(i);\nif (c == '`') {\nthrow new IllegalArgumentException(\"quoted identifier cannot contain unescaped quote\");\n} else if (c == '\\r') {\nbuf.append('\\n');\nif (i + 1 < fieldName.length() && fieldName.charAt(i + 1) == '\\n') {\ni++;\n}\n} else if (c != '\\\\') {\nbuf.append(c);\n} else if (i + 1 >= fieldName.length()) {\nthrow new IllegalArgumentException(\"illegal trailing backslash\");\n} else {\ni++;\nswitch (fieldName.charAt(i)) {\ncase 'a':\nbuf.appendCodePoint(Ascii.BEL);\nbreak;\ncase 'b':\nbuf.append('\\b');\nbreak;\ncase 'f':\nbuf.append('\\f');\nbreak;\ncase 'n':\nbuf.append('\\n');\nbreak;\ncase 'r':\nbuf.append('\\r');\nbreak;\ncase 't':\nbuf.append('\\t');\nbreak;\ncase 'v':\nbuf.appendCodePoint(Ascii.VT);\nbreak;\ncase '?':\nbuf.append('?');\nbreak;\ncase '\\\\':\nbuf.append('\\\\');\nbreak;\ncase '\\'':\nbuf.append('\\'');\nbreak;\ncase '\"':\nbuf.append('\\\"');\nbreak;\ncase '`':\nbuf.append('`');\nbreak;\ncase '0':\ncase '1':\ncase '2':\ncase '3':\nif (i + 3 > fieldName.length()) {\nthrow new IllegalArgumentException(\"illegal octal escape sequence\");\n}\nbuf.appendCodePoint(unescapeOctal(fieldName.substring(i, i + 3)));\ni += 3;\nbreak;\ncase 'x':\ncase 'X':\ni++;\nif (i + 2 > fieldName.length()) {\nthrow new IllegalArgumentException(\"illegal hex escape sequence\");\n}\nbuf.appendCodePoint(unescapeHex(fieldName.substring(i, i + 2)));\ni += 2;\nbreak;\ncase 'u':\ni++;\nif (i + 4 > fieldName.length()) {\nthrow new IllegalArgumentException(\"illegal unicode escape sequence\");\n}\nbuf.appendCodePoint(unescapeHex(fieldName.substring(i, i + 4)));\ni += 4;\nbreak;\ncase 'U':\ni++;\nif (i + 8 > fieldName.length()) {\nthrow new IllegalArgumentException(\"illegal unicode escape sequence\");\n}\nbuf.appendCodePoint(unescapeHex(fieldName.substring(i, i + 8)));\ni += 8;\nbreak;\ndefault:\nthrow new IllegalArgumentException(\"illegal escape\");\n}\n}\n}\nreturn buf.toString();\n}\nprivate static int unescapeOctal(String str) {\nint ch = 0;\nfor (int i = 0; i < str.length(); i++) {\nch = 8 * ch + octalValue(str.charAt(i));\n}\nif (!Character.isValidCodePoint(ch)) {\nthrow new IllegalArgumentException(\"illegal codepoint\");\n}\nreturn ch;\n}\nprivate static int unescapeHex(String str) {\nint ch = 0;\nfor (int i = 0; i < str.length(); i++) {\nch = 16 * ch + hexValue(str.charAt(i));\n}\nif (!Character.isValidCodePoint(ch)) {\nthrow new IllegalArgumentException(\"illegal 
codepoint\");\n}\nreturn ch;\n}\nprivate static int octalValue(char d) {\nif (d >= '0' && d <= '7') {\nreturn d - '0';\n} else {\nthrow new IllegalArgumentException(\"illegal octal digit\");\n}\n}\nprivate static int hexValue(char d) {\nif (d >= '0' && d <= '9') {\nreturn d - '0';\n} else if (d >= 'a' && d <= 'f') {\nreturn 10 + d - 'a';\n} else if (d >= 'A' && d <= 'F') {\nreturn 10 + d - 'A';\n} else {\nthrow new IllegalArgumentException(\"illegal hex digit\");\n}\n}\n}\n}" + }, + { + "comment": "Ah it is out-of-date... Removed", + "method_body": "public void processElement(ProcessContext c) {\nRow row = c.element();\nIntervalWindow window = windowFn.assignWindow(row.getDateTime(windowFieldIndex).toInstant());\nRow.Builder builder = Row.withSchema(outputSchema);\nbuilder.addValues(row.getValues());\nbuilder.addValue(window.start());\nbuilder.addValue(window.end());\nc.output(builder.build());\n}", + "target_code": "", + "method_body_after": "public void processElement(ProcessContext c) {\nRow row = c.element();\nIntervalWindow window = windowFn.assignWindow(row.getDateTime(windowFieldIndex).toInstant());\nRow.Builder builder = Row.withSchema(outputSchema);\nbuilder.addValues(row.getValues());\nbuilder.addValue(window.start());\nbuilder.addValue(window.end());\nc.output(builder.build());\n}", + "context_before": "class FixedWindowDoFn extends DoFn {\nprivate int windowFieldIndex;\nprivate FixedWindows windowFn;\nprivate Schema outputSchema;\npublic FixedWindowDoFn(FixedWindows windowFn, int windowFieldIndex, Schema schema) {\nthis.windowFn = windowFn;\nthis.windowFieldIndex = windowFieldIndex;\nthis.outputSchema = schema;\n}\n@ProcessElement\n}", + "context_after": "class FixedWindowDoFn extends DoFn {\nprivate final int windowFieldIndex;\nprivate final FixedWindows windowFn;\nprivate final Schema outputSchema;\npublic FixedWindowDoFn(FixedWindows windowFn, int windowFieldIndex, Schema schema) {\nthis.windowFn = windowFn;\nthis.windowFieldIndex = windowFieldIndex;\nthis.outputSchema = schema;\n}\n@ProcessElement\n}" + }, + { + "comment": "Not needed: Since PROP_REQUEST was private, there is no chance that anybody is already using this property name - it is not part of the API of properties.", + "method_body": "public void set(CompoundName key, Object value, Map context) {\ntry {\nif (key.size()==2 && key.first().equals(Model.MODEL)) {\nModel model = query.getModel();\nif (key.last().equals(Model.QUERY_STRING))\nmodel.setQueryString(asString(value, \"\"));\nelse if (key.last().equals(Model.TYPE))\nmodel.setType(asString(value, \"ANY\"));\nelse if (key.last().equals(Model.FILTER))\nmodel.setFilter(asString(value, \"\"));\nelse if (key.last().equals(Model.DEFAULT_INDEX))\nmodel.setDefaultIndex(asString(value, \"\"));\nelse if (key.last().equals(Model.LANGUAGE))\nmodel.setLanguage(asString(value, \"\"));\nelse if (key.last().equals(Model.ENCODING))\nmodel.setEncoding(asString(value,\"\"));\nelse if (key.last().equals(Model.SEARCH_PATH))\nmodel.setSearchPath(asString(value,\"\"));\nelse if (key.last().equals(Model.SOURCES))\nmodel.setSources(asString(value,\"\"));\nelse if (key.last().equals(Model.RESTRICT))\nmodel.setRestrict(asString(value,\"\"));\nelse\nthrowIllegalParameter(key.last(),Model.MODEL);\n}\nelse if (key.first().equals(Ranking.RANKING)) {\nRanking ranking = query.getRanking();\nif (key.size()==2) {\nif (key.last().equals(Ranking.LOCATION))\nranking.setLocation(asString(value,\"\"));\nelse if (key.last().equals(Ranking.PROFILE))\nranking.setProfile(asString(value,\"\"));\nelse if 
(key.last().equals(Ranking.SORTING))\nranking.setSorting(asString(value,\"\"));\nelse if (key.last().equals(Ranking.FRESHNESS))\nranking.setFreshness(asString(value, \"\"));\nelse if (key.last().equals(Ranking.QUERYCACHE))\nranking.setQueryCache(asBoolean(value, false));\nelse if (key.last().equals(Ranking.LIST_FEATURES))\nranking.setListFeatures(asBoolean(value,false));\n}\nelse if (key.size()>=3 && key.get(1).equals(Ranking.MATCH_PHASE)) {\nif (key.size() == 3) {\nMatchPhase matchPhase = ranking.getMatchPhase();\nif (key.last().equals(MatchPhase.ATTRIBUTE)) {\nmatchPhase.setAttribute(asString(value, null));\n} else if (key.last().equals(MatchPhase.ASCENDING)) {\nmatchPhase.setAscending(asBoolean(value, false));\n} else if (key.last().equals(MatchPhase.MAX_HITS)) {\nmatchPhase.setMaxHits(asLong(value, null));\n} else if (key.last().equals(MatchPhase.MAX_FILTER_COVERAGE)) {\nmatchPhase.setMaxFilterCoverage(asDouble(value, 0.2));\n}\n} else if (key.size() > 3 && key.get(2).equals(Ranking.DIVERSITY)) {\nDiversity diversity = ranking.getMatchPhase().getDiversity();\nif (key.last().equals(Diversity.ATTRIBUTE)) {\ndiversity.setAttribute(asString(value, null));\n} else if (key.last().equals(Diversity.MINGROUPS)) {\ndiversity.setMinGroups(asLong(value, null));\n} else if ((key.size() > 4) && key.get(3).equals(Diversity.CUTOFF)) {\nif (key.last().equals(Diversity.FACTOR)) {\ndiversity.setCutoffFactor(asDouble(value, 10.0));\n} else if (key.last().equals(Diversity.STRATEGY)) {\ndiversity.setCutoffStrategy(asString(value, \"loose\"));\n}\n}\n}\n}\nelse if (key.size() == 3 && key.get(1).equals(Ranking.SOFTTIMEOUT)) {\nSoftTimeout soft = ranking.getSoftTimeout();\nif (key.last().equals(SoftTimeout.ENABLE)) soft.setEnable(asBoolean(value, false));\nif (key.last().equals(SoftTimeout.FACTOR)) soft.setFactor(asDouble(value, 0.50));\nif (key.last().equals(SoftTimeout.TAILCOST)) soft.setTailcost(asDouble(value, 0.10));\n}\nelse if (key.size() == 3 && key.get(1).equals(Ranking.MATCHING)) {\nMatching matching = ranking.getMatching();\nif (key.last().equals(Matching.TERMWISELIMIT)) matching.setTermwiselimit(asDouble(value, 1.0));\nif (key.last().equals(Matching.NUMTHREADSPERSEARCH)) matching.setNumThreadsPerSearch(asInteger(value, 1));\nif (key.last().equals(Matching.NUMSEARCHPARTITIIONS)) matching.setNumSearchPartitions(asInteger(value, 1));\nif (key.last().equals(Matching.MINHITSPERTHREAD)) matching.setMinHitsPerThread(asInteger(value, 0));\n}\nelse if (key.size()>2) {\nString restKey = key.rest().rest().toString();\nif (key.get(1).equals(Ranking.FEATURES))\nsetRankingFeature(query, restKey, toSpecifiedType(restKey, value, profileRegistry.getTypeRegistry().getComponent(\"features\")));\nelse if (key.get(1).equals(Ranking.PROPERTIES))\nranking.getProperties().put(restKey, toSpecifiedType(restKey, value, profileRegistry.getTypeRegistry().getComponent(\"properties\")));\nelse\nthrowIllegalParameter(key.rest().toString(),Ranking.RANKING);\n}\n}\nelse if (key.size()==2 && key.first().equals(Presentation.PRESENTATION)) {\nif (key.last().equals(Presentation.BOLDING))\nquery.getPresentation().setBolding(asBoolean(value, true));\nelse if (key.last().equals(Presentation.SUMMARY))\nquery.getPresentation().setSummary(asString(value, \"\"));\nelse if (key.last().equals(Presentation.FORMAT))\nquery.getPresentation().setFormat(asString(value,\"\"));\nelse if (key.last().equals(Presentation.TIMING))\nquery.getPresentation().setTiming(asBoolean(value, true));\nelse if 
(key.last().equals(Presentation.SUMMARY_FIELDS))\nquery.getPresentation().setSummaryFields(asString(value,\"\"));\nelse if ( ! key.last().equals(Presentation.REPORT_COVERAGE))\nthrowIllegalParameter(key.last(), Presentation.PRESENTATION);\n}\nelse if (key.size()==2 && key.first().equals(Select.SELECT)) {\nif (key.last().equals(Select.WHERE)){\nquery.getSelect().setWhereString(asString(value, \"\"));\n} else if (key.last().equals(Select.GROUPING)) {\nquery.getSelect().setGroupingString(asString(value, \"\"));\n}\n}\nelse if (key.first().equals(\"rankfeature\") || key.first().equals(\"featureoverride\") ) {\nsetRankingFeature(query, key.rest().toString(), toSpecifiedType(key.rest().toString(), value, profileRegistry.getTypeRegistry().getComponent(\"features\")));\n} else if (key.first().equals(\"rankproperty\")) {\nquery.getRanking().getProperties().put(key.rest().toString(), toSpecifiedType(key.rest().toString(), value, profileRegistry.getTypeRegistry().getComponent(\"properties\")));\n} else if (key.size()==1) {\nif (key.equals(Query.HITS))\nquery.setHits(asInteger(value,10));\nelse if (key.equals(Query.OFFSET))\nquery.setOffset(asInteger(value,0));\nelse if (key.equals(Query.TRACE_LEVEL))\nquery.setTraceLevel(asInteger(value,0));\nelse if (key.equals(Query.TIMEOUT))\nquery.setTimeout(value.toString());\nelse if (key.equals(Query.NO_CACHE))\nquery.setNoCache(asBoolean(value,false));\nelse if (key.equals(Query.GROUPING_SESSION_CACHE))\nquery.setGroupingSessionCache(asBoolean(value, false));\nelse\nsuper.set(key,value,context);\n} else if (key.toString().equals(GroupingRequest.PROP_REQUEST)) {\nquery.getSelect().setGrouping((List) value);\n}\nelse\nsuper.set(key,value,context);\n}\ncatch (Exception e) {\nif (e.getMessage().startsWith(\"Could not set\"))\nthrow e;\nelse\nthrow new IllegalArgumentException(\"Could not set '\" + key + \"' to '\" + value + \"'\", e);\n}\n}", + "target_code": "} else if (key.toString().equals(GroupingRequest.PROP_REQUEST)) {", + "method_body_after": "public void set(CompoundName key, Object value, Map context) {\ntry {\nif (key.size()==2 && key.first().equals(Model.MODEL)) {\nModel model = query.getModel();\nif (key.last().equals(Model.QUERY_STRING))\nmodel.setQueryString(asString(value, \"\"));\nelse if (key.last().equals(Model.TYPE))\nmodel.setType(asString(value, \"ANY\"));\nelse if (key.last().equals(Model.FILTER))\nmodel.setFilter(asString(value, \"\"));\nelse if (key.last().equals(Model.DEFAULT_INDEX))\nmodel.setDefaultIndex(asString(value, \"\"));\nelse if (key.last().equals(Model.LANGUAGE))\nmodel.setLanguage(asString(value, \"\"));\nelse if (key.last().equals(Model.ENCODING))\nmodel.setEncoding(asString(value,\"\"));\nelse if (key.last().equals(Model.SEARCH_PATH))\nmodel.setSearchPath(asString(value,\"\"));\nelse if (key.last().equals(Model.SOURCES))\nmodel.setSources(asString(value,\"\"));\nelse if (key.last().equals(Model.RESTRICT))\nmodel.setRestrict(asString(value,\"\"));\nelse\nthrowIllegalParameter(key.last(),Model.MODEL);\n}\nelse if (key.first().equals(Ranking.RANKING)) {\nRanking ranking = query.getRanking();\nif (key.size()==2) {\nif (key.last().equals(Ranking.LOCATION))\nranking.setLocation(asString(value,\"\"));\nelse if (key.last().equals(Ranking.PROFILE))\nranking.setProfile(asString(value,\"\"));\nelse if (key.last().equals(Ranking.SORTING))\nranking.setSorting(asString(value,\"\"));\nelse if (key.last().equals(Ranking.FRESHNESS))\nranking.setFreshness(asString(value, \"\"));\nelse if 
(key.last().equals(Ranking.QUERYCACHE))\nranking.setQueryCache(asBoolean(value, false));\nelse if (key.last().equals(Ranking.LIST_FEATURES))\nranking.setListFeatures(asBoolean(value,false));\n}\nelse if (key.size()>=3 && key.get(1).equals(Ranking.MATCH_PHASE)) {\nif (key.size() == 3) {\nMatchPhase matchPhase = ranking.getMatchPhase();\nif (key.last().equals(MatchPhase.ATTRIBUTE)) {\nmatchPhase.setAttribute(asString(value, null));\n} else if (key.last().equals(MatchPhase.ASCENDING)) {\nmatchPhase.setAscending(asBoolean(value, false));\n} else if (key.last().equals(MatchPhase.MAX_HITS)) {\nmatchPhase.setMaxHits(asLong(value, null));\n} else if (key.last().equals(MatchPhase.MAX_FILTER_COVERAGE)) {\nmatchPhase.setMaxFilterCoverage(asDouble(value, 0.2));\n}\n} else if (key.size() > 3 && key.get(2).equals(Ranking.DIVERSITY)) {\nDiversity diversity = ranking.getMatchPhase().getDiversity();\nif (key.last().equals(Diversity.ATTRIBUTE)) {\ndiversity.setAttribute(asString(value, null));\n} else if (key.last().equals(Diversity.MINGROUPS)) {\ndiversity.setMinGroups(asLong(value, null));\n} else if ((key.size() > 4) && key.get(3).equals(Diversity.CUTOFF)) {\nif (key.last().equals(Diversity.FACTOR)) {\ndiversity.setCutoffFactor(asDouble(value, 10.0));\n} else if (key.last().equals(Diversity.STRATEGY)) {\ndiversity.setCutoffStrategy(asString(value, \"loose\"));\n}\n}\n}\n}\nelse if (key.size() == 3 && key.get(1).equals(Ranking.SOFTTIMEOUT)) {\nSoftTimeout soft = ranking.getSoftTimeout();\nif (key.last().equals(SoftTimeout.ENABLE)) soft.setEnable(asBoolean(value, false));\nif (key.last().equals(SoftTimeout.FACTOR)) soft.setFactor(asDouble(value, 0.50));\nif (key.last().equals(SoftTimeout.TAILCOST)) soft.setTailcost(asDouble(value, 0.10));\n}\nelse if (key.size() == 3 && key.get(1).equals(Ranking.MATCHING)) {\nMatching matching = ranking.getMatching();\nif (key.last().equals(Matching.TERMWISELIMIT)) matching.setTermwiselimit(asDouble(value, 1.0));\nif (key.last().equals(Matching.NUMTHREADSPERSEARCH)) matching.setNumThreadsPerSearch(asInteger(value, 1));\nif (key.last().equals(Matching.NUMSEARCHPARTITIIONS)) matching.setNumSearchPartitions(asInteger(value, 1));\nif (key.last().equals(Matching.MINHITSPERTHREAD)) matching.setMinHitsPerThread(asInteger(value, 0));\n}\nelse if (key.size()>2) {\nString restKey = key.rest().rest().toString();\nif (key.get(1).equals(Ranking.FEATURES))\nsetRankingFeature(query, restKey, toSpecifiedType(restKey, value, profileRegistry.getTypeRegistry().getComponent(\"features\")));\nelse if (key.get(1).equals(Ranking.PROPERTIES))\nranking.getProperties().put(restKey, toSpecifiedType(restKey, value, profileRegistry.getTypeRegistry().getComponent(\"properties\")));\nelse\nthrowIllegalParameter(key.rest().toString(),Ranking.RANKING);\n}\n}\nelse if (key.size()==2 && key.first().equals(Presentation.PRESENTATION)) {\nif (key.last().equals(Presentation.BOLDING))\nquery.getPresentation().setBolding(asBoolean(value, true));\nelse if (key.last().equals(Presentation.SUMMARY))\nquery.getPresentation().setSummary(asString(value, \"\"));\nelse if (key.last().equals(Presentation.FORMAT))\nquery.getPresentation().setFormat(asString(value,\"\"));\nelse if (key.last().equals(Presentation.TIMING))\nquery.getPresentation().setTiming(asBoolean(value, true));\nelse if (key.last().equals(Presentation.SUMMARY_FIELDS))\nquery.getPresentation().setSummaryFields(asString(value,\"\"));\nelse if ( ! 
key.last().equals(Presentation.REPORT_COVERAGE))\nthrowIllegalParameter(key.last(), Presentation.PRESENTATION);\n}\nelse if (key.size()==2 && key.first().equals(Select.SELECT)) {\nif (key.last().equals(Select.WHERE)){\nquery.getSelect().setWhereString(asString(value, \"\"));\n} else if (key.last().equals(Select.GROUPING)) {\nquery.getSelect().setGroupingString(asString(value, \"\"));\n}\n}\nelse if (key.first().equals(\"rankfeature\") || key.first().equals(\"featureoverride\") ) {\nsetRankingFeature(query, key.rest().toString(), toSpecifiedType(key.rest().toString(), value, profileRegistry.getTypeRegistry().getComponent(\"features\")));\n} else if (key.first().equals(\"rankproperty\")) {\nquery.getRanking().getProperties().put(key.rest().toString(), toSpecifiedType(key.rest().toString(), value, profileRegistry.getTypeRegistry().getComponent(\"properties\")));\n} else if (key.size()==1) {\nif (key.equals(Query.HITS))\nquery.setHits(asInteger(value,10));\nelse if (key.equals(Query.OFFSET))\nquery.setOffset(asInteger(value,0));\nelse if (key.equals(Query.TRACE_LEVEL))\nquery.setTraceLevel(asInteger(value,0));\nelse if (key.equals(Query.TIMEOUT))\nquery.setTimeout(value.toString());\nelse if (key.equals(Query.NO_CACHE))\nquery.setNoCache(asBoolean(value,false));\nelse if (key.equals(Query.GROUPING_SESSION_CACHE))\nquery.setGroupingSessionCache(asBoolean(value, false));\nelse\nsuper.set(key,value,context);\n} else\nsuper.set(key,value,context);\n}\ncatch (Exception e) {\nif (e.getMessage().startsWith(\"Could not set\"))\nthrow e;\nelse\nthrow new IllegalArgumentException(\"Could not set '\" + key + \"' to '\" + value + \"'\", e);\n}\n}", + "context_before": "class QueryProperties extends Properties {\n/**\n* TODO: Remove on Vespa 7\n* @deprecated use Query.nativeProperties\n*/\n@Deprecated\npublic static final CompoundName[] PER_SOURCE_QUERY_PROPERTIES =\nQuery.nativeProperties.toArray(new CompoundName[] {});\nprivate Query query;\nprivate final CompiledQueryProfileRegistry profileRegistry;\npublic QueryProperties(Query query, CompiledQueryProfileRegistry profileRegistry) {\nthis.query = query;\nthis.profileRegistry = profileRegistry;\n}\npublic void setParentQuery(Query query) {\nthis.query=query;\nsuper.setParentQuery(query);\n}\n@SuppressWarnings(\"deprecation\")\n@Override\npublic Object get(CompoundName key, Map context,\ncom.yahoo.processing.request.Properties substitution) {\nif (key.size() == 2 && key.first().equals(Model.MODEL)) {\nModel model = query.getModel();\nif (key.last().equals(Model.QUERY_STRING)) return model.getQueryString();\nif (key.last().equals(Model.TYPE)) return model.getType();\nif (key.last().equals(Model.FILTER)) return model.getFilter();\nif (key.last().equals(Model.DEFAULT_INDEX)) return model.getDefaultIndex();\nif (key.last().equals(Model.LANGUAGE)) return model.getLanguage();\nif (key.last().equals(Model.ENCODING)) return model.getEncoding();\nif (key.last().equals(Model.SOURCES)) return model.getSources();\nif (key.last().equals(Model.SEARCH_PATH)) return model.getSearchPath();\nif (key.last().equals(Model.RESTRICT)) return model.getRestrict();\n}\nelse if (key.first().equals(Ranking.RANKING)) {\nRanking ranking = query.getRanking();\nif (key.size() == 2) {\nif (key.last().equals(Ranking.LOCATION)) return ranking.getLocation();\nif (key.last().equals(Ranking.PROFILE)) return ranking.getProfile();\nif (key.last().equals(Ranking.SORTING)) return ranking.getSorting();\nif (key.last().equals(Ranking.FRESHNESS)) return ranking.getFreshness();\nif 
(key.last().equals(Ranking.QUERYCACHE)) return ranking.getQueryCache();\nif (key.last().equals(Ranking.LIST_FEATURES)) return ranking.getListFeatures();\n}\nelse if (key.size()>=3 && key.get(1).equals(Ranking.MATCH_PHASE)) {\nif (key.size() == 3) {\nMatchPhase matchPhase = ranking.getMatchPhase();\nif (key.last().equals(MatchPhase.ATTRIBUTE)) return matchPhase.getAttribute();\nif (key.last().equals(MatchPhase.ASCENDING)) return matchPhase.getAscending();\nif (key.last().equals(MatchPhase.MAX_HITS)) return matchPhase.getMaxHits();\nif (key.last().equals(MatchPhase.MAX_FILTER_COVERAGE)) return matchPhase.getMaxFilterCoverage();\n} else if (key.size() >= 4 && key.get(2).equals(Ranking.DIVERSITY)) {\nDiversity diversity = ranking.getMatchPhase().getDiversity();\nif (key.size() == 4) {\nif (key.last().equals(Diversity.ATTRIBUTE)) return diversity.getAttribute();\nif (key.last().equals(Diversity.MINGROUPS)) return diversity.getMinGroups();\n} else if ((key.size() == 5) && key.get(3).equals(Diversity.CUTOFF)) {\nif (key.last().equals(Diversity.FACTOR)) return diversity.getCutoffFactor();\nif (key.last().equals(Diversity.STRATEGY)) return diversity.getCutoffStrategy();\n}\n}\n}\nelse if (key.size() == 3 && key.get(1).equals(Ranking.SOFTTIMEOUT)) {\nSoftTimeout soft = ranking.getSoftTimeout();\nif (key.last().equals(SoftTimeout.ENABLE)) return soft.getEnable();\nif (key.last().equals(SoftTimeout.FACTOR)) return soft.getFactor();\nif (key.last().equals(SoftTimeout.TAILCOST)) return soft.getTailcost();\n}\nelse if (key.size() == 3 && key.get(1).equals(Ranking.MATCHING)) {\nMatching matching = ranking.getMatching();\nif (key.last().equals(Matching.TERMWISELIMIT)) return matching.getTermwiseLimit();\nif (key.last().equals(Matching.NUMTHREADSPERSEARCH)) return matching.getNumThreadsPerSearch();\nif (key.last().equals(Matching.NUMSEARCHPARTITIIONS)) return matching.getNumSearchPartitions();\nif (key.last().equals(Matching.MINHITSPERTHREAD)) return matching.getMinHitsPerThread();\n}\nelse if (key.size()>2) {\nif (key.get(1).equals(Ranking.FEATURES)) return ranking.getFeatures().getObject(key.rest().rest().toString());\nif (key.get(1).equals(Ranking.PROPERTIES)) return ranking.getProperties().get(key.rest().rest().toString());\n}\n}\nelse if (key.size()==2 && key.first().equals(Select.SELECT)) {\nif (key.last().equals(Select.WHERE)) return query.getSelect().getWhereString();\nif (key.last().equals(Select.GROUPING)) return query.getSelect().getGroupingString();\n}\nelse if (key.size()==2 && key.first().equals(Presentation.PRESENTATION)) {\nif (key.last().equals(Presentation.BOLDING)) return query.getPresentation().getBolding();\nif (key.last().equals(Presentation.SUMMARY)) return query.getPresentation().getSummary();\nif (key.last().equals(Presentation.REPORT_COVERAGE)) return true;\nif (key.last().equals(Presentation.FORMAT)) return query.getPresentation().getFormat();\nif (key.last().equals(Presentation.TIMING)) return query.getPresentation().getTiming();\nif (key.last().equals(Presentation.SUMMARY_FIELDS)) return query.getPresentation().getSummaryFields();\n}\nelse if (key.first().equals(\"rankfeature\") || key.first().equals(\"featureoverride\")) {\nreturn query.getRanking().getFeatures().getObject(key.rest().toString());\n} else if (key.first().equals(\"rankproperty\")) {\nreturn query.getRanking().getProperties().get(key.rest().toString());\n} else if (key.size()==1) {\nif (key.equals(Query.HITS)) return query.getHits();\nif (key.equals(Query.OFFSET)) return query.getOffset();\nif 
(key.equals(Query.TRACE_LEVEL)) return query.getTraceLevel();\nif (key.equals(Query.TIMEOUT)) return query.getTimeout();\nif (key.equals(Query.NO_CACHE)) return query.getNoCache();\nif (key.equals(Query.GROUPING_SESSION_CACHE)) return query.getGroupingSessionCache();\nif (key.toString().equals(Model.MODEL)) return query.getModel();\nif (key.toString().equals(Ranking.RANKING)) return query.getRanking();\nif (key.toString().equals(Presentation.PRESENTATION)) return query.getPresentation();\n} else if (key.toString().equals(GroupingRequest.PROP_REQUEST)) {\nreturn query.getSelect().getGrouping();\n}\nreturn super.get(key, context, substitution);\n}\n@SuppressWarnings(\"deprecation\")\n@Override\n@Override\npublic Map listProperties(CompoundName prefix,\nMap context,\ncom.yahoo.processing.request.Properties substitution) {\nMap properties = super.listProperties(prefix, context, substitution);\nfor (CompoundName queryProperty : Query.nativeProperties) {\nif (queryProperty.hasPrefix(prefix)) {\nObject value = this.get(queryProperty, context, substitution);\nif (value != null)\nproperties.put(queryProperty.toString(), value);\n}\n}\nreturn properties;\n}\nprivate void setRankingFeature(Query query, String key, Object value) {\nif (value instanceof Tensor)\nquery.getRanking().getFeatures().put(key, (Tensor)value);\nelse\nquery.getRanking().getFeatures().put(key, asString(value, \"\"));\n}\nprivate Object toSpecifiedType(String key, Object value, QueryProfileType type) {\nif ( ! ( value instanceof String)) return value;\nif (type == null) return value;\nFieldDescription field = type.getField(key);\nif (field == null) return value;\nreturn field.getType().convertFrom(value, profileRegistry);\n}\nprivate void throwIllegalParameter(String key,String namespace) {\nthrow new IllegalArgumentException(\"'\" + key + \"' is not a valid property in '\" + namespace +\n\"'. 
See the search api for valid keys starting by '\" + namespace + \"'.\");\n}\n@Override\npublic final Query getParentQuery() {\nreturn query;\n}\n}", + "context_after": "class QueryProperties extends Properties {\n/**\n* TODO: Remove on Vespa 7\n* @deprecated use Query.nativeProperties\n*/\n@Deprecated\npublic static final CompoundName[] PER_SOURCE_QUERY_PROPERTIES =\nQuery.nativeProperties.toArray(new CompoundName[] {});\nprivate Query query;\nprivate final CompiledQueryProfileRegistry profileRegistry;\npublic QueryProperties(Query query, CompiledQueryProfileRegistry profileRegistry) {\nthis.query = query;\nthis.profileRegistry = profileRegistry;\n}\npublic void setParentQuery(Query query) {\nthis.query=query;\nsuper.setParentQuery(query);\n}\n@SuppressWarnings(\"deprecation\")\n@Override\npublic Object get(CompoundName key, Map context,\ncom.yahoo.processing.request.Properties substitution) {\nif (key.size() == 2 && key.first().equals(Model.MODEL)) {\nModel model = query.getModel();\nif (key.last().equals(Model.QUERY_STRING)) return model.getQueryString();\nif (key.last().equals(Model.TYPE)) return model.getType();\nif (key.last().equals(Model.FILTER)) return model.getFilter();\nif (key.last().equals(Model.DEFAULT_INDEX)) return model.getDefaultIndex();\nif (key.last().equals(Model.LANGUAGE)) return model.getLanguage();\nif (key.last().equals(Model.ENCODING)) return model.getEncoding();\nif (key.last().equals(Model.SOURCES)) return model.getSources();\nif (key.last().equals(Model.SEARCH_PATH)) return model.getSearchPath();\nif (key.last().equals(Model.RESTRICT)) return model.getRestrict();\n}\nelse if (key.first().equals(Ranking.RANKING)) {\nRanking ranking = query.getRanking();\nif (key.size() == 2) {\nif (key.last().equals(Ranking.LOCATION)) return ranking.getLocation();\nif (key.last().equals(Ranking.PROFILE)) return ranking.getProfile();\nif (key.last().equals(Ranking.SORTING)) return ranking.getSorting();\nif (key.last().equals(Ranking.FRESHNESS)) return ranking.getFreshness();\nif (key.last().equals(Ranking.QUERYCACHE)) return ranking.getQueryCache();\nif (key.last().equals(Ranking.LIST_FEATURES)) return ranking.getListFeatures();\n}\nelse if (key.size()>=3 && key.get(1).equals(Ranking.MATCH_PHASE)) {\nif (key.size() == 3) {\nMatchPhase matchPhase = ranking.getMatchPhase();\nif (key.last().equals(MatchPhase.ATTRIBUTE)) return matchPhase.getAttribute();\nif (key.last().equals(MatchPhase.ASCENDING)) return matchPhase.getAscending();\nif (key.last().equals(MatchPhase.MAX_HITS)) return matchPhase.getMaxHits();\nif (key.last().equals(MatchPhase.MAX_FILTER_COVERAGE)) return matchPhase.getMaxFilterCoverage();\n} else if (key.size() >= 4 && key.get(2).equals(Ranking.DIVERSITY)) {\nDiversity diversity = ranking.getMatchPhase().getDiversity();\nif (key.size() == 4) {\nif (key.last().equals(Diversity.ATTRIBUTE)) return diversity.getAttribute();\nif (key.last().equals(Diversity.MINGROUPS)) return diversity.getMinGroups();\n} else if ((key.size() == 5) && key.get(3).equals(Diversity.CUTOFF)) {\nif (key.last().equals(Diversity.FACTOR)) return diversity.getCutoffFactor();\nif (key.last().equals(Diversity.STRATEGY)) return diversity.getCutoffStrategy();\n}\n}\n}\nelse if (key.size() == 3 && key.get(1).equals(Ranking.SOFTTIMEOUT)) {\nSoftTimeout soft = ranking.getSoftTimeout();\nif (key.last().equals(SoftTimeout.ENABLE)) return soft.getEnable();\nif (key.last().equals(SoftTimeout.FACTOR)) return soft.getFactor();\nif (key.last().equals(SoftTimeout.TAILCOST)) return soft.getTailcost();\n}\nelse if 
(key.size() == 3 && key.get(1).equals(Ranking.MATCHING)) {\nMatching matching = ranking.getMatching();\nif (key.last().equals(Matching.TERMWISELIMIT)) return matching.getTermwiseLimit();\nif (key.last().equals(Matching.NUMTHREADSPERSEARCH)) return matching.getNumThreadsPerSearch();\nif (key.last().equals(Matching.NUMSEARCHPARTITIIONS)) return matching.getNumSearchPartitions();\nif (key.last().equals(Matching.MINHITSPERTHREAD)) return matching.getMinHitsPerThread();\n}\nelse if (key.size()>2) {\nif (key.get(1).equals(Ranking.FEATURES)) return ranking.getFeatures().getObject(key.rest().rest().toString());\nif (key.get(1).equals(Ranking.PROPERTIES)) return ranking.getProperties().get(key.rest().rest().toString());\n}\n}\nelse if (key.size()==2 && key.first().equals(Select.SELECT)) {\nif (key.last().equals(Select.WHERE)) return query.getSelect().getWhereString();\nif (key.last().equals(Select.GROUPING)) return query.getSelect().getGroupingString();\n}\nelse if (key.size()==2 && key.first().equals(Presentation.PRESENTATION)) {\nif (key.last().equals(Presentation.BOLDING)) return query.getPresentation().getBolding();\nif (key.last().equals(Presentation.SUMMARY)) return query.getPresentation().getSummary();\nif (key.last().equals(Presentation.REPORT_COVERAGE)) return true;\nif (key.last().equals(Presentation.FORMAT)) return query.getPresentation().getFormat();\nif (key.last().equals(Presentation.TIMING)) return query.getPresentation().getTiming();\nif (key.last().equals(Presentation.SUMMARY_FIELDS)) return query.getPresentation().getSummaryFields();\n}\nelse if (key.first().equals(\"rankfeature\") || key.first().equals(\"featureoverride\")) {\nreturn query.getRanking().getFeatures().getObject(key.rest().toString());\n} else if (key.first().equals(\"rankproperty\")) {\nreturn query.getRanking().getProperties().get(key.rest().toString());\n} else if (key.size()==1) {\nif (key.equals(Query.HITS)) return query.getHits();\nif (key.equals(Query.OFFSET)) return query.getOffset();\nif (key.equals(Query.TRACE_LEVEL)) return query.getTraceLevel();\nif (key.equals(Query.TIMEOUT)) return query.getTimeout();\nif (key.equals(Query.NO_CACHE)) return query.getNoCache();\nif (key.equals(Query.GROUPING_SESSION_CACHE)) return query.getGroupingSessionCache();\nif (key.toString().equals(Model.MODEL)) return query.getModel();\nif (key.toString().equals(Ranking.RANKING)) return query.getRanking();\nif (key.toString().equals(Presentation.PRESENTATION)) return query.getPresentation();\n}\nreturn super.get(key, context, substitution);\n}\n@SuppressWarnings(\"deprecation\")\n@Override\n@Override\npublic Map listProperties(CompoundName prefix,\nMap context,\ncom.yahoo.processing.request.Properties substitution) {\nMap properties = super.listProperties(prefix, context, substitution);\nfor (CompoundName queryProperty : Query.nativeProperties) {\nif (queryProperty.hasPrefix(prefix)) {\nObject value = this.get(queryProperty, context, substitution);\nif (value != null)\nproperties.put(queryProperty.toString(), value);\n}\n}\nreturn properties;\n}\nprivate void setRankingFeature(Query query, String key, Object value) {\nif (value instanceof Tensor)\nquery.getRanking().getFeatures().put(key, (Tensor)value);\nelse\nquery.getRanking().getFeatures().put(key, asString(value, \"\"));\n}\nprivate Object toSpecifiedType(String key, Object value, QueryProfileType type) {\nif ( ! 
( value instanceof String)) return value;\nif (type == null) return value;\nFieldDescription field = type.getField(key);\nif (field == null) return value;\nreturn field.getType().convertFrom(value, profileRegistry);\n}\nprivate void throwIllegalParameter(String key,String namespace) {\nthrow new IllegalArgumentException(\"'\" + key + \"' is not a valid property in '\" + namespace +\n\"'. See the search api for valid keys starting by '\" + namespace + \"'.\");\n}\n@Override\npublic final Query getParentQuery() {\nreturn query;\n}\n}" + }, + { + "comment": "Ah, yes, late night copy/paste. Fixed.", + "method_body": "public void testStateNotKeyed() {\nfinal String stateId = \"foo\";\nMyIntegerCoder myIntegerCoder = MyIntegerCoder.of();\npipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder);\nDoFn fn =\nnew DoFn() {\n@StateId(stateId)\nprivate final StateSpec> intState =\nStateSpecs.value();\n@ProcessElement\npublic void processElement(\nProcessContext c, @StateId(stateId) ValueState state) {}\n};\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"state\");\nthrown.expectMessage(\"KvCoder\");\npipeline.apply(Create.of(\"hello\", \"goodbye\", \"hello again\")).apply(ParDo.of(fn));\n}", + "target_code": "pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder);", + "method_body_after": "public void testStateNotKeyed() {\nfinal String stateId = \"foo\";\nDoFn fn =\nnew DoFn() {\n@StateId(stateId)\nprivate final StateSpec> intState =\nStateSpecs.value();\n@ProcessElement\npublic void processElement(\nProcessContext c, @StateId(stateId) ValueState state) {}\n};\nthrown.expect(IllegalArgumentException.class);\nthrown.expectMessage(\"state\");\nthrown.expectMessage(\"KvCoder\");\npipeline.apply(Create.of(\"hello\", \"goodbye\", \"hello again\")).apply(ParDo.of(fn));\n}", + "context_before": "class Checker implements SerializableFunction, Void> {\n@Override\npublic Void apply(Iterable input) {\nboolean foundElement = false;\nboolean foundFinish = false;\nfor (String str : input) {\nif (str.equals(\"elem:1:1\")) {\nif (foundElement) {\nthrow new AssertionError(\"Received duplicate element\");\n}\nfoundElement = true;\n} else if (str.equals(\"finish:3:3\")) {\nfoundFinish = true;\n} else {\nthrow new AssertionError(\"Got unexpected value: \" + str);\n}\n}\nif (!foundElement) {\nthrow new AssertionError(\"Missing \\\"elem:1:1\\\"\");\n}\nif (!foundFinish) {\nthrow new AssertionError(\"Missing \\\"finish:3:3\\\"\");\n}\nreturn null;\n}\n}", + "context_after": "class Checker implements SerializableFunction, Void> {\n@Override\npublic Void apply(Iterable input) {\nboolean foundElement = false;\nboolean foundFinish = false;\nfor (String str : input) {\nif (str.equals(\"elem:1:1\")) {\nif (foundElement) {\nthrow new AssertionError(\"Received duplicate element\");\n}\nfoundElement = true;\n} else if (str.equals(\"finish:3:3\")) {\nfoundFinish = true;\n} else {\nthrow new AssertionError(\"Got unexpected value: \" + str);\n}\n}\nif (!foundElement) {\nthrow new AssertionError(\"Missing \\\"elem:1:1\\\"\");\n}\nif (!foundFinish) {\nthrow new AssertionError(\"Missing \\\"finish:3:3\\\"\");\n}\nreturn null;\n}\n}" + }, + { + "comment": "nevermind, the point of this test is to trigger an exception", + "method_body": "public void testReadXMLInvalidRecordClassWithCustomEventHandler() throws IOException {\nFile file = tempFolder.newFile(\"trainXMLSmall\");\nFiles.write(file.toPath(), 
trainXML.getBytes(StandardCharsets.UTF_8));\nValidationEventHandler validationEventHandler =\nevent -> {\nthrow new RuntimeException(\"MyCustomValidationEventHandler failure mesage\");\n};\nBoundedSource source =\nXmlIO.read()\n.from(file.toPath().toString())\n.withRootElement(\"trains\")\n.withRecordElement(\"train\")\n.withRecordClass(WrongTrainType.class)\n.withValidationEventHandler(validationEventHandler)\n.createSource();\nexception.expect(RuntimeException.class);\nexception.expectMessage(\"MyCustomValidationEventHandler failure mesage\");\ntry (Reader reader = source.createReader(null)) {\nfor (boolean available = reader.start(); available; available = reader.advance()) {\nreader.getCurrent();\n}\n}\n}", + "target_code": "}", + "method_body_after": "public void testReadXMLInvalidRecordClassWithCustomEventHandler() throws IOException {\nFile file = tempFolder.newFile(\"trainXMLSmall\");\nFiles.write(file.toPath(), trainXML.getBytes(StandardCharsets.UTF_8));\nValidationEventHandler validationEventHandler =\nevent -> {\nthrow new RuntimeException(\"MyCustomValidationEventHandler failure mesage\");\n};\nBoundedSource source =\nXmlIO.read()\n.from(file.toPath().toString())\n.withRootElement(\"trains\")\n.withRecordElement(\"train\")\n.withRecordClass(WrongTrainType.class)\n.withValidationEventHandler(validationEventHandler)\n.createSource();\nexception.expect(RuntimeException.class);\nexception.expectMessage(\"MyCustomValidationEventHandler failure mesage\");\ntry (Reader reader = source.createReader(null)) {\nfor (boolean available = reader.start(); available; available = reader.advance()) {\nreader.getCurrent();\n}\n}\n}", + "context_before": "class WrongTrainType {\n@SuppressWarnings(\"unused\")\npublic String something;\n}", + "context_after": "class WrongTrainType {\n@SuppressWarnings(\"unused\")\npublic String something;\n}" + }, + { + "comment": "May want to use `Mono.fromSupplier` as this'll eagerly create the authorization header", + "method_body": "public Mono process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {\nfinal ByteBuffer contents = context.getHttpRequest().getBodyAsBinaryData()\n== null ? 
getEmptyBuffer() : context.getHttpRequest().getBodyAsBinaryData().toByteBuffer();\nreturn Mono.just(credentials\n.getAuthorizationHeaders(\ncontext.getHttpRequest().getUrl(),\ncontext.getHttpRequest().getHttpMethod().toString(),\ncontents))\n.flatMapMany(headers -> Flux.fromIterable(headers.entrySet()))\n.map(header -> context.getHttpRequest().setHeader(header.getKey(), header.getValue()))\n.last()\n.flatMap(request -> next.process());\n}", + "target_code": "return Mono.just(credentials", + "method_body_after": "public Mono process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {\nreturn Mono.defer(() -> Mono.just(credentials.getAuthorizationHeaders(\ncontext.getHttpRequest().getUrl(),\ncontext.getHttpRequest().getHttpMethod().toString(),\ncontext.getHttpRequest().getBodyAsBinaryData()))\n.flatMapMany(headers -> Flux.fromIterable(headers.entrySet()))\n.map(header -> context.getHttpRequest().setHeader(header.getKey(), header.getValue()))\n.last()\n.flatMap(request -> next.process()));\n}", + "context_before": "class ConfigurationCredentialsPolicy implements HttpPipelinePolicy {\nprivate final ConfigurationClientCredentials credentials;\n/**\n* Creates an instance that is able to apply a {@link ConfigurationClientCredentials} credential to a request in the\n* pipeline.\n*\n* @param credentials the credential information to authenticate to Azure App Configuration service\n* @throws NullPointerException If {@code credential} is {@code null}.\n*/\npublic ConfigurationCredentialsPolicy(ConfigurationClientCredentials credentials) {\nObjects.requireNonNull(credentials, \"'credential' can not be a null value.\");\nthis.credentials = credentials;\n}\n/**\n* Adds the required headers to authenticate a request to Azure App Configuration service.\n*\n* @param context The request context\n* @param next The next HTTP pipeline policy to process the {@code context's} request after this policy\n* completes.\n* @return A {@link Mono} representing the HTTP response that will arrive asynchronously.\n*/\n@Override\n/**\n* Adds the required headers to authenticate a request to Azure App Configuration service.\n*\n* @param context The request context\n* @param next The next HTTP pipeline policy to process the {@code context's} request after this policy\n* completes.\n* @return A {@link HttpResponse} that will arrive synchronously.\n*/\n@Override\npublic HttpResponse processSync(HttpPipelineCallContext context, HttpPipelineNextSyncPolicy next) {\nfinal ByteBuffer contents = context.getHttpRequest().getBodyAsBinaryData()\n== null ? 
getEmptyBuffer() : context.getHttpRequest().getBodyAsBinaryData().toByteBuffer();\nMap headers = credentials\n.getAuthorizationHeaders(\ncontext.getHttpRequest().getUrl(),\ncontext.getHttpRequest().getHttpMethod().toString(),\ncontents);\nheaders.entrySet()\n.stream()\n.forEach(header -> context.getHttpRequest().setHeader(header.getKey(), header.getValue()));\nreturn next.processSync();\n}\nprivate ByteBuffer getEmptyBuffer() {\nreturn ByteBuffer.allocate(0);\n}\n}", + "context_after": "class ConfigurationCredentialsPolicy implements HttpPipelinePolicy {\nprivate final ConfigurationClientCredentials credentials;\n/**\n* Creates an instance that is able to apply a {@link ConfigurationClientCredentials} credential to a request in the\n* pipeline.\n*\n* @param credentials the credential information to authenticate to Azure App Configuration service\n* @throws NullPointerException If {@code credential} is {@code null}.\n*/\npublic ConfigurationCredentialsPolicy(ConfigurationClientCredentials credentials) {\nObjects.requireNonNull(credentials, \"'credential' can not be a null value.\");\nthis.credentials = credentials;\n}\n/**\n* Adds the required headers to authenticate a request to Azure App Configuration service.\n*\n* @param context The request context\n* @param next The next HTTP pipeline policy to process the {@code context's} request after this policy\n* completes.\n* @return A {@link Mono} representing the HTTP response that will arrive asynchronously.\n*/\n@Override\n/**\n* Adds the required headers to authenticate a request to Azure App Configuration service.\n*\n* @param context The request context\n* @param next The next HTTP pipeline policy to process the {@code context's} request after this policy\n* completes.\n* @return A {@link HttpResponse} that will arrive synchronously.\n*/\n@Override\npublic HttpResponse processSync(HttpPipelineCallContext context, HttpPipelineNextSyncPolicy next) {\nMap headers = credentials\n.getAuthorizationHeaders(\ncontext.getHttpRequest().getUrl(),\ncontext.getHttpRequest().getHttpMethod().toString(),\ncontext.getHttpRequest().getBodyAsBinaryData());\nheaders.entrySet()\n.stream()\n.forEach(header -> context.getHttpRequest().setHeader(header.getKey(), header.getValue()));\nreturn next.processSync();\n}\n}" + }, + { + "comment": "I'd be tempted to suggest checking for the other type here.", + "method_body": "public TableResultInternal execute(Context ctx) {\nString cName =\ncatalogName == null ? ctx.getCatalogManager().getCurrentCatalog() : catalogName;\nString[] databases =\nctx.getCatalogManager().getCatalogOrThrowException(cName).listDatabases().stream()\n.sorted()\n.toArray(String[]::new);\nif (likeType != null) {\ndatabases =\nArrays.stream(databases)\n.filter(\nrow -> {\nif (likeType == LikeType.ILIKE) {\nreturn notLike\n!= SqlLikeUtils.ilike(row, likePattern, \"\\\\\");\n} else {\nreturn notLike\n!= SqlLikeUtils.like(row, likePattern, \"\\\\\");\n}\n})\n.sorted()\n.toArray(String[]::new);\n}\nreturn buildStringArrayResult(\"database name\", databases);\n}", + "target_code": "} else {", + "method_body_after": "public TableResultInternal execute(Context ctx) {\nString cName =\ncatalogName == null ? 
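The ConfigurationCredentialsPolicy review above hinges on evaluation timing: `Mono.just(expr)` evaluates `expr` at assembly time, before anything subscribes, which is why the reviewer suggested `Mono.fromSupplier`; the merged fix reached for `Mono.defer` instead. A minimal sketch of the difference; `computeAuthHeaders` is a hypothetical stand-in for the credential call, not the library's API:

```java
import java.util.Map;
import reactor.core.publisher.Mono;

public class LazyHeaderSketch {
    // Hypothetical stand-in for credentials.getAuthorizationHeaders(...).
    static Map<String, String> computeAuthHeaders() {
        System.out.println("computing headers"); // the side effect we want to delay
        return Map.of("Authorization", "HMAC-SHA256 ...");
    }

    public static void main(String[] args) {
        // Eager: computeAuthHeaders() has already run by this point,
        // even if nobody ever subscribes to `eager`.
        Mono<Map<String, String>> eager = Mono.just(computeAuthHeaders());

        // Lazy: the supplier runs once per subscription, not at assembly time.
        Mono<Map<String, String>> lazy = Mono.defer(() -> Mono.just(computeAuthHeaders()));

        lazy.subscribe(headers -> System.out.println("sent " + headers.keySet()));
    }
}
```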
ctx.getCatalogManager().getCurrentCatalog() : catalogName;\nStream databases =\nctx.getCatalogManager().getCatalogOrThrowException(cName).listDatabases().stream();\nif (likeType != null) {\ndatabases =\ndatabases.filter(\nrow -> {\nif (likeType == LikeType.ILIKE) {\nreturn notLike != SqlLikeUtils.ilike(row, likePattern, \"\\\\\");\n} else if (likeType == LikeType.LIKE) {\nreturn notLike != SqlLikeUtils.like(row, likePattern, \"\\\\\");\n}\nreturn false;\n});\n}\nreturn buildStringArrayResult(\"database name\", databases.sorted().toArray(String[]::new));\n}", + "context_before": "class ShowDatabasesOperation implements ShowOperation {\nprivate final String preposition;\nprivate final String catalogName;\nprivate final LikeType likeType;\nprivate final String likePattern;\nprivate final boolean notLike;\npublic ShowDatabasesOperation() {\nthis.preposition = null;\nthis.catalogName = null;\nthis.likeType = null;\nthis.likePattern = null;\nthis.notLike = false;\n}\npublic ShowDatabasesOperation(String likeType, String likePattern, boolean notLike) {\nthis.preposition = null;\nthis.catalogName = null;\nif (likeType != null) {\nthis.likeType = LikeType.of(likeType);\nthis.likePattern = requireNonNull(likePattern, \"Like pattern must not be null\");\nthis.notLike = notLike;\n} else {\nthis.likeType = null;\nthis.likePattern = null;\nthis.notLike = false;\n}\n}\npublic ShowDatabasesOperation(\nString preposition,\nString catalogName,\nString likeType,\nString likePattern,\nboolean notLike) {\nthis.preposition = preposition;\nthis.catalogName = catalogName;\nif (likeType != null) {\nthis.likeType = LikeType.of(likeType);\nthis.likePattern = requireNonNull(likePattern, \"Like pattern must not be null\");\nthis.notLike = notLike;\n} else {\nthis.likeType = null;\nthis.likePattern = null;\nthis.notLike = false;\n}\n}\n@Override\npublic String asSummaryString() {\nStringBuilder builder = new StringBuilder();\nbuilder.append(\"SHOW DATABASES\");\nif (preposition != null) {\nbuilder.append(String.format(\" %s %s\", preposition, catalogName));\n}\nif (likeType != null) {\nif (notLike) {\nbuilder.append(String.format(\" NOT %s '%s'\", likeType.name(), likePattern));\n} else {\nbuilder.append(String.format(\" %s '%s'\", likeType.name(), likePattern));\n}\n}\nreturn builder.toString();\n}\n@Override\n}", + "context_after": "class ShowDatabasesOperation implements ShowOperation {\nprivate final String catalogName;\nprivate final LikeType likeType;\nprivate final String likePattern;\nprivate final boolean notLike;\npublic ShowDatabasesOperation() {\nthis(null, null, null, false);\n}\npublic ShowDatabasesOperation(String likeType, String likePattern, boolean notLike) {\nthis(null, likeType, likePattern, notLike);\n}\npublic ShowDatabasesOperation(\nString catalogName, String likeType, String likePattern, boolean notLike) {\nthis.catalogName = catalogName;\nif (likeType != null) {\nthis.likeType = LikeType.of(likeType);\nthis.likePattern = requireNonNull(likePattern, \"Like pattern must not be null\");\nthis.notLike = notLike;\n} else {\nthis.likeType = null;\nthis.likePattern = null;\nthis.notLike = false;\n}\n}\n@Override\npublic String asSummaryString() {\nStringBuilder builder = new StringBuilder();\nbuilder.append(\"SHOW DATABASES\");\nif (catalogName != null) {\nbuilder.append(String.format(\" FROM/IN %s\", catalogName));\n}\nif (likeType != null) {\nif (notLike) {\nbuilder.append(String.format(\" NOT %s '%s'\", likeType.name(), likePattern));\n} else {\nbuilder.append(String.format(\" %s '%s'\", 
likeType.name(), likePattern));\n}\n}\nreturn builder.toString();\n}\n@Override\n}" + }, + { + "comment": "switch to null pointer exception check.", + "method_body": "private static List toLonLatStrings(GeoPoint point) {\nif (point == null) {\nreturn null;\n}\nreturn Arrays.asList(String.valueOf(point.getLongitude()), String.valueOf(point.getLatitude()));\n}", + "target_code": "}", + "method_body_after": "private static List toLonLatStrings(GeoPoint point) {\nObjects.requireNonNull(point);\nreturn Arrays.asList(String.valueOf(point.getLongitude()), String.valueOf(point.getLatitude()));\n}", + "context_before": "class with the given name and GeographyPoint value.\n*\n* @param name Name of the scoring parameter.\n* @param value Value of the scoring parameter.\n*/\npublic ScoringParameter(String name, GeoPoint value) {\nthis(name, toLonLatStrings(value));\n}", + "context_after": "class with the given name and GeographyPoint value.\n*\n* @param name Name of the scoring parameter.\n* @param value Value of the scoring parameter.\n*/\npublic ScoringParameter(String name, GeoPoint value) {\nthis(name, toLonLatStrings(value));\n}" + }, + { + "comment": "Space after this `if` is cleaner", + "method_body": "private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {\nswitch (peek(lookahead + 1).kind) {\ncase IDENTIFIER_TOKEN:\nSyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;\nswitch (tokenAfterIdentifier) {\ncase ON_KEYWORD:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\ncase QUESTION_MARK_TOKEN:\nreturn false;\ndefault:\nreturn false;\n}\ncase ON_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse listener declaration, given the qualifier.\n*
\n* \n* listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;\n* \n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @return Parsed node\n*/\nprivate STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.LISTENER_DECL);\nSTNode listenerKeyword = parseListenerKeyword();\nif (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode listenerDecl =\nparseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);\nendContext();\nreturn listenerDecl;\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse listener keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseListenerKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LISTENER_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.LISTENER_KEYWORD);\nreturn parseListenerKeyword();\n}\n}\n/**\n* Parse constant declaration, given the qualifier.\n*
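parseListenerKeyword() above (and parseConstantKeyword() later in this entry) follow the parser's recover-and-retry idiom: on a token mismatch, hand control to the recovery engine, then re-attempt the same production. A compressed sketch under simplified stand-in types, not the real Ballerina parser API:

```java
enum Kind { LISTENER_KEYWORD, IDENTIFIER, EOF }

interface TokenStream {
    Kind peekKind();
    String consume();            // return the token text and advance
    void recover(Kind expected); // e.g. insert a synthetic "missing" token
}

class KeywordParser {
    private final TokenStream tokens;

    KeywordParser(TokenStream tokens) {
        this.tokens = tokens;
    }

    String parseListenerKeyword() {
        if (tokens.peekKind() == Kind.LISTENER_KEYWORD) {
            return tokens.consume();
        }
        // Recovery mutates the stream, so the retry always makes progress.
        tokens.recover(Kind.LISTENER_KEYWORD);
        return parseListenerKeyword();
    }
}
```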
\n* module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @return Parsed node\n*/\nprivate STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.CONSTANT_DECL);\nSTNode constKeyword = parseConstantKeyword();\nreturn parseConstDecl(metadata, qualifier, constKeyword);\n}\n/**\n* Parse the components that follows after the const keyword of a constant declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase ANNOTATION_KEYWORD:\nendContext();\nreturn parseAnnotationDeclaration(metadata, qualifier, constKeyword);\ncase IDENTIFIER_TOKEN:\nSTNode constantDecl =\nparseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);\nendContext();\nreturn constantDecl;\ndefault:\nif (isTypeStartingToken(nextToken.kind)) {\nbreak;\n}\nrecover(peek(), ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword);\nreturn parseConstDecl(metadata, qualifier, constKeyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\nprivate STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nboolean isListener) {\nSTNode varNameOrTypeName = parseStatementStartIdentifier();\nreturn parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);\n}\n/**\n* Parse the component that follows the first identifier in a const decl. 
The identifier\n* can be either the type-name (a user defined type) or the var-name where the type-name\n* is not present.\n*\n* @param qualifier Qualifier that precedes the constant decl\n* @param keyword Keyword\n* @param typeOrVarName Identifier that follows the const-keyword\n* @return Parsed node\n*/\nprivate STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,\nSTNode typeOrVarName, boolean isListener) {\nif (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode type = typeOrVarName;\nSTNode variableName = parseVariableName();\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nSTNode type;\nSTNode variableName;\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\ntype = typeOrVarName;\nvariableName = parseVariableName();\nbreak;\ncase EQUAL_TOKEN:\nvariableName = ((STSimpleNameReferenceNode) typeOrVarName).name;\ntype = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword, typeOrVarName,\nisListener);\nreturn parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);\n}\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nprivate STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,\nSTNode type, STNode variableName) {\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nif (isListener) {\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse const keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseConstantKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONST_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.CONST_KEYWORD);\nreturn parseConstantKeyword();\n}\n}\n/**\n* Parse typeof expression.\n*
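The switch in parseConstantOrListenerDeclRhs() above is a one-token-lookahead disambiguation: after `const X`, an identifier next means X was the type, while `=` next means X was the variable and the type was omitted. The same decision reduced to a standalone sketch with hypothetical types:

```java
enum TokKind { IDENTIFIER, EQUAL, OTHER }

class ConstDeclShape {
    final String type;     // null when the type descriptor was omitted
    final String variable;

    ConstDeclShape(String type, String variable) {
        this.type = type;
        this.variable = variable;
    }

    // firstIdent is the identifier right after `const`;
    // next/followingIdent describe the token after it.
    static ConstDeclShape resolve(String firstIdent, TokKind next, String followingIdent) {
        switch (next) {
            case IDENTIFIER: // const T x = ...  -> firstIdent was the type
                return new ConstDeclShape(firstIdent, followingIdent);
            case EQUAL:      // const x = ...    -> type omitted
                return new ConstDeclShape(null, firstIdent);
            default:         // the real parser calls recover() here and retries
                throw new IllegalStateException("unexpected token: " + next);
        }
    }
}
```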
\n* \n* typeof-expr := typeof expression\n* \n*\n* @param isRhsExpr\n* @return Typeof expression node\n*/\nprivate STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode typeofKeyword = parseTypeofKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);\n}\n/**\n* Parse typeof-keyword.\n*\n* @return Typeof-keyword node\n*/\nprivate STNode parseTypeofKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPEOF_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.TYPEOF_KEYWORD);\nreturn parseTypeofKeyword();\n}\n}\n/**\n* Parse optional type descriptor given the type.\n*
\n* optional-type-descriptor := type-descriptor `?`\n*
\n*\n* @param typeDescriptorNode Preceding type descriptor\n* @return Parsed node\n*/\nprivate STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {\nstartContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);\nSTNode questionMarkToken = parseQuestionMark();\nendContext();\ntypeDescriptorNode = validateForUsageOfVar(typeDescriptorNode);\nreturn STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken);\n}\n/**\n* Parse unary expression.\n*
\n* \n* unary-expr := + expression | - expression | ~ expression | ! expression\n* \n*\n* @param isRhsExpr\n* @return Unary expression node\n*/\nprivate STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode unaryOperator = parseUnaryOperator();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);\n}\n/**\n* Parse unary operator.\n* UnaryOperator := + | - | ~ | !\n*\n* @return Parsed node\n*/\nprivate STNode parseUnaryOperator() {\nSTToken token = peek();\nif (isUnaryOperator(token.kind)) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.UNARY_OPERATOR);\nreturn parseUnaryOperator();\n}\n}\n/**\n* Check whether the given token kind is a unary operator.\n*\n* @param kind STToken kind\n* @return true if the token kind refers to a unary operator. false otherwise\n*/\nprivate boolean isUnaryOperator(SyntaxKind kind) {\nswitch (kind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse array type descriptor.\n*
\n* \n* array-type-descriptor := member-type-descriptor [ [ array-length ] ]\n* member-type-descriptor := type-descriptor\n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* inferred-array-length := *\n* \n*
\n*\n* @param memberTypeDesc\n* @return Parsed Node\n*/\nprivate STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {\nstartContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);\nSTNode openBracketToken = parseOpenBracket();\nSTNode arrayLengthNode = parseArrayLength();\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken);\n}\nprivate STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,\nSTNode closeBracketToken) {\nmemberTypeDesc = validateForUsageOfVar(memberTypeDesc);\nif (arrayLengthNode != null) {\nswitch (arrayLengthNode.kind) {\ncase ASTERISK_LITERAL:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ncase NUMERIC_LITERAL:\nSyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind;\nif (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||\nnumericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {\nbreak;\n}\ndefault:\nopenBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken,\narrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\narrayLengthNode = STNodeFactory.createEmptyNode();\n}\n}\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode,\ncloseBracketToken);\n}\n/**\n* Parse array length.\n*
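createArrayTypeDesc() above reduces to a validity predicate over the parsed length node: `*`, name references, and plain decimal/hex integer literals are accepted; anything else is attached to the open bracket as invalid-node minutiae. A standalone restatement of that check (enum names mirror the snippet, otherwise simplified):

```java
enum NodeKind { ASTERISK_LITERAL, SIMPLE_NAME_REFERENCE, QUALIFIED_NAME_REFERENCE, NUMERIC_LITERAL, OTHER }

enum TokenKind { DECIMAL_INTEGER_LITERAL_TOKEN, HEX_INTEGER_LITERAL_TOKEN, FLOAT_LITERAL_TOKEN }

class ArrayLengthCheck {
    static boolean isValidArrayLength(NodeKind nodeKind, TokenKind literalToken) {
        switch (nodeKind) {
            case ASTERISK_LITERAL:         // int[*]      (inferred length)
            case SIMPLE_NAME_REFERENCE:    // int[LEN]    (constant reference)
            case QUALIFIED_NAME_REFERENCE: // int[mod:LEN]
                return true;
            case NUMERIC_LITERAL:          // only integer literals qualify: int[4], int[0x4]
                return literalToken == TokenKind.DECIMAL_INTEGER_LITERAL_TOKEN
                        || literalToken == TokenKind.HEX_INTEGER_LITERAL_TOKEN;
            default:
                return false;              // e.g. a float literal length is flagged invalid
        }
    }
}
```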
\n* \n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* constant-reference-expr := variable-reference-expr\n* \n*
\n*\n* @return Parsed array length\n*/\nprivate STNode parseArrayLength() {\nSTToken token = peek();\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase ASTERISK_TOKEN:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);\ndefault:\nrecover(token, ParserRuleContext.ARRAY_LENGTH);\nreturn parseArrayLength();\n}\n}\n/**\n* Parse annotations.\n*

\n* Note: The annotation list is parsed here as zero or more annotations; parseAnnotations()\n* below handles the case where at least one annotation is required.\n*

\n* annots := annotation*\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalAnnotations() {\nstartContext(ParserRuleContext.ANNOTATIONS);\nList annotList = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (nextToken.kind == SyntaxKind.AT_TOKEN) {\nannotList.add(parseAnnotation());\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(annotList);\n}\n/**\n* Parse annotation list with at least one annotation.\n*\n* @return Annotation list\n*/\nprivate STNode parseAnnotations() {\nstartContext(ParserRuleContext.ANNOTATIONS);\nList annotList = new ArrayList<>();\nannotList.add(parseAnnotation());\nwhile (peek().kind == SyntaxKind.AT_TOKEN) {\nannotList.add(parseAnnotation());\n}\nendContext();\nreturn STNodeFactory.createNodeList(annotList);\n}\n/**\n* Parse annotation attachment.\n*
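parseOptionalAnnotations and parseAnnotations above differ only in whether the first annotation is demanded before the loop. A toy rendering of the two entry points, using strings that start with "@" as stand-ins for AT_TOKEN-led annotations; neither the types nor the recovery here are the real API.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Toy shapes of the two annotation-list entry points.
final class AnnotListSketch {
    // annots := annotation*  (zero or more)
    static List<String> parseOptionalAnnots(Deque<String> tokens) {
        List<String> annots = new ArrayList<>();
        while (!tokens.isEmpty() && tokens.peek().startsWith("@")) {
            annots.add(tokens.poll());
        }
        return annots;
    }

    // At least one annotation: parse one unconditionally, then loop.
    static List<String> parseAnnots(Deque<String> tokens) {
        List<String> annots = new ArrayList<>();
        annots.add(tokens.poll()); // real code recovers if this is not an annotation
        while (!tokens.isEmpty() && tokens.peek().startsWith("@")) {
            annots.add(tokens.poll());
        }
        return annots;
    }
}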

\n* annotation := @ annot-tag-reference annot-value\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotation() {\nSTNode atToken = parseAtToken();\nSTNode annotReference;\nif (isAnnotTagReferenceToken()) {\nannotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);\n} else {\nannotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n}\nSTNode annotValue;\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nannotValue = parseMappingConstructorExpr();\n} else {\nannotValue = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);\n}\nprivate boolean isAnnotTagReferenceToken() {\nSyntaxKind nextTokenKind = peek().kind;\nreturn nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN || isQualifiedIdentifierPredeclaredPrefix(nextTokenKind);\n}\n/**\n* Parse '@' token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAtToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.AT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.AT);\nreturn parseAtToken();\n}\n}\n/**\n* Parse metadata. Metadata consists of an optional doc string and\n* an annotation list.\n*

\n* metadata := [DocumentationString] annots\n*\n* @return Parsed node\n*/\nprivate STNode parseMetaData() {\nSTNode docString;\nSTNode annotations;\nswitch (peek().kind) {\ncase DOCUMENTATION_STRING:\ndocString = parseMarkdownDocumentation();\nannotations = parseOptionalAnnotations();\nbreak;\ncase AT_TOKEN:\ndocString = STNodeFactory.createEmptyNode();\nannotations = parseOptionalAnnotations();\nbreak;\ndefault:\nreturn STNodeFactory.createEmptyNode();\n}\nreturn createMetadata(docString, annotations);\n}\n/**\n* Create metadata node.\n*\n* @return A metadata node\n*/\nprivate STNode createMetadata(STNode docString, STNode annotations) {\nif (annotations == null && docString == null) {\nreturn STNodeFactory.createEmptyNode();\n} else {\nreturn STNodeFactory.createMetadataNode(docString, annotations);\n}\n}\n/**\n* Parse is expression.\n* \n* is-expr := expression is type-descriptor\n* \n*\n* @param lhsExpr Preceding expression of the is expression\n* @return Is expression node\n*/\nprivate STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode isKeyword = parseIsKeyword();\nSTNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr);\nreturn STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor);\n}\n/**\n* Parse is-keyword.\n*\n* @return Is-keyword node\n*/\nprivate STNode parseIsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IS_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.IS_KEYWORD);\nreturn parseIsKeyword();\n}\n}\n/**\n* Parse local type definition statement.\n* local-type-defn-stmt := [annots] type identifier type-descriptor ;\n*\n* @return Local type definition statement node\n*/\nprivate STNode parseLocalTypeDefinitionStatement(STNode annots) {\nstartContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);\nSTNode typeKeyword = parseTypeKeyword();\nSTNode typeName = parseTypeName();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,\nsemicolon);\n}\n/**\n* Parse a statement that consists only of an action or expression.\n*\n* @param annots Annotations\n* @return Statement node\n*/\nprivate STNode parseExpressionStatement(STNode annots) {\nstartContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode expression = parseActionOrExpressionInLhs(annots);\nreturn getExpressionAsStatement(expression);\n}\n/**\n* Parse statements that start with an expression.\n*\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExpr(STNode annots) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode expr = parseActionOrExpressionInLhs(annots);\nreturn parseStatementStartWithExprRhs(expr);\n}\n/**\n* Parse the component that follows the expression, at the beginning of a statement.\n*\n* @param expression Action or expression in LHS\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExprRhs(STNode expression) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase EQUAL_TOKEN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(expression);\ncase SEMICOLON_TOKEN:\nreturn getExpressionAsStatement(expression);\ncase IDENTIFIER_TOKEN:\ndefault:\nif (isCompoundBinaryOperator(nextToken.kind)) {\nreturn parseCompoundAssignmentStmtRhs(expression);\n}\nParserRuleContext context;\nif 
(isPossibleExpressionStatement(expression)) {\ncontext = ParserRuleContext.EXPR_STMT_RHS;\n} else {\ncontext = ParserRuleContext.STMT_START_WITH_EXPR_RHS;\n}\nrecover(peek(), context, expression);\nreturn parseStatementStartWithExprRhs(expression);\n}\n}\nprivate boolean isPossibleExpressionStatement(STNode expression) {\nswitch (expression.kind) {\ncase METHOD_CALL:\ncase FUNCTION_CALL:\ncase CHECK_EXPRESSION:\ncase REMOTE_METHOD_CALL_ACTION:\ncase CHECK_ACTION:\ncase BRACED_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode getExpressionAsStatement(STNode expression) {\nswitch (expression.kind) {\ncase METHOD_CALL:\ncase FUNCTION_CALL:\ncase CHECK_EXPRESSION:\nreturn parseCallStatement(expression);\ncase REMOTE_METHOD_CALL_ACTION:\ncase CHECK_ACTION:\ncase BRACED_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\nreturn parseActionStatement(expression);\ndefault:\nSTNode semicolon = parseSemicolon();\nendContext();\nSTNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,\nexpression, semicolon);\nexprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);\nreturn exprStmt;\n}\n}\nprivate STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {\nSTNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);\nSTNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;\nif (lengthExprs.isEmpty()) {\nreturn createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),\nindexedExpr.closeBracket);\n}\nSTNode lengthExpr = lengthExprs.get(0);\nswitch (lengthExpr.kind) {\ncase ASTERISK_LITERAL:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ncase NUMERIC_LITERAL:\nSyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;\nif (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||\ninnerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {\nbreak;\n}\ndefault:\nSTNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(\nindexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\nindexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);\nlengthExpr = STNodeFactory.createEmptyNode();\n}\nreturn createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);\n}\n/**\n*

\n* Parse call statement, given the call expression.\n*

\n* \n* call-stmt := call-expr ;\n*
\n* call-expr := function-call-expr | method-call-expr | checking-keyword call-expr\n*
\n*\n* @param expression Call expression associated with the call statement\n* @return Call statement node\n*/\nprivate STNode parseCallStatement(STNode expression) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);\n}\nprivate STNode parseActionStatement(STNode action) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);\n}\n/**\n* Parse remote method call action, given the starting expression.\n*
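getExpressionAsStatement above encodes which stand-alone expressions may form a statement: call expressions become CALL_STATEMENT, actions become ACTION_STATEMENT, and anything else is kept in the tree as an INVALID_EXPRESSION_STATEMENT carrying a diagnostic rather than being discarded. A compact sketch of that classification, with hypothetical enums in place of SyntaxKind:

// Hypothetical classification mirroring getExpressionAsStatement.
final class ExprStatementRule {
    enum Expr { FUNCTION_CALL, METHOD_CALL, CHECK_EXPRESSION, START_ACTION, WAIT_ACTION, FIELD_ACCESS }
    enum Stmt { CALL_STATEMENT, ACTION_STATEMENT, INVALID_EXPRESSION_STATEMENT }

    static Stmt classify(Expr kind) {
        switch (kind) {
            case FUNCTION_CALL:
            case METHOD_CALL:
            case CHECK_EXPRESSION:
                return Stmt.CALL_STATEMENT;   // call-stmt := call-expr ;
            case START_ACTION:
            case WAIT_ACTION:
                return Stmt.ACTION_STATEMENT; // an action followed by ;
            default:
                // e.g. a bare field access: kept with a diagnostic, not dropped
                return Stmt.INVALID_EXPRESSION_STATEMENT;
        }
    }
}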

\n* \n* remote-method-call-action := expression -> method-name ( arg-list )\n*
\n* async-send-action := expression -> peer-worker ;\n*
\n*\n* @param isRhsExpr Is this an RHS action\n* @param expression LHS expression\n* @return Remote method call action or async send action node\n*/\nprivate STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {\nSTNode rightArrow = parseRightArrow();\nreturn parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n}\nprivate STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {\nSTNode name;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase DEFAULT_KEYWORD:\nSTNode defaultKeyword = parseDefaultKeyword();\nname = STNodeFactory.createSimpleNameReferenceNode(defaultKeyword);\nreturn parseAsyncSendAction(expression, rightArrow, name);\ncase IDENTIFIER_TOKEN:\nname = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());\nbreak;\ncase CONTINUE_KEYWORD:\ncase COMMIT_KEYWORD:\nname = getKeywordAsSimpleNameRef();\nbreak;\ndefault:\nSTToken token = peek();\nrecover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow);\nreturn parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n}\nreturn parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);\n}\nprivate STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseRemoteMethodCallAction(expression, rightArrow, name);\ncase SEMICOLON_TOKEN:\nreturn parseAsyncSendAction(expression, rightArrow, name);\ndefault:\nrecover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);\nreturn parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);\n}\n}\n/**\n* Parse default keyword.\n*\n* @return default keyword node\n*/\nprivate STNode parseDefaultKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DEFAULT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.DEFAULT_KEYWORD);\nreturn parseDefaultKeyword();\n}\n}\nprivate STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {\nreturn STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);\n}\nprivate STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {\nSTNode openParenToken = parseArgListOpenParenthesis();\nSTNode arguments = parseArgsList();\nSTNode closeParenToken = parseArgListCloseParenthesis();\nreturn STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,\ncloseParenToken);\n}\n/**\n* Parse right arrow (->) token.\n*\n* @return Parsed node\n*/\nprivate STNode parseRightArrow() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.RIGHT_ARROW);\nreturn parseRightArrow();\n}\n}\n/**\n* Parse parameterized type descriptor.\n* parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter\n*\n* @return Parsed node\n*/\nprivate STNode parseParameterizedTypeDescriptor(STNode parameterizedTypeKeyword) {\nSTNode typeParameter = parseTypeParameter();\nreturn STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, typeParameter);\n}\n/**\n* Parse > token.\n*\n* @return Parsed node\n*/\nprivate STNode parseGTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.GT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.GT);\nreturn parseGTToken();\n}\n}\n/**\n* 
Parse < token.\n*\n* @return Parsed node\n*/\nprivate STNode parseLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.LT);\nreturn parseLTToken();\n}\n}\n/**\n* Parse nil literal. Here the nil literal refers only to ( ).\n*\n* @return Parsed node\n*/\nprivate STNode parseNilLiteral() {\nstartContext(ParserRuleContext.NIL_LITERAL);\nSTNode openParenthesisToken = parseOpenParenthesis();\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);\n}\n/**\n* Parse annotation declaration, given the qualifier.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation declaration\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {\nstartContext(ParserRuleContext.ANNOTATION_DECL);\nSTNode annotationKeyword = parseAnnotationKeyword();\nSTNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\nendContext();\nreturn annotDecl;\n}\n/**\n* Parse annotation keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ANNOTATION_KEYWORD);\nreturn parseAnnotationKeyword();\n}\n}\n/**\n* Parse the components that follow the annotation keyword of an annotation declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation decl\n* @param constKeyword Const keyword\n* @param annotationKeyword Annotation keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);\ndefault:\nif (isTypeStartingToken(nextToken.kind)) {\nbreak;\n}\nrecover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword,\nannotationKeyword);\nreturn parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\n/**\n* Parse annotation tag.\n*
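The two actions sharing the `expression -> name` prefix are separated by a single token of lookahead in parseRemoteCallOrAsyncSendEnd above: an open parenthesis commits to a remote method call, a semicolon to an async send, and anything else triggers recovery. A toy version of that decision, with hypothetical enums rather than SyntaxKind:

// Toy one-token-lookahead decision mirroring parseRemoteCallOrAsyncSendEnd.
final class RemoteCallOrSendSketch {
    enum Token { OPEN_PAREN, SEMICOLON, OTHER }
    enum Action { REMOTE_METHOD_CALL, ASYNC_SEND }

    static Action decide(Token next) {
        switch (next) {
            case OPEN_PAREN:
                return Action.REMOTE_METHOD_CALL; // expression -> name ( arg-list )
            case SEMICOLON:
                return Action.ASYNC_SEND;         // expression -> peer-worker ;
            default:
                // the real parser calls recover() here and retries the decision
                throw new IllegalStateException("recovery required before: " + next);
        }
    }
}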

\n* annot-tag := identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationTag() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nrecover(peek(), ParserRuleContext.ANNOTATION_TAG);\nreturn parseAnnotationTag();\n}\n}\nprivate STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);\nif (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag, annotTag);\n}\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {\nSTNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,\nParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nSTNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;\nreturn parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);\n}\n/**\n* Parse the component that follows the first identifier in an annotation decl. The identifier\n* can be either the type-name (a user-defined type) or the annot-tag, in which case the type-name\n* is not present.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation decl\n* @param constKeyword Const keyword\n* @param annotationKeyword Annotation keyword\n* @param typeDescOrAnnotTag Identifier that follows the annotation-keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDescOrAnnotTag) {\nSTToken nextToken = peek();\nSTNode typeDesc;\nSTNode annotTag;\nswitch (nextToken.kind) {\ncase IDENTIFIER_TOKEN:\ntypeDesc = typeDescOrAnnotTag;\nannotTag = parseAnnotationTag();\nbreak;\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ntypeDesc = STNodeFactory.createEmptyNode();\nannotTag = typeDescOrAnnotTag;\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag);\nreturn parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);\n}\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nprivate STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDesc, STNode annotTag) {\nSTNode onKeyword;\nSTNode attachPoints;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase SEMICOLON_TOKEN:\nonKeyword = STNodeFactory.createEmptyNode();\nattachPoints = STNodeFactory.createEmptyNodeList();\nbreak;\ncase ON_KEYWORD:\nonKeyword = parseOnKeyword();\nattachPoints = parseAnnotationAttachPoints();\nonKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword,\nannotationKeyword, typeDesc, annotTag);\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, 
typeDesc,\nannotTag);\n}\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDesc, annotTag, onKeyword, attachPoints, semicolonToken);\n}\n/**\n* Parse annotation attach points.\n*

\n* \n* annot-attach-points := annot-attach-point (, annot-attach-point)*\n*

\n* annot-attach-point := dual-attach-point | source-only-attach-point\n*

\n* dual-attach-point := [source] dual-attach-point-ident\n*

\n* dual-attach-point-ident :=\n* type\n* | class\n* | [object|service remote] function\n* | parameter\n* | return\n* | service\n* | [object|record] field\n*

\n* source-only-attach-point := source source-only-attach-point-ident\n*

\n* source-only-attach-point-ident :=\n* annotation\n* | external\n* | var\n* | const\n* | listener\n* | worker\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoints() {\nstartContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);\nList attachPoints = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndAnnotAttachPointList(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode attachPoint = parseAnnotationAttachPoint();\nattachPoints.add(attachPoint);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndAnnotAttachPointList(nextToken.kind)) {\nleadingComma = parseAttachPointEnd();\nif (leadingComma == null) {\nbreak;\n}\nattachPoints.add(leadingComma);\nattachPoint = parseAnnotationAttachPoint();\nif (attachPoint == null) {\nattachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nattachPoints.add(attachPoint);\nbreak;\n}\nattachPoints.add(attachPoint);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(attachPoints);\n}\n/**\n* Parse annotation attach point end.\n*\n* @return Parsed node\n*/\nprivate STNode parseAttachPointEnd() {\nswitch (peek().kind) {\ncase SEMICOLON_TOKEN:\nreturn null;\ncase COMMA_TOKEN:\nreturn consume();\ndefault:\nrecover(peek(), ParserRuleContext.ATTACH_POINT_END);\nreturn parseAttachPointEnd();\n}\n}\nprivate boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse annotation attach point.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoint() {\nswitch (peek().kind) {\ncase EOF_TOKEN:\nreturn null;\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\ncase SOURCE_KEYWORD:\nSTNode sourceKeyword = parseSourceKeyword();\nreturn parseAttachPointIdent(sourceKeyword);\ncase OBJECT_KEYWORD:\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ncase RECORD_KEYWORD:\ncase CLASS_KEYWORD:\nsourceKeyword = STNodeFactory.createEmptyNode();\nSTNode firstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nrecover(peek(), ParserRuleContext.ATTACH_POINT);\nreturn parseAnnotationAttachPoint();\n}\n}\n/**\n* Parse source keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseSourceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SOURCE_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.SOURCE_KEYWORD);\nreturn parseSourceKeyword();\n}\n}\n/**\n* Parse attach point ident, given the preceding source keyword.\n*
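parseAnnotationAttachPoints above keeps the comma separators in the same list as the attach points; the syntax tree stores separators as first-class nodes so the source text can be reproduced exactly, and a trailing comma yields a synthetic missing element plus a diagnostic instead of a crash. A toy separated-list parser showing both behaviours, with plain strings standing in for nodes:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Toy separated-list parsing in the style of parseAnnotationAttachPoints:
// elements and their comma separators share one list.
final class SeparatedListSketch {
    static List<String> parse(Deque<String> tokens) {
        List<String> items = new ArrayList<>();
        if (tokens.isEmpty() || ";".equals(tokens.peek())) {
            return items;                    // empty list, e.g. "on ;"
        }
        items.add(tokens.poll());            // first element
        while (",".equals(tokens.peek())) {
            items.add(tokens.poll());        // keep the separator itself
            if (tokens.isEmpty() || ";".equals(tokens.peek())) {
                items.add("<missing>");      // missing-token node + diagnostic
                break;
            }
            items.add(tokens.poll());        // next element
        }
        return items;
    }
}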

\n* \n* source-only-attach-point-ident := annotation | external | var | const | listener | worker\n*

\n* dual-attach-point-ident := type | class | [object|service remote] function | parameter\n* | return | service | [object|record] field\n*
\n*\n* @param sourceKeyword Source keyword\n* @return Parsed node\n*/\nprivate STNode parseAttachPointIdent(STNode sourceKeyword) {\nswitch (peek().kind) {\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\nSTNode firstIdent = consume();\nSTNode identList = STNodeFactory.createNodeList(firstIdent);\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\ncase OBJECT_KEYWORD:\ncase RESOURCE_KEYWORD:\ncase RECORD_KEYWORD:\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ncase CLASS_KEYWORD:\nfirstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nrecover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);\nreturn parseAttachPointIdent(sourceKeyword);\n}\n}\n/**\n* Parse dual-attach-point ident.\n*\n* @param sourceKeyword Source keyword\n* @param firstIdent first part of the dual attach-point\n* @return Parsed node\n*/\nprivate STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {\nSTNode secondIdent;\nswitch (firstIdent.kind) {\ncase OBJECT_KEYWORD:\nsecondIdent = parseIdentAfterObjectIdent();\nbreak;\ncase RESOURCE_KEYWORD:\nsecondIdent = parseFunctionIdent();\nbreak;\ncase RECORD_KEYWORD:\nsecondIdent = parseFieldIdent();\nbreak;\ncase SERVICE_KEYWORD:\nreturn parseServiceAttachPoint(sourceKeyword, firstIdent);\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase FIELD_KEYWORD:\ncase CLASS_KEYWORD:\ndefault:\nSTNode identList = STNodeFactory.createNodeList(firstIdent);\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n}\nSTNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n}\n/**\n* Parse remote ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseRemoteIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.REMOTE_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.REMOTE_IDENT);\nreturn parseRemoteIdent();\n}\n}\n/**\n* Parse service attach point.\n* service-attach-point := service | service remote function\n*\n* @return Parsed node\n*/\nprivate STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {\nSTNode identList;\nSTToken token = peek();\nswitch (token.kind) {\ncase REMOTE_KEYWORD:\nSTNode secondIdent = parseRemoteIdent();\nSTNode thirdIdent = parseFunctionIdent();\nidentList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\ncase COMMA_TOKEN:\ncase SEMICOLON_TOKEN:\nidentList = STNodeFactory.createNodeList(firstIdent);\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\ndefault:\nrecover(token, ParserRuleContext.SERVICE_IDENT_RHS);\nreturn parseServiceAttachPoint(sourceKeyword, firstIdent);\n}\n}\n/**\n* Parse the idents that are supported after object-ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseIdentAfterObjectIdent() {\nSTToken token = peek();\nswitch (token.kind) {\ncase FUNCTION_KEYWORD:\ncase FIELD_KEYWORD:\nreturn consume();\ndefault:\nrecover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);\nreturn parseIdentAfterObjectIdent();\n}\n}\n/**\n* Parse function ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionIdent() 
{\nSTToken token = peek();\nif (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FUNCTION_IDENT);\nreturn parseFunctionIdent();\n}\n}\n/**\n* Parse field ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FIELD_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FIELD_IDENT);\nreturn parseFieldIdent();\n}\n}\n/**\n* Parse XML namespace declaration.\n*

\n* xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;\n*
\n* xml-namespace-uri := simple-const-expr\n*
\n* xml-namespace-prefix := identifier\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {\nstartContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);\nSTNode xmlnsKeyword = parseXMLNSKeyword();\nSTNode namespaceUri = parseSimpleConstExpr();\nwhile (!isValidXMLNameSpaceURI(namespaceUri)) {\nxmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,\nDiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);\nnamespaceUri = parseSimpleConstExpr();\n}\nSTNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\nendContext();\nreturn xmlnsDecl;\n}\n/**\n* Parse xmlns keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNSKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XMLNS_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.XMLNS_KEYWORD);\nreturn parseXMLNSKeyword();\n}\n}\nprivate boolean isValidXMLNameSpaceURI(STNode expr) {\nswitch (expr.kind) {\ncase STRING_LITERAL:\ncase QUALIFIED_NAME_REFERENCE:\ncase SIMPLE_NAME_REFERENCE:\nreturn true;\ncase IDENTIFIER_TOKEN:\ndefault:\nreturn false;\n}\n}\nprivate STNode parseSimpleConstExpr() {\nstartContext(ParserRuleContext.CONSTANT_EXPRESSION);\nSTNode expr = parseSimpleConstExprInternal();\nendContext();\nreturn expr;\n}\n/**\n* Parse simple constant expr.\n*\n* @return Parsed node\n*/\nprivate STNode parseSimpleConstExprInternal() {\nswitch (peek().kind) {\ncase STRING_LITERAL_TOKEN:\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nreturn parseBasicLiteral();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn parseSignedIntOrFloat();\ncase OPEN_PAREN_TOKEN:\nreturn parseNilLiteral();\ndefault:\nSTToken token = peek();\nrecover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);\nreturn parseSimpleConstExprInternal();\n}\n}\n/**\n* Parse the portion after the namespace-uri of an XML declaration.\n*\n* @param xmlnsKeyword XMLNS keyword\n* @param namespaceUri Namespace URI\n* @return Parsed node\n*/\nprivate STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {\nSTNode asKeyword = STNodeFactory.createEmptyNode();\nSTNode namespacePrefix = STNodeFactory.createEmptyNode();\nswitch (peek().kind) {\ncase AS_KEYWORD:\nasKeyword = parseAsKeyword();\nnamespacePrefix = parseNamespacePrefix();\nbreak;\ncase SEMICOLON_TOKEN:\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri, isModuleVar);\nreturn parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\n}\nSTNode semicolon = parseSemicolon();\nif (isModuleVar) {\nreturn STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,\nnamespacePrefix, semicolon);\n}\nreturn STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,\nsemicolon);\n}\n/**\n* Parse namespace prefix.\n*\n* @return Parsed node\n*/\nprivate STNode parseNamespacePrefix() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nrecover(peek(), ParserRuleContext.NAMESPACE_PREFIX);\nreturn parseNamespacePrefix();\n}\n}\n/**\n* Parse named worker declaration.\n*
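The while loop in parseXMLNamespaceDeclaration above is worth noting: an unacceptable namespace URI is not fatal. The bad expression is folded into the xmlns keyword as invalid-node trivia with a diagnostic, and the URI is parsed again until an acceptable form appears. A toy rendering that accepts only string literals (the real check also allows simple and qualified name references):

import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Toy retry loop mirroring parseXMLNamespaceDeclaration's URI handling.
final class XmlnsUriSketch {
    static String parseNamespaceUri(Deque<String> tokens, List<String> diagnostics) {
        String uri = tokens.poll();
        // Simplified validity check: string literals only.
        while (uri != null && !uri.startsWith("\"")) {
            diagnostics.add("invalid xml namespace uri: " + uri);
            uri = tokens.poll();             // try the next constant expression
        }
        return uri;                          // null once the input runs out
    }
}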

\n* named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }\n* \n*\n* @param annots Annotations attached to the worker decl\n* @param qualifiers Preceding transactional keyword in a list\n* @return Parsed node\n*/\nprivate STNode parseNamedWorkerDeclaration(STNode annots, List qualifiers) {\nstartContext(ParserRuleContext.NAMED_WORKER_DECL);\nSTNode transactionalKeyword = getTransactionalKeyword(qualifiers);\nSTNode workerKeyword = parseWorkerKeyword();\nSTNode workerName = parseWorkerName();\nSTNode returnTypeDesc = parseReturnTypeDescriptor();\nSTNode workerBody = parseBlockNode();\nendContext();\nreturn STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword, workerName,\nreturnTypeDesc, workerBody);\n}\nprivate STNode getTransactionalKeyword(List qualifierList) {\nList validatedList = new ArrayList<>();\nfor (int i = 0; i < qualifierList.size(); i++) {\nSTNode qualifier = qualifierList.get(i);\nint nextIndex = i + 1;\nif (isSyntaxKindInList(validatedList, qualifier.kind)) {\nupdateLastNodeInListWithInvalidNode(validatedList, qualifier,\nDiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());\n} else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nvalidatedList.add(qualifier);\n} else if (qualifierList.size() == nextIndex) {\naddInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,\n((STToken) qualifier).text());\n} else {\nupdateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,\nDiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());\n}\n}\nSTNode transactionalKeyword;\nif (validatedList.isEmpty()) {\ntransactionalKeyword = STNodeFactory.createEmptyNode();\n} else {\ntransactionalKeyword = validatedList.get(0);\n}\nreturn transactionalKeyword;\n}\nprivate STNode parseReturnTypeDescriptor() {\nSTToken token = peek();\nif (token.kind != SyntaxKind.RETURNS_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode returnsKeyword = consume();\nSTNode annot = parseOptionalAnnotations();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\nreturn STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n}\n/**\n* Parse worker keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {\nreturn consume();\n} else {\nrecover(peek(), ParserRuleContext.WORKER_KEYWORD);\nreturn parseWorkerKeyword();\n}\n}\n/**\n* Parse worker name.\n*

\n* worker-name := identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerName() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nrecover(peek(), ParserRuleContext.WORKER_NAME);\nreturn parseWorkerName();\n}\n}\n/**\n* Parse lock statement.\n* lock-stmt := lock block-stmt [on-fail-clause]\n*\n* @return Lock statement\n*/\nprivate STNode parseLockStatement() {\nstartContext(ParserRuleContext.LOCK_STMT);\nSTNode lockKeyword = parseLockKeyword();\nSTNode blockStatement = parseBlockNode();\nendContext();\nSTNode onFailClause = parseOptionalOnFailClause();\nreturn STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);\n}\n/**\n* Parse lock-keyword.\n*\n* @return lock-keyword node\n*/\nprivate STNode parseLockKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LOCK_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.LOCK_KEYWORD);\nreturn parseLockKeyword();\n}\n}\n/**\n* Parse union type descriptor.\n* union-type-descriptor := type-descriptor | type-descriptor\n*\n* @param leftTypeDesc Type desc in the LHS of the union type desc.\n* @param context Current context.\n* @param isTypedBindingPattern Whether this is a typed binding pattern\n* @return parsed union type desc node\n*/\nprivate STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeToken = consume();\nSTNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false);\nreturn createUnionTypeDesc(leftTypeDesc, pipeToken, rightTypeDesc);\n}\nprivate STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {\nleftTypeDesc = validateForUsageOfVar(leftTypeDesc);\nrightTypeDesc = validateForUsageOfVar(rightTypeDesc);\nreturn STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);\n}\n/**\n* Parse pipe token.\n*\n* @return parsed pipe token node\n*/\nprivate STNode parsePipeToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.PIPE);\nreturn parsePipeToken();\n}\n}\nprivate boolean isTypeStartingToken(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase IDENTIFIER_TOKEN:\ncase SERVICE_KEYWORD:\ncase RECORD_KEYWORD:\ncase OBJECT_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CLIENT_KEYWORD:\ncase OPEN_PAREN_TOKEN:\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TABLE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase DISTINCT_KEYWORD:\ncase ISOLATED_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn true;\ndefault:\nif (isSingletonTypeDescStart(nodeKind, true)) {\nreturn true;\n}\nreturn isSimpleType(nodeKind);\n}\n}\nstatic boolean isSimpleType(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase INT_KEYWORD:\ncase FLOAT_KEYWORD:\ncase DECIMAL_KEYWORD:\ncase BOOLEAN_KEYWORD:\ncase STRING_KEYWORD:\ncase BYTE_KEYWORD:\ncase XML_KEYWORD:\ncase JSON_KEYWORD:\ncase HANDLE_KEYWORD:\ncase ANY_KEYWORD:\ncase ANYDATA_KEYWORD:\ncase NEVER_KEYWORD:\ncase VAR_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase READONLY_KEYWORD:\ncase DISTINCT_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\nstatic boolean isPredeclaredPrefix(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase BOOLEAN_KEYWORD:\ncase DECIMAL_KEYWORD:\ncase ERROR_KEYWORD:\ncase FLOAT_KEYWORD:\ncase FUTURE_KEYWORD:\ncase INT_KEYWORD:\ncase MAP_KEYWORD:\ncase 
OBJECT_KEYWORD:\ncase STREAM_KEYWORD:\ncase STRING_KEYWORD:\ncase TABLE_KEYWORD:\ncase TRANSACTION_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase XML_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {\nreturn isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN;\n}\nprivate SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {\nswitch (typeKeyword) {\ncase INT_KEYWORD:\nreturn SyntaxKind.INT_TYPE_DESC;\ncase FLOAT_KEYWORD:\nreturn SyntaxKind.FLOAT_TYPE_DESC;\ncase DECIMAL_KEYWORD:\nreturn SyntaxKind.DECIMAL_TYPE_DESC;\ncase BOOLEAN_KEYWORD:\nreturn SyntaxKind.BOOLEAN_TYPE_DESC;\ncase STRING_KEYWORD:\nreturn SyntaxKind.STRING_TYPE_DESC;\ncase BYTE_KEYWORD:\nreturn SyntaxKind.BYTE_TYPE_DESC;\ncase XML_KEYWORD:\nreturn SyntaxKind.XML_TYPE_DESC;\ncase JSON_KEYWORD:\nreturn SyntaxKind.JSON_TYPE_DESC;\ncase HANDLE_KEYWORD:\nreturn SyntaxKind.HANDLE_TYPE_DESC;\ncase ANY_KEYWORD:\nreturn SyntaxKind.ANY_TYPE_DESC;\ncase ANYDATA_KEYWORD:\nreturn SyntaxKind.ANYDATA_TYPE_DESC;\ncase READONLY_KEYWORD:\nreturn SyntaxKind.READONLY_TYPE_DESC;\ncase NEVER_KEYWORD:\nreturn SyntaxKind.NEVER_TYPE_DESC;\ncase VAR_KEYWORD:\nreturn SyntaxKind.VAR_TYPE_DESC;\ncase ERROR_KEYWORD:\nreturn SyntaxKind.ERROR_TYPE_DESC;\ndefault:\nreturn SyntaxKind.TYPE_REFERENCE;\n}\n}\n/**\n* Parse fork-keyword.\n*\n* @return Fork-keyword node\n*/\nprivate STNode parseForkKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FORK_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FORK_KEYWORD);\nreturn parseForkKeyword();\n}\n}\n/**\n* Parse fork statement.\n* fork-stmt := fork { named-worker-decl+ }\n*\n* @return Fork statement\n*/\nprivate STNode parseForkStatement() {\nstartContext(ParserRuleContext.FORK_STMT);\nSTNode forkKeyword = parseForkKeyword();\nSTNode openBrace = parseOpenBrace();\nArrayList workers = new ArrayList<>();\nwhile (!isEndOfStatements()) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nif (stmt.kind == SyntaxKind.LOCAL_TYPE_DEFINITION_STATEMENT) {\naddInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_LOCAL_TYPE_DEFINITION_NOT_ALLOWED);\ncontinue;\n}\nswitch (stmt.kind) {\ncase NAMED_WORKER_DECLARATION:\nworkers.add(stmt);\nbreak;\ndefault:\nif (workers.isEmpty()) {\nopenBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n} else {\nupdateLastNodeInListWithInvalidNode(workers, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n}\n}\n}\nSTNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);\nSTNode closeBrace = parseCloseBrace();\nendContext();\nSTNode forkStmt =\nSTNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);\nif (isNodeListEmpty(namedWorkerDeclarations)) {\nreturn SyntaxErrors.addDiagnostic(forkStmt,\nDiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);\n}\nreturn forkStmt;\n}\n/**\n* Parse trap expression.\n*
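parseForkStatement above is permissive while reading and strict afterwards: it accepts any statement inside the braces, then demotes everything that is not a named worker declaration to invalid trivia, and finally diagnoses an empty worker list on the completed fork node. A toy version of that validate-after-parse shape, with strings in place of statement nodes:

import java.util.ArrayList;
import java.util.List;

// Toy post-parse validation in the style of parseForkStatement.
final class ForkBodySketch {
    static List<String> keepOnlyWorkers(List<String> stmts, List<String> diagnostics) {
        List<String> workers = new ArrayList<>();
        for (String stmt : stmts) {
            if (stmt.startsWith("worker ")) {
                workers.add(stmt);
            } else {
                // real code attaches the statement as invalid minutiae
                diagnostics.add("only named workers allowed here: " + stmt);
            }
        }
        if (workers.isEmpty()) {
            diagnostics.add("missing named worker declaration in fork statement");
        }
        return workers;
    }
}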

\n* \n* trap-expr := trap expression\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Whether this is a RHS expression or not\n* @return Trap expression node\n*/\nprivate STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode trapKeyword = parseTrapKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);\n}\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);\n}\n/**\n* Parse trap-keyword.\n*\n* @return Trap-keyword node\n*/\nprivate STNode parseTrapKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRAP_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.TRAP_KEYWORD);\nreturn parseTrapKeyword();\n}\n}\n/**\n* Parse list constructor expression.\n*

\n* \n* list-constructor-expr := [ [ expr-list ] ]\n*
\n* expr-list := expression (, expression)*\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseListConstructorExpr() {\nstartContext(ParserRuleContext.LIST_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode expressions = parseOptionalExpressionsList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);\n}\n/**\n* Parse optional expression list.\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalExpressionsList() {\nList expressions = new ArrayList<>();\nif (isEndOfListConstructor(peek().kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode expr = parseExpression();\nexpressions.add(expr);\nreturn parseOptionalExpressionsList(expressions);\n}\nprivate STNode parseOptionalExpressionsList(List expressions) {\nSTNode listConstructorMemberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nlistConstructorMemberEnd = parseListConstructorMemberEnd();\nif (listConstructorMemberEnd == null) {\nbreak;\n}\nexpressions.add(listConstructorMemberEnd);\nSTNode expr = parseExpression();\nexpressions.add(expr);\n}\nreturn STNodeFactory.createNodeList(expressions);\n}\nprivate boolean isEndOfListConstructor(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseListConstructorMemberEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);\nreturn parseListConstructorMemberEnd();\n}\n}\n/**\n* Parse foreach statement.\n* foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]\n*\n* @return foreach statement\n*/\nprivate STNode parseForEachStatement() {\nstartContext(ParserRuleContext.FOREACH_STMT);\nSTNode forEachKeyword = parseForEachKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);\nSTNode inKeyword = parseInKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nSTNode blockStatement = parseBlockNode();\nendContext();\nSTNode onFailClause = parseOptionalOnFailClause();\nreturn STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,\nblockStatement, onFailClause);\n}\n/**\n* Parse foreach-keyword.\n*\n* @return ForEach-keyword node\n*/\nprivate STNode parseForEachKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FOREACH_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FOREACH_KEYWORD);\nreturn parseForEachKeyword();\n}\n}\n/**\n* Parse in-keyword.\n*\n* @return In-keyword node\n*/\nprivate STNode parseInKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IN_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.IN_KEYWORD);\nreturn parseInKeyword();\n}\n}\n/**\n* Parse type cast expression.\n*

\n* \n* type-cast-expr := < type-cast-param > expression\n*
\n* type-cast-param := [annots] type-descriptor | annots\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nstartContext(ParserRuleContext.TYPE_CAST);\nSTNode ltToken = parseLTToken();\nSTNode typeCastParam = parseTypeCastParam();\nSTNode gtToken = parseGTToken();\nendContext();\nSTNode expression =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nreturn STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);\n}\nprivate STNode parseTypeCastParam() {\nSTNode annot;\nSTNode type;\nSTToken token = peek();\nswitch (token.kind) {\ncase AT_TOKEN:\nannot = parseOptionalAnnotations();\ntoken = peek();\nif (isTypeStartingToken(token.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n} else {\ntype = STNodeFactory.createEmptyNode();\n}\nbreak;\ndefault:\nannot = STNodeFactory.createEmptyNode();\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nbreak;\n}\nreturn STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);\n}\n/**\n* Parse table constructor expression.\n*
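type-cast-param allows three shapes: `<@a T>`, `<T>`, and `<@a>`; parseTypeCastParam above resolves them by branching on a leading @ and then checking whether a type-starting token follows. Reduced to a table-like sketch (hypothetical enum, not part of the parser):

// Hypothetical classification of type-cast-param shapes:
// type-cast-param := [annots] type-descriptor | annots
final class CastParamSketch {
    enum Shape { ANNOTS_AND_TYPE, TYPE_ONLY, ANNOTS_ONLY }

    static Shape classify(boolean startsWithAt, boolean typeFollows) {
        if (startsWithAt) {
            return typeFollows ? Shape.ANNOTS_AND_TYPE : Shape.ANNOTS_ONLY;
        }
        return Shape.TYPE_ONLY; // without annotations, a type is mandatory
    }
}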

\n* \n* table-constructor-expr-rhs := [ [row-list] ]\n* \n*\n* @param tableKeyword tableKeyword that precedes this rhs\n* @param keySpecifier keySpecifier that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {\nswitchContext(ParserRuleContext.TABLE_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode rowList = parseRowList();\nSTNode closeBracket = parseCloseBracket();\nreturn STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,\ncloseBracket);\n}\n/**\n* Parse table-keyword.\n*\n* @return Table-keyword node\n*/\nprivate STNode parseTableKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TABLE_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.TABLE_KEYWORD);\nreturn parseTableKeyword();\n}\n}\n/**\n* Parse table rows.\n*

\n* row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseRowList() {\nSTToken nextToken = peek();\nif (isEndOfTableRowList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList mappings = new ArrayList<>();\nSTNode mapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\nSTNode rowEnd;\nwhile (!isEndOfTableRowList(nextToken.kind)) {\nrowEnd = parseTableRowEnd();\nif (rowEnd == null) {\nbreak;\n}\nmappings.add(rowEnd);\nmapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(mappings);\n}\nprivate boolean isEndOfTableRowList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ncase COMMA_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn false;\ndefault:\nreturn isEndOfMappingConstructor(tokenKind);\n}\n}\nprivate STNode parseTableRowEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.TABLE_ROW_END);\nreturn parseTableRowEnd();\n}\n}\n/**\n* Parse key specifier.\n*

\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier() {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode keyKeyword = parseKeyKeyword();\nSTNode openParen = parseOpenParenthesis();\nSTNode fieldNames = parseFieldNames();\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);\n}\n/**\n* Parse key-keyword.\n*\n* @return Key-keyword node\n*/\nprivate STNode parseKeyKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.KEY_KEYWORD) {\nreturn consume();\n}\nif (isKeyKeyword(token)) {\nreturn getKeyKeyword(consume());\n}\nrecover(token, ParserRuleContext.KEY_KEYWORD);\nreturn parseKeyKeyword();\n}\nstatic boolean isKeyKeyword(STToken token) {\nreturn token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());\n}\nprivate STNode getKeyKeyword(STToken token) {\nreturn STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),\ntoken.diagnostics());\n}\n/**\n* Parse field names.\n*
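parseKeyKeyword above treats `key` as a contextual keyword: the lexer may deliver it as an ordinary identifier, so an identifier whose text is exactly "key" is re-branded into a KEY_KEYWORD token, carrying its minutiae and diagnostics along. A toy sketch of that promotion (toy token type, not STToken):

// Toy contextual-keyword promotion mirroring isKeyKeyword/getKeyKeyword.
final class ContextualKeywordSketch {
    static final class Token {
        final String kind;
        final String text;

        Token(String kind, String text) {
            this.kind = kind;
            this.text = text;
        }
    }

    static Token promoteKey(Token token) {
        if ("IDENTIFIER".equals(token.kind) && "key".equals(token.text)) {
            // real code also carries over leading/trailing minutiae + diagnostics
            return new Token("KEY_KEYWORD", token.text);
        }
        return token; // unchanged; caller may still need to recover
    }
}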

\n* field-name-list := [ field-name (, field-name)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldNames() {\nSTToken nextToken = peek();\nif (isEndOfFieldNamesList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList fieldNames = new ArrayList<>();\nSTNode fieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfFieldNamesList(nextToken.kind)) {\nleadingComma = parseComma();\nfieldNames.add(leadingComma);\nfieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(fieldNames);\n}\nprivate boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase IDENTIFIER_TOKEN:\nreturn false;\ndefault:\nreturn true;\n}\n}\n/**\n* Parse error type descriptor.\n*

\n* error-type-descriptor := error [error-type-param]\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*

\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeDescriptor() {\nSTNode errorKeywordToken = parseErrorKeyword();\nreturn parseErrorTypeDescriptor(errorKeywordToken);\n}\nprivate STNode parseErrorTypeDescriptor(STNode errorKeywordToken) {\nSTNode errorTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nerrorTypeParamsNode = parseErrorTypeParamsNode();\n} else {\nerrorTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);\n}\n/**\n* Parse error type param node.\n*

\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*

\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nSTNode parameter;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nparameter = consume();\n} else {\nparameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n}\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken);\n}\n/**\n* Parse error-keyword.\n*\n* @return Parsed error-keyword node\n*/\nprivate STNode parseErrorKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ERROR_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ERROR_KEYWORD);\nreturn parseErrorKeyword();\n}\n}\n/**\n* Parse typedesc type descriptor.\n* typedesc-type-descriptor := typedesc type-parameter\n*\n* @return Parsed typedesc type node\n*/\nprivate STNode parseTypedescTypeDescriptor(STNode typedescKeywordToken) {\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse stream type descriptor.\n*

\n* stream-type-descriptor := stream [stream-type-parameters]\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*

\n*\n* @return Parsed stream type descriptor node\n*/\nprivate STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {\nSTNode streamTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nstreamTypeParamsNode = parseStreamTypeParamsNode();\n} else {\nstreamTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);\n}\n/**\n* Parse xml type descriptor.\n* xml-type-descriptor := xml type-parameter\n*\n* @return Parsed xml type descriptor node\n*/\nprivate STNode parseXmlTypeDescriptor(STNode xmlKeywordToken) {\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse stream type params node.\n*

\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*

\n*\n* @return Parsed stream type params node\n*/\nprivate STNode parseStreamTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nstartContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\nSTNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\nSTNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\nendContext();\nreturn streamTypedesc;\n}\nprivate STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {\nSTNode commaToken, rightTypeDescNode, gtToken;\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncommaToken = parseComma();\nrightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\nbreak;\ncase GT_TOKEN:\ncommaToken = STNodeFactory.createEmptyNode();\nrightTypeDescNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);\nreturn parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\n}\ngtToken = parseGTToken();\nreturn STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,\ngtToken);\n}\n/**\n* Parse stream-keyword.\n*\n* @return Parsed stream-keyword node\n*/\nprivate STNode parseStreamKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STREAM_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.STREAM_KEYWORD);\nreturn parseStreamKeyword();\n}\n}\n/**\n* Parse let expression.\n*

\n* \n* let-expr := let let-var-decl [, let-var-decl]* in expression\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetExpression(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);\nSTNode inKeyword = parseInKeyword();\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);\n}\n/**\n* Parse let-keyword.\n*\n* @return Let-keyword node\n*/\nprivate STNode parseLetKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LET_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.LET_KEYWORD);\nreturn parseLetKeyword();\n}\n}\n/**\n* Parse let variable declarations.\n*

\n* let-var-decl-list := let-var-decl [, let-var-decl]*\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {\nstartContext(context);\nList varDecls = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfLetVarDeclarations(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode varDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfLetVarDeclarations(nextToken.kind)) {\nleadingComma = parseComma();\nvarDecls.add(leadingComma);\nvarDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(varDecls);\n}\nprivate boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase AT_TOKEN:\nreturn false;\ncase IN_KEYWORD:\nreturn true;\ndefault:\nreturn !isTypeStartingToken(tokenKind);\n}\n}\n/**\n* Parse let variable declaration.\n*

\n* let-var-decl := [annots] typed-binding-pattern = expression\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDecl(boolean isRhsExpr) {\nSTNode annot = parseOptionalAnnotations();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);\nSTNode assign = parseAssignOp();\nSTNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);\nreturn STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);\n}\n/**\n* Parse raw backtick string template expression.\n*

\n* BacktickString := `expression`\n*\n* @return Template expression node\n*/\nprivate STNode parseTemplateExpression() {\nSTNode type = STNodeFactory.createEmptyNode();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\nprivate STNode parseTemplateContent() {\nList items = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nitems.add(contentItem);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\nprivate boolean isEndOfBacktickContent(SyntaxKind kind) {\nswitch (kind) {\ncase EOF_TOKEN:\ncase BACKTICK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTemplateItem() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn parseInterpolation();\n}\nreturn consume();\n}\n/**\n* Parse string template expression.\n*
\n* string-template-expr := string ` expression `\n*\n* @return String template expression node\n*/\nprivate STNode parseStringTemplateExpression() {\nSTNode type = parseStringKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\n/**\n* Parse string keyword.\n*\n* @return string keyword node\n*/\nprivate STNode parseStringKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STRING_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.STRING_KEYWORD);\nreturn parseStringKeyword();\n}\n}\n/**\n* Parse XML template expression.\n*
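\n* Illustrative example (not from the source): xml \`<name>${firstName}</name>\`, where\n* \`firstName\` is a placeholder variable.\n*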
\n* xml-template-expr := xml BacktickString\n*\n* @return XML template expression\n*/\nprivate STNode parseXMLTemplateExpression() {\nSTNode xmlKeyword = parseXMLKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContentAsXML();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,\nstartingBackTick, content, endingBackTick);\n}\n/**\n* Parse xml keyword.\n*\n* @return xml keyword node\n*/\nprivate STNode parseXMLKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XML_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.XML_KEYWORD);\nreturn parseXMLKeyword();\n}\n}\n/**\n* Parse the content of the template string as XML. This method first read the\n* input in the same way as the raw-backtick-template (BacktickString). Then\n* it parses the content as XML.\n*\n* @return XML node\n*/\nprivate STNode parseTemplateContentAsXML() {\nArrayDeque expressions = new ArrayDeque<>();\nStringBuilder xmlStringBuilder = new StringBuilder();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nif (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {\nxmlStringBuilder.append(((STToken) contentItem).text());\n} else {\nxmlStringBuilder.append(\"${}\");\nexpressions.add(contentItem);\n}\nnextToken = peek();\n}\nCharReader charReader = CharReader.from(xmlStringBuilder.toString());\nAbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));\nXMLParser xmlParser = new XMLParser(tokenReader, expressions);\nreturn xmlParser.parse();\n}\n/**\n* Parse interpolation of a back-tick string.\n*
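\n* Illustrative example (not from the source): in xml \`<a>${title}</a>\`, \`${title}\` is the\n* interpolation; parseTemplateContentAsXML above re-lexes the surrounding text as XML after\n* substituting an empty \`${}\` placeholder for it.\n*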
\n* \n* interpolation := ${ expression }\n* \n*\n* @return Interpolation node\n*/\nprivate STNode parseInterpolation() {\nstartContext(ParserRuleContext.INTERPOLATION);\nSTNode interpolStart = parseInterpolationStart();\nSTNode expr = parseExpression();\nwhile (true) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.EOF_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nbreak;\n} else {\nnextToken = consume();\nexpr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,\nDiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());\n}\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);\n}\n/**\n* Parse interpolation start token.\n*
\n* interpolation-start := ${\n*\n* @return Interpolation start token\n*/\nprivate STNode parseInterpolationStart() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);\nreturn parseInterpolationStart();\n}\n}\n/**\n* Parse back-tick token.\n*\n* @return Back-tick token\n*/\nprivate STNode parseBacktickToken(ParserRuleContext ctx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ctx);\nreturn parseBacktickToken(ctx);\n}\n}\n/**\n* Parse table type descriptor.\n*
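\n* Illustrative examples (not from the source): \`table<map<int>>\` has no key constraint;\n* \`table<Employee> key(id)\` adds a key specifier (\`Employee\` is a placeholder type).\n*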
\n* table-type-descriptor := table row-type-parameter [key-constraint]\n* row-type-parameter := type-parameter\n* key-constraint := key-specifier | key-type-constraint\n* key-specifier := key ( [ field-name (, field-name)* ] )\n* key-type-constraint := key type-parameter\n*
\n*\n* @return Parsed table type desc node.\n*/\nprivate STNode parseTableTypeDescriptor(STNode tableKeywordToken) {\nSTNode rowTypeParameterNode = parseRowTypeParameter();\nSTNode keyConstraintNode;\nSTToken nextToken = peek();\nif (isKeyKeyword(nextToken)) {\nSTNode keyKeywordToken = getKeyKeyword(consume());\nkeyConstraintNode = parseKeyConstraint(keyKeywordToken);\n} else {\nkeyConstraintNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);\n}\n/**\n* Parse row type parameter node.\n*
\n* row-type-parameter := type-parameter\n*
\n*\n* @return Parsed node.\n*/\nprivate STNode parseRowTypeParameter() {\nstartContext(ParserRuleContext.ROW_TYPE_PARAM);\nSTNode rowTypeParameterNode = parseTypeParameter();\nendContext();\nreturn rowTypeParameterNode;\n}\n/**\n* Parse type parameter node.\n*
\n* type-parameter := < type-descriptor >\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeParameter() {\nSTNode ltToken = parseLTToken();\nSTNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);\n}\n/**\n* Parse key constraint.\n*
\n* key-constraint := key-specifier | key-type-constraint\n*
\n*\n* @return Parsed node.\n*/\nprivate STNode parseKeyConstraint(STNode keyKeywordToken) {\nswitch (peek().kind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseKeySpecifier(keyKeywordToken);\ncase LT_TOKEN:\nreturn parseKeyTypeConstraint(keyKeywordToken);\ndefault:\nrecover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);\nreturn parseKeyConstraint(keyKeywordToken);\n}\n}\n/**\n* Parse key specifier given parsed key keyword token.\n*
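\n* Illustrative examples (not from the source): \`key(id, name)\` is a key specifier, while\n* \`key<string>\` is a key type constraint.\n*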
\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier(STNode keyKeywordToken) {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode openParenToken = parseOpenParenthesis();\nSTNode fieldNamesNode = parseFieldNames();\nSTNode closeParenToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);\n}\n/**\n* Parse key type constraint.\n*
\n* key-type-constraint := key type-parameter\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseKeyTypeConstraint(STNode keyKeywordToken) {\nSTNode typeParameterNode = parseTypeParameter();\nreturn STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);\n}\n/**\n* Parse function type descriptor.\n*
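\n* Illustrative examples (not from the source): \`isolated function (int) returns int\`, or\n* the signature-less form \`function\` handled above when no open parenthesis follows.\n*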
\n* function-type-descriptor := [isolated] function function-signature\n*\n* @param qualifiers Preceding type descriptor qualifiers\n* @return Function type descriptor node\n*/\nprivate STNode parseFunctionTypeDesc(List qualifiers) {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC);\nSTNode qualifierList;\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode signature;\nswitch (peek().kind) {\ncase OPEN_PAREN_TOKEN:\nsignature = parseFuncSignature(true);\nqualifierList = createFuncTypeQualNodeList(qualifiers, true);\nbreak;\ndefault:\nsignature = STNodeFactory.createEmptyNode();\nqualifierList = createFuncTypeQualNodeList(qualifiers, false);\nbreak;\n}\nendContext();\nreturn STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);\n}\nprivate STNode createFuncTypeQualNodeList(List qualifierList, boolean hasFuncSignature) {\nList validatedList = new ArrayList<>();\nfor (int i = 0; i < qualifierList.size(); i++) {\nSTNode qualifier = qualifierList.get(i);\nint nextIndex = i + 1;\nif (isSyntaxKindInList(validatedList, qualifier.kind)) {\nupdateLastNodeInListWithInvalidNode(validatedList, qualifier,\nDiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());\n} else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {\nvalidatedList.add(qualifier);\n} else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {\nvalidatedList.add(qualifier);\n} else if (qualifierList.size() == nextIndex) {\naddInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,\n((STToken) qualifier).text());\n} else {\nupdateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,\nDiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());\n}\n}\nreturn STNodeFactory.createNodeList(validatedList);\n}\nprivate boolean isRegularFuncQual(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase ISOLATED_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse explicit anonymous function expression.\n*
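\n* Illustrative example (not from the source):\n* \`isolated function (int n) returns int { return n * 2; }\`\n*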
\n* explicit-anonymous-function-expr :=\n* [annots] (isolated| transactional) function function-signature anon-func-body\n*\n* @param annots Annotations.\n* @param qualifiers Function qualifiers\n* @param isRhsExpr Is expression in rhs context\n* @return Anonymous function expression node\n*/\nprivate STNode parseExplicitFunctionExpression(STNode annots, List qualifiers, boolean isRhsExpr) {\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nSTNode qualifierList = createFuncTypeQualNodeList(qualifiers, true);\nSTNode funcKeyword = parseFunctionKeyword();\nSTNode funcSignature = parseFuncSignature(false);\nSTNode funcBody = parseAnonFuncBody(isRhsExpr);\nreturn STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,\nfuncSignature, funcBody);\n}\n/**\n* Parse anonymous function body.\n*
\n* anon-func-body := block-function-body | expr-function-body\n*\n* @param isRhsExpr Is expression in rhs context\n* @return Anon function body node\n*/\nprivate STNode parseAnonFuncBody(boolean isRhsExpr) {\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\ncase EOF_TOKEN:\nSTNode body = parseFunctionBodyBlock(true);\nendContext();\nreturn body;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nendContext();\nreturn parseExpressionFuncBody(true, isRhsExpr);\ndefault:\nrecover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);\nreturn parseAnonFuncBody(isRhsExpr);\n}\n}\n/**\n* Parse expression function body.\n*
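\n* Illustrative example (not from the source): the body \`=> x + 1\` in\n* \`var inc = function (int x) returns int => x + 1;\`\n*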
\n* expr-function-body := => expression\n*\n* @param isAnon Is anonymous function.\n* @param isRhsExpr Is expression in rhs context\n* @return Expression function body node\n*/\nprivate STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTNode semiColon;\nif (isAnon) {\nsemiColon = STNodeFactory.createEmptyNode();\n} else {\nsemiColon = parseSemicolon();\n}\nreturn STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);\n}\n/**\n* Parse '=>' token.\n*\n* @return Double right arrow token\n*/\nprivate STNode parseDoubleRightArrow() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.EXPR_FUNC_BODY_START);\nreturn parseDoubleRightArrow();\n}\n}\nprivate STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {\nswitch (params.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase INFER_PARAM_LIST:\nbreak;\ncase BRACED_EXPRESSION:\nparams = getAnonFuncParam((STBracedExpressionNode) params);\nbreak;\ndefault:\nSTToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nsyntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,\nDiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);\nparams = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);\n}\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);\n}\n/**\n* Create a new anon-func-param node from a braced expression.\n*\n* @param params Braced expression\n* @return Anon-func param node\n*/\nprivate STNode getAnonFuncParam(STBracedExpressionNode params) {\nList paramList = new ArrayList<>();\nparamList.add(params.expression);\nreturn STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen,\nSTNodeFactory.createNodeList(paramList), params.closeParen);\n}\n/**\n* Parse implicit anon function expression.\n*\n* @param openParen Open parenthesis token\n* @param firstParam First parameter\n* @param isRhsExpr Is expression in rhs context\n* @return Implicit anon function expression node\n*/\nprivate STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {\nList paramList = new ArrayList<>();\nparamList.add(firstParam);\nSTToken nextToken = peek();\nSTNode paramEnd;\nSTNode param;\nwhile (!isEndOfAnonFuncParametersList(nextToken.kind)) {\nparamEnd = parseImplicitAnonFuncParamEnd();\nif (paramEnd == null) {\nbreak;\n}\nparamList.add(paramEnd);\nparam = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);\nparam = STNodeFactory.createSimpleNameReferenceNode(param);\nparamList.add(param);\nnextToken = peek();\n}\nSTNode params = STNodeFactory.createNodeList(paramList);\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nSTNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nreturn parseImplicitAnonFunc(inferedParams, isRhsExpr);\n}\nprivate STNode parseImplicitAnonFuncParamEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);\nreturn 
parseImplicitAnonFuncParamEnd();\n}\n}\nprivate boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase DO_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse tuple type descriptor.\n*
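\n* Illustrative examples (not from the source): \`[int, string]\` and, with a rest\n* descriptor, \`[int, string...]\`.\n*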
\n* tuple-type-descriptor := [ tuple-member-type-descriptors ]\n*
\n* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]\n* | [ tuple-rest-descriptor ]\n*
\n* tuple-rest-descriptor := type-descriptor ...\n*
\n*\n* @return\n*/\nprivate STNode parseTupleTypeDesc() {\nSTNode openBracket = parseOpenBracket();\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode memberTypeDesc = parseTupleMemberTypeDescList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nopenBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,\nDiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);\n}\n/**\n* Parse tuple member type descriptors.\n*\n* @return Parsed node\n*/\nprivate STNode parseTupleMemberTypeDescList() {\nList typeDescList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfTypeList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nreturn parseTupleTypeMembers(typeDesc, typeDescList);\n}\nprivate STNode parseTupleTypeMembers(STNode typeDesc, List typeDescList) {\nSTToken nextToken;\nnextToken = peek();\nSTNode tupleMemberRhs;\nwhile (!isEndOfTypeList(nextToken.kind)) {\ntupleMemberRhs = parseTupleMemberRhs();\nif (tupleMemberRhs == null) {\nbreak;\n}\nif (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) {\ntypeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);\nbreak;\n}\ntypeDescList.add(typeDesc);\ntypeDescList.add(tupleMemberRhs);\ntypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nnextToken = peek();\n}\ntypeDescList.add(typeDesc);\nreturn STNodeFactory.createNodeList(typeDescList);\n}\nprivate STNode parseTupleMemberRhs() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ncase ELLIPSIS_TOKEN:\nreturn parseEllipsis();\ndefault:\nrecover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);\nreturn parseTupleMemberRhs();\n}\n}\nprivate boolean isEndOfTypeList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse table constructor or query expression.\n*
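\n* Illustrative examples (not from the source): the table constructor\n* \`table key(id) [{id: 1}, {id: 2}]\` versus the query expression\n* \`from var e in list select e\` (\`list\` is a placeholder).\n*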
\n* \n* table-constructor-or-query-expr := table-constructor-expr | query-expr\n*
\n* table-constructor-expr := table [key-specifier] [ [row-list] ]\n*
\n* query-expr := [query-construct-type] query-pipeline select-clause\n* [query-construct-type] query-pipeline select-clause on-conflict-clause?\n*
\n* query-construct-type := table key-specifier | stream\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorOrQuery(boolean isRhsExpr) {\nstartContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);\nSTNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr);\nendContext();\nreturn tableOrQueryExpr;\n}\nprivate STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) {\nSTNode queryConstructType;\nswitch (peek().kind) {\ncase FROM_KEYWORD:\nqueryConstructType = STNodeFactory.createEmptyNode();\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase STREAM_KEYWORD:\nqueryConstructType = parseQueryConstructType(parseStreamKeyword(), null);\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase TABLE_KEYWORD:\nSTNode tableKeyword = parseTableKeyword();\nreturn parseTableConstructorOrQuery(tableKeyword, isRhsExpr);\ndefault:\nrecover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);\nreturn parseTableConstructorOrQueryInternal(isRhsExpr);\n}\n}\nprivate STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {\nSTNode keySpecifier;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase OPEN_BRACKET_TOKEN:\nkeySpecifier = STNodeFactory.createEmptyNode();\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ncase KEY_KEYWORD:\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\ncase IDENTIFIER_TOKEN:\nif (isKeyKeyword(nextToken)) {\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n}\nbreak;\ndefault:\nbreak;\n}\nrecover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);\nreturn parseTableConstructorOrQuery(tableKeyword, isRhsExpr);\n}\nprivate STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {\nswitch (peek().kind) {\ncase FROM_KEYWORD:\nreturn parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ndefault:\nrecover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword, keySpecifier,\nisRhsExpr);\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n}\n}\n/**\n* Parse query construct type.\n*
\n* query-construct-type := table key-specifier | stream\n*\n* @return Parsed node\n*/\nprivate STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {\nreturn STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);\n}\n/**\n* Parse query action or expression.\n*
\n* \n* query-expr-rhs := query-pipeline select-clause\n* query-pipeline select-clause on-conflict-clause?\n*
\n* query-pipeline := from-clause intermediate-clause*\n*
\n*\n* @param queryConstructType queryConstructType that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {\nswitchContext(ParserRuleContext.QUERY_EXPRESSION);\nSTNode fromClause = parseFromClause(isRhsExpr);\nList clauses = new ArrayList<>();\nSTNode intermediateClause;\nSTNode selectClause = null;\nwhile (!isEndOfIntermediateClause(peek().kind)) {\nintermediateClause = parseIntermediateClause(isRhsExpr);\nif (intermediateClause == null) {\nbreak;\n}\nif (selectClause != null) {\nselectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,\nDiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);\ncontinue;\n}\nif (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {\nselectClause = intermediateClause;\n} else {\nclauses.add(intermediateClause);\n}\n}\nif (peek().kind == SyntaxKind.DO_KEYWORD) {\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nreturn parseQueryAction(queryConstructType, queryPipeline, selectClause, isRhsExpr);\n}\nif (selectClause == null) {\nSTNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);\nSTNode expr = STNodeFactory\n.createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\nselectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);\nif (clauses.isEmpty()) {\nfromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\n} else {\nint lastIndex = clauses.size() - 1;\nSTNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),\nDiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\nclauses.set(lastIndex, intClauseWithDiagnostic);\n}\n}\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nSTNode onConflictClause = parseOnConflictClause(isRhsExpr);\nreturn STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,\nonConflictClause);\n}\n/**\n* Parse an intermediate clause.\n*
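\n* Illustrative example (not from the source) with several intermediate clauses:\n* \`from int i in nums where i % 2 == 0 let int j = i * 10 limit 3 select j\`\n* (\`nums\` is a placeholder).\n*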
\n* \n* intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseIntermediateClause(boolean isRhsExpr) {\nswitch (peek().kind) {\ncase FROM_KEYWORD:\nreturn parseFromClause(isRhsExpr);\ncase WHERE_KEYWORD:\nreturn parseWhereClause(isRhsExpr);\ncase LET_KEYWORD:\nreturn parseLetClause(isRhsExpr);\ncase SELECT_KEYWORD:\nreturn parseSelectClause(isRhsExpr);\ncase JOIN_KEYWORD:\ncase OUTER_KEYWORD:\nreturn parseJoinClause(isRhsExpr);\ncase ORDER_KEYWORD:\ncase BY_KEYWORD:\ncase ASCENDING_KEYWORD:\ncase DESCENDING_KEYWORD:\nreturn parseOrderByClause(isRhsExpr);\ncase LIMIT_KEYWORD:\nreturn parseLimitClause(isRhsExpr);\ncase DO_KEYWORD:\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ncase CONFLICT_KEYWORD:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);\nreturn parseIntermediateClause(isRhsExpr);\n}\n}\n/**\n* Parse join-keyword.\n*\n* @return Join-keyword node\n*/\nprivate STNode parseJoinKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.JOIN_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.JOIN_KEYWORD);\nreturn parseJoinKeyword();\n}\n}\n/**\n* Parse equals keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseEqualsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.EQUALS_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.EQUALS_KEYWORD);\nreturn parseEqualsKeyword();\n}\n}\nprivate boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase EOF_TOKEN:\ncase RESOURCE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase DOCUMENTATION_STRING:\ncase PRIVATE_KEYWORD:\ncase RETURNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase TYPE_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase DO_KEYWORD:\nreturn true;\ndefault:\nreturn isValidExprRhsStart(tokenKind, SyntaxKind.NONE);\n}\n}\n/**\n* Parse from clause.\n*
\n* from-clause := from typed-binding-pattern in expression\n*\n* @return Parsed node\n*/\nprivate STNode parseFromClause(boolean isRhsExpr) {\nSTNode fromKeyword = parseFromKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);\n}\n/**\n* Parse from-keyword.\n*\n* @return From-keyword node\n*/\nprivate STNode parseFromKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FROM_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FROM_KEYWORD);\nreturn parseFromKeyword();\n}\n}\n/**\n* Parse where clause.\n*
\n* where-clause := where expression\n*\n* @return Parsed node\n*/\nprivate STNode parseWhereClause(boolean isRhsExpr) {\nSTNode whereKeyword = parseWhereKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createWhereClauseNode(whereKeyword, expression);\n}\n/**\n* Parse where-keyword.\n*\n* @return Where-keyword node\n*/\nprivate STNode parseWhereKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WHERE_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.WHERE_KEYWORD);\nreturn parseWhereKeyword();\n}\n}\n/**\n* Parse limit-keyword.\n*\n* @return limit-keyword node\n*/\nprivate STNode parseLimitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LIMIT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.LIMIT_KEYWORD);\nreturn parseLimitKeyword();\n}\n}\n/**\n* Parse let clause.\n*
\n* let-clause := let let-var-decl [, let-var-decl]* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetClause(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nreturn STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);\n}\n/**\n* Parse order-keyword.\n*\n* @return Order-keyword node\n*/\nprivate STNode parseOrderKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ORDER_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ORDER_KEYWORD);\nreturn parseOrderKeyword();\n}\n}\n/**\n* Parse by-keyword.\n*\n* @return By-keyword node\n*/\nprivate STNode parseByKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BY_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.BY_KEYWORD);\nreturn parseByKeyword();\n}\n}\n/**\n* Parse order by clause.\n*
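\n* Illustrative example (not from the source):\n* \`from var e in employees order by e.salary descending select e.name\`\n* (\`employees\` is a placeholder).\n*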
\n* order-by-clause := order by order-key-list\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseOrderByClause(boolean isRhsExpr) {\nSTNode orderKeyword = parseOrderKeyword();\nSTNode byKeyword = parseByKeyword();\nSTNode orderKeys = parseOrderKeyList(isRhsExpr);\nbyKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);\nreturn STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);\n}\n/**\n* Parse order key.\n*
\n* order-key-list := order-key [, order-key]*\n*\n* @return Parsed node\n*/\nprivate STNode parseOrderKeyList(boolean isRhsExpr) {\nstartContext(ParserRuleContext.ORDER_KEY_LIST);\nList orderKeys = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfOrderKeys(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode orderKey = parseOrderKey(isRhsExpr);\norderKeys.add(orderKey);\nnextToken = peek();\nSTNode orderKeyListMemberEnd;\nwhile (!isEndOfOrderKeys(nextToken.kind)) {\norderKeyListMemberEnd = parseOrderKeyListMemberEnd();\nif (orderKeyListMemberEnd == null) {\nbreak;\n}\norderKeys.add(orderKeyListMemberEnd);\norderKey = parseOrderKey(isRhsExpr);\norderKeys.add(orderKey);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(orderKeys);\n}\nprivate boolean isEndOfOrderKeys(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase ASCENDING_KEYWORD:\ncase DESCENDING_KEYWORD:\nreturn false;\ncase SEMICOLON_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn isQueryClauseStartToken(tokenKind);\n}\n}\nprivate boolean isQueryClauseStartToken(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase SELECT_KEYWORD:\ncase LET_KEYWORD:\ncase WHERE_KEYWORD:\ncase OUTER_KEYWORD:\ncase JOIN_KEYWORD:\ncase ORDER_KEYWORD:\ncase DO_KEYWORD:\ncase FROM_KEYWORD:\ncase LIMIT_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseOrderKeyListMemberEnd() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase EOF_TOKEN:\nreturn null;\ndefault:\nif (isQueryClauseStartToken(nextToken.kind)) {\nreturn null;\n}\nrecover(peek(), ParserRuleContext.ORDER_KEY_LIST_END);\nreturn parseOrderKeyListMemberEnd();\n}\n}\n/**\n* Parse order key.\n*
\n* order-key := expression (ascending | descending)?\n*\n* @return Parsed node\n*/\nprivate STNode parseOrderKey(boolean isRhsExpr) {\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTNode orderDirection;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase ASCENDING_KEYWORD:\ncase DESCENDING_KEYWORD:\norderDirection = consume();\nbreak;\ndefault:\norderDirection = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createOrderKeyNode(expression, orderDirection);\n}\n/**\n* Parse select clause.\n*
\n* select-clause := select expression\n*\n* @return Parsed node\n*/\nprivate STNode parseSelectClause(boolean isRhsExpr) {\nstartContext(ParserRuleContext.SELECT_CLAUSE);\nSTNode selectKeyword = parseSelectKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nendContext();\nreturn STNodeFactory.createSelectClauseNode(selectKeyword, expression);\n}\n/**\n* Parse select-keyword.\n*\n* @return Select-keyword node\n*/\nprivate STNode parseSelectKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SELECT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.SELECT_KEYWORD);\nreturn parseSelectKeyword();\n}\n}\n/**\n* Parse on-conflict clause.\n*
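\n* Illustrative example (not from the source):\n* \`table key(id) from var i in [1, 2] select {id: i} on conflict error(\"duplicate key\")\`\n*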
\n* \n* onConflictClause := on conflict expression\n* \n*\n* @return On conflict clause node\n*/\nprivate STNode parseOnConflictClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nstartContext(ParserRuleContext.ON_CONFLICT_CLAUSE);\nSTNode onKeyword = parseOnKeyword();\nSTNode conflictKeyword = parseConflictKeyword();\nendContext();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);\n}\n/**\n* Parse conflict keyword.\n*\n* @return Conflict keyword node\n*/\nprivate STNode parseConflictKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONFLICT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.CONFLICT_KEYWORD);\nreturn parseConflictKeyword();\n}\n}\n/**\n* Parse limit clause.\n*
\n* limitClause := limit expression\n*\n* @return Limit expression node\n*/\nprivate STNode parseLimitClause(boolean isRhsExpr) {\nSTNode limitKeyword = parseLimitKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLimitClauseNode(limitKeyword, expr);\n}\n/**\n* Parse join clause.\n*
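\n* Illustrative example (not from the source):\n* \`from var o in orders join var c in customers on o.custId equals c.id select o.id\`\n* (\`orders\` and \`customers\` are placeholders).\n*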
\n* \n* join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause\n*
\n* join-var-decl := join (typeName | var) bindingPattern\n*
\n* outer-join-var-decl := outer join var binding-pattern\n*
\n*\n* @return Join clause\n*/\nprivate STNode parseJoinClause(boolean isRhsExpr) {\nstartContext(ParserRuleContext.JOIN_CLAUSE);\nSTNode outerKeyword;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {\nouterKeyword = consume();\n} else {\nouterKeyword = STNodeFactory.createEmptyNode();\n}\nSTNode joinKeyword = parseJoinKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nendContext();\nSTNode onCondition = parseOnClause(isRhsExpr);\nreturn STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression,\nonCondition);\n}\n/**\n* Parse on clause.\n*
\n* on clause := `on` expression `equals` expression\n*\n* @return On clause node\n*/\nprivate STNode parseOnClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (isQueryClauseStartToken(nextToken.kind)) {\nreturn createMissingOnClauseNode();\n}\nstartContext(ParserRuleContext.ON_CLAUSE);\nSTNode onKeyword = parseOnKeyword();\nSTNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTNode equalsKeyword = parseEqualsKeyword();\nendContext();\nSTNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);\n}\nprivate STNode createMissingOnClauseNode() {\nSTNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,\nDiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);\nSTNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);\nSTNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,\nDiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);\nSTNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);\nSTNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);\nreturn STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);\n}\n/**\n* Parse start action.\n*
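\n* Illustrative example (not from the source): \`future<int> f = start compute(7);\`\n* (\`compute\` is a placeholder function).\n*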
\n* start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)\n*\n* @return Start action node\n*/\nprivate STNode parseStartAction(STNode annots) {\nSTNode startKeyword = parseStartKeyword();\nSTNode expr = parseActionOrExpression();\nswitch (expr.kind) {\ncase FUNCTION_CALL:\ncase METHOD_CALL:\ncase REMOTE_METHOD_CALL_ACTION:\nbreak;\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nSTNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);\nSTNode arguments = STNodeFactory.createEmptyNodeList();\nSTNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);\nexpr = STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);\nbreak;\ndefault:\nstartKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,\nDiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);\nSTNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nfuncName = STNodeFactory.createSimpleNameReferenceNode(funcName);\nopenParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);\narguments = STNodeFactory.createEmptyNodeList();\ncloseParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);\nexpr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,\ncloseParenToken);\nbreak;\n}\nreturn STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);\n}\n/**\n* Parse start keyword.\n*\n* @return Start keyword node\n*/\nprivate STNode parseStartKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.START_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.START_KEYWORD);\nreturn parseStartKeyword();\n}\n}\n/**\n* Parse flush action.\n*
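\n* Illustrative examples (not from the source): \`flush w\` for a peer worker \`w\`, or a bare\n* \`flush\` with the peer worker omitted.\n*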
\n* flush-action := flush [peer-worker]\n*\n* @return flush action node\n*/\nprivate STNode parseFlushAction() {\nSTNode flushKeyword = parseFlushKeyword();\nSTNode peerWorker = parseOptionalPeerWorkerName();\nreturn STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);\n}\n/**\n* Parse flush keyword.\n*\n* @return flush keyword node\n*/\nprivate STNode parseFlushKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FLUSH_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FLUSH_KEYWORD);\nreturn parseFlushKeyword();\n}\n}\n/**\n* Parse peer worker.\n*
\n* peer-worker := worker-name | default\n*\n* @return peer worker name node\n*/\nprivate STNode parseOptionalPeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase DEFAULT_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nreturn STNodeFactory.createEmptyNode();\n}\n}\n/**\n* Parse intersection type descriptor.\n*
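\n* Illustrative example (not from the source): \`type ReadOnlyEmployee Employee & readonly;\`\n* (\`Employee\` is a placeholder type).\n*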
\n* intersection-type-descriptor := type-descriptor & type-descriptor\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode bitwiseAndToken = consume();\nSTNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false);\nreturn createIntersectionTypeDesc(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n}\nprivate STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {\nleftTypeDesc = validateForUsageOfVar(leftTypeDesc);\nrightTypeDesc = validateForUsageOfVar(rightTypeDesc);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n}\n/**\n* Parse singleton type descriptor.\n*
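\n* Illustrative examples (not from the source): \`type Answer 42;\` and, with a sign,\n* \`type Negative -1;\`\n*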
\n* singleton-type-descriptor := simple-const-expr\n* simple-const-expr :=\n* nil-literal\n* | boolean-literal\n* | [Sign] int-literal\n* | [Sign] floating-point-literal\n* | string-literal\n* | constant-reference-expr\n*
\n*/\nprivate STNode parseSingletonTypeDesc() {\nSTNode simpleContExpr = parseSimpleConstExpr();\nreturn STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);\n}\nprivate STNode parseSignedIntOrFloat() {\nSTNode operator = parseUnaryOperator();\nSTNode literal;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\nliteral = parseBasicLiteral();\nbreak;\ndefault:\nliteral = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);\nliteral = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);\n}\nreturn STNodeFactory.createUnaryExpressionNode(operator, literal);\n}\nprivate boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {\nSTToken nextNextToken = getNextNextToken();\nswitch (tokenKind) {\ncase STRING_LITERAL_TOKEN:\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nif (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {\nreturn true;\n}\nreturn false;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isIntOrFloat(nextNextToken);\ndefault:\nreturn false;\n}\n}\nstatic boolean isIntOrFloat(STToken token) {\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the parser reached to a valid expression start.\n*\n* @param nextTokenKind Kind of the next immediate token.\n* @param nextTokenIndex Index to the next token.\n* @return true if this is a start of a valid expression. 
false otherwise\n*/\nprivate boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {\nnextTokenIndex++;\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase STRING_LITERAL_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\nSyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;\nreturn nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||\nnextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||\nisValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase IDENTIFIER_TOKEN:\nreturn isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase OPEN_PAREN_TOKEN:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase TYPEOF_KEYWORD:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\ncase TRAP_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase LT_TOKEN:\ncase FROM_KEYWORD:\ncase LET_KEYWORD:\ncase BACKTICK_TOKEN:\ncase NEW_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\ncase ISOLATED_KEYWORD:\nreturn true;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);\ncase TABLE_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;\ncase STREAM_KEYWORD:\nSTToken nextNextToken = peek(nextTokenIndex);\nreturn nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||\nnextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||\nnextNextToken.kind == SyntaxKind.FROM_KEYWORD;\ncase ERROR_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase WAIT_KEYWORD:\ndefault:\nreturn false;\n}\n}\n/**\n* Parse sync send action.\n*
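\n* Illustrative example (not from the source): \`msg ->> w\` sends the value of \`msg\` to the\n* peer worker \`w\` and waits until it is received.\n*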
\n* sync-send-action := expression ->> peer-worker\n*\n* @param expression LHS expression of the sync send action\n* @return Sync send action node\n*/\nprivate STNode parseSyncSendAction(STNode expression) {\nSTNode syncSendToken = parseSyncSendToken();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);\n}\n/**\n* Parse peer worker.\n*
\n* peer-worker := worker-name | default\n*\n* @return peer worker name node\n*/\nprivate STNode parsePeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase DEFAULT_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nrecover(token, ParserRuleContext.PEER_WORKER_NAME);\nreturn parsePeerWorkerName();\n}\n}\n/**\n* Parse sync send token.\n*
\n* sync-send-token := ->> \n*\n* @return sync send token\n*/\nprivate STNode parseSyncSendToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.SYNC_SEND_TOKEN);\nreturn parseSyncSendToken();\n}\n}\n/**\n* Parse receive action.\n*
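\n* Illustrative examples (not from the source): the single receive \`int x = <- w;\` and the\n* multiple receive \`<- {a: w1, b: w2}\`.\n*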
\n* receive-action := single-receive-action | multiple-receive-action\n*\n* @return Receive action\n*/\nprivate STNode parseReceiveAction() {\nSTNode leftArrow = parseLeftArrowToken();\nSTNode receiveWorkers = parseReceiveWorkers();\nreturn STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);\n}\nprivate STNode parseReceiveWorkers() {\nswitch (peek().kind) {\ncase DEFAULT_KEYWORD:\ncase IDENTIFIER_TOKEN:\nreturn parsePeerWorkerName();\ncase OPEN_BRACE_TOKEN:\nreturn parseMultipleReceiveWorkers();\ndefault:\nrecover(peek(), ParserRuleContext.RECEIVE_WORKERS);\nreturn parseReceiveWorkers();\n}\n}\n/**\n* Parse multiple worker receivers.\n*
\n* { receive-field (, receive-field)* }\n*\n* @return Multiple worker receiver node\n*/\nprivate STNode parseMultipleReceiveWorkers() {\nstartContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);\nSTNode openBrace = parseOpenBrace();\nSTNode receiveFields = parseReceiveFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);\nreturn STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);\n}\nprivate STNode parseReceiveFields() {\nList receiveFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfReceiveFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode receiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\nSTNode recieveFieldEnd;\nwhile (!isEndOfReceiveFields(nextToken.kind)) {\nrecieveFieldEnd = parseReceiveFieldEnd();\nif (recieveFieldEnd == null) {\nbreak;\n}\nreceiveFields.add(recieveFieldEnd);\nreceiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(receiveFields);\n}\nprivate boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseReceiveFieldEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.RECEIVE_FIELD_END);\nreturn parseReceiveFieldEnd();\n}\n}\n/**\n* Parse receive field.\n*
\n* receive-field := peer-worker | field-name : peer-worker\n*\n* @return Receiver field node\n*/\nprivate STNode parseReceiveField() {\nswitch (peek().kind) {\ncase DEFAULT_KEYWORD:\nSTNode defaultKeyword = parseDefaultKeyword();\nreturn STNodeFactory.createSimpleNameReferenceNode(defaultKeyword);\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);\nreturn createQualifiedReceiveField(identifier);\ndefault:\nrecover(peek(), ParserRuleContext.RECEIVE_FIELD);\nreturn parseReceiveField();\n}\n}\nprivate STNode createQualifiedReceiveField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);\n}\n/**\n* Parse left arrow (<-) token.\n*\n* @return left arrow token\n*/\nprivate STNode parseLeftArrowToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.LEFT_ARROW_TOKEN);\nreturn parseLeftArrowToken();\n}\n}\n/**\n* Parse signed right shift token (>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseSignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTToken endLGToken = consume();\nSTNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),\nendLGToken.trailingMinutiae());\nif (hasTrailingMinutiae(openGTToken)) {\ndoubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);\n}\nreturn doubleGTToken;\n}\n/**\n* Parse unsigned right shift token (>>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseUnsignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTNode middleGTToken = consume();\nSTNode endLGToken = consume();\nSTNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,\nopenGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());\nboolean validOpenGTToken = !hasTrailingMinutiae(openGTToken);\nboolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);\nif (validOpenGTToken && validMiddleGTToken) {\nreturn unsignedRightShiftToken;\n}\nunsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);\nreturn unsignedRightShiftToken;\n}\n/**\n* Parse wait action.\n*
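\n* Illustrative examples (not from the source): \`wait f\` (single), \`wait f1 | f2\`\n* (alternate) and \`wait {a: f1, b: f2}\` (multiple), where \`f\`, \`f1\` and \`f2\` are futures.\n*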
\n* wait-action := single-wait-action | multiple-wait-action | alternate-wait-action \n*\n* @return Wait action node\n*/\nprivate STNode parseWaitAction() {\nSTNode waitKeyword = parseWaitKeyword();\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nreturn parseMultiWaitAction(waitKeyword);\n}\nreturn parseSingleOrAlternateWaitAction(waitKeyword);\n}\n/**\n* Parse wait keyword.\n*\n* @return wait keyword\n*/\nprivate STNode parseWaitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WAIT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.WAIT_KEYWORD);\nreturn parseWaitKeyword();\n}\n}\n/**\n* Parse single or alternate wait actions.\n*
\n* \n* alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+\n*
\n* wait-future-expr := expression but not mapping-constructor-expr\n*
\n*\n* @param waitKeyword wait keyword\n* @return Single or alternate wait action node\n*/\nprivate STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);\nSTToken nextToken = peek();\nif (isEndOfWaitFutureExprList(nextToken.kind)) {\nendContext();\nSTNode waitFutureExprs = STNodeFactory\n.createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\nwaitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);\n}\nList waitFutureExprList = new ArrayList<>();\nSTNode waitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\nSTNode waitFutureExprEnd;\nwhile (!isEndOfWaitFutureExprList(nextToken.kind)) {\nwaitFutureExprEnd = parseWaitFutureExprEnd();\nif (waitFutureExprEnd == null) {\nbreak;\n}\nwaitFutureExprList.add(waitFutureExprEnd);\nwaitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));\n}\nprivate boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ncase PIPE_TOKEN:\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFutureExpr() {\nSTNode waitFutureExpr = parseActionOrExpression();\nif (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {\nwaitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,\nDiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);\n} else if (isAction(waitFutureExpr)) {\nwaitFutureExpr =\nSyntaxErrors.addDiagnostic(waitFutureExpr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);\n}\nreturn waitFutureExpr;\n}\nprivate STNode parseWaitFutureExprEnd() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ndefault:\nif (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {\nreturn null;\n}\nrecover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);\nreturn parseWaitFutureExprEnd();\n}\n}\n/**\n* Parse multiple wait action.\n*
\n* multiple-wait-action := wait { wait-field (, wait-field)* }\n*\n* @param waitKeyword Wait keyword\n* @return Multiple wait action node\n*/\nprivate STNode parseMultiWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.MULTI_WAIT_FIELDS);\nSTNode openBrace = parseOpenBrace();\nSTNode waitFields = parseWaitFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);\nSTNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);\n}\nprivate STNode parseWaitFields() {\nList waitFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfWaitFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode waitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\nSTNode waitFieldEnd;\nwhile (!isEndOfWaitFields(nextToken.kind)) {\nwaitFieldEnd = parseWaitFieldEnd();\nif (waitFieldEnd == null) {\nbreak;\n}\nwaitFields.add(waitFieldEnd);\nwaitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(waitFields);\n}\nprivate boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFieldEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.WAIT_FIELD_END);\nreturn parseWaitFieldEnd();\n}\n}\n/**\n* Parse wait field.\n*
\n* wait-field := variable-name | field-name : wait-future-expr\n*\n* @return Receiver field node\n*/\nprivate STNode parseWaitField() {\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);\nidentifier = STNodeFactory.createSimpleNameReferenceNode(identifier);\nreturn createQualifiedWaitField(identifier);\ndefault:\nrecover(peek(), ParserRuleContext.WAIT_FIELD_NAME);\nreturn parseWaitField();\n}\n}\nprivate STNode createQualifiedWaitField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode waitFutureExpr = parseWaitFutureExpr();\nreturn STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);\n}\n/**\n* Parse annot access expression.\n*
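\n* Illustrative example (not from the source): \`td.@MyAnnot\`, where \`td\` and \`MyAnnot\` are\n* a placeholder expression and annotation tag.\n*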
\n* \n* annot-access-expr := expression .@ annot-tag-reference\n*
\n* annot-tag-reference := qualified-identifier | identifier\n*
\n*\n* @param lhsExpr Preceding expression of the annot access\n* @return Parsed node\n*/\nprivate STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode annotAccessToken = parseAnnotChainingToken();\nSTNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);\n}\n/**\n* Parse annot-chaining-token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);\nreturn parseAnnotChainingToken();\n}\n}\n/**\n* Parse field access identifier.\n*
\n* field-access-identifier := qualified-identifier | identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {\nreturn parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);\n}\n/**\n* Parse query action.\n*
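\n* Illustrative example (not from the source): \`from int i in nums do { sum += i; }\`\n* (\`nums\` and \`sum\` are placeholders).\n*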
\n* query-action := query-pipeline do-clause\n*
\n* do-clause := do block-stmt\n*
\n*\n* @param queryConstructType Query construct type. This is only for validation\n* @param queryPipeline Query pipeline\n* @param selectClause Select clause if any This is only for validation.\n* @return Query action node\n*/\nprivate STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause,\nboolean isRhsExpr) {\nif (queryConstructType != null) {\nqueryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,\nDiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);\n}\nif (selectClause != null) {\nqueryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,\nDiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);\n}\nstartContext(ParserRuleContext.DO_CLAUSE);\nSTNode doKeyword = parseDoKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nreturn STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt);\n}\n/**\n* Parse 'do' keyword.\n*\n* @return do keyword node\n*/\nprivate STNode parseDoKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DO_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.DO_KEYWORD);\nreturn parseDoKeyword();\n}\n}\n/**\n* Parse optional field access or xml optional attribute access expression.\n*
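\n* Illustrative examples (not from the source): \`person?.name\` for an optional field access\n* and \`x?.id\` for an XML optional attribute access (\`person\` and \`x\` are placeholders).\n*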
\n* \n* optional-field-access-expr := expression ?. field-name\n*
\n* xml-optional-attribute-access-expr := expression ?. xml-attribute-name\n*
\n* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n*
\n* xml-qualified-name := xml-namespace-prefix : identifier\n*
\n* xml-namespace-prefix := identifier\n*
\n*\n* @param lhsExpr Preceding expression of the optional access\n* @return Parsed node\n*/\nprivate STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode optionalFieldAccessToken = parseOptionalChainingToken();\nSTNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);\n}\n/**\n* Parse optional chaining token.\n*\n* @return parsed node\n*/\nprivate STNode parseOptionalChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);\nreturn parseOptionalChainingToken();\n}\n}\n/**\n* Parse conditional expression.\n*
\n* conditional-expr := expression ? expression : expression\n*\n* @param lhsExpr Preceding expression of the question mark\n* @return Parsed node\n*/\nprivate STNode parseConditionalExpression(STNode lhsExpr) {\nstartContext(ParserRuleContext.CONDITIONAL_EXPRESSION);\nSTNode questionMark = parseQuestionMark();\nSTNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);\nSTNode nextToken = peek();\nSTNode endExpr;\nSTNode colon;\nif (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;\nSTNode modulePrefix = qualifiedNameRef.modulePrefix;\nif (modulePrefix.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nmiddleExpr = STNodeFactory.createSimpleNameReferenceNode(modulePrefix);\n} else {\nmiddleExpr = modulePrefix;\n}\ncolon = qualifiedNameRef.colon;\nendContext();\nendExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);\n} else {\nif (middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nmiddleExpr = generateQualifiedNameRef(middleExpr);\n}\ncolon = parseColon();\nendContext();\nendExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);\n}\nreturn STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);\n}\nprivate STNode generateQualifiedNameRef(STNode qualifiedName) {\nSTQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) qualifiedName;\nSTNode modulePrefix = qualifiedNameRef.modulePrefix;\nif (modulePrefix.kind != SyntaxKind.IDENTIFIER_TOKEN) {\nSTBuiltinSimpleNameReferenceNode builtInType = (STBuiltinSimpleNameReferenceNode) modulePrefix;\nSTToken nameToken = (STToken) builtInType.name;\nSTNode preDeclaredPrefix = STNodeFactory.createIdentifierToken(nameToken.text(),\nnameToken.leadingMinutiae(), nameToken.trailingMinutiae());\nreturn STNodeFactory.createQualifiedNameReferenceNode(preDeclaredPrefix,\nqualifiedNameRef.colon, qualifiedNameRef.identifier);\n} else {\nreturn qualifiedName;\n}\n}\n/**\n* Parse enum declaration.\n*
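The conditional-expression logic just shown resolves a genuine ambiguity: in `cond ? a:b`, the `a:b` part lexes as a single qualified name reference, so when no standalone colon follows, the reference is split into the middle expression, the conditional's colon, and the end expression. A rough illustration of just that split, over plain strings rather than the real syntax tree (the class name is invented):

```java
// In `cond ? a:b` the middle expression arrives as a qualified name "a:b".
// With no separate COLON_TOKEN after it, the reference is taken apart:
// module prefix -> middle expr, its colon -> the conditional's colon,
// identifier -> end expr.
class ConditionalSplitSketch {
    public static void main(String[] args) {
        String middle = "a:b";            // parsed as QUALIFIED_NAME_REFERENCE
        boolean colonFollows = false;     // peek().kind != COLON_TOKEN
        if (!colonFollows && middle.indexOf(':') >= 0) {
            String[] parts = middle.split(":", 2);
            System.out.println("middleExpr=" + parts[0] + ", endExpr=" + parts[1]);
        } else {
            System.out.println("parse colon and end expression normally");
        }
    }
}
```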
\n* module-enum-decl :=\n* metadata\n* [public] enum identifier { enum-member (, enum-member)* }\n* enum-member := metadata identifier [= const-expr]\n*
\n*\n* @param metadata\n* @param qualifier\n* @return Parsed enum node.\n*/\nprivate STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.MODULE_ENUM_DECLARATION);\nSTNode enumKeywordToken = parseEnumKeyword();\nSTNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);\nSTNode openBraceToken = parseOpenBrace();\nSTNode enumMemberList = parseEnumMemberList();\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nopenBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,\nDiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);\nreturn STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,\nopenBraceToken, enumMemberList, closeBraceToken);\n}\n/**\n* Parse 'enum' keyword.\n*\n* @return enum keyword node\n*/\nprivate STNode parseEnumKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ENUM_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ENUM_KEYWORD);\nreturn parseEnumKeyword();\n}\n}\n/**\n* Parse enum member list.\n*
\n* enum-member := metadata identifier [= const-expr]\n*
\n*\n* @return enum member list node.\n*/\nprivate STNode parseEnumMemberList() {\nstartContext(ParserRuleContext.ENUM_MEMBER_LIST);\nif (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList enumMemberList = new ArrayList<>();\nSTNode enumMember = parseEnumMember();\nSTNode enumMemberRhs;\nwhile (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {\nenumMemberRhs = parseEnumMemberEnd();\nif (enumMemberRhs == null) {\nbreak;\n}\nenumMemberList.add(enumMember);\nenumMemberList.add(enumMemberRhs);\nenumMember = parseEnumMember();\n}\nenumMemberList.add(enumMember);\nendContext();\nreturn STNodeFactory.createNodeList(enumMemberList);\n}\n/**\n* Parse enum member.\n*
\n* enum-member := metadata identifier [= const-expr]\n*
\n*\n* @return Parsed enum member node.\n*/\nprivate STNode parseEnumMember() {\nSTNode metadata;\nswitch (peek().kind) {\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData();\nbreak;\ndefault:\nmetadata = STNodeFactory.createEmptyNode();\n}\nSTNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);\nreturn parseEnumMemberRhs(metadata, identifierNode);\n}\nprivate STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {\nSTNode equalToken, constExprNode;\nswitch (peek().kind) {\ncase EQUAL_TOKEN:\nequalToken = parseAssignOp();\nconstExprNode = parseExpression();\nbreak;\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nequalToken = STNodeFactory.createEmptyNode();\nconstExprNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);\nreturn parseEnumMemberRhs(metadata, identifierNode);\n}\nreturn STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);\n}\nprivate STNode parseEnumMemberEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.ENUM_MEMBER_END);\nreturn parseEnumMemberEnd();\n}\n}\n/**\n* Parse transaction statement.\n*
\n* transaction-stmt := `transaction` block-stmt [on-fail-clause]\n*\n* @return Transaction statement node\n*/\nprivate STNode parseTransactionStatement() {\nstartContext(ParserRuleContext.TRANSACTION_STMT);\nSTNode transactionKeyword = parseTransactionKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nSTNode onFailClause = parseOptionalOnFailClause();\nreturn STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);\n}\n/**\n* Parse transaction keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseTransactionKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTION_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.TRANSACTION_KEYWORD);\nreturn parseTransactionKeyword();\n}\n}\n/**\n* Parse commit action.\n*
\n* commit-action := \"commit\"\n*\n* @return Commit action node\n*/\nprivate STNode parseCommitAction() {\nSTNode commitKeyword = parseCommitKeyword();\nreturn STNodeFactory.createCommitActionNode(commitKeyword);\n}\n/**\n* Parse commit keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseCommitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COMMIT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.COMMIT_KEYWORD);\nreturn parseCommitKeyword();\n}\n}\n/**\n* Parse retry statement.\n*
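parseCommitKeyword above is one of many methods with the identical peek/consume/recover shape. The toy model below shows why the recursive retry terminates; its recovery either drops the offending token or synthesizes the expected one, which is an assumption standing in for the real, more involved recovery strategy:

```java
import java.util.ArrayDeque;
import java.util.Deque;

// Toy model of the peek/consume/recover pattern shared by the
// parseXxxKeyword methods: on a mismatch, recovery mutates the token
// stream (drop or insert), then the same production is retried.
class KeywordRecoverySketch {
    private final Deque<String> tokens = new ArrayDeque<>();

    KeywordRecoverySketch(String... toks) {
        for (String t : toks) {
            tokens.add(t);
        }
    }

    String parseKeyword(String expected) {
        if (expected.equals(tokens.peek())) {
            return tokens.poll();          // matched: consume and return
        }
        recover(expected);                 // adjust the stream somehow
        return parseKeyword(expected);     // retry the same production
    }

    private void recover(String expected) {
        if (tokens.isEmpty()) {
            tokens.add(expected);          // synthesize a missing token
        } else {
            tokens.poll();                 // or skip the unexpected one
        }
    }

    public static void main(String[] args) {
        String kw = new KeywordRecoverySketch("oops", "commit").parseKeyword("commit");
        System.out.println(kw);            // commit
    }
}
```

Every recovery step either shortens the stream or makes the next retry succeed, so the recursion is bounded.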
\n* \n* retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]\n*
\n* retry-spec := [type-parameter] [ `(` arg-list `)` ]\n*
\n*\n* @return Retry statement node\n*/\nprivate STNode parseRetryStatement() {\nstartContext(ParserRuleContext.RETRY_STMT);\nSTNode retryKeyword = parseRetryKeyword();\nSTNode retryStmt = parseRetryKeywordRhs(retryKeyword);\nreturn retryStmt;\n}\nprivate STNode parseRetryKeywordRhs(STNode retryKeyword) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase LT_TOKEN:\nSTNode typeParam = parseTypeParameter();\nreturn parseRetryTypeParamRhs(retryKeyword, typeParam);\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\ntypeParam = STNodeFactory.createEmptyNode();\nreturn parseRetryTypeParamRhs(retryKeyword, typeParam);\ndefault:\nrecover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);\nreturn parseRetryKeywordRhs(retryKeyword);\n}\n}\nprivate STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {\nSTNode args;\nswitch (peek().kind) {\ncase OPEN_PAREN_TOKEN:\nargs = parseParenthesizedArgList();\nbreak;\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\nargs = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);\nreturn parseRetryTypeParamRhs(retryKeyword, typeParam);\n}\nSTNode blockStmt = parseRetryBody();\nendContext();\nSTNode onFailClause = parseOptionalOnFailClause();\nreturn STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);\n}\nprivate STNode parseRetryBody() {\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\nreturn parseBlockNode();\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionStatement();\ndefault:\nrecover(peek(), ParserRuleContext.RETRY_BODY);\nreturn parseRetryBody();\n}\n}\n/**\n* Parse optional on fail clause.\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalOnFailClause() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ON_KEYWORD) {\nreturn parseOnFailClause();\n}\nif (isEndOfRegularCompoundStmt(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNode();\n}\nrecover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);\nreturn parseOptionalOnFailClause();\n}\nprivate boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase AT_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn isStatementStartingToken(nodeKind);\n}\n}\nprivate boolean isStatementStartingToken(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase FINAL_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase DO_KEYWORD:\ncase PANIC_KEYWORD:\ncase CONTINUE_KEYWORD:\ncase BREAK_KEYWORD:\ncase RETURN_KEYWORD:\ncase TYPE_KEYWORD:\ncase LOCK_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase FORK_KEYWORD:\ncase FOREACH_KEYWORD:\ncase XMLNS_KEYWORD:\ncase TRANSACTION_KEYWORD:\ncase RETRY_KEYWORD:\ncase ROLLBACK_KEYWORD:\ncase MATCH_KEYWORD:\ncase FAIL_KEYWORD:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase TRAP_KEYWORD:\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase COMMIT_KEYWORD:\ncase WORKER_KEYWORD:\nreturn true;\ndefault:\nif (isTypeStartingToken(nodeKind)) {\nreturn true;\n}\nif (isValidExpressionStart(nodeKind, 1)) {\nreturn true;\n}\nreturn false;\n}\n}\n/**\n* Parse on fail clause.\n*
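parseOptionalOnFailClause above reduces to a three-way lookahead decision. Restated over plain strings, with only a few of the statement-starting tokens from isEndOfRegularCompoundStmt shown (the class is a sketch, not parser code):

```java
// Three-way decision in parseOptionalOnFailClause: `on` opens the clause,
// a token that can end or start a statement means the clause is absent,
// and anything else goes through recovery before retrying.
class OnFailLookaheadSketch {
    static String decide(String nextToken) {
        if (nextToken.equals("on")) {
            return "parse on-fail clause";
        }
        switch (nextToken) {   // subset of isEndOfRegularCompoundStmt
            case "}": case ";": case "@": case "if": case "while": case "return":
                return "empty node (no on-fail clause)";
            default:
                return "recover, then retry";
        }
    }

    public static void main(String[] args) {
        System.out.println(decide("on"));
        System.out.println(decide("}"));
        System.out.println(decide("#"));
    }
}
```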
\n* \n* on-fail-clause := on fail typed-binding-pattern statement-block\n* \n*\n* @return On fail clause node\n*/\nprivate STNode parseOnFailClause() {\nstartContext(ParserRuleContext.ON_FAIL_CLAUSE);\nSTNode onKeyword = parseOnKeyword();\nSTNode failKeyword = parseFailKeyword();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);\nSTNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);\nSTNode blockStatement = parseBlockNode();\nendContext();\nreturn STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptor, identifier,\nblockStatement);\n}\n/**\n* Parse retry keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseRetryKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETRY_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.RETRY_KEYWORD);\nreturn parseRetryKeyword();\n}\n}\n/**\n* Parse rollback statement.\n*
\n* rollback-stmt := \"rollback\" [expression] \";\"\n*\n* @return Rollback statement node\n*/\nprivate STNode parseRollbackStatement() {\nstartContext(ParserRuleContext.ROLLBACK_STMT);\nSTNode rollbackKeyword = parseRollbackKeyword();\nSTNode expression;\nif (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {\nexpression = STNodeFactory.createEmptyNode();\n} else {\nexpression = parseExpression();\n}\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);\n}\n/**\n* Parse rollback keyword.\n*\n* @return Rollback keyword node\n*/\nprivate STNode parseRollbackKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ROLLBACK_KEYWORD);\nreturn parseRollbackKeyword();\n}\n}\n/**\n* Parse transactional expression.\n*
\n* transactional-expr := \"transactional\"\n*\n* @return Transactional expression node\n*/\nprivate STNode parseTransactionalExpression() {\nSTNode transactionalKeyword = parseTransactionalKeyword();\nreturn STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);\n}\n/**\n* Parse transactional keyword.\n*\n* @return Transactional keyword node\n*/\nprivate STNode parseTransactionalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD);\nreturn parseTransactionalKeyword();\n}\n}\n/**\n* Parse base16 literal.\n*
\n* \n* byte-array-literal := Base16Literal | Base64Literal\n*
\n* Base16Literal := base16 WS ` HexGroup* WS `\n*
\n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*
\n*\n* @return parsed node\n*/\nprivate STNode parseByteArrayLiteral() {\nSTNode type;\nif (peek().kind == SyntaxKind.BASE16_KEYWORD) {\ntype = parseBase16Keyword();\n} else {\ntype = parseBase64Keyword();\n}\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nif (startingBackTick.isMissing()) {\nstartingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);\nSTNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);\nSTNode content = STNodeFactory.createEmptyNode();\nSTNode byteArrayLiteral =\nSTNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);\nbyteArrayLiteral =\nSyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);\nreturn byteArrayLiteral;\n}\nSTNode content = parseByteArrayContent();\nreturn parseByteArrayLiteral(type, startingBackTick, content);\n}\n/**\n* Parse byte array literal.\n*\n* @param typeKeyword keyword token, possible values are `base16` and `base64`\n* @param startingBackTick starting backtick token\n* @param byteArrayContent byte array literal content to be validated\n* @return parsed byte array literal node\n*/\nprivate STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {\nSTNode content = STNodeFactory.createEmptyNode();\nSTNode newStartingBackTick = startingBackTick;\nSTNodeList items = (STNodeList) byteArrayContent;\nif (items.size() == 1) {\nSTNode item = items.get(0);\nif (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (item.kind != SyntaxKind.TEMPLATE_STRING) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else {\ncontent = item;\n}\n} else if (items.size() > 1) {\nSTNode clonedStartingBackTick = startingBackTick;\nfor (int index = 0; index < items.size(); index++) {\nSTNode item = items.get(index);\nclonedStartingBackTick =\nSyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);\n}\nnewStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n}\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);\n}\n/**\n* Parse base16 keyword.\n*\n* @return base16 keyword node\n*/\nprivate STNode parseBase16Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE16_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.BASE16_KEYWORD);\nreturn parseBase16Keyword();\n}\n}\n/**\n* Parse base64 keyword.\n*\n* @return base64 keyword node\n*/\nprivate STNode parseBase64Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE64_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.BASE64_KEYWORD);\nreturn parseBase64Keyword();\n}\n}\n/**\n* Validate and 
parse byte array literal content.\n* An error is reported if the content is invalid.\n*\n* @return parsed node\n*/\nprivate STNode parseByteArrayContent() {\nSTToken nextToken = peek();\nList<STNode> items = new ArrayList<>();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode content = parseTemplateItem();\nitems.add(content);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\n/**\n* Validate base16 literal content.\n*
\n* \n* Base16Literal := base16 WS ` HexGroup* WS `\n*
\n* HexGroup := WS HexDigit WS HexDigit\n*
\n* WS := WhiteSpaceChar*\n*
\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase16LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint hexDigitCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ndefault:\nif (isHexDigit(c)) {\nhexDigitCount++;\n} else {\nreturn false;\n}\nbreak;\n}\n}\nreturn hexDigitCount % 2 == 0;\n}\n/**\n* Validate base64 literal content.\n*
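The base16 rule above boils down to: skip the four whitespace characters, require every other character to be a hex digit, and require an even digit count so the digits pair into bytes. A standalone restatement with a few probes (this is not the parser's own class; the name is invented):

```java
// Standalone restatement of the base16 content rule: whitespace is skipped,
// all remaining characters must be hex digits, and the digit count must be
// even (two digits per byte).
class Base16ContentCheck {
    static boolean isValid(String content) {
        int hexDigits = 0;
        for (char c : content.toCharArray()) {
            if (c == '\t' || c == '\n' || c == '\r' || c == ' ') {
                continue;                  // WS per the grammar above
            }
            boolean hex = ('0' <= c && c <= '9')
                    || ('a' <= c && c <= 'f')
                    || ('A' <= c && c <= 'F');
            if (!hex) {
                return false;
            }
            hexDigits++;
        }
        return hexDigits % 2 == 0;
    }

    public static void main(String[] args) {
        System.out.println(isValid("aeeb cc 12"));  // true: 8 digits, 4 bytes
        System.out.println(isValid("abc"));         // false: odd digit count
        System.out.println(isValid("0xFF"));        // false: 'x' is not hex
    }
}
```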
\n* \n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*
\n* Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char\n*
\n* PaddedBase64Group :=\n* WS Base64Char WS Base64Char WS Base64Char WS PaddingChar\n* | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar\n*
\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*
\n* PaddingChar := =\n*
\n* WS := WhiteSpaceChar*\n*
\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase64LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint base64CharCount = 0;\nint paddingCharCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ncase LexerTerminals.EQUAL:\npaddingCharCount++;\nbreak;\ndefault:\nif (isBase64Char(c)) {\nif (paddingCharCount == 0) {\nbase64CharCount++;\n} else {\nreturn false;\n}\n} else {\nreturn false;\n}\nbreak;\n}\n}\nif (paddingCharCount > 2) {\nreturn false;\n} else if (paddingCharCount == 0) {\nreturn base64CharCount % 4 == 0;\n} else {\nreturn base64CharCount % 4 == 4 - paddingCharCount;\n}\n}\n/**\n*
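The final checks in isValidBase64LiteralContent encode the usual base64 length rule. Isolating just that arithmetic: with p padding characters (p at most 2), the count n of base64 characters must satisfy n % 4 == 0 when p == 0, and n % 4 == 4 - p otherwise. A few worked probes (hypothetical class, same rule):

```java
// Just the length/padding arithmetic from isValidBase64LiteralContent:
// n = count of base64 chars, p = count of '=' padding chars.
class Base64LengthRule {
    static boolean ok(int n, int p) {
        if (p > 2) {
            return false;                  // at most two padding chars
        }
        return p == 0 ? n % 4 == 0 : n % 4 == 4 - p;
    }

    public static void main(String[] args) {
        System.out.println(ok(8, 0));  // "aGVsbG8h" -> true
        System.out.println(ok(7, 1));  // "aGVsbG8=" -> true
        System.out.println(ok(6, 2));  // "aGVsbG==" -> true
        System.out.println(ok(3, 0));  // "aGk"      -> false, incomplete group
        System.out.println(ok(1, 3));  // "a==="     -> false, too much padding
    }
}
```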
\n* Check whether a given char is a base64 char.\n*
\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*\n* @param c character to check\n* @return true, if the character represents a base64 char. false otherwise.\n*/\nstatic boolean isBase64Char(int c) {\nif ('a' <= c && c <= 'z') {\nreturn true;\n}\nif ('A' <= c && c <= 'Z') {\nreturn true;\n}\nif (c == '+' || c == '/') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isHexDigit(int c) {\nif ('a' <= c && c <= 'f') {\nreturn true;\n}\nif ('A' <= c && c <= 'F') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isDigit(int c) {\nreturn ('0' <= c && c <= '9');\n}\n/**\n* Parse xml filter expression.\n*
\n* xml-filter-expr := expression .< xml-name-pattern >\n*\n* @param lhsExpr Preceding expression of .< token\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpression(STNode lhsExpr) {\nSTNode xmlNamePatternChain = parseXMLFilterExpressionRhs();\nreturn STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);\n}\n/**\n* Parse xml filter expression rhs.\n*
\n* filter-expression-rhs := .< xml-name-pattern >\n*
\n* \n* xml-name-pattern-chain := filter-expression-rhs | xml-element-children-step | xml-element-descendants-step\n*
\n* filter-expression-rhs := .< xml-name-pattern >\n*
\n* xml-element-children-step := /< xml-name-pattern >\n*
\n* xml-element-descendants-step := /**\\/\n*
\n*\n* @param startToken Preceding token of xml name pattern\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePatternChain(STNode startToken) {\nstartContext(ParserRuleContext.XML_NAME_PATTERN);\nSTNode xmlNamePattern = parseXMLNamePattern();\nSTNode gtToken = parseGTToken();\nendContext();\nstartToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,\nDiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);\nreturn STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);\n}\n/**\n* Parse .< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDotLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.DOT_LT_TOKEN);\nreturn parseDotLTToken();\n}\n}\n/**\n* Parse xml name pattern.\n*
\n* xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePattern() {\nList xmlAtomicNamePatternList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfXMLNamePattern(nextToken.kind)) {\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nSTNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\nSTNode separator;\nwhile (!isEndOfXMLNamePattern(peek().kind)) {\nseparator = parseXMLNamePatternSeparator();\nif (separator == null) {\nbreak;\n}\nxmlAtomicNamePatternList.add(separator);\nxmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\n}\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nprivate boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ncase IDENTIFIER_TOKEN:\ncase ASTERISK_TOKEN:\ncase COLON_TOKEN:\ndefault:\nreturn false;\n}\n}\nprivate STNode parseXMLNamePatternSeparator() {\nSTToken token = peek();\nswitch (token.kind) {\ncase PIPE_TOKEN:\nreturn consume();\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nrecover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);\nreturn parseXMLNamePatternSeparator();\n}\n}\n/**\n* Parse xml atomic name pattern.\n*
\n* \n* xml-atomic-name-pattern :=\n* *\n* | identifier\n* | xml-namespace-prefix : identifier\n* | xml-namespace-prefix : *\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseXMLAtomicNamePattern() {\nstartContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);\nSTNode atomicNamePattern = parseXMLAtomicNamePatternBody();\nendContext();\nreturn atomicNamePattern;\n}\nprivate STNode parseXMLAtomicNamePatternBody() {\nSTToken token = peek();\nSTNode identifier;\nswitch (token.kind) {\ncase ASTERISK_TOKEN:\nreturn consume();\ncase IDENTIFIER_TOKEN:\nidentifier = consume();\nbreak;\ndefault:\nrecover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);\nreturn parseXMLAtomicNamePatternBody();\n}\nreturn parseXMLAtomicNameIdentifier(identifier);\n}\nprivate STNode parseXMLAtomicNameIdentifier(STNode identifier) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COLON_TOKEN) {\nSTNode colon = consume();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nSTToken endToken = consume();\nreturn STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);\n}\n}\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\n/**\n* Parse xml step expression.\n*
\n* xml-step-expr := expression xml-step-start\n*\n* @param lhsExpr Preceding expression of /*, /<, or /**\\/< token\n* @return Parsed node\n*/\nprivate STNode parseXMLStepExpression(STNode lhsExpr) {\nSTNode xmlStepStart = parseXMLStepStart();\nreturn STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);\n}\n/**\n* Parse xml step start.\n*
\n* \n* xml-step-start :=\n* xml-all-children-step\n* | xml-element-children-step\n* | xml-element-descendants-step\n*
\n* xml-all-children-step := /*\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLStepStart() {\nSTToken token = peek();\nSTNode startToken;\nswitch (token.kind) {\ncase SLASH_ASTERISK_TOKEN:\nreturn consume();\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\nstartToken = parseDoubleSlashDoubleAsteriskLTToken();\nbreak;\ncase SLASH_LT_TOKEN:\ndefault:\nstartToken = parseSlashLTToken();\nbreak;\n}\nreturn parseXMLNamePatternChain(startToken);\n}\n/**\n* Parse /< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseSlashLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);\nreturn parseSlashLTToken();\n}\n}\n/**\n* Parse /< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDoubleSlashDoubleAsteriskLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);\nreturn parseDoubleSlashDoubleAsteriskLTToken();\n}\n}\n/**\n* Parse match statement.\n*
\n* match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]\n*\n* @return Match statement\n*/\nprivate STNode parseMatchStatement() {\nstartContext(ParserRuleContext.MATCH_STMT);\nSTNode matchKeyword = parseMatchKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nstartContext(ParserRuleContext.MATCH_BODY);\nSTNode openBrace = parseOpenBrace();\nList<STNode> matchClausesList = new ArrayList<>();\nwhile (!isEndOfMatchClauses(peek().kind)) {\nSTNode clause = parseMatchClause();\nmatchClausesList.add(clause);\n}\nSTNode matchClauses = STNodeFactory.createNodeList(matchClausesList);\nif (isNodeListEmpty(matchClauses)) {\nopenBrace = SyntaxErrors.addDiagnostic(openBrace,\nDiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nendContext();\nSTNode onFailClause = parseOptionalOnFailClause();\nreturn STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,\nonFailClause);\n}\n/**\n* Parse match keyword.\n*\n* @return Match keyword node\n*/\nprivate STNode parseMatchKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.MATCH_KEYWORD);\nreturn parseMatchKeyword();\n}\n}\nprivate boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse a single match clause.\n*
\n* \n* match-clause := match-pattern-list [match-guard] => block-stmt\n*
\n* match-guard := if expression\n*
\n*\n* @return A match clause\n*/\nprivate STNode parseMatchClause() {\nSTNode matchPatterns = parseMatchPatternList();\nSTNode matchGuard = parseMatchGuard();\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode blockStmt = parseBlockNode();\nreturn STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);\n}\n/**\n* Parse match guard.\n*
\n* match-guard := if expression\n*\n* @return Match guard\n*/\nprivate STNode parseMatchGuard() {\nswitch (peek().kind) {\ncase IF_KEYWORD:\nSTNode ifKeyword = parseIfKeyword();\nSTNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);\nreturn STNodeFactory.createMatchGuardNode(ifKeyword, expr);\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nrecover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);\nreturn parseMatchGuard();\n}\n}\n/**\n* Parse match patterns list.\n*
\n* match-pattern-list := match-pattern (| match-pattern)*\n*\n* @return Match patterns list\n*/\nprivate STNode parseMatchPatternList() {\nstartContext(ParserRuleContext.MATCH_PATTERN);\nList<STNode> matchClauses = new ArrayList<>();\nwhile (!isEndOfMatchPattern(peek().kind)) {\nSTNode clause = parseMatchPattern();\nif (clause == null) {\nbreak;\n}\nmatchClauses.add(clause);\nSTNode separator = parseMatchPatternListMemberRhs();\nif (separator == null) {\nbreak;\n}\nmatchClauses.add(separator);\n}\nendContext();\nreturn STNodeFactory.createNodeList(matchClauses);\n}\nprivate boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase RIGHT_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse match pattern.\n*
\n* \n* match-pattern := var binding-pattern\n* | wildcard-match-pattern\n* | const-pattern\n* | list-match-pattern\n* | mapping-match-pattern\n* | error-match-pattern\n* \n*\n* @return Match pattern\n*/\nprivate STNode parseMatchPattern() {\nswitch (peek().kind) {\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\ncase STRING_LITERAL_TOKEN:\nreturn parseSimpleConstExpr();\ncase IDENTIFIER_TOKEN:\nSTNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);\nreturn parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);\ncase VAR_KEYWORD:\nreturn parseVarTypedBindingPattern();\ncase OPEN_BRACKET_TOKEN:\nreturn parseListMatchPattern();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingMatchPattern();\ncase ERROR_KEYWORD:\nreturn parseErrorMatchPattern();\ndefault:\nrecover(peek(), ParserRuleContext.MATCH_PATTERN_START);\nreturn parseMatchPattern();\n}\n}\nprivate STNode parseMatchPatternListMemberRhs() {\nswitch (peek().kind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ncase IF_KEYWORD:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);\nreturn parseMatchPatternListMemberRhs();\n}\n}\n/**\n* Parse var typed binding pattern.\n*
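parseMatchPattern above is essentially a first-token dispatch. Collapsed to its decision table, with strings in place of SyntaxKind and only representative literal tokens (a sketch, not parser code):

```java
// Decision table of parseMatchPattern: the first token picks the pattern
// family; anything unrecognized goes through recovery and a retry.
class MatchPatternDispatchSketch {
    static String classify(String firstToken) {
        switch (firstToken) {
            case "var":   return "var binding-pattern";
            case "[":     return "list-match-pattern";
            case "{":     return "mapping-match-pattern";
            case "error": return "error-match-pattern";
            case "true":
            case "false":
            case "null":  return "const-pattern";
            default:      return "const-pattern or type/error reference";
        }
    }

    public static void main(String[] args) {
        System.out.println(classify("["));     // list-match-pattern
        System.out.println(classify("var"));   // var binding-pattern
        System.out.println(classify("42"));    // const-pattern or ...
    }
}
```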
\n* var binding-pattern\n*
\n*\n* @return Parsed typed binding pattern node\n*/\nprivate STNode parseVarTypedBindingPattern() {\nSTNode varKeyword = parseVarKeyword();\nSTNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);\n}\n/**\n* Parse var keyword.\n*\n* @return Var keyword node\n*/\nprivate STNode parseVarKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.VAR_KEYWORD) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.VAR_KEYWORD);\nreturn parseVarKeyword();\n}\n}\n/**\n* Parse list match pattern.\n*
\n* \n* list-match-pattern := [ list-member-match-patterns ]\n* list-member-match-patterns :=\n* match-pattern (, match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n* \n*
\n*\n* @return Parsed list match pattern node\n*/\nprivate STNode parseListMatchPattern() {\nstartContext(ParserRuleContext.LIST_MATCH_PATTERN);\nSTNode openBracketToken = parseOpenBracket();\nList matchPatternList = new ArrayList<>();\nSTNode listMatchPatternMemberRhs = null;\nboolean isEndOfFields = false;\nwhile (!isEndOfListMatchPattern()) {\nSTNode listMatchPatternMember = parseListMatchPatternMember();\nmatchPatternList.add(listMatchPatternMember);\nlistMatchPatternMemberRhs = parseListMatchPatternMemberRhs();\nif (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {\nisEndOfFields = true;\nbreak;\n}\nif (listMatchPatternMemberRhs != null) {\nmatchPatternList.add(listMatchPatternMemberRhs);\n} else {\nbreak;\n}\n}\nwhile (isEndOfFields && listMatchPatternMemberRhs != null) {\nupdateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);\nif (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {\nbreak;\n}\nSTNode invalidField = parseListMatchPatternMember();\nupdateLastNodeInListWithInvalidNode(matchPatternList, invalidField,\nDiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);\nlistMatchPatternMemberRhs = parseListMatchPatternMemberRhs();\n}\nSTNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);\n}\npublic boolean isEndOfListMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseListMatchPatternMember() {\nSTNode nextToken = peek();\nswitch (nextToken.kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ndefault:\nreturn parseMatchPattern();\n}\n}\n/**\n* Parse rest match pattern.\n*
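Both the list pattern parser above and the mapping pattern parser below share one rule: nothing may follow a rest pattern, yet trailing members are still parsed and attached as diagnostics rather than dropped. A sketch of that split, with plain strings and two lists in place of the invalid-node minutiae machinery (names invented):

```java
import java.util.ArrayList;
import java.util.List;

// Sketch of the "members after a rest pattern become diagnostics" loop in
// parseListMatchPattern: the real code attaches them as invalid-node
// minutiae; here they are merely collected separately.
class RestPatternSketch {
    public static void main(String[] args) {
        List<String> members = List.of("a", "b", "...rest", "c", "d");
        List<String> accepted = new ArrayList<>();
        List<String> flagged = new ArrayList<>();
        boolean seenRest = false;
        for (String m : members) {
            if (seenRest) {
                flagged.add(m);  // ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN
            } else {
                accepted.add(m);
                seenRest = m.startsWith("...");
            }
        }
        System.out.println("accepted=" + accepted + " flagged=" + flagged);
    }
}
```

Keeping the invalid members in the tree preserves the source text exactly, which matters for tooling built on the syntax tree.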
\n* \n* rest-match-pattern := ... var variable-name\n* \n*
\n*\n* @return Parsed rest match pattern node\n*/\nprivate STNode parseRestMatchPattern() {\nstartContext(ParserRuleContext.REST_MATCH_PATTERN);\nSTNode ellipsisToken = parseEllipsis();\nSTNode varKeywordToken = parseVarKeyword();\nSTNode variableName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);\nreturn STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);\n}\nprivate STNode parseListMatchPatternMemberRhs() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);\nreturn parseListMatchPatternMemberRhs();\n}\n}\n/**\n* Parse mapping match pattern.\n*
\n* mapping-match-pattern := { field-match-patterns }\n*
\n* field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n*
\n* field-match-pattern := field-name : match-pattern\n*
\n* rest-match-pattern := ... var variable-name\n*
\n*\n* @return Parsed mapping match pattern node.\n*/\nprivate STNode parseMappingMatchPattern() {\nstartContext(ParserRuleContext.MAPPING_MATCH_PATTERN);\nSTNode openBraceToken = parseOpenBrace();\nList<STNode> fieldMatchPatternList = new ArrayList<>();\nSTNode fieldMatchPatternRhs = null;\nboolean isEndOfFields = false;\nwhile (!isEndOfMappingMatchPattern()) {\nSTNode fieldMatchPatternMember = parseFieldMatchPatternMember();\nfieldMatchPatternList.add(fieldMatchPatternMember);\nfieldMatchPatternRhs = parseFieldMatchPatternRhs();\nif (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {\nisEndOfFields = true;\nbreak;\n}\nif (fieldMatchPatternRhs != null) {\nfieldMatchPatternList.add(fieldMatchPatternRhs);\n} else {\nbreak;\n}\n}\nwhile (isEndOfFields && fieldMatchPatternRhs != null) {\nupdateLastNodeInListWithInvalidNode(fieldMatchPatternList, fieldMatchPatternRhs, null);\nif (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nbreak;\n}\nSTNode invalidField = parseFieldMatchPatternMember();\nupdateLastNodeInListWithInvalidNode(fieldMatchPatternList, invalidField,\nDiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);\nfieldMatchPatternRhs = parseFieldMatchPatternRhs();\n}\nSTNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);\n}\nprivate STNode parseFieldMatchPatternMember() {\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseFieldMatchPattern();\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ndefault:\nrecover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);\nreturn parseFieldMatchPatternMember();\n}\n}\n/**\n* Parse field match pattern.\n*
\n* field-match-pattern := field-name : match-pattern\n*
\n*\n* @return Parsed field match pattern node\n*/\npublic STNode parseFieldMatchPattern() {\nSTNode fieldNameNode = parseVariableName();\nSTNode colonToken = parseColon();\nSTNode matchPattern = parseMatchPattern();\nreturn STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);\n}\npublic boolean isEndOfMappingMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseFieldMatchPatternRhs() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);\nreturn parseFieldMatchPatternRhs();\n}\n}\nprivate STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\nSTNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,\nParserRuleContext.ERROR_KEYWORD);\nstartContext(ParserRuleContext.ERROR_MATCH_PATTERN);\nreturn parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);\ndefault:\nif (isMatchPatternEnd(peek().kind)) {\nreturn typeRefOrConstExpr;\n}\nrecover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN, typeRefOrConstExpr);\nreturn parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);\n}\n}\nprivate boolean isMatchPatternEnd(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse functional match pattern.\n*
\n* error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )\n* error-arg-list-match-pattern :=\n* error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]\n* | [error-field-match-patterns]\n* error-message-match-pattern := simple-match-pattern\n* error-cause-match-pattern := simple-match-pattern | error-match-pattern\n* simple-match-pattern :=\n* wildcard-match-pattern\n* | const-pattern\n* | var variable-name\n* error-field-match-patterns :=\n* named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]\n* | rest-match-pattern\n* named-arg-match-pattern := arg-name = match-pattern\n*
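The error-arg-list grammar above allows an optional simple message pattern, an optional cause (simple or nested error pattern), then named-arg patterns with at most one trailing rest pattern; the parser below enforces exactly this shape. A rough validity check over kind labels, under the simplifying assumption that each arg is pre-classified (names invented):

```java
import java.util.List;

// Rough validity check for the error-arg-list shapes listed above:
// [message] [cause] named* [rest], with the rest pattern last.
class ErrorArgListSketch {
    static boolean valid(List<String> args) {
        int i = 0;
        if (i < args.size() && args.get(i).equals("simple")) i++;      // message
        if (i < args.size()
                && (args.get(i).equals("simple") || args.get(i).equals("error"))) i++; // cause
        while (i < args.size() && args.get(i).equals("named")) i++;    // fields
        if (i < args.size() && args.get(i).equals("rest")) i++;        // rest, last
        return i == args.size();
    }

    public static void main(String[] args) {
        System.out.println(valid(List.of("simple", "named", "rest"))); // true
        System.out.println(valid(List.of("rest", "named")));           // false
    }
}
```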
\n*\n* @return Parsed functional match pattern node.\n*/\nprivate STNode parseErrorMatchPattern() {\nstartContext(ParserRuleContext.ERROR_MATCH_PATTERN);\nSTNode errorKeyword = consume();\nreturn parseErrorMatchPattern(errorKeyword);\n}\nprivate STNode parseErrorMatchPattern(STNode errorKeyword) {\nSTToken nextToken = peek();\nSTNode typeRef;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\ntypeRef = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nif (isTypeReferenceStartToken(nextToken.kind)) {\ntypeRef = parseTypeReference();\nbreak;\n}\nrecover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);\nreturn parseErrorMatchPattern(errorKeyword);\n}\nreturn parseErrorMatchPattern(errorKeyword, typeRef);\n}\nprivate STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {\nSTNode openParenthesisToken = parseOpenParenthesis();\nSTNode argListMatchPatternNode = parseErrorArgListMatchPatterns();\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken,\nargListMatchPatternNode, closeParenthesisToken);\n}\nprivate STNode parseErrorArgListMatchPatterns() {\nList argListMatchPatterns = new ArrayList<>();\nif (isEndOfErrorFieldMatchPatterns()) {\nreturn STNodeFactory.createNodeList(argListMatchPatterns);\n}\nstartContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);\nSTNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);\nendContext();\nif (firstArg == null) {\nreturn STNodeFactory.createNodeList(argListMatchPatterns);\n}\nif (isSimpleMatchPattern(firstArg.kind)) {\nargListMatchPatterns.add(firstArg);\nSTNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);\nif (argEnd != null) {\nSTNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);\nif (isValidSecondArgMatchPattern(secondArg.kind)) {\nargListMatchPatterns.add(argEnd);\nargListMatchPatterns.add(secondArg);\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,\nDiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);\n}\n}\n} else {\nif (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&\nfirstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {\naddInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);\n} else {\nargListMatchPatterns.add(firstArg);\n}\n}\nparseErrorFieldMatchPatterns(argListMatchPatterns);\nreturn STNodeFactory.createNodeList(argListMatchPatterns);\n}\nprivate boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {\nswitch (matchPatternKind) {\ncase IDENTIFIER_TOKEN:\ncase SIMPLE_NAME_REFERENCE:\ncase NUMERIC_LITERAL:\ncase STRING_LITERAL:\ncase NULL_LITERAL:\ncase NIL_LITERAL:\ncase BOOLEAN_LITERAL:\ncase TYPED_BINDING_PATTERN:\ncase UNARY_EXPRESSION:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {\nswitch (syntaxKind) {\ncase ERROR_MATCH_PATTERN:\ncase NAMED_ARG_MATCH_PATTERN:\ncase REST_MATCH_PATTERN:\nreturn true;\ndefault:\nif (isSimpleMatchPattern(syntaxKind)) {\nreturn true;\n}\nreturn false;\n}\n}\n/**\n* Parse error field match patterns.\n* error-field-match-patterns :=\n* named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]\n* | rest-match-pattern\n* named-arg-match-pattern := arg-name = match-pattern\n* @param 
argListMatchPatterns\n*/\nprivate void parseErrorFieldMatchPatterns(List argListMatchPatterns) {\nSyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;\nwhile (!isEndOfErrorFieldMatchPatterns()) {\nSTNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);\nif (argEnd == null) {\nbreak;\n}\nSTNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);\nDiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);\nif (errorCode == null) {\nargListMatchPatterns.add(argEnd);\nargListMatchPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else if (argListMatchPatterns.size() == 0) {\naddInvalidNodeToNextToken(argEnd, null);\naddInvalidNodeToNextToken(currentArg, errorCode);\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);\n}\n}\n}\nprivate boolean isEndOfErrorFieldMatchPatterns() {\nreturn isEndOfErrorFieldBindingPatterns();\n}\nprivate STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn consume();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), currentCtx);\nreturn parseErrorArgListMatchPatternEnd(currentCtx);\n}\n}\nprivate STNode parseErrorArgListMatchPattern(ParserRuleContext context) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseNamedOrSimpleMatchPattern();\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\ncase STRING_LITERAL_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseMatchPattern();\ncase VAR_KEYWORD:\nSTNode varKeyword = consume();\nSTNode variableName = createCaptureOrWildcardBP(parseVariableName());\nreturn STNodeFactory.createTypedBindingPatternNode(varKeyword, variableName);\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nrecover(nextToken, context);\nreturn parseErrorArgListMatchPattern(context);\n}\n}\nprivate STNode parseNamedOrSimpleMatchPattern() {\nSTNode identifier = consume();\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nreturn parseNamedArgMatchPattern(identifier);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn identifier;\n}\n}\n/**\n* Parses the next named arg match pattern.\n*
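After the first one or two positional patterns, parseErrorFieldMatchPatterns only has to enforce a pairwise rule, the one validateErrorFieldMatchPatternOrder below implements: named-arg and rest patterns are accepted unless they follow a rest pattern, and anything else is rejected outright. Restated as a sketch (enum and class names invented):

```java
// Pairwise ordering rule from validateErrorFieldMatchPatternOrder: null
// means "ok", otherwise the string names the diagnostic that gets attached.
class ErrorFieldOrderSketch {
    enum Kind { NAMED_ARG, REST, OTHER }

    static String validate(Kind prev, Kind current) {
        switch (current) {
            case NAMED_ARG:
            case REST:
                return prev == Kind.REST ? "rest-arg-followed-by-another-arg" : null;
            default:
                return "match-pattern-not-allowed";
        }
    }

    public static void main(String[] args) {
        System.out.println(validate(Kind.NAMED_ARG, Kind.NAMED_ARG)); // null: ok
        System.out.println(validate(Kind.NAMED_ARG, Kind.REST));      // null: ok
        System.out.println(validate(Kind.REST, Kind.NAMED_ARG));      // error
    }
}
```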
\n* named-arg-match-pattern := arg-name = match-pattern\n*
\n*
\n*\n* @return arg match pattern list node added the new arg match pattern\n*/\nprivate STNode parseNamedArgMatchPattern(STNode identifier) {\nstartContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);\nSTNode equalToken = parseAssignOp();\nSTNode matchPattern = parseMatchPattern();\nendContext();\nreturn STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);\n}\nprivate DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\nswitch (currentArgKind) {\ncase NAMED_ARG_MATCH_PATTERN:\ncase REST_MATCH_PATTERN:\nif (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {\nreturn DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;\n}\nreturn null;\ndefault:\nreturn DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;\n}\n}\n/**\n* Parse markdown documentation.\n*\n* @return markdown documentation node\n*/\nprivate STNode parseMarkdownDocumentation() {\nList markdownDocLineList = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {\nSTToken documentationString = consume();\nSTNode parsedDocLines = parseDocumentationString(documentationString);\nappendParsedDocumentationLines(markdownDocLineList, parsedDocLines);\nnextToken = peek();\n}\nSTNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);\nreturn STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);\n}\n/**\n* Parse documentation string.\n*\n* @return markdown documentation line list node\n*/\nprivate STNode parseDocumentationString(STToken documentationStringToken) {\nList leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());\nCollection diagnostics = new ArrayList<>((documentationStringToken.diagnostics()));\nCharReader charReader = CharReader.from(documentationStringToken.text());\nDocumentationLexer documentationLexer = new DocumentationLexer(charReader, leadingTriviaList, diagnostics);\nAbstractTokenReader tokenReader = new TokenReader(documentationLexer);\nDocumentationParser documentationParser = new DocumentationParser(tokenReader);\nreturn documentationParser.parse();\n}\nprivate List getLeadingTriviaList(STNode leadingMinutiaeNode) {\nList leadingTriviaList = new ArrayList<>();\nint bucketCount = leadingMinutiaeNode.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nleadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));\n}\nreturn leadingTriviaList;\n}\nprivate void appendParsedDocumentationLines(List markdownDocLineList, STNode parsedDocLines) {\nint bucketCount = parsedDocLines.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nSTNode markdownDocLine = parsedDocLines.childInBucket(i);\nmarkdownDocLineList.add(markdownDocLine);\n}\n}\n/**\n* Parse any statement that starts with a token that has ambiguity between being\n* a type-desc or an expression.\n*\n* @param annots Annotations\n* @param qualifiers\n* @return Statement node\n*/\nprivate STNode parseStmtStartsWithTypeOrExpr(STNode annots, List qualifiers) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);\n}\nprivate STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {\nif (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nList varDeclQualifiers = new ArrayList<>();\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, varDeclQualifiers, 
typedBindingPatternOrExpr, false);\n}\nSTNode expr = getExpression(typedBindingPatternOrExpr);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {\nList typeDescQualifiers = new ArrayList<>();\nreturn parseTypedBindingPatternOrExpr(typeDescQualifiers, allowAssignment);\n}\nprivate STNode parseTypedBindingPatternOrExpr(List qualifiers, boolean allowAssignment) {\nparseTypeDescQualifiers(qualifiers);\nSTToken nextToken = peek();\nSTNode typeOrExpr;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\nreportInvalidQualifierList(qualifiers);\nreturn parseTypedBPOrExprStartsWithOpenParenthesis();\ncase FUNCTION_KEYWORD:\nreturn parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);\ncase IDENTIFIER_TOKEN:\nreportInvalidQualifierList(qualifiers);\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase OPEN_BRACKET_TOKEN:\nreportInvalidQualifierList(qualifiers);\ntypeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase STRING_LITERAL_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\nreportInvalidQualifierList(qualifiers);\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreportInvalidQualifierList(qualifiers);\nreturn parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());\n}\nreturn parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);\n}\n}\n/**\n* Parse the component after the ambiguous starting node. Ambiguous node could be either an expr\n* or a type-desc. The component followed by this ambiguous node could be the binding-pattern or\n* the expression-rhs.\n*\n* @param typeOrExpr Type desc or the expression\n* @param allowAssignment Flag indicating whether to allow assignment. 
i.e.: whether this is a\n* valid lvalue expression\n* @return Typed-binding-pattern node or an expression node\n*/\nprivate STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc = createUnionTypeDesc(typeOrExpr, pipe, typedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypedBPOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc = createIntersectionTypeDesc(typeOrExpr, ampersand, typedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypedBPOrExpr);\ncase SEMICOLON_TOKEN:\nif (isDefiniteExpr(typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\nif (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase EQUAL_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,\nParserRuleContext.AMBIGUOUS_STMT);\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\ndefault:\nif (isCompoundBinaryOperator(nextToken.kind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nSTToken token = peek();\nrecover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\n}\n}\nprivate STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {\ntypeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);\nreturn parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\nprivate STNode parseTypedBPOrExprStartsWithOpenParenthesis() {\nSTNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();\nif (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {\nreturn 
parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);\n}\nreturn parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);\n}\nprivate boolean isDefiniteTypeDesc(SyntaxKind kind) {\nreturn kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;\n}\nprivate boolean isDefiniteExpr(SyntaxKind kind) {\nif (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn false;\n}\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n}\n/**\n* Parse type or expression that starts with open parenthesis. Possible options are:\n* 1) () - nil type-desc or nil-literal\n* 2) (T) - Parenthesized type-desc\n* 3) (expr) - Parenthesized expression\n* 4) (param, param, ..) - Anon function params\n*\n* @return Type-desc or expression node\n*/\nprivate STNode parseTypedDescOrExprStartsWithOpenParenthesis() {\nSTNode openParen = parseOpenParenthesis();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nSTNode closeParen = parseCloseParenthesis();\nreturn parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);\n}\nSTNode typeOrExpr = parseTypeDescOrExpr();\nif (isAction(typeOrExpr)) {\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,\ncloseParen);\n}\nif (isExpression(typeOrExpr.kind)) {\nstartContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\nreturn parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);\n}\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeOrExpr, closeParen);\n}\n/**\n* Parse type-desc or expression. 
This method does not handle binding patterns.\n*\n* @return Type-desc node or expression node\n*/\nprivate STNode parseTypeDescOrExpr() {\nList typeDescQualifiers = new ArrayList<>();\nreturn parseTypeDescOrExpr(typeDescQualifiers);\n}\nprivate STNode parseTypeDescOrExpr(List qualifiers) {\nparseTypeDescQualifiers(qualifiers);\nSTToken nextToken = peek();\nSTNode typeOrExpr;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\nreportInvalidQualifierList(qualifiers);\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();\nbreak;\ncase FUNCTION_KEYWORD:\ntypeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);\nbreak;\ncase IDENTIFIER_TOKEN:\nreportInvalidQualifierList(qualifiers);\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypeDescOrExprRhs(typeOrExpr);\ncase OPEN_BRACKET_TOKEN:\nreportInvalidQualifierList(qualifiers);\ntypeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();\nbreak;\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase STRING_LITERAL_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\nreportInvalidQualifierList(qualifiers);\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypeDescOrExprRhs(basicLiteral);\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreportInvalidQualifierList(qualifiers);\nreturn parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());\n}\nreturn parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind)) {\nreturn parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n}\nreturn parseTypeDescOrExprRhs(typeOrExpr);\n}\nprivate boolean isExpression(SyntaxKind kind) {\nswitch (kind) {\ncase NUMERIC_LITERAL:\ncase STRING_LITERAL_TOKEN:\ncase NIL_LITERAL:\ncase NULL_LITERAL:\ncase BOOLEAN_LITERAL:\nreturn true;\ndefault:\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n}\n}\n/**\n* Parse statement that starts with an empty parenthesis. 
Empty parenthesis can be\n* 1) Nil literal\n* 2) Nil type-desc\n* 3) Anon-function params\n*\n* @param openParen Open parenthesis\n* @param closeParen Close parenthesis\n* @return Parsed node\n*/\nprivate STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nSTNode params = STNodeFactory.createEmptyNodeList();\nSTNode anonFuncParam =\nSTNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nendContext();\nreturn anonFuncParam;\ndefault:\nreturn STNodeFactory.createNilLiteralNode(openParen, closeParen);\n}\n}\nprivate STNode parseAnonFuncExprOrTypedBPWithFuncType(List qualifiers) {\nSTNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers);\nif (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {\nreturn exprOrTypeDesc;\n}\nreturn parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\n/**\n* Parse anon-func-expr or function-type-desc, by resolving the ambiguity.\n*\n* @param qualifiers Preceding qualifiers\n* @return Anon-func-expr or function-type-desc\n*/\nprivate STNode parseAnonFuncExprOrFuncTypeDesc(List qualifiers) {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);\nSTNode qualifierList;\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcSignature;\nif (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nfuncSignature = parseFuncSignature(true);\nqualifierList = createFuncTypeQualNodeList(qualifiers, true);\nendContext();\nreturn parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);\n}\nfuncSignature = STNodeFactory.createEmptyNode();\nqualifierList = createFuncTypeQualNodeList(qualifiers, false);\nSTNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,\nfuncSignature);\nif (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\ntrue);\n}\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE,\nfalse);\n}\nprivate STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {\nParserRuleContext currentCtx = getCurrentContext();\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nif (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\n}\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nfuncSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\nSTNode funcBody = parseAnonFuncBody(false);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nSTNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,\nfunctionKeyword, funcSignature, funcBody);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);\ncase IDENTIFIER_TOKEN:\ndefault:\nSTNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,\nfuncSignature);\nif (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\ntrue);\n}\nreturn parseComplexTypeDescriptor(funcTypeDesc, 
ParserRuleContext.TYPE_DESC_IN_TUPLE,\nfalse);\n}\n}\nprivate STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {\nSTToken nextToken = peek();\nSTNode typeDesc;\nswitch (nextToken.kind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn createUnionTypeDesc(typeDesc, pipe, rhsTypeDescOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn createIntersectionTypeDesc(typeDesc, ampersand, rhsTypeDescOrExpr);\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\ntypeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\nfalse);\nreturn typeDesc;\ncase SEMICOLON_TOKEN:\nreturn getTypeDescFromExpr(typeOrExpr);\ncase EQUAL_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\ncase COMMA_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,\nParserRuleContext.AMBIGUOUS_STMT);\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nreturn STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);\ndefault:\nif (isCompoundBinaryOperator(nextToken.kind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);\n}\nrecover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS, typeOrExpr);\nreturn parseTypeDescOrExprRhs(typeOrExpr);\n}\n}\nprivate boolean isAmbiguous(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase NIL_LITERAL:\ncase NULL_LITERAL:\ncase NUMERIC_LITERAL:\ncase STRING_LITERAL:\ncase BOOLEAN_LITERAL:\ncase BRACKETED_LIST:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN &&\nbinaryExpr.operator.kind != SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase INDEXED_EXPRESSION:\nSTIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;\nif (!isAmbiguous(indexExpr.containerExpression)) {\nreturn false;\n}\nSTNode keys = indexExpr.keyExpression;\nfor (int i = 0; i < keys.bucketCount(); i++) {\nSTNode item = keys.childInBucket(i);\nif (item.kind == SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAmbiguous(item)) {\nreturn false;\n}\n}\nreturn true;\ndefault:\nreturn false;\n}\n}
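\n/*\n* A minimal sketch of the rule encoded in isAmbiguous above (hypothetical helper,\n* not part of the original parser): only the pipe and bitwise-and operators keep a\n* binary expression ambiguous, because 'a|b' may be a union type-desc or a\n* bitwise-or expression, and 'a&b' may be an intersection type-desc or a\n* bitwise-and expression. Any other operator forces the expression reading.\n*/\nprivate static boolean keepsTypeExprAmbiguity(SyntaxKind operatorKind) {\nreturn operatorKind == SyntaxKind.PIPE_TOKEN || operatorKind == SyntaxKind.BITWISE_AND_TOKEN;\n}\n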
private boolean isAllBasicLiterals(STNode node) {\nswitch (node.kind) {\ncase NIL_LITERAL:\ncase NULL_LITERAL:\ncase NUMERIC_LITERAL:\ncase STRING_LITERAL:\ncase BOOLEAN_LITERAL:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN &&\nbinaryExpr.operator.kind != SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAllBasicLiterals(binaryExpr.lhsExpr) && isAllBasicLiterals(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAllBasicLiterals(((STBracedExpressionNode) node).expression);\ncase BRACKETED_LIST:\nSTAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;\nfor (STNode member : list.members) {\nif (member.kind == SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAllBasicLiterals(member)) {\nreturn false;\n}\n}\nreturn true;\ncase UNARY_EXPRESSION:\nSTUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;\nif (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&\nunaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {\nreturn false;\n}\nreturn isNumericLiteral(unaryExpr.expression);\ndefault:\nreturn false;\n}\n}\nprivate boolean isNumericLiteral(STNode node) {\nreturn node.kind == SyntaxKind.NUMERIC_LITERAL;\n}\nprivate STNode parseTupleTypeDescOrExprStartsWithOpenBracket() {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList<STNode> members = new ArrayList<>();\nSTNode memberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nSTNode expr = parseTypeDescOrExpr();\nif (peek().kind == SyntaxKind.ELLIPSIS_TOKEN && isDefiniteTypeDesc(expr.kind)) {\nSTNode ellipsis = consume();\nexpr = STNodeFactory.createRestDescriptorNode(expr, ellipsis);\n}\nmembers.add(expr);\nmemberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmembers.add(memberEnd);\n}\nSTNode memberNodes = STNodeFactory.createNodeList(members);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);\n}
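\n/*\n* A minimal sketch (hypothetical helper, not part of the original parser) of why\n* parseTupleTypeDescOrExprStartsWithOpenBracket can commit to a tuple type-desc:\n* [int, string] has only definite type-desc members, while [1, \"x\"] must stay\n* ambiguous between a tuple of singleton types and a list constructor until the\n* token after the close bracket is seen.\n*/\nprivate boolean allMembersAreDefiniteTypeDescs(List<STNode> members) {\nfor (STNode member : members) {\nif (member.kind != SyntaxKind.COMMA_TOKEN && !isDefiniteTypeDesc(member.kind)) {\nreturn false;\n}\n}\nreturn true;\n}\n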
/**\n* Parse binding-patterns.\n* \n* binding-pattern := capture-binding-pattern\n* | wildcard-binding-pattern\n* | list-binding-pattern\n* | mapping-binding-pattern\n* | functional-binding-pattern\n* \n* capture-binding-pattern := variable-name\n* variable-name := identifier\n* \n* wildcard-binding-pattern := _\n* \n* list-binding-pattern := [ list-member-binding-patterns ]\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* \n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* field-binding-pattern := field-name : binding-pattern | variable-name\n* \n* rest-binding-pattern := ... variable-name\n* \n* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )\n* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]\n* | other-arg-binding-patterns\n* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*\n* positional-arg-binding-pattern := binding-pattern\n* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]\n* | [rest-binding-pattern]\n* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*\n* named-arg-binding-pattern := arg-name = binding-pattern\n*
\n*\n* @return binding-pattern node\n*/\nprivate STNode parseBindingPattern() {\nswitch (peek().kind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseListBindingPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseBindingPatternStartsWithIdentifier();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPattern();\ncase ERROR_KEYWORD:\nreturn parseErrorBindingPattern();\ndefault:\nrecover(peek(), ParserRuleContext.BINDING_PATTERN);\nreturn parseBindingPattern();\n}\n}\nprivate STNode parseBindingPatternStartsWithIdentifier() {\nSTNode argNameOrBindingPattern =\nparseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\nSTToken secondToken = peek();\nif (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nstartContext(ParserRuleContext.ERROR_BINDING_PATTERN);\nSTNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,\nParserRuleContext.ERROR_KEYWORD);\nreturn parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);\n}\nif (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {\nSTNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\nidentifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);\nreturn createCaptureOrWildcardBP(identifier);\n}\nreturn createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);\n}\nprivate STNode createCaptureOrWildcardBP(STNode varName) {\nSTNode bindingPattern;\nif (isWildcardBP(varName)) {\nbindingPattern = getWildcardBindingPattern(varName);\n} else {\nbindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);\n}\nreturn bindingPattern;\n}\n/**\n* Parse list-binding-patterns.\n*
\n* \n* list-binding-pattern := [ list-member-binding-patterns ]\n*
\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
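\n* \n* e.g. (illustrative examples, not from the original doc): [a, b] binds two\n* capture binding patterns; [_, ...rest] has a wildcard binding pattern followed\n* by a rest binding pattern.\n*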
\n*\n* @return list-binding-pattern node\n*/\nprivate STNode parseListBindingPattern() {\nstartContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode openBracket = parseOpenBracket();\nList bindingPatternsList = new ArrayList<>();\nSTNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);\nendContext();\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, List bindingPatternsList) {\nif (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {\nSTNode closeBracket = parseCloseBracket();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);\nreturn STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);\n}\nSTNode listBindingPatternMember = parseListBindingPatternMember();\nbindingPatternsList.add(listBindingPatternMember);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List bindingPatterns) {\nSTNode member = firstMember;\nSTToken token = peek();\nSTNode listBindingPatternRhs = null;\nwhile (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\nlistBindingPatternRhs = parseListBindingPatternMemberRhs();\nif (listBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(listBindingPatternRhs);\nmember = parseListBindingPatternMember();\nbindingPatterns.add(member);\ntoken = peek();\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);\n}\nprivate STNode parseListBindingPatternMemberRhs() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);\nreturn parseListBindingPatternMemberRhs();\n}\n}\nprivate boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse list-binding-pattern member.\n*
\n* \n* list-binding-pattern := [ list-member-binding-patterns ]\n*
\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
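\n* \n* e.g. (illustrative): in [x, ...others], the member x is a capture binding\n* pattern and ...others is a rest binding pattern, which must be the last member.\n*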
\n*\n* @return List binding pattern member\n*/\nprivate STNode parseListBindingPatternMember() {\nswitch (peek().kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase OPEN_BRACKET_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ndefault:\nrecover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);\nreturn parseListBindingPatternMember();\n}\n}\n/**\n* Parse rest binding pattern.\n*
\n* \n* rest-binding-pattern := ... variable-name\n* \n*\n* @return Rest binding pattern node\n*/\nprivate STNode parseRestBindingPattern() {\nstartContext(ParserRuleContext.REST_BINDING_PATTERN);\nSTNode ellipsis = parseEllipsis();\nSTNode varName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);\nreturn STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);\n}\n/**\n* Parse Typed-binding-pattern.\n*
\n* \n* typed-binding-pattern := inferable-type-descriptor binding-pattern\n* \n* inferable-type-descriptor := type-descriptor | var\n*
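\n* \n* e.g. (illustrative): int x uses a type-descriptor, while var [a, b] uses\n* var with a list binding pattern; both are typed binding patterns.\n*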
\n*\n* @return Typed binding pattern node\n*/\nprivate STNode parseTypedBindingPattern(ParserRuleContext context) {\nList typeDescQualifiers = new ArrayList<>();\nreturn parseTypedBindingPattern(typeDescQualifiers, context);\n}\nprivate STNode parseTypedBindingPattern(List qualifiers, ParserRuleContext context) {\nSTNode typeDesc = parseTypeDescriptor(qualifiers,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);\nSTNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);\nreturn typeBindingPattern;\n}\n/**\n* Parse mapping-binding-patterns.\n*
\n* \n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* \n* field-binding-pattern := field-name : binding-pattern | variable-name\n*
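\n* \n* e.g. (illustrative): {name: n, age} binds the name field to n and the age\n* field to a variable named age; {id, ...rest} adds a rest binding pattern.\n*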
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPattern() {\nstartContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nSTNode openBrace = parseOpenBrace();\nSTToken token = peek();\nif (isEndOfMappingBindingPattern(token.kind)) {\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);\n}\nList bindingPatterns = new ArrayList<>();\nSTNode prevMember = parseMappingBindingPatternMember();\nif (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {\nbindingPatterns.add(prevMember);\n}\nreturn parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);\n}\nprivate STNode parseMappingBindingPattern(STNode openBrace, List bindingPatterns, STNode prevMember) {\nSTToken token = peek();\nSTNode mappingBindingPatternRhs = null;\nwhile (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {\nmappingBindingPatternRhs = parseMappingBindingPatternEnd();\nif (mappingBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(mappingBindingPatternRhs);\nprevMember = parseMappingBindingPatternMember();\nif (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {\nbreak;\n}\nbindingPatterns.add(prevMember);\ntoken = peek();\n}\nif (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {\nbindingPatterns.add(prevMember);\n}\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);\n}\n/**\n* Parse mapping-binding-pattern entry.\n*
\n* \n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* \n* field-binding-pattern := field-name : binding-pattern\n* | variable-name\n*
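\n* \n* e.g. (illustrative): {address: {city, ...rest}} nests a mapping binding\n* pattern as the binding pattern of the address field.\n*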
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPatternMember() {\nSTToken token = peek();\nswitch (token.kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ndefault:\nreturn parseFieldBindingPattern();\n}\n}\nprivate STNode parseMappingBindingPatternEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);\nreturn parseMappingBindingPatternEnd();\n}\n}\n/**\n* Parse field-binding-pattern.\n* field-binding-pattern := field-name : binding-pattern | varname\n*\n* @return field-binding-pattern node\n*/\nprivate STNode parseFieldBindingPattern() {\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nSTNode fieldBindingPattern = parseFieldBindingPattern(identifier);\nreturn fieldBindingPattern;\ndefault:\nrecover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nreturn parseFieldBindingPattern();\n}\n}\nprivate STNode parseFieldBindingPattern(STNode identifier) {\nSTNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);\n}\nSTNode colon = parseColon();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);\n}\nprivate boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {\nreturn nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;\n}\nprivate STNode parseErrorTypeDescOrErrorBP(STNode annots) {\nSTToken nextNextToken = peek(2);\nswitch (nextNextToken.kind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseAsErrorBindingPattern();\ncase LT_TOKEN:\nreturn parseAsErrorTypeDesc(annots);\ncase IDENTIFIER_TOKEN:\nSyntaxKind nextNextNextTokenKind = peek(3).kind;\nif (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||\nnextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseAsErrorBindingPattern();\n}\ndefault:\nreturn parseAsErrorTypeDesc(annots);\n}\n}\nprivate STNode parseAsErrorBindingPattern() {\nstartContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(parseErrorBindingPattern());\n}\nprivate STNode parseAsErrorTypeDesc(STNode annots) {\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword);\n}\n/**\n* Parse error binding pattern node.\n*
\n* \n* error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )\n* \n* error-arg-list-binding-pattern :=\n* error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]\n* | [error-field-binding-patterns]\n* \n* error-message-binding-pattern := simple-binding-pattern\n* \n* error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern\n* \n* simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern\n* \n* error-field-binding-patterns :=\n* named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]\n* | rest-binding-pattern\n*
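\n* \n* e.g. (illustrative): error(msg), error(msg, cause), and\n* error(msg, code = c, ...rest) all match this production; code = c is a\n* named-arg binding pattern, as defined below.\n*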
\n* named-arg-binding-pattern := arg-name = binding-pattern\n*\n* @return Error binding pattern node.\n*/\nprivate STNode parseErrorBindingPattern() {\nstartContext(ParserRuleContext.ERROR_BINDING_PATTERN);\nSTNode errorKeyword = parseErrorKeyword();\nreturn parseErrorBindingPattern(errorKeyword);\n}\nprivate STNode parseErrorBindingPattern(STNode errorKeyword) {\nSTToken nextToken = peek();\nSTNode typeRef;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\ntypeRef = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nif (isTypeReferenceStartToken(nextToken.kind)) {\ntypeRef = parseTypeReference();\nbreak;\n}\nrecover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);\nreturn parseErrorBindingPattern(errorKeyword);\n}\nreturn parseErrorBindingPattern(errorKeyword, typeRef);\n}\nprivate STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {\nSTNode openParenthesis = parseOpenParenthesis();\nSTNode argListBindingPatterns = parseErrorArgListBindingPatterns();\nSTNode closeParenthesis = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis,\nargListBindingPatterns, closeParenthesis);\n}\n/**\n* Parse error arg list binding pattern.\n*
\n* \n* error-arg-list-binding-pattern :=\n* error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]\n* | [error-field-binding-patterns]\n* \n* error-message-binding-pattern := simple-binding-pattern\n* \n* error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern\n* \n* simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern\n* \n* error-field-binding-patterns :=\n* named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]\n* | rest-binding-pattern\n*
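\n* \n* e.g. (illustrative): in error(msg, cause, code = c, ...rest), the message\n* binding pattern comes first, the cause second, named-arg binding patterns\n* next, and the rest binding pattern, if any, must be last (see\n* validateErrorFieldBindingPatternOrder below).\n*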
\n* named-arg-binding-pattern := arg-name = binding-pattern\n* \n*\n* @return Error arg list binding patterns.\n*/\nprivate STNode parseErrorArgListBindingPatterns() {\nList argListBindingPatterns = new ArrayList<>();\nif (isEndOfErrorFieldBindingPatterns()) {\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nreturn parseErrorArgListBindingPatterns(argListBindingPatterns);\n}\nprivate STNode parseErrorArgListBindingPatterns(List argListBindingPatterns) {\nSTNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);\nif (firstArg == null) {\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nswitch (firstArg.kind) {\ncase CAPTURE_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nargListBindingPatterns.add(firstArg);\nreturn parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);\ncase ERROR_BINDING_PATTERN:\nSTNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nSTNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);\nmissingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,\nDiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);\nSTNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);\nargListBindingPatterns.add(missingErrorMsgBP);\nargListBindingPatterns.add(missingComma);\nargListBindingPatterns.add(firstArg);\nreturn parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);\ncase REST_BINDING_PATTERN:\ncase NAMED_ARG_BINDING_PATTERN:\nargListBindingPatterns.add(firstArg);\nreturn parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);\ndefault:\naddInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);\nreturn parseErrorArgListBindingPatterns(argListBindingPatterns);\n}\n}\nprivate STNode parseErrorArgListBPWithoutErrorMsg(List argListBindingPatterns) {\nSTNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);\nif (argEnd == null) {\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nSTNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);\nassert secondArg != null;\nswitch (secondArg.kind) {\ncase CAPTURE_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\ncase ERROR_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase NAMED_ARG_BINDING_PATTERN:\nargListBindingPatterns.add(argEnd);\nargListBindingPatterns.add(secondArg);\nreturn parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);\ndefault:\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,\nDiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);\nreturn parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);\n}\n}\nprivate STNode parseErrorArgListBPWithoutErrorMsgAndCause(List argListBindingPatterns,\nSyntaxKind lastValidArgKind) {\nwhile (!isEndOfErrorFieldBindingPatterns()) {\nSTNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);\nif (argEnd == null) {\nbreak;\n}\nSTNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);\nassert currentArg != null;\nDiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);\nif 
(errorCode == null) {\nargListBindingPatterns.add(argEnd);\nargListBindingPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else if (argListBindingPatterns.size() == 0) {\naddInvalidNodeToNextToken(argEnd, null);\naddInvalidNodeToNextToken(currentArg, errorCode);\n} else {\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);\n}\n}\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nprivate boolean isEndOfErrorFieldBindingPatterns() {\nSyntaxKind nextTokenKind = peek().kind;\nswitch (nextTokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn consume();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), currentCtx);\nreturn parseErrorArgsBindingPatternEnd(currentCtx);\n}\n}\nprivate STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {\nswitch (peek().kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase IDENTIFIER_TOKEN:\nSTNode argNameOrSimpleBindingPattern = consume();\nreturn parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ncase CLOSE_PAREN_TOKEN:\nif (isFirstArg) {\nreturn null;\n}\ndefault:\nrecover(peek(), context);\nreturn parseErrorArgListBindingPattern(context, isFirstArg);\n}\n}\nprivate STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nSTNode equal = consume();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern,\nequal, bindingPattern);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);\n}\n}\nprivate DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,\nSyntaxKind currentArgKind) {\nswitch (currentArgKind) {\ncase NAMED_ARG_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\nif (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {\nreturn DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;\n}\nreturn null;\ncase CAPTURE_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\ncase ERROR_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ndefault:\nreturn DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;\n}\n}\n/*\n* This parses Typed binding patterns and deals with ambiguity between types,\n* and binding patterns. 
An example is 'T[a]'.\n* The ambiguity lies in between:\n* 1) Array Type\n* 2) List binding pattern\n* 3) Member access expression.\n*/\n/**\n* Parse the component after the type-desc, of a typed-binding-pattern.\n*\n* @param typeDesc Starting type-desc of the typed-binding-pattern\n* @return Typed-binding pattern\n*/\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {\nreturn parseTypedBindingPatternTypeRhs(typeDesc, context, true);\n}\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase OPEN_BRACKET_TOKEN:\nSTNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);\nassert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;\nreturn typedBindingPattern;\ncase CLOSE_PAREN_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nif (!isRoot) {\nreturn typeDesc;\n}\ndefault:\nrecover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);\nreturn parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);\n}\n}\n/**\n* Parse typed-binding pattern with list, array-type-desc, or member-access-expr.\n*\n* @param typeDescOrExpr Type desc or the expression at the start\n* @param isTypedBindingPattern Whether this is a typed-binding-pattern.\n* @return Parsed node\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nif (isBracketedListEnd(peek().kind)) {\nreturn parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);\n}\nSTNode member = parseBracketedListMember(isTypedBindingPattern);\nSyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);\nswitch (currentNodeType) {\ncase ARRAY_TYPE_DESC:\nSTNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);\nreturn typedBindingPattern;\ncase LIST_BINDING_PATTERN:\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase INDEXED_EXPRESSION:\nreturn parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);\ncase NONE:\ndefault:\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd != null) {\nList<STNode> memberList = new ArrayList<>();\nmemberList.add(getBindingPattern(member));\nmemberList.add(memberEnd);\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, memberList);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,\nisTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {\nmember = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);\nSTNode closeBracket = 
parseCloseBracket();\nendContext();\nSTNode keyExpr = STNodeFactory.createNodeList(member);\nSTNode memberAccessExpr =\nSTNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);\n}\nprivate boolean isBracketedListEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse a member of an ambiguous bracketed list. This member could be:\n* 1) Array length\n* 2) Key expression of a member-access-expr\n* 3) A member-binding pattern of a list-binding-pattern.\n*\n* @param isTypedBindingPattern Is this in a definite typed-binding pattern\n* @return Parsed member node\n*/\nprivate STNode parseBracketedListMember(boolean isTypedBindingPattern) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase ASTERISK_TOKEN:\ncase STRING_LITERAL_TOKEN:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\ncase ELLIPSIS_TOKEN:\ncase OPEN_BRACKET_TOKEN:\nreturn parseStatementStartBracketedListMember();\ncase IDENTIFIER_TOKEN:\nif (isTypedBindingPattern) {\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\n}\nbreak;\ndefault:\nif (!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) {\nbreak;\n}\nParserRuleContext recoverContext =\nisTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH\n: ParserRuleContext.BRACKETED_LIST_MEMBER;\nrecover(peek(), recoverContext, isTypedBindingPattern);\nreturn parseBracketedListMember(isTypedBindingPattern);\n}\nSTNode expr = parseExpression();\nif (isWildcardBP(expr)) {\nreturn getWildcardBindingPattern(expr);\n}\nreturn expr;\n}\n/**\n* Treat the current node as an array, and parse the remainder of the binding pattern.\n*\n* @param typeDesc Type-desc\n* @param openBracket Open bracket\n* @param member Member\n* @return Parsed node\n*/\nprivate STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {\ntypeDesc = getTypeDescFromExpr(typeDesc);\ntypeDesc = validateForUsageOfVar(typeDesc);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,\ncontext);\n}\nprivate STNode parseBracketedListMemberEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);\nreturn parseBracketedListMemberEnd();\n}\n}\n/**\n* We reach here to break ambiguity of T[a]. 
This could be:\n* 1) Array Type Desc\n* 2) Member access on LHS\n* 3) Typed-binding-pattern\n*\n* @param typeDescOrExpr Type name or the expr that precedes the open-bracket.\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Close bracket\n* @param isTypedBindingPattern Whether this is a typed-binding-pattern.\n* @return Specific node that matches to T[a], after solving ambiguity.\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nSTNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\ncase OPEN_BRACKET_TOKEN:\nif (isTypedBindingPattern) {\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc = createArrayTypeDesc(typeDesc, openBracket, member, closeBracket);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\n}\nSTNode keyExpr = getKeyExpr(member);\nSTNode expr =\nSTNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);\ncase QUESTION_MARK_TOKEN:\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\ntypeDesc = parseComplexTypeDescriptor(arrayTypeDesc,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nreturn parseTypedBindingPatternTypeRhs(typeDesc, context);\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\nreturn parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket,\ncontext, isTypedBindingPattern);\ncase IN_KEYWORD:\nif (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase EQUAL_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nif (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\n}\nkeyExpr = getKeyExpr(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\ncase SEMICOLON_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase CLOSE_BRACE_TOKEN:\ncase COMMA_TOKEN:\nif (context == ParserRuleContext.AMBIGUOUS_STMT) {\nkeyExpr = getKeyExpr(member);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {\nkeyExpr = getKeyExpr(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\nbreak;\n}\nrecover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member, closeBracket,\nisTypedBindingPattern, allowAssignment, context);\nreturn 
parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,\nisTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode getKeyExpr(STNode member) {\nif (member == null) {\nSTToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);\nSTNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);\nreturn STNodeFactory.createNodeList(missingVarRef);\n}\nreturn STNodeFactory.createNodeList(member);\n}\nprivate STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket) {\nSTNode bindingPatterns;\nif (isEmpty(member)) {\nbindingPatterns = STNodeFactory.createEmptyNodeList();\n} else {\nSTNode bindingPattern = getBindingPattern(member);\nbindingPatterns = STNodeFactory.createNodeList(bindingPattern);\n}\nSTNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\n/**\n* Parse a union or intersection type-desc/binary-expression that involves ambiguous\n* bracketed list in lhs.\n*
\n* e.g: (T[a] & R..) or (T[a] | R.. )\n*
\n* Complexity occurs in scenarios such as T[a] |/& R[b]. If the token after this\n* is another binding-pattern, then (T[a] |/& R[b]) becomes the type-desc. However,\n* if the token following this is an equal or semicolon, then (T[a] |/& R) becomes\n* the type-desc, and [b] becomes the binding pattern.\n*\n* @param typeDescOrExpr Type desc or the expression\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Close bracket\n* @param context Context in which the typed binding pattern occurs\n* @return Parsed node\n*/\nprivate STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeOrAndToken = parseUnionOrIntersectionToken();\nSTNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);\nif (isTypedBindingPattern || typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);\nlhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);\nSTTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;\nSTNode newTypeDesc;\nif (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {\nnewTypeDesc = createUnionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);\n} else {\nnewTypeDesc =\ncreateIntersectionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);\n}\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);\n} else {\nSTNode keyExpr = getExpression(member);\nSTNode containerExpr = getExpression(typeDescOrExpr);\nSTNode lhsExpr =\nSTNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,\ntypedBindingPatternOrExpr);\n}\n}\nprivate STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {\nif (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\nSTUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);\nlhsTypeDesc = createUnionTypeDesc(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);\n} else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\nSTIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc =\ngetArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);\nlhsTypeDesc = createIntersectionTypeDesc(intersectionTypeDesc.leftTypeDesc,\nintersectionTypeDesc.bitwiseAndToken, middleTypeDesc);\n} else {\nlhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);\n}\nreturn lhsTypeDesc;\n}\n/**\n* Parse union (|) or intersection (&) type operator.\n*\n* @return pipe or bitwise and token\n*/\nprivate STNode parseUnionOrIntersectionToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);\nreturn parseUnionOrIntersectionToken();\n}\n}\n/**\n* Infer the type of the ambiguous bracketed list, based on the type of the member.\n*\n* @param memberNode Member node\n* @return Inferred 
type of the bracketed list\n*/\nprivate SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {\nif (isEmpty(memberNode)) {\nreturn SyntaxKind.NONE;\n}\nif (isDefiniteTypeDesc(memberNode.kind)) {\nreturn SyntaxKind.TUPLE_TYPE_DESC;\n}\nswitch (memberNode.kind) {\ncase ASTERISK_LITERAL:\nreturn SyntaxKind.ARRAY_TYPE_DESC;\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase QUALIFIED_NAME_REFERENCE:\ncase REST_TYPE:\nreturn SyntaxKind.TUPLE_TYPE_DESC;\ncase NUMERIC_LITERAL:\ncase SIMPLE_NAME_REFERENCE:\ncase BRACKETED_LIST:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.NONE;\ncase ERROR_CONSTRUCTOR:\nif (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {\nreturn SyntaxKind.NONE;\n}\nreturn SyntaxKind.INDEXED_EXPRESSION;\ndefault:\nif (isTypedBindingPattern) {\nreturn SyntaxKind.NONE;\n}\nreturn SyntaxKind.INDEXED_EXPRESSION;\n}\n}\n/*\n* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.\n* The ambiguity lies in between:\n* 1) Assignment that starts with list binding pattern\n* 2) Var-decl statement that starts with tuple type\n* 3) Statement that starts with list constructor, such as sync-send, etc.\n*/\n/**\n* Parse any statement that starts with an open-bracket.\n*\n* @param annots Annotations attached to the statement.\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {\nstartContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);\nreturn parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);\n}\nprivate STNode parseMemberBracketedList(boolean possibleMappingField) {\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStatementStartsWithOpenBracket(annots, false, possibleMappingField);\n}\n/**\n* The bracketed list at the start of a statement can be one of the following.\n* 1) List binding pattern\n* 2) Tuple type\n* 3) List constructor\n*\n* @param isRoot Is this the root of the list\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {\nstartContext(ParserRuleContext.STMT_START_BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nwhile (!isBracketedListEnd(peek().kind)) {\nSTNode member = parseStatementStartBracketedListMember();\nSyntaxKind currentNodeType = getStmtStartBracketedListType(member);\nswitch (currentNodeType) {\ncase TUPLE_TYPE_DESC:\nreturn parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\ncase LIST_BINDING_PATTERN:\nreturn parseAsListBindingPattern(openBracket, memberList, member, isRoot);\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nreturn parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);\ncase TUPLE_TYPE_DESC_OR_LIST_CONST:\nreturn parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);\ncase NONE:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bracketedList = parseStatementStartBracketedList(annots, openBracket, memberList, 
closeBracket, isRoot,\npossibleMappingField);\nreturn bracketedList;\n}\n/**\n* Parse a member of a list-binding-pattern, tuple-type-desc, or\n* list-constructor-expr, when the parent is ambiguous.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartBracketedListMember() {\nList typeDescQualifiers = new ArrayList<>();\nreturn parseStatementStartBracketedListMember(typeDescQualifiers);\n}\nprivate STNode parseStatementStartBracketedListMember(List qualifiers) {\nparseTypeDescQualifiers(qualifiers);\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase OPEN_BRACKET_TOKEN:\nreportInvalidQualifierList(qualifiers);\nreturn parseMemberBracketedList(false);\ncase IDENTIFIER_TOKEN:\nreportInvalidQualifierList(qualifiers);\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (isWildcardBP(identifier)) {\nSTNode varName = ((STSimpleNameReferenceNode) identifier).name;\nreturn getWildcardBindingPattern(varName);\n}\nif (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {\nSTNode ellipsis = parseEllipsis();\nreturn STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);\ncase OPEN_BRACE_TOKEN:\nreportInvalidQualifierList(qualifiers);\nreturn parseMappingBindingPatterOrMappingConstructor();\ncase ERROR_KEYWORD:\nreportInvalidQualifierList(qualifiers);\nSTToken nextNextToken = getNextNextToken();\nif (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||\nnextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn parseErrorBindingPatternOrErrorConstructor();\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase ELLIPSIS_TOKEN:\nreportInvalidQualifierList(qualifiers);\nreturn parseListBindingPatternMember();\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nreportInvalidQualifierList(qualifiers);\nif (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseExpression(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\nreportInvalidQualifierList(qualifiers);\nif (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nreturn parseExpression(false);\ncase OPEN_PAREN_TOKEN:\nreturn parseTypeDescOrExpr(qualifiers);\ncase FUNCTION_KEYWORD:\nreturn parseAnonFuncExprOrFuncTypeDesc(qualifiers);\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreportInvalidQualifierList(qualifiers);\nreturn parseExpression(false);\n}\nif (isTypeStartingToken(nextToken.kind)) {\nreturn parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nrecover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER, qualifiers);\nreturn parseStatementStartBracketedListMember(qualifiers);\n}\n}\nprivate STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List memberList,\nSTNode member, boolean isRoot) {\nmemberList.add(member);\nSTNode memberEnd = parseBracketedListMemberEnd();\nSTNode tupleTypeDescOrListCons;\nif (memberEnd == null) {\nSTNode closeBracket = parseCloseBracket();\ntupleTypeDescOrListCons =\nparseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);\n} else {\nmemberList.add(memberEnd);\ntupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);\n}\nreturn tupleTypeDescOrListCons;\n}\n/**\n* Parse tuple type desc or list constructor.\n*\n* @return Parsed node\n*/\nprivate STNode 
parseTupleTypeDescOrListConstructor(STNode annots) {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nreturn parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);\n}\nprivate STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List memberList,\nboolean isRoot) {\nSTToken nextToken = peek();\nwhile (!isBracketedListEnd(nextToken.kind)) {\nSTNode member = parseTupleTypeDescOrListConstructorMember(annots);\nSyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);\nswitch (currentNodeType) {\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase TUPLE_TYPE_DESC:\nreturn parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\ncase TUPLE_TYPE_DESC_OR_LIST_CONST:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBracket = parseCloseBracket();\nreturn parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);\n}\nprivate STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseTupleTypeDescOrListConstructor(annots);\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {\nSTNode ellipsis = parseEllipsis();\nreturn STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingConstructorExpr();\ncase ERROR_KEYWORD:\nSTToken nextNextToken = getNextNextToken();\nif (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||\nnextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn parseErrorConstructorExpr(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nif (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseExpression(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\nif (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nreturn parseExpression(false);\ncase OPEN_PAREN_TOKEN:\nreturn parseTypeDescOrExpr();\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreturn parseExpression(false);\n}\nif (isTypeStartingToken(nextToken.kind)) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nrecover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER, annots);\nreturn parseTupleTypeDescOrListConstructorMember(annots);\n}\n}\nprivate SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {\nreturn getStmtStartBracketedListType(memberNode);\n}\nprivate STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List members, STNode closeBracket,\nboolean isRoot) {\nSTNode tupleTypeOrListConst;\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(peek().kind, closeBracket.kind) ||\n(isRoot && 
peek().kind == SyntaxKind.EQUAL_TOKEN)) {\nmembers = getExpressionList(members);\nSTNode memberExpressions = STNodeFactory.createNodeList(members);\ntupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,\nmemberExpressions, closeBracket);\nbreak;\n}\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\nSTNode tupleTypeDesc =\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\ntupleTypeOrListConst =\nparseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n}\nendContext();\nif (!isRoot) {\nreturn tupleTypeOrListConst;\n}\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);\n}", + "target_code": "}", + "method_body_after": "private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {\nswitch (peek(lookahead + 1).kind) {\ncase IDENTIFIER_TOKEN:\nSyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;\nswitch (tokenAfterIdentifier) {\ncase ON_KEYWORD:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\ncase QUESTION_MARK_TOKEN:\nreturn false;\ndefault:\nreturn false;\n}\ncase ON_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse listener declaration, given the qualifier.\n*
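\n* Illustrative example (not from the source) matching the listener-decl grammar below, assuming an imported http module:\n* listener http:Listener securedEP = new (9090);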
\n* \n* listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;\n* \n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @return Parsed node\n*/\nprivate STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.LISTENER_DECL);\nSTNode listenerKeyword = parseListenerKeyword();\nif (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode listenerDecl =\nparseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);\nendContext();\nreturn listenerDecl;\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse listener keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseListenerKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LISTENER_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.LISTENER_KEYWORD);\nreturn parseListenerKeyword();\n}\n}\n/**\n* Parse constant declaration, given the qualifier.\n*
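\n* Illustrative example (not from the source) of the module-const-decl grammar below:\n* const int MAX_RETRY_COUNT = 3;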
\n* module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant declaration\n* @return Parsed node\n*/\nprivate STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.CONSTANT_DECL);\nSTNode constKeyword = parseConstantKeyword();\nreturn parseConstDecl(metadata, qualifier, constKeyword);\n}\n/**\n* Parse the components that follow the const keyword of a constant declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase ANNOTATION_KEYWORD:\nendContext();\nreturn parseAnnotationDeclaration(metadata, qualifier, constKeyword);\ncase IDENTIFIER_TOKEN:\nSTNode constantDecl =\nparseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);\nendContext();\nreturn constantDecl;\ndefault:\nif (isTypeStartingToken(nextToken.kind)) {\nbreak;\n}\nrecover(peek(), ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword);\nreturn parseConstDecl(metadata, qualifier, constKeyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\nprivate STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nboolean isListener) {\nSTNode varNameOrTypeName = parseStatementStartIdentifier();\nreturn parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);\n}\n/**\n* Parse the component that follows the first identifier in a const decl. 
The identifier\n* can be either the type-name (a user-defined type) or the var-name where the type-name\n* is not present.\n*\n* @param qualifier Qualifier that precedes the constant decl\n* @param keyword Keyword\n* @param typeOrVarName Identifier that follows the const-keyword\n* @return Parsed node\n*/\nprivate STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,\nSTNode typeOrVarName, boolean isListener) {\nif (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode type = typeOrVarName;\nSTNode variableName = parseVariableName();\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nSTNode type;\nSTNode variableName;\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\ntype = typeOrVarName;\nvariableName = parseVariableName();\nbreak;\ncase EQUAL_TOKEN:\nvariableName = ((STSimpleNameReferenceNode) typeOrVarName).name;\ntype = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword, typeOrVarName,\nisListener);\nreturn parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);\n}\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nprivate STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,\nSTNode type, STNode variableName) {\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nif (isListener) {\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse const keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseConstantKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONST_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.CONST_KEYWORD);\nreturn parseConstantKeyword();\n}\n}\n/**\n* Parse typeof expression.\n*
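\n* Illustrative usage (not from the source) of the typeof-expr grammar below: typeof someValue, where someValue is any in-scope variable.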
\n* \n* typeof-expr := typeof expression\n* \n*\n* @param isRhsExpr\n* @return Typeof expression node\n*/\nprivate STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode typeofKeyword = parseTypeofKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);\n}\n/**\n* Parse typeof-keyword.\n*\n* @return Typeof-keyword node\n*/\nprivate STNode parseTypeofKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPEOF_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.TYPEOF_KEYWORD);\nreturn parseTypeofKeyword();\n}\n}\n/**\n* Parse optional type descriptor given the type.\n*
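\n* Illustrative example (not from the source): string? middleName = ();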
\n* optional-type-descriptor := type-descriptor `?`\n*
\n*\n* @param typeDescriptorNode Preceding type descriptor\n* @return Parsed node\n*/\nprivate STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {\nstartContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);\nSTNode questionMarkToken = parseQuestionMark();\nendContext();\ntypeDescriptorNode = validateForUsageOfVar(typeDescriptorNode);\nreturn STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken);\n}\n/**\n* Parse unary expression.\n*
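\n* Illustrative usages (not from the source): -count, !isDone, ~flags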
\n* \n* unary-expr := + expression | - expression | ~ expression | ! expression\n* \n*\n* @param isRhsExpr\n* @return Unary expression node\n*/\nprivate STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode unaryOperator = parseUnaryOperator();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);\n}\n/**\n* Parse unary operator.\n* UnaryOperator := + | - | ~ | !\n*\n* @return Parsed node\n*/\nprivate STNode parseUnaryOperator() {\nSTToken token = peek();\nif (isUnaryOperator(token.kind)) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.UNARY_OPERATOR);\nreturn parseUnaryOperator();\n}\n}\n/**\n* Check whether the given token kind is a unary operator.\n*\n* @param kind STToken kind\n* @return true if the token kind refers to a unary operator. false otherwise\n*/\nprivate boolean isUnaryOperator(SyntaxKind kind) {\nswitch (kind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse array type descriptor.\n*
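\n* Illustrative type descriptors (not from the source): int[], int[5], int[*]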
\n* \n* array-type-descriptor := member-type-descriptor [ [ array-length ] ]\n* member-type-descriptor := type-descriptor\n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* inferred-array-length := *\n* \n*
\n*\n* @param memberTypeDesc\n* @return Parsed Node\n*/\nprivate STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {\nstartContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);\nSTNode openBracketToken = parseOpenBracket();\nSTNode arrayLengthNode = parseArrayLength();\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken);\n}\nprivate STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,\nSTNode closeBracketToken) {\nmemberTypeDesc = validateForUsageOfVar(memberTypeDesc);\nif (arrayLengthNode != null) {\nswitch (arrayLengthNode.kind) {\ncase ASTERISK_LITERAL:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ncase NUMERIC_LITERAL:\nSyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind;\nif (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||\nnumericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {\nbreak;\n}\ndefault:\nopenBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken,\narrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\narrayLengthNode = STNodeFactory.createEmptyNode();\n}\n}\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode,\ncloseBracketToken);\n}\n/**\n* Parse array length.\n*
\n* \n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* constant-reference-expr := variable-reference-expr\n* \n*
\n*\n* @return Parsed array length\n*/\nprivate STNode parseArrayLength() {\nSTToken token = peek();\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase ASTERISK_TOKEN:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);\ndefault:\nrecover(token, ParserRuleContext.ARRAY_LENGTH);\nreturn parseArrayLength();\n}\n}\n/**\n* Parse annotations.\n*
\n* Note: In the \n*
\n* annots := annotation*\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalAnnotations() {\nstartContext(ParserRuleContext.ANNOTATIONS);\nList annotList = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (nextToken.kind == SyntaxKind.AT_TOKEN) {\nannotList.add(parseAnnotation());\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(annotList);\n}\n/**\n* Parse annotation list with at least one annotation.\n*\n* @return Annotation list\n*/\nprivate STNode parseAnnotations() {\nstartContext(ParserRuleContext.ANNOTATIONS);\nList annotList = new ArrayList<>();\nannotList.add(parseAnnotation());\nwhile (peek().kind == SyntaxKind.AT_TOKEN) {\nannotList.add(parseAnnotation());\n}\nendContext();\nreturn STNodeFactory.createNodeList(annotList);\n}\n/**\n* Parse annotation attachment.\n*
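\n* Illustrative usage (not from the source), assuming a user-defined annotation MyAnnot that accepts a mapping value: @MyAnnot {priority: 1}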
\n* annotation := @ annot-tag-reference annot-value\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotation() {\nSTNode atToken = parseAtToken();\nSTNode annotReference;\nif (isAnnotTagReferenceToken()) {\nannotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);\n} else {\nannotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n}\nSTNode annotValue;\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nannotValue = parseMappingConstructorExpr();\n} else {\nannotValue = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);\n}\nprivate boolean isAnnotTagReferenceToken() {\nSyntaxKind nextTokenKind = peek().kind;\nreturn nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN || isQualifiedIdentifierPredeclaredPrefix(nextTokenKind);\n}\n/**\n* Parse '@' token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAtToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.AT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.AT);\nreturn parseAtToken();\n}\n}\n/**\n* Parse metadata. Meta data consist of optional doc string and\n* an annotations list.\n*
\n* metadata := [DocumentationString] annots\n*\n* @return Parsed node\n*/\nprivate STNode parseMetaData() {\nSTNode docString;\nSTNode annotations;\nswitch (peek().kind) {\ncase DOCUMENTATION_STRING:\ndocString = parseMarkdownDocumentation();\nannotations = parseOptionalAnnotations();\nbreak;\ncase AT_TOKEN:\ndocString = STNodeFactory.createEmptyNode();\nannotations = parseOptionalAnnotations();\nbreak;\ndefault:\nreturn STNodeFactory.createEmptyNode();\n}\nreturn createMetadata(docString, annotations);\n}\n/**\n* Create metadata node.\n*\n* @return A metadata node\n*/\nprivate STNode createMetadata(STNode docString, STNode annotations) {\nif (annotations == null && docString == null) {\nreturn STNodeFactory.createEmptyNode();\n} else {\nreturn STNodeFactory.createMetadataNode(docString, annotations);\n}\n}\n/**\n* Parse is expression.\n* \n* is-expr := expression is type-descriptor\n* \n*\n* @param lhsExpr Preceding expression of the is expression\n* @return Is expression node\n*/\nprivate STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode isKeyword = parseIsKeyword();\nSTNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr);\nreturn STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor);\n}\n/**\n* Parse is-keyword.\n*\n* @return Is-keyword node\n*/\nprivate STNode parseIsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IS_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.IS_KEYWORD);\nreturn parseIsKeyword();\n}\n}\n/**\n* Parse local type definition statement.\n* local-type-defn-stmt := [annots] type identifier type-descriptor ;\n*\n* @return local type definition statement\n*/\nprivate STNode parseLocalTypeDefinitionStatement(STNode annots) {\nstartContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);\nSTNode typeKeyword = parseTypeKeyword();\nSTNode typeName = parseTypeName();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,\nsemicolon);\n}\n/**\n* Parse statement which only consists of an action or expression.\n*\n* @param annots Annotations\n* @return Statement node\n*/\nprivate STNode parseExpressionStatement(STNode annots) {\nstartContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode expression = parseActionOrExpressionInLhs(annots);\nreturn getExpressionAsStatement(expression);\n}\n/**\n* Parse statements that start with an expression.\n*\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExpr(STNode annots) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode expr = parseActionOrExpressionInLhs(annots);\nreturn parseStatementStartWithExprRhs(expr);\n}\n/**\n* Parse the component that follows the expression, at the beginning of a statement.\n*\n* @param expression Action or expression in LHS\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExprRhs(STNode expression) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase EQUAL_TOKEN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(expression);\ncase SEMICOLON_TOKEN:\nreturn getExpressionAsStatement(expression);\ncase IDENTIFIER_TOKEN:\ndefault:\nif (isCompoundBinaryOperator(nextToken.kind)) {\nreturn parseCompoundAssignmentStmtRhs(expression);\n}\nParserRuleContext context;\nif 
(isPossibleExpressionStatement(expression)) {\ncontext = ParserRuleContext.EXPR_STMT_RHS;\n} else {\ncontext = ParserRuleContext.STMT_START_WITH_EXPR_RHS;\n}\nrecover(peek(), context, expression);\nreturn parseStatementStartWithExprRhs(expression);\n}\n}\nprivate boolean isPossibleExpressionStatement(STNode expression) {\nswitch (expression.kind) {\ncase METHOD_CALL:\ncase FUNCTION_CALL:\ncase CHECK_EXPRESSION:\ncase REMOTE_METHOD_CALL_ACTION:\ncase CHECK_ACTION:\ncase BRACED_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode getExpressionAsStatement(STNode expression) {\nswitch (expression.kind) {\ncase METHOD_CALL:\ncase FUNCTION_CALL:\ncase CHECK_EXPRESSION:\nreturn parseCallStatement(expression);\ncase REMOTE_METHOD_CALL_ACTION:\ncase CHECK_ACTION:\ncase BRACED_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\nreturn parseActionStatement(expression);\ndefault:\nSTNode semicolon = parseSemicolon();\nendContext();\nSTNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,\nexpression, semicolon);\nexprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);\nreturn exprStmt;\n}\n}\nprivate STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {\nSTNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);\nSTNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;\nif (lengthExprs.isEmpty()) {\nreturn createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),\nindexedExpr.closeBracket);\n}\nSTNode lengthExpr = lengthExprs.get(0);\nswitch (lengthExpr.kind) {\ncase ASTERISK_LITERAL:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ncase NUMERIC_LITERAL:\nSyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;\nif (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||\ninnerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {\nbreak;\n}\ndefault:\nSTNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(\nindexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\nindexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);\nlengthExpr = STNodeFactory.createEmptyNode();\n}\nreturn createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);\n}\n/**\n*
\n* Parse call statement, given the call expression.\n*
\n* \n* call-stmt := call-expr ;\n*
\n* call-expr := function-call-expr | method-call-expr | checking-keyword call-expr\n*
\n*\n* @param expression Call expression associated with the call statement\n* @return Call statement node\n*/\nprivate STNode parseCallStatement(STNode expression) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);\n}\nprivate STNode parseActionStatement(STNode action) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);\n}\n/**\n* Parse remote method call action, given the starting expression.\n*
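\n* Illustrative example (not from the source), assuming a client object clientEP: var res = clientEP->get(\"/books\");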
\n* \n* remote-method-call-action := expression -> method-name ( arg-list )\n*
\n* async-send-action := expression -> peer-worker ;\n*
\n*\n* @param isRhsExpr Is this an RHS action\n* @param expression LHS expression\n* @return\n*/\nprivate STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {\nSTNode rightArrow = parseRightArrow();\nreturn parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n}\nprivate STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {\nSTNode name;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase FUNCTION_KEYWORD:\nSTNode functionKeyword = consume();\nname = STNodeFactory.createSimpleNameReferenceNode(functionKeyword);\nreturn parseAsyncSendAction(expression, rightArrow, name);\ncase IDENTIFIER_TOKEN:\nname = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());\nbreak;\ncase CONTINUE_KEYWORD:\ncase COMMIT_KEYWORD:\nname = getKeywordAsSimpleNameRef();\nbreak;\ndefault:\nSTToken token = peek();\nrecover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow);\nreturn parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n}\nreturn parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);\n}\nprivate STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseRemoteMethodCallAction(expression, rightArrow, name);\ncase SEMICOLON_TOKEN:\nreturn parseAsyncSendAction(expression, rightArrow, name);\ndefault:\nrecover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);\nreturn parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);\n}\n}\nprivate STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {\nreturn STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);\n}\nprivate STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {\nSTNode openParenToken = parseArgListOpenParenthesis();\nSTNode arguments = parseArgsList();\nSTNode closeParenToken = parseArgListCloseParenthesis();\nreturn STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,\ncloseParenToken);\n}\n/**\n* Parse right arrow (->) token.\n*\n* @return Parsed node\n*/\nprivate STNode parseRightArrow() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.RIGHT_ARROW);\nreturn parseRightArrow();\n}\n}\n/**\n* Parse parameterized type descriptor.\n* parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter\n*\n* @return Parsed node\n*/\nprivate STNode parseParameterizedTypeDescriptor(STNode parameterizedTypeKeyword) {\nSTNode typeParameter = parseTypeParameter();\nreturn STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, typeParameter);\n}\n/**\n* Parse < token.\n*\n* @return Parsed node\n*/\nprivate STNode parseGTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.GT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.GT);\nreturn parseGTToken();\n}\n}\n/**\n* Parse > token.\n*\n* @return Parsed node\n*/\nprivate STNode parseLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.LT);\nreturn parseLTToken();\n}\n}\n/**\n* Parse nil literal. 
Here, the nil literal refers only to ( ).\n*\n* @return Parsed node\n*/\nprivate STNode parseNilLiteral() {\nstartContext(ParserRuleContext.NIL_LITERAL);\nSTNode openParenthesisToken = parseOpenParenthesis();\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);\n}\n/**\n* Parse annotation declaration, given the qualifier.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation declaration\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {\nstartContext(ParserRuleContext.ANNOTATION_DECL);\nSTNode annotationKeyword = parseAnnotationKeyword();\nSTNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\nendContext();\nreturn annotDecl;\n}\n/**\n* Parse annotation keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ANNOTATION_KEYWORD);\nreturn parseAnnotationKeyword();\n}\n}\n/**\n* Parse the components that follow the annotation keyword of an annotation declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @param annotationKeyword Annotation keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);\ndefault:\nif (isTypeStartingToken(nextToken.kind)) {\nbreak;\n}\nrecover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword,\nannotationKeyword);\nreturn parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\n/**\n* Parse annotation tag.\n*
\n* annot-tag := identifier\n*\n* @return\n*/\nprivate STNode parseAnnotationTag() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nrecover(peek(), ParserRuleContext.ANNOTATION_TAG);\nreturn parseAnnotationTag();\n}\n}\nprivate STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);\nif (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag, annotTag);\n}\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {\nSTNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,\nParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nSTNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;\nreturn parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);\n}\n/**\n* Parse the component that follows the first identifier in an annotation decl. The identifier\n* can be either the type-name (a user defined type) or the annot-tag, where the type-name\n* is not present.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation decl\n* @param constKeyword Const keyword\n* @param annotationKeyword Annotation keyword\n* @param typeDescOrAnnotTag Identifier that follows the annotation-keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDescOrAnnotTag) {\nSTToken nextToken = peek();\nSTNode typeDesc;\nSTNode annotTag;\nswitch (nextToken.kind) {\ncase IDENTIFIER_TOKEN:\ntypeDesc = typeDescOrAnnotTag;\nannotTag = parseAnnotationTag();\nbreak;\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ntypeDesc = STNodeFactory.createEmptyNode();\nannotTag = typeDescOrAnnotTag;\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag);\nreturn parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);\n}\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nprivate STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDesc, STNode annotTag) {\nSTNode onKeyword;\nSTNode attachPoints;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase SEMICOLON_TOKEN:\nonKeyword = STNodeFactory.createEmptyNode();\nattachPoints = STNodeFactory.createEmptyNodeList();\nbreak;\ncase ON_KEYWORD:\nonKeyword = parseOnKeyword();\nattachPoints = parseAnnotationAttachPoints();\nonKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword,\nannotationKeyword, typeDesc, annotTag);\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, 
typeDesc,\nannotTag);\n}\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDesc, annotTag, onKeyword, attachPoints, semicolonToken);\n}\n/**\n* Parse annotation attach points.\n*
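\n* Illustrative declaration (not from the source) using these attach points: annotation MyAnnot on function, parameter;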
\n* \n* annot-attach-points := annot-attach-point (, annot-attach-point)*\n*
\n* annot-attach-point := dual-attach-point | source-only-attach-point\n*
\n* dual-attach-point := [source] dual-attach-point-ident\n*
\n* dual-attach-point-ident :=\n* type\n* | class\n* | [object|service remote] function\n* | parameter\n* | return\n* | service\n* | [object|record] field\n*
\n* source-only-attach-point := source source-only-attach-point-ident\n*
\n* source-only-attach-point-ident :=\n* annotation\n* | external\n* | var\n* | const\n* | listener\n* | worker\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoints() {\nstartContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);\nList attachPoints = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndAnnotAttachPointList(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode attachPoint = parseAnnotationAttachPoint();\nattachPoints.add(attachPoint);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndAnnotAttachPointList(nextToken.kind)) {\nleadingComma = parseAttachPointEnd();\nif (leadingComma == null) {\nbreak;\n}\nattachPoints.add(leadingComma);\nattachPoint = parseAnnotationAttachPoint();\nif (attachPoint == null) {\nattachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nattachPoints.add(attachPoint);\nbreak;\n}\nattachPoints.add(attachPoint);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(attachPoints);\n}\n/**\n* Parse annotation attach point end.\n*\n* @return Parsed node\n*/\nprivate STNode parseAttachPointEnd() {\nswitch (peek().kind) {\ncase SEMICOLON_TOKEN:\nreturn null;\ncase COMMA_TOKEN:\nreturn consume();\ndefault:\nrecover(peek(), ParserRuleContext.ATTACH_POINT_END);\nreturn parseAttachPointEnd();\n}\n}\nprivate boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse annotation attach point.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoint() {\nswitch (peek().kind) {\ncase EOF_TOKEN:\nreturn null;\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\ncase SOURCE_KEYWORD:\nSTNode sourceKeyword = parseSourceKeyword();\nreturn parseAttachPointIdent(sourceKeyword);\ncase OBJECT_KEYWORD:\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ncase RECORD_KEYWORD:\ncase CLASS_KEYWORD:\nsourceKeyword = STNodeFactory.createEmptyNode();\nSTNode firstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nrecover(peek(), ParserRuleContext.ATTACH_POINT);\nreturn parseAnnotationAttachPoint();\n}\n}\n/**\n* Parse source keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseSourceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SOURCE_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.SOURCE_KEYWORD);\nreturn parseSourceKeyword();\n}\n}\n/**\n* Parse attach point ident gievn.\n*
\n* \n* source-only-attach-point-ident := annotation | external | var | const | listener | worker\n*
\n* dual-attach-point-ident := type | class | [object|service remote] function | parameter\n* | return | service | [object|record] field\n*
\n*\n* @param sourceKeyword Source keyword\n* @return Parsed node\n*/\nprivate STNode parseAttachPointIdent(STNode sourceKeyword) {\nswitch (peek().kind) {\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\nSTNode firstIdent = consume();\nSTNode identList = STNodeFactory.createNodeList(firstIdent);\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\ncase OBJECT_KEYWORD:\ncase RESOURCE_KEYWORD:\ncase RECORD_KEYWORD:\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ncase CLASS_KEYWORD:\nfirstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nrecover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);\nreturn parseAttachPointIdent(sourceKeyword);\n}\n}\n/**\n* Parse dual-attach-point ident.\n*\n* @param sourceKeyword Source keyword\n* @param firstIdent first part of the dual attach-point\n* @return Parsed node\n*/\nprivate STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {\nSTNode secondIdent;\nswitch (firstIdent.kind) {\ncase OBJECT_KEYWORD:\nsecondIdent = parseIdentAfterObjectIdent();\nbreak;\ncase RESOURCE_KEYWORD:\nsecondIdent = parseFunctionIdent();\nbreak;\ncase RECORD_KEYWORD:\nsecondIdent = parseFieldIdent();\nbreak;\ncase SERVICE_KEYWORD:\nreturn parseServiceAttachPoint(sourceKeyword, firstIdent);\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase FIELD_KEYWORD:\ncase CLASS_KEYWORD:\ndefault:\nSTNode identList = STNodeFactory.createNodeList(firstIdent);\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n}\nSTNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n}\n/**\n* Parse remote ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseRemoteIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.REMOTE_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.REMOTE_IDENT);\nreturn parseRemoteIdent();\n}\n}\n/**\n* Parse service attach point.\n* service-attach-point := service | service remote function\n*\n* @return Parsed node\n*/\nprivate STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {\nSTNode identList;\nSTToken token = peek();\nswitch (token.kind) {\ncase REMOTE_KEYWORD:\nSTNode secondIdent = parseRemoteIdent();\nSTNode thirdIdent = parseFunctionIdent();\nidentList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\ncase COMMA_TOKEN:\ncase SEMICOLON_TOKEN:\nidentList = STNodeFactory.createNodeList(firstIdent);\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\ndefault:\nrecover(token, ParserRuleContext.SERVICE_IDENT_RHS);\nreturn parseServiceAttachPoint(sourceKeyword, firstIdent);\n}\n}\n/**\n* Parse the idents that are supported after object-ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseIdentAfterObjectIdent() {\nSTToken token = peek();\nswitch (token.kind) {\ncase FUNCTION_KEYWORD:\ncase FIELD_KEYWORD:\nreturn consume();\ndefault:\nrecover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);\nreturn parseIdentAfterObjectIdent();\n}\n}\n/**\n* Parse function ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionIdent() 
{\nSTToken token = peek();\nif (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FUNCTION_IDENT);\nreturn parseFunctionIdent();\n}\n}\n/**\n* Parse field ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FIELD_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FIELD_IDENT);\nreturn parseFieldIdent();\n}\n}\n/**\n* Parse XML namespace declaration.\n*
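\n* Illustrative example (not from the source): xmlns \"http://example.com/ns\" as ns0;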
\n* xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;\n*
\n* xml-namespace-uri := simple-const-expr\n*
\n* xml-namespace-prefix := identifier\n*
\n*\n* @return\n*/\nprivate STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {\nstartContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);\nSTNode xmlnsKeyword = parseXMLNSKeyword();\nSTNode namespaceUri = parseSimpleConstExpr();\nwhile (!isValidXMLNameSpaceURI(namespaceUri)) {\nxmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,\nDiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);\nnamespaceUri = parseSimpleConstExpr();\n}\nSTNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\nendContext();\nreturn xmlnsDecl;\n}\n/**\n* Parse xmlns keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNSKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XMLNS_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.XMLNS_KEYWORD);\nreturn parseXMLNSKeyword();\n}\n}\nprivate boolean isValidXMLNameSpaceURI(STNode expr) {\nswitch (expr.kind) {\ncase STRING_LITERAL:\ncase QUALIFIED_NAME_REFERENCE:\ncase SIMPLE_NAME_REFERENCE:\nreturn true;\ncase IDENTIFIER_TOKEN:\ndefault:\nreturn false;\n}\n}\nprivate STNode parseSimpleConstExpr() {\nstartContext(ParserRuleContext.CONSTANT_EXPRESSION);\nSTNode expr = parseSimpleConstExprInternal();\nendContext();\nreturn expr;\n}\n/**\n* Parse simple constants expr.\n*\n* @return Parsed node\n*/\nprivate STNode parseSimpleConstExprInternal() {\nswitch (peek().kind) {\ncase STRING_LITERAL_TOKEN:\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nreturn parseBasicLiteral();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn parseSignedIntOrFloat();\ncase OPEN_PAREN_TOKEN:\nreturn parseNilLiteral();\ndefault:\nSTToken token = peek();\nrecover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);\nreturn parseSimpleConstExprInternal();\n}\n}\n/**\n* Parse the portion after the namsepsace-uri of an XML declaration.\n*\n* @param xmlnsKeyword XMLNS keyword\n* @param namespaceUri Namespace URI\n* @return Parsed node\n*/\nprivate STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {\nSTNode asKeyword = STNodeFactory.createEmptyNode();\nSTNode namespacePrefix = STNodeFactory.createEmptyNode();\nswitch (peek().kind) {\ncase AS_KEYWORD:\nasKeyword = parseAsKeyword();\nnamespacePrefix = parseNamespacePrefix();\nbreak;\ncase SEMICOLON_TOKEN:\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri, isModuleVar);\nreturn parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\n}\nSTNode semicolon = parseSemicolon();\nif (isModuleVar) {\nreturn STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,\nnamespacePrefix, semicolon);\n}\nreturn STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,\nsemicolon);\n}\n/**\n* Parse import prefix.\n*\n* @return Parsed node\n*/\nprivate STNode parseNamespacePrefix() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nrecover(peek(), ParserRuleContext.NAMESPACE_PREFIX);\nreturn parseNamespacePrefix();\n}\n}\n/**\n* Parse named worker declaration.\n*
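\n* Illustrative example (not from the source), valid inside a function body: worker w1 returns int { return 42; }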
\n* named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }\n* \n*\n* @param annots Annotations attached to the worker decl\n* @param qualifiers Preceding transactional keyword in a list\n* @return Parsed node\n*/\nprivate STNode parseNamedWorkerDeclaration(STNode annots, List qualifiers) {\nstartContext(ParserRuleContext.NAMED_WORKER_DECL);\nSTNode transactionalKeyword = getTransactionalKeyword(qualifiers);\nSTNode workerKeyword = parseWorkerKeyword();\nSTNode workerName = parseWorkerName();\nSTNode returnTypeDesc = parseReturnTypeDescriptor();\nSTNode workerBody = parseBlockNode();\nendContext();\nreturn STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword, workerName,\nreturnTypeDesc, workerBody);\n}\nprivate STNode getTransactionalKeyword(List qualifierList) {\nList validatedList = new ArrayList<>();\nfor (int i = 0; i < qualifierList.size(); i++) {\nSTNode qualifier = qualifierList.get(i);\nint nextIndex = i + 1;\nif (isSyntaxKindInList(validatedList, qualifier.kind)) {\nupdateLastNodeInListWithInvalidNode(validatedList, qualifier,\nDiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());\n} else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nvalidatedList.add(qualifier);\n} else if (qualifierList.size() == nextIndex) {\naddInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,\n((STToken) qualifier).text());\n} else {\nupdateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,\nDiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());\n}\n}\nSTNode transactionalKeyword;\nif (validatedList.isEmpty()) {\ntransactionalKeyword = STNodeFactory.createEmptyNode();\n} else {\ntransactionalKeyword = validatedList.get(0);\n}\nreturn transactionalKeyword;\n}\nprivate STNode parseReturnTypeDescriptor() {\nSTToken token = peek();\nif (token.kind != SyntaxKind.RETURNS_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode returnsKeyword = consume();\nSTNode annot = parseOptionalAnnotations();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\nreturn STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n}\n/**\n* Parse worker keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {\nreturn consume();\n} else {\nrecover(peek(), ParserRuleContext.WORKER_KEYWORD);\nreturn parseWorkerKeyword();\n}\n}\n/**\n* Parse worker name.\n*
\n* worker-name := identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerName() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nrecover(peek(), ParserRuleContext.WORKER_NAME);\nreturn parseWorkerName();\n}\n}\n/**\n* Parse lock statement.\n* lock-stmt := lock block-stmt [on-fail-clause]\n*\n* @return Lock statement\n*/\nprivate STNode parseLockStatement() {\nstartContext(ParserRuleContext.LOCK_STMT);\nSTNode lockKeyword = parseLockKeyword();\nSTNode blockStatement = parseBlockNode();\nendContext();\nSTNode onFailClause = parseOptionalOnFailClause();\nreturn STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);\n}\n/**\n* Parse lock-keyword.\n*\n* @return lock-keyword node\n*/\nprivate STNode parseLockKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LOCK_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.LOCK_KEYWORD);\nreturn parseLockKeyword();\n}\n}\n/**\n* Parse union type descriptor.\n* union-type-descriptor := type-descriptor | type-descriptor\n*\n* @param leftTypeDesc Type desc in the LHS os the union type desc.\n* @param context Current context.\n* @return parsed union type desc node\n*/\nprivate STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeToken = consume();\nSTNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false);\nreturn createUnionTypeDesc(leftTypeDesc, pipeToken, rightTypeDesc);\n}\nprivate STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {\nleftTypeDesc = validateForUsageOfVar(leftTypeDesc);\nrightTypeDesc = validateForUsageOfVar(rightTypeDesc);\nreturn STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);\n}\n/**\n* Parse pipe token.\n*\n* @return parsed pipe token node\n*/\nprivate STNode parsePipeToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.PIPE);\nreturn parsePipeToken();\n}\n}\nprivate boolean isTypeStartingToken(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase IDENTIFIER_TOKEN:\ncase SERVICE_KEYWORD:\ncase RECORD_KEYWORD:\ncase OBJECT_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CLIENT_KEYWORD:\ncase OPEN_PAREN_TOKEN:\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TABLE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase DISTINCT_KEYWORD:\ncase ISOLATED_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\ncase TRANSACTION_KEYWORD:\nreturn true;\ndefault:\nif (isSingletonTypeDescStart(nodeKind, true)) {\nreturn true;\n}\nreturn isSimpleType(nodeKind);\n}\n}\nstatic boolean isSimpleType(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase INT_KEYWORD:\ncase FLOAT_KEYWORD:\ncase DECIMAL_KEYWORD:\ncase BOOLEAN_KEYWORD:\ncase STRING_KEYWORD:\ncase BYTE_KEYWORD:\ncase XML_KEYWORD:\ncase JSON_KEYWORD:\ncase HANDLE_KEYWORD:\ncase ANY_KEYWORD:\ncase ANYDATA_KEYWORD:\ncase NEVER_KEYWORD:\ncase VAR_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase READONLY_KEYWORD:\ncase DISTINCT_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\nstatic boolean isPredeclaredPrefix(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase BOOLEAN_KEYWORD:\ncase DECIMAL_KEYWORD:\ncase ERROR_KEYWORD:\ncase FLOAT_KEYWORD:\ncase FUTURE_KEYWORD:\ncase INT_KEYWORD:\ncase 
MAP_KEYWORD:\ncase OBJECT_KEYWORD:\ncase STREAM_KEYWORD:\ncase STRING_KEYWORD:\ncase TABLE_KEYWORD:\ncase TRANSACTION_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase XML_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {\nreturn isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN;\n}\nprivate SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {\nswitch (typeKeyword) {\ncase INT_KEYWORD:\nreturn SyntaxKind.INT_TYPE_DESC;\ncase FLOAT_KEYWORD:\nreturn SyntaxKind.FLOAT_TYPE_DESC;\ncase DECIMAL_KEYWORD:\nreturn SyntaxKind.DECIMAL_TYPE_DESC;\ncase BOOLEAN_KEYWORD:\nreturn SyntaxKind.BOOLEAN_TYPE_DESC;\ncase STRING_KEYWORD:\nreturn SyntaxKind.STRING_TYPE_DESC;\ncase BYTE_KEYWORD:\nreturn SyntaxKind.BYTE_TYPE_DESC;\ncase XML_KEYWORD:\nreturn SyntaxKind.XML_TYPE_DESC;\ncase JSON_KEYWORD:\nreturn SyntaxKind.JSON_TYPE_DESC;\ncase HANDLE_KEYWORD:\nreturn SyntaxKind.HANDLE_TYPE_DESC;\ncase ANY_KEYWORD:\nreturn SyntaxKind.ANY_TYPE_DESC;\ncase ANYDATA_KEYWORD:\nreturn SyntaxKind.ANYDATA_TYPE_DESC;\ncase READONLY_KEYWORD:\nreturn SyntaxKind.READONLY_TYPE_DESC;\ncase NEVER_KEYWORD:\nreturn SyntaxKind.NEVER_TYPE_DESC;\ncase VAR_KEYWORD:\nreturn SyntaxKind.VAR_TYPE_DESC;\ncase ERROR_KEYWORD:\nreturn SyntaxKind.ERROR_TYPE_DESC;\ndefault:\nreturn SyntaxKind.TYPE_REFERENCE;\n}\n}\n/**\n* Parse fork-keyword.\n*\n* @return Fork-keyword node\n*/\nprivate STNode parseForkKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FORK_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FORK_KEYWORD);\nreturn parseForkKeyword();\n}\n}\n/**\n* Parse fork statement.\n* fork-stmt := fork { named-worker-decl+ }\n*\n* @return Fork statement\n*/\nprivate STNode parseForkStatement() {\nstartContext(ParserRuleContext.FORK_STMT);\nSTNode forkKeyword = parseForkKeyword();\nSTNode openBrace = parseOpenBrace();\nArrayList workers = new ArrayList<>();\nwhile (!isEndOfStatements()) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nif (stmt.kind == SyntaxKind.LOCAL_TYPE_DEFINITION_STATEMENT) {\naddInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_LOCAL_TYPE_DEFINITION_NOT_ALLOWED);\ncontinue;\n}\nswitch (stmt.kind) {\ncase NAMED_WORKER_DECLARATION:\nworkers.add(stmt);\nbreak;\ndefault:\nif (workers.isEmpty()) {\nopenBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n} else {\nupdateLastNodeInListWithInvalidNode(workers, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n}\n}\n}\nSTNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);\nSTNode closeBrace = parseCloseBrace();\nendContext();\nSTNode forkStmt =\nSTNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);\nif (isNodeListEmpty(namedWorkerDeclarations)) {\nreturn SyntaxErrors.addDiagnostic(forkStmt,\nDiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);\n}\nreturn forkStmt;\n}\n/**\n* Parse trap expression.\n*
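\n* Illustrative example (not from the source), assuming a function riskyCall that may panic: int|error result = trap riskyCall();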
\n* \n* trap-expr := trap expression\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Whether this is a RHS expression or not\n* @return Trap expression node\n*/\nprivate STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode trapKeyword = parseTrapKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);\n}\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);\n}\n/**\n* Parse trap-keyword.\n*\n* @return Trap-keyword node\n*/\nprivate STNode parseTrapKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRAP_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.TRAP_KEYWORD);\nreturn parseTrapKeyword();\n}\n}\n/**\n* Parse list constructor expression.\n*
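\n* Illustrative example (not from the source): int[] nums = [1, 2, 3];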
\n* \n* list-constructor-expr := [ [ expr-list ] ]\n*
\n* expr-list := expression (, expression)*\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseListConstructorExpr() {\nstartContext(ParserRuleContext.LIST_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode expressions = parseOptionalExpressionsList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);\n}\n/**\n* Parse optional expression list.\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalExpressionsList() {\nList expressions = new ArrayList<>();\nif (isEndOfListConstructor(peek().kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode expr = parseExpression();\nexpressions.add(expr);\nreturn parseOptionalExpressionsList(expressions);\n}\nprivate STNode parseOptionalExpressionsList(List expressions) {\nSTNode listConstructorMemberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nlistConstructorMemberEnd = parseListConstructorMemberEnd();\nif (listConstructorMemberEnd == null) {\nbreak;\n}\nexpressions.add(listConstructorMemberEnd);\nSTNode expr = parseExpression();\nexpressions.add(expr);\n}\nreturn STNodeFactory.createNodeList(expressions);\n}\nprivate boolean isEndOfListConstructor(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseListConstructorMemberEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);\nreturn parseListConstructorMemberEnd();\n}\n}\n/**\n* Parse foreach statement.\n* foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]\n*\n* @return foreach statement\n*/\nprivate STNode parseForEachStatement() {\nstartContext(ParserRuleContext.FOREACH_STMT);\nSTNode forEachKeyword = parseForEachKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);\nSTNode inKeyword = parseInKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nSTNode blockStatement = parseBlockNode();\nendContext();\nSTNode onFailClause = parseOptionalOnFailClause();\nreturn STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,\nblockStatement, onFailClause);\n}\n/**\n* Parse foreach-keyword.\n*\n* @return ForEach-keyword node\n*/\nprivate STNode parseForEachKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FOREACH_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FOREACH_KEYWORD);\nreturn parseForEachKeyword();\n}\n}\n/**\n* Parse in-keyword.\n*\n* @return In-keyword node\n*/\nprivate STNode parseInKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IN_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.IN_KEYWORD);\nreturn parseInKeyword();\n}\n}\n/**\n* Parse type cast expression.\n*
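\n* Illustrative example (not from the source): int n = <int>someValue;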
\n* \n* type-cast-expr := < type-cast-param > expression\n*
\n* type-cast-param := [annots] type-descriptor | annots\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nstartContext(ParserRuleContext.TYPE_CAST);\nSTNode ltToken = parseLTToken();\nSTNode typeCastParam = parseTypeCastParam();\nSTNode gtToken = parseGTToken();\nendContext();\nSTNode expression =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nreturn STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);\n}\nprivate STNode parseTypeCastParam() {\nSTNode annot;\nSTNode type;\nSTToken token = peek();\nswitch (token.kind) {\ncase AT_TOKEN:\nannot = parseOptionalAnnotations();\ntoken = peek();\nif (isTypeStartingToken(token.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n} else {\ntype = STNodeFactory.createEmptyNode();\n}\nbreak;\ndefault:\nannot = STNodeFactory.createEmptyNode();\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nbreak;\n}\nreturn STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);\n}\n/**\n* Parse table constructor expression.\n*
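\n* Illustrative example (not from the source), assuming a Person record with an id field:\n* table<Person> tbl = table key(id) [{id: 1, name: \"Anne\"}];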
\n* \n* table-constructor-expr-rhs := [ [row-list] ]\n* \n*\n* @param tableKeyword tableKeyword that precedes this rhs\n* @param keySpecifier keySpecifier that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {\nswitchContext(ParserRuleContext.TABLE_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode rowList = parseRowList();\nSTNode closeBracket = parseCloseBracket();\nreturn STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,\ncloseBracket);\n}\n/**\n* Parse table-keyword.\n*\n* @return Table-keyword node\n*/\nprivate STNode parseTableKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TABLE_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.TABLE_KEYWORD);\nreturn parseTableKeyword();\n}\n}\n/**\n* Parse table rows.\n*

\n* row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseRowList() {\nSTToken nextToken = peek();\nif (isEndOfTableRowList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList mappings = new ArrayList<>();\nSTNode mapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\nSTNode rowEnd;\nwhile (!isEndOfTableRowList(nextToken.kind)) {\nrowEnd = parseTableRowEnd();\nif (rowEnd == null) {\nbreak;\n}\nmappings.add(rowEnd);\nmapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(mappings);\n}\nprivate boolean isEndOfTableRowList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ncase COMMA_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn false;\ndefault:\nreturn isEndOfMappingConstructor(tokenKind);\n}\n}\nprivate STNode parseTableRowEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.TABLE_ROW_END);\nreturn parseTableRowEnd();\n}\n}\n/**\n* Parse key specifier.\n*

\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier() {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode keyKeyword = parseKeyKeyword();\nSTNode openParen = parseOpenParenthesis();\nSTNode fieldNames = parseFieldNames();\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);\n}\n/**\n* Parse key-keyword.\n*\n* @return Key-keyword node\n*/\nprivate STNode parseKeyKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.KEY_KEYWORD) {\nreturn consume();\n}\nif (isKeyKeyword(token)) {\nreturn getKeyKeyword(consume());\n}\nrecover(token, ParserRuleContext.KEY_KEYWORD);\nreturn parseKeyKeyword();\n}\nstatic boolean isKeyKeyword(STToken token) {\nreturn token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());\n}\nprivate STNode getKeyKeyword(STToken token) {\nreturn STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),\ntoken.diagnostics());\n}\n/**\n* Parse field names.\n*

\n* field-name-list := [ field-name (, field-name)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldNames() {\nSTToken nextToken = peek();\nif (isEndOfFieldNamesList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList fieldNames = new ArrayList<>();\nSTNode fieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfFieldNamesList(nextToken.kind)) {\nleadingComma = parseComma();\nfieldNames.add(leadingComma);\nfieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(fieldNames);\n}\nprivate boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase IDENTIFIER_TOKEN:\nreturn false;\ndefault:\nreturn true;\n}\n}\n/**\n* Parse error type descriptor.\n*

\n* error-type-descriptor := error [error-type-param]\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*
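\n* e.g. (illustrative, editor-added): error, error<*>, or error<Detail>, where Detail\n* stands in for a hypothetical detail mapping type.\n*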

\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeDescriptor() {\nSTNode errorKeywordToken = parseErrorKeyword();\nreturn parseErrorTypeDescriptor(errorKeywordToken);\n}\nprivate STNode parseErrorTypeDescriptor(STNode errorKeywordToken) {\nSTNode errorTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nerrorTypeParamsNode = parseErrorTypeParamsNode();\n} else {\nerrorTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);\n}\n/**\n* Parse error type param node.\n*

\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*

\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nSTNode parameter;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nparameter = consume();\n} else {\nparameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n}\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken);\n}\n/**\n* Parse error-keyword.\n*\n* @return Parsed error-keyword node\n*/\nprivate STNode parseErrorKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ERROR_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ERROR_KEYWORD);\nreturn parseErrorKeyword();\n}\n}\n/**\n* Parse typedesc type descriptor.\n* typedesc-type-descriptor := typedesc type-parameter\n*\n* @return Parsed typedesc type node\n*/\nprivate STNode parseTypedescTypeDescriptor(STNode typedescKeywordToken) {\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse stream type descriptor.\n*

\n* stream-type-descriptor := stream [stream-type-parameters]\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*
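\n* e.g. (illustrative, editor-added): stream<int> or stream<int, error>; the optional\n* second type-descriptor (here error) describes how the stream may terminate.\n*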

\n*\n* @return Parsed stream type descriptor node\n*/\nprivate STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {\nSTNode streamTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nstreamTypeParamsNode = parseStreamTypeParamsNode();\n} else {\nstreamTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);\n}\n/**\n* Parse xml type descriptor.\n* xml-type-descriptor := xml type-parameter\n*\n* @return Parsed typedesc type node\n*/\nprivate STNode parseXmlTypeDescriptor(STNode xmlKeywordToken) {\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse stream type params node.\n*

\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*

\n*\n* @return Parsed stream type params node\n*/\nprivate STNode parseStreamTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nstartContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\nSTNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\nSTNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\nendContext();\nreturn streamTypedesc;\n}\nprivate STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {\nSTNode commaToken, rightTypeDescNode, gtToken;\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncommaToken = parseComma();\nrightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\nbreak;\ncase GT_TOKEN:\ncommaToken = STNodeFactory.createEmptyNode();\nrightTypeDescNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);\nreturn parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\n}\ngtToken = parseGTToken();\nreturn STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,\ngtToken);\n}\n/**\n* Parse stream-keyword.\n*\n* @return Parsed stream-keyword node\n*/\nprivate STNode parseStreamKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STREAM_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.STREAM_KEYWORD);\nreturn parseStreamKeyword();\n}\n}\n/**\n* Parse let expression.\n*

\n* \n* let-expr := let let-var-decl [, let-var-decl]* in expression\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetExpression(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);\nSTNode inKeyword = parseInKeyword();\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);\n}\n/**\n* Parse let-keyword.\n*\n* @return Let-keyword node\n*/\nprivate STNode parseLetKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LET_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.LET_KEYWORD);\nreturn parseLetKeyword();\n}\n}\n/**\n* Parse let variable declarations.\n*

\n* let-var-decl-list := let-var-decl [, let-var-decl]*\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {\nstartContext(context);\nList varDecls = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfLetVarDeclarations(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode varDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfLetVarDeclarations(nextToken.kind)) {\nleadingComma = parseComma();\nvarDecls.add(leadingComma);\nvarDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(varDecls);\n}\nprivate boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase AT_TOKEN:\nreturn false;\ncase IN_KEYWORD:\nreturn true;\ndefault:\nreturn !isTypeStartingToken(tokenKind);\n}\n}\n/**\n* Parse let variable declaration.\n*

\n* let-var-decl := [annots] typed-binding-pattern = expression\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDecl(boolean isRhsExpr) {\nSTNode annot = parseOptionalAnnotations();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);\nSTNode assign = parseAssignOp();\nSTNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);\nreturn STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);\n}\n/**\n* Parse raw backtick string template expression.\n*

\n* BacktickString := `expression`\n*\n* @return Template expression node\n*/\nprivate STNode parseTemplateExpression() {\nSTNode type = STNodeFactory.createEmptyNode();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\nprivate STNode parseTemplateContent() {\nList items = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nitems.add(contentItem);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\nprivate boolean isEndOfBacktickContent(SyntaxKind kind) {\nswitch (kind) {\ncase EOF_TOKEN:\ncase BACKTICK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTemplateItem() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn parseInterpolation();\n}\nreturn consume();\n}\n/**\n* Parse string template expression.\n*

\n* string-template-expr := string ` expression `\n*\n* @return String template expression node\n*/\nprivate STNode parseStringTemplateExpression() {\nSTNode type = parseStringKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\n/**\n* Parse string keyword.\n*\n* @return string keyword node\n*/\nprivate STNode parseStringKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STRING_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.STRING_KEYWORD);\nreturn parseStringKeyword();\n}\n}\n/**\n* Parse XML template expression.\n*

\n* xml-template-expr := xml BacktickString\n*\n* @return XML template expression\n*/\nprivate STNode parseXMLTemplateExpression() {\nSTNode xmlKeyword = parseXMLKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContentAsXML();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,\nstartingBackTick, content, endingBackTick);\n}\n/**\n* Parse xml keyword.\n*\n* @return xml keyword node\n*/\nprivate STNode parseXMLKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XML_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.XML_KEYWORD);\nreturn parseXMLKeyword();\n}\n}\n/**\n* Parse the content of the template string as XML. This method first read the\n* input in the same way as the raw-backtick-template (BacktickString). Then\n* it parses the content as XML.\n*\n* @return XML node\n*/\nprivate STNode parseTemplateContentAsXML() {\nArrayDeque expressions = new ArrayDeque<>();\nStringBuilder xmlStringBuilder = new StringBuilder();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nif (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {\nxmlStringBuilder.append(((STToken) contentItem).text());\n} else {\nxmlStringBuilder.append(\"${}\");\nexpressions.add(contentItem);\n}\nnextToken = peek();\n}\nCharReader charReader = CharReader.from(xmlStringBuilder.toString());\nAbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));\nXMLParser xmlParser = new XMLParser(tokenReader, expressions);\nreturn xmlParser.parse();\n}\n/**\n* Parse interpolation of a back-tick string.\n*

\n* \n* interpolation := ${ expression }\n* \n*\n* @return Interpolation node\n*/\nprivate STNode parseInterpolation() {\nstartContext(ParserRuleContext.INTERPOLATION);\nSTNode interpolStart = parseInterpolationStart();\nSTNode expr = parseExpression();\nwhile (true) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.EOF_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nbreak;\n} else {\nnextToken = consume();\nexpr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,\nDiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());\n}\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);\n}\n/**\n* Parse interpolation start token.\n*

\n* interpolation-start := ${\n*\n* @return Interpolation start token\n*/\nprivate STNode parseInterpolationStart() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);\nreturn parseInterpolationStart();\n}\n}\n/**\n* Parse back-tick token.\n*\n* @return Back-tick token\n*/\nprivate STNode parseBacktickToken(ParserRuleContext ctx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ctx);\nreturn parseBacktickToken(ctx);\n}\n}\n/**\n* Parse table type descriptor.\n*

\n* table-type-descriptor := table row-type-parameter [key-constraint]\n* row-type-parameter := type-parameter\n* key-constraint := key-specifier | key-type-constraint\n* key-specifier := key ( [ field-name (, field-name)* ] )\n* key-type-constraint := key type-parameter\n*
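\n* e.g. (illustrative, editor-added): table<Employee> key(name) uses a key-specifier,\n* while table<Employee> key<string> uses a key-type-constraint; Employee is hypothetical.\n*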

\n*\n* @return Parsed table type desc node.\n*/\nprivate STNode parseTableTypeDescriptor(STNode tableKeywordToken) {\nSTNode rowTypeParameterNode = parseRowTypeParameter();\nSTNode keyConstraintNode;\nSTToken nextToken = peek();\nif (isKeyKeyword(nextToken)) {\nSTNode keyKeywordToken = getKeyKeyword(consume());\nkeyConstraintNode = parseKeyConstraint(keyKeywordToken);\n} else {\nkeyConstraintNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);\n}\n/**\n* Parse row type parameter node.\n*

\n* row-type-parameter := type-parameter\n*

\n*\n* @return Parsed node.\n*/\nprivate STNode parseRowTypeParameter() {\nstartContext(ParserRuleContext.ROW_TYPE_PARAM);\nSTNode rowTypeParameterNode = parseTypeParameter();\nendContext();\nreturn rowTypeParameterNode;\n}\n/**\n* Parse type parameter node.\n*

\n* type-parameter := < type-descriptor >\n*
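\n* e.g. (illustrative, editor-added): the <string> in map<string> is a type-parameter.\n*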

\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeParameter() {\nSTNode ltToken = parseLTToken();\nSTNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);\n}\n/**\n* Parse key constraint.\n*

\n* key-constraint := key-specifier | key-type-constraint\n*

\n*\n* @return Parsed node.\n*/\nprivate STNode parseKeyConstraint(STNode keyKeywordToken) {\nswitch (peek().kind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseKeySpecifier(keyKeywordToken);\ncase LT_TOKEN:\nreturn parseKeyTypeConstraint(keyKeywordToken);\ndefault:\nrecover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);\nreturn parseKeyConstraint(keyKeywordToken);\n}\n}\n/**\n* Parse key specifier given parsed key keyword token.\n*

\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier(STNode keyKeywordToken) {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode openParenToken = parseOpenParenthesis();\nSTNode fieldNamesNode = parseFieldNames();\nSTNode closeParenToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);\n}\n/**\n* Parse key type constraint.\n*

\n* key-type-constraint := key type-parameter\n*
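\n* e.g. (illustrative, editor-added): the key<string> in table<Row> key<string>, with\n* Row standing in for a hypothetical record type.\n*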

\n*\n* @return Parsed node\n*/\nprivate STNode parseKeyTypeConstraint(STNode keyKeywordToken) {\nSTNode typeParameterNode = parseTypeParameter();\nreturn STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);\n}\n/**\n* Parse function type descriptor.\n*

\n* function-type-descriptor := [isolated] function function-signature\n*\n* @param qualifiers Preceding type descriptor qualifiers\n* @return Function type descriptor node\n*/\nprivate STNode parseFunctionTypeDesc(List qualifiers) {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC);\nSTNode qualifierList;\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode signature;\nswitch (peek().kind) {\ncase OPEN_PAREN_TOKEN:\nsignature = parseFuncSignature(true);\nqualifierList = createFuncTypeQualNodeList(qualifiers, true);\nbreak;\ndefault:\nsignature = STNodeFactory.createEmptyNode();\nqualifierList = createFuncTypeQualNodeList(qualifiers, false);\nbreak;\n}\nendContext();\nreturn STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);\n}\nprivate STNode createFuncTypeQualNodeList(List qualifierList, boolean hasFuncSignature) {\nList validatedList = new ArrayList<>();\nfor (int i = 0; i < qualifierList.size(); i++) {\nSTNode qualifier = qualifierList.get(i);\nint nextIndex = i + 1;\nif (isSyntaxKindInList(validatedList, qualifier.kind)) {\nupdateLastNodeInListWithInvalidNode(validatedList, qualifier,\nDiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());\n} else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {\nvalidatedList.add(qualifier);\n} else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {\nvalidatedList.add(qualifier);\n} else if (qualifierList.size() == nextIndex) {\naddInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,\n((STToken) qualifier).text());\n} else {\nupdateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,\nDiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());\n}\n}\nreturn STNodeFactory.createNodeList(validatedList);\n}\nprivate boolean isRegularFuncQual(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase ISOLATED_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse explicit anonymous function expression.\n*

\n* explicit-anonymous-function-expr :=\n* [annots] (isolated| transactional) function function-signature anon-func-body\n*\n* @param annots Annotations.\n* @param qualifiers Function qualifiers\n* @param isRhsExpr Is expression in rhs context\n* @return Anonymous function expression node\n*/\nprivate STNode parseExplicitFunctionExpression(STNode annots, List qualifiers, boolean isRhsExpr) {\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nSTNode qualifierList = createFuncTypeQualNodeList(qualifiers, true);\nSTNode funcKeyword = parseFunctionKeyword();\nSTNode funcSignature = parseFuncSignature(false);\nSTNode funcBody = parseAnonFuncBody(isRhsExpr);\nreturn STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,\nfuncSignature, funcBody);\n}\n/**\n* Parse anonymous function body.\n*

\n* anon-func-body := block-function-body | expr-function-body\n*\n* @param isRhsExpr Is expression in rhs context\n* @return Anon function body node\n*/\nprivate STNode parseAnonFuncBody(boolean isRhsExpr) {\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\ncase EOF_TOKEN:\nSTNode body = parseFunctionBodyBlock(true);\nendContext();\nreturn body;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nendContext();\nreturn parseExpressionFuncBody(true, isRhsExpr);\ndefault:\nrecover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);\nreturn parseAnonFuncBody(isRhsExpr);\n}\n}\n/**\n* Parse expression function body.\n*

\n* expr-function-body := => expression\n*\n* @param isAnon Is anonymous function.\n* @param isRhsExpr Is expression in rhs context\n* @return Expression function body node\n*/\nprivate STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTNode semiColon;\nif (isAnon) {\nsemiColon = STNodeFactory.createEmptyNode();\n} else {\nsemiColon = parseSemicolon();\n}\nreturn STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);\n}\n/**\n* Parse '=>' token.\n*\n* @return Double right arrow token\n*/\nprivate STNode parseDoubleRightArrow() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.EXPR_FUNC_BODY_START);\nreturn parseDoubleRightArrow();\n}\n}\nprivate STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {\nswitch (params.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase INFER_PARAM_LIST:\nbreak;\ncase BRACED_EXPRESSION:\nparams = getAnonFuncParam((STBracedExpressionNode) params);\nbreak;\ndefault:\nSTToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nsyntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,\nDiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);\nparams = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);\n}\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);\n}\n/**\n* Create a new anon-func-param node from a braced expression.\n*\n* @param params Braced expression\n* @return Anon-func param node\n*/\nprivate STNode getAnonFuncParam(STBracedExpressionNode params) {\nList paramList = new ArrayList<>();\nparamList.add(params.expression);\nreturn STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen,\nSTNodeFactory.createNodeList(paramList), params.closeParen);\n}\n/**\n* Parse implicit anon function expression.\n*\n* @param openParen Open parenthesis token\n* @param firstParam First parameter\n* @param isRhsExpr Is expression in rhs context\n* @return Implicit anon function expression node\n*/\nprivate STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {\nList paramList = new ArrayList<>();\nparamList.add(firstParam);\nSTToken nextToken = peek();\nSTNode paramEnd;\nSTNode param;\nwhile (!isEndOfAnonFuncParametersList(nextToken.kind)) {\nparamEnd = parseImplicitAnonFuncParamEnd();\nif (paramEnd == null) {\nbreak;\n}\nparamList.add(paramEnd);\nparam = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);\nparam = STNodeFactory.createSimpleNameReferenceNode(param);\nparamList.add(param);\nnextToken = peek();\n}\nSTNode params = STNodeFactory.createNodeList(paramList);\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nSTNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nreturn parseImplicitAnonFunc(inferedParams, isRhsExpr);\n}\nprivate STNode parseImplicitAnonFuncParamEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);\nreturn 
parseImplicitAnonFuncParamEnd();\n}\n}\nprivate boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase DO_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse tuple type descriptor.\n*

\n* tuple-type-descriptor := [ tuple-member-type-descriptors ]\n*

\n* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]\n* | [ tuple-rest-descriptor ]\n*

\n* tuple-rest-descriptor := type-descriptor ...\n*
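\n* e.g. (illustrative, editor-added): [int, string] fixes both members, while\n* [int, string...] allows any number of trailing string members.\n*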
\n*\n* @return\n*/\nprivate STNode parseTupleTypeDesc() {\nSTNode openBracket = parseOpenBracket();\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode memberTypeDesc = parseTupleMemberTypeDescList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nopenBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,\nDiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);\n}\n/**\n* Parse tuple member type descriptors.\n*\n* @return Parsed node\n*/\nprivate STNode parseTupleMemberTypeDescList() {\nList typeDescList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfTypeList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nreturn parseTupleTypeMembers(typeDesc, typeDescList);\n}\nprivate STNode parseTupleTypeMembers(STNode typeDesc, List typeDescList) {\nSTToken nextToken;\nnextToken = peek();\nSTNode tupleMemberRhs;\nwhile (!isEndOfTypeList(nextToken.kind)) {\ntupleMemberRhs = parseTupleMemberRhs();\nif (tupleMemberRhs == null) {\nbreak;\n}\nif (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) {\ntypeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);\nbreak;\n}\ntypeDescList.add(typeDesc);\ntypeDescList.add(tupleMemberRhs);\ntypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nnextToken = peek();\n}\ntypeDescList.add(typeDesc);\nreturn STNodeFactory.createNodeList(typeDescList);\n}\nprivate STNode parseTupleMemberRhs() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ncase ELLIPSIS_TOKEN:\nreturn parseEllipsis();\ndefault:\nrecover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);\nreturn parseTupleMemberRhs();\n}\n}\nprivate boolean isEndOfTypeList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse table constructor or query expression.\n*

\n* \n* table-constructor-or-query-expr := table-constructor-expr | query-expr\n*
\n* table-constructor-expr := table [key-specifier] [ [row-list] ]\n*
\n* query-expr := [query-construct-type] query-pipeline select-clause\n* [query-construct-type] query-pipeline select-clause on-conflict-clause?\n*
\n* query-construct-type := table key-specifier | stream\n*
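\n* e.g. (illustrative, editor-added): table key(id) [{id: 1}, {id: 2}] is a table\n* constructor, while from var e in employees select e is a query; the names are hypothetical.\n*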
\n*\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorOrQuery(boolean isRhsExpr) {\nstartContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);\nSTNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr);\nendContext();\nreturn tableOrQueryExpr;\n}\nprivate STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) {\nSTNode queryConstructType;\nswitch (peek().kind) {\ncase FROM_KEYWORD:\nqueryConstructType = STNodeFactory.createEmptyNode();\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase STREAM_KEYWORD:\nqueryConstructType = parseQueryConstructType(parseStreamKeyword(), null);\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase TABLE_KEYWORD:\nSTNode tableKeyword = parseTableKeyword();\nreturn parseTableConstructorOrQuery(tableKeyword, isRhsExpr);\ndefault:\nrecover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);\nreturn parseTableConstructorOrQueryInternal(isRhsExpr);\n}\n}\nprivate STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {\nSTNode keySpecifier;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase OPEN_BRACKET_TOKEN:\nkeySpecifier = STNodeFactory.createEmptyNode();\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ncase KEY_KEYWORD:\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\ncase IDENTIFIER_TOKEN:\nif (isKeyKeyword(nextToken)) {\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n}\nbreak;\ndefault:\nbreak;\n}\nrecover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);\nreturn parseTableConstructorOrQuery(tableKeyword, isRhsExpr);\n}\nprivate STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {\nswitch (peek().kind) {\ncase FROM_KEYWORD:\nreturn parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ndefault:\nrecover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword, keySpecifier,\nisRhsExpr);\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n}\n}\n/**\n* Parse query construct type.\n*

\n* query-construct-type := table key-specifier | stream\n*\n* @return Parsed node\n*/\nprivate STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {\nreturn STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);\n}\n/**\n* Parse query action or expression.\n*

\n* \n* query-expr-rhs := query-pipeline select-clause\n* query-pipeline select-clause on-conflict-clause?\n*
\n* query-pipeline := from-clause intermediate-clause*\n*
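\n* e.g. (illustrative, editor-added): from var s in students where s.score > 80 select s.name,\n* with students standing in for a hypothetical list of records.\n*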
\n*\n* @param queryConstructType queryConstructType that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {\nswitchContext(ParserRuleContext.QUERY_EXPRESSION);\nSTNode fromClause = parseFromClause(isRhsExpr);\nList clauses = new ArrayList<>();\nSTNode intermediateClause;\nSTNode selectClause = null;\nwhile (!isEndOfIntermediateClause(peek().kind)) {\nintermediateClause = parseIntermediateClause(isRhsExpr);\nif (intermediateClause == null) {\nbreak;\n}\nif (selectClause != null) {\nselectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,\nDiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);\ncontinue;\n}\nif (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {\nselectClause = intermediateClause;\n} else {\nclauses.add(intermediateClause);\n}\n}\nif (peek().kind == SyntaxKind.DO_KEYWORD) {\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nreturn parseQueryAction(queryConstructType, queryPipeline, selectClause, isRhsExpr);\n}\nif (selectClause == null) {\nSTNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);\nSTNode expr = STNodeFactory\n.createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\nselectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);\nif (clauses.isEmpty()) {\nfromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\n} else {\nint lastIndex = clauses.size() - 1;\nSTNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),\nDiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\nclauses.set(lastIndex, intClauseWithDiagnostic);\n}\n}\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nSTNode onConflictClause = parseOnConflictClause(isRhsExpr);\nreturn STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,\nonConflictClause);\n}\n/**\n* Parse an intermediate clause.\n*

\n* \n* intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseIntermediateClause(boolean isRhsExpr) {\nswitch (peek().kind) {\ncase FROM_KEYWORD:\nreturn parseFromClause(isRhsExpr);\ncase WHERE_KEYWORD:\nreturn parseWhereClause(isRhsExpr);\ncase LET_KEYWORD:\nreturn parseLetClause(isRhsExpr);\ncase SELECT_KEYWORD:\nreturn parseSelectClause(isRhsExpr);\ncase JOIN_KEYWORD:\ncase OUTER_KEYWORD:\nreturn parseJoinClause(isRhsExpr);\ncase ORDER_KEYWORD:\ncase BY_KEYWORD:\ncase ASCENDING_KEYWORD:\ncase DESCENDING_KEYWORD:\nreturn parseOrderByClause(isRhsExpr);\ncase LIMIT_KEYWORD:\nreturn parseLimitClause(isRhsExpr);\ncase DO_KEYWORD:\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ncase CONFLICT_KEYWORD:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);\nreturn parseIntermediateClause(isRhsExpr);\n}\n}\n/**\n* Parse join-keyword.\n*\n* @return Join-keyword node\n*/\nprivate STNode parseJoinKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.JOIN_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.JOIN_KEYWORD);\nreturn parseJoinKeyword();\n}\n}\n/**\n* Parse equals keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseEqualsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.EQUALS_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.EQUALS_KEYWORD);\nreturn parseEqualsKeyword();\n}\n}\nprivate boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase EOF_TOKEN:\ncase RESOURCE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase DOCUMENTATION_STRING:\ncase PRIVATE_KEYWORD:\ncase RETURNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase TYPE_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase DO_KEYWORD:\nreturn true;\ndefault:\nreturn isValidExprRhsStart(tokenKind, SyntaxKind.NONE);\n}\n}\n/**\n* Parse from clause.\n*

\n* from-clause := from typed-binding-pattern in expression\n*\n* @return Parsed node\n*/\nprivate STNode parseFromClause(boolean isRhsExpr) {\nSTNode fromKeyword = parseFromKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);\n}\n/**\n* Parse from-keyword.\n*\n* @return From-keyword node\n*/\nprivate STNode parseFromKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FROM_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FROM_KEYWORD);\nreturn parseFromKeyword();\n}\n}\n/**\n* Parse where clause.\n*

\n* where-clause := where expression\n*\n* @return Parsed node\n*/\nprivate STNode parseWhereClause(boolean isRhsExpr) {\nSTNode whereKeyword = parseWhereKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createWhereClauseNode(whereKeyword, expression);\n}\n/**\n* Parse where-keyword.\n*\n* @return Where-keyword node\n*/\nprivate STNode parseWhereKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WHERE_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.WHERE_KEYWORD);\nreturn parseWhereKeyword();\n}\n}\n/**\n* Parse limit-keyword.\n*\n* @return limit-keyword node\n*/\nprivate STNode parseLimitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LIMIT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.LIMIT_KEYWORD);\nreturn parseLimitKeyword();\n}\n}\n/**\n* Parse let clause.\n*

\n* let-clause := let let-var-decl [, let-var-decl]* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetClause(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nreturn STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);\n}\n/**\n* Parse order-keyword.\n*\n* @return Order-keyword node\n*/\nprivate STNode parseOrderKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ORDER_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ORDER_KEYWORD);\nreturn parseOrderKeyword();\n}\n}\n/**\n* Parse by-keyword.\n*\n* @return By-keyword node\n*/\nprivate STNode parseByKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BY_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.BY_KEYWORD);\nreturn parseByKeyword();\n}\n}\n/**\n* Parse order by clause.\n*

\n* order-by-clause := order by order-key-list\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseOrderByClause(boolean isRhsExpr) {\nSTNode orderKeyword = parseOrderKeyword();\nSTNode byKeyword = parseByKeyword();\nSTNode orderKeys = parseOrderKeyList(isRhsExpr);\nbyKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);\nreturn STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);\n}\n/**\n* Parse order key.\n*

\n* order-key-list := order-key [, order-key]*\n*\n* @return Parsed node\n*/\nprivate STNode parseOrderKeyList(boolean isRhsExpr) {\nstartContext(ParserRuleContext.ORDER_KEY_LIST);\nList orderKeys = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfOrderKeys(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode orderKey = parseOrderKey(isRhsExpr);\norderKeys.add(orderKey);\nnextToken = peek();\nSTNode orderKeyListMemberEnd;\nwhile (!isEndOfOrderKeys(nextToken.kind)) {\norderKeyListMemberEnd = parseOrderKeyListMemberEnd();\nif (orderKeyListMemberEnd == null) {\nbreak;\n}\norderKeys.add(orderKeyListMemberEnd);\norderKey = parseOrderKey(isRhsExpr);\norderKeys.add(orderKey);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(orderKeys);\n}\nprivate boolean isEndOfOrderKeys(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase ASCENDING_KEYWORD:\ncase DESCENDING_KEYWORD:\nreturn false;\ncase SEMICOLON_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn isQueryClauseStartToken(tokenKind);\n}\n}\nprivate boolean isQueryClauseStartToken(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase SELECT_KEYWORD:\ncase LET_KEYWORD:\ncase WHERE_KEYWORD:\ncase OUTER_KEYWORD:\ncase JOIN_KEYWORD:\ncase ORDER_KEYWORD:\ncase DO_KEYWORD:\ncase FROM_KEYWORD:\ncase LIMIT_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseOrderKeyListMemberEnd() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase EOF_TOKEN:\nreturn null;\ndefault:\nif (isQueryClauseStartToken(nextToken.kind)) {\nreturn null;\n}\nrecover(peek(), ParserRuleContext.ORDER_KEY_LIST_END);\nreturn parseOrderKeyListMemberEnd();\n}\n}\n/**\n* Parse order key.\n*

\n* order-key := expression (ascending | descending)?\n*\n* @return Parsed node\n*/\nprivate STNode parseOrderKey(boolean isRhsExpr) {\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTNode orderDirection;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase ASCENDING_KEYWORD:\ncase DESCENDING_KEYWORD:\norderDirection = consume();\nbreak;\ndefault:\norderDirection = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createOrderKeyNode(expression, orderDirection);\n}\n/**\n* Parse select clause.\n*

\n* select-clause := select expression\n*\n* @return Parsed node\n*/\nprivate STNode parseSelectClause(boolean isRhsExpr) {\nstartContext(ParserRuleContext.SELECT_CLAUSE);\nSTNode selectKeyword = parseSelectKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nendContext();\nreturn STNodeFactory.createSelectClauseNode(selectKeyword, expression);\n}\n/**\n* Parse select-keyword.\n*\n* @return Select-keyword node\n*/\nprivate STNode parseSelectKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SELECT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.SELECT_KEYWORD);\nreturn parseSelectKeyword();\n}\n}\n/**\n* Parse on-conflict clause.\n*

\n* \n* onConflictClause := on conflict expression\n* \n*\n* @return On conflict clause node\n*/\nprivate STNode parseOnConflictClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nstartContext(ParserRuleContext.ON_CONFLICT_CLAUSE);\nSTNode onKeyword = parseOnKeyword();\nSTNode conflictKeyword = parseConflictKeyword();\nendContext();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);\n}\n/**\n* Parse conflict keyword.\n*\n* @return Conflict keyword node\n*/\nprivate STNode parseConflictKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONFLICT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.CONFLICT_KEYWORD);\nreturn parseConflictKeyword();\n}\n}\n/**\n* Parse limit clause.\n*

\n* limitClause := limit expression\n*\n* @return Limit expression node\n*/\nprivate STNode parseLimitClause(boolean isRhsExpr) {\nSTNode limitKeyword = parseLimitKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLimitClauseNode(limitKeyword, expr);\n}\n/**\n* Parse join clause.\n*

\n* \n* join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause\n*
\n* join-var-decl := join (typeName | var) bindingPattern\n*
\n* outer-join-var-decl := outer join var binding-pattern\n*
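\n* e.g. (illustrative, editor-added): join var d in depts on e.deptId equals d.id, with\n* depts, e and d standing in for user-defined values.\n*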
\n*\n* @return Join clause\n*/\nprivate STNode parseJoinClause(boolean isRhsExpr) {\nstartContext(ParserRuleContext.JOIN_CLAUSE);\nSTNode outerKeyword;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {\nouterKeyword = consume();\n} else {\nouterKeyword = STNodeFactory.createEmptyNode();\n}\nSTNode joinKeyword = parseJoinKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nendContext();\nSTNode onCondition = parseOnClause(isRhsExpr);\nreturn STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression,\nonCondition);\n}\n/**\n* Parse on clause.\n*

\n* on clause := `on` expression `equals` expression\n*\n* @return On clause node\n*/\nprivate STNode parseOnClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (isQueryClauseStartToken(nextToken.kind)) {\nreturn createMissingOnClauseNode();\n}\nstartContext(ParserRuleContext.ON_CLAUSE);\nSTNode onKeyword = parseOnKeyword();\nSTNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTNode equalsKeyword = parseEqualsKeyword();\nendContext();\nSTNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);\n}\nprivate STNode createMissingOnClauseNode() {\nSTNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,\nDiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);\nSTNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);\nSTNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,\nDiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);\nSTNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);\nSTNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);\nreturn STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);\n}\n/**\n* Parse start action.\n*

\n* start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)\n*\n* @return Start action node\n*/\nprivate STNode parseStartAction(STNode annots) {\nSTNode startKeyword = parseStartKeyword();\nSTNode expr = parseActionOrExpression();\nswitch (expr.kind) {\ncase FUNCTION_CALL:\ncase METHOD_CALL:\ncase REMOTE_METHOD_CALL_ACTION:\nbreak;\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nSTNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);\nSTNode arguments = STNodeFactory.createEmptyNodeList();\nSTNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);\nexpr = STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);\nbreak;\ndefault:\nstartKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,\nDiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);\nSTNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nfuncName = STNodeFactory.createSimpleNameReferenceNode(funcName);\nopenParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);\narguments = STNodeFactory.createEmptyNodeList();\ncloseParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);\nexpr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,\ncloseParenToken);\nbreak;\n}\nreturn STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);\n}\n/**\n* Parse start keyword.\n*\n* @return Start keyword node\n*/\nprivate STNode parseStartKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.START_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.START_KEYWORD);\nreturn parseStartKeyword();\n}\n}\n/**\n* Parse flush action.\n*

\n* flush-action := flush [peer-worker]\n*\n* @return flush action node\n*/\nprivate STNode parseFlushAction() {\nSTNode flushKeyword = parseFlushKeyword();\nSTNode peerWorker = parseOptionalPeerWorkerName();\nreturn STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);\n}\n/**\n* Parse flush keyword.\n*\n* @return flush keyword node\n*/\nprivate STNode parseFlushKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FLUSH_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.FLUSH_KEYWORD);\nreturn parseFlushKeyword();\n}\n}\n/**\n* Parse peer worker.\n*

\n* peer-worker := worker-name | function\n*\n* @return peer worker name node\n*/\nprivate STNode parseOptionalPeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase FUNCTION_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nreturn STNodeFactory.createEmptyNode();\n}\n}\n/**\n* Parse intersection type descriptor.\n*

\n* intersection-type-descriptor := type-descriptor & type-descriptor\n*
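\n* e.g. (illustrative, editor-added): readonly & int[] denotes an immutable array of int.\n*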

\n*\n* @return Parsed node\n*/\nprivate STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode bitwiseAndToken = consume();\nSTNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false);\nreturn createIntersectionTypeDesc(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n}\nprivate STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {\nleftTypeDesc = validateForUsageOfVar(leftTypeDesc);\nrightTypeDesc = validateForUsageOfVar(rightTypeDesc);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n}\n/**\n* Parse singleton type descriptor.\n*

\n* singleton-type-descriptor := simple-const-expr\n* simple-const-expr :=\n* nil-literal\n* | boolean-literal\n* | [Sign] int-literal\n* | [Sign] floating-point-literal\n* | string-literal\n* | constant-reference-expr\n*
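\n* e.g. (illustrative, editor-added): in type Answer 42; the literal 42 is a singleton\n* type holding exactly that value; Answer is a hypothetical name.\n*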

\n*/\nprivate STNode parseSingletonTypeDesc() {\nSTNode simpleContExpr = parseSimpleConstExpr();\nreturn STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);\n}\nprivate STNode parseSignedIntOrFloat() {\nSTNode operator = parseUnaryOperator();\nSTNode literal;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\nliteral = parseBasicLiteral();\nbreak;\ndefault:\nliteral = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);\nliteral = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);\n}\nreturn STNodeFactory.createUnaryExpressionNode(operator, literal);\n}\nprivate boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {\nSTToken nextNextToken = getNextNextToken();\nswitch (tokenKind) {\ncase STRING_LITERAL_TOKEN:\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nif (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {\nreturn true;\n}\nreturn false;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isIntOrFloat(nextNextToken);\ndefault:\nreturn false;\n}\n}\nstatic boolean isIntOrFloat(STToken token) {\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the parser reached to a valid expression start.\n*\n* @param nextTokenKind Kind of the next immediate token.\n* @param nextTokenIndex Index to the next token.\n* @return true if this is a start of a valid expression. 
false otherwise\n*/\nprivate boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {\nnextTokenIndex++;\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase STRING_LITERAL_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\nSyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;\nreturn nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||\nnextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||\nisValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase IDENTIFIER_TOKEN:\nreturn isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase OPEN_PAREN_TOKEN:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase TYPEOF_KEYWORD:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\ncase TRAP_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase LT_TOKEN:\ncase FROM_KEYWORD:\ncase LET_KEYWORD:\ncase BACKTICK_TOKEN:\ncase NEW_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\ncase ISOLATED_KEYWORD:\nreturn true;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);\ncase TABLE_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;\ncase STREAM_KEYWORD:\nSTToken nextNextToken = peek(nextTokenIndex);\nreturn nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||\nnextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||\nnextNextToken.kind == SyntaxKind.FROM_KEYWORD;\ncase ERROR_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase WAIT_KEYWORD:\ndefault:\nreturn false;\n}\n}\n/**\n* Parse sync send action.\n*

\n* sync-send-action := expression ->> peer-worker\n*\n* @param expression LHS expression of the sync send action\n* @return Sync send action node\n*/\nprivate STNode parseSyncSendAction(STNode expression) {\nSTNode syncSendToken = parseSyncSendToken();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);\n}\n/**\n* Parse peer worker.\n*

\n* peer-worker := worker-name | function\n*\n* @return peer worker name node\n*/\nprivate STNode parsePeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase FUNCTION_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nrecover(token, ParserRuleContext.PEER_WORKER_NAME);\nreturn parsePeerWorkerName();\n}\n}\n/**\n* Parse sync send token.\n*

\n* sync-send-token := ->> \n*\n* @return sync send token\n*/\nprivate STNode parseSyncSendToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.SYNC_SEND_TOKEN);\nreturn parseSyncSendToken();\n}\n}\n/**\n* Parse receive action.\n*

\n* receive-action := single-receive-action | multiple-receive-action\n*\n* @return Receive action\n*/\nprivate STNode parseReceiveAction() {\nSTNode leftArrow = parseLeftArrowToken();\nSTNode receiveWorkers = parseReceiveWorkers();\nreturn STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);\n}\nprivate STNode parseReceiveWorkers() {\nswitch (peek().kind) {\ncase FUNCTION_KEYWORD:\ncase IDENTIFIER_TOKEN:\nreturn parsePeerWorkerName();\ncase OPEN_BRACE_TOKEN:\nreturn parseMultipleReceiveWorkers();\ndefault:\nrecover(peek(), ParserRuleContext.RECEIVE_WORKERS);\nreturn parseReceiveWorkers();\n}\n}\n/**\n* Parse multiple worker receivers.\n*

\n* { receive-field (, receive-field)* }\n*\n* @return Multiple worker receiver node\n*/\nprivate STNode parseMultipleReceiveWorkers() {\nstartContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);\nSTNode openBrace = parseOpenBrace();\nSTNode receiveFields = parseReceiveFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);\nreturn STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);\n}\nprivate STNode parseReceiveFields() {\nList receiveFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfReceiveFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode receiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\nSTNode recieveFieldEnd;\nwhile (!isEndOfReceiveFields(nextToken.kind)) {\nrecieveFieldEnd = parseReceiveFieldEnd();\nif (recieveFieldEnd == null) {\nbreak;\n}\nreceiveFields.add(recieveFieldEnd);\nreceiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(receiveFields);\n}\nprivate boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseReceiveFieldEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.RECEIVE_FIELD_END);\nreturn parseReceiveFieldEnd();\n}\n}\n/**\n* Parse receive field.\n*
\n* receive-field := peer-worker | field-name : peer-worker\n*\n* @return Receiver field node\n*/\nprivate STNode parseReceiveField() {\nswitch (peek().kind) {\ncase FUNCTION_KEYWORD:\nSTNode functionKeyword = consume();\nreturn STNodeFactory.createSimpleNameReferenceNode(functionKeyword);\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);\nreturn createQualifiedReceiveField(identifier);\ndefault:\nrecover(peek(), ParserRuleContext.RECEIVE_FIELD);\nreturn parseReceiveField();\n}\n}\nprivate STNode createQualifiedReceiveField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);\n}\n/**\n* Parse left arrow (<-) token.\n*\n* @return left arrow token\n*/\nprivate STNode parseLeftArrowToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.LEFT_ARROW_TOKEN);\nreturn parseLeftArrowToken();\n}\n}\n/**\n* Parse signed right shift token (>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseSignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTToken endLGToken = consume();\nSTNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),\nendLGToken.trailingMinutiae());\nif (hasTrailingMinutiae(openGTToken)) {\ndoubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);\n}\nreturn doubleGTToken;\n}\n/**\n* Parse unsigned right shift token (>>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseUnsignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTNode middleGTToken = consume();\nSTNode endLGToken = consume();\nSTNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,\nopenGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());\nboolean validOpenGTToken = !hasTrailingMinutiae(openGTToken);\nboolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);\nif (validOpenGTToken && validMiddleGTToken) {\nreturn unsignedRightShiftToken;\n}\nunsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);\nreturn unsignedRightShiftToken;\n}\n/**\n* Parse wait action.\n*
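\n* Illustrative examples (assuming standard Ballerina syntax): wait f1 (single), wait f1|f2 (alternate), and wait {a: f1, b: f2} (multiple).\n*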
\n* wait-action := single-wait-action | multiple-wait-action | alternate-wait-action \n*\n* @return Wait action node\n*/\nprivate STNode parseWaitAction() {\nSTNode waitKeyword = parseWaitKeyword();\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nreturn parseMultiWaitAction(waitKeyword);\n}\nreturn parseSingleOrAlternateWaitAction(waitKeyword);\n}\n/**\n* Parse wait keyword.\n*\n* @return wait keyword\n*/\nprivate STNode parseWaitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WAIT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.WAIT_KEYWORD);\nreturn parseWaitKeyword();\n}\n}\n/**\n* Parse single or alternate wait actions.\n*
\n* \n* alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+\n*
\n* wait-future-expr := expression but not mapping-constructor-expr\n*
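\n* Illustrative example (assuming standard Ballerina syntax): int r = wait f1|f2; completes with the result of whichever future terminates first.\n*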
\n*\n* @param waitKeyword wait keyword\n* @return Single or alternate wait action node\n*/\nprivate STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);\nSTToken nextToken = peek();\nif (isEndOfWaitFutureExprList(nextToken.kind)) {\nendContext();\nSTNode waitFutureExprs = STNodeFactory\n.createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\nwaitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);\n}\nList waitFutureExprList = new ArrayList<>();\nSTNode waitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\nSTNode waitFutureExprEnd;\nwhile (!isEndOfWaitFutureExprList(nextToken.kind)) {\nwaitFutureExprEnd = parseWaitFutureExprEnd();\nif (waitFutureExprEnd == null) {\nbreak;\n}\nwaitFutureExprList.add(waitFutureExprEnd);\nwaitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));\n}\nprivate boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ncase PIPE_TOKEN:\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFutureExpr() {\nSTNode waitFutureExpr = parseActionOrExpression();\nif (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {\nwaitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,\nDiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);\n} else if (isAction(waitFutureExpr)) {\nwaitFutureExpr =\nSyntaxErrors.addDiagnostic(waitFutureExpr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);\n}\nreturn waitFutureExpr;\n}\nprivate STNode parseWaitFutureExprEnd() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ndefault:\nif (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {\nreturn null;\n}\nrecover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);\nreturn parseWaitFutureExprEnd();\n}\n}\n/**\n* Parse multiple wait action.\n*
\n* multiple-wait-action := wait { wait-field (, wait-field)* }\n*\n* @param waitKeyword Wait keyword\n* @return Multiple wait action node\n*/\nprivate STNode parseMultiWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.MULTI_WAIT_FIELDS);\nSTNode openBrace = parseOpenBrace();\nSTNode waitFields = parseWaitFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);\nSTNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);\n}\nprivate STNode parseWaitFields() {\nList waitFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfWaitFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode waitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\nSTNode waitFieldEnd;\nwhile (!isEndOfWaitFields(nextToken.kind)) {\nwaitFieldEnd = parseWaitFieldEnd();\nif (waitFieldEnd == null) {\nbreak;\n}\nwaitFields.add(waitFieldEnd);\nwaitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(waitFields);\n}\nprivate boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFieldEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.WAIT_FIELD_END);\nreturn parseWaitFieldEnd();\n}\n}\n/**\n* Parse wait field.\n*
\n* wait-field := variable-name | field-name : wait-future-expr\n*\n* @return Receiver field node\n*/\nprivate STNode parseWaitField() {\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);\nidentifier = STNodeFactory.createSimpleNameReferenceNode(identifier);\nreturn createQualifiedWaitField(identifier);\ndefault:\nrecover(peek(), ParserRuleContext.WAIT_FIELD_NAME);\nreturn parseWaitField();\n}\n}\nprivate STNode createQualifiedWaitField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode waitFutureExpr = parseWaitFutureExpr();\nreturn STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);\n}\n/**\n* Parse annot access expression.\n*
\n* \n* annot-access-expr := expression .@ annot-tag-reference\n*
\n* annot-tag-reference := qualified-identifier | identifier\n*
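\n* Illustrative example (assuming standard Ballerina syntax, with a hypothetical annotation tag v): t.@v reads the v annotation value attached to the typedesc t.\n*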
\n*\n* @param lhsExpr Preceding expression of the annot access expression\n* @return Parsed node\n*/\nprivate STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode annotAccessToken = parseAnnotChainingToken();\nSTNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);\n}\n/**\n* Parse annot-chaining-token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);\nreturn parseAnnotChainingToken();\n}\n}\n/**\n* Parse field access identifier.\n*
\n* field-access-identifier := qualified-identifier | identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {\nreturn parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);\n}\n/**\n* Parse query action.\n*
\n* query-action := query-pipeline do-clause\n*
\n* do-clause := do block-stmt\n*
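\n* Illustrative example (assuming standard Ballerina syntax): from var i in nums do { sum += i; } runs the do-clause block once per frame emitted by the query pipeline.\n*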
\n*\n* @param queryConstructType Query construct type. This is only for validation.\n* @param queryPipeline Query pipeline\n* @param selectClause Select clause if any. This is only for validation.\n* @param isRhsExpr Whether this is a rhs expression\n* @return Query action node\n*/\nprivate STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause,\nboolean isRhsExpr) {\nif (queryConstructType != null) {\nqueryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,\nDiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);\n}\nif (selectClause != null) {\nqueryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,\nDiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);\n}\nstartContext(ParserRuleContext.DO_CLAUSE);\nSTNode doKeyword = parseDoKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nreturn STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt);\n}\n/**\n* Parse 'do' keyword.\n*\n* @return do keyword node\n*/\nprivate STNode parseDoKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DO_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.DO_KEYWORD);\nreturn parseDoKeyword();\n}\n}\n/**\n* Parse optional field access or xml optional attribute access expression.\n*
\n* \n* optional-field-access-expr := expression ?. field-name\n*
\n* xml-optional-attribute-access-expr := expression ?. xml-attribute-name\n*
\n* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n*
\n* xml-qualified-name := xml-namespace-prefix : identifier\n*
\n* xml-namespace-prefix := identifier\n*
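\n* Illustrative example (assuming standard Ballerina syntax): p?.name evaluates to () when p is (), instead of failing the field access.\n*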
\n*\n* @param lhsExpr Preceding expression of the optional access\n* @return Parsed node\n*/\nprivate STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode optionalFieldAccessToken = parseOptionalChainingToken();\nSTNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);\n}\n/**\n* Parse optional chaining token.\n*\n* @return parsed node\n*/\nprivate STNode parseOptionalChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);\nreturn parseOptionalChainingToken();\n}\n}\n/**\n* Parse conditional expression.\n*
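\n* Illustrative example (assuming standard Ballerina syntax): int max = a > b ? a : b; yields a when the condition holds and b otherwise.\n*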
\n* conditional-expr := expression ? expression : expression\n*\n* @param lhsExpr Preceding expression of the question mark\n* @return Parsed node\n*/\nprivate STNode parseConditionalExpression(STNode lhsExpr) {\nstartContext(ParserRuleContext.CONDITIONAL_EXPRESSION);\nSTNode questionMark = parseQuestionMark();\nSTNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);\nSTNode nextToken = peek();\nSTNode endExpr;\nSTNode colon;\nif (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;\nSTNode modulePrefix = qualifiedNameRef.modulePrefix;\nif (modulePrefix.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nmiddleExpr = STNodeFactory.createSimpleNameReferenceNode(modulePrefix);\n} else {\nmiddleExpr = modulePrefix;\n}\ncolon = qualifiedNameRef.colon;\nendContext();\nendExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);\n} else {\nif (middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nmiddleExpr = generateQualifiedNameRef(middleExpr);\n}\ncolon = parseColon();\nendContext();\nendExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);\n}\nreturn STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);\n}\nprivate STNode generateQualifiedNameRef(STNode qualifiedName) {\nSTQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) qualifiedName;\nSTNode modulePrefix = qualifiedNameRef.modulePrefix;\nif (modulePrefix.kind != SyntaxKind.IDENTIFIER_TOKEN) {\nSTBuiltinSimpleNameReferenceNode builtInType = (STBuiltinSimpleNameReferenceNode) modulePrefix;\nSTToken nameToken = (STToken) builtInType.name;\nSTNode preDeclaredPrefix = STNodeFactory.createIdentifierToken(nameToken.text(),\nnameToken.leadingMinutiae(), nameToken.trailingMinutiae());\nreturn STNodeFactory.createQualifiedNameReferenceNode(preDeclaredPrefix,\nqualifiedNameRef.colon, qualifiedNameRef.identifier);\n} else {\nreturn qualifiedName;\n}\n}\n/**\n* Parse enum declaration.\n*
\n* module-enum-decl :=\n* metadata\n* [public] enum identifier { enum-member (, enum-member)* }\n* enum-member := metadata identifier [= const-expr]\n*
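\n* Illustrative example (assuming standard Ballerina syntax): enum Color { RED, GREEN = \"green\", BLUE } declares three members, one with an explicit const-expr value.\n*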
\n*\n* @param metadata\n* @param qualifier\n* @return Parsed enum node.\n*/\nprivate STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.MODULE_ENUM_DECLARATION);\nSTNode enumKeywordToken = parseEnumKeyword();\nSTNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);\nSTNode openBraceToken = parseOpenBrace();\nSTNode enumMemberList = parseEnumMemberList();\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nopenBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,\nDiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);\nreturn STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,\nopenBraceToken, enumMemberList, closeBraceToken);\n}\n/**\n* Parse 'enum' keyword.\n*\n* @return enum keyword node\n*/\nprivate STNode parseEnumKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ENUM_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ENUM_KEYWORD);\nreturn parseEnumKeyword();\n}\n}\n/**\n* Parse enum member list.\n*
\n* enum-member := metadata identifier [= const-expr]\n*
\n*\n* @return enum member list node.\n*/\nprivate STNode parseEnumMemberList() {\nstartContext(ParserRuleContext.ENUM_MEMBER_LIST);\nif (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList enumMemberList = new ArrayList<>();\nSTNode enumMember = parseEnumMember();\nSTNode enumMemberRhs;\nwhile (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {\nenumMemberRhs = parseEnumMemberEnd();\nif (enumMemberRhs == null) {\nbreak;\n}\nenumMemberList.add(enumMember);\nenumMemberList.add(enumMemberRhs);\nenumMember = parseEnumMember();\n}\nenumMemberList.add(enumMember);\nendContext();\nreturn STNodeFactory.createNodeList(enumMemberList);\n}\n/**\n* Parse enum member.\n*
\n* enum-member := metadata identifier [= const-expr]\n*
\n*\n* @return Parsed enum member node.\n*/\nprivate STNode parseEnumMember() {\nSTNode metadata;\nswitch (peek().kind) {\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData();\nbreak;\ndefault:\nmetadata = STNodeFactory.createEmptyNode();\n}\nSTNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);\nreturn parseEnumMemberRhs(metadata, identifierNode);\n}\nprivate STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {\nSTNode equalToken, constExprNode;\nswitch (peek().kind) {\ncase EQUAL_TOKEN:\nequalToken = parseAssignOp();\nconstExprNode = parseExpression();\nbreak;\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nequalToken = STNodeFactory.createEmptyNode();\nconstExprNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);\nreturn parseEnumMemberRhs(metadata, identifierNode);\n}\nreturn STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);\n}\nprivate STNode parseEnumMemberEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.ENUM_MEMBER_END);\nreturn parseEnumMemberEnd();\n}\n}\nprivate STNode parseTransactionStmtOrVarDecl(STNode annots, List qualifiers, STToken transactionKeyword) {\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\nreportInvalidStatementAnnots(annots, qualifiers);\nreportInvalidQualifierList(qualifiers);\nreturn parseTransactionStatement(transactionKeyword);\ncase COLON_TOKEN:\nif (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);\nreturn parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);\n}\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);\nif (solution.action == Action.KEEP ||\n(solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {\nSTNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);\nreturn parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);\n}\nreturn parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);\n}\n}\n/**\n* Parse transaction statement.\n*
\n* transaction-stmt := `transaction` block-stmt [on-fail-clause]\n*\n* @return Transaction statement node\n*/\nprivate STNode parseTransactionStatement(STNode transactionKeyword) {\nstartContext(ParserRuleContext.TRANSACTION_STMT);\nSTNode blockStmt = parseBlockNode();\nendContext();\nSTNode onFailClause = parseOptionalOnFailClause();\nreturn STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);\n}\n/**\n* Parse commit action.\n*
\n* commit-action := \"commit\"\n*\n* @return Commit action node\n*/\nprivate STNode parseCommitAction() {\nSTNode commitKeyword = parseCommitKeyword();\nreturn STNodeFactory.createCommitActionNode(commitKeyword);\n}\n/**\n* Parse commit keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseCommitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COMMIT_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.COMMIT_KEYWORD);\nreturn parseCommitKeyword();\n}\n}\n/**\n* Parse retry statement.\n*
\n* \n* retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]\n*
\n* retry-spec := [type-parameter] [ `(` arg-list `)` ]\n*
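\n* Illustrative examples (assuming standard Ballerina syntax): retry(3) { ... } and retry<MyRetryManager>(3) { ... }, where MyRetryManager is a hypothetical retry-manager class.\n*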
\n*\n* @return Retry statement node\n*/\nprivate STNode parseRetryStatement() {\nstartContext(ParserRuleContext.RETRY_STMT);\nSTNode retryKeyword = parseRetryKeyword();\nSTNode retryStmt = parseRetryKeywordRhs(retryKeyword);\nreturn retryStmt;\n}\nprivate STNode parseRetryKeywordRhs(STNode retryKeyword) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase LT_TOKEN:\nSTNode typeParam = parseTypeParameter();\nreturn parseRetryTypeParamRhs(retryKeyword, typeParam);\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\ntypeParam = STNodeFactory.createEmptyNode();\nreturn parseRetryTypeParamRhs(retryKeyword, typeParam);\ndefault:\nrecover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);\nreturn parseRetryKeywordRhs(retryKeyword);\n}\n}\nprivate STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {\nSTNode args;\nswitch (peek().kind) {\ncase OPEN_PAREN_TOKEN:\nargs = parseParenthesizedArgList();\nbreak;\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\nargs = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nrecover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);\nreturn parseRetryTypeParamRhs(retryKeyword, typeParam);\n}\nSTNode blockStmt = parseRetryBody();\nendContext();\nSTNode onFailClause = parseOptionalOnFailClause();\nreturn STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);\n}\nprivate STNode parseRetryBody() {\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\nreturn parseBlockNode();\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionStatement(consume());\ndefault:\nrecover(peek(), ParserRuleContext.RETRY_BODY);\nreturn parseRetryBody();\n}\n}\n/**\n* Parse optional on fail clause.\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalOnFailClause() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ON_KEYWORD) {\nreturn parseOnFailClause();\n}\nif (isEndOfRegularCompoundStmt(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNode();\n}\nrecover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);\nreturn parseOptionalOnFailClause();\n}\nprivate boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase AT_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn isStatementStartingToken(nodeKind);\n}\n}\nprivate boolean isStatementStartingToken(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase FINAL_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase DO_KEYWORD:\ncase PANIC_KEYWORD:\ncase CONTINUE_KEYWORD:\ncase BREAK_KEYWORD:\ncase RETURN_KEYWORD:\ncase TYPE_KEYWORD:\ncase LOCK_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase FORK_KEYWORD:\ncase FOREACH_KEYWORD:\ncase XMLNS_KEYWORD:\ncase TRANSACTION_KEYWORD:\ncase RETRY_KEYWORD:\ncase ROLLBACK_KEYWORD:\ncase MATCH_KEYWORD:\ncase FAIL_KEYWORD:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase TRAP_KEYWORD:\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase COMMIT_KEYWORD:\ncase WORKER_KEYWORD:\nreturn true;\ndefault:\nif (isTypeStartingToken(nodeKind)) {\nreturn true;\n}\nif (isValidExpressionStart(nodeKind, 1)) {\nreturn true;\n}\nreturn false;\n}\n}\n/**\n* Parse on fail clause.\n*
\n* \n* on-fail-clause := on fail typed-binding-pattern statement-block\n* \n*\n* @return On fail clause node\n*/\nprivate STNode parseOnFailClause() {\nstartContext(ParserRuleContext.ON_FAIL_CLAUSE);\nSTNode onKeyword = parseOnKeyword();\nSTNode failKeyword = parseFailKeyword();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);\nSTNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);\nSTNode blockStatement = parseBlockNode();\nendContext();\nreturn STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptor, identifier,\nblockStatement);\n}\n/**\n* Parse retry keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseRetryKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETRY_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.RETRY_KEYWORD);\nreturn parseRetryKeyword();\n}\n}\n/**\n* Parse rollback statement.\n*
\n* rollback-stmt := \"rollback\" [expression] \";\"\n*\n* @return Rollback statement node\n*/\nprivate STNode parseRollbackStatement() {\nstartContext(ParserRuleContext.ROLLBACK_STMT);\nSTNode rollbackKeyword = parseRollbackKeyword();\nSTNode expression;\nif (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {\nexpression = STNodeFactory.createEmptyNode();\n} else {\nexpression = parseExpression();\n}\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);\n}\n/**\n* Parse rollback keyword.\n*\n* @return Rollback keyword node\n*/\nprivate STNode parseRollbackKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.ROLLBACK_KEYWORD);\nreturn parseRollbackKeyword();\n}\n}\n/**\n* Parse transactional expression.\n*
\n* transactional-expr := \"transactional\"\n*\n* @return Transactional expression node\n*/\nprivate STNode parseTransactionalExpression() {\nSTNode transactionalKeyword = parseTransactionalKeyword();\nreturn STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);\n}\n/**\n* Parse transactional keyword.\n*\n* @return Transactional keyword node\n*/\nprivate STNode parseTransactionalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD);\nreturn parseTransactionalKeyword();\n}\n}\n/**\n* Parse base16 literal.\n*
\n* \n* byte-array-literal := Base16Literal | Base64Literal\n*
\n* Base16Literal := base16 WS ` HexGroup* WS `\n*
\n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*
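\n* Illustrative examples (assuming standard Ballerina syntax): byte[] a = base16 `aeeb`; and byte[] b = base64 `SGVsbG8=`;\n*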
\n*\n* @return parsed node\n*/\nprivate STNode parseByteArrayLiteral() {\nSTNode type;\nif (peek().kind == SyntaxKind.BASE16_KEYWORD) {\ntype = parseBase16Keyword();\n} else {\ntype = parseBase64Keyword();\n}\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nif (startingBackTick.isMissing()) {\nstartingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);\nSTNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);\nSTNode content = STNodeFactory.createEmptyNode();\nSTNode byteArrayLiteral =\nSTNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);\nbyteArrayLiteral =\nSyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);\nreturn byteArrayLiteral;\n}\nSTNode content = parseByteArrayContent();\nreturn parseByteArrayLiteral(type, startingBackTick, content);\n}\n/**\n* Parse byte array literal.\n*\n* @param typeKeyword keyword token, possible values are `base16` and `base64`\n* @param startingBackTick starting backtick token\n* @param byteArrayContent byte array literal content to be validated\n* @return parsed byte array literal node\n*/\nprivate STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {\nSTNode content = STNodeFactory.createEmptyNode();\nSTNode newStartingBackTick = startingBackTick;\nSTNodeList items = (STNodeList) byteArrayContent;\nif (items.size() == 1) {\nSTNode item = items.get(0);\nif (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (item.kind != SyntaxKind.TEMPLATE_STRING) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else {\ncontent = item;\n}\n} else if (items.size() > 1) {\nSTNode clonedStartingBackTick = startingBackTick;\nfor (int index = 0; index < items.size(); index++) {\nSTNode item = items.get(index);\nclonedStartingBackTick =\nSyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);\n}\nnewStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n}\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);\n}\n/**\n* Parse base16 keyword.\n*\n* @return base16 keyword node\n*/\nprivate STNode parseBase16Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE16_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.BASE16_KEYWORD);\nreturn parseBase16Keyword();\n}\n}\n/**\n* Parse base64 keyword.\n*\n* @return base64 keyword node\n*/\nprivate STNode parseBase64Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE64_KEYWORD) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.BASE64_KEYWORD);\nreturn parseBase64Keyword();\n}\n}\n/**\n* Validate and 
parse byte array literal content.\n* An error is reported if the content is invalid.\n*\n* @return parsed node\n*/\nprivate STNode parseByteArrayContent() {\nSTToken nextToken = peek();\nList<STNode> items = new ArrayList<>();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode content = parseTemplateItem();\nitems.add(content);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\n/**\n* Validate base16 literal content.\n*
\n* \n* Base16Literal := base16 WS ` HexGroup* WS `\n*
\n* HexGroup := WS HexDigit WS HexDigit\n*
\n* WS := WhiteSpaceChar*\n*
\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
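\n* Examples, per this grammar: \"ae eb\" is valid (whitespace is ignored; four hex digits), while \"aee\" (odd digit count) and \"aeg1\" (non-hex character) are not.\n*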
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase16LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint hexDigitCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ndefault:\nif (isHexDigit(c)) {\nhexDigitCount++;\n} else {\nreturn false;\n}\nbreak;\n}\n}\nreturn hexDigitCount % 2 == 0;\n}\n/**\n* Validate base64 literal content.\n*
\n* \n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*
\n* Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char\n*
\n* PaddedBase64Group :=\n* WS Base64Char WS Base64Char WS Base64Char WS PaddingChar\n* | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar\n*
\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*
\n* PaddingChar := =\n*
\n* WS := WhiteSpaceChar*\n*
\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
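\n* Examples, per this grammar: \"SGVs bG8=\" is valid (whitespace is ignored; seven base64 chars plus one padding char), while \"ab=c\" is not (a base64 char may not follow a padding char).\n*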
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase64LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint base64CharCount = 0;\nint paddingCharCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ncase LexerTerminals.EQUAL:\npaddingCharCount++;\nbreak;\ndefault:\nif (isBase64Char(c)) {\nif (paddingCharCount == 0) {\nbase64CharCount++;\n} else {\nreturn false;\n}\n} else {\nreturn false;\n}\nbreak;\n}\n}\nif (paddingCharCount > 2) {\nreturn false;\n} else if (paddingCharCount == 0) {\nreturn base64CharCount % 4 == 0;\n} else {\nreturn base64CharCount % 4 == 4 - paddingCharCount;\n}\n}\n/**\n*
\n* Check whether a given char is a base64 char.\n*
\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*\n* @param c character to check\n* @return true, if the character represents a base64 char. false otherwise.\n*/\nstatic boolean isBase64Char(int c) {\nif ('a' <= c && c <= 'z') {\nreturn true;\n}\nif ('A' <= c && c <= 'Z') {\nreturn true;\n}\nif (c == '+' || c == '/') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isHexDigit(int c) {\nif ('a' <= c && c <= 'f') {\nreturn true;\n}\nif ('A' <= c && c <= 'F') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isDigit(int c) {\nreturn ('0' <= c && c <= '9');\n}\n/**\n* Parse xml filter expression.\n*
\n* xml-filter-expr := expression .< xml-name-pattern >\n*\n* @param lhsExpr Preceding expression of .< token\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpression(STNode lhsExpr) {\nSTNode xmlNamePatternChain = parseXMLFilterExpressionRhs();\nreturn STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);\n}\n/**\n* Parse xml filter expression rhs.\n*
\n* filter-expression-rhs := .< xml-name-pattern >\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpressionRhs() {\nSTNode dotLTToken = parseDotLTToken();\nreturn parseXMLNamePatternChain(dotLTToken);\n}\n/**\n* Parse xml name pattern chain.\n*
\n* \n* xml-name-pattern-chain := filter-expression-rhs | xml-element-children-step | xml-element-descendants-step\n*
\n* filter-expression-rhs := .< xml-name-pattern >\n*
\n* xml-element-children-step := /< xml-name-pattern >\n*
\n* xml-element-descendants-step := /**\\/\n*
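\n* Illustrative examples (assuming standard Ballerina syntax): x/* selects all children, x/<item> selects children named item, and x/**\/<name> selects descendants named name.\n*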
\n*\n* @param startToken Preceding token of xml name pattern\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePatternChain(STNode startToken) {\nstartContext(ParserRuleContext.XML_NAME_PATTERN);\nSTNode xmlNamePattern = parseXMLNamePattern();\nSTNode gtToken = parseGTToken();\nendContext();\nstartToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,\nDiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);\nreturn STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);\n}\n/**\n* Parse .< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDotLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.DOT_LT_TOKEN);\nreturn parseDotLTToken();\n}\n}\n/**\n* Parse xml name pattern.\n*
\n* xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePattern() {\nList xmlAtomicNamePatternList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfXMLNamePattern(nextToken.kind)) {\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nSTNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\nSTNode separator;\nwhile (!isEndOfXMLNamePattern(peek().kind)) {\nseparator = parseXMLNamePatternSeparator();\nif (separator == null) {\nbreak;\n}\nxmlAtomicNamePatternList.add(separator);\nxmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\n}\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nprivate boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ncase IDENTIFIER_TOKEN:\ncase ASTERISK_TOKEN:\ncase COLON_TOKEN:\ndefault:\nreturn false;\n}\n}\nprivate STNode parseXMLNamePatternSeparator() {\nSTToken token = peek();\nswitch (token.kind) {\ncase PIPE_TOKEN:\nreturn consume();\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nrecover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);\nreturn parseXMLNamePatternSeparator();\n}\n}\n/**\n* Parse xml atomic name pattern.\n*
\n* \n* xml-atomic-name-pattern :=\n* *\n* | identifier\n* | xml-namespace-prefix : identifier\n* | xml-namespace-prefix : *\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseXMLAtomicNamePattern() {\nstartContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);\nSTNode atomicNamePattern = parseXMLAtomicNamePatternBody();\nendContext();\nreturn atomicNamePattern;\n}\nprivate STNode parseXMLAtomicNamePatternBody() {\nSTToken token = peek();\nSTNode identifier;\nswitch (token.kind) {\ncase ASTERISK_TOKEN:\nreturn consume();\ncase IDENTIFIER_TOKEN:\nidentifier = consume();\nbreak;\ndefault:\nrecover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);\nreturn parseXMLAtomicNamePatternBody();\n}\nreturn parseXMLAtomicNameIdentifier(identifier);\n}\nprivate STNode parseXMLAtomicNameIdentifier(STNode identifier) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COLON_TOKEN) {\nSTNode colon = consume();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nSTToken endToken = consume();\nreturn STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);\n}\n}\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\n/**\n* Parse xml step expression.\n*
\n* xml-step-expr := expression xml-step-start\n*\n* @param lhsExpr Preceding expression of /*, /<, or /**\/< token\n* @return Parsed node\n*/\nprivate STNode parseXMLStepExpression(STNode lhsExpr) {\nSTNode xmlStepStart = parseXMLStepStart();\nreturn STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);\n}\n/**\n* Parse xml step start.\n*
\n* \n* xml-step-start :=\n* xml-all-children-step\n* | xml-element-children-step\n* | xml-element-descendants-step\n*
\n* xml-all-children-step := /*\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLStepStart() {\nSTToken token = peek();\nSTNode startToken;\nswitch (token.kind) {\ncase SLASH_ASTERISK_TOKEN:\nreturn consume();\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\nstartToken = parseDoubleSlashDoubleAsteriskLTToken();\nbreak;\ncase SLASH_LT_TOKEN:\ndefault:\nstartToken = parseSlashLTToken();\nbreak;\n}\nreturn parseXMLNamePatternChain(startToken);\n}\n/**\n* Parse /< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseSlashLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);\nreturn parseSlashLTToken();\n}\n}\n/**\n* Parse /< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDoubleSlashDoubleAsteriskLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);\nreturn parseDoubleSlashDoubleAsteriskLTToken();\n}\n}\n/**\n* Parse match statement.\n*
\n* match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]\n*\n* @return Match statement\n*/\nprivate STNode parseMatchStatement() {\nstartContext(ParserRuleContext.MATCH_STMT);\nSTNode matchKeyword = parseMatchKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nstartContext(ParserRuleContext.MATCH_BODY);\nSTNode openBrace = parseOpenBrace();\nList<STNode> matchClausesList = new ArrayList<>();\nwhile (!isEndOfMatchClauses(peek().kind)) {\nSTNode clause = parseMatchClause();\nmatchClausesList.add(clause);\n}\nSTNode matchClauses = STNodeFactory.createNodeList(matchClausesList);\nif (isNodeListEmpty(matchClauses)) {\nopenBrace = SyntaxErrors.addDiagnostic(openBrace,\nDiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nendContext();\nSTNode onFailClause = parseOptionalOnFailClause();\nreturn STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,\nonFailClause);\n}\n/**\n* Parse match keyword.\n*\n* @return Match keyword node\n*/\nprivate STNode parseMatchKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.MATCH_KEYWORD);\nreturn parseMatchKeyword();\n}\n}\nprivate boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse a single match clause.\n*
\n* \n* match-clause := match-pattern-list [match-guard] => block-stmt\n*
\n* match-guard := if expression\n*
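\n* Illustrative example (assuming standard Ballerina syntax, with a hypothetical predicate isSmall): the clause 0|1 if isSmall(v) => { ... } combines a match-pattern-list with a match-guard.\n*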
\n*\n* @return A match clause\n*/\nprivate STNode parseMatchClause() {\nSTNode matchPatterns = parseMatchPatternList();\nSTNode matchGuard = parseMatchGuard();\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode blockStmt = parseBlockNode();\nreturn STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);\n}\n/**\n* Parse match guard.\n*
\n* match-guard := if expression\n*\n* @return Match guard\n*/\nprivate STNode parseMatchGuard() {\nswitch (peek().kind) {\ncase IF_KEYWORD:\nSTNode ifKeyword = parseIfKeyword();\nSTNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);\nreturn STNodeFactory.createMatchGuardNode(ifKeyword, expr);\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nrecover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);\nreturn parseMatchGuard();\n}\n}\n/**\n* Parse match patterns list.\n*
\n* match-pattern-list := match-pattern (| match-pattern)*\n*\n* @return Match patterns list\n*/\nprivate STNode parseMatchPatternList() {\nstartContext(ParserRuleContext.MATCH_PATTERN);\nList<STNode> matchClauses = new ArrayList<>();\nwhile (!isEndOfMatchPattern(peek().kind)) {\nSTNode clause = parseMatchPattern();\nif (clause == null) {\nbreak;\n}\nmatchClauses.add(clause);\nSTNode separator = parseMatchPatternListMemberRhs();\nif (separator == null) {\nbreak;\n}\nmatchClauses.add(separator);\n}\nendContext();\nreturn STNodeFactory.createNodeList(matchClauses);\n}\nprivate boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase RIGHT_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse match pattern.\n*
\n* \n* match-pattern := var binding-pattern\n* | wildcard-match-pattern\n* | const-pattern\n* | list-match-pattern\n* | mapping-match-pattern\n* | error-match-pattern\n* \n*\n* @return Match pattern\n*/\nprivate STNode parseMatchPattern() {\nswitch (peek().kind) {\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\ncase STRING_LITERAL_TOKEN:\nreturn parseSimpleConstExpr();\ncase IDENTIFIER_TOKEN:\nSTNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);\nreturn parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);\ncase VAR_KEYWORD:\nreturn parseVarTypedBindingPattern();\ncase OPEN_BRACKET_TOKEN:\nreturn parseListMatchPattern();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingMatchPattern();\ncase ERROR_KEYWORD:\nreturn parseErrorMatchPattern();\ndefault:\nrecover(peek(), ParserRuleContext.MATCH_PATTERN_START);\nreturn parseMatchPattern();\n}\n}\nprivate STNode parseMatchPatternListMemberRhs() {\nswitch (peek().kind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ncase IF_KEYWORD:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);\nreturn parseMatchPatternListMemberRhs();\n}\n}\n/**\n* Parse var typed binding pattern.\n*
\n* var binding-pattern\n*
\n*\n* @return Parsed typed binding pattern node\n*/\nprivate STNode parseVarTypedBindingPattern() {\nSTNode varKeyword = parseVarKeyword();\nSTNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);\n}\n/**\n* Parse var keyword.\n*\n* @return Var keyword node\n*/\nprivate STNode parseVarKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.VAR_KEYWORD) {\nreturn consume();\n} else {\nrecover(nextToken, ParserRuleContext.VAR_KEYWORD);\nreturn parseVarKeyword();\n}\n}\n/**\n* Parse list match pattern.\n*
\n* \n* list-match-pattern := [ list-member-match-patterns ]\n* list-member-match-patterns :=\n* match-pattern (, match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n* \n*
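\n* Illustrative example (assuming standard Ballerina syntax): [1, var second, ...var rest] matches a list whose first member is 1, binding the remaining members to rest.\n*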
\n*\n* @return Parsed list match pattern node\n*/\nprivate STNode parseListMatchPattern() {\nstartContext(ParserRuleContext.LIST_MATCH_PATTERN);\nSTNode openBracketToken = parseOpenBracket();\nList matchPatternList = new ArrayList<>();\nSTNode listMatchPatternMemberRhs = null;\nboolean isEndOfFields = false;\nwhile (!isEndOfListMatchPattern()) {\nSTNode listMatchPatternMember = parseListMatchPatternMember();\nmatchPatternList.add(listMatchPatternMember);\nlistMatchPatternMemberRhs = parseListMatchPatternMemberRhs();\nif (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {\nisEndOfFields = true;\nbreak;\n}\nif (listMatchPatternMemberRhs != null) {\nmatchPatternList.add(listMatchPatternMemberRhs);\n} else {\nbreak;\n}\n}\nwhile (isEndOfFields && listMatchPatternMemberRhs != null) {\nupdateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);\nif (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {\nbreak;\n}\nSTNode invalidField = parseListMatchPatternMember();\nupdateLastNodeInListWithInvalidNode(matchPatternList, invalidField,\nDiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);\nlistMatchPatternMemberRhs = parseListMatchPatternMemberRhs();\n}\nSTNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);\n}\npublic boolean isEndOfListMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseListMatchPatternMember() {\nSTNode nextToken = peek();\nswitch (nextToken.kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ndefault:\nreturn parseMatchPattern();\n}\n}\n/**\n* Parse rest match pattern.\n*
\n* \n* rest-match-pattern := ... var variable-name\n* \n*
\n*\n* @return Parsed rest match pattern node\n*/\nprivate STNode parseRestMatchPattern() {\nstartContext(ParserRuleContext.REST_MATCH_PATTERN);\nSTNode ellipsisToken = parseEllipsis();\nSTNode varKeywordToken = parseVarKeyword();\nSTNode variableName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);\nreturn STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);\n}\nprivate STNode parseListMatchPatternMemberRhs() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);\nreturn parseListMatchPatternMemberRhs();\n}\n}\n/**\n* Parse mapping match pattern.\n*
\n* mapping-match-pattern := { field-match-patterns }\n*
\n* field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n*
\n* field-match-pattern := field-name : match-pattern\n*
\n* rest-match-pattern := ... var variable-name\n*
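\n* Illustrative example (assuming standard Ballerina syntax): {name: var n, ...var rest} matches a mapping with a name field and binds the remaining fields to rest.\n*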
\n*\n* @return Parsed mapping match pattern node.\n*/\nprivate STNode parseMappingMatchPattern() {\nstartContext(ParserRuleContext.MAPPING_MATCH_PATTERN);\nSTNode openBraceToken = parseOpenBrace();\nList<STNode> fieldMatchPatternList = new ArrayList<>();\nSTNode fieldMatchPatternRhs = null;\nboolean isEndOfFields = false;\nwhile (!isEndOfMappingMatchPattern()) {\nSTNode fieldMatchPatternMember = parseFieldMatchPatternMember();\nfieldMatchPatternList.add(fieldMatchPatternMember);\nfieldMatchPatternRhs = parseFieldMatchPatternRhs();\nif (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {\nisEndOfFields = true;\nbreak;\n}\nif (fieldMatchPatternRhs != null) {\nfieldMatchPatternList.add(fieldMatchPatternRhs);\n} else {\nbreak;\n}\n}\nwhile (isEndOfFields && fieldMatchPatternRhs != null) {\nupdateLastNodeInListWithInvalidNode(fieldMatchPatternList, fieldMatchPatternRhs, null);\nif (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nbreak;\n}\nSTNode invalidField = parseFieldMatchPatternMember();\nupdateLastNodeInListWithInvalidNode(fieldMatchPatternList, invalidField,\nDiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);\nfieldMatchPatternRhs = parseFieldMatchPatternRhs();\n}\nSTNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);\n}\nprivate STNode parseFieldMatchPatternMember() {\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseFieldMatchPattern();\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ndefault:\nrecover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);\nreturn parseFieldMatchPatternMember();\n}\n}\n/**\n* Parse field match pattern.\n*
\n* field-match-pattern := field-name : match-pattern\n*
\n*\n* @return Parsed field match pattern node\n*/\npublic STNode parseFieldMatchPattern() {\nSTNode fieldNameNode = parseVariableName();\nSTNode colonToken = parseColon();\nSTNode matchPattern = parseMatchPattern();\nreturn STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);\n}\npublic boolean isEndOfMappingMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseFieldMatchPatternRhs() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);\nreturn parseFieldMatchPatternRhs();\n}\n}\nprivate STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\nSTNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,\nParserRuleContext.ERROR_KEYWORD);\nstartContext(ParserRuleContext.ERROR_MATCH_PATTERN);\nreturn parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);\ndefault:\nif (isMatchPatternEnd(peek().kind)) {\nreturn typeRefOrConstExpr;\n}\nrecover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN, typeRefOrConstExpr);\nreturn parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);\n}\n}\nprivate boolean isMatchPatternEnd(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse functional match pattern.\n*
\n* error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )\n* error-arg-list-match-pattern :=\n* error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]\n* | [error-field-match-patterns]\n* error-message-match-pattern := simple-match-pattern\n* error-cause-match-pattern := simple-match-pattern | error-match-pattern\n* simple-match-pattern :=\n* wildcard-match-pattern\n* | const-pattern\n* | var variable-name\n* error-field-match-patterns :=\n* named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]\n* | rest-match-pattern\n* named-arg-match-pattern := arg-name = match-pattern\n*
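\n* Illustrative example (assuming standard Ballerina syntax): error(var msg, code = var c) matches an error value, binding its message and a named detail field code.\n*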
\n*\n* @return Parsed functional match pattern node.\n*/\nprivate STNode parseErrorMatchPattern() {\nstartContext(ParserRuleContext.ERROR_MATCH_PATTERN);\nSTNode errorKeyword = consume();\nreturn parseErrorMatchPattern(errorKeyword);\n}\nprivate STNode parseErrorMatchPattern(STNode errorKeyword) {\nSTToken nextToken = peek();\nSTNode typeRef;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\ntypeRef = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nif (isTypeReferenceStartToken(nextToken.kind)) {\ntypeRef = parseTypeReference();\nbreak;\n}\nrecover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);\nreturn parseErrorMatchPattern(errorKeyword);\n}\nreturn parseErrorMatchPattern(errorKeyword, typeRef);\n}\nprivate STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {\nSTNode openParenthesisToken = parseOpenParenthesis();\nSTNode argListMatchPatternNode = parseErrorArgListMatchPatterns();\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken,\nargListMatchPatternNode, closeParenthesisToken);\n}\nprivate STNode parseErrorArgListMatchPatterns() {\nList argListMatchPatterns = new ArrayList<>();\nif (isEndOfErrorFieldMatchPatterns()) {\nreturn STNodeFactory.createNodeList(argListMatchPatterns);\n}\nstartContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);\nSTNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);\nendContext();\nif (firstArg == null) {\nreturn STNodeFactory.createNodeList(argListMatchPatterns);\n}\nif (isSimpleMatchPattern(firstArg.kind)) {\nargListMatchPatterns.add(firstArg);\nSTNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);\nif (argEnd != null) {\nSTNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);\nif (isValidSecondArgMatchPattern(secondArg.kind)) {\nargListMatchPatterns.add(argEnd);\nargListMatchPatterns.add(secondArg);\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,\nDiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);\n}\n}\n} else {\nif (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&\nfirstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {\naddInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);\n} else {\nargListMatchPatterns.add(firstArg);\n}\n}\nparseErrorFieldMatchPatterns(argListMatchPatterns);\nreturn STNodeFactory.createNodeList(argListMatchPatterns);\n}\nprivate boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {\nswitch (matchPatternKind) {\ncase IDENTIFIER_TOKEN:\ncase SIMPLE_NAME_REFERENCE:\ncase NUMERIC_LITERAL:\ncase STRING_LITERAL:\ncase NULL_LITERAL:\ncase NIL_LITERAL:\ncase BOOLEAN_LITERAL:\ncase TYPED_BINDING_PATTERN:\ncase UNARY_EXPRESSION:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {\nswitch (syntaxKind) {\ncase ERROR_MATCH_PATTERN:\ncase NAMED_ARG_MATCH_PATTERN:\ncase REST_MATCH_PATTERN:\nreturn true;\ndefault:\nif (isSimpleMatchPattern(syntaxKind)) {\nreturn true;\n}\nreturn false;\n}\n}\n/**\n* Parse error field match patterns.\n* error-field-match-patterns :=\n* named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]\n* | rest-match-pattern\n* named-arg-match-pattern := arg-name = match-pattern\n* @param 
argListMatchPatterns\n*/\nprivate void parseErrorFieldMatchPatterns(List argListMatchPatterns) {\nSyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;\nwhile (!isEndOfErrorFieldMatchPatterns()) {\nSTNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);\nif (argEnd == null) {\nbreak;\n}\nSTNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);\nDiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);\nif (errorCode == null) {\nargListMatchPatterns.add(argEnd);\nargListMatchPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else if (argListMatchPatterns.size() == 0) {\naddInvalidNodeToNextToken(argEnd, null);\naddInvalidNodeToNextToken(currentArg, errorCode);\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);\n}\n}\n}\nprivate boolean isEndOfErrorFieldMatchPatterns() {\nreturn isEndOfErrorFieldBindingPatterns();\n}\nprivate STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn consume();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), currentCtx);\nreturn parseErrorArgListMatchPatternEnd(currentCtx);\n}\n}\nprivate STNode parseErrorArgListMatchPattern(ParserRuleContext context) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseNamedOrSimpleMatchPattern();\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\ncase STRING_LITERAL_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseMatchPattern();\ncase VAR_KEYWORD:\nSTNode varType = createBuiltinSimpleNameReference(consume());\nSTNode variableName = createCaptureOrWildcardBP(parseVariableName());\nreturn STNodeFactory.createTypedBindingPatternNode(varType, variableName);\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nrecover(nextToken, context);\nreturn parseErrorArgListMatchPattern(context);\n}\n}\nprivate STNode parseNamedOrSimpleMatchPattern() {\nSTNode identifier = consume();\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nreturn parseNamedArgMatchPattern(identifier);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn identifier;\n}\n}\n/**\n* Parses the next named arg match pattern.\n*
\n* named-arg-match-pattern := arg-name = match-pattern
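\n* For example (illustrative; names assumed): given the match pattern\n* error InvalidCode(msg, code = 404), this method parses the trailing code = 404 part.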
\n*\n* @return arg match pattern list node added the new arg match pattern\n*/\nprivate STNode parseNamedArgMatchPattern(STNode identifier) {\nstartContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);\nSTNode equalToken = parseAssignOp();\nSTNode matchPattern = parseMatchPattern();\nendContext();\nreturn STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);\n}\nprivate DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\nswitch (currentArgKind) {\ncase NAMED_ARG_MATCH_PATTERN:\ncase REST_MATCH_PATTERN:\nif (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {\nreturn DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;\n}\nreturn null;\ndefault:\nreturn DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;\n}\n}\n/**\n* Parse markdown documentation.\n*\n* @return markdown documentation node\n*/\nprivate STNode parseMarkdownDocumentation() {\nList markdownDocLineList = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {\nSTToken documentationString = consume();\nSTNode parsedDocLines = parseDocumentationString(documentationString);\nappendParsedDocumentationLines(markdownDocLineList, parsedDocLines);\nnextToken = peek();\n}\nSTNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);\nreturn STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);\n}\n/**\n* Parse documentation string.\n*\n* @return markdown documentation line list node\n*/\nprivate STNode parseDocumentationString(STToken documentationStringToken) {\nList leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());\nCollection diagnostics = new ArrayList<>((documentationStringToken.diagnostics()));\nCharReader charReader = CharReader.from(documentationStringToken.text());\nDocumentationLexer documentationLexer = new DocumentationLexer(charReader, leadingTriviaList, diagnostics);\nAbstractTokenReader tokenReader = new TokenReader(documentationLexer);\nDocumentationParser documentationParser = new DocumentationParser(tokenReader);\nreturn documentationParser.parse();\n}\nprivate List getLeadingTriviaList(STNode leadingMinutiaeNode) {\nList leadingTriviaList = new ArrayList<>();\nint bucketCount = leadingMinutiaeNode.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nleadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));\n}\nreturn leadingTriviaList;\n}\nprivate void appendParsedDocumentationLines(List markdownDocLineList, STNode parsedDocLines) {\nint bucketCount = parsedDocLines.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nSTNode markdownDocLine = parsedDocLines.childInBucket(i);\nmarkdownDocLineList.add(markdownDocLine);\n}\n}\n/**\n* Parse any statement that starts with a token that has ambiguity between being\n* a type-desc or an expression.\n*\n* @param annots Annotations\n* @param qualifiers\n* @return Statement node\n*/\nprivate STNode parseStmtStartsWithTypeOrExpr(STNode annots, List qualifiers) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);\n}\nprivate STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {\nif (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nList varDeclQualifiers = new ArrayList<>();\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, varDeclQualifiers, 
typedBindingPatternOrExpr, false);\n}\nSTNode expr = getExpression(typedBindingPatternOrExpr);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {\nList typeDescQualifiers = new ArrayList<>();\nreturn parseTypedBindingPatternOrExpr(typeDescQualifiers, allowAssignment);\n}\nprivate STNode parseTypedBindingPatternOrExpr(List qualifiers, boolean allowAssignment) {\nparseTypeDescQualifiers(qualifiers);\nSTToken nextToken = peek();\nSTNode typeOrExpr;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\nreportInvalidQualifierList(qualifiers);\nreturn parseTypedBPOrExprStartsWithOpenParenthesis();\ncase FUNCTION_KEYWORD:\nreturn parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);\ncase IDENTIFIER_TOKEN:\nreportInvalidQualifierList(qualifiers);\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase OPEN_BRACKET_TOKEN:\nreportInvalidQualifierList(qualifiers);\ntypeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase STRING_LITERAL_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\nreportInvalidQualifierList(qualifiers);\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreportInvalidQualifierList(qualifiers);\nreturn parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());\n}\nreturn parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);\n}\n}\n/**\n* Parse the component after the ambiguous starting node. Ambiguous node could be either an expr\n* or a type-desc. The component followed by this ambiguous node could be the binding-pattern or\n* the expression-rhs.\n*\n* @param typeOrExpr Type desc or the expression\n* @param allowAssignment Flag indicating whether to allow assignment. 
i.e.: whether this is a\n* valid lvalue expression\n* @return Typed-binding-pattern node or an expression node\n*/\nprivate STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc = createUnionTypeDesc(typeOrExpr, pipe, typedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypedBPOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc = createIntersectionTypeDesc(typeOrExpr, ampersand, typedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypedBPOrExpr);\ncase SEMICOLON_TOKEN:\nif (isDefiniteExpr(typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\nif (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase EQUAL_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,\nParserRuleContext.AMBIGUOUS_STMT);\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\ndefault:\nif (isCompoundBinaryOperator(nextToken.kind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nSTToken token = peek();\nrecover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\n}\n}\nprivate STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {\ntypeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);\nreturn parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\nprivate STNode parseTypedBPOrExprStartsWithOpenParenthesis() {\nSTNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();\nif (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {\nreturn 
parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);\n}\nreturn parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);\n}\nprivate boolean isDefiniteTypeDesc(SyntaxKind kind) {\nreturn kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;\n}\nprivate boolean isDefiniteExpr(SyntaxKind kind) {\nif (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn false;\n}\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n}\n/**\n* Parse type or expression that starts with open parenthesis. Possible options are:\n* 1) () - nil type-desc or nil-literal\n* 2) (T) - Parenthesized type-desc\n* 3) (expr) - Parenthesized expression\n* 4) (param, param, ..) - Anon function params\n*\n* @return Type-desc or expression node\n*/\nprivate STNode parseTypedDescOrExprStartsWithOpenParenthesis() {\nSTNode openParen = parseOpenParenthesis();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nSTNode closeParen = parseCloseParenthesis();\nreturn parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);\n}\nSTNode typeOrExpr = parseTypeDescOrExpr();\nif (isAction(typeOrExpr)) {\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,\ncloseParen);\n}\nif (isExpression(typeOrExpr.kind)) {\nstartContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\nreturn parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);\n}\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeOrExpr, closeParen);\n}\n/**\n* Parse type-desc or expression. 
This method does not handle binding patterns.\n*\n* @return Type-desc node or expression node\n*/\nprivate STNode parseTypeDescOrExpr() {\nList typeDescQualifiers = new ArrayList<>();\nreturn parseTypeDescOrExpr(typeDescQualifiers);\n}\nprivate STNode parseTypeDescOrExpr(List qualifiers) {\nparseTypeDescQualifiers(qualifiers);\nSTToken nextToken = peek();\nSTNode typeOrExpr;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\nreportInvalidQualifierList(qualifiers);\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();\nbreak;\ncase FUNCTION_KEYWORD:\ntypeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);\nbreak;\ncase IDENTIFIER_TOKEN:\nreportInvalidQualifierList(qualifiers);\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypeDescOrExprRhs(typeOrExpr);\ncase OPEN_BRACKET_TOKEN:\nreportInvalidQualifierList(qualifiers);\ntypeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();\nbreak;\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase STRING_LITERAL_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\ncase HEX_FLOATING_POINT_LITERAL_TOKEN:\nreportInvalidQualifierList(qualifiers);\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypeDescOrExprRhs(basicLiteral);\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreportInvalidQualifierList(qualifiers);\nreturn parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());\n}\nreturn parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind)) {\nreturn parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n}\nreturn parseTypeDescOrExprRhs(typeOrExpr);\n}\nprivate boolean isExpression(SyntaxKind kind) {\nswitch (kind) {\ncase NUMERIC_LITERAL:\ncase STRING_LITERAL_TOKEN:\ncase NIL_LITERAL:\ncase NULL_LITERAL:\ncase BOOLEAN_LITERAL:\nreturn true;\ndefault:\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n}\n}\n/**\n* Parse statement that starts with an empty parenthesis. 
Empty parenthesis can be\n* 1) Nil literal\n* 2) Nil type-desc\n* 3) Anon-function params\n*\n* @param openParen Open parenthesis\n* @param closeParen Close parenthesis\n* @return Parsed node\n*/\nprivate STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nSTNode params = STNodeFactory.createEmptyNodeList();\nSTNode anonFuncParam =\nSTNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nendContext();\nreturn anonFuncParam;\ndefault:\nreturn STNodeFactory.createNilLiteralNode(openParen, closeParen);\n}\n}\nprivate STNode parseAnonFuncExprOrTypedBPWithFuncType(List qualifiers) {\nSTNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers);\nif (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {\nreturn exprOrTypeDesc;\n}\nreturn parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\n/**\n* Parse anon-func-expr or function-type-desc, by resolving the ambiguity.\n*\n* @param qualifiers Preceding qualifiers\n* @return Anon-func-expr or function-type-desc\n*/\nprivate STNode parseAnonFuncExprOrFuncTypeDesc(List qualifiers) {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);\nSTNode qualifierList;\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcSignature;\nif (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nfuncSignature = parseFuncSignature(true);\nqualifierList = createFuncTypeQualNodeList(qualifiers, true);\nendContext();\nreturn parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);\n}\nfuncSignature = STNodeFactory.createEmptyNode();\nqualifierList = createFuncTypeQualNodeList(qualifiers, false);\nSTNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,\nfuncSignature);\nif (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n}\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n}\nprivate STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {\nParserRuleContext currentCtx = getCurrentContext();\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nif (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\n}\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nfuncSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\nSTNode funcBody = parseAnonFuncBody(false);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nSTNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,\nfunctionKeyword, funcSignature, funcBody);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);\ncase IDENTIFIER_TOKEN:\ndefault:\nSTNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,\nfuncSignature);\nif (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\ntrue);\n}\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, 
false);\n}\n}\nprivate STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {\nSTToken nextToken = peek();\nSTNode typeDesc;\nswitch (nextToken.kind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn createUnionTypeDesc(typeDesc, pipe, rhsTypeDescOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn createIntersectionTypeDesc(typeDesc, ampersand, rhsTypeDescOrExpr);\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\ntypeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\nfalse);\nreturn typeDesc;\ncase SEMICOLON_TOKEN:\nreturn getTypeDescFromExpr(typeOrExpr);\ncase EQUAL_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\ncase COMMA_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,\nParserRuleContext.AMBIGUOUS_STMT);\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nreturn STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);\ndefault:\nif (isCompoundBinaryOperator(nextToken.kind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);\n}\nrecover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS, typeOrExpr);\nreturn parseTypeDescOrExprRhs(typeOrExpr);\n}\n}\nprivate boolean isAmbiguous(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase NIL_LITERAL:\ncase NULL_LITERAL:\ncase NUMERIC_LITERAL:\ncase STRING_LITERAL:\ncase BOOLEAN_LITERAL:\ncase BRACKETED_LIST:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\nbinaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase INDEXED_EXPRESSION:\nSTIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;\nif (!isAmbiguous(indexExpr.containerExpression)) {\nreturn false;\n}\nSTNode keys = indexExpr.keyExpression;\nfor (int i = 0; i < keys.bucketCount(); i++) {\nSTNode item = keys.childInBucket(i);\nif (item.kind == SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAmbiguous(item)) {\nreturn false;\n}\n}\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isAllBasicLiterals(STNode node) {\nswitch (node.kind) {\ncase NIL_LITERAL:\ncase NULL_LITERAL:\ncase 
NUMERIC_LITERAL:\ncase STRING_LITERAL:\ncase BOOLEAN_LITERAL:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\nbinaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase BRACKETED_LIST:\nSTAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;\nfor (STNode member : list.members) {\nif (member.kind == SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAllBasicLiterals(member)) {\nreturn false;\n}\n}\nreturn true;\ncase UNARY_EXPRESSION:\nSTUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;\nif (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&\nunaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {\nreturn false;\n}\nreturn isNumericLiteral(unaryExpr.expression);\ndefault:\nreturn false;\n}\n}\nprivate boolean isNumericLiteral(STNode node) {\nswitch (node.kind) {\ncase NUMERIC_LITERAL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTupleTypeDescOrExprStartsWithOpenBracket() {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList members = new ArrayList<>();\nSTNode memberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nSTNode expr = parseTypeDescOrExpr();\nif (peek().kind == SyntaxKind.ELLIPSIS_TOKEN && isDefiniteTypeDesc(expr.kind)) {\nSTNode ellipsis = consume();\nexpr = STNodeFactory.createRestDescriptorNode(expr, ellipsis);\n}\nmembers.add(expr);\nmemberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmembers.add(memberEnd);\n}\nSTNode memberNodes = STNodeFactory.createNodeList(members);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);\n}\n/**\n* Parse binding-patterns.\n*
\n* binding-pattern := capture-binding-pattern\n* | wildcard-binding-pattern\n* | list-binding-pattern\n* | mapping-binding-pattern\n* | functional-binding-pattern\n*\n* capture-binding-pattern := variable-name\n* variable-name := identifier\n* wildcard-binding-pattern := _\n*\n* list-binding-pattern := [ list-member-binding-patterns ]\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* field-binding-pattern := field-name : binding-pattern | variable-name\n* rest-binding-pattern := ... variable-name\n*\n* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )\n* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]\n* | other-arg-binding-patterns\n* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*\n* positional-arg-binding-pattern := binding-pattern\n* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]\n* | [rest-binding-pattern]\n* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*\n* named-arg-binding-pattern := arg-name = binding-pattern
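\n*\n* For example (illustrative; names assumed): x is a capture-binding-pattern, _ is a\n* wildcard-binding-pattern, [a, b, ...rest] is a list-binding-pattern, and {name: n, age} is a\n* mapping-binding-pattern.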
\n*\n* @return binding-pattern node\n*/\nprivate STNode parseBindingPattern() {\nswitch (peek().kind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseListBindingPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseBindingPatternStartsWithIdentifier();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPattern();\ncase ERROR_KEYWORD:\nreturn parseErrorBindingPattern();\ndefault:\nrecover(peek(), ParserRuleContext.BINDING_PATTERN);\nreturn parseBindingPattern();\n}\n}\nprivate STNode parseBindingPatternStartsWithIdentifier() {\nSTNode argNameOrBindingPattern =\nparseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\nSTToken secondToken = peek();\nif (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nstartContext(ParserRuleContext.ERROR_BINDING_PATTERN);\nSTNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,\nParserRuleContext.ERROR_KEYWORD);\nreturn parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);\n}\nif (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {\nSTNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\nidentifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);\nreturn createCaptureOrWildcardBP(identifier);\n}\nreturn createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);\n}\nprivate STNode createCaptureOrWildcardBP(STNode varName) {\nSTNode bindingPattern;\nif (isWildcardBP(varName)) {\nbindingPattern = getWildcardBindingPattern(varName);\n} else {\nbindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);\n}\nreturn bindingPattern;\n}\n/**\n* Parse list-binding-patterns.\n*
\n* list-binding-pattern := [ list-member-binding-patterns ]\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]
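\n* For example (illustrative; names assumed): [a, b] and [first, ...others] are both\n* list-binding-patterns.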
\n*\n* @return list-binding-pattern node\n*/\nprivate STNode parseListBindingPattern() {\nstartContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode openBracket = parseOpenBracket();\nList bindingPatternsList = new ArrayList<>();\nSTNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);\nendContext();\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, List bindingPatternsList) {\nif (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {\nSTNode closeBracket = parseCloseBracket();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);\nreturn STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);\n}\nSTNode listBindingPatternMember = parseListBindingPatternMember();\nbindingPatternsList.add(listBindingPatternMember);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List bindingPatterns) {\nSTNode member = firstMember;\nSTToken token = peek();\nSTNode listBindingPatternRhs = null;\nwhile (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\nlistBindingPatternRhs = parseListBindingPatternMemberRhs();\nif (listBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(listBindingPatternRhs);\nmember = parseListBindingPatternMember();\nbindingPatterns.add(member);\ntoken = peek();\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);\n}\nprivate STNode parseListBindingPatternMemberRhs() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);\nreturn parseListBindingPatternMemberRhs();\n}\n}\nprivate boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse list-binding-pattern member.\n*
\n* list-binding-pattern := [ list-member-binding-patterns ]\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]
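\n* For example (illustrative; names assumed): in [x, [y, z], ...rest], each of x, [y, z],\n* and ...rest is parsed as one member by this method.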
\n*\n* @return List binding pattern member\n*/\nprivate STNode parseListBindingPatternMember() {\nswitch (peek().kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase OPEN_BRACKET_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ndefault:\nrecover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);\nreturn parseListBindingPatternMember();\n}\n}\n/**\n* Parse rest binding pattern.\n*
\n* rest-binding-pattern := ... variable-name\n*\n* @return Rest binding pattern node\n*/\nprivate STNode parseRestBindingPattern() {\nstartContext(ParserRuleContext.REST_BINDING_PATTERN);\nSTNode ellipsis = parseEllipsis();\nSTNode varName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);\nreturn STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);\n}\n/**\n* Parse Typed-binding-pattern.\n*
\n* typed-binding-pattern := inferable-type-descriptor binding-pattern\n* inferable-type-descriptor := type-descriptor | var
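\n* For example (illustrative; names assumed): string name and var [a, b] are\n* typed-binding-patterns.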
\n*\n* @return Typed binding pattern node\n*/\nprivate STNode parseTypedBindingPattern(ParserRuleContext context) {\nList typeDescQualifiers = new ArrayList<>();\nreturn parseTypedBindingPattern(typeDescQualifiers, context);\n}\nprivate STNode parseTypedBindingPattern(List qualifiers, ParserRuleContext context) {\nSTNode typeDesc = parseTypeDescriptor(qualifiers,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);\nSTNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);\nreturn typeBindingPattern;\n}\n/**\n* Parse mapping-binding-patterns.\n*
\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* field-binding-pattern := field-name : binding-pattern
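\n* For example (illustrative; names assumed): {name: n, age} binds field name to variable n,\n* and field age to a variable of the same name.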
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPattern() {\nstartContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nSTNode openBrace = parseOpenBrace();\nSTToken token = peek();\nif (isEndOfMappingBindingPattern(token.kind)) {\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);\n}\nList bindingPatterns = new ArrayList<>();\nSTNode prevMember = parseMappingBindingPatternMember();\nif (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {\nbindingPatterns.add(prevMember);\n}\nreturn parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);\n}\nprivate STNode parseMappingBindingPattern(STNode openBrace, List bindingPatterns, STNode prevMember) {\nSTToken token = peek();\nSTNode mappingBindingPatternRhs = null;\nwhile (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {\nmappingBindingPatternRhs = parseMappingBindingPatternEnd();\nif (mappingBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(mappingBindingPatternRhs);\nprevMember = parseMappingBindingPatternMember();\nif (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {\nbreak;\n}\nbindingPatterns.add(prevMember);\ntoken = peek();\n}\nif (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {\nbindingPatterns.add(prevMember);\n}\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);\n}\n/**\n* Parse mapping-binding-pattern entry.\n*
\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* field-binding-pattern := field-name : binding-pattern\n* | variable-name
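\n* For example (illustrative; names assumed): in {id: i, ...others}, id: i is a\n* field-binding-pattern and ...others is the rest-binding-pattern.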
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPatternMember() {\nSTToken token = peek();\nswitch (token.kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ndefault:\nreturn parseFieldBindingPattern();\n}\n}\nprivate STNode parseMappingBindingPatternEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);\nreturn parseMappingBindingPatternEnd();\n}\n}\n/**\n* Parse field-binding-pattern.\n* field-binding-pattern := field-name : binding-pattern | varname\n*\n* @return field-binding-pattern node\n*/\nprivate STNode parseFieldBindingPattern() {\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nSTNode fieldBindingPattern = parseFieldBindingPattern(identifier);\nreturn fieldBindingPattern;\ndefault:\nrecover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nreturn parseFieldBindingPattern();\n}\n}\nprivate STNode parseFieldBindingPattern(STNode identifier) {\nSTNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);\n}\nSTNode colon = parseColon();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);\n}\nprivate boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {\nreturn nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;\n}\nprivate STNode parseErrorTypeDescOrErrorBP(STNode annots) {\nSTToken nextNextToken = peek(2);\nswitch (nextNextToken.kind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseAsErrorBindingPattern();\ncase LT_TOKEN:\nreturn parseAsErrorTypeDesc(annots);\ncase IDENTIFIER_TOKEN:\nSyntaxKind nextNextNextTokenKind = peek(3).kind;\nif (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||\nnextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseAsErrorBindingPattern();\n}\ndefault:\nreturn parseAsErrorTypeDesc(annots);\n}\n}\nprivate STNode parseAsErrorBindingPattern() {\nstartContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(parseErrorBindingPattern());\n}\nprivate STNode parseAsErrorTypeDesc(STNode annots) {\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword);\n}\n/**\n* Parse error binding pattern node.\n*
\n* error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )\n*\n* error-arg-list-binding-pattern :=\n* error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]\n* | [error-field-binding-patterns]\n*\n* error-message-binding-pattern := simple-binding-pattern\n* error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern\n* simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern\n*\n* error-field-binding-patterns :=\n* named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]\n* | rest-binding-pattern
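\n*\n* For example (illustrative; names assumed): error MyError(msg, cause, code = c, ...rest),\n* where code = c is a named arg and ...rest is a rest-binding-pattern; formally: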
\n* named-arg-binding-pattern := arg-name = binding-pattern\n*\n* @return Error binding pattern node.\n*/\nprivate STNode parseErrorBindingPattern() {\nstartContext(ParserRuleContext.ERROR_BINDING_PATTERN);\nSTNode errorKeyword = parseErrorKeyword();\nreturn parseErrorBindingPattern(errorKeyword);\n}\nprivate STNode parseErrorBindingPattern(STNode errorKeyword) {\nSTToken nextToken = peek();\nSTNode typeRef;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\ntypeRef = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nif (isTypeReferenceStartToken(nextToken.kind)) {\ntypeRef = parseTypeReference();\nbreak;\n}\nrecover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);\nreturn parseErrorBindingPattern(errorKeyword);\n}\nreturn parseErrorBindingPattern(errorKeyword, typeRef);\n}\nprivate STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {\nSTNode openParenthesis = parseOpenParenthesis();\nSTNode argListBindingPatterns = parseErrorArgListBindingPatterns();\nSTNode closeParenthesis = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis,\nargListBindingPatterns, closeParenthesis);\n}\n/**\n* Parse error arg list binding pattern.\n*
\n* error-arg-list-binding-pattern :=\n* error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]\n* | [error-field-binding-patterns]\n*\n* error-message-binding-pattern := simple-binding-pattern\n* error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern\n* simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern\n*\n* error-field-binding-patterns :=\n* named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]\n* | rest-binding-pattern
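\n*\n* For example (illustrative; names assumed): error(message, error(cause), code = value),\n* where the named arg form is: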
\n* named-arg-binding-pattern := arg-name = binding-pattern\n* \n*\n* @return Error arg list binding patterns.\n*/\nprivate STNode parseErrorArgListBindingPatterns() {\nList argListBindingPatterns = new ArrayList<>();\nif (isEndOfErrorFieldBindingPatterns()) {\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nreturn parseErrorArgListBindingPatterns(argListBindingPatterns);\n}\nprivate STNode parseErrorArgListBindingPatterns(List argListBindingPatterns) {\nSTNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);\nif (firstArg == null) {\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nswitch (firstArg.kind) {\ncase CAPTURE_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nargListBindingPatterns.add(firstArg);\nreturn parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);\ncase ERROR_BINDING_PATTERN:\nSTNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nSTNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);\nmissingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,\nDiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);\nSTNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);\nargListBindingPatterns.add(missingErrorMsgBP);\nargListBindingPatterns.add(missingComma);\nargListBindingPatterns.add(firstArg);\nreturn parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);\ncase REST_BINDING_PATTERN:\ncase NAMED_ARG_BINDING_PATTERN:\nargListBindingPatterns.add(firstArg);\nreturn parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);\ndefault:\naddInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);\nreturn parseErrorArgListBindingPatterns(argListBindingPatterns);\n}\n}\nprivate STNode parseErrorArgListBPWithoutErrorMsg(List argListBindingPatterns) {\nSTNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);\nif (argEnd == null) {\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nSTNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);\nassert secondArg != null;\nswitch (secondArg.kind) {\ncase CAPTURE_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\ncase ERROR_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase NAMED_ARG_BINDING_PATTERN:\nargListBindingPatterns.add(argEnd);\nargListBindingPatterns.add(secondArg);\nreturn parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);\ndefault:\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,\nDiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);\nreturn parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);\n}\n}\nprivate STNode parseErrorArgListBPWithoutErrorMsgAndCause(List argListBindingPatterns,\nSyntaxKind lastValidArgKind) {\nwhile (!isEndOfErrorFieldBindingPatterns()) {\nSTNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);\nif (argEnd == null) {\nbreak;\n}\nSTNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);\nassert currentArg != null;\nDiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);\nif 
(errorCode == null) {\nargListBindingPatterns.add(argEnd);\nargListBindingPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else if (argListBindingPatterns.size() == 0) {\naddInvalidNodeToNextToken(argEnd, null);\naddInvalidNodeToNextToken(currentArg, errorCode);\n} else {\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);\n}\n}\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nprivate boolean isEndOfErrorFieldBindingPatterns() {\nSyntaxKind nextTokenKind = peek().kind;\nswitch (nextTokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn consume();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), currentCtx);\nreturn parseErrorArgsBindingPatternEnd(currentCtx);\n}\n}\nprivate STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {\nswitch (peek().kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase IDENTIFIER_TOKEN:\nSTNode argNameOrSimpleBindingPattern = consume();\nreturn parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ncase CLOSE_PAREN_TOKEN:\nif (isFirstArg) {\nreturn null;\n}\ndefault:\nrecover(peek(), context);\nreturn parseErrorArgListBindingPattern(context, isFirstArg);\n}\n}\nprivate STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nSTNode equal = consume();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern,\nequal, bindingPattern);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);\n}\n}\nprivate DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,\nSyntaxKind currentArgKind) {\nswitch (currentArgKind) {\ncase NAMED_ARG_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\nif (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {\nreturn DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;\n}\nreturn null;\ncase CAPTURE_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\ncase ERROR_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ndefault:\nreturn DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;\n}\n}\n/*\n* This parses Typed binding patterns and deals with ambiguity between types,\n* and binding patterns. 
An example is 'T[a]'.\n* The ambiguity lies in between:\n* 1) Array Type\n* 2) List binding pattern\n* 3) Member access expression.\n*/\n/**\n* Parse the component after the type-desc, of a typed-binding-pattern.\n*\n* @param typeDesc Starting type-desc of the typed-binding-pattern\n* @return Typed-binding pattern\n*/\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {\nreturn parseTypedBindingPatternTypeRhs(typeDesc, context, true);\n}\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase OPEN_BRACKET_TOKEN:\nSTNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);\nassert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;\nreturn typedBindingPattern;\ncase CLOSE_PAREN_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nif (!isRoot) {\nreturn typeDesc;\n}\ndefault:\nrecover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);\nreturn parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);\n}\n}\n/**\n* Parse typed-binding pattern with list, array-type-desc, or member-access-expr.\n*\n* @param typeDescOrExpr Type desc or the expression at the start\n* @param isTypedBindingPattern Is this is a typed-binding-pattern.\n* @return Parsed node\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nif (isBracketedListEnd(peek().kind)) {\nreturn parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);\n}\nSTNode member = parseBracketedListMember(isTypedBindingPattern);\nSyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);\nswitch (currentNodeType) {\ncase ARRAY_TYPE_DESC:\nSTNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);\nreturn typedBindingPattern;\ncase LIST_BINDING_PATTERN:\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase INDEXED_EXPRESSION:\nreturn parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);\ncase NONE:\ndefault:\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd != null) {\nList memberList = new ArrayList<>();\nmemberList.add(getBindingPattern(member));\nmemberList.add(memberEnd);\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, memberList);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,\nisTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {\nmember = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);\nSTNode closeBracket = 
parseCloseBracket();\nendContext();\nSTNode keyExpr = STNodeFactory.createNodeList(member);\nSTNode memberAccessExpr =\nSTNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);\n}\nprivate boolean isBracketedListEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse a member of an ambiguous bracketed list. This member could be:\n* 1) Array length\n* 2) Key expression of a member-access-expr\n* 3) A member-binding pattern of a list-binding-pattern.\n*\n* @param isTypedBindingPattern Is this in a definite typed-binding pattern\n* @return Parsed member node\n*/\nprivate STNode parseBracketedListMember(boolean isTypedBindingPattern) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase DECIMAL_INTEGER_LITERAL_TOKEN:\ncase HEX_INTEGER_LITERAL_TOKEN:\ncase ASTERISK_TOKEN:\ncase STRING_LITERAL_TOKEN:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\ncase ELLIPSIS_TOKEN:\ncase OPEN_BRACKET_TOKEN:\nreturn parseStatementStartBracketedListMember();\ncase IDENTIFIER_TOKEN:\nif (isTypedBindingPattern) {\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\n}\nbreak;\ndefault:\nif ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||\nisQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {\nbreak;\n}\nParserRuleContext recoverContext =\nisTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH\n: ParserRuleContext.BRACKETED_LIST_MEMBER;\nrecover(peek(), recoverContext, isTypedBindingPattern);\nreturn parseBracketedListMember(isTypedBindingPattern);\n}\nSTNode expr = parseExpression();\nif (isWildcardBP(expr)) {\nreturn getWildcardBindingPattern(expr);\n}\nreturn expr;\n}\n/**\n* Treat the current node as an array, and parse the remainder of the binding pattern.\n*\n* @param typeDesc Type-desc\n* @param openBracket Open bracket\n* @param member Member\n* @return Parsed node\n*/\nprivate STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {\ntypeDesc = getTypeDescFromExpr(typeDesc);\ntypeDesc = validateForUsageOfVar(typeDesc);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,\ncontext);\n}\nprivate STNode parseBracketedListMemberEnd() {\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nrecover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);\nreturn parseBracketedListMemberEnd();\n}\n}\n/**\n* We reach here to break ambiguity of T[a]. 
This could be:\n* 1) Array Type Desc\n* 2) Member access on LHS\n* 3) Typed-binding-pattern\n*\n* @param typeDescOrExpr Type name or the expr that precede the open-bracket.\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Open bracket\n* @param isTypedBindingPattern Is this is a typed-binding-pattern.\n* @return Specific node that matches to T[a], after solving ambiguity.\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nSTNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\ncase OPEN_BRACKET_TOKEN:\nif (isTypedBindingPattern) {\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc = createArrayTypeDesc(typeDesc, openBracket, member, closeBracket);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\n}\nSTNode keyExpr = getKeyExpr(member);\nSTNode expr =\nSTNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);\ncase QUESTION_MARK_TOKEN:\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\ntypeDesc = parseComplexTypeDescriptor(arrayTypeDesc,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nreturn parseTypedBindingPatternTypeRhs(typeDesc, context);\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\nreturn parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket,\ncontext, isTypedBindingPattern);\ncase IN_KEYWORD:\nif (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase EQUAL_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nif (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\n}\nkeyExpr = getKeyExpr(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\ncase SEMICOLON_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase CLOSE_BRACE_TOKEN:\ncase COMMA_TOKEN:\nif (context == ParserRuleContext.AMBIGUOUS_STMT) {\nkeyExpr = getKeyExpr(member);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {\nkeyExpr = getKeyExpr(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\nbreak;\n}\nrecover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member, closeBracket,\nisTypedBindingPattern, allowAssignment, context);\nreturn 
parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,\nisTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode getKeyExpr(STNode member) {\nif (member == null) {\nSTToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);\nSTNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);\nreturn STNodeFactory.createNodeList(missingVarRef);\n}\nreturn STNodeFactory.createNodeList(member);\n}\nprivate STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket) {\nSTNode bindingPatterns = STNodeFactory.createEmptyNodeList();\nif (!isEmpty(member)) {\nif (member.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nopenBracket = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracket, member,\nDiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);\n} else {\nSTNode bindingPattern = getBindingPattern(member);\nbindingPatterns = STNodeFactory.createNodeList(bindingPattern);\n}\n}\nSTNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\n/**\n* Parse a union or intersection type-desc/binary-expression that involves ambiguous\n* bracketed list in lhs.\n*
\n* e.g.: (T[a] & R..) or (T[a] | R.. )
\n* Complexity occurs in scenarios such as T[a] |/& R[b]. If the token after this\n* is another binding-pattern, then (T[a] |/& R[b]) becomes the type-desc. However,\n* if the token follows this is an equal or semicolon, then (T[a] |/& R) becomes\n* the type-desc, and [b] becomes the binding pattern.\n*\n* @param typeDescOrExpr Type desc or the expression\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Close bracket\n* @param context COntext in which the typed binding pattern occurs\n* @return Parsed node\n*/\nprivate STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeOrAndToken = parseUnionOrIntersectionToken();\nSTNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);\nif (isTypedBindingPattern || typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);\nlhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);\nSTTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;\nSTNode newTypeDesc;\nif (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {\nnewTypeDesc = createUnionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);\n} else {\nnewTypeDesc =\ncreateIntersectionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);\n}\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);\n} else {\nSTNode keyExpr = getExpression(member);\nSTNode containerExpr = getExpression(typeDescOrExpr);\nSTNode lhsExpr =\nSTNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,\ntypedBindingPatternOrExpr);\n}\n}\nprivate STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {\nif (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\nSTUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);\nlhsTypeDesc = createUnionTypeDesc(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);\n} else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\nSTIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc =\ngetArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);\nlhsTypeDesc = createIntersectionTypeDesc(intersectionTypeDesc.leftTypeDesc,\nintersectionTypeDesc.bitwiseAndToken, middleTypeDesc);\n} else {\nlhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);\n}\nreturn lhsTypeDesc;\n}\n/**\n* Parse union (|) or intersection (&) type operator.\n*\n* @return pipe or bitwise and token\n*/\nprivate STNode parseUnionOrIntersectionToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn consume();\n} else {\nrecover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);\nreturn parseUnionOrIntersectionToken();\n}\n}\n/**\n* Infer the type of the ambiguous bracketed list, based on the type of the member.\n*\n* @param memberNode Member node\n* @return Inferred 
type of the bracketed list\n*/\nprivate SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {\nif (isEmpty(memberNode)) {\nreturn SyntaxKind.NONE;\n}\nif (isDefiniteTypeDesc(memberNode.kind)) {\nreturn SyntaxKind.TUPLE_TYPE_DESC;\n}\nswitch (memberNode.kind) {\ncase ASTERISK_LITERAL:\nreturn SyntaxKind.ARRAY_TYPE_DESC;\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase QUALIFIED_NAME_REFERENCE:\ncase REST_TYPE:\nreturn SyntaxKind.TUPLE_TYPE_DESC;\ncase NUMERIC_LITERAL:\ncase SIMPLE_NAME_REFERENCE:\ncase BRACKETED_LIST:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.NONE;\ncase ERROR_CONSTRUCTOR:\nif (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {\nreturn SyntaxKind.NONE;\n}\nreturn SyntaxKind.INDEXED_EXPRESSION;\ndefault:\nif (isTypedBindingPattern) {\nreturn SyntaxKind.NONE;\n}\nreturn SyntaxKind.INDEXED_EXPRESSION;\n}\n}\n/*\n* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.\n* The ambiguity lies in between:\n* 1) Assignment that starts with list binding pattern\n* 2) Var-decl statement that starts with tuple type\n* 3) Statement that starts with list constructor, such as sync-send, etc.\n*/\n/**\n* Parse any statement that starts with an open-bracket.\n*\n* @param annots Annotations attached to the statement.\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {\nstartContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);\nreturn parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);\n}\nprivate STNode parseMemberBracketedList(boolean possibleMappingField) {\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStatementStartsWithOpenBracket(annots, false, possibleMappingField);\n}\n/**\n* The bracketed list at the start of a statement can be one of the following.\n* 1) List binding pattern\n* 2) Tuple type\n* 3) List constructor\n*\n* @param isRoot Is this the root of the list\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {\nstartContext(ParserRuleContext.STMT_START_BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nwhile (!isBracketedListEnd(peek().kind)) {\nSTNode member = parseStatementStartBracketedListMember();\nSyntaxKind currentNodeType = getStmtStartBracketedListType(member);\nswitch (currentNodeType) {\ncase TUPLE_TYPE_DESC:\nreturn parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\ncase LIST_BINDING_PATTERN:\nreturn parseAsListBindingPattern(openBracket, memberList, member, isRoot);\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nreturn parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);\ncase TUPLE_TYPE_DESC_OR_LIST_CONST:\nreturn parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);\ncase NONE:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, 
closeBracket,\nisRoot, possibleMappingField);\nreturn bracketedList;\n}\n/**\n* Parse a member of a list-binding-pattern, tuple-type-desc, or\n* list-constructor-expr, when the parent is ambiguous.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartBracketedListMember() {\nList typeDescQualifiers = new ArrayList<>();\nreturn parseStatementStartBracketedListMember(typeDescQualifiers);\n}\nprivate STNode parseStatementStartBracketedListMember(List qualifiers) {\nparseTypeDescQualifiers(qualifiers);\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase OPEN_BRACKET_TOKEN:\nreportInvalidQualifierList(qualifiers);\nreturn parseMemberBracketedList(false);\ncase IDENTIFIER_TOKEN:\nreportInvalidQualifierList(qualifiers);\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (isWildcardBP(identifier)) {\nSTNode varName = ((STSimpleNameReferenceNode) identifier).name;\nreturn getWildcardBindingPattern(varName);\n}\nnextToken = peek();\nif (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {\nSTNode ellipsis = parseEllipsis();\nreturn STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n}\nif (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {\nreturn parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);\ncase OPEN_BRACE_TOKEN:\nreportInvalidQualifierList(qualifiers);\nreturn parseMappingBindingPatterOrMappingConstructor();\ncase ERROR_KEYWORD:\nreportInvalidQualifierList(qualifiers);\nSTToken nextNextToken = getNextNextToken();\nif (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||\nnextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn parseErrorBindingPatternOrErrorConstructor();\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase ELLIPSIS_TOKEN:\nreportInvalidQualifierList(qualifiers);\nreturn parseListBindingPatternMember();\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nreportInvalidQualifierList(qualifiers);\nif (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseExpression(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\nreportInvalidQualifierList(qualifiers);\nif (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nreturn parseExpression(false);\ncase OPEN_PAREN_TOKEN:\nreturn parseTypeDescOrExpr(qualifiers);\ncase FUNCTION_KEYWORD:\nreturn parseAnonFuncExprOrFuncTypeDesc(qualifiers);\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreportInvalidQualifierList(qualifiers);\nreturn parseExpression(false);\n}\nif (isTypeStartingToken(nextToken.kind)) {\nreturn parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nrecover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER, qualifiers);\nreturn parseStatementStartBracketedListMember(qualifiers);\n}\n}\nprivate STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List memberList,\nSTNode member, boolean isRoot) {\nmemberList.add(member);\nSTNode memberEnd = parseBracketedListMemberEnd();\nSTNode tupleTypeDescOrListCons;\nif (memberEnd == null) {\nSTNode closeBracket = parseCloseBracket();\ntupleTypeDescOrListCons =\nparseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);\n} else {\nmemberList.add(memberEnd);\ntupleTypeDescOrListCons = 
parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);\n}\nreturn tupleTypeDescOrListCons;\n}\n/**\n* Parse tuple type desc or list constructor.\n*\n* @return Parsed node\n*/\nprivate STNode parseTupleTypeDescOrListConstructor(STNode annots) {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nreturn parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);\n}\nprivate STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List memberList,\nboolean isRoot) {\nSTToken nextToken = peek();\nwhile (!isBracketedListEnd(nextToken.kind)) {\nSTNode member = parseTupleTypeDescOrListConstructorMember(annots);\nSyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);\nswitch (currentNodeType) {\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase TUPLE_TYPE_DESC:\nreturn parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\ncase TUPLE_TYPE_DESC_OR_LIST_CONST:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBracket = parseCloseBracket();\nreturn parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);\n}\nprivate STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseTupleTypeDescOrListConstructor(annots);\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {\nSTNode ellipsis = parseEllipsis();\nreturn STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingConstructorExpr();\ncase ERROR_KEYWORD:\nSTToken nextNextToken = getNextNextToken();\nif (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||\nnextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn parseErrorConstructorExpr(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nif (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseExpression(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\nif (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nreturn parseExpression(false);\ncase OPEN_PAREN_TOKEN:\nreturn parseTypeDescOrExpr();\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreturn parseExpression(false);\n}\nif (isTypeStartingToken(nextToken.kind)) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nrecover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER, annots);\nreturn parseTupleTypeDescOrListConstructorMember(annots);\n}\n}\nprivate SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {\nreturn getStmtStartBracketedListType(memberNode);\n}\nprivate STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List members, STNode closeBracket,\nboolean isRoot) {\nSTNode tupleTypeOrListConst;\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nif (!isRoot) 
{\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(peek().kind, closeBracket.kind) ||\n(isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {\nmembers = getExpressionList(members);\nSTNode memberExpressions = STNodeFactory.createNodeList(members);\ntupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,\nmemberExpressions, closeBracket);\nbreak;\n}\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\nSTNode tupleTypeDesc =\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\ntupleTypeOrListConst =\nparseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n}\nendContext();\nif (!isRoot) {\nreturn tupleTypeOrListConst;\n}\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);\n}", + "context_before": "class member, object member or object member descriptor.\n*
\n* \n* class-member := object-field | method-defn | object-type-inclusion\n*
\n* object-member := object-field | method-defn\n*
\n* object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion\n*
\n*\n* @param context Parsing context of the object member\n* @return Parsed node\n*/\nprivate STNode parseObjectMember(ParserRuleContext context) {\nSTNode metadata;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\ncase FINAL_KEYWORD:\ncase REMOTE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\ncase ISOLATED_KEYWORD:\ncase RESOURCE_KEYWORD:\nmetadata = STNodeFactory.createEmptyNode();\nbreak;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData();\nbreak;\ndefault:\nif (isTypeStartingToken(nextToken.kind)) {\nmetadata = STNodeFactory.createEmptyNode();\nbreak;\n}\nParserRuleContext recoveryCtx;\nif (context == ParserRuleContext.OBJECT_MEMBER) {\nrecoveryCtx = ParserRuleContext.OBJECT_MEMBER_START;\n} else {\nrecoveryCtx = ParserRuleContext.CLASS_MEMBER_START;\n}\nrecover(peek(), recoveryCtx);\nreturn parseObjectMember(context);\n}\nreturn parseObjectMemberWithoutMeta(metadata, context);\n}", + "context_after": "class member, object member or object member descriptor.\n*
\n* \n* class-member := object-field | method-defn | object-type-inclusion\n*
\n* object-member := object-field | method-defn\n*
\n* object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion\n*
\n*\n* @param context Parsing context of the object member\n* @return Parsed node\n*/\nprivate STNode parseObjectMember(ParserRuleContext context) {\nSTNode metadata;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\ncase FINAL_KEYWORD:\ncase REMOTE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\ncase ISOLATED_KEYWORD:\ncase RESOURCE_KEYWORD:\nmetadata = STNodeFactory.createEmptyNode();\nbreak;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData();\nbreak;\ndefault:\nif (isTypeStartingToken(nextToken.kind)) {\nmetadata = STNodeFactory.createEmptyNode();\nbreak;\n}\nParserRuleContext recoveryCtx;\nif (context == ParserRuleContext.OBJECT_MEMBER) {\nrecoveryCtx = ParserRuleContext.OBJECT_MEMBER_START;\n} else {\nrecoveryCtx = ParserRuleContext.CLASS_MEMBER_START;\n}\nrecover(peek(), recoveryCtx);\nreturn parseObjectMember(context);\n}\nreturn parseObjectMemberWithoutMeta(metadata, context);\n}" + }, + { + "comment": "change to empty or end with '_'", + "method_body": "public List getDatabaseNamesWithPrefix(String prefix) {\nif (closing) {\nreturn null;\n}\nList ret = new ArrayList();\nList names = replicatedEnvironment.getDatabaseNames();\nfor (String name : names) {\nif (name.equals(\"epochDB\")) {\ncontinue;\n}\nif (Strings.isNullOrEmpty(prefix)) {\nif (StringUtils.isNumeric(name)) {\nlong db = Long.parseLong(name);\nret.add(db);\n} else {\n}\n} else {\nif (name.startsWith(prefix)) {\nString dbStr = name.substring(prefix.length());\nif (StringUtils.isNumeric(dbStr)) {\nlong db = Long.parseLong(dbStr);\nret.add(db);\n} else {\n}\n} else {\n}\n}\n}\nCollections.sort(ret);\nreturn ret;\n}", + "target_code": "String dbStr = name.substring(prefix.length());", + "method_body_after": "public List getDatabaseNamesWithPrefix(String prefix) {\nif (closing) {\nreturn null;\n}\nList ret = new ArrayList();\nList names = replicatedEnvironment.getDatabaseNames();\nfor (String name : names) {\nif (name.equals(\"epochDB\")) {\ncontinue;\n}\nif (Strings.isNullOrEmpty(prefix)) {\nif (StringUtils.isNumeric(name)) {\nlong db = Long.parseLong(name);\nret.add(db);\n} else {\n}\n} else {\nif (name.startsWith(prefix)) {\nString dbStr = name.substring(prefix.length());\nif (StringUtils.isNumeric(dbStr)) {\nlong db = Long.parseLong(dbStr);\nret.add(db);\n} else {\n}\n} else {\n}\n}\n}\nCollections.sort(ret);\nreturn ret;\n}", + "context_before": "class BDBEnvironment {\nprivate static final Logger LOG = LogManager.getLogger(BDBEnvironment.class);\nprotected static int RETRY_TIME = 3;\nprotected static int SLEEP_INTERVAL_SEC = 5;\nprivate static final int MEMORY_CACHE_PERCENT = 20;\nprivate static final int INITAL_STATE_CHANGE_WAIT_SEC = 10;\npublic static final String STARROCKS_JOURNAL_GROUP = \"PALO_JOURNAL_GROUP\";\nprivate static final String BDB_DIR = \"/bdb\";\nprivate ReplicatedEnvironment replicatedEnvironment;\nprivate EnvironmentConfig environmentConfig;\nprivate ReplicationConfig replicationConfig;\nprivate DatabaseConfig dbConfig;\nprivate TransactionConfig txnConfig;\nprivate CloseSafeDatabase epochDB = null;\nprivate ReplicationGroupAdmin replicationGroupAdmin = null;\nprivate ReentrantReadWriteLock lock;\nprivate List openedDatabases;\nprivate volatile boolean closing = false;\nprivate final File envHome;\nprivate final String selfNodeName;\nprivate final String selfNodeHostPort;\nprivate final String helperHostPort;\nprivate final boolean 
isElectable;\n/**\n* init & return bdb environment\n* @param nodeName\n* @return\n* @throws JournalException\n*/\npublic static BDBEnvironment initBDBEnvironment(String nodeName) throws JournalException, InterruptedException {\nPair selfNode = GlobalStateMgr.getCurrentState().getSelfNode();\ntry {\nif (NetUtils.isPortUsing(selfNode.first, selfNode.second)) {\nString errMsg = String.format(\"edit_log_port %d is already in use. will exit.\", selfNode.second);\nLOG.error(errMsg);\nthrow new JournalException(errMsg);\n}\n} catch (IOException e) {\nString errMsg = String.format(\"failed to check if %s:%s is used!\", selfNode.first, selfNode.second);\nLOG.error(errMsg, e);\nJournalException journalException = new JournalException(errMsg);\njournalException.initCause(e);\nthrow journalException;\n}\nString selfNodeHostPort = selfNode.first + \":\" + selfNode.second;\nFile dbEnv = new File(getBdbDir());\nif (!dbEnv.exists()) {\ndbEnv.mkdirs();\n}\nPair helperNode = GlobalStateMgr.getCurrentState().getHelperNode();\nString helperHostPort = helperNode.first + \":\" + helperNode.second;\nBDBEnvironment bdbEnvironment = new BDBEnvironment(dbEnv, nodeName, selfNodeHostPort,\nhelperHostPort, GlobalStateMgr.getCurrentState().isElectable());\nbdbEnvironment.setup();\nreturn bdbEnvironment;\n}\npublic static String getBdbDir() {\nreturn Config.meta_dir + BDB_DIR;\n}\nprotected BDBEnvironment(File envHome, String selfNodeName, String selfNodeHostPort,\nString helperHostPort, boolean isElectable) {\nthis.envHome = envHome;\nthis.selfNodeName = selfNodeName;\nthis.selfNodeHostPort = selfNodeHostPort;\nthis.helperHostPort = helperHostPort;\nthis.isElectable = isElectable;\nopenedDatabases = new ArrayList<>();\nthis.lock = new ReentrantReadWriteLock(true);\n}\nprotected void setup() throws JournalException, InterruptedException {\nthis.closing = false;\nensureHelperInLocal();\ninitConfigs(isElectable);\nsetupEnvironment();\n}\nprotected void initConfigs(boolean isElectable) throws JournalException {\nif (Config.metadata_failure_recovery.equals(\"true\")) {\nif (!isElectable) {\nString errMsg = \"Current node is not in the electable_nodes list. 
will exit\";\nLOG.error(errMsg);\nthrow new JournalException(errMsg);\n}\nDbResetRepGroup resetUtility = new DbResetRepGroup(envHome, STARROCKS_JOURNAL_GROUP, selfNodeName,\nselfNodeHostPort);\nresetUtility.reset();\nLOG.info(\"group has been reset.\");\n}\nreplicationConfig = new ReplicationConfig();\nreplicationConfig.setNodeName(selfNodeName);\nreplicationConfig.setNodeHostPort(selfNodeHostPort);\nreplicationConfig.setHelperHosts(helperHostPort);\nreplicationConfig.setGroupName(STARROCKS_JOURNAL_GROUP);\nreplicationConfig.setConfigParam(ReplicationConfig.ENV_UNKNOWN_STATE_TIMEOUT, \"10\");\nreplicationConfig.setMaxClockDelta(Config.max_bdbje_clock_delta_ms, TimeUnit.MILLISECONDS);\nreplicationConfig.setConfigParam(ReplicationConfig.TXN_ROLLBACK_LIMIT,\nString.valueOf(Config.txn_rollback_limit));\nreplicationConfig\n.setConfigParam(ReplicationConfig.REPLICA_TIMEOUT, Config.bdbje_heartbeat_timeout_second + \" s\");\nreplicationConfig\n.setConfigParam(ReplicationConfig.FEEDER_TIMEOUT, Config.bdbje_heartbeat_timeout_second + \" s\");\nreplicationConfig\n.setConfigParam(ReplicationConfig.REPLAY_COST_PERCENT,\nString.valueOf(Config.bdbje_replay_cost_percent));\nif (isElectable) {\nreplicationConfig.setReplicaAckTimeout(Config.bdbje_replica_ack_timeout_second, TimeUnit.SECONDS);\nreplicationConfig.setConfigParam(ReplicationConfig.REPLICA_MAX_GROUP_COMMIT, \"0\");\nreplicationConfig.setConsistencyPolicy(new NoConsistencyRequiredPolicy());\n} else {\nreplicationConfig.setNodeType(NodeType.SECONDARY);\nreplicationConfig.setConsistencyPolicy(new NoConsistencyRequiredPolicy());\n}\njava.util.logging.Logger parent = java.util.logging.Logger.getLogger(\"com.sleepycat.je\");\nparent.setLevel(Level.parse(Config.bdbje_log_level));\nenvironmentConfig = new EnvironmentConfig();\nenvironmentConfig.setTransactional(true);\nenvironmentConfig.setAllowCreate(true);\nenvironmentConfig.setCachePercent(MEMORY_CACHE_PERCENT);\nenvironmentConfig.setLockTimeout(Config.bdbje_lock_timeout_second, TimeUnit.SECONDS);\nenvironmentConfig.setConfigParam(EnvironmentConfig.FILE_LOGGING_LEVEL, Config.bdbje_log_level);\nenvironmentConfig.setConfigParam(EnvironmentConfig.CLEANER_THREADS,\nString.valueOf(Config.bdbje_cleaner_threads));\nif (isElectable) {\nDurability durability = new Durability(getSyncPolicy(Config.master_sync_policy),\ngetSyncPolicy(Config.replica_sync_policy), getAckPolicy(Config.replica_ack_policy));\nenvironmentConfig.setDurability(durability);\n}\ndbConfig = new DatabaseConfig();\ndbConfig.setTransactional(true);\nif (isElectable) {\ndbConfig.setAllowCreate(true);\ndbConfig.setReadOnly(false);\n} else {\ndbConfig.setAllowCreate(false);\ndbConfig.setReadOnly(true);\n}\ntxnConfig = new TransactionConfig();\nif (isElectable) {\ntxnConfig.setDurability(new Durability(\ngetSyncPolicy(Config.master_sync_policy),\ngetSyncPolicy(Config.replica_sync_policy),\ngetAckPolicy(Config.replica_ack_policy)));\n}\n}\nprotected void setupEnvironment() throws JournalException, InterruptedException {\nJournalException exception = null;\nfor (int i = 0; i < RETRY_TIME; i++) {\nif (i > 0) {\nThread.sleep(SLEEP_INTERVAL_SEC * 1000);\n}\ntry {\nLOG.info(\"start to setup bdb environment for {} times\", i + 1);\nreplicatedEnvironment = new ReplicatedEnvironment(envHome, replicationConfig, environmentConfig);\nSet adminNodes = new HashSet();\nHostAndPort helperAddress = HostAndPort.fromString(helperHostPort);\nInetSocketAddress helper = new 
InetSocketAddress(helperAddress.getHost(),\nhelperAddress.getPort());\nadminNodes.add(helper);\nLOG.info(\"add helper[{}] as ReplicationGroupAdmin\", helperHostPort);\nif (!selfNodeHostPort.equals(helperHostPort) && isElectable) {\nHostAndPort selfNodeAddress = HostAndPort.fromString(selfNodeHostPort);\nInetSocketAddress self = new InetSocketAddress(selfNodeAddress.getHost(),\nselfNodeAddress.getPort());\nadminNodes.add(self);\nLOG.info(\"add self[{}] as ReplicationGroupAdmin\", selfNodeHostPort);\n}\nreplicationGroupAdmin = new ReplicationGroupAdmin(STARROCKS_JOURNAL_GROUP, adminNodes);\nHAProtocol protocol = new BDBHA(this, selfNodeName);\nGlobalStateMgr.getCurrentState().setHaProtocol(protocol);\nBDBStateChangeListener listener = new BDBStateChangeListener(isElectable);\nreplicatedEnvironment.setStateChangeListener(listener);\nLOG.info(\"replicated environment is all set, wait for state change...\");\nfor (int j = 0; j < INITAL_STATE_CHANGE_WAIT_SEC; j++) {\nif (FrontendNodeType.UNKNOWN != listener.getNewType()) {\nbreak;\n}\nThread.sleep(1000);\n}\nLOG.info(\"state change done, current role {}\", listener.getNewType());\nepochDB = new CloseSafeDatabase(replicatedEnvironment.openDatabase(null, \"epochDB\", dbConfig));\nLOG.info(\"end setup bdb environment after {} times\", i + 1);\nreturn;\n} catch (RestartRequiredException e) {\nString errMsg = String.format(\n\"catch a RestartRequiredException when setup environment after retried %d times, refresh and setup again\",\ni + 1);\nLOG.warn(errMsg, e);\nexception = new JournalException(errMsg);\nexception.initCause(e);\nif (e instanceof InsufficientLogException) {\nrefreshLog((InsufficientLogException) e);\n}\nclose();\n} catch (DatabaseException e) {\nif (i == 0 && e instanceof UnknownMasterException) {\nLOG.warn(\"failed to setup environment because of UnknowMasterException for the first time, ignore it.\");\n} else {\nString errMsg = String.format(\"failed to setup environment after retried %d times\", i + 1);\nLOG.error(errMsg, e);\nexception = new JournalException(errMsg);\nexception.initCause(e);\n}\n}\n}\nthrow exception;\n}\n/**\n* This method is used to check if the local replicated environment matches that of the helper.\n* This could happen in a situation like this:\n* 1. User adds a follower and starts the new follower without helper.\n* --> The new follower will run as a master in a standalone environment.\n* 2. 
User restarts this follower with a helper.\n* --> Sometimes this new follower will join the group successfully, making master crash.\n*\n* This method only init the replicated environment through a handshake.\n* It will not read or write any data.\n*/\nprotected void ensureHelperInLocal() throws JournalException, InterruptedException {\nif (!isElectable) {\nLOG.info(\"skip check local environment for observer\");\nreturn;\n}\nif (selfNodeHostPort.equals(helperHostPort)) {\nLOG.info(\"skip check local environment because helper node and local node are identical.\");\nreturn;\n}\nif (Config.metadata_failure_recovery.equals(\"true\")) {\nLOG.info(\"skip check local environment because metadata_failure_recovery = true\");\nreturn;\n}\nLOG.info(\"start to check if local replica environment from {} contains {}\", envHome, helperHostPort);\ninitConfigs(false);\nHostAndPort hostAndPort = HostAndPort.fromString(helperHostPort);\nJournalException exception = null;\nfor (int i = 0; i < RETRY_TIME; i++) {\nif (i > 0) {\nThread.sleep(SLEEP_INTERVAL_SEC * 1000);\n}\ntry {\nreplicatedEnvironment = new ReplicatedEnvironment(envHome, replicationConfig, environmentConfig);\nSet localNodes = replicatedEnvironment.getGroup().getNodes();\nif (localNodes.isEmpty()) {\nLOG.info(\"skip check empty environment\");\nreturn;\n}\nfor (ReplicationNode node : localNodes) {\nif (node.getHostName().equals(hostAndPort.getHost()) && node.getPort() == hostAndPort.getPort()) {\nLOG.info(\"found {} in local environment!\", helperHostPort);\nreturn;\n}\n}\nthrow new JournalException(\nString.format(\"bad environment %s! helper host %s not in local %s\",\nenvHome, helperHostPort, localNodes));\n} catch (RestartRequiredException e) {\nString errMsg = String.format(\n\"catch a RestartRequiredException when checking if helper in local after retried %d times, \" +\n\"refresh and check again\",\ni + 1);\nLOG.warn(errMsg, e);\nexception = new JournalException(errMsg);\nexception.initCause(e);\nif (e instanceof InsufficientLogException) {\nrefreshLog((InsufficientLogException) e);\n}\n} catch (DatabaseException e) {\nif (i == 0 && e instanceof UnknownMasterException) {\nLOG.warn(\n\"failed to check if helper in local because of UnknowMasterException for the first time, ignore it.\");\n} else {\nString errMsg = String.format(\"failed to check if helper in local after retried %d times\", i + 1);\nLOG.error(errMsg, e);\nexception = new JournalException(errMsg);\nexception.initCause(e);\n}\n} finally {\nif (replicatedEnvironment != null) {\nreplicatedEnvironment.close();\n}\n}\n}\nthrow exception;\n}\npublic void refreshLog(InsufficientLogException insufficientLogEx) {\ntry {\nNetworkRestore restore = new NetworkRestore();\nNetworkRestoreConfig config = new NetworkRestoreConfig();\nconfig.setRetainLogFiles(false);\nrestore.execute(insufficientLogEx, config);\n} catch (Throwable t) {\nLOG.warn(\"refresh log failed\", t);\n}\n}\npublic ReplicationGroupAdmin getReplicationGroupAdmin() {\nreturn this.replicationGroupAdmin;\n}\npublic void setNewReplicationGroupAdmin(Set newHelperNodes) {\nthis.replicationGroupAdmin = new ReplicationGroupAdmin(STARROCKS_JOURNAL_GROUP, newHelperNodes);\n}\npublic CloseSafeDatabase getEpochDB() {\nreturn epochDB;\n}\npublic ReplicatedEnvironment getReplicatedEnvironment() {\nreturn replicatedEnvironment;\n}\npublic CloseSafeDatabase openDatabase(String dbName) {\nCloseSafeDatabase db = null;\nlock.writeLock().lock();\ntry {\nif (closing) {\nreturn null;\n}\nfor (java.util.Iterator iter = 
openedDatabases.iterator(); iter.hasNext(); ) {\nCloseSafeDatabase openedDb = iter.next();\ntry {\nif (openedDb.getDb().getDatabaseName() == null) {\nopenedDb.close();\niter.remove();\ncontinue;\n}\n} catch (Exception e) {\n/*\n* In the case when 3 FE (1 master and 2 followers) start at same time,\n* We may catch com.sleepycat.je.rep.DatabasePreemptedException which said that\n* \"Database xx has been forcibly closed in order to apply a replicated remove operation.\"\n*\n* Because when Master FE finished to save image, it try to remove old journals,\n* and also remove the databases these old journals belongs to.\n* So after Master removed the database from replicatedEnvironment,\n* call db.getDatabaseName() will throw DatabasePreemptedException,\n* because it has already been destroyed.\n*\n* The reason why Master can safely remove a database is because it knows that all\n* non-master FE have already load the journal ahead of this database. So remove the\n* database is safe.\n*\n* Here we just try to close the useless database(which may be removed by Master),\n* so even we catch the exception, just ignore it is OK.\n*/\nLOG.warn(\"get exception when try to close previously opened bdb database. ignore it\", e);\niter.remove();\ncontinue;\n}\nif (openedDb.getDb().getDatabaseName().equals(dbName)) {\nreturn openedDb;\n}\n}\ntry {\ndb = new CloseSafeDatabase(replicatedEnvironment.openDatabase(null, dbName, dbConfig));\nopenedDatabases.add(db);\nLOG.info(\"successfully open new db {}\", db);\n} catch (Exception e) {\nLOG.warn(\"catch an exception when open database {}\", dbName, e);\n}\n} finally {\nlock.writeLock().unlock();\n}\nreturn db;\n}\npublic void removeDatabase(String dbName) {\nlock.writeLock().lock();\ntry {\nif (closing) {\nreturn;\n}\nString targetDbName = null;\nint index = 0;\nfor (CloseSafeDatabase db : openedDatabases) {\nString name = db.getDb().getDatabaseName();\nif (dbName.equals(name)) {\ndb.close();\nLOG.info(\"database {} has been closed\", name);\ntargetDbName = name;\nbreak;\n}\nindex++;\n}\nif (targetDbName != null) {\nLOG.info(\"begin to remove database {} from openedDatabases\", targetDbName);\nopenedDatabases.remove(index);\n}\ntry {\nLOG.info(\"begin to remove database {} from replicatedEnviroment\", dbName);\nreplicatedEnvironment.removeDatabase(null, dbName);\n} catch (DatabaseNotFoundException e) {\nLOG.warn(\"catch an exception when remove db:{}, this db does not exist\", dbName, e);\n}\n} finally {\nlock.writeLock().unlock();\n}\n}\npublic List getDatabaseNames() {\nreturn getDatabaseNamesWithPrefix(\"\");\n}\npublic boolean close() {\nboolean closeSuccess = true;\nlock.writeLock().lock();\ntry {\nclosing = true;\nLOG.info(\"start to close log databases\");\nfor (CloseSafeDatabase db : openedDatabases) {\ntry {\ndb.close();\n} catch (DatabaseException exception) {\nLOG.error(\"Error closing db {}\", db.getDatabaseName(), exception);\ncloseSuccess = false;\n}\n}\nLOG.info(\"close log databases end\");\nopenedDatabases.clear();\nLOG.info(\"start to close epoch database\");\nif (epochDB != null) {\ntry {\nepochDB.close();\n} catch (DatabaseException exception) {\nLOG.error(\"Error closing db {}\", epochDB.getDatabaseName(), exception);\ncloseSuccess = false;\n}\n}\nLOG.info(\"close epoch database end\");\nLOG.info(\"start to close replicated environment\");\nif (replicatedEnvironment != null) {\ntry {\nreplicatedEnvironment.close();\n} catch (DatabaseException exception) {\nLOG.error(\"Error closing replicatedEnvironment\", exception);\ncloseSuccess = 
false;\n}\n}\nLOG.info(\"close replicated environment end\");\n} finally {\nclosing = false;\nlock.writeLock().unlock();\n}\nreturn closeSuccess;\n}\npublic void flushVLSNMapping() {\nif (replicatedEnvironment != null) {\nRepInternal.getRepImpl(replicatedEnvironment).getVLSNIndex()\n.flushToDatabase(Durability.COMMIT_SYNC);\n}\n}\nprivate SyncPolicy getSyncPolicy(String policy) {\nif (policy.equalsIgnoreCase(\"SYNC\")) {\nreturn Durability.SyncPolicy.SYNC;\n}\nif (policy.equalsIgnoreCase(\"NO_SYNC\")) {\nreturn Durability.SyncPolicy.NO_SYNC;\n}\nreturn Durability.SyncPolicy.WRITE_NO_SYNC;\n}\nprivate ReplicaAckPolicy getAckPolicy(String policy) {\nif (policy.equalsIgnoreCase(\"ALL\")) {\nreturn Durability.ReplicaAckPolicy.ALL;\n}\nif (policy.equalsIgnoreCase(\"NONE\")) {\nreturn Durability.ReplicaAckPolicy.NONE;\n}\nreturn Durability.ReplicaAckPolicy.SIMPLE_MAJORITY;\n}\n/**\n* package private, used within com.starrocks.journal.bdbje\n*/\nTransactionConfig getTxnConfig() {\nreturn txnConfig;\n}\n}", + "context_after": "class BDBEnvironment {\nprivate static final Logger LOG = LogManager.getLogger(BDBEnvironment.class);\nprotected static int RETRY_TIME = 3;\nprotected static int SLEEP_INTERVAL_SEC = 5;\nprivate static final int MEMORY_CACHE_PERCENT = 20;\nprivate static final int INITAL_STATE_CHANGE_WAIT_SEC = 10;\npublic static final String STARROCKS_JOURNAL_GROUP = \"PALO_JOURNAL_GROUP\";\nprivate static final String BDB_DIR = \"/bdb\";\nprivate ReplicatedEnvironment replicatedEnvironment;\nprivate EnvironmentConfig environmentConfig;\nprivate ReplicationConfig replicationConfig;\nprivate DatabaseConfig dbConfig;\nprivate TransactionConfig txnConfig;\nprivate CloseSafeDatabase epochDB = null;\nprivate ReplicationGroupAdmin replicationGroupAdmin = null;\nprivate ReentrantReadWriteLock lock;\nprivate List openedDatabases;\nprivate volatile boolean closing = false;\nprivate final File envHome;\nprivate final String selfNodeName;\nprivate final String selfNodeHostPort;\nprivate final String helperHostPort;\nprivate final boolean isElectable;\n/**\n* init & return bdb environment\n* @param nodeName\n* @return\n* @throws JournalException\n*/\npublic static BDBEnvironment initBDBEnvironment(String nodeName) throws JournalException, InterruptedException {\nPair selfNode = GlobalStateMgr.getCurrentState().getSelfNode();\ntry {\nif (NetUtils.isPortUsing(selfNode.first, selfNode.second)) {\nString errMsg = String.format(\"edit_log_port %d is already in use. 
will exit.\", selfNode.second);\nLOG.error(errMsg);\nthrow new JournalException(errMsg);\n}\n} catch (IOException e) {\nString errMsg = String.format(\"failed to check if %s:%s is used!\", selfNode.first, selfNode.second);\nLOG.error(errMsg, e);\nJournalException journalException = new JournalException(errMsg);\njournalException.initCause(e);\nthrow journalException;\n}\nString selfNodeHostPort = selfNode.first + \":\" + selfNode.second;\nFile dbEnv = new File(getBdbDir());\nif (!dbEnv.exists()) {\ndbEnv.mkdirs();\n}\nPair helperNode = GlobalStateMgr.getCurrentState().getHelperNode();\nString helperHostPort = helperNode.first + \":\" + helperNode.second;\nBDBEnvironment bdbEnvironment = new BDBEnvironment(dbEnv, nodeName, selfNodeHostPort,\nhelperHostPort, GlobalStateMgr.getCurrentState().isElectable());\nbdbEnvironment.setup();\nreturn bdbEnvironment;\n}\npublic static String getBdbDir() {\nreturn Config.meta_dir + BDB_DIR;\n}\nprotected BDBEnvironment(File envHome, String selfNodeName, String selfNodeHostPort,\nString helperHostPort, boolean isElectable) {\nthis.envHome = envHome;\nthis.selfNodeName = selfNodeName;\nthis.selfNodeHostPort = selfNodeHostPort;\nthis.helperHostPort = helperHostPort;\nthis.isElectable = isElectable;\nopenedDatabases = new ArrayList<>();\nthis.lock = new ReentrantReadWriteLock(true);\n}\nprotected void setup() throws JournalException, InterruptedException {\nthis.closing = false;\nensureHelperInLocal();\ninitConfigs(isElectable);\nsetupEnvironment();\n}\nprotected void initConfigs(boolean isElectable) throws JournalException {\nif (Config.metadata_failure_recovery.equals(\"true\")) {\nif (!isElectable) {\nString errMsg = \"Current node is not in the electable_nodes list. will exit\";\nLOG.error(errMsg);\nthrow new JournalException(errMsg);\n}\nDbResetRepGroup resetUtility = new DbResetRepGroup(envHome, STARROCKS_JOURNAL_GROUP, selfNodeName,\nselfNodeHostPort);\nresetUtility.reset();\nLOG.info(\"group has been reset.\");\n}\nreplicationConfig = new ReplicationConfig();\nreplicationConfig.setNodeName(selfNodeName);\nreplicationConfig.setNodeHostPort(selfNodeHostPort);\nreplicationConfig.setHelperHosts(helperHostPort);\nreplicationConfig.setGroupName(STARROCKS_JOURNAL_GROUP);\nreplicationConfig.setConfigParam(ReplicationConfig.ENV_UNKNOWN_STATE_TIMEOUT, \"10\");\nreplicationConfig.setMaxClockDelta(Config.max_bdbje_clock_delta_ms, TimeUnit.MILLISECONDS);\nreplicationConfig.setConfigParam(ReplicationConfig.TXN_ROLLBACK_LIMIT,\nString.valueOf(Config.txn_rollback_limit));\nreplicationConfig\n.setConfigParam(ReplicationConfig.REPLICA_TIMEOUT, Config.bdbje_heartbeat_timeout_second + \" s\");\nreplicationConfig\n.setConfigParam(ReplicationConfig.FEEDER_TIMEOUT, Config.bdbje_heartbeat_timeout_second + \" s\");\nreplicationConfig\n.setConfigParam(ReplicationConfig.REPLAY_COST_PERCENT,\nString.valueOf(Config.bdbje_replay_cost_percent));\nif (isElectable) {\nreplicationConfig.setReplicaAckTimeout(Config.bdbje_replica_ack_timeout_second, TimeUnit.SECONDS);\nreplicationConfig.setConfigParam(ReplicationConfig.REPLICA_MAX_GROUP_COMMIT, \"0\");\nreplicationConfig.setConsistencyPolicy(new NoConsistencyRequiredPolicy());\n} else {\nreplicationConfig.setNodeType(NodeType.SECONDARY);\nreplicationConfig.setConsistencyPolicy(new NoConsistencyRequiredPolicy());\n}\njava.util.logging.Logger parent = java.util.logging.Logger.getLogger(\"com.sleepycat.je\");\nparent.setLevel(Level.parse(Config.bdbje_log_level));\nenvironmentConfig = new 
EnvironmentConfig();\nenvironmentConfig.setTransactional(true);\nenvironmentConfig.setAllowCreate(true);\nenvironmentConfig.setCachePercent(MEMORY_CACHE_PERCENT);\nenvironmentConfig.setLockTimeout(Config.bdbje_lock_timeout_second, TimeUnit.SECONDS);\nenvironmentConfig.setConfigParam(EnvironmentConfig.FILE_LOGGING_LEVEL, Config.bdbje_log_level);\nenvironmentConfig.setConfigParam(EnvironmentConfig.CLEANER_THREADS,\nString.valueOf(Config.bdbje_cleaner_threads));\nif (isElectable) {\nDurability durability = new Durability(getSyncPolicy(Config.master_sync_policy),\ngetSyncPolicy(Config.replica_sync_policy), getAckPolicy(Config.replica_ack_policy));\nenvironmentConfig.setDurability(durability);\n}\ndbConfig = new DatabaseConfig();\ndbConfig.setTransactional(true);\nif (isElectable) {\ndbConfig.setAllowCreate(true);\ndbConfig.setReadOnly(false);\n} else {\ndbConfig.setAllowCreate(false);\ndbConfig.setReadOnly(true);\n}\ntxnConfig = new TransactionConfig();\nif (isElectable) {\ntxnConfig.setDurability(new Durability(\ngetSyncPolicy(Config.master_sync_policy),\ngetSyncPolicy(Config.replica_sync_policy),\ngetAckPolicy(Config.replica_ack_policy)));\n}\n}\nprotected void setupEnvironment() throws JournalException, InterruptedException {\nJournalException exception = null;\nfor (int i = 0; i < RETRY_TIME; i++) {\nif (i > 0) {\nThread.sleep(SLEEP_INTERVAL_SEC * 1000);\n}\ntry {\nLOG.info(\"start to setup bdb environment for {} times\", i + 1);\nreplicatedEnvironment = new ReplicatedEnvironment(envHome, replicationConfig, environmentConfig);\nSet adminNodes = new HashSet();\nHostAndPort helperAddress = HostAndPort.fromString(helperHostPort);\nInetSocketAddress helper = new InetSocketAddress(helperAddress.getHost(),\nhelperAddress.getPort());\nadminNodes.add(helper);\nLOG.info(\"add helper[{}] as ReplicationGroupAdmin\", helperHostPort);\nif (!selfNodeHostPort.equals(helperHostPort) && isElectable) {\nHostAndPort selfNodeAddress = HostAndPort.fromString(selfNodeHostPort);\nInetSocketAddress self = new InetSocketAddress(selfNodeAddress.getHost(),\nselfNodeAddress.getPort());\nadminNodes.add(self);\nLOG.info(\"add self[{}] as ReplicationGroupAdmin\", selfNodeHostPort);\n}\nreplicationGroupAdmin = new ReplicationGroupAdmin(STARROCKS_JOURNAL_GROUP, adminNodes);\nHAProtocol protocol = new BDBHA(this, selfNodeName);\nGlobalStateMgr.getCurrentState().setHaProtocol(protocol);\nBDBStateChangeListener listener = new BDBStateChangeListener(isElectable);\nreplicatedEnvironment.setStateChangeListener(listener);\nLOG.info(\"replicated environment is all set, wait for state change...\");\nfor (int j = 0; j < INITAL_STATE_CHANGE_WAIT_SEC; j++) {\nif (FrontendNodeType.UNKNOWN != listener.getNewType()) {\nbreak;\n}\nThread.sleep(1000);\n}\nLOG.info(\"state change done, current role {}\", listener.getNewType());\nepochDB = new CloseSafeDatabase(replicatedEnvironment.openDatabase(null, \"epochDB\", dbConfig));\nLOG.info(\"end setup bdb environment after {} times\", i + 1);\nreturn;\n} catch (RestartRequiredException e) {\nString errMsg = String.format(\n\"catch a RestartRequiredException when setup environment after retried %d times, refresh and setup again\",\ni + 1);\nLOG.warn(errMsg, e);\nexception = new JournalException(errMsg);\nexception.initCause(e);\nif (e instanceof InsufficientLogException) {\nrefreshLog((InsufficientLogException) e);\n}\nclose();\n} catch (DatabaseException e) {\nif (i == 0 && e instanceof UnknownMasterException) {\nLOG.warn(\"failed to setup environment because of UnknowMasterException for the 
first time, ignore it.\");\n} else {\nString errMsg = String.format(\"failed to setup environment after retried %d times\", i + 1);\nLOG.error(errMsg, e);\nexception = new JournalException(errMsg);\nexception.initCause(e);\n}\n}\n}\nthrow exception;\n}\n/**\n* This method is used to check if the local replicated environment matches that of the helper.\n* This could happen in a situation like this:\n* 1. User adds a follower and starts the new follower without helper.\n* --> The new follower will run as a master in a standalone environment.\n* 2. User restarts this follower with a helper.\n* --> Sometimes this new follower will join the group successfully, making master crash.\n*\n* This method only init the replicated environment through a handshake.\n* It will not read or write any data.\n*/\nprotected void ensureHelperInLocal() throws JournalException, InterruptedException {\nif (!isElectable) {\nLOG.info(\"skip check local environment for observer\");\nreturn;\n}\nif (selfNodeHostPort.equals(helperHostPort)) {\nLOG.info(\"skip check local environment because helper node and local node are identical.\");\nreturn;\n}\nif (Config.metadata_failure_recovery.equals(\"true\")) {\nLOG.info(\"skip check local environment because metadata_failure_recovery = true\");\nreturn;\n}\nLOG.info(\"start to check if local replica environment from {} contains {}\", envHome, helperHostPort);\ninitConfigs(false);\nHostAndPort hostAndPort = HostAndPort.fromString(helperHostPort);\nJournalException exception = null;\nfor (int i = 0; i < RETRY_TIME; i++) {\nif (i > 0) {\nThread.sleep(SLEEP_INTERVAL_SEC * 1000);\n}\ntry {\nreplicatedEnvironment = new ReplicatedEnvironment(envHome, replicationConfig, environmentConfig);\nSet localNodes = replicatedEnvironment.getGroup().getNodes();\nif (localNodes.isEmpty()) {\nLOG.info(\"skip check empty environment\");\nreturn;\n}\nfor (ReplicationNode node : localNodes) {\nif (node.getHostName().equals(hostAndPort.getHost()) && node.getPort() == hostAndPort.getPort()) {\nLOG.info(\"found {} in local environment!\", helperHostPort);\nreturn;\n}\n}\nthrow new JournalException(\nString.format(\"bad environment %s! 
helper host %s not in local %s\",\nenvHome, helperHostPort, localNodes));\n} catch (RestartRequiredException e) {\nString errMsg = String.format(\n\"catch a RestartRequiredException when checking if helper in local after retried %d times, \" +\n\"refresh and check again\",\ni + 1);\nLOG.warn(errMsg, e);\nexception = new JournalException(errMsg);\nexception.initCause(e);\nif (e instanceof InsufficientLogException) {\nrefreshLog((InsufficientLogException) e);\n}\n} catch (DatabaseException e) {\nif (i == 0 && e instanceof UnknownMasterException) {\nLOG.warn(\n\"failed to check if helper in local because of UnknowMasterException for the first time, ignore it.\");\n} else {\nString errMsg = String.format(\"failed to check if helper in local after retried %d times\", i + 1);\nLOG.error(errMsg, e);\nexception = new JournalException(errMsg);\nexception.initCause(e);\n}\n} finally {\nif (replicatedEnvironment != null) {\nreplicatedEnvironment.close();\n}\n}\n}\nthrow exception;\n}\npublic void refreshLog(InsufficientLogException insufficientLogEx) {\ntry {\nNetworkRestore restore = new NetworkRestore();\nNetworkRestoreConfig config = new NetworkRestoreConfig();\nconfig.setRetainLogFiles(false);\nrestore.execute(insufficientLogEx, config);\n} catch (Throwable t) {\nLOG.warn(\"refresh log failed\", t);\n}\n}\npublic ReplicationGroupAdmin getReplicationGroupAdmin() {\nreturn this.replicationGroupAdmin;\n}\npublic void setNewReplicationGroupAdmin(Set newHelperNodes) {\nthis.replicationGroupAdmin = new ReplicationGroupAdmin(STARROCKS_JOURNAL_GROUP, newHelperNodes);\n}\npublic CloseSafeDatabase getEpochDB() {\nreturn epochDB;\n}\npublic ReplicatedEnvironment getReplicatedEnvironment() {\nreturn replicatedEnvironment;\n}\npublic CloseSafeDatabase openDatabase(String dbName) {\nCloseSafeDatabase db = null;\nlock.writeLock().lock();\ntry {\nif (closing) {\nreturn null;\n}\nfor (java.util.Iterator iter = openedDatabases.iterator(); iter.hasNext(); ) {\nCloseSafeDatabase openedDb = iter.next();\ntry {\nif (openedDb.getDb().getDatabaseName() == null) {\nopenedDb.close();\niter.remove();\ncontinue;\n}\n} catch (Exception e) {\n/*\n* In the case when 3 FE (1 master and 2 followers) start at same time,\n* We may catch com.sleepycat.je.rep.DatabasePreemptedException which said that\n* \"Database xx has been forcibly closed in order to apply a replicated remove operation.\"\n*\n* Because when Master FE finished to save image, it try to remove old journals,\n* and also remove the databases these old journals belongs to.\n* So after Master removed the database from replicatedEnvironment,\n* call db.getDatabaseName() will throw DatabasePreemptedException,\n* because it has already been destroyed.\n*\n* The reason why Master can safely remove a database is because it knows that all\n* non-master FE have already load the journal ahead of this database. So remove the\n* database is safe.\n*\n* Here we just try to close the useless database(which may be removed by Master),\n* so even we catch the exception, just ignore it is OK.\n*/\nLOG.warn(\"get exception when try to close previously opened bdb database. 
ignore it\", e);\niter.remove();\ncontinue;\n}\nif (openedDb.getDb().getDatabaseName().equals(dbName)) {\nreturn openedDb;\n}\n}\ntry {\ndb = new CloseSafeDatabase(replicatedEnvironment.openDatabase(null, dbName, dbConfig));\nopenedDatabases.add(db);\nLOG.info(\"successfully open new db {}\", db);\n} catch (Exception e) {\nLOG.warn(\"catch an exception when open database {}\", dbName, e);\n}\n} finally {\nlock.writeLock().unlock();\n}\nreturn db;\n}\npublic void removeDatabase(String dbName) {\nlock.writeLock().lock();\ntry {\nif (closing) {\nreturn;\n}\nString targetDbName = null;\nint index = 0;\nfor (CloseSafeDatabase db : openedDatabases) {\nString name = db.getDb().getDatabaseName();\nif (dbName.equals(name)) {\ndb.close();\nLOG.info(\"database {} has been closed\", name);\ntargetDbName = name;\nbreak;\n}\nindex++;\n}\nif (targetDbName != null) {\nLOG.info(\"begin to remove database {} from openedDatabases\", targetDbName);\nopenedDatabases.remove(index);\n}\ntry {\nLOG.info(\"begin to remove database {} from replicatedEnviroment\", dbName);\nreplicatedEnvironment.removeDatabase(null, dbName);\n} catch (DatabaseNotFoundException e) {\nLOG.warn(\"catch an exception when remove db:{}, this db does not exist\", dbName, e);\n}\n} finally {\nlock.writeLock().unlock();\n}\n}\npublic List getDatabaseNames() {\nreturn getDatabaseNamesWithPrefix(\"\");\n}\npublic boolean close() {\nboolean closeSuccess = true;\nlock.writeLock().lock();\ntry {\nclosing = true;\nLOG.info(\"start to close log databases\");\nfor (CloseSafeDatabase db : openedDatabases) {\ntry {\ndb.close();\n} catch (DatabaseException exception) {\nLOG.error(\"Error closing db {}\", db.getDatabaseName(), exception);\ncloseSuccess = false;\n}\n}\nLOG.info(\"close log databases end\");\nopenedDatabases.clear();\nLOG.info(\"start to close epoch database\");\nif (epochDB != null) {\ntry {\nepochDB.close();\n} catch (DatabaseException exception) {\nLOG.error(\"Error closing db {}\", epochDB.getDatabaseName(), exception);\ncloseSuccess = false;\n}\n}\nLOG.info(\"close epoch database end\");\nLOG.info(\"start to close replicated environment\");\nif (replicatedEnvironment != null) {\ntry {\nreplicatedEnvironment.close();\n} catch (DatabaseException exception) {\nLOG.error(\"Error closing replicatedEnvironment\", exception);\ncloseSuccess = false;\n}\n}\nLOG.info(\"close replicated environment end\");\n} finally {\nclosing = false;\nlock.writeLock().unlock();\n}\nreturn closeSuccess;\n}\npublic void flushVLSNMapping() {\nif (replicatedEnvironment != null) {\nRepInternal.getRepImpl(replicatedEnvironment).getVLSNIndex()\n.flushToDatabase(Durability.COMMIT_SYNC);\n}\n}\nprivate SyncPolicy getSyncPolicy(String policy) {\nif (policy.equalsIgnoreCase(\"SYNC\")) {\nreturn Durability.SyncPolicy.SYNC;\n}\nif (policy.equalsIgnoreCase(\"NO_SYNC\")) {\nreturn Durability.SyncPolicy.NO_SYNC;\n}\nreturn Durability.SyncPolicy.WRITE_NO_SYNC;\n}\nprivate ReplicaAckPolicy getAckPolicy(String policy) {\nif (policy.equalsIgnoreCase(\"ALL\")) {\nreturn Durability.ReplicaAckPolicy.ALL;\n}\nif (policy.equalsIgnoreCase(\"NONE\")) {\nreturn Durability.ReplicaAckPolicy.NONE;\n}\nreturn Durability.ReplicaAckPolicy.SIMPLE_MAJORITY;\n}\n/**\n* package private, used within com.starrocks.journal.bdbje\n*/\nTransactionConfig getTxnConfig() {\nreturn txnConfig;\n}\n}" + }, + { + "comment": "\u5efa\u8baeisNotBlank", + "method_body": "public static void deleteBodyTmpFiles(String reportId) {\nif (StringUtils.isNotEmpty(reportId)) {\nString executeTmpFolder = 
StringUtils.join(\nBODY_FILE_DIR,\nFile.separator,\n\"tmp\",\nFile.separator,\nreportId\n);\ntry {\nFileUtils.deleteDir(executeTmpFolder);\n} catch (Exception e) {\nLoggerUtil.error(\"\u5220\u9664[\" + reportId + \"]\u6267\u884c\u4e2d\u4ea7\u751f\u7684\u4e34\u65f6\u6587\u4ef6\u5931\u8d25!\", e);\n}\n}\n}", + "target_code": "if (StringUtils.isNotEmpty(reportId)) {", + "method_body_after": "public static void deleteBodyTmpFiles(String reportId) {\nif (StringUtils.isNotEmpty(reportId)) {\nString executeTmpFolder = StringUtils.join(\nBODY_FILE_DIR,\nFile.separator,\n\"tmp\",\nFile.separator,\nreportId\n);\ntry {\nFileUtils.deleteDir(executeTmpFolder);\n} catch (Exception e) {\nLoggerUtil.error(\"\u5220\u9664[\" + reportId + \"]\u6267\u884c\u4e2d\u4ea7\u751f\u7684\u4e34\u65f6\u6587\u4ef6\u5931\u8d25!\", e);\n}\n}\n}", + "context_before": "class FileUtils {\npublic static final String ROOT_DIR = \"/opt/metersphere/\";\npublic static final String BODY_FILE_DIR = \"/opt/metersphere/data/body\";\npublic static final String MD_IMAGE_DIR = \"/opt/metersphere/data/image/markdown\";\npublic static final String UI_IMAGE_DIR = \"/opt/metersphere/data/image/ui/screenshots\";\npublic static final String ATTACHMENT_DIR = \"/opt/metersphere/data/attachment\";\npublic static final String ATTACHMENT_TMP_DIR = \"/opt/metersphere/data/attachment/tmp\";\npublic static final String LOCAL_JAR = \"/opt/metersphere/data/local-jar/jar\";\npublic static void validateFileName(String fileName) {\nif (StringUtils.isNotEmpty(fileName) && StringUtils.contains(fileName, \".\" + File.separator)) {\nMSException.throwException(Translator.get(\"invalid_parameter\"));\n}\n}\npublic static byte[] listBytesToZip(Map mapReport) {\ntry {\nif (!mapReport.isEmpty()) {\nByteArrayOutputStream baos = new ByteArrayOutputStream();\nZipOutputStream zos = new ZipOutputStream(baos);\nfor (Map.Entry report : mapReport.entrySet()) {\nZipEntry entry = new ZipEntry(report.getKey());\nentry.setSize(report.getValue().length);\nzos.putNextEntry(entry);\nzos.write(report.getValue());\n}\nzos.closeEntry();\nzos.close();\nreturn baos.toByteArray();\n}\n} catch (Exception e) {\nreturn new byte[10];\n}\nreturn new byte[10];\n}\npublic static void createFile(String filePath, byte[] fileBytes) {\nFile file = new File(filePath);\nif (file.exists()) {\nfile.delete();\n}\ntry {\nFile dir = file.getParentFile();\nif (!dir.exists()) {\ndir.mkdirs();\n}\nfile.createNewFile();\n} catch (Exception e) {\nLogUtil.error(e);\n}\ntry (InputStream in = new ByteArrayInputStream(fileBytes); OutputStream out = new FileOutputStream(file)) {\nfinal int MAX = 4096;\nbyte[] buf = new byte[MAX];\nfor (int bytesRead = in.read(buf, 0, MAX); bytesRead != -1; bytesRead = in.read(buf, 0, MAX)) {\nout.write(buf, 0, bytesRead);\n}\n} catch (IOException e) {\nLogUtil.error(e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\n}\nprivate static void create(List bodyUploadIds, List bodyFiles, String path) {\nString filePath = BODY_FILE_DIR;\nif (StringUtils.isNotEmpty(path)) {\nfilePath = path;\n}\nif (CollectionUtils.isNotEmpty(bodyUploadIds) && CollectionUtils.isNotEmpty(bodyFiles)) {\nFile testDir = new File(filePath);\nif (!testDir.exists()) {\ntestDir.mkdirs();\n}\nfor (int i = 0; i < bodyUploadIds.size(); i++) {\nMultipartFile item = bodyFiles.get(i);\nvalidateFileName(item.getOriginalFilename());\nFile file = new File(filePath + File.separator + bodyUploadIds.get(i) + \"_\" + item.getOriginalFilename());\ntry (InputStream in = item.getInputStream(); OutputStream out 
= new FileOutputStream(file)) {\nfile.createNewFile();\nfinal int MAX = 4096;\nbyte[] buf = new byte[MAX];\nfor (int bytesRead = in.read(buf, 0, MAX); bytesRead != -1; bytesRead = in.read(buf, 0, MAX)) {\nout.write(buf, 0, bytesRead);\n}\n} catch (IOException e) {\nLogUtil.error(e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\n}\n}\n}\npublic static String create(String id, MultipartFile item) {\nString filePath = BODY_FILE_DIR + \"/plugin\";\nif (item != null) {\nvalidateFileName(item.getOriginalFilename());\nFile testDir = new File(filePath);\nif (!testDir.exists()) {\ntestDir.mkdirs();\n}\nFile file = new File(filePath + File.separator + id + \"_\" + item.getOriginalFilename());\ntry (InputStream in = item.getInputStream(); OutputStream out = new FileOutputStream(file)) {\nfile.createNewFile();\nfinal int MAX = 4096;\nbyte[] buf = new byte[MAX];\nfor (int bytesRead = in.read(buf, 0, MAX); bytesRead != -1; bytesRead = in.read(buf, 0, MAX)) {\nout.write(buf, 0, bytesRead);\n}\n} catch (IOException e) {\nLogUtil.error(e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\nreturn file.getPath();\n}\nreturn null;\n}\npublic static void createBodyFiles(String requestId, List bodyFiles) {\nif (CollectionUtils.isNotEmpty(bodyFiles) && StringUtils.isNotBlank(requestId)) {\nString path = BODY_FILE_DIR + File.separator + requestId;\nFile testDir = new File(path);\nif (!testDir.exists()) {\ntestDir.mkdirs();\n}\nbodyFiles.forEach(item -> {\nvalidateFileName(item.getOriginalFilename());\nFile file = new File(path + File.separator + item.getOriginalFilename());\ntry (InputStream in = item.getInputStream(); OutputStream out = new FileOutputStream(file)) {\nfile.createNewFile();\nFileUtil.copyStream(in, out);\n} catch (IOException e) {\nLogUtil.error(e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\n});\n}\n}\npublic static void copyBodyFiles(String sourceId, String targetId) {\ntry {\nString sourcePath = BODY_FILE_DIR + File.separator + sourceId;\nString targetPath = BODY_FILE_DIR + File.separator + targetId;\ncopyFolder(sourcePath, targetPath);\n} catch (Exception e) {\nLoggerUtil.error(e);\n}\n}\n/**\n* \u5f3a\u5236\u8986\u76d6\u6587\u4ef6\n*\n* @param sourceId \u6e90ID\n* @param targetId \u76ee\u6807ID\n*/\npublic static void forceOverrideBodyFiles(String sourceId, String targetId) {\ndeleteBodyFiles(targetId);\ncopyBodyFiles(sourceId, targetId);\n}\n/**\n* \u590d\u5236\u6587\u4ef6\u5939(\u4f7f\u7528\u7f13\u51b2\u5b57\u8282\u6d41)\n*\n* @param sourcePath \u6e90\u6587\u4ef6\u5939\u8def\u5f84\n* @param targetPath \u76ee\u6807\u6587\u4ef6\u5939\u8def\u5f84\n*/\npublic static void copyFolder(String sourcePath, String targetPath) {\nFile sourceFile = new File(sourcePath);\nFile targetFile = new File(targetPath);\nif (!sourceFile.exists() || !sourceFile.isDirectory()) {\nreturn;\n}\nif (!targetFile.exists()) {\ntargetFile.mkdirs();\n}\nFile[] files = sourceFile.listFiles();\nif (files == null || files.length == 0) {\nreturn;\n}\nfor (File file : files) {\ncopyFileToDir(file, targetFile);\n}\n}\npublic static void copyFileToDir(String filePath, String targetPath) {\nFile sourceFile = new File(filePath);\nFile targetDir = new File(targetPath);\nif (!sourceFile.exists()) {\nreturn;\n}\nif (!targetDir.exists()) {\ntargetDir.mkdirs();\n}\ncopyFileToDir(sourceFile, targetDir);\n}\nprivate static void copyFileToDir(File file, File targetDir) {\nString movePath = targetDir + File.separator + file.getName();\nif (file.isDirectory()) 
{\ncopyFolder(file.getAbsolutePath(), movePath);\n} else {\ntry (BufferedInputStream in = new BufferedInputStream(new FileInputStream(file));\nBufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(movePath))) {\nbyte[] b = new byte[1024];\nint temp;\nwhile ((temp = in.read(b)) != -1) {\nout.write(b, 0, temp);\n}\n} catch (Exception e) {\nLoggerUtil.error(e);\n}\n}\n}\npublic static File getFileByName(String name) {\nString path = BODY_FILE_DIR + File.separator + name;\nreturn new File(path);\n}\npublic static File getBodyFileByName(String name, String requestId) {\nString path = BODY_FILE_DIR + File.separator + requestId + File.separator + name;\nreturn new File(path);\n}\npublic static void copyBdyFile(String originId, String toId) {\ntry {\nif (StringUtils.isNotEmpty(originId) && StringUtils.isNotEmpty(toId) && !StringUtils.equals(originId, toId)) {\nFileUtil.copyDir(new File(FileUtils.BODY_FILE_DIR + File.separator + originId),\nnew File(FileUtils.BODY_FILE_DIR + File.separator + toId));\n}\n} catch (Exception e) {\nLogUtil.error(e.getMessage(), e);\n}\n}\npublic static void createBodyFiles(List bodyUploadIds, List bodyFiles) {\nFileUtils.create(bodyUploadIds, bodyFiles, null);\n}\npublic static void createFiles(List bodyUploadIds, List bodyFiles, String path) {\nFileUtils.create(bodyUploadIds, bodyFiles, path);\n}\npublic static String createFile(MultipartFile bodyFile) {\nvalidateFileName(bodyFile.getOriginalFilename());\nString dir = \"/opt/metersphere/data/body/tmp/\";\nFile fileDir = new File(dir);\nif (!fileDir.exists()) {\nfileDir.mkdirs();\n}\nFile file = new File(dir + UUID.randomUUID().toString() + \"_\" + bodyFile.getOriginalFilename());\ntry (InputStream in = bodyFile.getInputStream(); OutputStream out = new FileOutputStream(file)) {\nfile.createNewFile();\nFileUtil.copyStream(in, out);\n} catch (IOException e) {\nLogUtil.error(e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\nreturn file.getPath();\n}\npublic static void deleteBodyFiles(String requestId) {\nFile file = new File(BODY_FILE_DIR + File.separator + requestId);\nFileUtil.deleteContents(file);\nif (file.exists()) {\nfile.delete();\n}\nfile = new File(BODY_FILE_DIR + File.separator + \"tmp\" + File.separator + requestId);\nFileUtil.deleteContents(file);\nif (file.exists()) {\nfile.delete();\n}\n}\npublic static String uploadFile(MultipartFile uploadFile, String path, String name) {\nvalidateFileName(name);\nif (uploadFile == null) {\nreturn null;\n}\nFile testDir = new File(path);\nif (!testDir.exists()) {\ntestDir.mkdirs();\n}\nString filePath = testDir + File.separator + name;\nFile file = new File(filePath);\ntry (InputStream in = uploadFile.getInputStream(); OutputStream out = new FileOutputStream(file)) {\nfile.createNewFile();\nFileUtil.copyStream(in, out);\n} catch (IOException e) {\nLogUtil.error(e.getMessage(), e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\nreturn filePath;\n}\npublic static String uploadFile(MultipartFile uploadFile, String path) {\nreturn uploadFile(uploadFile, path, uploadFile.getOriginalFilename());\n}\npublic static void deleteFile(String path) {\nFile file = new File(path);\nif (file.exists()) {\nfile.delete();\n}\n}\npublic static void deleteDir(String path) {\nFile file = new File(path);\nFileUtil.deleteContents(file);\nif (file.exists()) {\nfile.delete();\n}\n}\n/**\n* \u83b7\u53d6\u5f53\u524djmx \u6d89\u53ca\u5230\u7684\u6587\u4ef6\n*\n* @param tree\n*/\npublic static void getFiles(HashTree tree, List files) {\nfor 
(Object key : tree.keySet()) {\nHashTree node = tree.get(key);\nif (key instanceof HTTPSamplerProxy) {\nHTTPSamplerProxy source = (HTTPSamplerProxy) key;\nif (source != null && source.getHTTPFiles().length > 0) {\nfor (HTTPFileArg arg : source.getHTTPFiles()) {\nBodyFile file = new BodyFile();\nfile.setId(arg.getParamName());\nfile.setName(arg.getPath());\nif (arg.getPropertyAsBoolean(\"isRef\")) {\nfile.setStorage(StorageConstants.FILE_REF.name());\nfile.setFileId(arg.getPropertyAsString(\"fileId\"));\n}\nfiles.add(file);\n}\n}\n} else if (key instanceof CSVDataSet) {\nCSVDataSet source = (CSVDataSet) key;\nif (source != null && StringUtils.isNotEmpty(source.getPropertyAsString(\"filename\"))) {\nBodyFile file = new BodyFile();\nfile.setId(source.getPropertyAsString(\"filename\"));\nfile.setName(source.getPropertyAsString(\"filename\"));\nfiles.add(file);\n}\n}\nif (node != null) {\ngetFiles(node, files);\n}\n}\n}\npublic static byte[] fileToByte(File tradeFile) {\nbyte[] buffer = null;\ntry (FileInputStream fis = new FileInputStream(tradeFile);\nByteArrayOutputStream bos = new ByteArrayOutputStream()) {\nbyte[] b = new byte[1024];\nint n;\nwhile ((n = fis.read(b)) != -1) {\nbos.write(b, 0, n);\n}\nbuffer = bos.toByteArray();\n} catch (Exception e) {\nLogUtil.error(e);\n}\nreturn buffer;\n}\npublic static File byteToFile(byte[] buf, String filePath, String fileName) {\nBufferedOutputStream bos = null;\nFileOutputStream fos = null;\nFile file = null;\ntry {\nFile dir = new File(filePath);\nif (!dir.exists()) {\ndir.mkdirs();\n}\nfile = new File(filePath + File.separator + fileName);\nfos = new FileOutputStream(file);\nbos = new BufferedOutputStream(fos);\nbos.write(buf);\n} catch (Exception e) {\ne.printStackTrace();\n} finally {\nif (bos != null) {\ntry {\nbos.close();\n} catch (IOException e) {\ne.printStackTrace();\n}\n}\nif (fos != null) {\ntry {\nfos.close();\n} catch (IOException e) {\ne.printStackTrace();\n}\n}\n}\nreturn file;\n}\npublic static String fileToStr(File tradeFile) {\nString buffer = null;\ntry (FileInputStream fis = new FileInputStream(tradeFile);\nByteArrayOutputStream bos = new ByteArrayOutputStream();) {\nbyte[] b = new byte[1024];\nint n;\nwhile ((n = fis.read(b)) != -1) {\nbos.write(b, 0, n);\n}\nbuffer = bos.toString();\n} catch (Exception e) {\n}\nreturn buffer;\n}\npublic static List getRepositoryFileMetadata(HashTree tree) {\nFileMetadataService fileMetadataService = CommonBeanFactory.getBean(FileMetadataService.class);\nList list = new ArrayList<>();\nfor (Object key : tree.keySet()) {\nHashTree node = tree.get(key);\nif (key instanceof HTTPSamplerProxy) {\nHTTPSamplerProxy source = (HTTPSamplerProxy) key;\nif (source != null && source.getHTTPFiles().length > 0) {\nfor (HTTPFileArg arg : source.getHTTPFiles()) {\nif (arg.getPropertyAsBoolean(\"isRef\") && fileMetadataService != null) {\nFileMetadata fileMetadata = fileMetadataService.getFileMetadataById(arg.getPropertyAsString(\"fileId\"));\nif (fileMetadata != null && !StringUtils.equals(fileMetadata.getStorage(), StorageConstants.LOCAL.name())) {\nlist.add(fileMetadata);\narg.setPath(fileMetadata.getName());\narg.setName(fileMetadata.getName());\n}\n}\n}\n}\n} else if (key instanceof CSVDataSet) {\nCSVDataSet source = (CSVDataSet) key;\nif (source != null && StringUtils.isNotEmpty(source.getPropertyAsString(\"filename\"))) {\nif (source.getPropertyAsBoolean(\"isRef\") && fileMetadataService != null) {\nFileMetadata fileMetadata = 
fileMetadataService.getFileMetadataById(source.getPropertyAsString(\"fileId\"));\nif (fileMetadata != null && !StringUtils.equals(fileMetadata.getStorage(), StorageConstants.LOCAL.name())) {\nlist.add(fileMetadata);\nsource.setFilename(fileMetadata.getName());\n}\n}\n}\n}\nif (node != null) {\nlist.addAll(getRepositoryFileMetadata(node));\n}\n}\nreturn list;\n}\npublic static boolean isFolderExists(String requestId) {\nFile file = new File(BODY_FILE_DIR + File.separator + requestId);\nreturn file.isDirectory();\n}\npublic List getZipJar() {\nList jarFiles = new LinkedList<>();\nJarConfigService jarConfigService = CommonBeanFactory.getBean(JarConfigService.class);\nList jars = jarConfigService.list();\nList files = new ArrayList<>();\njars.forEach(jarConfig -> {\nString path = jarConfig.getPath();\nFile file = new File(path);\nif (file.isDirectory() && !path.endsWith(File.separator)) {\nfile = new File(path + File.separator);\n}\nfiles.add(file);\n});\ntry {\nFile file = CompressUtils.zipFiles(UUID.randomUUID().toString() + \".zip\", files);\nFileSystemResource resource = new FileSystemResource(file);\nbyte[] fileByte = this.fileToByte(file);\nif (fileByte != null) {\nByteArrayResource byteArrayResource = new ByteArrayResource(fileByte) {\n@Override\npublic String getFilename() throws IllegalStateException {\nreturn resource.getFilename();\n}\n};\njarFiles.add(byteArrayResource);\n}\n} catch (Exception e) {\nLogUtil.error(e);\n}\nreturn jarFiles;\n}\npublic List getJar() {\nList jarFiles = new LinkedList<>();\nJarConfigService jarConfigService = CommonBeanFactory.getBean(JarConfigService.class);\nList jars = jarConfigService.list();\njars.forEach(jarConfig -> {\ntry {\nString path = jarConfig.getPath();\nFile file = new File(path);\nif (file.isDirectory() && !path.endsWith(File.separator)) {\nfile = new File(path + File.separator);\n}\nFileSystemResource resource = new FileSystemResource(file);\nbyte[] fileByte = this.fileToByte(file);\nif (fileByte != null) {\nByteArrayResource byteArrayResource = new ByteArrayResource(fileByte) {\n@Override\npublic String getFilename() throws IllegalStateException {\nreturn resource.getFilename();\n}\n};\njarFiles.add(byteArrayResource);\n}\n} catch (Exception e) {\nLogUtil.error(e.getMessage(), e);\n}\n});\nreturn jarFiles;\n}\npublic List getMultipartFiles(HashTree hashTree) {\nList multipartFiles = new LinkedList<>();\nList files = new LinkedList<>();\ngetFiles(hashTree, files);\nif (CollectionUtils.isNotEmpty(files)) {\nfor (BodyFile bodyFile : files) {\nFile file = new File(bodyFile.getName());\nif (file != null && !file.exists()) {\nFileSystemResource resource = new FileSystemResource(file);\nbyte[] fileByte = this.fileToByte(file);\nif (fileByte != null) {\nByteArrayResource byteArrayResource = new ByteArrayResource(fileByte) {\n@Override\npublic String getFilename() throws IllegalStateException {\nreturn resource.getFilename();\n}\n};\nmultipartFiles.add(byteArrayResource);\n}\n}\n}\n}\nreturn multipartFiles;\n}\npublic static Boolean writeToFile(String filePath, byte[] content) {\nOutputStream oStream = null;\ntry {\noStream = new FileOutputStream(filePath);\noStream.write(content);\nreturn Boolean.TRUE;\n} catch (Exception exception) {\nexception.printStackTrace();\nreturn Boolean.FALSE;\n} finally {\ntry {\noStream.close();\n} catch (IOException e) {\ne.printStackTrace();\n}\n}\n}\npublic static String getFilePath(BodyFile file) {\nString type = StringUtils.isNotEmpty(file.getFileType()) ? 
file.getFileType().toLowerCase() : null;\nString name = file.getName();\nif (type != null && !name.endsWith(type)) {\nname = StringUtils.join(name, \".\", type);\n}\nreturn StringUtils.join(FileUtils.BODY_FILE_DIR, File.separator, file.getProjectId(), File.separator, name);\n}\npublic static String getFilePath(FileMetadata fileMetadata) {\nString type = StringUtils.isNotEmpty(fileMetadata.getType()) ? fileMetadata.getType().toLowerCase() : null;\nString name = fileMetadata.getName();\nif (type != null && !name.endsWith(type)) {\nname = StringUtils.join(name, \".\", type);\n}\nreturn StringUtils.join(FileUtils.BODY_FILE_DIR, File.separator, fileMetadata.getProjectId(), File.separator, name);\n}\n}", + "context_after": "class FileUtils {\npublic static final String ROOT_DIR = \"/opt/metersphere/\";\npublic static final String BODY_FILE_DIR = \"/opt/metersphere/data/body\";\npublic static final String MD_IMAGE_DIR = \"/opt/metersphere/data/image/markdown\";\npublic static final String UI_IMAGE_DIR = \"/opt/metersphere/data/image/ui/screenshots\";\npublic static final String ATTACHMENT_DIR = \"/opt/metersphere/data/attachment\";\npublic static final String ATTACHMENT_TMP_DIR = \"/opt/metersphere/data/attachment/tmp\";\npublic static final String LOCAL_JAR = \"/opt/metersphere/data/local-jar/jar\";\npublic static void validateFileName(String fileName) {\nif (StringUtils.isNotEmpty(fileName) && StringUtils.contains(fileName, \".\" + File.separator)) {\nMSException.throwException(Translator.get(\"invalid_parameter\"));\n}\n}\npublic static byte[] listBytesToZip(Map mapReport) {\ntry {\nif (!mapReport.isEmpty()) {\nByteArrayOutputStream baos = new ByteArrayOutputStream();\nZipOutputStream zos = new ZipOutputStream(baos);\nfor (Map.Entry report : mapReport.entrySet()) {\nZipEntry entry = new ZipEntry(report.getKey());\nentry.setSize(report.getValue().length);\nzos.putNextEntry(entry);\nzos.write(report.getValue());\n}\nzos.closeEntry();\nzos.close();\nreturn baos.toByteArray();\n}\n} catch (Exception e) {\nreturn new byte[10];\n}\nreturn new byte[10];\n}\npublic static void createFile(String filePath, byte[] fileBytes) {\nFile file = new File(filePath);\nif (file.exists()) {\nfile.delete();\n}\ntry {\nFile dir = file.getParentFile();\nif (!dir.exists()) {\ndir.mkdirs();\n}\nfile.createNewFile();\n} catch (Exception e) {\nLogUtil.error(e);\n}\ntry (InputStream in = new ByteArrayInputStream(fileBytes); OutputStream out = new FileOutputStream(file)) {\nfinal int MAX = 4096;\nbyte[] buf = new byte[MAX];\nfor (int bytesRead = in.read(buf, 0, MAX); bytesRead != -1; bytesRead = in.read(buf, 0, MAX)) {\nout.write(buf, 0, bytesRead);\n}\n} catch (IOException e) {\nLogUtil.error(e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\n}\nprivate static void create(List bodyUploadIds, List bodyFiles, String path) {\nString filePath = BODY_FILE_DIR;\nif (StringUtils.isNotEmpty(path)) {\nfilePath = path;\n}\nif (CollectionUtils.isNotEmpty(bodyUploadIds) && CollectionUtils.isNotEmpty(bodyFiles)) {\nFile testDir = new File(filePath);\nif (!testDir.exists()) {\ntestDir.mkdirs();\n}\nfor (int i = 0; i < bodyUploadIds.size(); i++) {\nMultipartFile item = bodyFiles.get(i);\nvalidateFileName(item.getOriginalFilename());\nFile file = new File(filePath + File.separator + bodyUploadIds.get(i) + \"_\" + item.getOriginalFilename());\ntry (InputStream in = item.getInputStream(); OutputStream out = new FileOutputStream(file)) {\nfile.createNewFile();\nfinal int MAX = 4096;\nbyte[] buf = new byte[MAX];\nfor (int 
bytesRead = in.read(buf, 0, MAX); bytesRead != -1; bytesRead = in.read(buf, 0, MAX)) {\nout.write(buf, 0, bytesRead);\n}\n} catch (IOException e) {\nLogUtil.error(e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\n}\n}\n}\npublic static String create(String id, MultipartFile item) {\nString filePath = BODY_FILE_DIR + \"/plugin\";\nif (item != null) {\nvalidateFileName(item.getOriginalFilename());\nFile testDir = new File(filePath);\nif (!testDir.exists()) {\ntestDir.mkdirs();\n}\nFile file = new File(filePath + File.separator + id + \"_\" + item.getOriginalFilename());\ntry (InputStream in = item.getInputStream(); OutputStream out = new FileOutputStream(file)) {\nfile.createNewFile();\nfinal int MAX = 4096;\nbyte[] buf = new byte[MAX];\nfor (int bytesRead = in.read(buf, 0, MAX); bytesRead != -1; bytesRead = in.read(buf, 0, MAX)) {\nout.write(buf, 0, bytesRead);\n}\n} catch (IOException e) {\nLogUtil.error(e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\nreturn file.getPath();\n}\nreturn null;\n}\npublic static void createBodyFiles(String requestId, List bodyFiles) {\nif (CollectionUtils.isNotEmpty(bodyFiles) && StringUtils.isNotBlank(requestId)) {\nString path = BODY_FILE_DIR + File.separator + requestId;\nFile testDir = new File(path);\nif (!testDir.exists()) {\ntestDir.mkdirs();\n}\nbodyFiles.forEach(item -> {\nvalidateFileName(item.getOriginalFilename());\nFile file = new File(path + File.separator + item.getOriginalFilename());\ntry (InputStream in = item.getInputStream(); OutputStream out = new FileOutputStream(file)) {\nfile.createNewFile();\nFileUtil.copyStream(in, out);\n} catch (IOException e) {\nLogUtil.error(e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\n});\n}\n}\npublic static void copyBodyFiles(String sourceId, String targetId) {\ntry {\nString sourcePath = BODY_FILE_DIR + File.separator + sourceId;\nString targetPath = BODY_FILE_DIR + File.separator + targetId;\ncopyFolder(sourcePath, targetPath);\n} catch (Exception e) {\nLoggerUtil.error(e);\n}\n}\n/**\n* \u5f3a\u5236\u8986\u76d6\u6587\u4ef6\n*\n* @param sourceId \u6e90ID\n* @param targetId \u76ee\u6807ID\n*/\npublic static void forceOverrideBodyFiles(String sourceId, String targetId) {\ndeleteBodyFiles(targetId);\ncopyBodyFiles(sourceId, targetId);\n}\n/**\n* \u590d\u5236\u6587\u4ef6\u5939(\u4f7f\u7528\u7f13\u51b2\u5b57\u8282\u6d41)\n*\n* @param sourcePath \u6e90\u6587\u4ef6\u5939\u8def\u5f84\n* @param targetPath \u76ee\u6807\u6587\u4ef6\u5939\u8def\u5f84\n*/\npublic static void copyFolder(String sourcePath, String targetPath) {\nFile sourceFile = new File(sourcePath);\nFile targetFile = new File(targetPath);\nif (!sourceFile.exists() || !sourceFile.isDirectory()) {\nreturn;\n}\nif (!targetFile.exists()) {\ntargetFile.mkdirs();\n}\nFile[] files = sourceFile.listFiles();\nif (files == null || files.length == 0) {\nreturn;\n}\nfor (File file : files) {\ncopyFileToDir(file, targetFile);\n}\n}\npublic static void copyFileToDir(String filePath, String targetPath) {\nFile sourceFile = new File(filePath);\nFile targetDir = new File(targetPath);\nif (!sourceFile.exists()) {\nreturn;\n}\nif (!targetDir.exists()) {\ntargetDir.mkdirs();\n}\ncopyFileToDir(sourceFile, targetDir);\n}\nprivate static void copyFileToDir(File file, File targetDir) {\nString movePath = targetDir + File.separator + file.getName();\nif (file.isDirectory()) {\ncopyFolder(file.getAbsolutePath(), movePath);\n} else {\ntry (BufferedInputStream in = new BufferedInputStream(new 
FileInputStream(file));\nBufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(movePath))) {\nbyte[] b = new byte[1024];\nint temp;\nwhile ((temp = in.read(b)) != -1) {\nout.write(b, 0, temp);\n}\n} catch (Exception e) {\nLoggerUtil.error(e);\n}\n}\n}\npublic static File getFileByName(String name) {\nString path = BODY_FILE_DIR + File.separator + name;\nreturn new File(path);\n}\npublic static File getBodyFileByName(String name, String requestId) {\nString path = BODY_FILE_DIR + File.separator + requestId + File.separator + name;\nreturn new File(path);\n}\npublic static void copyBdyFile(String originId, String toId) {\ntry {\nif (StringUtils.isNotEmpty(originId) && StringUtils.isNotEmpty(toId) && !StringUtils.equals(originId, toId)) {\nFileUtil.copyDir(new File(FileUtils.BODY_FILE_DIR + File.separator + originId),\nnew File(FileUtils.BODY_FILE_DIR + File.separator + toId));\n}\n} catch (Exception e) {\nLogUtil.error(e.getMessage(), e);\n}\n}\npublic static void createBodyFiles(List bodyUploadIds, List bodyFiles) {\nFileUtils.create(bodyUploadIds, bodyFiles, null);\n}\npublic static void createFiles(List bodyUploadIds, List bodyFiles, String path) {\nFileUtils.create(bodyUploadIds, bodyFiles, path);\n}\npublic static String createFile(MultipartFile bodyFile) {\nvalidateFileName(bodyFile.getOriginalFilename());\nString dir = \"/opt/metersphere/data/body/tmp/\";\nFile fileDir = new File(dir);\nif (!fileDir.exists()) {\nfileDir.mkdirs();\n}\nFile file = new File(dir + UUID.randomUUID().toString() + \"_\" + bodyFile.getOriginalFilename());\ntry (InputStream in = bodyFile.getInputStream(); OutputStream out = new FileOutputStream(file)) {\nfile.createNewFile();\nFileUtil.copyStream(in, out);\n} catch (IOException e) {\nLogUtil.error(e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\nreturn file.getPath();\n}\npublic static void deleteBodyFiles(String requestId) {\nFile file = new File(BODY_FILE_DIR + File.separator + requestId);\nFileUtil.deleteContents(file);\nif (file.exists()) {\nfile.delete();\n}\nfile = new File(BODY_FILE_DIR + File.separator + \"tmp\" + File.separator + requestId);\nFileUtil.deleteContents(file);\nif (file.exists()) {\nfile.delete();\n}\n}\npublic static String uploadFile(MultipartFile uploadFile, String path, String name) {\nvalidateFileName(name);\nif (uploadFile == null) {\nreturn null;\n}\nFile testDir = new File(path);\nif (!testDir.exists()) {\ntestDir.mkdirs();\n}\nString filePath = testDir + File.separator + name;\nFile file = new File(filePath);\ntry (InputStream in = uploadFile.getInputStream(); OutputStream out = new FileOutputStream(file)) {\nfile.createNewFile();\nFileUtil.copyStream(in, out);\n} catch (IOException e) {\nLogUtil.error(e.getMessage(), e);\nMSException.throwException(Translator.get(\"upload_fail\"));\n}\nreturn filePath;\n}\npublic static String uploadFile(MultipartFile uploadFile, String path) {\nreturn uploadFile(uploadFile, path, uploadFile.getOriginalFilename());\n}\npublic static void deleteFile(String path) {\nFile file = new File(path);\nif (file.exists()) {\nfile.delete();\n}\n}\npublic static void deleteDir(String path) {\nFile file = new File(path);\nFileUtil.deleteContents(file);\nif (file.exists()) {\nfile.delete();\n}\n}\n/**\n* \u83b7\u53d6\u5f53\u524djmx \u6d89\u53ca\u5230\u7684\u6587\u4ef6\n*\n* @param tree\n*/\npublic static void getFiles(HashTree tree, List files) {\nfor (Object key : tree.keySet()) {\nHashTree node = tree.get(key);\nif (key instanceof HTTPSamplerProxy) {\nHTTPSamplerProxy 
source = (HTTPSamplerProxy) key;\nif (source != null && source.getHTTPFiles().length > 0) {\nfor (HTTPFileArg arg : source.getHTTPFiles()) {\nBodyFile file = new BodyFile();\nfile.setId(arg.getParamName());\nfile.setName(arg.getPath());\nif (arg.getPropertyAsBoolean(\"isRef\")) {\nfile.setStorage(StorageConstants.FILE_REF.name());\nfile.setFileId(arg.getPropertyAsString(\"fileId\"));\n}\nfiles.add(file);\n}\n}\n} else if (key instanceof CSVDataSet) {\nCSVDataSet source = (CSVDataSet) key;\nif (source != null && StringUtils.isNotEmpty(source.getPropertyAsString(\"filename\"))) {\nBodyFile file = new BodyFile();\nfile.setId(source.getPropertyAsString(\"filename\"));\nfile.setName(source.getPropertyAsString(\"filename\"));\nfiles.add(file);\n}\n}\nif (node != null) {\ngetFiles(node, files);\n}\n}\n}\npublic static byte[] fileToByte(File tradeFile) {\nbyte[] buffer = null;\ntry (FileInputStream fis = new FileInputStream(tradeFile);\nByteArrayOutputStream bos = new ByteArrayOutputStream()) {\nbyte[] b = new byte[1024];\nint n;\nwhile ((n = fis.read(b)) != -1) {\nbos.write(b, 0, n);\n}\nbuffer = bos.toByteArray();\n} catch (Exception e) {\nLogUtil.error(e);\n}\nreturn buffer;\n}\npublic static File byteToFile(byte[] buf, String filePath, String fileName) {\nBufferedOutputStream bos = null;\nFileOutputStream fos = null;\nFile file = null;\ntry {\nFile dir = new File(filePath);\nif (!dir.exists()) {\ndir.mkdirs();\n}\nfile = new File(filePath + File.separator + fileName);\nfos = new FileOutputStream(file);\nbos = new BufferedOutputStream(fos);\nbos.write(buf);\n} catch (Exception e) {\ne.printStackTrace();\n} finally {\nif (bos != null) {\ntry {\nbos.close();\n} catch (IOException e) {\ne.printStackTrace();\n}\n}\nif (fos != null) {\ntry {\nfos.close();\n} catch (IOException e) {\ne.printStackTrace();\n}\n}\n}\nreturn file;\n}\npublic static String fileToStr(File tradeFile) {\nString buffer = null;\ntry (FileInputStream fis = new FileInputStream(tradeFile);\nByteArrayOutputStream bos = new ByteArrayOutputStream();) {\nbyte[] b = new byte[1024];\nint n;\nwhile ((n = fis.read(b)) != -1) {\nbos.write(b, 0, n);\n}\nbuffer = bos.toString();\n} catch (Exception e) {\n}\nreturn buffer;\n}\npublic static List getRepositoryFileMetadata(HashTree tree) {\nFileMetadataService fileMetadataService = CommonBeanFactory.getBean(FileMetadataService.class);\nList list = new ArrayList<>();\nfor (Object key : tree.keySet()) {\nHashTree node = tree.get(key);\nif (key instanceof HTTPSamplerProxy) {\nHTTPSamplerProxy source = (HTTPSamplerProxy) key;\nif (source != null && source.getHTTPFiles().length > 0) {\nfor (HTTPFileArg arg : source.getHTTPFiles()) {\nif (arg.getPropertyAsBoolean(\"isRef\") && fileMetadataService != null) {\nFileMetadata fileMetadata = fileMetadataService.getFileMetadataById(arg.getPropertyAsString(\"fileId\"));\nif (fileMetadata != null && !StringUtils.equals(fileMetadata.getStorage(), StorageConstants.LOCAL.name())) {\nlist.add(fileMetadata);\narg.setPath(fileMetadata.getName());\narg.setName(fileMetadata.getName());\n}\n}\n}\n}\n} else if (key instanceof CSVDataSet) {\nCSVDataSet source = (CSVDataSet) key;\nif (source != null && StringUtils.isNotEmpty(source.getPropertyAsString(\"filename\"))) {\nif (source.getPropertyAsBoolean(\"isRef\") && fileMetadataService != null) {\nFileMetadata fileMetadata = fileMetadataService.getFileMetadataById(source.getPropertyAsString(\"fileId\"));\nif (fileMetadata != null && !StringUtils.equals(fileMetadata.getStorage(), StorageConstants.LOCAL.name())) 
{\nlist.add(fileMetadata);\nsource.setFilename(fileMetadata.getName());\n}\n}\n}\n}\nif (node != null) {\nlist.addAll(getRepositoryFileMetadata(node));\n}\n}\nreturn list;\n}\npublic static boolean isFolderExists(String requestId) {\nFile file = new File(BODY_FILE_DIR + File.separator + requestId);\nreturn file.isDirectory();\n}\npublic List getZipJar() {\nList jarFiles = new LinkedList<>();\nJarConfigService jarConfigService = CommonBeanFactory.getBean(JarConfigService.class);\nList jars = jarConfigService.list();\nList files = new ArrayList<>();\njars.forEach(jarConfig -> {\nString path = jarConfig.getPath();\nFile file = new File(path);\nif (file.isDirectory() && !path.endsWith(File.separator)) {\nfile = new File(path + File.separator);\n}\nfiles.add(file);\n});\ntry {\nFile file = CompressUtils.zipFiles(UUID.randomUUID().toString() + \".zip\", files);\nFileSystemResource resource = new FileSystemResource(file);\nbyte[] fileByte = this.fileToByte(file);\nif (fileByte != null) {\nByteArrayResource byteArrayResource = new ByteArrayResource(fileByte) {\n@Override\npublic String getFilename() throws IllegalStateException {\nreturn resource.getFilename();\n}\n};\njarFiles.add(byteArrayResource);\n}\n} catch (Exception e) {\nLogUtil.error(e);\n}\nreturn jarFiles;\n}\npublic List getJar() {\nList jarFiles = new LinkedList<>();\nJarConfigService jarConfigService = CommonBeanFactory.getBean(JarConfigService.class);\nList jars = jarConfigService.list();\njars.forEach(jarConfig -> {\ntry {\nString path = jarConfig.getPath();\nFile file = new File(path);\nif (file.isDirectory() && !path.endsWith(File.separator)) {\nfile = new File(path + File.separator);\n}\nFileSystemResource resource = new FileSystemResource(file);\nbyte[] fileByte = this.fileToByte(file);\nif (fileByte != null) {\nByteArrayResource byteArrayResource = new ByteArrayResource(fileByte) {\n@Override\npublic String getFilename() throws IllegalStateException {\nreturn resource.getFilename();\n}\n};\njarFiles.add(byteArrayResource);\n}\n} catch (Exception e) {\nLogUtil.error(e.getMessage(), e);\n}\n});\nreturn jarFiles;\n}\npublic List getMultipartFiles(HashTree hashTree) {\nList multipartFiles = new LinkedList<>();\nList files = new LinkedList<>();\ngetFiles(hashTree, files);\nif (CollectionUtils.isNotEmpty(files)) {\nfor (BodyFile bodyFile : files) {\nFile file = new File(bodyFile.getName());\nif (file != null && !file.exists()) {\nFileSystemResource resource = new FileSystemResource(file);\nbyte[] fileByte = this.fileToByte(file);\nif (fileByte != null) {\nByteArrayResource byteArrayResource = new ByteArrayResource(fileByte) {\n@Override\npublic String getFilename() throws IllegalStateException {\nreturn resource.getFilename();\n}\n};\nmultipartFiles.add(byteArrayResource);\n}\n}\n}\n}\nreturn multipartFiles;\n}\npublic static Boolean writeToFile(String filePath, byte[] content) {\nOutputStream oStream = null;\ntry {\noStream = new FileOutputStream(filePath);\noStream.write(content);\nreturn Boolean.TRUE;\n} catch (Exception exception) {\nexception.printStackTrace();\nreturn Boolean.FALSE;\n} finally {\ntry {\noStream.close();\n} catch (IOException e) {\ne.printStackTrace();\n}\n}\n}\npublic static String getFilePath(BodyFile file) {\nString type = StringUtils.isNotEmpty(file.getFileType()) ? 
file.getFileType().toLowerCase() : null;\nString name = file.getName();\nif (type != null && !name.endsWith(type)) {\nname = StringUtils.join(name, \".\", type);\n}\nreturn StringUtils.join(FileUtils.BODY_FILE_DIR, File.separator, file.getProjectId(), File.separator, name);\n}\npublic static String getFilePath(FileMetadata fileMetadata) {\nString type = StringUtils.isNotEmpty(fileMetadata.getType()) ? fileMetadata.getType().toLowerCase() : null;\nString name = fileMetadata.getName();\nif (type != null && !name.endsWith(type)) {\nname = StringUtils.join(name, \".\", type);\n}\nreturn StringUtils.join(FileUtils.BODY_FILE_DIR, File.separator, fileMetadata.getProjectId(), File.separator, name);\n}\n}" + }, + { + "comment": "The most risky bug in this code is: Potential creation of unmanaged threads leading to resource leaks if executors are not properly shut down. You can modify the code like this: ```java @@ -53,6 +53,7 @@ public class IcebergConnector implements Connector, Closeable { // Make sure to implement Closeable or another interface for proper cleanup private final String catalogName; private IcebergCatalog icebergNativeCatalog; private ExecutorService icebergJobPlanningExecutor; + private ExecutorService refreshOtherFeExecutor; public IcebergConnector(ConnectorContext context) { this.catalogName = context.getCatalogName(); ... @@ -107,7 +108,8 @@ public void close() throws IOException { // Override close method to properly shutdown executors if (icebergJobPlanningExecutor != null && !icebergJobPlanningExecutor.isShutdown()) { icebergJobPlanningExecutor.shutdown(); } + if (refreshOtherFeExecutor != null && !refreshOtherFeExecutor.isShutdown()) { + refreshOtherFeExecutor.shutdown(); + } } ``` Explanation: It seems that the executors being created here (`icebergJobPlanningExecutor` and `refreshOtherFeExecutor`) have no corresponding shutdown mechanism in the provided code snippet. When an executor service is created but not properly shut down, it keeps running and waiting for tasks. This can lead to a situation where these executor services consume system resources unnecessarily and potentially cause memory leaks or thread leaks, which would be considered a serious issue, particularly if new instances of `IcebergConnector` are frequently created and disposed of. 
Implementing `Closeable` and adding a `close` method (or ensuring there is one if it's already part of the superclass not visible in the snippet), which shuts down these executors, allows for proper cleanup when an `IcebergConnector` instance is no longer needed.", + "method_body": "private ExecutorService buildBackgroundJobPlanningExecutor() {\nint defaultPoolSize = Math.max(2, Runtime.getRuntime().availableProcessors() / 8);\nint backgroundIcebergJobPlanningThreadPoolSize = Integer.parseInt(properties.getOrDefault(\n\"background_iceberg_job_planning_thread_num\", String.valueOf(defaultPoolSize)));\nreturn newWorkerPool(catalogName + \"-background-iceberg-worker-pool\", backgroundIcebergJobPlanningThreadPoolSize);\n}", + "target_code": "int backgroundIcebergJobPlanningThreadPoolSize = Integer.parseInt(properties.getOrDefault(", + "method_body_after": "private ExecutorService buildBackgroundJobPlanningExecutor() {\nint defaultPoolSize = Math.max(2, Runtime.getRuntime().availableProcessors() / 8);\nint backgroundIcebergJobPlanningThreadPoolSize = Integer.parseInt(properties.getOrDefault(\n\"background_iceberg_job_planning_thread_num\", String.valueOf(defaultPoolSize)));\nreturn newWorkerPool(catalogName + \"-background-iceberg-worker-pool\", backgroundIcebergJobPlanningThreadPoolSize);\n}", + "context_before": "class IcebergConnector implements Connector {\nprivate static final Logger LOG = LogManager.getLogger(IcebergConnector.class);\npublic static final String ICEBERG_CATALOG_TYPE = \"iceberg.catalog.type\";\n@Deprecated\npublic static final String ICEBERG_CATALOG_LEGACY = \"starrocks.catalog-type\";\n@Deprecated\npublic static final String ICEBERG_METASTORE_URIS = \"iceberg.catalog.hive.metastore.uris\";\npublic static final String HIVE_METASTORE_URIS = \"hive.metastore.uris\";\npublic static final String ICEBERG_CUSTOM_PROPERTIES_PREFIX = \"iceberg.catalog.\";\nprivate final Map properties;\nprivate final HdfsEnvironment hdfsEnvironment;\nprivate final String catalogName;\nprivate IcebergCatalog icebergNativeCatalog;\nprivate ExecutorService icebergJobPlanningExecutor;\nprivate ExecutorService refreshOtherFeExecutor;\npublic IcebergConnector(ConnectorContext context) {\nthis.catalogName = context.getCatalogName();\nthis.properties = context.getProperties();\nCloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(properties);\nthis.hdfsEnvironment = new HdfsEnvironment(cloudConfiguration);\n}\nprivate IcebergCatalog buildIcebergNativeCatalog() {\nIcebergCatalogType nativeCatalogType = getNativeCatalogType();\nConfiguration conf = hdfsEnvironment.getConfiguration();\nif (Config.enable_iceberg_custom_worker_thread) {\nLOG.info(\"Default iceberg worker thread number changed \" + Config.iceberg_worker_num_threads);\nProperties props = System.getProperties();\nprops.setProperty(ThreadPools.WORKER_THREAD_POOL_SIZE_PROP, String.valueOf(Config.iceberg_worker_num_threads));\n}\nswitch (nativeCatalogType) {\ncase HIVE_CATALOG:\nreturn new IcebergHiveCatalog(catalogName, conf, properties);\ncase GLUE_CATALOG:\nreturn new IcebergGlueCatalog(catalogName, conf, properties);\ncase REST_CATALOG:\nreturn new IcebergRESTCatalog(catalogName, conf, properties);\ndefault:\nthrow new StarRocksConnectorException(\"Property %s is missing or not supported now.\", ICEBERG_CATALOG_TYPE);\n}\n}\nprivate IcebergCatalogType getNativeCatalogType() {\nString nativeCatalogTypeStr = properties.get(ICEBERG_CATALOG_TYPE);\nif (Strings.isNullOrEmpty(nativeCatalogTypeStr)) 
{\nnativeCatalogTypeStr = properties.get(ICEBERG_CATALOG_LEGACY);\n}\nif (Strings.isNullOrEmpty(nativeCatalogTypeStr)) {\nthrow new StarRocksConnectorException(\"Can't find iceberg native catalog type. You must specify the\" +\n\" 'iceberg.catalog.type' property when creating an iceberg catalog in the catalog properties\");\n}\nreturn IcebergCatalogType.fromString(nativeCatalogTypeStr);\n}\n@Override\npublic ConnectorMetadata getMetadata() {\nreturn new IcebergMetadata(catalogName, hdfsEnvironment, getNativeCatalog(),\nbuildIcebergJobPlanningExecutor(), buildRefreshOtherFeExecutor());\n}\npublic IcebergCatalog getNativeCatalog() {\nif (icebergNativeCatalog == null) {\nIcebergCatalog nativeCatalog = buildIcebergNativeCatalog();\nboolean enableMetadataCache = Boolean.parseBoolean(\nproperties.getOrDefault(\"enable_iceberg_metadata_cache\", \"true\"));\nif (enableMetadataCache && !isResourceMappingCatalog(catalogName)) {\nlong ttl = Long.parseLong(properties.getOrDefault(\"iceberg_meta_cache_ttl_sec\", \"1800\"));\nnativeCatalog = new CachingIcebergCatalog(nativeCatalog, ttl, buildBackgroundJobPlanningExecutor());\nGlobalStateMgr.getCurrentState().getConnectorTableMetadataProcessor()\n.registerCachingIcebergCatalog(catalogName, nativeCatalog);\n}\nthis.icebergNativeCatalog = nativeCatalog;\n}\nreturn icebergNativeCatalog;\n}\nprivate ExecutorService buildIcebergJobPlanningExecutor() {\nif (icebergJobPlanningExecutor == null) {\nint poolSize = Math.max(2, Integer.parseInt(properties.getOrDefault(\"iceberg_job_planning_thread_num\",\nString.valueOf(Config.iceberg_worker_num_threads))));\nicebergJobPlanningExecutor = newWorkerPool(catalogName + \"-sr-iceberg-worker-pool\", poolSize);\n}\nreturn icebergJobPlanningExecutor;\n}\npublic ExecutorService buildRefreshOtherFeExecutor() {\nif (refreshOtherFeExecutor == null) {\nint threadSize = Math.max(2, Integer.parseInt(\nproperties.getOrDefault(\"refresh-other-fe-iceberg-cache-thread-num\", \"4\")));\nrefreshOtherFeExecutor = newWorkerPool(catalogName + \"-refresh-others-fe-iceberg-metadata-cache\", threadSize);\n}\nreturn refreshOtherFeExecutor;\n}\n@Override\npublic void shutdown() {\nGlobalStateMgr.getCurrentState().getConnectorTableMetadataProcessor().unRegisterCachingIcebergCatalog(catalogName);\nif (icebergJobPlanningExecutor != null) {\nicebergJobPlanningExecutor.shutdown();\n}\n}\n}", + "context_after": "class IcebergConnector implements Connector {\nprivate static final Logger LOG = LogManager.getLogger(IcebergConnector.class);\npublic static final String ICEBERG_CATALOG_TYPE = \"iceberg.catalog.type\";\n@Deprecated\npublic static final String ICEBERG_CATALOG_LEGACY = \"starrocks.catalog-type\";\n@Deprecated\npublic static final String ICEBERG_METASTORE_URIS = \"iceberg.catalog.hive.metastore.uris\";\npublic static final String HIVE_METASTORE_URIS = \"hive.metastore.uris\";\npublic static final String ICEBERG_CUSTOM_PROPERTIES_PREFIX = \"iceberg.catalog.\";\nprivate final Map properties;\nprivate final HdfsEnvironment hdfsEnvironment;\nprivate final String catalogName;\nprivate IcebergCatalog icebergNativeCatalog;\nprivate ExecutorService icebergJobPlanningExecutor;\nprivate ExecutorService refreshOtherFeExecutor;\npublic IcebergConnector(ConnectorContext context) {\nthis.catalogName = context.getCatalogName();\nthis.properties = context.getProperties();\nCloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(properties);\nthis.hdfsEnvironment = new HdfsEnvironment(cloudConfiguration);\n}\nprivate 
IcebergCatalog buildIcebergNativeCatalog() {\nIcebergCatalogType nativeCatalogType = getNativeCatalogType();\nConfiguration conf = hdfsEnvironment.getConfiguration();\nif (Config.enable_iceberg_custom_worker_thread) {\nLOG.info(\"Default iceberg worker thread number changed \" + Config.iceberg_worker_num_threads);\nProperties props = System.getProperties();\nprops.setProperty(ThreadPools.WORKER_THREAD_POOL_SIZE_PROP, String.valueOf(Config.iceberg_worker_num_threads));\n}\nswitch (nativeCatalogType) {\ncase HIVE_CATALOG:\nreturn new IcebergHiveCatalog(catalogName, conf, properties);\ncase GLUE_CATALOG:\nreturn new IcebergGlueCatalog(catalogName, conf, properties);\ncase REST_CATALOG:\nreturn new IcebergRESTCatalog(catalogName, conf, properties);\ndefault:\nthrow new StarRocksConnectorException(\"Property %s is missing or not supported now.\", ICEBERG_CATALOG_TYPE);\n}\n}\nprivate IcebergCatalogType getNativeCatalogType() {\nString nativeCatalogTypeStr = properties.get(ICEBERG_CATALOG_TYPE);\nif (Strings.isNullOrEmpty(nativeCatalogTypeStr)) {\nnativeCatalogTypeStr = properties.get(ICEBERG_CATALOG_LEGACY);\n}\nif (Strings.isNullOrEmpty(nativeCatalogTypeStr)) {\nthrow new StarRocksConnectorException(\"Can't find iceberg native catalog type. You must specify the\" +\n\" 'iceberg.catalog.type' property when creating an iceberg catalog in the catalog properties\");\n}\nreturn IcebergCatalogType.fromString(nativeCatalogTypeStr);\n}\n@Override\npublic ConnectorMetadata getMetadata() {\nreturn new IcebergMetadata(catalogName, hdfsEnvironment, getNativeCatalog(),\nbuildIcebergJobPlanningExecutor(), buildRefreshOtherFeExecutor());\n}\npublic IcebergCatalog getNativeCatalog() {\nif (icebergNativeCatalog == null) {\nIcebergCatalog nativeCatalog = buildIcebergNativeCatalog();\nboolean enableMetadataCache = Boolean.parseBoolean(\nproperties.getOrDefault(\"enable_iceberg_metadata_cache\", \"true\"));\nif (enableMetadataCache && !isResourceMappingCatalog(catalogName)) {\nlong ttl = Long.parseLong(properties.getOrDefault(\"iceberg_meta_cache_ttl_sec\", \"1800\"));\nnativeCatalog = new CachingIcebergCatalog(nativeCatalog, ttl, buildBackgroundJobPlanningExecutor());\nGlobalStateMgr.getCurrentState().getConnectorTableMetadataProcessor()\n.registerCachingIcebergCatalog(catalogName, nativeCatalog);\n}\nthis.icebergNativeCatalog = nativeCatalog;\n}\nreturn icebergNativeCatalog;\n}\nprivate ExecutorService buildIcebergJobPlanningExecutor() {\nif (icebergJobPlanningExecutor == null) {\nint poolSize = Math.max(2, Integer.parseInt(properties.getOrDefault(\"iceberg_job_planning_thread_num\",\nString.valueOf(Config.iceberg_worker_num_threads))));\nicebergJobPlanningExecutor = newWorkerPool(catalogName + \"-sr-iceberg-worker-pool\", poolSize);\n}\nreturn icebergJobPlanningExecutor;\n}\npublic ExecutorService buildRefreshOtherFeExecutor() {\nif (refreshOtherFeExecutor == null) {\nint threadSize = Math.max(2, Integer.parseInt(\nproperties.getOrDefault(\"refresh-other-fe-iceberg-cache-thread-num\", \"4\")));\nrefreshOtherFeExecutor = newWorkerPool(catalogName + \"-refresh-others-fe-iceberg-metadata-cache\", threadSize);\n}\nreturn refreshOtherFeExecutor;\n}\n@Override\npublic void shutdown() {\nGlobalStateMgr.getCurrentState().getConnectorTableMetadataProcessor().unRegisterCachingIcebergCatalog(catalogName);\nif (icebergJobPlanningExecutor != null) {\nicebergJobPlanningExecutor.shutdown();\n}\nif (refreshOtherFeExecutor != null) {\nrefreshOtherFeExecutor.shutdown();\n}\n}\n}" + }, + { + "comment": "> One other reason that 
could explain that it works is that some execution engines write at parallelism one resulting in a single instance of the write DoFn", + "method_body": "public void teardown() throws Exception {\nif (producer != null) {\nif (producer.getOutstandingRecordsCount() > 0) {\nproducer.flushSync();\n}\nproducer.destroy();\n}\nproducer = null;\n}", + "target_code": "producer = null;", + "method_body_after": "public void teardown() throws Exception {\nteardownSharedProducer();\n}", + "context_before": "class KinesisWriterFn extends DoFn<byte[], Void> {\nprivate static final int MAX_NUM_FAILURES = 10;\nprivate final KinesisIO.Write spec;\nprivate static transient IKinesisProducer producer;\nprivate transient KinesisPartitioner partitioner;\nprivate transient LinkedBlockingDeque<KinesisWriteException> failures;\nprivate transient List<Future<UserRecordResult>> putFutures;\nKinesisWriterFn(KinesisIO.Write spec) {\nthis.spec = spec;\ninitKinesisProducer();\n}\n@Setup\npublic void setup() {\nif (spec.getPartitioner() != null) {\npartitioner = spec.getPartitioner();\n}\n}\n@StartBundle\npublic void startBundle() {\nputFutures = Collections.synchronizedList(new ArrayList<>());\n/** Keep only the first {@link MAX_NUM_FAILURES} occurred exceptions */\nfailures = new LinkedBlockingDeque<>(MAX_NUM_FAILURES);\ninitKinesisProducer();\n}\nprivate synchronized void initKinesisProducer() {\nif (producer == null) {\nProperties props = spec.getProducerProperties();\nif (props == null) {\nprops = new Properties();\n}\nKinesisProducerConfiguration config = KinesisProducerConfiguration.fromProperties(props);\nproducer = spec.getAWSClientsProvider().createKinesisProducer(config);\n}\n}\nprivate void readObject(ObjectInputStream is) throws IOException, ClassNotFoundException {\nis.defaultReadObject();\ninitKinesisProducer();\n}\n/**\n* It adds a record asynchronously which then should be delivered by Kinesis producer in\n* background (Kinesis producer forks native processes to do this job).\n*\n*
<p>The records can be batched and then they will be sent in one HTTP request. Amazon KPL\n* supports two types of batching - aggregation and collection - and they can be configured by\n* producer properties.\n*\n*
More details can be found here: and \n*/\n@ProcessElement\npublic void processElement(ProcessContext c) {\nByteBuffer data = ByteBuffer.wrap(c.element());\nString partitionKey = spec.getPartitionKey();\nString explicitHashKey = null;\nif (partitioner != null) {\npartitionKey = partitioner.getPartitionKey(c.element());\nexplicitHashKey = partitioner.getExplicitHashKey(c.element());\n}\nListenableFuture f =\nproducer.addUserRecord(spec.getStreamName(), partitionKey, explicitHashKey, data);\nputFutures.add(f);\n}\n@FinishBundle\npublic void finishBundle() throws Exception {\nflushBundle();\n}\n/**\n* Flush outstanding records until the total number of failed records will be less than 0 or\n* the number of retries will be exhausted. The retry timeout starts from 1 second and it\n* doubles on every iteration.\n*/\nprivate void flushBundle() throws InterruptedException, ExecutionException, IOException {\nint retries = spec.getRetries();\nint numFailedRecords;\nint retryTimeout = 1000;\nString message = \"\";\ndo {\nnumFailedRecords = 0;\nproducer.flush();\nfor (Future f : putFutures) {\nUserRecordResult result = f.get();\nif (!result.isSuccessful()) {\nnumFailedRecords++;\n}\n}\nThread.sleep(retryTimeout);\nretryTimeout *= 2;\n} while (numFailedRecords > 0 && retries-- > 0);\nif (numFailedRecords > 0) {\nfor (Future f : putFutures) {\nUserRecordResult result = f.get();\nif (!result.isSuccessful()) {\nfailures.offer(\nnew KinesisWriteException(\n\"Put record was not successful.\", new UserRecordFailedException(result)));\n}\n}\nmessage =\nString.format(\n\"After [%d] retries, number of failed records [%d] is still greater than 0\",\nspec.getRetries(), numFailedRecords);\nLOG.error(message);\n}\ncheckForFailures(message);\n}\n/** If any write has asynchronously failed, fail the bundle with a useful error. */\nprivate void checkForFailures(String message) throws IOException {\nif (failures.isEmpty()) {\nreturn;\n}\nStringBuilder logEntry = new StringBuilder();\nlogEntry.append(message).append(System.lineSeparator());\nint i = 0;\nwhile (!failures.isEmpty()) {\ni++;\nKinesisWriteException exc = failures.remove();\nlogEntry.append(System.lineSeparator()).append(exc.getMessage());\nThrowable cause = exc.getCause();\nif (cause != null) {\nlogEntry.append(\": \").append(cause.getMessage());\nif (cause instanceof UserRecordFailedException) {\nList attempts =\n((UserRecordFailedException) cause).getResult().getAttempts();\nfor (Attempt attempt : attempts) {\nif (attempt.getErrorMessage() != null) {\nlogEntry.append(System.lineSeparator()).append(attempt.getErrorMessage());\n}\n}\n}\n}\n}\nString errorMessage =\nString.format(\n\"Some errors occurred writing to Kinesis. First %d errors: %s\",\ni, logEntry.toString());\nthrow new IOException(errorMessage);\n}\n@Teardown\n}", + "context_after": "class KinesisWriterFn extends DoFn {\nprivate static final int MAX_NUM_FAILURES = 10;\n/** Usage count of static, shared Kinesis producer. */\nprivate static int producerRefCount = 0;\n/** Static, shared Kinesis producer. */\nprivate static IKinesisProducer producer;\nprivate final KinesisIO.Write spec;\nprivate transient KinesisPartitioner partitioner;\nprivate transient LinkedBlockingDeque failures;\nprivate transient List> putFutures;\nKinesisWriterFn(KinesisIO.Write spec) {\nthis.spec = spec;\n}\n/**\n* Initialize statically shared Kinesis producer if required and count usage.\n*\n*
<p>NOTE: If there is, for whatever reasons, another instance of a {@link KinesisWriterFn}\n* with different producer properties or even a different implementation of {@link\n* AWSClientsProvider}, these changes will be silently discarded in favor of an existing\n* producer instance.\n*/\nprivate void setupSharedProducer() {\nsynchronized (KinesisWriterFn.class) {\nif (producer == null) {\nproducer =\nspec.getAWSClientsProvider()\n.createKinesisProducer(spec.createProducerConfiguration());\nproducerRefCount = 0;\n}\nproducerRefCount++;\n}\n}\n/**\n* Discard statically shared producer if it is not used anymore according to the usage count.\n*/\nprivate void teardownSharedProducer() {\nIKinesisProducer obsolete = null;\nsynchronized (KinesisWriterFn.class) {\nif (--producerRefCount == 0) {\nobsolete = producer;\nproducer = null;\n}\n}\nif (obsolete != null) {\nobsolete.flushSync();\nobsolete.destroy();\n}\n}\n@Setup\npublic void setup() {\nsetupSharedProducer();\nif (spec.getPartitioner() != null) {\npartitioner = spec.getPartitioner();\n}\n}\n@StartBundle\npublic void startBundle() {\nputFutures = Collections.synchronizedList(new ArrayList<>());\n/** Keep only the first {@link MAX_NUM_FAILURES} occurred exceptions */\nfailures = new LinkedBlockingDeque<>(MAX_NUM_FAILURES);\n}\n/**\n* It adds a record asynchronously which then should be delivered by Kinesis producer in\n* background (Kinesis producer forks native processes to do this job).\n*\n*
<p>The records can be batched and then they will be sent in one HTTP request. Amazon KPL\n* supports two types of batching - aggregation and collection - and they can be configured by\n* producer properties.\n*\n*
More details can be found here: and \n*/\n@ProcessElement\npublic void processElement(ProcessContext c) {\nByteBuffer data = ByteBuffer.wrap(c.element());\nString partitionKey = spec.getPartitionKey();\nString explicitHashKey = null;\nif (partitioner != null) {\npartitionKey = partitioner.getPartitionKey(c.element());\nexplicitHashKey = partitioner.getExplicitHashKey(c.element());\n}\nListenableFuture f =\nproducer.addUserRecord(spec.getStreamName(), partitionKey, explicitHashKey, data);\nputFutures.add(f);\n}\n@FinishBundle\npublic void finishBundle() throws Exception {\nflushBundle();\n}\n/**\n* Flush outstanding records until the total number of failed records will be less than 0 or\n* the number of retries will be exhausted. The retry timeout starts from 1 second and it\n* doubles on every iteration.\n*/\nprivate void flushBundle() throws InterruptedException, ExecutionException, IOException {\nint retries = spec.getRetries();\nint numFailedRecords;\nint retryTimeout = 1000;\nString message = \"\";\ndo {\nnumFailedRecords = 0;\nproducer.flush();\nfor (Future f : putFutures) {\nUserRecordResult result = f.get();\nif (!result.isSuccessful()) {\nnumFailedRecords++;\n}\n}\nThread.sleep(retryTimeout);\nretryTimeout *= 2;\n} while (numFailedRecords > 0 && retries-- > 0);\nif (numFailedRecords > 0) {\nfor (Future f : putFutures) {\nUserRecordResult result = f.get();\nif (!result.isSuccessful()) {\nfailures.offer(\nnew KinesisWriteException(\n\"Put record was not successful.\", new UserRecordFailedException(result)));\n}\n}\nmessage =\nString.format(\n\"After [%d] retries, number of failed records [%d] is still greater than 0\",\nspec.getRetries(), numFailedRecords);\nLOG.error(message);\n}\ncheckForFailures(message);\n}\n/** If any write has asynchronously failed, fail the bundle with a useful error. */\nprivate void checkForFailures(String message) throws IOException {\nif (failures.isEmpty()) {\nreturn;\n}\nStringBuilder logEntry = new StringBuilder();\nlogEntry.append(message).append(System.lineSeparator());\nint i = 0;\nwhile (!failures.isEmpty()) {\ni++;\nKinesisWriteException exc = failures.remove();\nlogEntry.append(System.lineSeparator()).append(exc.getMessage());\nThrowable cause = exc.getCause();\nif (cause != null) {\nlogEntry.append(\": \").append(cause.getMessage());\nif (cause instanceof UserRecordFailedException) {\nList attempts =\n((UserRecordFailedException) cause).getResult().getAttempts();\nfor (Attempt attempt : attempts) {\nif (attempt.getErrorMessage() != null) {\nlogEntry.append(System.lineSeparator()).append(attempt.getErrorMessage());\n}\n}\n}\n}\n}\nString errorMessage =\nString.format(\n\"Some errors occurred writing to Kinesis. 
First %d errors: %s\",\ni, logEntry.toString());\nthrow new IOException(errorMessage);\n}\n@Teardown\n}" + }, + { + "comment": "```suggestion TabletMeta tabletMeta = Catalog.getCurrentInvertedIndex().getTabletMeta(entry.getKey()); ```", + "method_body": "public TUpdateTabletMetaInfoReq toThrift() {\nTUpdateTabletMetaInfoReq updateTabletMetaInfoReq = new TUpdateTabletMetaInfoReq();\nList metaInfos = Lists.newArrayList();\nint tabletEntryNum = 0;\nfor (Map.Entry entry : tabletWithoutPartitionId.entries()) {\nif (tabletEntryNum > 10000) {\nbreak;\n}\nTTabletMetaInfo metaInfo = new TTabletMetaInfo();\nmetaInfo.setTablet_id(entry.getKey());\nmetaInfo.setSchema_hash(entry.getValue());\nTabletMeta tabletMeta = Catalog.getInstance().getTabletInvertedIndex().getTabletMeta(entry.getKey());\nif (tabletMeta == null) {\nLOG.warn(\"could not find tablet [{}] in meta ignore it\", entry.getKey());\ncontinue;\n}\nmetaInfo.setPartition_id(tabletMeta.getPartitionId());\nmetaInfos.add(metaInfo);\n++tabletEntryNum;\n}\nupdateTabletMetaInfoReq.setTabletMetaInfos(metaInfos);\nreturn updateTabletMetaInfoReq;\n}", + "target_code": "TabletMeta tabletMeta = Catalog.getInstance().getTabletInvertedIndex().getTabletMeta(entry.getKey());", + "method_body_after": "public TUpdateTabletMetaInfoReq toThrift() {\nTUpdateTabletMetaInfoReq updateTabletMetaInfoReq = new TUpdateTabletMetaInfoReq();\nList metaInfos = Lists.newArrayList();\nint tabletEntryNum = 0;\nfor (Map.Entry entry : tabletWithoutPartitionId.entries()) {\nif (tabletEntryNum > 10000) {\nbreak;\n}\nTTabletMetaInfo metaInfo = new TTabletMetaInfo();\nmetaInfo.setTablet_id(entry.getKey());\nmetaInfo.setSchema_hash(entry.getValue());\nTabletMeta tabletMeta = Catalog.getInstance().getTabletInvertedIndex().getTabletMeta(entry.getKey());\nif (tabletMeta == null) {\nLOG.warn(\"could not find tablet [{}] in meta ignore it\", entry.getKey());\ncontinue;\n}\nmetaInfo.setPartition_id(tabletMeta.getPartitionId());\nmetaInfos.add(metaInfo);\n++tabletEntryNum;\n}\nupdateTabletMetaInfoReq.setTabletMetaInfos(metaInfos);\nreturn updateTabletMetaInfoReq;\n}", + "context_before": "class UpdateTabletMetaInfoTask extends AgentTask {\nprivate static final Logger LOG = LogManager.getLogger(ClearTransactionTask.class);\nprivate SetMultimap tabletWithoutPartitionId;\npublic UpdateTabletMetaInfoTask(long backendId, SetMultimap tabletWithoutPartitionId) {\nsuper(null, backendId, TTaskType.UPDATE_TABLET_META_INFO, -1L, -1L, -1L, -1L, -1L, backendId);\nthis.tabletWithoutPartitionId = tabletWithoutPartitionId;\n}\n}", + "context_after": "class UpdateTabletMetaInfoTask extends AgentTask {\nprivate static final Logger LOG = LogManager.getLogger(ClearTransactionTask.class);\nprivate SetMultimap tabletWithoutPartitionId;\npublic UpdateTabletMetaInfoTask(long backendId, SetMultimap tabletWithoutPartitionId) {\nsuper(null, backendId, TTaskType.UPDATE_TABLET_META_INFO, -1L, -1L, -1L, -1L, -1L, backendId);\nthis.tabletWithoutPartitionId = tabletWithoutPartitionId;\n}\n}" + }, + { + "comment": "nit: flatten nested `else { if {} }` to `else if {}`?", + "method_body": "public InputStatus pollNext(ReaderOutput sourceOutput) throws Exception {\nboolean finished = !waitingForMoreSplits;\ncurrentSplitIndex = 0;\nwhile (currentSplitIndex < assignedSplits.size()\n&& !assignedSplits.get(currentSplitIndex).isAvailable()) {\nfinished &= assignedSplits.get(currentSplitIndex).isFinished();\ncurrentSplitIndex++;\n}\nif (currentSplitIndex < assignedSplits.size()) 
{\nsourceOutput.collect(assignedSplits.get(currentSplitIndex).getNext(false)[0]);\nreturn InputStatus.MORE_AVAILABLE;\n} else {\nif (finished) {\nreturn InputStatus.END_OF_INPUT;\n}\nelse {\nif (markIdleOnNoSplits) {\nsourceOutput.markIdle();\n}\nmarkUnavailable();\nreturn InputStatus.NOTHING_AVAILABLE;\n}\n}\n}", + "target_code": "if (finished) {", + "method_body_after": "public InputStatus pollNext(ReaderOutput sourceOutput) throws Exception {\nboolean finished = !waitingForMoreSplits;\ncurrentSplitIndex = 0;\nwhile (currentSplitIndex < assignedSplits.size()\n&& !assignedSplits.get(currentSplitIndex).isAvailable()) {\nfinished &= assignedSplits.get(currentSplitIndex).isFinished();\ncurrentSplitIndex++;\n}\nif (currentSplitIndex < assignedSplits.size()) {\nsourceOutput.collect(assignedSplits.get(currentSplitIndex).getNext(false)[0]);\nreturn InputStatus.MORE_AVAILABLE;\n} else if (finished) {\nreturn InputStatus.END_OF_INPUT;\n}\nelse {\nif (markIdleOnNoSplits) {\nsourceOutput.markIdle();\n}\nmarkUnavailable();\nreturn InputStatus.NOTHING_AVAILABLE;\n}\n}", + "context_before": "class MockSourceReader implements SourceReader {\nprivate final List assignedSplits = new ArrayList<>();\nprivate final List receivedSourceEvents = new ArrayList<>();\nprivate final boolean markIdleOnNoSplits;\nprivate int currentSplitIndex = 0;\nprivate boolean started;\nprivate boolean closed;\nprivate boolean waitingForMoreSplits;\n@GuardedBy(\"this\")\nprivate CompletableFuture availableFuture;\npublic MockSourceReader() {\nthis(false, false);\n}\npublic MockSourceReader(boolean waitingForMoreSplits, boolean markIdleOnNoSplits) {\nthis.started = false;\nthis.closed = false;\nthis.availableFuture = CompletableFuture.completedFuture(null);\nthis.waitingForMoreSplits = waitingForMoreSplits;\nthis.markIdleOnNoSplits = markIdleOnNoSplits;\n}\n@Override\npublic void start() {\nthis.started = true;\n}\n@Override\n@Override\npublic List snapshotState() {\nreturn assignedSplits;\n}\n@Override\npublic synchronized CompletableFuture isAvailable() {\nreturn availableFuture;\n}\n@Override\npublic void addSplits(List splits) {\nassignedSplits.addAll(splits);\nmarkAvailable();\n}\n@Override\npublic void handleSourceEvents(SourceEvent sourceEvent) {\nif (sourceEvent instanceof MockNoMoreSplitsEvent) {\nwaitingForMoreSplits = false;\nmarkAvailable();\n}\nreceivedSourceEvents.add(sourceEvent);\n}\n@Override\npublic void close() throws Exception {\nthis.closed = true;\n}\nprivate synchronized void markUnavailable() {\nif (availableFuture.isDone()) {\navailableFuture = new CompletableFuture<>();\n}\n}\npublic void markAvailable() {\nCompletableFuture toNotify = null;\nsynchronized (this) {\nif (!availableFuture.isDone()) {\ntoNotify = availableFuture;\n}\n}\nif (toNotify != null) {\ntoNotify.complete(null);\n}\n}\npublic boolean isStarted() {\nreturn started;\n}\npublic boolean isClosed() {\nreturn closed;\n}\npublic List getAssignedSplits() {\nreturn assignedSplits;\n}\npublic List getReceivedSourceEvents() {\nreturn receivedSourceEvents;\n}\n/**\n* Simple event allowing {@link MockSourceReader} to finish when requested.\n*/\npublic static class MockNoMoreSplitsEvent implements SourceEvent {\n}\n}", + "context_after": "class MockSourceReader implements SourceReader {\nprivate final List assignedSplits = new ArrayList<>();\nprivate final List receivedSourceEvents = new ArrayList<>();\nprivate final boolean markIdleOnNoSplits;\nprivate int currentSplitIndex = 0;\nprivate boolean started;\nprivate boolean closed;\nprivate boolean 
waitingForMoreSplits;\n@GuardedBy(\"this\")\nprivate CompletableFuture availableFuture;\npublic MockSourceReader() {\nthis(false, false);\n}\npublic MockSourceReader(boolean waitingForMoreSplits, boolean markIdleOnNoSplits) {\nthis.started = false;\nthis.closed = false;\nthis.availableFuture = CompletableFuture.completedFuture(null);\nthis.waitingForMoreSplits = waitingForMoreSplits;\nthis.markIdleOnNoSplits = markIdleOnNoSplits;\n}\n@Override\npublic void start() {\nthis.started = true;\n}\n@Override\n@Override\npublic List snapshotState() {\nreturn assignedSplits;\n}\n@Override\npublic synchronized CompletableFuture isAvailable() {\nreturn availableFuture;\n}\n@Override\npublic void addSplits(List splits) {\nassignedSplits.addAll(splits);\nmarkAvailable();\n}\n@Override\npublic void handleSourceEvents(SourceEvent sourceEvent) {\nif (sourceEvent instanceof MockNoMoreSplitsEvent) {\nwaitingForMoreSplits = false;\nmarkAvailable();\n}\nreceivedSourceEvents.add(sourceEvent);\n}\n@Override\npublic void close() throws Exception {\nthis.closed = true;\n}\nprivate synchronized void markUnavailable() {\nif (availableFuture.isDone()) {\navailableFuture = new CompletableFuture<>();\n}\n}\npublic void markAvailable() {\nCompletableFuture toNotify = null;\nsynchronized (this) {\nif (!availableFuture.isDone()) {\ntoNotify = availableFuture;\n}\n}\nif (toNotify != null) {\ntoNotify.complete(null);\n}\n}\npublic boolean isStarted() {\nreturn started;\n}\npublic boolean isClosed() {\nreturn closed;\n}\npublic List getAssignedSplits() {\nreturn assignedSplits;\n}\npublic List getReceivedSourceEvents() {\nreturn receivedSourceEvents;\n}\n/**\n* Simple event allowing {@link MockSourceReader} to finish when requested.\n*/\npublic static class MockNoMoreSplitsEvent implements SourceEvent {\n}\n}" + }, + { + "comment": "Great point, will fix the test by counting the partition key range ids. 
", + "method_body": "public void queryDiagnosticsOnOrderBy() {\nString containerId = \"testcontainer\";\ncosmosAsyncDatabase.createContainer(containerId, \"/mypk\",\nThroughputProperties.createManualThroughput(40000)).block();\nCosmosAsyncContainer testcontainer = cosmosAsyncDatabase.getContainer(containerId);\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\ntestcontainer.createItem(getInternalObjectNode()).block();\noptions.setMaxDegreeOfParallelism(-1);\nString query = \"SELECT * from c ORDER BY c._ts DESC\";\nCosmosPagedFlux cosmosPagedFlux = testcontainer.queryItems(query, options,\nInternalObjectNode.class);\nAtomicInteger counterPkRid = new AtomicInteger();\nAtomicInteger counterPartitionKeyRangeId = new AtomicInteger();\ncosmosPagedFlux.byPage().flatMap(feedResponse -> {\nString cosmosDiagnosticsString = feedResponse.getCosmosDiagnostics().toString();\nPattern pattern = Pattern.compile(\"\\\"partitionKeyRangeId\\\":\\\"\");\nMatcher matcher = pattern.matcher(cosmosDiagnosticsString);\nwhile (matcher.find()) {\ncounterPartitionKeyRangeId.incrementAndGet();\n}\npattern = Pattern.compile(\"pkrId:\");\nmatcher = pattern.matcher(cosmosDiagnosticsString);\nwhile (matcher.find()) {\ncounterPkRid.incrementAndGet();\n}\nreturn Flux.just(feedResponse);\n}).blockLast();\nassertThat(counterPkRid.get() * 2).isEqualTo(counterPartitionKeyRangeId.get());\ndeleteCollection(testcontainer);\n}", + "target_code": "Pattern pattern = Pattern.compile(\"\\\"partitionKeyRangeId\\\":\\\"\");", + "method_body_after": "public void queryDiagnosticsOnOrderBy() {\nString containerId = \"testcontainer\";\ncosmosAsyncDatabase.createContainer(containerId, \"/mypk\",\nThroughputProperties.createManualThroughput(40000)).block();\nCosmosAsyncContainer testcontainer = cosmosAsyncDatabase.getContainer(containerId);\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\noptions.setConsistencyLevel(ConsistencyLevel.EVENTUAL);\ntestcontainer.createItem(getInternalObjectNode()).block();\noptions.setMaxDegreeOfParallelism(-1);\nString query = \"SELECT * from c ORDER BY c._ts DESC\";\nCosmosPagedFlux cosmosPagedFlux = testcontainer.queryItems(query, options,\nInternalObjectNode.class);\nSet partitionKeyRangeIds = new HashSet<>();\nSet pkRids = new HashSet<>();\ncosmosPagedFlux.byPage().flatMap(feedResponse -> {\nString cosmosDiagnosticsString = feedResponse.getCosmosDiagnostics().toString();\nPattern pattern = Pattern.compile(\"(\\\"partitionKeyRangeId\\\":\\\")(\\\\d)\");\nMatcher matcher = pattern.matcher(cosmosDiagnosticsString);\nwhile (matcher.find()) {\nString group = matcher.group(2);\npartitionKeyRangeIds.add(group);\n}\npattern = Pattern.compile(\"(pkrId:)(\\\\d)\");\nmatcher = pattern.matcher(cosmosDiagnosticsString);\nwhile (matcher.find()) {\nString group = matcher.group(2);\npkRids.add(group);\n}\nreturn Flux.just(feedResponse);\n}).blockLast();\nassertThat(pkRids).isNotEmpty();\nassertThat(pkRids).isEqualTo(partitionKeyRangeIds);\ndeleteCollection(testcontainer);\n}", + "context_before": "class CosmosDiagnosticsTest extends TestSuiteBase {\nprivate static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();\nprivate static final DateTimeFormatter RESPONSE_TIME_FORMATTER = DateTimeFormatter.ISO_INSTANT;\nprivate CosmosClient gatewayClient;\nprivate CosmosClient directClient;\nprivate CosmosAsyncDatabase cosmosAsyncDatabase;\nprivate CosmosContainer container;\nprivate CosmosAsyncContainer cosmosAsyncContainer;\n@BeforeClass(groups = {\"simple\"}, timeOut = 
SETUP_TIMEOUT)\npublic void beforeClass() {\nassertThat(this.gatewayClient).isNull();\ngatewayClient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.gatewayMode()\n.buildClient();\ndirectClient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\ncosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(this.gatewayClient.asyncClient());\ncosmosAsyncDatabase = directClient.asyncClient().getDatabase(cosmosAsyncContainer.getDatabase().getId());\ncontainer = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\n}\n@AfterClass(groups = {\"simple\"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)\npublic void afterClass() {\nif (this.gatewayClient != null) {\nthis.gatewayClient.close();\n}\nif (this.directClient != null) {\nthis.directClient.close();\n}\n}\n@DataProvider(name = \"query\")\nprivate Object[][] query() {\nreturn new Object[][]{\nnew Object[] { \"Select * from c where c.id = 'wrongId'\", true },\nnew Object[] { \"Select top 1 * from c where c.id = 'wrongId'\", true },\nnew Object[] { \"Select * from c where c.id = 'wrongId' order by c.id\", true },\nnew Object[] { \"Select count(1) from c where c.id = 'wrongId' group by c.pk\", true },\nnew Object[] { \"Select distinct c.pk from c where c.id = 'wrongId'\", true },\nnew Object[] { \"Select * from c where c.id = 'wrongId'\", false },\nnew Object[] { \"Select top 1 * from c where c.id = 'wrongId'\", false },\nnew Object[] { \"Select * from c where c.id = 'wrongId' order by c.id\", false },\nnew Object[] { \"Select count(1) from c where c.id = 'wrongId' group by c.pk\", false },\nnew Object[] { \"Select distinct c.pk from c where c.id = 'wrongId'\", false },\nnew Object[] { \"Select * from c where c.id = 'wrongId'\", false },\nnew Object[] { \"Select top 1 * from c where c.id = 'wrongId'\", false },\nnew Object[] { \"Select * from c where c.id = 'wrongId' order by c.id\", false },\nnew Object[] { \"Select count(1) from c where c.id = 'wrongId' group by c.pk\", false },\nnew Object[] { \"Select distinct c.pk from c where c.id = 'wrongId'\", false },\n};\n}\n@DataProvider(name = \"readAllItemsOfLogicalPartition\")\nprivate Object[][] readAllItemsOfLogicalPartition() {\nreturn new Object[][]{\nnew Object[] { 1, true },\nnew Object[] { 5, null },\nnew Object[] { 20, null },\nnew Object[] { 1, false },\nnew Object[] { 5, false },\nnew Object[] { 20, false },\n};\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void gatewayDiagnostics() throws Exception {\nCosmosClient testGatewayClient = null;\ntry {\ntestGatewayClient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.gatewayMode()\n.buildClient();\nCosmosContainer container =\ntestGatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = container.createItem(internalObjectNode);\nString diagnostics = 
createResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"GATEWAY\\\"\");\nassertThat(diagnostics).doesNotContain((\"\\\"gatewayStatistics\\\":null\"));\nassertThat(diagnostics).contains(\"\\\"operationType\\\":\\\"Create\\\"\");\nassertThat(diagnostics).contains(\"\\\"metaDataName\\\":\\\"CONTAINER_LOOK_UP\\\"\");\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"PARTITION_KEY_FETCH_SERIALIZATION\\\"\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nassertThat(createResponse.getDiagnostics().getDuration()).isNotNull();\nassertThat(createResponse.getDiagnostics().getContactedRegionNames()).isNotNull();\nvalidateTransportRequestTimelineGateway(diagnostics);\nvalidateRegionContacted(createResponse.getDiagnostics(), testGatewayClient.asyncClient());\nisValidJSON(diagnostics);\n} finally {\nif (testGatewayClient != null) {\ntestGatewayClient.close();\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void gatewayDiagnosticsOnException() throws Exception {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = null;\ntry {\ncreateResponse = this.container.createItem(internalObjectNode);\nCosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();\nModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey(\"wrongPartitionKey\"));\nCosmosItemResponse readResponse =\nthis.container.readItem(BridgeInternal.getProperties(createResponse).getId(),\nnew PartitionKey(\"wrongPartitionKey\"),\nInternalObjectNode.class);\nfail(\"request should fail as partition key is wrong\");\n} catch (CosmosException exception) {\nisValidJSON(exception.toString());\nisValidJSON(exception.getMessage());\nString diagnostics = exception.getDiagnostics().toString();\nassertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"GATEWAY\\\"\");\nassertThat(diagnostics).doesNotContain((\"\\\"gatewayStatistics\\\":null\"));\nassertThat(diagnostics).contains(\"\\\"statusCode\\\":404\");\nassertThat(diagnostics).contains(\"\\\"operationType\\\":\\\"Read\\\"\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nassertThat(diagnostics).doesNotContain((\"\\\"resourceAddress\\\":null\"));\nassertThat(createResponse.getDiagnostics().getContactedRegionNames()).isNotNull();\nvalidateRegionContacted(createResponse.getDiagnostics(), this.container.asyncContainer.getDatabase().getClient());\nassertThat(exception.getDiagnostics().getDuration()).isNotNull();\nvalidateTransportRequestTimelineGateway(diagnostics);\nisValidJSON(diagnostics);\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void systemDiagnosticsForSystemStateInformation() {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = this.container.createItem(internalObjectNode);\nString diagnostics = 
createResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"systemInformation\");\nassertThat(diagnostics).contains(\"usedMemory\");\nassertThat(diagnostics).contains(\"availableMemory\");\nassertThat(diagnostics).contains(\"systemCpuLoad\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nassertThat(createResponse.getDiagnostics().getDuration()).isNotNull();\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void directDiagnostics() throws Exception {\nCosmosClient testDirectClient = null;\ntry {\ntestDirectClient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\nCosmosContainer cosmosContainer =\ntestDirectClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = cosmosContainer.createItem(internalObjectNode);\nString diagnostics = createResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"DIRECT\\\"\");\nassertThat(diagnostics).contains(\"supplementalResponseStatisticsList\");\nassertThat(diagnostics).contains(\"\\\"gatewayStatistics\\\":null\");\nassertThat(diagnostics).contains(\"addressResolutionStatistics\");\nassertThat(diagnostics).contains(\"\\\"metaDataName\\\":\\\"CONTAINER_LOOK_UP\\\"\");\nassertThat(diagnostics).contains(\"\\\"metaDataName\\\":\\\"PARTITION_KEY_RANGE_LOOK_UP\\\"\");\nassertThat(diagnostics).contains(\"\\\"metaDataName\\\":\\\"SERVER_ADDRESS_LOOKUP\\\"\");\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"PARTITION_KEY_FETCH_SERIALIZATION\\\"\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nassertThat(diagnostics).contains(\"\\\"backendLatencyInMs\\\"\");\nassertThat(createResponse.getDiagnostics().getContactedRegionNames()).isNotEmpty();\nassertThat(createResponse.getDiagnostics().getDuration()).isNotNull();\nvalidateTransportRequestTimelineDirect(diagnostics);\nvalidateRegionContacted(createResponse.getDiagnostics(), testDirectClient.asyncClient());\nisValidJSON(diagnostics);\ntry {\ncosmosContainer.createItem(internalObjectNode);\nfail(\"expected 409\");\n} catch (CosmosException e) {\ndiagnostics = e.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"backendLatencyInMs\\\"\");\nvalidateTransportRequestTimelineDirect(e.getDiagnostics().toString());\n}\n} finally {\nif (testDirectClient != null) {\ntestDirectClient.close();\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void queryPlanDiagnostics() throws JsonProcessingException {\nCosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nList itemIdList = new ArrayList<>();\nfor(int i = 0; i< 100; i++) {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = cosmosContainer.createItem(internalObjectNode);\nif(i%20 == 0) {\nitemIdList.add(internalObjectNode.getId());\n}\n}\nString queryDiagnostics = null;\nList queryList = new ArrayList<>();\nqueryList.add(\"Select * from 
c\");\nStringBuilder queryBuilder = new StringBuilder(\"SELECT * from c where c.mypk in (\");\nfor(int i = 0 ; i < itemIdList.size(); i++){\nqueryBuilder.append(\"'\").append(itemIdList.get(i)).append(\"'\");\nif(i < (itemIdList.size()-1)) {\nqueryBuilder.append(\",\");\n} else {\nqueryBuilder.append(\")\");\n}\n}\nqueryList.add(queryBuilder.toString());\nqueryList.add(\"Select * from c where c.id = 'wrongId'\");\nfor(String query : queryList) {\nint feedResponseCounter = 0;\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\noptions.setQueryMetricsEnabled(true);\nIterator> iterator = cosmosContainer.queryItems(query, options, InternalObjectNode.class).iterableByPage().iterator();\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nqueryDiagnostics = feedResponse.getCosmosDiagnostics().toString();\nif (feedResponseCounter == 0) {\nassertThat(queryDiagnostics).contains(\"QueryPlan Start Time (UTC)=\");\nassertThat(queryDiagnostics).contains(\"QueryPlan End Time (UTC)=\");\nassertThat(queryDiagnostics).contains(\"QueryPlan Duration (ms)=\");\nString requestTimeLine = OBJECT_MAPPER.writeValueAsString(feedResponse.getCosmosDiagnostics().getFeedResponseDiagnostics().getQueryPlanDiagnosticsContext().getRequestTimeline());\nassertThat(requestTimeLine).contains(\"connectionConfigured\");\nassertThat(requestTimeLine).contains(\"requestSent\");\nassertThat(requestTimeLine).contains(\"transitTime\");\nassertThat(requestTimeLine).contains(\"received\");\n} else {\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan Start Time (UTC)=\");\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan End Time (UTC)=\");\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan Duration (ms)=\");\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan RequestTimeline =\");\n}\nfeedResponseCounter++;\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void queryMetricsWithIndexMetrics() {\nCosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nList itemIdList = new ArrayList<>();\nfor(int i = 0; i< 100; i++) {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = cosmosContainer.createItem(internalObjectNode);\nif(i%20 == 0) {\nitemIdList.add(internalObjectNode.getId());\n}\n}\nString queryDiagnostics = null;\nList queryList = new ArrayList<>();\nStringBuilder queryBuilder = new StringBuilder(\"SELECT * from c where c.mypk in (\");\nfor(int i = 0 ; i < itemIdList.size(); i++){\nqueryBuilder.append(\"'\").append(itemIdList.get(i)).append(\"'\");\nif(i < (itemIdList.size()-1)) {\nqueryBuilder.append(\",\");\n} else {\nqueryBuilder.append(\")\");\n}\n}\nqueryList.add(queryBuilder.toString());\nfor (String query : queryList) {\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\noptions.setQueryMetricsEnabled(true);\noptions.setIndexMetricsEnabled(true);\nIterator> iterator = cosmosContainer.queryItems(query, options, InternalObjectNode.class).iterableByPage().iterator();\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nqueryDiagnostics = feedResponse.getCosmosDiagnostics().toString();\nlogger.info(\"This is query diagnostics {}\", queryDiagnostics);\nif (feedResponse.getResponseHeaders().containsKey(HttpConstants.HttpHeaders.INDEX_UTILIZATION)) 
{\nassertThat(feedResponse.getResponseHeaders().get(HttpConstants.HttpHeaders.INDEX_UTILIZATION)).isNotNull();\nassertThat(createFromJSONString(Utils.decodeBase64String(feedResponse.getResponseHeaders().get(HttpConstants.HttpHeaders.INDEX_UTILIZATION))).getUtilizedSingleIndexes()).isNotNull();\n}\n}\n}\n}\n@Test(groups = {\"simple\"}, dataProvider = \"query\", timeOut = TIMEOUT)\npublic void queryMetrics(String query, Boolean qmEnabled) {\nCosmosContainer directContainer =\nthis.directClient.getDatabase(this.cosmosAsyncContainer.getDatabase().getId())\n.getContainer(this.cosmosAsyncContainer.getId());\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\nif (qmEnabled != null) {\noptions.setQueryMetricsEnabled(qmEnabled);\n}\nboolean qroupByFirstResponse = true;\nIterator> iterator = directContainer.queryItems(query, options,\nInternalObjectNode.class).iterableByPage().iterator();\nassertThat(iterator.hasNext()).isTrue();\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nString queryDiagnostics = feedResponse.getCosmosDiagnostics().toString();\nassertThat(feedResponse.getResults().size()).isEqualTo(0);\nif (!query.contains(\"group by\") || qroupByFirstResponse) {\nvalidateQueryDiagnostics(queryDiagnostics, qmEnabled, true);\nvalidateDirectModeQueryDiagnostics(queryDiagnostics);\nif (query.contains(\"group by\")) {\nqroupByFirstResponse = false;\n}\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\nprivate void validateDirectModeQueryDiagnostics(String diagnostics) {\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"DIRECT\\\"\");\nassertThat(diagnostics).contains(\"supplementalResponseStatisticsList\");\nassertThat(diagnostics).contains(\"responseStatisticsList\");\nassertThat(diagnostics).contains(\"\\\"gatewayStatistics\\\":null\");\nassertThat(diagnostics).contains(\"addressResolutionStatistics\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\n}\nprivate void validateGatewayModeQueryDiagnostics(String diagnostics) {\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"GATEWAY\\\"\");\nassertThat(diagnostics).doesNotContain((\"\\\"gatewayStatistics\\\":null\"));\nassertThat(diagnostics).contains(\"\\\"operationType\\\":\\\"Query\\\"\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nassertThat(diagnostics).contains(\"\\\"regionsContacted\\\"\");\n}\n@Test(groups = {\"simple\"}, dataProvider = \"query\", timeOut = TIMEOUT*2)\npublic void queryDiagnosticsGatewayMode(String query, Boolean qmEnabled) {\nCosmosClient testDirectClient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.gatewayMode()\n.buildClient();\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\nCosmosContainer cosmosContainer = testDirectClient.getDatabase(cosmosAsyncContainer.getDatabase().getId())\n.getContainer(cosmosAsyncContainer.getId());\nList itemIdList = new ArrayList<>();\nfor (int i = 0; i < 100; i++) {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = cosmosContainer.createItem(internalObjectNode);\nif (i % 20 == 0) {\nitemIdList.add(internalObjectNode.getId());\n}\n}\nboolean 
qroupByFirstResponse = true;\nif (qmEnabled != null) {\noptions.setQueryMetricsEnabled(qmEnabled);\n}\nIterator> iterator = cosmosContainer\n.queryItems(query, options, InternalObjectNode.class)\n.iterableByPage()\n.iterator();\nassertThat(iterator.hasNext()).isTrue();\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nString queryDiagnostics = feedResponse.getCosmosDiagnostics().toString();\nassertThat(feedResponse.getResults().size()).isEqualTo(0);\nif (!query.contains(\"group by\") || qroupByFirstResponse) {\nvalidateQueryDiagnostics(queryDiagnostics, qmEnabled, true);\nvalidateGatewayModeQueryDiagnostics(queryDiagnostics);\nif (query.contains(\"group by\")) {\nqroupByFirstResponse = false;\n}\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void queryMetricsWithADifferentLocale() {\nLocale.setDefault(Locale.GERMAN);\nString query = \"select * from root where root.id= \\\"someid\\\"\";\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\nIterator> iterator = this.container.queryItems(query, options,\nInternalObjectNode.class)\n.iterableByPage().iterator();\ndouble requestCharge = 0;\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nrequestCharge += feedResponse.getRequestCharge();\n}\nassertThat(requestCharge).isGreaterThan(0);\nLocale.setDefault(Locale.ROOT);\n}\nprivate static void validateQueryDiagnostics(\nString queryDiagnostics,\nBoolean qmEnabled,\nboolean expectQueryPlanDiagnostics) {\nif (qmEnabled == null || qmEnabled) {\nassertThat(queryDiagnostics).contains(\"Retrieved Document Count\");\nassertThat(queryDiagnostics).contains(\"Query Preparation Times\");\nassertThat(queryDiagnostics).contains(\"Runtime Execution Times\");\nassertThat(queryDiagnostics).contains(\"Partition Execution Timeline\");\n} else {\nassertThat(queryDiagnostics).doesNotContain(\"Retrieved Document Count\");\nassertThat(queryDiagnostics).doesNotContain(\"Query Preparation Times\");\nassertThat(queryDiagnostics).doesNotContain(\"Runtime Execution Times\");\nassertThat(queryDiagnostics).doesNotContain(\"Partition Execution Timeline\");\n}\nif (expectQueryPlanDiagnostics) {\nassertThat(queryDiagnostics).contains(\"QueryPlan Start Time (UTC)=\");\nassertThat(queryDiagnostics).contains(\"QueryPlan End Time (UTC)=\");\nassertThat(queryDiagnostics).contains(\"QueryPlan Duration (ms)=\");\n} else {\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan Start Time (UTC)=\");\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan End Time (UTC)=\");\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan Duration (ms)=\");\n}\n}\n@Test(groups = {\"simple\"}, dataProvider = \"readAllItemsOfLogicalPartition\", timeOut = TIMEOUT)\npublic void queryMetricsForReadAllItemsOfLogicalPartition(Integer expectedItemCount, Boolean qmEnabled) {\nString pkValue = UUID.randomUUID().toString();\nfor (int i = 0; i < expectedItemCount; i++) {\nInternalObjectNode internalObjectNode = getInternalObjectNode(pkValue);\nCosmosItemResponse createResponse = container.createItem(internalObjectNode);\n}\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\nif (qmEnabled != null) {\noptions = options.setQueryMetricsEnabled(qmEnabled);\n}\nModelBridgeInternal.setQueryRequestOptionsMaxItemCount(options, 5);\nIterator> iterator =\nthis.container\n.readAllItems(\nnew PartitionKey(pkValue),\noptions,\nInternalObjectNode.class)\n.iterableByPage().iterator();\nassertThat(iterator.hasNext()).isTrue();\nint actualItemCount = 0;\nwhile 
(iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nString queryDiagnostics = feedResponse.getCosmosDiagnostics().toString();\nactualItemCount += feedResponse.getResults().size();\nvalidateQueryDiagnostics(queryDiagnostics, qmEnabled, false);\n}\nassertThat(actualItemCount).isEqualTo(expectedItemCount);\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void directDiagnosticsOnException() throws Exception {\nCosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = null;\nCosmosClient client = null;\ntry {\nclient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\nCosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\ncreateResponse = container.createItem(internalObjectNode);\nCosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();\nModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey(\"wrongPartitionKey\"));\nCosmosItemResponse readResponse =\ncosmosContainer.readItem(BridgeInternal.getProperties(createResponse).getId(),\nnew PartitionKey(\"wrongPartitionKey\"),\nInternalObjectNode.class);\nfail(\"request should fail as partition key is wrong\");\n} catch (CosmosException exception) {\nisValidJSON(exception.toString());\nisValidJSON(exception.getMessage());\nString diagnostics = exception.getDiagnostics().toString();\nassertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"DIRECT\\\"\");\nassertThat(diagnostics).doesNotContain((\"\\\"resourceAddress\\\":null\"));\nassertThat(exception.getDiagnostics().getContactedRegionNames()).isNotEmpty();\nassertThat(exception.getDiagnostics().getDuration()).isNotNull();\nassertThat(diagnostics).contains(\"\\\"backendLatencyInMs\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nisValidJSON(diagnostics);\nvalidateTransportRequestTimelineDirect(diagnostics);\nvalidateRegionContacted(createResponse.getDiagnostics(), client.asyncClient());\n} finally {\nif (client != null) {\nclient.close();\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void directDiagnosticsOnMetadataException() {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosClient client = null;\ntry {\nclient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\nCosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nHttpClient mockHttpClient = Mockito.mock(HttpClient.class);\nMockito.when(mockHttpClient.send(Mockito.any(HttpRequest.class), Mockito.any(Duration.class)))\n.thenReturn(Mono.error(new CosmosException(400, \"TestBadRequest\")));\nRxStoreModel rxGatewayStoreModel = rxGatewayStoreModel = ReflectionUtils.getGatewayProxy((RxDocumentClientImpl) client.asyncClient().getDocClientWrapper());\nReflectionUtils.setGatewayHttpClient(rxGatewayStoreModel, mockHttpClient);\ncontainer.createItem(internalObjectNode);\nfail(\"request should fail as bad 
request\");\n} catch (CosmosException exception) {\nisValidJSON(exception.toString());\nisValidJSON(exception.getMessage());\nString diagnostics = exception.getDiagnostics().toString();\nassertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.BADREQUEST);\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"DIRECT\\\"\");\nassertThat(diagnostics).doesNotContain((\"\\\"resourceAddress\\\":null\"));\nassertThat(diagnostics).contains(\"\\\"resourceType\\\":\\\"DocumentCollection\\\"\");\nassertThat(exception.getDiagnostics().getContactedRegionNames()).isNotEmpty();\nassertThat(exception.getDiagnostics().getDuration()).isNotNull();\nisValidJSON(diagnostics);\n} finally {\nif (client != null) {\nclient.close();\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void supplementalResponseStatisticsList() throws Exception {\nClientSideRequestStatistics clientSideRequestStatistics = new ClientSideRequestStatistics(mockDiagnosticsClientContext(),null);\nfor (int i = 0; i < 15; i++) {\nRxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(mockDiagnosticsClientContext(), OperationType.Head, ResourceType.Document);\nclientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);\n}\nList storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);\nObjectMapper objectMapper = new ObjectMapper();\nString diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);\nJsonNode jsonNode = objectMapper.readTree(diagnostics);\nArrayNode supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get(\"supplementalResponseStatisticsList\");\nassertThat(storeResponseStatistics.size()).isEqualTo(15);\nassertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(10);\nclearStoreResponseStatistics(clientSideRequestStatistics);\nstoreResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);\nassertThat(storeResponseStatistics.size()).isEqualTo(0);\nfor (int i = 0; i < 7; i++) {\nRxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(mockDiagnosticsClientContext(), OperationType.Head, ResourceType.Document);\nclientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);\n}\nstoreResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);\nobjectMapper = new ObjectMapper();\ndiagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);\njsonNode = objectMapper.readTree(diagnostics);\nsupplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get(\"supplementalResponseStatisticsList\");\nassertThat(storeResponseStatistics.size()).isEqualTo(7);\nassertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(7);\nfor(JsonNode node : supplementalResponseStatisticsListNode) {\nassertThat(node.get(\"storeResult\").asText()).isNotNull();\nString requestResponseTimeUTC = node.get(\"requestResponseTimeUTC\").asText();\nInstant instant = Instant.from(RESPONSE_TIME_FORMATTER.parse(requestResponseTimeUTC));\nassertThat(Instant.now().toEpochMilli() - instant.toEpochMilli()).isLessThan(5000);\nassertThat(node.get(\"requestResponseTimeUTC\")).isNotNull();\nassertThat(node.get(\"requestOperationType\")).isNotNull();\nassertThat(node.get(\"requestOperationType\")).isNotNull();\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void serializationOnVariousScenarios() {\nCosmosDatabaseResponse cosmosDatabase = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).read();\nString 
diagnostics = cosmosDatabase.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"DATABASE_DESERIALIZATION\\\"\");\nCosmosContainerResponse containerResponse = this.container.read();\ndiagnostics = containerResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"CONTAINER_DESERIALIZATION\\\"\");\nTestItem testItem = new TestItem();\ntestItem.id = \"TestId\";\ntestItem.mypk = \"TestPk\";\nCosmosItemResponse itemResponse = this.container.createItem(testItem);\ndiagnostics = itemResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"PARTITION_KEY_FETCH_SERIALIZATION\\\"\");\ntestItem.id = \"TestId2\";\ntestItem.mypk = \"TestPk\";\nitemResponse = this.container.createItem(testItem, new PartitionKey(\"TestPk\"), null);\ndiagnostics = itemResponse.getDiagnostics().toString();\nassertThat(diagnostics).doesNotContain(\"\\\"serializationType\\\":\\\"PARTITION_KEY_FETCH_SERIALIZATION\\\"\");\nassertThat(diagnostics).doesNotContain(\"\\\"serializationType\\\":\\\"ITEM_DESERIALIZATION\\\"\");\nTestItem readTestItem = itemResponse.getItem();\ndiagnostics = itemResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"ITEM_DESERIALIZATION\\\"\");\nCosmosItemResponse readItemResponse = this.container.readItem(testItem.id, new PartitionKey(testItem.mypk), null, InternalObjectNode.class);\nInternalObjectNode properties = readItemResponse.getItem();\ndiagnostics = readItemResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"ITEM_DESERIALIZATION\\\"\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void rntbdRequestResponseLengthStatistics() throws Exception {\nTestItem testItem = new TestItem();\ntestItem.id = UUID.randomUUID().toString();\ntestItem.mypk = UUID.randomUUID().toString();\nint testItemLength = OBJECT_MAPPER.writeValueAsBytes(testItem).length;\nCosmosContainer container = directClient.getDatabase(this.cosmosAsyncContainer.getDatabase().getId()).getContainer(this.cosmosAsyncContainer.getId());\nCosmosItemResponse createItemResponse = container.createItem(testItem);\nvalidate(createItemResponse.getDiagnostics(), testItemLength, ModelBridgeInternal.getPayloadLength(createItemResponse));\ntry {\ncontainer.createItem(testItem);\nfail(\"expected to fail due to 409\");\n} catch (CosmosException e) {\nvalidate(e.getDiagnostics(), testItemLength, 0);\n}\nCosmosItemResponse readItemResponse = container.readItem(testItem.id, new PartitionKey(testItem.mypk), TestItem.class);\nvalidate(readItemResponse.getDiagnostics(), 0, ModelBridgeInternal.getPayloadLength(readItemResponse));\nCosmosItemResponse deleteItemResponse = container.deleteItem(testItem, null);\nvalidate(deleteItemResponse.getDiagnostics(), 0, 0);\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void rntbdStatistics() throws Exception {\nInstant beforeClientInitialization = Instant.now();\nCosmosClient client1 = null;\ntry {\nclient1 = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.directMode()\n.buildClient();\nTestItem testItem = new TestItem();\ntestItem.id = UUID.randomUUID().toString();\ntestItem.mypk = UUID.randomUUID().toString();\nint 
testItemLength = OBJECT_MAPPER.writeValueAsBytes(testItem).length;\nCosmosContainer container = client1.getDatabase(this.cosmosAsyncContainer.getDatabase().getId()).getContainer(this.cosmosAsyncContainer.getId());\nThread.sleep(1000);\nInstant beforeInitializingRntbdServiceEndpoint = Instant.now();\nCosmosItemResponse operation1Response = container.upsertItem(testItem);\nInstant afterInitializingRntbdServiceEndpoint = Instant.now();\nThread.sleep(1000);\nInstant beforeOperation2 = Instant.now();\nCosmosItemResponse operation2Response = container.upsertItem(testItem);\nInstant afterOperation2 = Instant.now();\nThread.sleep(1000);\nInstant beforeOperation3 = Instant.now();\nCosmosItemResponse operation3Response = container.upsertItem(testItem);\nInstant afterOperation3 = Instant.now();\nvalidateRntbdStatistics(operation3Response.getDiagnostics(),\nbeforeClientInitialization,\nbeforeInitializingRntbdServiceEndpoint,\nafterInitializingRntbdServiceEndpoint,\nbeforeOperation2,\nafterOperation2,\nbeforeOperation3,\nafterOperation3);\nCosmosItemResponse readItemResponse = container.readItem(testItem.id, new PartitionKey(testItem.mypk), TestItem.class);\nvalidate(readItemResponse.getDiagnostics(), 0, ModelBridgeInternal.getPayloadLength(readItemResponse));\nCosmosItemResponse deleteItemResponse = container.deleteItem(testItem, null);\nvalidate(deleteItemResponse.getDiagnostics(), 0, 0);\n} finally {\nLifeCycleUtils.closeQuietly(client1);\n}\n}\nprivate void validateRntbdStatistics(CosmosDiagnostics cosmosDiagnostics,\nInstant clientInitializationTime,\nInstant beforeInitializingRntbdServiceEndpoint,\nInstant afterInitializingRntbdServiceEndpoint,\nInstant beforeOperation2,\nInstant afterOperation2,\nInstant beforeOperation3,\nInstant afterOperation3) throws Exception {\nObjectNode diagnostics = (ObjectNode) OBJECT_MAPPER.readTree(cosmosDiagnostics.toString());\nJsonNode responseStatisticsList = diagnostics.get(\"responseStatisticsList\");\nassertThat(responseStatisticsList.isArray()).isTrue();\nassertThat(responseStatisticsList.size()).isGreaterThan(0);\nJsonNode storeResult = responseStatisticsList.get(0).get(\"storeResult\");\nassertThat(storeResult).isNotNull();\nboolean hasPayload = storeResult.get(\"exception\").isNull();\nassertThat(storeResult.get(\"channelTaskQueueSize\").asInt(-1)).isGreaterThan(0);\nassertThat(storeResult.get(\"pendingRequestsCount\").asInt(-1)).isGreaterThanOrEqualTo(0);\nJsonNode serviceEndpointStatistics = storeResult.get(\"serviceEndpointStatistics\");\nassertThat(serviceEndpointStatistics).isNotNull();\nassertThat(serviceEndpointStatistics.get(\"availableChannels\").asInt(-1)).isGreaterThan(0);\nassertThat(serviceEndpointStatistics.get(\"acquiredChannels\").asInt(-1)).isEqualTo(0);\nassertThat(serviceEndpointStatistics.get(\"inflightRequests\").asInt(-1)).isEqualTo(1);\nassertThat(serviceEndpointStatistics.get(\"isClosed\").asBoolean()).isEqualTo(false);\nInstant beforeInitializationThreshold = beforeInitializingRntbdServiceEndpoint.minusMillis(1);\nassertThat(Instant.parse(serviceEndpointStatistics.get(\"createdTime\").asText()))\n.isAfterOrEqualTo(beforeInitializationThreshold);\nInstant afterInitializationThreshold = afterInitializingRntbdServiceEndpoint.plusMillis(2);\nassertThat(Instant.parse(serviceEndpointStatistics.get(\"createdTime\").asText()))\n.isBeforeOrEqualTo(afterInitializationThreshold);\nInstant afterOperation2Threshold = afterOperation2.plusMillis(2);\nInstant beforeOperation2Threshold = 
beforeOperation2.minusMillis(2);\nassertThat(Instant.parse(serviceEndpointStatistics.get(\"lastRequestTime\").asText()))\n.isAfterOrEqualTo(beforeOperation2Threshold)\n.isBeforeOrEqualTo(afterOperation2Threshold);\nassertThat(Instant.parse(serviceEndpointStatistics.get(\"lastSuccessfulRequestTime\").asText()))\n.isAfterOrEqualTo(beforeOperation2Threshold)\n.isBeforeOrEqualTo(afterOperation2Threshold);\n}\nprivate void validate(CosmosDiagnostics cosmosDiagnostics, int expectedRequestPayloadSize, int expectedResponsePayloadSize) throws Exception {\nObjectNode diagnostics = (ObjectNode) OBJECT_MAPPER.readTree(cosmosDiagnostics.toString());\nJsonNode responseStatisticsList = diagnostics.get(\"responseStatisticsList\");\nassertThat(responseStatisticsList.isArray()).isTrue();\nassertThat(responseStatisticsList.size()).isGreaterThan(0);\nJsonNode storeResult = responseStatisticsList.get(0).get(\"storeResult\");\nboolean hasPayload = storeResult.get(\"exception\").isNull();\nassertThat(storeResult).isNotNull();\nassertThat(storeResult.get(\"rntbdRequestLengthInBytes\").asInt(-1)).isGreaterThan(expectedRequestPayloadSize);\nassertThat(storeResult.get(\"rntbdRequestLengthInBytes\").asInt(-1)).isGreaterThan(expectedRequestPayloadSize);\nassertThat(storeResult.get(\"requestPayloadLengthInBytes\").asInt(-1)).isEqualTo(expectedRequestPayloadSize);\nif (hasPayload) {\nassertThat(storeResult.get(\"responsePayloadLengthInBytes\").asInt(-1)).isEqualTo(expectedResponsePayloadSize);\n}\nassertThat(storeResult.get(\"rntbdResponseLengthInBytes\").asInt(-1)).isGreaterThan(expectedResponsePayloadSize);\n}\n@Test(groups = {\"emulator\"}, timeOut = TIMEOUT)\npublic void addressResolutionStatistics() {\nCosmosClient client1 = null;\nCosmosClient client2 = null;\nString databaseId = DatabaseForTest.generateId();\nString containerId = UUID.randomUUID().toString();\nCosmosDatabase cosmosDatabase = null;\nCosmosContainer cosmosContainer = null;\ntry {\nclient1 = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\nclient1.createDatabase(databaseId);\ncosmosDatabase = client1.getDatabase(databaseId);\ncosmosDatabase.createContainer(containerId, \"/mypk\");\nInternalObjectNode internalObjectNode = getInternalObjectNode();\ncosmosContainer = cosmosDatabase.getContainer(containerId);\nCosmosItemResponse writeResourceResponse = cosmosContainer.createItem(internalObjectNode);\nassertThat(writeResourceResponse.getDiagnostics().toString()).contains(\"addressResolutionStatistics\");\nassertThat(writeResourceResponse.getDiagnostics().toString()).contains(\"\\\"inflightRequest\\\":false\");\nassertThat(writeResourceResponse.getDiagnostics().toString()).doesNotContain(\"endTime=\\\"null\\\"\");\nassertThat(writeResourceResponse.getDiagnostics().toString()).contains(\"\\\"errorMessage\\\":null\");\nassertThat(writeResourceResponse.getDiagnostics().toString()).doesNotContain(\"\\\"errorMessage\\\":\\\"io.netty\" +\n\".channel.AbstractChannel$AnnotatedConnectException: Connection refused: no further information\");\nclient2 = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\ncosmosDatabase = client2.getDatabase(databaseId);\ncosmosContainer = cosmosDatabase.getContainer(containerId);\nAsyncDocumentClient asyncDocumentClient = client2.asyncClient().getContextClient();\nGlobalAddressResolver 
addressResolver = (GlobalAddressResolver) FieldUtils.readField(asyncDocumentClient,\n\"addressResolver\", true);\n@SuppressWarnings(\"rawtypes\")\nMap addressCacheByEndpoint = (Map) FieldUtils.readField(addressResolver,\n\"addressCacheByEndpoint\",\ntrue);\nObject endpointCache = addressCacheByEndpoint.values().toArray()[0];\nGatewayAddressCache addressCache = (GatewayAddressCache) FieldUtils.readField(endpointCache, \"addressCache\", true);\nHttpClient httpClient = httpClient(true);\nFieldUtils.writeField(addressCache, \"httpClient\", httpClient, true);\nnew Thread(() -> {\ntry {\nThread.sleep(5000);\nHttpClient httpClient1 = httpClient(false);\nFieldUtils.writeField(addressCache, \"httpClient\", httpClient1, true);\n} catch (Exception e) {\nfail(e.getMessage());\n}\n}).start();\nPartitionKey partitionKey = new PartitionKey(internalObjectNode.get(\"mypk\"));\nCosmosItemResponse readResourceResponse =\ncosmosContainer.readItem(internalObjectNode.getId(), partitionKey, new CosmosItemRequestOptions(),\nInternalObjectNode.class);\nassertThat(readResourceResponse.getDiagnostics().toString()).contains(\"addressResolutionStatistics\");\nassertThat(readResourceResponse.getDiagnostics().toString()).contains(\"\\\"inflightRequest\\\":false\");\nassertThat(readResourceResponse.getDiagnostics().toString()).doesNotContain(\"endTime=\\\"null\\\"\");\nassertThat(readResourceResponse.getDiagnostics().toString()).contains(\"\\\"errorMessage\\\":null\");\nassertThat(readResourceResponse.getDiagnostics().toString()).contains(\"\\\"errorMessage\\\":\\\"io.netty\" +\n\".channel.AbstractChannel$AnnotatedConnectException: Connection refused\");\n} catch (Exception ex) {\nlogger.error(\"Error in test addressResolutionStatistics\", ex);\nfail(\"This test should not throw exception \" + ex);\n} finally {\nsafeDeleteSyncDatabase(cosmosDatabase);\nif (client1 != null) {\nclient1.close();\n}\nif (client2 != null) {\nclient2.close();\n}\n}\n}\nprivate InternalObjectNode getInternalObjectNode() {\nInternalObjectNode internalObjectNode = new InternalObjectNode();\nString uuid = UUID.randomUUID().toString();\ninternalObjectNode.setId(uuid);\nBridgeInternal.setProperty(internalObjectNode, \"mypk\", uuid);\nreturn internalObjectNode;\n}\nprivate InternalObjectNode getInternalObjectNode(String pkValue) {\nInternalObjectNode internalObjectNode = new InternalObjectNode();\nString uuid = UUID.randomUUID().toString();\ninternalObjectNode.setId(uuid);\nBridgeInternal.setProperty(internalObjectNode, \"mypk\", pkValue);\nreturn internalObjectNode;\n}\nprivate List getStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {\nField storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField(\"supplementalResponseStatisticsList\");\nstoreResponseStatisticsField.setAccessible(true);\n@SuppressWarnings({\"unchecked\"})\nList list\n= (List) storeResponseStatisticsField.get(requestStatistics);\nreturn list;\n}\nprivate void clearStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {\nField storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField(\"supplementalResponseStatisticsList\");\nstoreResponseStatisticsField.setAccessible(true);\nstoreResponseStatisticsField.set(requestStatistics, new ArrayList());\n}\nprivate void validateTransportRequestTimelineGateway(String diagnostics) 
{\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"connectionConfigured\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"connectionConfigured\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"requestSent\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"transitTime\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"received\\\"\");\n}\nprivate void validateTransportRequestTimelineDirect(String diagnostics) {\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"created\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"queued\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"channelAcquisitionStarted\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"pipelined\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"transitTime\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"received\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"completed\\\"\");\nassertThat(diagnostics).contains(\"\\\"startTimeUTC\\\"\");\nassertThat(diagnostics).contains(\"\\\"durationInMicroSec\\\"\");\n}\npublic void isValidJSON(final String json) {\ntry {\nfinal JsonParser parser = new ObjectMapper().createParser(json);\nwhile (parser.nextToken() != null) {\n}\n} catch (IOException ex) {\nfail(\"Diagnostic string is not in json format \", ex);\n}\n}\nprivate HttpClient httpClient(boolean fakeProxy) {\nHttpClientConfig httpClientConfig;\nif(fakeProxy) {\nhttpClientConfig = new HttpClientConfig(new Configs())\n.withProxy(new ProxyOptions(ProxyOptions.Type.HTTP, new InetSocketAddress(\"localhost\", 8888)));\n} else {\nhttpClientConfig = new HttpClientConfig(new Configs());\n}\nreturn HttpClient.createFixed(httpClientConfig);\n}\nprivate IndexUtilizationInfo createFromJSONString(String jsonString) {\nObjectMapper indexUtilizationInfoObjectMapper = new ObjectMapper();\nIndexUtilizationInfo indexUtilizationInfo = null;\ntry {\nindexUtilizationInfo = indexUtilizationInfoObjectMapper.readValue(jsonString, IndexUtilizationInfo.class);\n} catch (JsonProcessingException e) {\nlogger.error(\"Json not correctly formed \", e);\n}\nreturn indexUtilizationInfo;\n}\nprivate void validateRegionContacted(CosmosDiagnostics cosmosDiagnostics, CosmosAsyncClient cosmosAsyncClient) throws Exception {\nRxDocumentClientImpl rxDocumentClient =\n(RxDocumentClientImpl) ReflectionUtils.getAsyncDocumentClient(cosmosAsyncClient);\nGlobalEndpointManager globalEndpointManager = ReflectionUtils.getGlobalEndpointManager(rxDocumentClient);\nLocationCache locationCache = ReflectionUtils.getLocationCache(globalEndpointManager);\nField locationInfoField = LocationCache.class.getDeclaredField(\"locationInfo\");\nlocationInfoField.setAccessible(true);\nObject locationInfo = locationInfoField.get(locationCache);\nClass DatabaseAccountLocationsInfoClass = Class.forName(\"com.azure.cosmos.implementation.routing\" +\n\".LocationCache$DatabaseAccountLocationsInfo\");\nField availableWriteEndpointByLocation = DatabaseAccountLocationsInfoClass.getDeclaredField(\n\"availableWriteEndpointByLocation\");\navailableWriteEndpointByLocation.setAccessible(true);\n@SuppressWarnings(\"unchecked\")\nMap map = (Map) availableWriteEndpointByLocation.get(locationInfo);\nString regionName = 
map.keySet().iterator().next();\nassertThat(cosmosDiagnostics.getContactedRegionNames().size()).isEqualTo(1);\nassertThat(cosmosDiagnostics.getContactedRegionNames().iterator().next()).isEqualTo(regionName.toLowerCase());\n}\npublic static class TestItem {\npublic String id;\npublic String mypk;\npublic TestItem() {\n}\n}\n}", + "context_after": "class CosmosDiagnosticsTest extends TestSuiteBase {\nprivate static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();\nprivate static final DateTimeFormatter RESPONSE_TIME_FORMATTER = DateTimeFormatter.ISO_INSTANT;\nprivate CosmosClient gatewayClient;\nprivate CosmosClient directClient;\nprivate CosmosAsyncDatabase cosmosAsyncDatabase;\nprivate CosmosContainer container;\nprivate CosmosAsyncContainer cosmosAsyncContainer;\n@BeforeClass(groups = {\"simple\"}, timeOut = SETUP_TIMEOUT)\npublic void beforeClass() {\nassertThat(this.gatewayClient).isNull();\ngatewayClient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.gatewayMode()\n.buildClient();\ndirectClient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\ncosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(this.gatewayClient.asyncClient());\ncosmosAsyncDatabase = directClient.asyncClient().getDatabase(cosmosAsyncContainer.getDatabase().getId());\ncontainer = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\n}\n@AfterClass(groups = {\"simple\"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)\npublic void afterClass() {\nif (this.gatewayClient != null) {\nthis.gatewayClient.close();\n}\nif (this.directClient != null) {\nthis.directClient.close();\n}\n}\n@DataProvider(name = \"query\")\nprivate Object[][] query() {\nreturn new Object[][]{\nnew Object[] { \"Select * from c where c.id = 'wrongId'\", true },\nnew Object[] { \"Select top 1 * from c where c.id = 'wrongId'\", true },\nnew Object[] { \"Select * from c where c.id = 'wrongId' order by c.id\", true },\nnew Object[] { \"Select count(1) from c where c.id = 'wrongId' group by c.pk\", true },\nnew Object[] { \"Select distinct c.pk from c where c.id = 'wrongId'\", true },\nnew Object[] { \"Select * from c where c.id = 'wrongId'\", false },\nnew Object[] { \"Select top 1 * from c where c.id = 'wrongId'\", false },\nnew Object[] { \"Select * from c where c.id = 'wrongId' order by c.id\", false },\nnew Object[] { \"Select count(1) from c where c.id = 'wrongId' group by c.pk\", false },\nnew Object[] { \"Select distinct c.pk from c where c.id = 'wrongId'\", false },\nnew Object[] { \"Select * from c where c.id = 'wrongId'\", false },\nnew Object[] { \"Select top 1 * from c where c.id = 'wrongId'\", false },\nnew Object[] { \"Select * from c where c.id = 'wrongId' order by c.id\", false },\nnew Object[] { \"Select count(1) from c where c.id = 'wrongId' group by c.pk\", false },\nnew Object[] { \"Select distinct c.pk from c where c.id = 'wrongId'\", false },\n};\n}\n@DataProvider(name = \"readAllItemsOfLogicalPartition\")\nprivate Object[][] readAllItemsOfLogicalPartition() {\nreturn new Object[][]{\nnew Object[] { 1, true },\nnew Object[] { 5, null },\nnew Object[] { 20, null },\nnew Object[] { 1, false },\nnew Object[] { 5, false },\nnew Object[] { 20, false },\n};\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void gatewayDiagnostics() throws 
Exception {\nCosmosClient testGatewayClient = null;\ntry {\ntestGatewayClient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.gatewayMode()\n.buildClient();\nCosmosContainer container =\ntestGatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = container.createItem(internalObjectNode);\nString diagnostics = createResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"GATEWAY\\\"\");\nassertThat(diagnostics).doesNotContain((\"\\\"gatewayStatistics\\\":null\"));\nassertThat(diagnostics).contains(\"\\\"operationType\\\":\\\"Create\\\"\");\nassertThat(diagnostics).contains(\"\\\"metaDataName\\\":\\\"CONTAINER_LOOK_UP\\\"\");\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"PARTITION_KEY_FETCH_SERIALIZATION\\\"\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nassertThat(createResponse.getDiagnostics().getDuration()).isNotNull();\nassertThat(createResponse.getDiagnostics().getContactedRegionNames()).isNotNull();\nvalidateTransportRequestTimelineGateway(diagnostics);\nvalidateRegionContacted(createResponse.getDiagnostics(), testGatewayClient.asyncClient());\nisValidJSON(diagnostics);\n} finally {\nif (testGatewayClient != null) {\ntestGatewayClient.close();\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void gatewayDiagnosticsOnException() throws Exception {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = null;\ntry {\ncreateResponse = this.container.createItem(internalObjectNode);\nCosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();\nModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey(\"wrongPartitionKey\"));\nCosmosItemResponse readResponse =\nthis.container.readItem(BridgeInternal.getProperties(createResponse).getId(),\nnew PartitionKey(\"wrongPartitionKey\"),\nInternalObjectNode.class);\nfail(\"request should fail as partition key is wrong\");\n} catch (CosmosException exception) {\nisValidJSON(exception.toString());\nisValidJSON(exception.getMessage());\nString diagnostics = exception.getDiagnostics().toString();\nassertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"GATEWAY\\\"\");\nassertThat(diagnostics).doesNotContain((\"\\\"gatewayStatistics\\\":null\"));\nassertThat(diagnostics).contains(\"\\\"statusCode\\\":404\");\nassertThat(diagnostics).contains(\"\\\"operationType\\\":\\\"Read\\\"\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nassertThat(diagnostics).doesNotContain((\"\\\"resourceAddress\\\":null\"));\nassertThat(createResponse.getDiagnostics().getContactedRegionNames()).isNotNull();\nvalidateRegionContacted(createResponse.getDiagnostics(), 
this.container.asyncContainer.getDatabase().getClient());\nassertThat(exception.getDiagnostics().getDuration()).isNotNull();\nvalidateTransportRequestTimelineGateway(diagnostics);\nisValidJSON(diagnostics);\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void systemDiagnosticsForSystemStateInformation() {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = this.container.createItem(internalObjectNode);\nString diagnostics = createResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"systemInformation\");\nassertThat(diagnostics).contains(\"usedMemory\");\nassertThat(diagnostics).contains(\"availableMemory\");\nassertThat(diagnostics).contains(\"systemCpuLoad\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nassertThat(createResponse.getDiagnostics().getDuration()).isNotNull();\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void directDiagnostics() throws Exception {\nCosmosClient testDirectClient = null;\ntry {\ntestDirectClient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\nCosmosContainer cosmosContainer =\ntestDirectClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = cosmosContainer.createItem(internalObjectNode);\nString diagnostics = createResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"DIRECT\\\"\");\nassertThat(diagnostics).contains(\"supplementalResponseStatisticsList\");\nassertThat(diagnostics).contains(\"\\\"gatewayStatistics\\\":null\");\nassertThat(diagnostics).contains(\"addressResolutionStatistics\");\nassertThat(diagnostics).contains(\"\\\"metaDataName\\\":\\\"CONTAINER_LOOK_UP\\\"\");\nassertThat(diagnostics).contains(\"\\\"metaDataName\\\":\\\"PARTITION_KEY_RANGE_LOOK_UP\\\"\");\nassertThat(diagnostics).contains(\"\\\"metaDataName\\\":\\\"SERVER_ADDRESS_LOOKUP\\\"\");\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"PARTITION_KEY_FETCH_SERIALIZATION\\\"\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nassertThat(diagnostics).contains(\"\\\"backendLatencyInMs\\\"\");\nassertThat(createResponse.getDiagnostics().getContactedRegionNames()).isNotEmpty();\nassertThat(createResponse.getDiagnostics().getDuration()).isNotNull();\nvalidateTransportRequestTimelineDirect(diagnostics);\nvalidateRegionContacted(createResponse.getDiagnostics(), testDirectClient.asyncClient());\nisValidJSON(diagnostics);\ntry {\ncosmosContainer.createItem(internalObjectNode);\nfail(\"expected 409\");\n} catch (CosmosException e) {\ndiagnostics = e.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"backendLatencyInMs\\\"\");\nvalidateTransportRequestTimelineDirect(e.getDiagnostics().toString());\n}\n} finally {\nif (testDirectClient != null) {\ntestDirectClient.close();\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void queryPlanDiagnostics() throws JsonProcessingException {\nCosmosContainer cosmosContainer = 
directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nList itemIdList = new ArrayList<>();\nfor(int i = 0; i< 100; i++) {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = cosmosContainer.createItem(internalObjectNode);\nif(i%20 == 0) {\nitemIdList.add(internalObjectNode.getId());\n}\n}\nString queryDiagnostics = null;\nList queryList = new ArrayList<>();\nqueryList.add(\"Select * from c\");\nStringBuilder queryBuilder = new StringBuilder(\"SELECT * from c where c.mypk in (\");\nfor(int i = 0 ; i < itemIdList.size(); i++){\nqueryBuilder.append(\"'\").append(itemIdList.get(i)).append(\"'\");\nif(i < (itemIdList.size()-1)) {\nqueryBuilder.append(\",\");\n} else {\nqueryBuilder.append(\")\");\n}\n}\nqueryList.add(queryBuilder.toString());\nqueryList.add(\"Select * from c where c.id = 'wrongId'\");\nfor(String query : queryList) {\nint feedResponseCounter = 0;\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\noptions.setQueryMetricsEnabled(true);\nIterator> iterator = cosmosContainer.queryItems(query, options, InternalObjectNode.class).iterableByPage().iterator();\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nqueryDiagnostics = feedResponse.getCosmosDiagnostics().toString();\nif (feedResponseCounter == 0) {\nassertThat(queryDiagnostics).contains(\"QueryPlan Start Time (UTC)=\");\nassertThat(queryDiagnostics).contains(\"QueryPlan End Time (UTC)=\");\nassertThat(queryDiagnostics).contains(\"QueryPlan Duration (ms)=\");\nString requestTimeLine = OBJECT_MAPPER.writeValueAsString(feedResponse.getCosmosDiagnostics().getFeedResponseDiagnostics().getQueryPlanDiagnosticsContext().getRequestTimeline());\nassertThat(requestTimeLine).contains(\"connectionConfigured\");\nassertThat(requestTimeLine).contains(\"requestSent\");\nassertThat(requestTimeLine).contains(\"transitTime\");\nassertThat(requestTimeLine).contains(\"received\");\n} else {\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan Start Time (UTC)=\");\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan End Time (UTC)=\");\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan Duration (ms)=\");\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan RequestTimeline =\");\n}\nfeedResponseCounter++;\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void queryMetricsWithIndexMetrics() {\nCosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nList itemIdList = new ArrayList<>();\nfor(int i = 0; i< 100; i++) {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = cosmosContainer.createItem(internalObjectNode);\nif(i%20 == 0) {\nitemIdList.add(internalObjectNode.getId());\n}\n}\nString queryDiagnostics = null;\nList queryList = new ArrayList<>();\nStringBuilder queryBuilder = new StringBuilder(\"SELECT * from c where c.mypk in (\");\nfor(int i = 0 ; i < itemIdList.size(); i++){\nqueryBuilder.append(\"'\").append(itemIdList.get(i)).append(\"'\");\nif(i < (itemIdList.size()-1)) {\nqueryBuilder.append(\",\");\n} else {\nqueryBuilder.append(\")\");\n}\n}\nqueryList.add(queryBuilder.toString());\nfor (String query : queryList) {\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\noptions.setQueryMetricsEnabled(true);\noptions.setIndexMetricsEnabled(true);\nIterator> iterator = 
cosmosContainer.queryItems(query, options, InternalObjectNode.class).iterableByPage().iterator();\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nqueryDiagnostics = feedResponse.getCosmosDiagnostics().toString();\nlogger.info(\"This is query diagnostics {}\", queryDiagnostics);\nif (feedResponse.getResponseHeaders().containsKey(HttpConstants.HttpHeaders.INDEX_UTILIZATION)) {\nassertThat(feedResponse.getResponseHeaders().get(HttpConstants.HttpHeaders.INDEX_UTILIZATION)).isNotNull();\nassertThat(createFromJSONString(Utils.decodeBase64String(feedResponse.getResponseHeaders().get(HttpConstants.HttpHeaders.INDEX_UTILIZATION))).getUtilizedSingleIndexes()).isNotNull();\n}\n}\n}\n}\n@Test(groups = {\"simple\"}, dataProvider = \"query\", timeOut = TIMEOUT)\npublic void queryMetrics(String query, Boolean qmEnabled) {\nCosmosContainer directContainer =\nthis.directClient.getDatabase(this.cosmosAsyncContainer.getDatabase().getId())\n.getContainer(this.cosmosAsyncContainer.getId());\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\nif (qmEnabled != null) {\noptions.setQueryMetricsEnabled(qmEnabled);\n}\nboolean qroupByFirstResponse = true;\nIterator> iterator = directContainer.queryItems(query, options,\nInternalObjectNode.class).iterableByPage().iterator();\nassertThat(iterator.hasNext()).isTrue();\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nString queryDiagnostics = feedResponse.getCosmosDiagnostics().toString();\nassertThat(feedResponse.getResults().size()).isEqualTo(0);\nif (!query.contains(\"group by\") || qroupByFirstResponse) {\nvalidateQueryDiagnostics(queryDiagnostics, qmEnabled, true);\nvalidateDirectModeQueryDiagnostics(queryDiagnostics);\nif (query.contains(\"group by\")) {\nqroupByFirstResponse = false;\n}\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\nprivate void validateDirectModeQueryDiagnostics(String diagnostics) {\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"DIRECT\\\"\");\nassertThat(diagnostics).contains(\"supplementalResponseStatisticsList\");\nassertThat(diagnostics).contains(\"responseStatisticsList\");\nassertThat(diagnostics).contains(\"\\\"gatewayStatistics\\\":null\");\nassertThat(diagnostics).contains(\"addressResolutionStatistics\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\n}\nprivate void validateGatewayModeQueryDiagnostics(String diagnostics) {\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"GATEWAY\\\"\");\nassertThat(diagnostics).doesNotContain((\"\\\"gatewayStatistics\\\":null\"));\nassertThat(diagnostics).contains(\"\\\"operationType\\\":\\\"Query\\\"\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nassertThat(diagnostics).contains(\"\\\"regionsContacted\\\"\");\n}\n@Test(groups = {\"simple\"}, dataProvider = \"query\", timeOut = TIMEOUT*2)\npublic void queryDiagnosticsGatewayMode(String query, Boolean qmEnabled) {\nCosmosClient testDirectClient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.gatewayMode()\n.buildClient();\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\nCosmosContainer cosmosContainer = 
testDirectClient.getDatabase(cosmosAsyncContainer.getDatabase().getId())\n.getContainer(cosmosAsyncContainer.getId());\nList itemIdList = new ArrayList<>();\nfor (int i = 0; i < 100; i++) {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = cosmosContainer.createItem(internalObjectNode);\nif (i % 20 == 0) {\nitemIdList.add(internalObjectNode.getId());\n}\n}\nboolean qroupByFirstResponse = true;\nif (qmEnabled != null) {\noptions.setQueryMetricsEnabled(qmEnabled);\n}\nIterator> iterator = cosmosContainer\n.queryItems(query, options, InternalObjectNode.class)\n.iterableByPage()\n.iterator();\nassertThat(iterator.hasNext()).isTrue();\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nString queryDiagnostics = feedResponse.getCosmosDiagnostics().toString();\nassertThat(feedResponse.getResults().size()).isEqualTo(0);\nif (!query.contains(\"group by\") || qroupByFirstResponse) {\nvalidateQueryDiagnostics(queryDiagnostics, qmEnabled, true);\nvalidateGatewayModeQueryDiagnostics(queryDiagnostics);\nif (query.contains(\"group by\")) {\nqroupByFirstResponse = false;\n}\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void queryMetricsWithADifferentLocale() {\nLocale.setDefault(Locale.GERMAN);\nString query = \"select * from root where root.id= \\\"someid\\\"\";\nCosmosQueryRequestOptions options = new CosmosQueryRequestOptions();\nIterator> iterator = this.container.queryItems(query, options,\nInternalObjectNode.class)\n.iterableByPage().iterator();\ndouble requestCharge = 0;\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nrequestCharge += feedResponse.getRequestCharge();\n}\nassertThat(requestCharge).isGreaterThan(0);\nLocale.setDefault(Locale.ROOT);\n}\nprivate static void validateQueryDiagnostics(\nString queryDiagnostics,\nBoolean qmEnabled,\nboolean expectQueryPlanDiagnostics) {\nif (qmEnabled == null || qmEnabled) {\nassertThat(queryDiagnostics).contains(\"Retrieved Document Count\");\nassertThat(queryDiagnostics).contains(\"Query Preparation Times\");\nassertThat(queryDiagnostics).contains(\"Runtime Execution Times\");\nassertThat(queryDiagnostics).contains(\"Partition Execution Timeline\");\n} else {\nassertThat(queryDiagnostics).doesNotContain(\"Retrieved Document Count\");\nassertThat(queryDiagnostics).doesNotContain(\"Query Preparation Times\");\nassertThat(queryDiagnostics).doesNotContain(\"Runtime Execution Times\");\nassertThat(queryDiagnostics).doesNotContain(\"Partition Execution Timeline\");\n}\nif (expectQueryPlanDiagnostics) {\nassertThat(queryDiagnostics).contains(\"QueryPlan Start Time (UTC)=\");\nassertThat(queryDiagnostics).contains(\"QueryPlan End Time (UTC)=\");\nassertThat(queryDiagnostics).contains(\"QueryPlan Duration (ms)=\");\n} else {\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan Start Time (UTC)=\");\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan End Time (UTC)=\");\nassertThat(queryDiagnostics).doesNotContain(\"QueryPlan Duration (ms)=\");\n}\n}\n@Test(groups = {\"simple\"}, dataProvider = \"readAllItemsOfLogicalPartition\", timeOut = TIMEOUT)\npublic void queryMetricsForReadAllItemsOfLogicalPartition(Integer expectedItemCount, Boolean qmEnabled) {\nString pkValue = UUID.randomUUID().toString();\nfor (int i = 0; i < expectedItemCount; i++) {\nInternalObjectNode internalObjectNode = getInternalObjectNode(pkValue);\nCosmosItemResponse createResponse = container.createItem(internalObjectNode);\n}\nCosmosQueryRequestOptions 
options = new CosmosQueryRequestOptions();\nif (qmEnabled != null) {\noptions = options.setQueryMetricsEnabled(qmEnabled);\n}\nModelBridgeInternal.setQueryRequestOptionsMaxItemCount(options, 5);\nIterator> iterator =\nthis.container\n.readAllItems(\nnew PartitionKey(pkValue),\noptions,\nInternalObjectNode.class)\n.iterableByPage().iterator();\nassertThat(iterator.hasNext()).isTrue();\nint actualItemCount = 0;\nwhile (iterator.hasNext()) {\nFeedResponse feedResponse = iterator.next();\nString queryDiagnostics = feedResponse.getCosmosDiagnostics().toString();\nactualItemCount += feedResponse.getResults().size();\nvalidateQueryDiagnostics(queryDiagnostics, qmEnabled, false);\n}\nassertThat(actualItemCount).isEqualTo(expectedItemCount);\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void directDiagnosticsOnException() throws Exception {\nCosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosItemResponse createResponse = null;\nCosmosClient client = null;\ntry {\nclient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\nCosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\ncreateResponse = container.createItem(internalObjectNode);\nCosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();\nModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey(\"wrongPartitionKey\"));\nCosmosItemResponse readResponse =\ncosmosContainer.readItem(BridgeInternal.getProperties(createResponse).getId(),\nnew PartitionKey(\"wrongPartitionKey\"),\nInternalObjectNode.class);\nfail(\"request should fail as partition key is wrong\");\n} catch (CosmosException exception) {\nisValidJSON(exception.toString());\nisValidJSON(exception.getMessage());\nString diagnostics = exception.getDiagnostics().toString();\nassertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"DIRECT\\\"\");\nassertThat(diagnostics).doesNotContain((\"\\\"resourceAddress\\\":null\"));\nassertThat(exception.getDiagnostics().getContactedRegionNames()).isNotEmpty();\nassertThat(exception.getDiagnostics().getDuration()).isNotNull();\nassertThat(diagnostics).contains(\"\\\"backendLatencyInMs\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\nisValidJSON(diagnostics);\nvalidateTransportRequestTimelineDirect(diagnostics);\nvalidateRegionContacted(createResponse.getDiagnostics(), client.asyncClient());\n} finally {\nif (client != null) {\nclient.close();\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void directDiagnosticsOnMetadataException() {\nInternalObjectNode internalObjectNode = getInternalObjectNode();\nCosmosClient client = null;\ntry {\nclient = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\nCosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());\nHttpClient mockHttpClient = 
Mockito.mock(HttpClient.class);\nMockito.when(mockHttpClient.send(Mockito.any(HttpRequest.class), Mockito.any(Duration.class)))\n.thenReturn(Mono.error(new CosmosException(400, \"TestBadRequest\")));\nRxStoreModel rxGatewayStoreModel = rxGatewayStoreModel = ReflectionUtils.getGatewayProxy((RxDocumentClientImpl) client.asyncClient().getDocClientWrapper());\nReflectionUtils.setGatewayHttpClient(rxGatewayStoreModel, mockHttpClient);\ncontainer.createItem(internalObjectNode);\nfail(\"request should fail as bad request\");\n} catch (CosmosException exception) {\nisValidJSON(exception.toString());\nisValidJSON(exception.getMessage());\nString diagnostics = exception.getDiagnostics().toString();\nassertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.BADREQUEST);\nassertThat(diagnostics).contains(\"\\\"connectionMode\\\":\\\"DIRECT\\\"\");\nassertThat(diagnostics).doesNotContain((\"\\\"resourceAddress\\\":null\"));\nassertThat(diagnostics).contains(\"\\\"resourceType\\\":\\\"DocumentCollection\\\"\");\nassertThat(exception.getDiagnostics().getContactedRegionNames()).isNotEmpty();\nassertThat(exception.getDiagnostics().getDuration()).isNotNull();\nisValidJSON(diagnostics);\n} finally {\nif (client != null) {\nclient.close();\n}\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void supplementalResponseStatisticsList() throws Exception {\nClientSideRequestStatistics clientSideRequestStatistics = new ClientSideRequestStatistics(mockDiagnosticsClientContext(),null);\nfor (int i = 0; i < 15; i++) {\nRxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(mockDiagnosticsClientContext(), OperationType.Head, ResourceType.Document);\nclientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);\n}\nList storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);\nObjectMapper objectMapper = new ObjectMapper();\nString diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);\nJsonNode jsonNode = objectMapper.readTree(diagnostics);\nArrayNode supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get(\"supplementalResponseStatisticsList\");\nassertThat(storeResponseStatistics.size()).isEqualTo(15);\nassertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(10);\nclearStoreResponseStatistics(clientSideRequestStatistics);\nstoreResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);\nassertThat(storeResponseStatistics.size()).isEqualTo(0);\nfor (int i = 0; i < 7; i++) {\nRxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(mockDiagnosticsClientContext(), OperationType.Head, ResourceType.Document);\nclientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);\n}\nstoreResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);\nobjectMapper = new ObjectMapper();\ndiagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);\njsonNode = objectMapper.readTree(diagnostics);\nsupplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get(\"supplementalResponseStatisticsList\");\nassertThat(storeResponseStatistics.size()).isEqualTo(7);\nassertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(7);\nfor(JsonNode node : supplementalResponseStatisticsListNode) {\nassertThat(node.get(\"storeResult\").asText()).isNotNull();\nString requestResponseTimeUTC = node.get(\"requestResponseTimeUTC\").asText();\nInstant instant = 
Instant.from(RESPONSE_TIME_FORMATTER.parse(requestResponseTimeUTC));\nassertThat(Instant.now().toEpochMilli() - instant.toEpochMilli()).isLessThan(5000);\nassertThat(node.get(\"requestResponseTimeUTC\")).isNotNull();\nassertThat(node.get(\"requestOperationType\")).isNotNull();\nassertThat(node.get(\"requestOperationType\")).isNotNull();\n}\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void serializationOnVariousScenarios() {\nCosmosDatabaseResponse cosmosDatabase = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).read();\nString diagnostics = cosmosDatabase.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"DATABASE_DESERIALIZATION\\\"\");\nCosmosContainerResponse containerResponse = this.container.read();\ndiagnostics = containerResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"CONTAINER_DESERIALIZATION\\\"\");\nTestItem testItem = new TestItem();\ntestItem.id = \"TestId\";\ntestItem.mypk = \"TestPk\";\nCosmosItemResponse itemResponse = this.container.createItem(testItem);\ndiagnostics = itemResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"PARTITION_KEY_FETCH_SERIALIZATION\\\"\");\ntestItem.id = \"TestId2\";\ntestItem.mypk = \"TestPk\";\nitemResponse = this.container.createItem(testItem, new PartitionKey(\"TestPk\"), null);\ndiagnostics = itemResponse.getDiagnostics().toString();\nassertThat(diagnostics).doesNotContain(\"\\\"serializationType\\\":\\\"PARTITION_KEY_FETCH_SERIALIZATION\\\"\");\nassertThat(diagnostics).doesNotContain(\"\\\"serializationType\\\":\\\"ITEM_DESERIALIZATION\\\"\");\nTestItem readTestItem = itemResponse.getItem();\ndiagnostics = itemResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"ITEM_DESERIALIZATION\\\"\");\nCosmosItemResponse readItemResponse = this.container.readItem(testItem.id, new PartitionKey(testItem.mypk), null, InternalObjectNode.class);\nInternalObjectNode properties = readItemResponse.getItem();\ndiagnostics = readItemResponse.getDiagnostics().toString();\nassertThat(diagnostics).contains(\"\\\"serializationType\\\":\\\"ITEM_DESERIALIZATION\\\"\");\nassertThat(diagnostics).contains(\"\\\"userAgent\\\":\\\"\" + Utils.getUserAgent() + \"\\\"\");\nassertThat(diagnostics).containsPattern(\"(?s).*?\\\"activityId\\\":\\\"[^\\\\s\\\"]+\\\".*\");\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void rntbdRequestResponseLengthStatistics() throws Exception {\nTestItem testItem = new TestItem();\ntestItem.id = UUID.randomUUID().toString();\ntestItem.mypk = UUID.randomUUID().toString();\nint testItemLength = OBJECT_MAPPER.writeValueAsBytes(testItem).length;\nCosmosContainer container = directClient.getDatabase(this.cosmosAsyncContainer.getDatabase().getId()).getContainer(this.cosmosAsyncContainer.getId());\nCosmosItemResponse createItemResponse = container.createItem(testItem);\nvalidate(createItemResponse.getDiagnostics(), testItemLength, ModelBridgeInternal.getPayloadLength(createItemResponse));\ntry {\ncontainer.createItem(testItem);\nfail(\"expected to fail due to 409\");\n} catch (CosmosException e) {\nvalidate(e.getDiagnostics(), testItemLength, 0);\n}\nCosmosItemResponse readItemResponse = container.readItem(testItem.id, new PartitionKey(testItem.mypk), TestItem.class);\nvalidate(readItemResponse.getDiagnostics(), 0, ModelBridgeInternal.getPayloadLength(readItemResponse));\nCosmosItemResponse 
deleteItemResponse = container.deleteItem(testItem, null);\nvalidate(deleteItemResponse.getDiagnostics(), 0, 0);\n}\n@Test(groups = {\"simple\"}, timeOut = TIMEOUT)\npublic void rntbdStatistics() throws Exception {\nInstant beforeClientInitialization = Instant.now();\nCosmosClient client1 = null;\ntry {\nclient1 = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.directMode()\n.buildClient();\nTestItem testItem = new TestItem();\ntestItem.id = UUID.randomUUID().toString();\ntestItem.mypk = UUID.randomUUID().toString();\nint testItemLength = OBJECT_MAPPER.writeValueAsBytes(testItem).length;\nCosmosContainer container = client1.getDatabase(this.cosmosAsyncContainer.getDatabase().getId()).getContainer(this.cosmosAsyncContainer.getId());\nThread.sleep(1000);\nInstant beforeInitializingRntbdServiceEndpoint = Instant.now();\nCosmosItemResponse operation1Response = container.upsertItem(testItem);\nInstant afterInitializingRntbdServiceEndpoint = Instant.now();\nThread.sleep(1000);\nInstant beforeOperation2 = Instant.now();\nCosmosItemResponse operation2Response = container.upsertItem(testItem);\nInstant afterOperation2 = Instant.now();\nThread.sleep(1000);\nInstant beforeOperation3 = Instant.now();\nCosmosItemResponse operation3Response = container.upsertItem(testItem);\nInstant afterOperation3 = Instant.now();\nvalidateRntbdStatistics(operation3Response.getDiagnostics(),\nbeforeClientInitialization,\nbeforeInitializingRntbdServiceEndpoint,\nafterInitializingRntbdServiceEndpoint,\nbeforeOperation2,\nafterOperation2,\nbeforeOperation3,\nafterOperation3);\nCosmosItemResponse readItemResponse = container.readItem(testItem.id, new PartitionKey(testItem.mypk), TestItem.class);\nvalidate(readItemResponse.getDiagnostics(), 0, ModelBridgeInternal.getPayloadLength(readItemResponse));\nCosmosItemResponse deleteItemResponse = container.deleteItem(testItem, null);\nvalidate(deleteItemResponse.getDiagnostics(), 0, 0);\n} finally {\nLifeCycleUtils.closeQuietly(client1);\n}\n}\nprivate void validateRntbdStatistics(CosmosDiagnostics cosmosDiagnostics,\nInstant clientInitializationTime,\nInstant beforeInitializingRntbdServiceEndpoint,\nInstant afterInitializingRntbdServiceEndpoint,\nInstant beforeOperation2,\nInstant afterOperation2,\nInstant beforeOperation3,\nInstant afterOperation3) throws Exception {\nObjectNode diagnostics = (ObjectNode) OBJECT_MAPPER.readTree(cosmosDiagnostics.toString());\nJsonNode responseStatisticsList = diagnostics.get(\"responseStatisticsList\");\nassertThat(responseStatisticsList.isArray()).isTrue();\nassertThat(responseStatisticsList.size()).isGreaterThan(0);\nJsonNode storeResult = responseStatisticsList.get(0).get(\"storeResult\");\nassertThat(storeResult).isNotNull();\nboolean hasPayload = storeResult.get(\"exception\").isNull();\nassertThat(storeResult.get(\"channelTaskQueueSize\").asInt(-1)).isGreaterThan(0);\nassertThat(storeResult.get(\"pendingRequestsCount\").asInt(-1)).isGreaterThanOrEqualTo(0);\nJsonNode serviceEndpointStatistics = storeResult.get(\"serviceEndpointStatistics\");\nassertThat(serviceEndpointStatistics).isNotNull();\nassertThat(serviceEndpointStatistics.get(\"availableChannels\").asInt(-1)).isGreaterThan(0);\nassertThat(serviceEndpointStatistics.get(\"acquiredChannels\").asInt(-1)).isEqualTo(0);\nassertThat(serviceEndpointStatistics.get(\"inflightRequests\").asInt(-1)).isEqualTo(1);\nassertThat(serviceEndpointStatistics.get(\"isClosed\").asBoolean()).isEqualTo(false);\nInstant beforeInitializationThreshold = 
beforeInitializingRntbdServiceEndpoint.minusMillis(1);\nassertThat(Instant.parse(serviceEndpointStatistics.get(\"createdTime\").asText()))\n.isAfterOrEqualTo(beforeInitializationThreshold);\nInstant afterInitializationThreshold = afterInitializingRntbdServiceEndpoint.plusMillis(2);\nassertThat(Instant.parse(serviceEndpointStatistics.get(\"createdTime\").asText()))\n.isBeforeOrEqualTo(afterInitializationThreshold);\nInstant afterOperation2Threshold = afterOperation2.plusMillis(2);\nInstant beforeOperation2Threshold = beforeOperation2.minusMillis(2);\nassertThat(Instant.parse(serviceEndpointStatistics.get(\"lastRequestTime\").asText()))\n.isAfterOrEqualTo(beforeOperation2Threshold)\n.isBeforeOrEqualTo(afterOperation2Threshold);\nassertThat(Instant.parse(serviceEndpointStatistics.get(\"lastSuccessfulRequestTime\").asText()))\n.isAfterOrEqualTo(beforeOperation2Threshold)\n.isBeforeOrEqualTo(afterOperation2Threshold);\n}\nprivate void validate(CosmosDiagnostics cosmosDiagnostics, int expectedRequestPayloadSize, int expectedResponsePayloadSize) throws Exception {\nObjectNode diagnostics = (ObjectNode) OBJECT_MAPPER.readTree(cosmosDiagnostics.toString());\nJsonNode responseStatisticsList = diagnostics.get(\"responseStatisticsList\");\nassertThat(responseStatisticsList.isArray()).isTrue();\nassertThat(responseStatisticsList.size()).isGreaterThan(0);\nJsonNode storeResult = responseStatisticsList.get(0).get(\"storeResult\");\nboolean hasPayload = storeResult.get(\"exception\").isNull();\nassertThat(storeResult).isNotNull();\nassertThat(storeResult.get(\"rntbdRequestLengthInBytes\").asInt(-1)).isGreaterThan(expectedRequestPayloadSize);\nassertThat(storeResult.get(\"rntbdRequestLengthInBytes\").asInt(-1)).isGreaterThan(expectedRequestPayloadSize);\nassertThat(storeResult.get(\"requestPayloadLengthInBytes\").asInt(-1)).isEqualTo(expectedRequestPayloadSize);\nif (hasPayload) {\nassertThat(storeResult.get(\"responsePayloadLengthInBytes\").asInt(-1)).isEqualTo(expectedResponsePayloadSize);\n}\nassertThat(storeResult.get(\"rntbdResponseLengthInBytes\").asInt(-1)).isGreaterThan(expectedResponsePayloadSize);\n}\n@Test(groups = {\"emulator\"}, timeOut = TIMEOUT)\npublic void addressResolutionStatistics() {\nCosmosClient client1 = null;\nCosmosClient client2 = null;\nString databaseId = DatabaseForTest.generateId();\nString containerId = UUID.randomUUID().toString();\nCosmosDatabase cosmosDatabase = null;\nCosmosContainer cosmosContainer = null;\ntry {\nclient1 = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\nclient1.createDatabase(databaseId);\ncosmosDatabase = client1.getDatabase(databaseId);\ncosmosDatabase.createContainer(containerId, \"/mypk\");\nInternalObjectNode internalObjectNode = getInternalObjectNode();\ncosmosContainer = cosmosDatabase.getContainer(containerId);\nCosmosItemResponse writeResourceResponse = 
cosmosContainer.createItem(internalObjectNode);\nassertThat(writeResourceResponse.getDiagnostics().toString()).contains(\"addressResolutionStatistics\");\nassertThat(writeResourceResponse.getDiagnostics().toString()).contains(\"\\\"inflightRequest\\\":false\");\nassertThat(writeResourceResponse.getDiagnostics().toString()).doesNotContain(\"endTime=\\\"null\\\"\");\nassertThat(writeResourceResponse.getDiagnostics().toString()).contains(\"\\\"errorMessage\\\":null\");\nassertThat(writeResourceResponse.getDiagnostics().toString()).doesNotContain(\"\\\"errorMessage\\\":\\\"io.netty\" +\n\".channel.AbstractChannel$AnnotatedConnectException: Connection refused: no further information\");\nclient2 = new CosmosClientBuilder()\n.endpoint(TestConfigurations.HOST)\n.key(TestConfigurations.MASTER_KEY)\n.contentResponseOnWriteEnabled(true)\n.directMode()\n.buildClient();\ncosmosDatabase = client2.getDatabase(databaseId);\ncosmosContainer = cosmosDatabase.getContainer(containerId);\nAsyncDocumentClient asyncDocumentClient = client2.asyncClient().getContextClient();\nGlobalAddressResolver addressResolver = (GlobalAddressResolver) FieldUtils.readField(asyncDocumentClient,\n\"addressResolver\", true);\n@SuppressWarnings(\"rawtypes\")\nMap addressCacheByEndpoint = (Map) FieldUtils.readField(addressResolver,\n\"addressCacheByEndpoint\",\ntrue);\nObject endpointCache = addressCacheByEndpoint.values().toArray()[0];\nGatewayAddressCache addressCache = (GatewayAddressCache) FieldUtils.readField(endpointCache, \"addressCache\", true);\nHttpClient httpClient = httpClient(true);\nFieldUtils.writeField(addressCache, \"httpClient\", httpClient, true);\nnew Thread(() -> {\ntry {\nThread.sleep(5000);\nHttpClient httpClient1 = httpClient(false);\nFieldUtils.writeField(addressCache, \"httpClient\", httpClient1, true);\n} catch (Exception e) {\nfail(e.getMessage());\n}\n}).start();\nPartitionKey partitionKey = new PartitionKey(internalObjectNode.get(\"mypk\"));\nCosmosItemResponse readResourceResponse =\ncosmosContainer.readItem(internalObjectNode.getId(), partitionKey, new CosmosItemRequestOptions(),\nInternalObjectNode.class);\nassertThat(readResourceResponse.getDiagnostics().toString()).contains(\"addressResolutionStatistics\");\nassertThat(readResourceResponse.getDiagnostics().toString()).contains(\"\\\"inflightRequest\\\":false\");\nassertThat(readResourceResponse.getDiagnostics().toString()).doesNotContain(\"endTime=\\\"null\\\"\");\nassertThat(readResourceResponse.getDiagnostics().toString()).contains(\"\\\"errorMessage\\\":null\");\nassertThat(readResourceResponse.getDiagnostics().toString()).contains(\"\\\"errorMessage\\\":\\\"io.netty\" +\n\".channel.AbstractChannel$AnnotatedConnectException: Connection refused\");\n} catch (Exception ex) {\nlogger.error(\"Error in test addressResolutionStatistics\", ex);\nfail(\"This test should not throw exception \" + ex);\n} finally {\nsafeDeleteSyncDatabase(cosmosDatabase);\nif (client1 != null) {\nclient1.close();\n}\nif (client2 != null) {\nclient2.close();\n}\n}\n}\nprivate InternalObjectNode getInternalObjectNode() {\nInternalObjectNode internalObjectNode = new InternalObjectNode();\nString uuid = UUID.randomUUID().toString();\ninternalObjectNode.setId(uuid);\nBridgeInternal.setProperty(internalObjectNode, \"mypk\", uuid);\nreturn internalObjectNode;\n}\nprivate InternalObjectNode getInternalObjectNode(String pkValue) {\nInternalObjectNode internalObjectNode = new InternalObjectNode();\nString uuid = 
UUID.randomUUID().toString();\ninternalObjectNode.setId(uuid);\nBridgeInternal.setProperty(internalObjectNode, \"mypk\", pkValue);\nreturn internalObjectNode;\n}\nprivate List getStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {\nField storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField(\"supplementalResponseStatisticsList\");\nstoreResponseStatisticsField.setAccessible(true);\n@SuppressWarnings({\"unchecked\"})\nList list\n= (List) storeResponseStatisticsField.get(requestStatistics);\nreturn list;\n}\nprivate void clearStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {\nField storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField(\"supplementalResponseStatisticsList\");\nstoreResponseStatisticsField.setAccessible(true);\nstoreResponseStatisticsField.set(requestStatistics, new ArrayList());\n}\nprivate void validateTransportRequestTimelineGateway(String diagnostics) {\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"connectionConfigured\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"connectionConfigured\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"requestSent\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"transitTime\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"received\\\"\");\n}\nprivate void validateTransportRequestTimelineDirect(String diagnostics) {\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"created\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"queued\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"channelAcquisitionStarted\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"pipelined\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"transitTime\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"received\\\"\");\nassertThat(diagnostics).contains(\"\\\"eventName\\\":\\\"completed\\\"\");\nassertThat(diagnostics).contains(\"\\\"startTimeUTC\\\"\");\nassertThat(diagnostics).contains(\"\\\"durationInMicroSec\\\"\");\n}\npublic void isValidJSON(final String json) {\ntry {\nfinal JsonParser parser = new ObjectMapper().createParser(json);\nwhile (parser.nextToken() != null) {\n}\n} catch (IOException ex) {\nfail(\"Diagnostic string is not in json format \", ex);\n}\n}\nprivate HttpClient httpClient(boolean fakeProxy) {\nHttpClientConfig httpClientConfig;\nif(fakeProxy) {\nhttpClientConfig = new HttpClientConfig(new Configs())\n.withProxy(new ProxyOptions(ProxyOptions.Type.HTTP, new InetSocketAddress(\"localhost\", 8888)));\n} else {\nhttpClientConfig = new HttpClientConfig(new Configs());\n}\nreturn HttpClient.createFixed(httpClientConfig);\n}\nprivate IndexUtilizationInfo createFromJSONString(String jsonString) {\nObjectMapper indexUtilizationInfoObjectMapper = new ObjectMapper();\nIndexUtilizationInfo indexUtilizationInfo = null;\ntry {\nindexUtilizationInfo = indexUtilizationInfoObjectMapper.readValue(jsonString, IndexUtilizationInfo.class);\n} catch (JsonProcessingException e) {\nlogger.error(\"Json not correctly formed \", e);\n}\nreturn indexUtilizationInfo;\n}\nprivate void validateRegionContacted(CosmosDiagnostics cosmosDiagnostics, CosmosAsyncClient cosmosAsyncClient) throws Exception {\nRxDocumentClientImpl rxDocumentClient =\n(RxDocumentClientImpl) ReflectionUtils.getAsyncDocumentClient(cosmosAsyncClient);\nGlobalEndpointManager globalEndpointManager = 
ReflectionUtils.getGlobalEndpointManager(rxDocumentClient);\nLocationCache locationCache = ReflectionUtils.getLocationCache(globalEndpointManager);\nField locationInfoField = LocationCache.class.getDeclaredField(\"locationInfo\");\nlocationInfoField.setAccessible(true);\nObject locationInfo = locationInfoField.get(locationCache);\nClass DatabaseAccountLocationsInfoClass = Class.forName(\"com.azure.cosmos.implementation.routing\" +\n\".LocationCache$DatabaseAccountLocationsInfo\");\nField availableWriteEndpointByLocation = DatabaseAccountLocationsInfoClass.getDeclaredField(\n\"availableWriteEndpointByLocation\");\navailableWriteEndpointByLocation.setAccessible(true);\n@SuppressWarnings(\"unchecked\")\nMap map = (Map) availableWriteEndpointByLocation.get(locationInfo);\nString regionName = map.keySet().iterator().next();\nassertThat(cosmosDiagnostics.getContactedRegionNames().size()).isEqualTo(1);\nassertThat(cosmosDiagnostics.getContactedRegionNames().iterator().next()).isEqualTo(regionName.toLowerCase());\n}\npublic static class TestItem {\npublic String id;\npublic String mypk;\npublic TestItem() {\n}\n}\n}" + }, + { + "comment": "The reason I had to even touch this code is because if I have the `container-image-podman` extension present but I have both docker & podman installed on my machine, it was always finding docker first and trying to build with docker. This change makes it so the caller can control the search order (if they want to - it's backwards compatible to other callers that don't care).", "method_body": "private static ContainerRuntime getContainerRuntimeEnvironment(List orderToCheckRuntimes) {\nvar runtimesToCheck = new ArrayList<>(orderToCheckRuntimes.stream().distinct().toList());\nruntimesToCheck.retainAll(List.of(ContainerRuntime.DOCKER, ContainerRuntime.PODMAN));\nif (CONTAINER_EXECUTABLE != null) {\nvar runtime = runtimesToCheck.stream()\n.filter(containerRuntime -> CONTAINER_EXECUTABLE.trim()\n.equalsIgnoreCase(containerRuntime.getExecutableName()))\n.findFirst()\n.filter(r -> {\nvar versionOutput = getVersionOutputFor(r);\nreturn switch (r) {\ncase DOCKER, DOCKER_ROOTLESS -> versionOutput.contains(\"Docker version\");\ncase PODMAN, PODMAN_ROOTLESS -> PODMAN_PATTERN.matcher(versionOutput).matches();\ndefault -> false;\n};\n});\nif (runtime.isPresent()) {\nreturn runtime.get();\n} else {\nlog.warn(\"quarkus.native.container-runtime config property must be set to either podman or docker \" +\n\"and the executable must be available. Ignoring it.\");\n}\n}\nfor (var runtime : runtimesToCheck) {\nvar versionOutput = getVersionOutputFor(runtime);\nswitch (runtime) {\ncase DOCKER:\ncase DOCKER_ROOTLESS:\nvar dockerAvailable = versionOutput.contains(\"Docker version\");\nif (dockerAvailable) {\nreturn PODMAN_PATTERN.matcher(versionOutput).matches() ? 
ContainerRuntime.PODMAN\n: ContainerRuntime.DOCKER;\n}\nbreak;\ncase PODMAN:\ncase PODMAN_ROOTLESS:\nif (PODMAN_PATTERN.matcher(versionOutput).matches()) {\nreturn ContainerRuntime.PODMAN;\n}\nbreak;\n}\n}\nreturn ContainerRuntime.UNAVAILABLE;\n}", + "target_code": ".filter(r -> {", + "method_body_after": "private static ContainerRuntime getContainerRuntimeEnvironment(List orderToCheckRuntimes) {\nvar runtimesToCheck = new ArrayList<>(orderToCheckRuntimes.stream().distinct().toList());\nruntimesToCheck.retainAll(List.of(ContainerRuntime.DOCKER, ContainerRuntime.PODMAN));\nif (CONTAINER_EXECUTABLE != null) {\nvar runtime = runtimesToCheck.stream()\n.filter(containerRuntime -> CONTAINER_EXECUTABLE.trim()\n.equalsIgnoreCase(containerRuntime.getExecutableName()))\n.findFirst()\n.filter(r -> {\nvar versionOutput = getVersionOutputFor(r);\nreturn switch (r) {\ncase DOCKER, DOCKER_ROOTLESS -> versionOutput.contains(\"Docker version\");\ncase PODMAN, PODMAN_ROOTLESS -> PODMAN_PATTERN.matcher(versionOutput).matches();\ndefault -> false;\n};\n});\nif (runtime.isPresent()) {\nreturn runtime.get();\n} else {\nlog.warn(\"quarkus.native.container-runtime config property must be set to either podman or docker \" +\n\"and the executable must be available. Ignoring it.\");\n}\n}\nfor (var runtime : runtimesToCheck) {\nvar versionOutput = getVersionOutputFor(runtime);\nswitch (runtime) {\ncase DOCKER:\ncase DOCKER_ROOTLESS:\nvar dockerAvailable = versionOutput.contains(\"Docker version\");\nif (dockerAvailable) {\nreturn PODMAN_PATTERN.matcher(versionOutput).matches() ? ContainerRuntime.PODMAN\n: ContainerRuntime.DOCKER;\n}\nbreak;\ncase PODMAN:\ncase PODMAN_ROOTLESS:\nif (PODMAN_PATTERN.matcher(versionOutput).matches()) {\nreturn ContainerRuntime.PODMAN;\n}\nbreak;\n}\n}\nreturn ContainerRuntime.UNAVAILABLE;\n}", + "context_before": "class gets loaded by different classloaders at\n* runtime and the container runtime would be detected again and again unnecessarily.\n*/\nprivate static final String CONTAINER_RUNTIME_SYS_PROP = \"quarkus-local-container-runtime\";\n/**\n* Defines the maximum number of characters to read from the output of the `docker info` command.\n*/\nprivate static final int MAX_ANTICIPATED_CHARACTERS_IN_DOCKER_INFO = 3000;\nprivate ContainerRuntimeUtil() {\n}", + "context_after": "class gets loaded by different classloaders at\n* runtime and the container runtime would be detected again and again unnecessarily.\n*/\nprivate static final String CONTAINER_RUNTIME_SYS_PROP = \"quarkus-local-container-runtime\";\n/**\n* Defines the maximum number of characters to read from the output of the `docker info` command.\n*/\nprivate static final int MAX_ANTICIPATED_CHARACTERS_IN_DOCKER_INFO = 3000;\nprivate ContainerRuntimeUtil() {\n}" + }, + { + "comment": "Passing a raw bool into a function call is not very readable. I suggest splitting into `verifyDoFnSupportedForStreaming` and `verifyDoFnSupportedForBatch`. 
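A minimal sketch of the split this comment suggests, assuming a hypothetical holder class and a trivial shared check (only `verifyDoFnSupportedBatch(fn)` is visible in this record's updated method body; every other name here is an assumption):

```java
import org.apache.beam.sdk.transforms.DoFn;

// Hypothetical sketch of the suggested refactoring: the raw boolean disappears
// from the public surface and survives only as a private implementation detail.
final class DoFnSupportChecks {

    // Intent-revealing entry point for streaming pipelines.
    static void verifyDoFnSupportedStreaming(DoFn<?, ?> fn) {
        verifyDoFnSupportedCommon(fn, true);
    }

    // Intent-revealing entry point for batch pipelines.
    static void verifyDoFnSupportedBatch(DoFn<?, ?> fn) {
        verifyDoFnSupportedCommon(fn, false);
    }

    // Shared validation both wrappers delegate to; the real checks are elided
    // here and replaced by a placeholder null check.
    private static void verifyDoFnSupportedCommon(DoFn<?, ?> fn, boolean streaming) {
        if (fn == null) {
            throw new IllegalArgumentException("fn must not be null");
        }
    }
}
```

Call sites then read as `DataflowRunner.verifyDoFnSupportedBatch(fn)`, which matches the `method_body_after` captured in this record.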
These can each call the common code.", + "method_body": "public PCollection expand(PCollection> input) {\nDoFn, OutputT> fn = originalParDo.getFn();\nverifyFnIsStateful(fn);\nDataflowRunner.verifyDoFnSupported(fn, false);\nDataflowRunner.verifyStateSupportForWindowingStrategy(input.getWindowingStrategy());\nif (isFnApi) {\nreturn input\n.apply(GroupByKey.create())\n.apply(ParDo.of(new ExpandGbkFn<>()))\n.apply(originalParDo);\n}\nPTransform<\nPCollection>>>>>,\nPCollection>\nstatefulParDo =\nParDo.of(new BatchStatefulDoFn<>(fn)).withSideInputs(originalParDo.getSideInputs());\nreturn input.apply(new GbkBeforeStatefulParDo<>()).apply(statefulParDo);\n}", + "target_code": "DataflowRunner.verifyDoFnSupported(fn, false);", + "method_body_after": "public PCollection expand(PCollection> input) {\nDoFn, OutputT> fn = originalParDo.getFn();\nverifyFnIsStateful(fn);\nDataflowRunner.verifyDoFnSupportedBatch(fn);\nDataflowRunner.verifyStateSupportForWindowingStrategy(input.getWindowingStrategy());\nif (isFnApi) {\nreturn input\n.apply(GroupByKey.create())\n.apply(ParDo.of(new ExpandGbkFn<>()))\n.apply(originalParDo);\n}\nPTransform<\nPCollection>>>>>,\nPCollection>\nstatefulParDo =\nParDo.of(new BatchStatefulDoFn<>(fn)).withSideInputs(originalParDo.getSideInputs());\nreturn input.apply(new GbkBeforeStatefulParDo<>()).apply(statefulParDo);\n}", + "context_before": "class StatefulSingleOutputParDo\nextends PTransform>, PCollection> {\nprivate final ParDo.SingleOutput, OutputT> originalParDo;\nprivate final boolean isFnApi;\nStatefulSingleOutputParDo(\nParDo.SingleOutput, OutputT> originalParDo, boolean isFnApi) {\nthis.originalParDo = originalParDo;\nthis.isFnApi = isFnApi;\n}\nParDo.SingleOutput, OutputT> getOriginalParDo() {\nreturn originalParDo;\n}\n@Override\n}", + "context_after": "class StatefulSingleOutputParDo\nextends PTransform>, PCollection> {\nprivate final ParDo.SingleOutput, OutputT> originalParDo;\nprivate final boolean isFnApi;\nStatefulSingleOutputParDo(\nParDo.SingleOutput, OutputT> originalParDo, boolean isFnApi) {\nthis.originalParDo = originalParDo;\nthis.isFnApi = isFnApi;\n}\nParDo.SingleOutput, OutputT> getOriginalParDo() {\nreturn originalParDo;\n}\n@Override\n}" + }, + { + "comment": "Updated", + "method_body": "public void testSupportCombinedTxnLog() throws Exception {\nnew MockUp() {\n@Mock\npublic boolean isSharedDataMode() {\nreturn true;\n}\n};\nConfig.lake_use_combined_txn_log = true;\nAssert.assertTrue(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.BACKEND_STREAMING));\nAssert.assertTrue(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.ROUTINE_LOAD_TASK));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.INSERT_STREAMING));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.BATCH_LOAD_JOB));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.LAKE_COMPACTION));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.FRONTEND_STREAMING));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.BYPASS_WRITE));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.DELETE));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.MV_REFRESH));\nConfig.lake_use_combined_txn_log = 
false;\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.BACKEND_STREAMING));\n}", + "target_code": "new MockUp() {", + "method_body_after": "public void testSupportCombinedTxnLog() throws Exception {\nConfig.lake_use_combined_txn_log = true;\nAssert.assertTrue(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.BACKEND_STREAMING));\nAssert.assertTrue(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.ROUTINE_LOAD_TASK));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.INSERT_STREAMING));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.BATCH_LOAD_JOB));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.LAKE_COMPACTION));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.FRONTEND_STREAMING));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.BYPASS_WRITE));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.DELETE));\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.MV_REFRESH));\nConfig.lake_use_combined_txn_log = false;\nAssert.assertFalse(LakeTableHelper.supportCombinedTxnLog(TransactionState.LoadJobSourceType.BACKEND_STREAMING));\n}", + "context_before": "class LakeTableHelperTest {\nprivate static ConnectContext connectContext;\nprivate static final String DB_NAME = \"test_lake_table_helper\";\n@BeforeClass\npublic static void beforeClass() throws Exception {\nUtFrameUtils.createMinStarRocksCluster(RunMode.SHARED_DATA);\nconnectContext = UtFrameUtils.createDefaultCtx();\nString createDbStmtStr = \"create database \" + DB_NAME;\nCreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseStmtWithNewParser(createDbStmtStr, connectContext);\nGlobalStateMgr.getCurrentState().getMetadata().createDb(createDbStmt.getFullDbName());\n}\n@AfterClass\npublic static void afterClass() {\n}\nprivate static LakeTable createTable(String sql) throws Exception {\nCreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);\nGlobalStateMgr.getCurrentState().getLocalMetastore().createTable(createTableStmt);\nTable table = testDb().getTable(createTableStmt.getTableName());\nreturn (LakeTable) table;\n}\nprivate static Database testDb() {\nreturn GlobalStateMgr.getCurrentState().getLocalMetastore().getDb(DB_NAME);\n}\n@Test\n}", + "context_after": "class LakeTableHelperTest {\nprivate static ConnectContext connectContext;\nprivate static final String DB_NAME = \"test_lake_table_helper\";\n@BeforeClass\npublic static void beforeClass() throws Exception {\nUtFrameUtils.createMinStarRocksCluster(RunMode.SHARED_DATA);\nconnectContext = UtFrameUtils.createDefaultCtx();\nString createDbStmtStr = \"create database \" + DB_NAME;\nCreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseStmtWithNewParser(createDbStmtStr, connectContext);\nGlobalStateMgr.getCurrentState().getMetadata().createDb(createDbStmt.getFullDbName());\n}\n@AfterClass\npublic static void afterClass() {\n}\nprivate static LakeTable createTable(String sql) throws Exception {\nCreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);\nGlobalStateMgr.getCurrentState().getLocalMetastore().createTable(createTableStmt);\nTable table = 
testDb().getTable(createTableStmt.getTableName());\nreturn (LakeTable) table;\n}\nprivate static Database testDb() {\nreturn GlobalStateMgr.getCurrentState().getLocalMetastore().getDb(DB_NAME);\n}\n@Test\n}" + }, + { + "comment": "I don't think we have that requirement. The purpose of unsetting the flink/process sizes from the configuration is to make sure the values in `workerResourceSpec` (task heap, task off-heap, network, managed) and those not in `workerResourceSpec` (framework heap, framework off-heap, jvm metaspace, jvm overhead, total flink, total process) can be put together without conflict. If they cannot be put together, we don't really need a checkState to remind us, because the generation of `taskExecutorProcessSpec` will fail.", "method_body": "private KubernetesTaskManagerParameters createKubernetesTaskManagerParameters(WorkerResourceSpec workerResourceSpec) {\nfinal TaskExecutorProcessSpec taskExecutorProcessSpec =\nTaskExecutorProcessUtils.processSpecFromWorkerResourceSpec(flinkConfig, workerResourceSpec);\nfinal String podName = String.format(\nTASK_MANAGER_POD_FORMAT,\nclusterId,\ncurrentMaxAttemptId,\n++currentMaxPodId);\nfinal ContaineredTaskManagerParameters taskManagerParameters =\nContaineredTaskManagerParameters.create(flinkConfig, taskExecutorProcessSpec);\nfinal String dynamicProperties =\nBootstrapTools.getDynamicPropertiesAsString(flinkClientConfig, flinkConfig);\nreturn new KubernetesTaskManagerParameters(\nflinkConfig,\npodName,\ndynamicProperties,\ntaskManagerParameters);\n}", "target_code": "", "method_body_after": "private KubernetesTaskManagerParameters createKubernetesTaskManagerParameters(WorkerResourceSpec workerResourceSpec) {\nfinal TaskExecutorProcessSpec taskExecutorProcessSpec =\nTaskExecutorProcessUtils.processSpecFromWorkerResourceSpec(flinkConfig, workerResourceSpec);\nfinal String podName = String.format(\nTASK_MANAGER_POD_FORMAT,\nclusterId,\ncurrentMaxAttemptId,\n++currentMaxPodId);\nfinal ContaineredTaskManagerParameters taskManagerParameters =\nContaineredTaskManagerParameters.create(flinkConfig, taskExecutorProcessSpec);\nfinal String dynamicProperties =\nBootstrapTools.getDynamicPropertiesAsString(flinkClientConfig, flinkConfig);\nreturn new KubernetesTaskManagerParameters(\nflinkConfig,\npodName,\ndynamicProperties,\ntaskManagerParameters);\n}", "context_before": "class KubernetesResourceManager extends ActiveResourceManager\nimplements FlinkKubeClient.PodCallbackHandler {\nprivate static final Logger LOG = LoggerFactory.getLogger(KubernetesResourceManager.class);\n/** The taskmanager pod name pattern is {clusterId}-{taskmanager}-{attemptId}-{podIndex}. */\nprivate static final String TASK_MANAGER_POD_FORMAT = \"%s-taskmanager-%d-%d\";\nprivate final Map workerNodes = new HashMap<>();\n/** When ResourceManager failover, the max attempt should recover. */\nprivate long currentMaxAttemptId = 0;\n/** Current max pod index. When creating a new pod, it should increase one. */\nprivate long currentMaxPodId = 0;\nprivate final String clusterId;\nprivate final FlinkKubeClient kubeClient;\n/** Map from pod name to worker resource. 
*/\nprivate final Map podWorkerResources;\npublic KubernetesResourceManager(\nRpcService rpcService,\nString resourceManagerEndpointId,\nResourceID resourceId,\nConfiguration flinkConfig,\nHighAvailabilityServices highAvailabilityServices,\nHeartbeatServices heartbeatServices,\nSlotManager slotManager,\nJobLeaderIdService jobLeaderIdService,\nClusterInformation clusterInformation,\nFatalErrorHandler fatalErrorHandler,\nResourceManagerMetricGroup resourceManagerMetricGroup) {\nsuper(\nflinkConfig,\nSystem.getenv(),\nrpcService,\nresourceManagerEndpointId,\nresourceId,\nhighAvailabilityServices,\nheartbeatServices,\nslotManager,\njobLeaderIdService,\nclusterInformation,\nfatalErrorHandler,\nresourceManagerMetricGroup);\nthis.clusterId = flinkConfig.getString(KubernetesConfigOptions.CLUSTER_ID);\nthis.kubeClient = createFlinkKubeClient();\nthis.podWorkerResources = new HashMap<>();\n}\n@Override\nprotected Configuration loadClientConfiguration() {\nreturn GlobalConfiguration.loadConfiguration();\n}\n@Override\nprotected void initialize() throws ResourceManagerException {\nrecoverWorkerNodesFromPreviousAttempts();\nkubeClient.watchPodsAndDoCallback(KubernetesUtils.getTaskManagerLabels(clusterId), this);\n}\n@Override\npublic CompletableFuture onStop() {\nThrowable exception = null;\ntry {\nkubeClient.close();\n} catch (Throwable t) {\nexception = t;\n}\nreturn getStopTerminationFutureOrCompletedExceptionally(exception);\n}\n@Override\nprotected void internalDeregisterApplication(ApplicationStatus finalStatus, @Nullable String diagnostics) {\nLOG.info(\n\"Stopping kubernetes cluster, clusterId: {}, diagnostics: {}\",\nclusterId,\ndiagnostics == null ? \"\" : diagnostics);\nkubeClient.stopAndCleanupCluster(clusterId);\n}\n@Override\npublic boolean startNewWorker(WorkerResourceSpec workerResourceSpec) {\nLOG.info(\"Starting new worker with worker resource spec, {}\", workerResourceSpec);\nrequestKubernetesPod(workerResourceSpec);\nreturn true;\n}\n@Override\nprotected KubernetesWorkerNode workerStarted(ResourceID resourceID) {\nreturn workerNodes.get(resourceID);\n}\n@Override\npublic boolean stopWorker(final KubernetesWorkerNode worker) {\nLOG.info(\"Stopping Worker {}.\", worker.getResourceID());\nremoveWorkerNodeAndResourceSpec(worker.getResourceID());\ntry {\nkubeClient.stopPod(worker.getResourceID().toString());\n} catch (Exception e) {\nkubeClient.handleException(e);\nreturn false;\n}\nreturn true;\n}\n@Override\npublic void onAdded(List pods) {\nrunAsync(() -> {\npods.forEach(pod -> {\nWorkerResourceSpec workerResourceSpec = podWorkerResources.get(pod.getName());\nfinal int pendingNum = pendingWorkerCounter.getNum(workerResourceSpec);\nif (pendingNum > 0) {\npendingWorkerCounter.decreaseAndGet(workerResourceSpec);\nfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(new ResourceID(pod.getName()));\nworkerNodes.putIfAbsent(worker.getResourceID(), worker);\n}\nlog.info(\"Received new TaskManager pod: {}\", pod.getName());\n});\nlog.info(\"Received {} new TaskManager pods. 
Remaining pending pod requests: {}\",\npods.size(), pendingWorkerCounter.getTotalNum());\n});\n}\n@Override\npublic void onModified(List pods) {\nrunAsync(() -> pods.forEach(this::removePodIfTerminated));\n}\n@Override\npublic void onDeleted(List pods) {\nrunAsync(() -> pods.forEach(this::removePodIfTerminated));\n}\n@Override\npublic void onError(List pods) {\nrunAsync(() -> pods.forEach(this::removePodIfTerminated));\n}\n@VisibleForTesting\nMap getWorkerNodes() {\nreturn workerNodes;\n}\nprivate void recoverWorkerNodesFromPreviousAttempts() throws ResourceManagerException {\nfinal List podList = kubeClient.getPodsWithLabels(KubernetesUtils.getTaskManagerLabels(clusterId));\nfor (KubernetesPod pod : podList) {\nfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(new ResourceID(pod.getName()));\nworkerNodes.put(worker.getResourceID(), worker);\nfinal long attempt = worker.getAttempt();\nif (attempt > currentMaxAttemptId) {\ncurrentMaxAttemptId = attempt;\n}\n}\nlog.info(\"Recovered {} pods from previous attempts, current attempt id is {}.\",\nworkerNodes.size(),\n++currentMaxAttemptId);\n}\nprivate void requestKubernetesPod(WorkerResourceSpec workerResourceSpec) {\nfinal KubernetesTaskManagerParameters parameters =\ncreateKubernetesTaskManagerParameters(workerResourceSpec);\npodWorkerResources.put(parameters.getPodName(), workerResourceSpec);\nfinal int pendingWorkerNum = pendingWorkerCounter.increaseAndGet(workerResourceSpec);\nlog.info(\"Requesting new TaskManager pod with <{},{}>. Number pending requests {}.\",\nparameters.getTaskManagerMemoryMB(),\nparameters.getTaskManagerCPU(),\npendingWorkerNum);\nlog.info(\"TaskManager {} will be started with {}.\", parameters.getPodName(), workerResourceSpec);\nfinal KubernetesPod taskManagerPod =\nKubernetesTaskManagerFactory.createTaskManagerComponent(parameters);\nkubeClient.createTaskManagerPod(taskManagerPod);\n}\n/**\n* Request new pod if pending pods cannot satisfy pending slot requests.\n*/\nprivate void requestKubernetesPodIfRequired(WorkerResourceSpec workerResourceSpec) {\nfinal int requiredTaskManagers = getPendingWorkerNums().get(workerResourceSpec);\nfinal int pendingWorkerNum = pendingWorkerCounter.getNum(workerResourceSpec);\nif (requiredTaskManagers > pendingWorkerNum) {\nrequestKubernetesPod(workerResourceSpec);\n}\n}\nprivate void removePodIfTerminated(KubernetesPod pod) {\nif (pod.isTerminated()) {\nkubeClient.stopPod(pod.getName());\nfinal WorkerResourceSpec workerResourceSpec = removeWorkerNodeAndResourceSpec(new ResourceID(pod.getName()));\nif (workerResourceSpec != null) {\nrequestKubernetesPodIfRequired(workerResourceSpec);\n}\n}\n}\nprivate WorkerResourceSpec removeWorkerNodeAndResourceSpec(ResourceID resourceId) {\nfinal KubernetesWorkerNode kubernetesWorkerNode = workerNodes.remove(resourceId);\nreturn kubernetesWorkerNode != null ? 
podWorkerResources.remove(resourceId.toString()) : null;\n}\nprotected FlinkKubeClient createFlinkKubeClient() {\nreturn KubeClientFactory.fromConfiguration(flinkConfig);\n}\n@Override\nprotected double getCpuCores(Configuration configuration) {\nreturn TaskExecutorProcessUtils.getCpuCoresWithFallbackConfigOption(configuration, KubernetesConfigOptions.TASK_MANAGER_CPU);\n}\n}", + "context_after": "class KubernetesResourceManager extends ActiveResourceManager\nimplements FlinkKubeClient.PodCallbackHandler {\nprivate static final Logger LOG = LoggerFactory.getLogger(KubernetesResourceManager.class);\n/** The taskmanager pod name pattern is {clusterId}-{taskmanager}-{attemptId}-{podIndex}. */\nprivate static final String TASK_MANAGER_POD_FORMAT = \"%s-taskmanager-%d-%d\";\nprivate final Map workerNodes = new HashMap<>();\n/** When ResourceManager failover, the max attempt should recover. */\nprivate long currentMaxAttemptId = 0;\n/** Current max pod index. When creating a new pod, it should increase one. */\nprivate long currentMaxPodId = 0;\nprivate final String clusterId;\nprivate final FlinkKubeClient kubeClient;\nprivate final KubernetesResourceManagerConfiguration configuration;\n/** Map from pod name to worker resource. */\nprivate final Map podWorkerResources;\npublic KubernetesResourceManager(\nRpcService rpcService,\nResourceID resourceId,\nConfiguration flinkConfig,\nHighAvailabilityServices highAvailabilityServices,\nHeartbeatServices heartbeatServices,\nSlotManager slotManager,\nResourceManagerPartitionTrackerFactory clusterPartitionTrackerFactory,\nJobLeaderIdService jobLeaderIdService,\nClusterInformation clusterInformation,\nFatalErrorHandler fatalErrorHandler,\nResourceManagerMetricGroup resourceManagerMetricGroup,\nFlinkKubeClient kubeClient,\nKubernetesResourceManagerConfiguration configuration) {\nsuper(\nflinkConfig,\nSystem.getenv(),\nrpcService,\nresourceId,\nhighAvailabilityServices,\nheartbeatServices,\nslotManager,\nclusterPartitionTrackerFactory,\njobLeaderIdService,\nclusterInformation,\nfatalErrorHandler,\nresourceManagerMetricGroup);\nthis.clusterId = configuration.getClusterId();\nthis.kubeClient = kubeClient;\nthis.configuration = configuration;\nthis.podWorkerResources = new HashMap<>();\n}\n@Override\nprotected Configuration loadClientConfiguration() {\nreturn GlobalConfiguration.loadConfiguration();\n}\n@Override\nprotected void initialize() throws ResourceManagerException {\nrecoverWorkerNodesFromPreviousAttempts();\nkubeClient.watchPodsAndDoCallback(KubernetesUtils.getTaskManagerLabels(clusterId), this);\n}\n@Override\npublic CompletableFuture onStop() {\nThrowable exception = null;\ntry {\nkubeClient.close();\n} catch (Throwable t) {\nexception = t;\n}\nreturn getStopTerminationFutureOrCompletedExceptionally(exception);\n}\n@Override\nprotected void internalDeregisterApplication(ApplicationStatus finalStatus, @Nullable String diagnostics) {\nLOG.info(\n\"Stopping kubernetes cluster, clusterId: {}, diagnostics: {}\",\nclusterId,\ndiagnostics == null ? 
\"\" : diagnostics);\nkubeClient.stopAndCleanupCluster(clusterId);\n}\n@Override\npublic boolean startNewWorker(WorkerResourceSpec workerResourceSpec) {\nLOG.info(\"Starting new worker with worker resource spec, {}\", workerResourceSpec);\nrequestKubernetesPod(workerResourceSpec);\nreturn true;\n}\n@Override\nprotected KubernetesWorkerNode workerStarted(ResourceID resourceID) {\nreturn workerNodes.get(resourceID);\n}\n@Override\npublic boolean stopWorker(final KubernetesWorkerNode worker) {\nfinal ResourceID resourceId = worker.getResourceID();\nLOG.info(\"Stopping Worker {}.\", resourceId);\ninternalStopPod(resourceId.toString());\nreturn true;\n}\n@Override\npublic void onAdded(List pods) {\nrunAsync(() -> {\nint duplicatePodNum = 0;\nfor (KubernetesPod pod : pods) {\nfinal String podName = pod.getName();\nfinal ResourceID resourceID = new ResourceID(podName);\nif (workerNodes.containsKey(resourceID)) {\nlog.debug(\"Ignore TaskManager pod that is already added: {}\", podName);\n++duplicatePodNum;\ncontinue;\n}\nfinal WorkerResourceSpec workerResourceSpec = Preconditions.checkNotNull(\npodWorkerResources.get(podName),\n\"Unrecognized pod {}. Pods from previous attempt should have already been added.\", podName);\nfinal int pendingNum = getNumPendingWorkersFor(workerResourceSpec);\nPreconditions.checkState(pendingNum > 0, \"Should not receive more workers than requested.\");\nnotifyNewWorkerAllocated(workerResourceSpec);\nfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(resourceID);\nworkerNodes.put(resourceID, worker);\nlog.info(\"Received new TaskManager pod: {}\", podName);\n}\nlog.info(\"Received {} new TaskManager pods. Remaining pending pod requests: {}\",\npods.size() - duplicatePodNum, getNumPendingWorkers());\n});\n}\n@Override\npublic void onModified(List pods) {\nrunAsync(() -> pods.forEach(this::removePodAndTryRestartIfRequired));\n}\n@Override\npublic void onDeleted(List pods) {\nrunAsync(() -> pods.forEach(this::removePodAndTryRestartIfRequired));\n}\n@Override\npublic void onError(List pods) {\nrunAsync(() -> pods.forEach(this::removePodAndTryRestartIfRequired));\n}\n@VisibleForTesting\nMap getWorkerNodes() {\nreturn workerNodes;\n}\nprivate void recoverWorkerNodesFromPreviousAttempts() throws ResourceManagerException {\nfinal List podList = kubeClient.getPodsWithLabels(KubernetesUtils.getTaskManagerLabels(clusterId));\nfor (KubernetesPod pod : podList) {\nfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(new ResourceID(pod.getName()));\nworkerNodes.put(worker.getResourceID(), worker);\nfinal long attempt = worker.getAttempt();\nif (attempt > currentMaxAttemptId) {\ncurrentMaxAttemptId = attempt;\n}\n}\nlog.info(\"Recovered {} pods from previous attempts, current attempt id is {}.\",\nworkerNodes.size(),\n++currentMaxAttemptId);\n}\nprivate void requestKubernetesPod(WorkerResourceSpec workerResourceSpec) {\nfinal KubernetesTaskManagerParameters parameters =\ncreateKubernetesTaskManagerParameters(workerResourceSpec);\npodWorkerResources.put(parameters.getPodName(), workerResourceSpec);\nfinal int pendingWorkerNum = notifyNewWorkerRequested(workerResourceSpec);\nlog.info(\"Requesting new TaskManager pod with <{},{}>. 
Number pending requests {}.\",\nparameters.getTaskManagerMemoryMB(),\nparameters.getTaskManagerCPU(),\npendingWorkerNum);\nfinal KubernetesPod taskManagerPod =\nKubernetesTaskManagerFactory.buildTaskManagerKubernetesPod(parameters);\nkubeClient.createTaskManagerPod(taskManagerPod)\n.whenCompleteAsync(\n(ignore, throwable) -> {\nif (throwable != null) {\nfinal Time retryInterval = configuration.getPodCreationRetryInterval();\nlog.warn(\"Could not start TaskManager in pod {}, retry in {}. \",\ntaskManagerPod.getName(), retryInterval, throwable);\npodWorkerResources.remove(parameters.getPodName());\nnotifyNewWorkerAllocationFailed(workerResourceSpec);\nscheduleRunAsync(\nthis::requestKubernetesPodIfRequired,\nretryInterval);\n} else {\nlog.info(\"TaskManager {} will be started with {}.\", parameters.getPodName(), workerResourceSpec);\n}\n},\ngetMainThreadExecutor());\n}\n/**\n* Request new pod if pending pods cannot satisfy pending slot requests.\n*/\nprivate void requestKubernetesPodIfRequired() {\nfor (Map.Entry entry : getRequiredResources().entrySet()) {\nfinal WorkerResourceSpec workerResourceSpec = entry.getKey();\nfinal int requiredTaskManagers = entry.getValue();\nwhile (requiredTaskManagers > getNumPendingWorkersFor(workerResourceSpec)) {\nrequestKubernetesPod(workerResourceSpec);\n}\n}\n}\nprivate void removePodAndTryRestartIfRequired(KubernetesPod pod) {\nif (pod.isTerminated()) {\ninternalStopPod(pod.getName());\nrequestKubernetesPodIfRequired();\n}\n}\nprivate void internalStopPod(String podName) {\nfinal ResourceID resourceId = new ResourceID(podName);\nfinal boolean isPendingWorkerOfCurrentAttempt = isPendingWorkerOfCurrentAttempt(podName);\nkubeClient.stopPod(podName)\n.whenComplete(\n(ignore, throwable) -> {\nif (throwable != null) {\nlog.warn(\"Could not stop TaskManager in pod {}.\", podName, throwable);\n}\n}\n);\nfinal WorkerResourceSpec workerResourceSpec = podWorkerResources.remove(podName);\nworkerNodes.remove(resourceId);\nif (isPendingWorkerOfCurrentAttempt) {\nnotifyNewWorkerAllocationFailed(\nPreconditions.checkNotNull(workerResourceSpec,\n\"Worker resource spec of current attempt pending worker should be known.\"));\n}\n}\nprivate boolean isPendingWorkerOfCurrentAttempt(String podName) {\nreturn podWorkerResources.containsKey(podName) &&\n!workerNodes.containsKey(new ResourceID(podName));\n}\n}" + }, + { + "comment": "O actually I am probably wrong. I can add a specific type with NULL value. 
Trying this idea.", + "method_body": "public void testConditionalOperatorsAndFunctions() {\nExpressionChecker checker =\nnew ExpressionChecker()\n.addExpr(\"CASE 1 WHEN 1 THEN 'hello' ELSE 'world' END\", \"hello\")\n.addExpr(\n\"CASE 2 \" + \"WHEN 1 THEN 'hello' \" + \"WHEN 3 THEN 'bond' \" + \"ELSE 'world' END\",\n\"world\")\n.addExpr(\n\"CASE 3 \" + \"WHEN 1 THEN 'hello' \" + \"WHEN 3 THEN 'bond' \" + \"ELSE 'world' END\",\n\"bond\")\n.addExpr(\"CASE \" + \"WHEN 1 = 1 THEN 'hello' \" + \"ELSE 'world' END\", \"hello\")\n.addExpr(\"CASE \" + \"WHEN 1 > 1 THEN 'hello' \" + \"ELSE 'world' END\", \"world\")\n.addExpr(\"NULLIF(5, 4) \", 5)\n.addExpr(\"NULLIF(4, 5) \", 4)\n.addExpr(\"COALESCE(1, 5) \", 1)\n.addExpr(\"COALESCE(NULL, 5) \", 5)\n.addExpr(\"COALESCE(NULL, 4, 5) \", 4)\n.addExpr(\"COALESCE(NULL, NULL, 5) \", 5)\n.addExpr(\"COALESCE(5, NULL) \", 5);\nchecker.buildRunAndCheck();\n}", + "target_code": "", + "method_body_after": "public void testConditionalOperatorsAndFunctions() {\nExpressionChecker checker =\nnew ExpressionChecker()\n.addExpr(\"CASE 1 WHEN 1 THEN 'hello' ELSE 'world' END\", \"hello\")\n.addExpr(\n\"CASE 2 \" + \"WHEN 1 THEN 'hello' \" + \"WHEN 3 THEN 'bond' \" + \"ELSE 'world' END\",\n\"world\")\n.addExpr(\n\"CASE 3 \" + \"WHEN 1 THEN 'hello' \" + \"WHEN 3 THEN 'bond' \" + \"ELSE 'world' END\",\n\"bond\")\n.addExpr(\"CASE \" + \"WHEN 1 = 1 THEN 'hello' \" + \"ELSE 'world' END\", \"hello\")\n.addExpr(\"CASE \" + \"WHEN 1 > 1 THEN 'hello' \" + \"ELSE 'world' END\", \"world\")\n.addExpr(\"NULLIF(5, 4) \", 5)\n.addExpr(\"NULLIF(4, 5) \", 4)\n.addExpr(\"NULLIF(5, 5)\", null, FieldType.INT32)\n.addExpr(\"COALESCE(1, 5) \", 1)\n.addExpr(\"COALESCE(NULL, 5) \", 5)\n.addExpr(\"COALESCE(NULL, 4, 5) \", 4)\n.addExpr(\"COALESCE(NULL, NULL, 5) \", 5)\n.addExpr(\"COALESCE(5, NULL) \", 5);\nchecker.buildRunAndCheck();\n}", + "context_before": "class for passing around\n* the ids.\n*/\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.METHOD}", + "context_after": "class for passing around\n* the ids.\n*/\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.METHOD}" + }, + { + "comment": "We do not have the original full pattern. We only have the inclusion part. 
", + "method_body": "public static void parseInsertion(String regExpStr) {\ntry {\nCharReader charReader = CharReader.from(regExpStr);\nTokenReader tokenReader = new TokenReader(new TreeTraverser(charReader));\nTreeBuilder treeBuilder = new TreeBuilder(tokenReader);\ntreeBuilder.parseInsertion();\n} catch (BallerinaException e) {\nthrow ErrorCreator.createError(BallerinaErrorReasons.REG_EXP_PARSING_ERROR,\nStringUtils.fromString(e.getMessage() + \" in insertion substring '\"\n+ regExpStr.substring(3, regExpStr.length() - 1) + \"'\"));\n}\n}", + "target_code": "StringUtils.fromString(e.getMessage() + \" in insertion substring '\"", + "method_body_after": "public static void parseInsertion(String regExpStr) {\ntry {\nCharReader charReader = CharReader.from(regExpStr);\nTokenReader tokenReader = new TokenReader(new TreeTraverser(charReader));\nTreeBuilder treeBuilder = new TreeBuilder(tokenReader);\ntreeBuilder.parseInsertion();\n} catch (BallerinaException e) {\nthrow ErrorCreator.createError(BallerinaErrorReasons.REG_EXP_PARSING_ERROR,\nStringUtils.fromString(e.getMessage() + \" in insertion substring '\"\n+ regExpStr.substring(3, regExpStr.length() - 1) + \"'\"));\n}\n}", + "context_before": "class RegExpFactory {\nprivate RegExpFactory() {\n}\npublic static RegExpValue createRegExpValue(RegExpDisjunction regExpDisjunction) {\nreturn new RegExpValue(regExpDisjunction);\n}\npublic static RegExpDisjunction createReDisjunction(ArrayValue termsList) {\nreturn new RegExpDisjunction(termsList);\n}\npublic static RegExpSequence createReSequence(ArrayValue seqList) {\nreturn new RegExpSequence(seqList);\n}\npublic static RegExpAssertion createReAssertion(BString assertion) {\nreturn new RegExpAssertion(assertion.getValue());\n}\npublic static RegExpAtomQuantifier createReAtomQuantifier(Object reAtom, RegExpQuantifier quantifier) {\nreturn new RegExpAtomQuantifier(reAtom, quantifier);\n}\npublic static RegExpLiteralCharOrEscape createReLiteralCharOrEscape(BString charOrEscape) {\nreturn new RegExpLiteralCharOrEscape(charOrEscape.getValue());\n}\npublic static RegExpCharacterClass createReCharacterClass(BString characterClassStart, BString negation,\nRegExpCharSet reCharSet, BString characterClassEnd) {\nreturn new RegExpCharacterClass(characterClassStart.getValue(), negation.getValue(), reCharSet,\ncharacterClassEnd.getValue());\n}\npublic static RegExpCharSet createReCharSet(ArrayValue charSet) {\nreturn new RegExpCharSet(charSet);\n}\npublic static RegExpCharSetRange createReCharSetRange(BString lhsCharSetAtom, BString dash,\nBString rhsCharSetAtom) {\nreturn new RegExpCharSetRange(lhsCharSetAtom.getValue(), dash.getValue(), rhsCharSetAtom.getValue());\n}\npublic static RegExpCapturingGroup createReCapturingGroup(BString openParen, Object flagExpr,\nRegExpDisjunction reDisjunction, BString closeParen) {\nreturn new RegExpCapturingGroup(openParen.getValue(), (RegExpFlagExpression) flagExpr, reDisjunction,\ncloseParen.getValue());\n}\npublic static RegExpFlagExpression createReFlagExpression(BString questionMark, RegExpFlagOnOff flagsOnOff,\nBString colon) {\nreturn new RegExpFlagExpression(questionMark.getValue(), flagsOnOff, colon.getValue());\n}\npublic static RegExpFlagOnOff createReFlagOnOff(BString flags) {\nreturn new RegExpFlagOnOff(flags.getValue());\n}\npublic static RegExpQuantifier createReQuantifier(BString quantifier, BString nonGreedyChar) {\nreturn new RegExpQuantifier(quantifier.getValue(), nonGreedyChar.getValue());\n}\npublic static RegExpValue parse(String regExpStr) {\ntry 
{\nCharReader charReader = CharReader.from(regExpStr);\nTokenReader tokenReader = new TokenReader(new TreeTraverser(charReader));\nTreeBuilder treeBuilder = new TreeBuilder(tokenReader);\nreturn treeBuilder.parse();\n} catch (BallerinaException e) {\nthrow ErrorCreator.createError(StringUtils.fromString(e.getMessage()\n+ \" in '\" + regExpStr + \"' RegExp pattern\"));\n}\n}\npublic static RegExpValue translateRegExpConstructs(RegExpValue regExpValue) {\nRegExpDisjunction disjunction = regExpValue.getRegExpDisjunction();\nif (disjunction.stringValue(null).equals(\"\")) {\ndisjunction = getNonCapturingGroupDisjunction();\n}\nfor (Object s : disjunction.getRegExpSeqList()) {\nif (!(s instanceof RegExpSequence)) {\ncontinue;\n}\nRegExpSequence seq = (RegExpSequence) s;\ntranslateRegExpTerms(seq.getRegExpTermsList());\n}\nreturn new RegExpValue(disjunction);\n}\nprivate static RegExpDisjunction getNonCapturingGroupDisjunction() {\nRegExpFlagOnOff flagsOnOff = new RegExpFlagOnOff(\"\");\nRegExpFlagExpression flagExpr = new RegExpFlagExpression(\"?\", flagsOnOff, \":\");\nRegExpDisjunction reDisjunction = new RegExpDisjunction(new Object[]{});\nRegExpCapturingGroup reAtom = new RegExpCapturingGroup(\"(\", flagExpr, reDisjunction, \")\");\nRegExpQuantifier reQuantifier = new RegExpQuantifier(\"\", \"\");\nRegExpTerm[] termList = new RegExpTerm[]{new RegExpAtomQuantifier(reAtom, reQuantifier)};\nreturn new RegExpDisjunction(new Object[]{new RegExpSequence(termList)});\n}\nprivate static void translateRegExpTerms(RegExpTerm[] terms) {\nfor (RegExpTerm t : terms) {\nif (!(t instanceof RegExpAtomQuantifier)) {\ncontinue;\n}\nRegExpAtomQuantifier atomQuantifier = (RegExpAtomQuantifier) t;\nObject reAtom = atomQuantifier.getReAtom();\nif (reAtom instanceof RegExpLiteralCharOrEscape) {\natomQuantifier.setReAtom(translateLiteralCharOrEscape((RegExpLiteralCharOrEscape) reAtom));\n} else if (reAtom instanceof RegExpCharacterClass) {\natomQuantifier.setReAtom(translateCharacterClass((RegExpCharacterClass) reAtom));\n}\n}\n}\nprivate static RegExpAtom translateLiteralCharOrEscape(RegExpLiteralCharOrEscape charOrEscape) {\nString value = charOrEscape.getCharOrEscape();\nif (\".\".equals(value)) {\nreturn createCharacterClass(\"^\", new String[]{\"\\\\r\", \"\\\\n\"});\n}\nif (\"\\\\s\".equals(value)) {\nreturn createCharacterClass(\"\", new String[]{\"\\\\t\", \"\\\\s\", \"\\\\n\", \"\\\\r\"});\n}\nif (\"\\\\S\".equals(value)) {\nreturn createCharacterClass(\"^\", new String[]{\"\\\\t\", \"\\\\s\", \"\\\\n\", \"\\\\r\"});\n}\nif (\"&\".equals(value)) {\nreturn createLiteralCharOrEscape(\"\\\\&\");\n}\nreturn charOrEscape;\n}\nprivate static RegExpLiteralCharOrEscape createLiteralCharOrEscape(String charOrEscape) {\nreturn new RegExpLiteralCharOrEscape(charOrEscape);\n}\nprivate static RegExpCharacterClass createCharacterClass(String negation, Object[] charSet) {\nreturn new RegExpCharacterClass(\"[\", negation, new RegExpCharSet(charSet) , \"]\");\n}\nprivate static RegExpAtom translateCharacterClass(RegExpCharacterClass charClass) {\nRegExpCharSet charSet = charClass.getReCharSet();\nObject[] charAtoms = charSet.getCharSetAtoms();\nint c = charAtoms.length;\nfor (int i = 0; i < c; i++) {\nObject charAtom = charAtoms[i];\nif (charAtom instanceof RegExpCharSetRange) {\nRegExpCharSetRange range = (RegExpCharSetRange) charAtom;\nrange.setLhsCharSetAtom(translateCharInCharacterClass(range.getLhsCharSetAtom()));\nrange.setRhsCharSetAom(translateCharInCharacterClass(range.getRhsCharSetAtom()));\ncontinue;\n}\nif 
(charAtom != null) {\ncharAtoms[i] = translateCharInCharacterClass((String) charAtom);\n}\n}\nreturn charClass;\n}\nprivate static String translateCharInCharacterClass(String originalValue) {\nif (\"&\".equals(originalValue)) {\nreturn \"\\\\&\";\n}\nreturn originalValue;\n}\n}", + "context_after": "class RegExpFactory {\nprivate RegExpFactory() {\n}\npublic static RegExpValue createRegExpValue(RegExpDisjunction regExpDisjunction) {\nreturn new RegExpValue(regExpDisjunction);\n}\npublic static RegExpDisjunction createReDisjunction(ArrayValue termsList) {\nreturn new RegExpDisjunction(termsList);\n}\npublic static RegExpSequence createReSequence(ArrayValue seqList) {\nreturn new RegExpSequence(seqList);\n}\npublic static RegExpAssertion createReAssertion(BString assertion) {\nreturn new RegExpAssertion(assertion.getValue());\n}\npublic static RegExpAtomQuantifier createReAtomQuantifier(Object reAtom, RegExpQuantifier quantifier) {\nreturn new RegExpAtomQuantifier(reAtom, quantifier);\n}\npublic static RegExpLiteralCharOrEscape createReLiteralCharOrEscape(BString charOrEscape) {\nreturn new RegExpLiteralCharOrEscape(charOrEscape.getValue());\n}\npublic static RegExpCharacterClass createReCharacterClass(BString characterClassStart, BString negation,\nRegExpCharSet reCharSet, BString characterClassEnd) {\nreturn new RegExpCharacterClass(characterClassStart.getValue(), negation.getValue(), reCharSet,\ncharacterClassEnd.getValue());\n}\npublic static RegExpCharSet createReCharSet(ArrayValue charSet) {\nreturn new RegExpCharSet(charSet);\n}\npublic static RegExpCharSetRange createReCharSetRange(BString lhsCharSetAtom, BString dash,\nBString rhsCharSetAtom) {\nreturn new RegExpCharSetRange(lhsCharSetAtom.getValue(), dash.getValue(), rhsCharSetAtom.getValue());\n}\npublic static RegExpCapturingGroup createReCapturingGroup(BString openParen, Object flagExpr,\nRegExpDisjunction reDisjunction, BString closeParen) {\nreturn new RegExpCapturingGroup(openParen.getValue(), (RegExpFlagExpression) flagExpr, reDisjunction,\ncloseParen.getValue());\n}\npublic static RegExpFlagExpression createReFlagExpression(BString questionMark, RegExpFlagOnOff flagsOnOff,\nBString colon) {\nreturn new RegExpFlagExpression(questionMark.getValue(), flagsOnOff, colon.getValue());\n}\npublic static RegExpFlagOnOff createReFlagOnOff(BString flags) {\nreturn new RegExpFlagOnOff(flags.getValue());\n}\npublic static RegExpQuantifier createReQuantifier(BString quantifier, BString nonGreedyChar) {\nreturn new RegExpQuantifier(quantifier.getValue(), nonGreedyChar.getValue());\n}\npublic static RegExpValue parse(String regExpStr) {\ntry {\nCharReader charReader = CharReader.from(regExpStr);\nTokenReader tokenReader = new TokenReader(new TreeTraverser(charReader));\nTreeBuilder treeBuilder = new TreeBuilder(tokenReader);\nreturn treeBuilder.parse();\n} catch (BallerinaException e) {\nthrow ErrorCreator.createError(StringUtils.fromString(\"Failed to parse regular expression: \"\n+ e.getMessage() + \" in '\" + regExpStr + \"'\"));\n}\n}\npublic static RegExpValue translateRegExpConstructs(RegExpValue regExpValue) {\nRegExpDisjunction disjunction = regExpValue.getRegExpDisjunction();\nif (disjunction.stringValue(null).equals(\"\")) {\ndisjunction = getNonCapturingGroupDisjunction();\n}\nfor (Object s : disjunction.getRegExpSeqList()) {\nif (!(s instanceof RegExpSequence)) {\ncontinue;\n}\nRegExpSequence seq = (RegExpSequence) s;\ntranslateRegExpTerms(seq.getRegExpTermsList());\n}\nreturn new RegExpValue(disjunction);\n}\nprivate static 
RegExpDisjunction getNonCapturingGroupDisjunction() {\nRegExpFlagOnOff flagsOnOff = new RegExpFlagOnOff(\"\");\nRegExpFlagExpression flagExpr = new RegExpFlagExpression(\"?\", flagsOnOff, \":\");\nRegExpDisjunction reDisjunction = new RegExpDisjunction(new Object[]{});\nRegExpCapturingGroup reAtom = new RegExpCapturingGroup(\"(\", flagExpr, reDisjunction, \")\");\nRegExpQuantifier reQuantifier = new RegExpQuantifier(\"\", \"\");\nRegExpTerm[] termList = new RegExpTerm[]{new RegExpAtomQuantifier(reAtom, reQuantifier)};\nreturn new RegExpDisjunction(new Object[]{new RegExpSequence(termList)});\n}\nprivate static void translateRegExpTerms(RegExpTerm[] terms) {\nfor (RegExpTerm t : terms) {\nif (!(t instanceof RegExpAtomQuantifier)) {\ncontinue;\n}\nRegExpAtomQuantifier atomQuantifier = (RegExpAtomQuantifier) t;\nObject reAtom = atomQuantifier.getReAtom();\nif (reAtom instanceof RegExpLiteralCharOrEscape) {\natomQuantifier.setReAtom(translateLiteralCharOrEscape((RegExpLiteralCharOrEscape) reAtom));\n} else if (reAtom instanceof RegExpCharacterClass) {\natomQuantifier.setReAtom(translateCharacterClass((RegExpCharacterClass) reAtom));\n}\n}\n}\nprivate static RegExpAtom translateLiteralCharOrEscape(RegExpLiteralCharOrEscape charOrEscape) {\nString value = charOrEscape.getCharOrEscape();\nif (\".\".equals(value)) {\nreturn createCharacterClass(\"^\", new String[]{\"\\\\r\", \"\\\\n\"});\n}\nif (\"\\\\s\".equals(value)) {\nreturn createCharacterClass(\"\", new String[]{\"\\\\t\", \"\\\\s\", \"\\\\n\", \"\\\\r\"});\n}\nif (\"\\\\S\".equals(value)) {\nreturn createCharacterClass(\"^\", new String[]{\"\\\\t\", \"\\\\s\", \"\\\\n\", \"\\\\r\"});\n}\nif (\"&\".equals(value)) {\nreturn createLiteralCharOrEscape(\"\\\\&\");\n}\nreturn charOrEscape;\n}\nprivate static RegExpLiteralCharOrEscape createLiteralCharOrEscape(String charOrEscape) {\nreturn new RegExpLiteralCharOrEscape(charOrEscape);\n}\nprivate static RegExpCharacterClass createCharacterClass(String negation, Object[] charSet) {\nreturn new RegExpCharacterClass(\"[\", negation, new RegExpCharSet(charSet), \"]\");\n}\nprivate static RegExpAtom translateCharacterClass(RegExpCharacterClass charClass) {\nRegExpCharSet charSet = charClass.getReCharSet();\nObject[] charAtoms = charSet.getCharSetAtoms();\nint c = charAtoms.length;\nfor (int i = 0; i < c; i++) {\nObject charAtom = charAtoms[i];\nif (charAtom instanceof RegExpCharSetRange) {\nRegExpCharSetRange range = (RegExpCharSetRange) charAtom;\nrange.setLhsCharSetAtom(translateCharInCharacterClass(range.getLhsCharSetAtom()));\nrange.setRhsCharSetAom(translateCharInCharacterClass(range.getRhsCharSetAtom()));\ncontinue;\n}\nif (charAtom != null) {\ncharAtoms[i] = translateVisitor(charAtom);\n}\n}\nreturn charClass;\n}\nprivate static Object translateVisitor(Object node) {\nif (node instanceof RegExpLiteralCharOrEscape) {\nreturn translateLiteralCharOrEscape((RegExpLiteralCharOrEscape) node);\n} else if (node instanceof String) {\nreturn translateCharInCharacterClass((String) node);\n}\nreturn node;\n}\nprivate static String translateCharInCharacterClass(String originalValue) {\nif (\"&\".equals(originalValue)) {\nreturn \"\\\\&\";\n}\nreturn originalValue;\n}\n}" + }, + { + "comment": "Done.", + "method_body": "public void testJoinClauseWithLargeList() {\nObject values = BRunUtil.invoke(result, \"testJoinClauseWithLargeList\");\nAssert.assertTrue((Boolean) values);\n}", + "target_code": "Assert.assertTrue((Boolean) values);", + "method_body_after": "public void testJoinClauseWithLargeList() 
{\nBRunUtil.invoke(result, \"testJoinClauseWithLargeList\");\n}", + "context_before": "class JoinClauseTest {\nprivate CompileResult result;\nprivate CompileResult negativeResult;\n@BeforeClass\npublic void setup() {\nresult = BCompileUtil.compile(\"test-src/query/join-clause.bal\");\nnegativeResult = BCompileUtil.compile(\"test-src/query/join-clause-negative.bal\");\n}\n@Test(description = \"Test join clause with record variable definition\")\npublic void testSimpleJoinClauseWithRecordVariable() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with field name : variable name\")\npublic void testSimpleJoinClauseWithRecordVariable2() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable2\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with variable name\")\npublic void testSimpleJoinClauseWithRecordVariable3() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable3\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause with simple variable definition and stream\")\npublic void testJoinClauseWithStream() {\nObject values = BRunUtil.invoke(result, \"testJoinClauseWithStream\", new Object[]{});\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test query expr with join and limit clause\")\npublic void testJoinClauseWithLimit() {\nObject values = BRunUtil.invoke(result, \"testJoinClauseWithLimit\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test outer join clause with record variable definition\")\npublic void testOuterJoinClauseWithRecordVariable() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with field name : variable name\")\npublic void testOuterJoinClauseWithRecordVariable2() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable2\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with variable name\")\npublic void testOuterJoinClauseWithRecordVariable3() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable3\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause with simple variable definition and stream\")\npublic void testOuterJoinClauseWithStream() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithStream\", new Object[]{});\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test query expr with join and limit clause\")\npublic void testOuterJoinClauseWithLimit() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithLimit\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test equals clause with a variable defined from a let clause\")\npublic void testSimpleJoinClauseWithLetAndEquals() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithLetAndEquals\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test equals clause with a function invocation\")\npublic void testSimpleJoinClauseWithFunctionInAnEquals() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithFunctionInAnEquals\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test outer join with null 
results\")\npublic void testOuterJoinWithNullResults() {\nBRunUtil.invoke(result, \"testOuterJoin\");\n}\n@Test(description = \"Test join clause with a large list\")\n@Test(description = \"Test negative scenarios for query expr with join clause\")\npublic void testNegativeScenarios() {\nAssert.assertEquals(negativeResult.getErrorCount(), 40);\nint i = 0;\nvalidateError(negativeResult, i++, \"incompatible types: expected 'Department', found 'Person'\", 46, 13);\nvalidateError(negativeResult, i++, \"undeclared field 'name' in record 'Person'\", 51, 19);\nvalidateError(negativeResult, i++, \"unknown type 'XYZ'\", 69, 13);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'int', found 'other'\", 70, 28);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptId'\", 93, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 93, 25);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 116, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 116, 21);\nvalidateError(negativeResult, i++, \"undefined symbol 'name'\", 140, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 140, 23);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptId'\", 163, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 163, 25);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 186, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 186, 21);\nvalidateError(negativeResult, i++, \"undefined symbol 'name'\", 210, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 210, 23);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 234, 23);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'string', found 'other'\", 234, 34);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 234, 34);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'boolean', found 'other'\", 266, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 266, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 266, 1);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'boolean', found 'other'\", 289, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 289, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 289, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 309, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 309, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 309, 1);\nvalidateError(negativeResult, i++, \"missing on keyword\", 309, 1);\nvalidateError(negativeResult, i++, \"undefined symbol 'dept'\", 329, 24);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 330, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 330, 1);\nvalidateError(negativeResult, i++, \"outer join must be declared with 'var'\", 353, 19);\nvalidateError(negativeResult, i++, \"undefined symbol 'dept'\", 357, 19);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 374, 16);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'int', found 'other'\", 389, 59);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 389, 59);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' 
does not support field access\", 395, 22);\nvalidateError(negativeResult, i++, \"order by not supported for complex type fields, order key should belong\" +\n\" to a basic type\", 395, 22);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 397, 36);\n}\n@AfterClass\npublic void tearDown() {\nresult = null;\nnegativeResult = null;\n}\n}", + "context_after": "class JoinClauseTest {\nprivate CompileResult result;\nprivate CompileResult negativeResult;\n@BeforeClass\npublic void setup() {\nresult = BCompileUtil.compile(\"test-src/query/join-clause.bal\");\nnegativeResult = BCompileUtil.compile(\"test-src/query/join-clause-negative.bal\");\n}\n@Test(description = \"Test join clause with record variable definition\")\npublic void testSimpleJoinClauseWithRecordVariable() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with field name : variable name\")\npublic void testSimpleJoinClauseWithRecordVariable2() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable2\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with variable name\")\npublic void testSimpleJoinClauseWithRecordVariable3() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable3\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause with simple variable definition and stream\")\npublic void testJoinClauseWithStream() {\nObject values = BRunUtil.invoke(result, \"testJoinClauseWithStream\", new Object[]{});\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test query expr with join and limit clause\")\npublic void testJoinClauseWithLimit() {\nObject values = BRunUtil.invoke(result, \"testJoinClauseWithLimit\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test outer join clause with record variable definition\")\npublic void testOuterJoinClauseWithRecordVariable() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with field name : variable name\")\npublic void testOuterJoinClauseWithRecordVariable2() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable2\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with variable name\")\npublic void testOuterJoinClauseWithRecordVariable3() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable3\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause with simple variable definition and stream\")\npublic void testOuterJoinClauseWithStream() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithStream\", new Object[]{});\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test query expr with join and limit clause\")\npublic void testOuterJoinClauseWithLimit() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithLimit\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test equals clause with a variable defined from a let clause\")\npublic void testSimpleJoinClauseWithLetAndEquals() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithLetAndEquals\");\nAssert.assertTrue((Boolean) 
values);\n}\n@Test(description = \"Test equals clause with a function invocation\")\npublic void testSimpleJoinClauseWithFunctionInAnEquals() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithFunctionInAnEquals\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test outer join with null results\")\npublic void testOuterJoinWithNullResults() {\nBRunUtil.invoke(result, \"testOuterJoin\");\n}\n@Test(description = \"Test join clause with a large list\")\n@Test(description = \"Test negative scenarios for query expr with join clause\")\npublic void testNegativeScenarios() {\nAssert.assertEquals(negativeResult.getErrorCount(), 40);\nint i = 0;\nvalidateError(negativeResult, i++, \"incompatible types: expected 'Department', found 'Person'\", 46, 13);\nvalidateError(negativeResult, i++, \"undeclared field 'name' in record 'Person'\", 51, 19);\nvalidateError(negativeResult, i++, \"unknown type 'XYZ'\", 69, 13);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'int', found 'other'\", 70, 28);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptId'\", 93, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 93, 25);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 116, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 116, 21);\nvalidateError(negativeResult, i++, \"undefined symbol 'name'\", 140, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 140, 23);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptId'\", 163, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 163, 25);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 186, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 186, 21);\nvalidateError(negativeResult, i++, \"undefined symbol 'name'\", 210, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 210, 23);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 234, 23);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'string', found 'other'\", 234, 34);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 234, 34);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'boolean', found 'other'\", 266, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 266, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 266, 1);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'boolean', found 'other'\", 289, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 289, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 289, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 309, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 309, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 309, 1);\nvalidateError(negativeResult, i++, \"missing on keyword\", 309, 1);\nvalidateError(negativeResult, i++, \"undefined symbol 'dept'\", 329, 24);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 330, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 330, 1);\nvalidateError(negativeResult, i++, \"outer join must be declared with 'var'\", 353, 19);\nvalidateError(negativeResult, i++, \"undefined symbol 'dept'\", 357, 19);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' 
does not support field access\", 374, 16);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'int', found 'other'\", 389, 59);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 389, 59);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 395, 22);\nvalidateError(negativeResult, i++, \"order by not supported for complex type fields, order key should belong\" +\n\" to a basic type\", 395, 22);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 397, 36);\n}\n@AfterClass\npublic void tearDown() {\nresult = null;\nnegativeResult = null;\n}\n}" + }, + { + "comment": "remove from 411 ~ 416 ", + "method_body": "public void computeStats(Analyzer analyzer) {\nsuper.computeStats(analyzer);\nif (cardinality > 0) {\navgRowSize = totalBytes / (float) cardinality * COMPRESSION_RATIO;\ncapCardinalityAtLimit();\n}\ncardinality = cardinality == -1 ? 0 : cardinality;\nif (analyzer.safeIsEnableJoinReorderBasedCost()) {\nstatsDeriveResult.setRowCount(cardinality);\nfor (Map.Entry entry : statsDeriveResult.getColumnToNdv().entrySet()) {\nif (entry.getValue() > 0) {\ncardinality = Math.min(cardinality, entry.getValue());\n}\n}\nstatsDeriveResult.setRowCount(cardinality);\n}\n}", + "target_code": "for (Map.Entry entry : statsDeriveResult.getColumnToNdv().entrySet()) {", + "method_body_after": "public void computeStats(Analyzer analyzer) {\nsuper.computeStats(analyzer);\nif (cardinality > 0) {\navgRowSize = totalBytes / (float) cardinality * COMPRESSION_RATIO;\ncapCardinalityAtLimit();\n}\ncardinality = cardinality == -1 ? 0 : cardinality;\nif (analyzer.safeIsEnableJoinReorderBasedCost()) {\nstatsDeriveResult.setRowCount(cardinality);\n}\n}", + "context_before": "class OlapScanNode extends ScanNode {\nprivate static final Logger LOG = LogManager.getLogger(OlapScanNode.class);\nprivate final static int COMPRESSION_RATIO = 5;\nprivate List result = new ArrayList<>();\n/*\n* When the field value is ON, the storage engine can return the data directly without pre-aggregation.\n* When the field value is OFF, the storage engine needs to aggregate the data before returning to scan node.\n* For example:\n* Aggregate table: k1, k2, v1 sum\n* Field value is ON\n* Query1: select k1, sum(v1) from table group by k1\n* This aggregation function in query is same as the schema.\n* So the field value is ON while the query can scan data directly.\n*\n* Field value is OFF\n* Query1: select k1 , k2 from table\n* This aggregation info is null.\n* Query2: select k1, min(v1) from table group by k1\n* This aggregation function in query is min which different from the schema.\n* So the data stored in storage engine need to be merged firstly before returning to scan node.\n*\n* There are currently two places to modify this variable:\n* 1. The turnOffPreAgg() method of SingleNodePlanner.\n* This method will only be called on the left deepest OlapScanNode the plan tree,\n* while other nodes are false by default (because the Aggregation operation is executed after Join,\n* we cannot judge whether other OlapScanNodes can close the pre-aggregation).\n* So even the Duplicate key table, if it is not the left deepest node, it will remain false too.\n*\n* 2. 
After MaterializedViewSelector selects the materialized view, the updateScanRangeInfoByNewMVSelector()\\\n* method of OlapScanNode may be called to update this variable.\n* This call will be executed on all ScanNodes in the plan tree. In this step,\n* for the DuplicateKey table, the variable will be set to true.\n* See comment of \"isPreAggregation\" variable in MaterializedViewSelector for details.\n*/\nprivate boolean isPreAggregation = false;\nprivate String reasonOfPreAggregation = null;\nprivate boolean canTurnOnPreAggr = true;\nprivate boolean forceOpenPreAgg = false;\nprivate OlapTable olapTable = null;\nprivate long selectedTabletsNum = 0;\nprivate long totalTabletsNum = 0;\nprivate long selectedIndexId = -1;\nprivate int selectedPartitionNum = 0;\nprivate Collection selectedPartitionIds = Lists.newArrayList();\nprivate long totalBytes = 0;\nprivate ArrayList scanTabletIds = Lists.newArrayList();\nprivate HashSet scanBackendIds = new HashSet<>();\nprivate Map tabletId2BucketSeq = Maps.newHashMap();\npublic ArrayListMultimap bucketSeq2locations = ArrayListMultimap.create();\npublic OlapScanNode(PlanNodeId id, TupleDescriptor desc, String planNodeName) {\nsuper(id, desc, planNodeName, NodeType.OLAP_SCAN_NODE);\nolapTable = (OlapTable) desc.getTable();\n}\npublic void setIsPreAggregation(boolean isPreAggregation, String reason) {\nthis.isPreAggregation = isPreAggregation;\nthis.reasonOfPreAggregation = reason;\n}\npublic boolean isPreAggregation() {\nreturn isPreAggregation;\n}\npublic boolean getCanTurnOnPreAggr() {\nreturn canTurnOnPreAggr;\n}\npublic void setCanTurnOnPreAggr(boolean canChangePreAggr) {\nthis.canTurnOnPreAggr = canChangePreAggr;\n}\npublic void closePreAggregation(String reason) {\nsetIsPreAggregation(false, reason);\nsetCanTurnOnPreAggr(false);\n}\npublic long getTotalTabletsNum() { return totalTabletsNum; }\npublic boolean getForceOpenPreAgg() {\nreturn forceOpenPreAgg;\n}\npublic void setForceOpenPreAgg(boolean forceOpenPreAgg) {\nthis.forceOpenPreAgg = forceOpenPreAgg;\n}\npublic Integer getSelectedPartitionNum() {\nreturn selectedPartitionNum;\n}\npublic Long getSelectedTabletsNum() {\nreturn selectedTabletsNum;\n}\npublic Collection getSelectedPartitionIds() {\nreturn selectedPartitionIds;\n}\npublic void setSelectedPartitionIds(Collection selectedPartitionIds) {\nthis.selectedPartitionIds = selectedPartitionIds;\n}\n/**\n* The function is used to directly select the index id of the base table as the selectedIndexId.\n* It makes sure that the olap scan node must scan the base data rather than scan the materialized view data.\n*\n* This function is mainly used to update stmt.\n* Update stmt also needs to scan data like normal queries.\n* But its syntax is different from ordinary queries,\n* so planner cannot use the logic of query to automatically match the best index id.\n* So, here it need to manually specify the index id to scan the base table directly.\n*/\npublic void useBaseIndexId() {\nthis.selectedIndexId = olapTable.getBaseIndexId();\n}\n/**\n* This method is mainly used to update scan range info in OlapScanNode by the new materialized selector.\n* Situation1:\n* If the new scan range is same as the old scan range which determined by the old materialized selector,\n* the scan range will not be changed.\n*
\n* Situation2: Scan range is difference. The type of table is duplicated.\n* The new scan range is used directly.\n* The reason is that the old selector does not support SPJ<->SPJG, so the result of old one must be incorrect.\n*
\n* Situation3: Scan range is difference. The type of table is aggregated.\n* The new scan range is different from the old one.\n* If the test_materialized_view is set to true, an error will be reported.\n* The query will be cancelled.\n*
\n* Situation4: Scan range is difference. The type of table is aggregated. `test_materialized_view` is set to false.\n* The result of the old version selector will be selected. Print the warning log\n*\n* @param selectedIndexId\n* @param isPreAggregation\n* @param reasonOfDisable\n* @throws UserException\n*/\npublic void updateScanRangeInfoByNewMVSelector(long selectedIndexId, boolean isPreAggregation, String reasonOfDisable)\nthrows UserException {\nif (selectedIndexId == this.selectedIndexId && isPreAggregation == this.isPreAggregation) {\nreturn;\n}\nStringBuilder stringBuilder = new StringBuilder(\"The new selected index id \")\n.append(selectedIndexId)\n.append(\", pre aggregation tag \").append(isPreAggregation)\n.append(\", reason \").append(reasonOfDisable == null ? \"null\" : reasonOfDisable)\n.append(\". The old selected index id \").append(this.selectedIndexId)\n.append(\" pre aggregation tag \").append(this.isPreAggregation)\n.append(\" reason \").append(this.reasonOfPreAggregation == null ? \"null\" : this.reasonOfPreAggregation);\nString scanRangeInfo = stringBuilder.toString();\nString situation;\nboolean update;\nCHECK:\n{\nif (olapTable.getKeysType() == KeysType.DUP_KEYS) {\nsituation = \"The key type of table is duplicate.\";\nupdate = true;\nbreak CHECK;\n}\nif (ConnectContext.get() == null) {\nsituation = \"Connection context is null\";\nupdate = true;\nbreak CHECK;\n}\nSessionVariable sessionVariable = ConnectContext.get().getSessionVariable();\nif (sessionVariable.getTestMaterializedView()) {\nthrow new AnalysisException(\"The old scan range info is different from the new one when \"\n+ \"test_materialized_view is true. \"\n+ scanRangeInfo);\n}\nsituation = \"The key type of table is aggregated.\";\nupdate = false;\nbreak CHECK;\n}\nif (update) {\nthis.selectedIndexId = selectedIndexId;\nsetIsPreAggregation(isPreAggregation, reasonOfDisable);\nupdateColumnType();\nif (LOG.isDebugEnabled()) {\nLOG.debug(\"Using the new scan range info instead of the old one. {}, {}\", situation ,scanRangeInfo);\n}\n} else {\nif (LOG.isDebugEnabled()) {\nLOG.debug(\"Using the old scan range info instead of the new one. 
{}, {}\", situation, scanRangeInfo);\n}\n}\n}\n/**\n* In some situation, the column type between base and mv is different.\n* If mv selector selects the mv index, the type of column should be changed to the type of mv column.\n* For example:\n* base table: k1 int, k2 int\n* mv table: k1 int, k2 bigint sum\n* The type of `k2` column between base and mv is different.\n* When mv selector selects the mv table to scan, the type of column should be changed to bigint in here.\n* Currently, only `SUM` aggregate type could match this changed.\n*/\nprivate void updateColumnType() {\nif (selectedIndexId == olapTable.getBaseIndexId()) {\nreturn;\n}\nMaterializedIndexMeta meta = olapTable.getIndexMetaByIndexId(selectedIndexId);\nfor (SlotDescriptor slotDescriptor : desc.getSlots()) {\nif (!slotDescriptor.isMaterialized()) {\ncontinue;\n}\nColumn baseColumn = slotDescriptor.getColumn();\nPreconditions.checkNotNull(baseColumn);\nColumn mvColumn = meta.getColumnByName(baseColumn.getName());\nPreconditions.checkNotNull(mvColumn);\nif (mvColumn.getType() != baseColumn.getType()) {\nslotDescriptor.setColumn(mvColumn);\n}\n}\n}\npublic OlapTable getOlapTable() {\nreturn olapTable;\n}\n@Override\nprotected String debugString() {\nMoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this);\nhelper.addValue(super.debugString());\nhelper.addValue(\"olapTable=\" + olapTable.getName());\nreturn helper.toString();\n}\n@Override\npublic void init(Analyzer analyzer) throws UserException {\nsuper.init(analyzer);\nfilterDeletedRows(analyzer);\ncomputeColumnFilter();\ncomputePartitionInfo();\ncomputeTupleState(analyzer);\n/**\n* Compute InAccurate cardinality before mv selector and tablet pruning.\n* - Accurate statistical information relies on the selector of materialized views and bucket reduction.\n* - However, Those both processes occur after the reorder algorithm is completed.\n* - When Join reorder is turned on, the cardinality must be calculated before the reorder algorithm.\n* - So only an inaccurate cardinality can be calculated here.\n*/\nif (analyzer.safeIsEnableJoinReorderBasedCost()) {\nmockRowCountInStatistic();\ncomputeInaccurateCardinality();\n}\n}\n/**\n* Remove the method after statistics collection is working properly\n*/\npublic void mockRowCountInStatistic() {\nlong tableId = desc.getTable().getId();\ncardinality = 0;\nfor (long selectedPartitionId : selectedPartitionIds) {\nfinal Partition partition = olapTable.getPartition(selectedPartitionId);\nfinal MaterializedIndex baseIndex = partition.getBaseIndex();\ncardinality += baseIndex.getRowCount();\n}\nCatalog.getCurrentCatalog().getStatisticsManager().getStatistics().mockTableStatsWithRowCount(tableId, cardinality);\n}\n@Override\npublic void finalize(Analyzer analyzer) throws UserException {\nLOG.debug(\"OlapScanNode get scan range locations. 
Tuple: {}\", desc);\n/**\n* If JoinReorder is turned on, it will be calculated init(), and this value is not accurate.\n* In the following logic, cardinality will be accurately calculated again.\n* So here we need to reset the value of cardinality.\n*/\nif (analyzer.safeIsEnableJoinReorderBasedCost()) {\ncardinality = 0;\n}\ntry {\ngetScanRangeLocations();\n} catch (AnalysisException e) {\nthrow new UserException(e.getMessage());\n}\ncomputeStats(analyzer);\ncomputeNumNodes();\n}\npublic void computeTupleState(Analyzer analyzer) {\nfor (TupleId id : tupleIds) {\nanalyzer.getDescTbl().getTupleDesc(id).computeStat();\n}\n}\n@Override\n@Override\nprotected void computeNumNodes() {\nif (cardinality > 0) {\nnumNodes = scanBackendIds.size();\n}\nnumNodes = numNodes <= 0 ? 1 : numNodes;\n}\nprivate void computeInaccurateCardinality() throws UserException {\nStatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);\ncardinality = statsDeriveResult.getRowCount();\n}\nprivate Collection partitionPrune(PartitionInfo partitionInfo, PartitionNames partitionNames) throws AnalysisException {\nPartitionPruner partitionPruner = null;\nMap keyItemMap;\nif (partitionNames != null) {\nkeyItemMap = Maps.newHashMap();\nfor (String partName : partitionNames.getPartitionNames()) {\nPartition partition = olapTable.getPartition(partName, partitionNames.isTemp());\nif (partition == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_NO_SUCH_PARTITION, partName);\n}\nkeyItemMap.put(partition.getId(), partitionInfo.getItem(partition.getId()));\n}\n} else {\nkeyItemMap = partitionInfo.getIdToItem(false);\n}\nif (partitionInfo.getType() == PartitionType.RANGE) {\nif (analyzer.partitionPruneV2Enabled()) {\npartitionPruner = new RangePartitionPrunerV2(keyItemMap,\npartitionInfo.getPartitionColumns(), columnNameToRange);\n} else {\npartitionPruner = new RangePartitionPruner(keyItemMap,\npartitionInfo.getPartitionColumns(), columnFilters);\n}\n} else if (partitionInfo.getType() == PartitionType.LIST) {\nif (analyzer.partitionPruneV2Enabled()) {\npartitionPruner = new ListPartitionPrunerV2(keyItemMap, partitionInfo.getPartitionColumns(),\ncolumnNameToRange);\n} else {\npartitionPruner = new ListPartitionPruner(keyItemMap,\npartitionInfo.getPartitionColumns(), columnFilters);\n}\n}\nreturn partitionPruner.prune();\n}\nprivate Collection distributionPrune(\nMaterializedIndex table,\nDistributionInfo distributionInfo) throws AnalysisException {\nDistributionPruner distributionPruner = null;\nswitch (distributionInfo.getType()) {\ncase HASH: {\nHashDistributionInfo info = (HashDistributionInfo) distributionInfo;\ndistributionPruner = new HashDistributionPruner(table.getTabletIdsInOrder(),\ninfo.getDistributionColumns(),\ncolumnFilters,\ninfo.getBucketNum());\nreturn distributionPruner.prune();\n}\ncase RANDOM: {\nreturn null;\n}\ndefault: {\nreturn null;\n}\n}\n}\nprivate void addScanRangeLocations(Partition partition,\nList tablets) throws UserException {\nlong visibleVersion = partition.getVisibleVersion();\nString visibleVersionStr = String.valueOf(visibleVersion);\nSet allowedTags = Sets.newHashSet();\nboolean needCheckTags = false;\nif (ConnectContext.get() != null) {\nallowedTags = ConnectContext.get().getResourceTags();\nneedCheckTags = ConnectContext.get().isResourceTagsSet();\n}\nfor (Tablet tablet : tablets) {\nlong tabletId = tablet.getId();\nTScanRangeLocations scanRangeLocations = new TScanRangeLocations();\nTPaloScanRange paloRange = new 
TPaloScanRange();\npaloRange.setDbName(\"\");\npaloRange.setSchemaHash(\"\");\npaloRange.setVersion(visibleVersionStr);\npaloRange.setVersionHash(\"\");\npaloRange.setTabletId(tabletId);\nList replicas = tablet.getQueryableReplicas(visibleVersion);\nif (replicas.isEmpty()) {\nLOG.error(\"no queryable replica found in tablet {}. visible version {}\",\ntabletId, visibleVersion);\nif (LOG.isDebugEnabled()) {\nfor (Replica replica : tablet.getReplicas()) {\nLOG.debug(\"tablet {}, replica: {}\", tabletId, replica.toString());\n}\n}\nthrow new UserException(\"Failed to get scan range, no queryable replica found in tablet: \" + tabletId);\n}\nCollections.shuffle(replicas);\nboolean tabletIsNull = true;\nboolean collectedStat = false;\nList errs = Lists.newArrayList();\nfor (Replica replica : replicas) {\nBackend backend = Catalog.getCurrentSystemInfo().getBackend(replica.getBackendId());\nif (backend == null || !backend.isAlive()) {\nLOG.debug(\"backend {} not exists or is not alive for replica {}\",\nreplica.getBackendId(), replica.getId());\nerrs.add(replica.getId() + \"'s backend \" + replica.getBackendId() + \" does not exist or not alive\");\ncontinue;\n}\nif (needCheckTags && !allowedTags.isEmpty() && !allowedTags.contains(backend.getTag())) {\nString err = String.format(\"Replica on backend %d with tag %s, which is not in user's resource tags: %s\",\nbackend.getId(), backend.getTag(), allowedTags);\nif (LOG.isDebugEnabled()) {\nLOG.debug(err);\n}\nerrs.add(err);\ncontinue;\n}\nString ip = backend.getHost();\nint port = backend.getBePort();\nTScanRangeLocation scanRangeLocation = new TScanRangeLocation(new TNetworkAddress(ip, port));\nscanRangeLocation.setBackendId(replica.getBackendId());\nscanRangeLocations.addToLocations(scanRangeLocation);\npaloRange.addToHosts(new TNetworkAddress(ip, port));\ntabletIsNull = false;\nif (!collectedStat && replica.getRowCount() != -1) {\ncardinality += replica.getRowCount();\ntotalBytes += replica.getDataSize();\ncollectedStat = true;\n}\nscanBackendIds.add(backend.getId());\n}\nif (tabletIsNull) {\nthrow new UserException(tabletId + \" have no queryable replicas. 
err: \" + Joiner.on(\", \").join(errs));\n}\nTScanRange scanRange = new TScanRange();\nscanRange.setPaloScanRange(paloRange);\nscanRangeLocations.setScanRange(scanRange);\nbucketSeq2locations.put(tabletId2BucketSeq.get(tabletId), scanRangeLocations);\nresult.add(scanRangeLocations);\n}\nif (tablets.size() == 0) {\ndesc.setCardinality(0);\n} else {\ndesc.setCardinality(cardinality);\n}\n}\nprivate void computePartitionInfo() throws AnalysisException {\nlong start = System.currentTimeMillis();\nPartitionNames partitionNames = ((BaseTableRef) desc.getRef()).getPartitionNames();\nPartitionInfo partitionInfo = olapTable.getPartitionInfo();\nif (partitionInfo.getType() == PartitionType.RANGE || partitionInfo.getType() == PartitionType.LIST) {\nselectedPartitionIds = partitionPrune(partitionInfo, partitionNames);\n} else {\nselectedPartitionIds = null;\n}\nif (selectedPartitionIds == null) {\nselectedPartitionIds = Lists.newArrayList();\nfor (Partition partition : olapTable.getPartitions()) {\nif (!partition.hasData()) {\ncontinue;\n}\nselectedPartitionIds.add(partition.getId());\n}\n} else {\nselectedPartitionIds = selectedPartitionIds.stream()\n.filter(id -> olapTable.getPartition(id).hasData())\n.collect(Collectors.toList());\n}\nselectedPartitionNum = selectedPartitionIds.size();\nfor(long id : selectedPartitionIds){\nPartition partition = olapTable.getPartition(id);\nif(partition.getState() == PartitionState.RESTORE){\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_PARTITION_STATE, partition.getName(), \"RESTORING\");\n}\n}\nLOG.debug(\"partition prune cost: {} ms, partitions: {}\",\n(System.currentTimeMillis() - start), selectedPartitionIds);\n}\npublic void selectBestRollupByRollupSelector(Analyzer analyzer) throws UserException {\nlong start = System.currentTimeMillis();\nif (olapTable.getKeysType() == KeysType.DUP_KEYS) {\nselectedIndexId = olapTable.getBaseIndexId();\nLOG.debug(\"The best index will be selected later in mv selector\");\nreturn;\n}\nfinal RollupSelector rollupSelector = new RollupSelector(analyzer, desc, olapTable);\nselectedIndexId = rollupSelector.selectBestRollup(selectedPartitionIds, conjuncts, isPreAggregation);\nLOG.debug(\"select best roll up cost: {} ms, best index id: {}\",\n(System.currentTimeMillis() - start), selectedIndexId);\n}\nprivate void getScanRangeLocations() throws UserException {\nif (selectedPartitionIds.size() == 0) {\ndesc.setCardinality(0);\nreturn;\n}\nPreconditions.checkState(selectedIndexId != -1);\nlong start = System.currentTimeMillis();\ncomputeTabletInfo();\nLOG.debug(\"distribution prune cost: {} ms\", (System.currentTimeMillis() - start));\n}\nprivate void computeTabletInfo() throws UserException {\n/**\n* The tablet info could be computed only once.\n* So the scanBackendIds should be empty in the beginning.\n*/\nPreconditions.checkState(scanBackendIds.size() == 0);\nPreconditions.checkState(scanTabletIds.size() == 0);\nfor (Long partitionId : selectedPartitionIds) {\nfinal Partition partition = olapTable.getPartition(partitionId);\nfinal MaterializedIndex selectedTable = partition.getIndex(selectedIndexId);\nfinal List tablets = Lists.newArrayList();\nfinal Collection tabletIds = distributionPrune(selectedTable, partition.getDistributionInfo());\nLOG.debug(\"distribution prune tablets: {}\", tabletIds);\nList allTabletIds = selectedTable.getTabletIdsInOrder();\nif (tabletIds != null) {\nfor (Long id : tabletIds) {\ntablets.add(selectedTable.getTablet(id));\n}\nscanTabletIds.addAll(tabletIds);\n} else 
{\ntablets.addAll(selectedTable.getTablets());\nscanTabletIds.addAll(allTabletIds);\n}\nfor (int i = 0; i < allTabletIds.size(); i++) {\ntabletId2BucketSeq.put(allTabletIds.get(i), i);\n}\ntotalTabletsNum += selectedTable.getTablets().size();\nselectedTabletsNum += tablets.size();\naddScanRangeLocations(partition, tablets);\n}\n}\n/**\n* We query Palo Meta to get request's data location\n* extra result info will pass to backend ScanNode\n*/\n@Override\npublic List getScanRangeLocations(long maxScanRangeLength) {\nreturn result;\n}\n@Override\npublic String getNodeExplainString(String prefix, TExplainLevel detailLevel) {\nStringBuilder output = new StringBuilder();\noutput.append(prefix).append(\"TABLE: \").append(olapTable.getName()).append(\"\\n\");\nif (detailLevel == TExplainLevel.BRIEF) {\nreturn output.toString();\n}\nif (null != sortColumn) {\noutput.append(prefix).append(\"SORT COLUMN: \").append(sortColumn).append(\"\\n\");\n}\nif (isPreAggregation) {\noutput.append(prefix).append(\"PREAGGREGATION: ON\").append(\"\\n\");\n} else {\noutput.append(prefix).append(\"PREAGGREGATION: OFF. Reason: \").append(reasonOfPreAggregation).append(\"\\n\");\n}\nif (!conjuncts.isEmpty()) {\noutput.append(prefix).append(\"PREDICATES: \").append(\ngetExplainString(conjuncts)).append(\"\\n\");\n}\nif (!runtimeFilters.isEmpty()) {\noutput.append(prefix).append(\"runtime filters: \");\noutput.append(getRuntimeFilterExplainString(false));\n}\noutput.append(prefix).append(String.format(\n\"partitions=%s/%s\",\nselectedPartitionNum,\nolapTable.getPartitions().size()));\nString indexName = olapTable.getIndexNameById(selectedIndexId);\noutput.append(\"\\n\").append(prefix).append(String.format(\"rollup: %s\", indexName));\noutput.append(\"\\n\");\noutput.append(prefix).append(String.format(\n\"tabletRatio=%s/%s\", selectedTabletsNum, totalTabletsNum));\noutput.append(\"\\n\");\nif (scanTabletIds.size() > 10) {\nList firstTenTabletIds = scanTabletIds.subList(0, 10);\noutput.append(prefix).append(String.format(\"tabletList=%s ...\", Joiner.on(\",\").join(firstTenTabletIds)));\n} else {\noutput.append(prefix).append(String.format(\"tabletList=%s\", Joiner.on(\",\").join(scanTabletIds)));\n}\noutput.append(\"\\n\");\noutput.append(prefix).append(String.format(\n\"cardinality=%s\", cardinality));\noutput.append(\"\\n\");\noutput.append(prefix).append(String.format(\n\"avgRowSize=%s\", avgRowSize));\noutput.append(\"\\n\");\noutput.append(prefix).append(String.format(\n\"numNodes=%s\", numNodes));\noutput.append(\"\\n\");\nreturn output.toString();\n}\n@Override\npublic int getNumInstances() {\nreturn result.size();\n}\n@Override\nprotected void toThrift(TPlanNode msg) {\nList keyColumnNames = new ArrayList();\nList keyColumnTypes = new ArrayList();\nif (selectedIndexId != -1) {\nfor (Column col : olapTable.getSchemaByIndexId(selectedIndexId)) {\nif (!col.isKey()) {\nbreak;\n}\nkeyColumnNames.add(col.getName());\nkeyColumnTypes.add(col.getDataType().toThrift());\n}\n}\nmsg.node_type = TPlanNodeType.OLAP_SCAN_NODE;\nmsg.olap_scan_node =\nnew TOlapScanNode(desc.getId().asInt(), keyColumnNames, keyColumnTypes, isPreAggregation);\nif (null != sortColumn) {\nmsg.olap_scan_node.setSortColumn(sortColumn);\n}\nmsg.olap_scan_node.setKeyType(olapTable.getKeysType().toThrift());\n}\npublic static OlapScanNode createOlapScanNodeByLocation(\nPlanNodeId id, TupleDescriptor desc, String planNodeName, List locationsList) {\nOlapScanNode olapScanNode = new OlapScanNode(id, desc, planNodeName);\nolapScanNode.numInstances = 
1;\nolapScanNode.selectedIndexId = olapScanNode.olapTable.getBaseIndexId();\nolapScanNode.selectedPartitionNum = 1;\nolapScanNode.selectedTabletsNum = 1;\nolapScanNode.totalTabletsNum = 1;\nolapScanNode.setIsPreAggregation(false, \"Export job\");\nolapScanNode.result.addAll(locationsList);\nreturn olapScanNode;\n}\npublic void collectColumns(Analyzer analyzer, Set equivalenceColumns, Set unequivalenceColumns) {\nfor (Expr expr : conjuncts) {\nif (!isPredicateUsedForPrefixIndex(expr, false)) {\ncontinue;\n}\nfor (SlotDescriptor slot : desc.getMaterializedSlots()) {\nif (expr.isBound(slot.getId())) {\nif (!isEquivalenceExpr(expr)) {\nunequivalenceColumns.add(slot.getColumn().getName());\n} else {\nequivalenceColumns.add(slot.getColumn().getName());\n}\nbreak;\n}\n}\n}\nList eqJoinPredicate = analyzer.getEqJoinConjuncts(desc.getId());\nfor (Expr expr : eqJoinPredicate) {\nif (!isPredicateUsedForPrefixIndex(expr, true)) {\ncontinue;\n}\nfor (SlotDescriptor slot : desc.getMaterializedSlots()) {\nPreconditions.checkState(expr.getChildren().size() == 2);\nfor (Expr child : expr.getChildren()) {\nif (child.isBound(slot.getId())) {\nequivalenceColumns.add(slot.getColumn().getName());\nbreak;\n}\n}\n}\n}\n}\npublic TupleId getTupleId() {\nPreconditions.checkNotNull(desc);\nreturn desc.getId();\n}\nprivate boolean isEquivalenceExpr(Expr expr) {\nif (expr instanceof InPredicate) {\nreturn true;\n}\nif (expr instanceof BinaryPredicate) {\nfinal BinaryPredicate predicate = (BinaryPredicate) expr;\nif (predicate.getOp().isEquivalence()) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate boolean isPredicateUsedForPrefixIndex(Expr expr, boolean isJoinConjunct) {\nif (!(expr instanceof InPredicate)\n&& !(expr instanceof BinaryPredicate)) {\nreturn false;\n}\nif (expr instanceof InPredicate) {\nreturn isInPredicateUsedForPrefixIndex((InPredicate) expr);\n} else if (expr instanceof BinaryPredicate) {\nif (isJoinConjunct) {\nreturn isEqualJoinConjunctUsedForPrefixIndex((BinaryPredicate) expr);\n} else {\nreturn isBinaryPredicateUsedForPrefixIndex((BinaryPredicate) expr);\n}\n}\nreturn true;\n}\nprivate boolean isEqualJoinConjunctUsedForPrefixIndex(BinaryPredicate expr) {\nPreconditions.checkArgument(expr.getOp().isEquivalence());\nif (expr.isAuxExpr()) {\nreturn false;\n}\nfor (Expr child : expr.getChildren()) {\nfor (SlotDescriptor slot : desc.getMaterializedSlots()) {\nif (child.isBound(slot.getId()) && isSlotRefNested(child)) {\nreturn true;\n}\n}\n}\nreturn false;\n}\nprivate boolean isBinaryPredicateUsedForPrefixIndex(BinaryPredicate expr) {\nif (expr.isAuxExpr() || expr.getOp().isUnequivalence()) {\nreturn false;\n}\nreturn (isSlotRefNested(expr.getChild(0)) && expr.getChild(1).isConstant())\n|| (isSlotRefNested(expr.getChild(1)) && expr.getChild(0).isConstant());\n}\nprivate boolean isInPredicateUsedForPrefixIndex(InPredicate expr) {\nif (expr.isNotIn()) {\nreturn false;\n}\nreturn isSlotRefNested(expr.getChild(0)) && expr.isLiteralChildren();\n}\nprivate boolean isSlotRefNested(Expr expr) {\nwhile (expr instanceof CastExpr) {\nexpr = expr.getChild(0);\n}\nreturn expr instanceof SlotRef;\n}\nprivate void filterDeletedRows(Analyzer analyzer) throws AnalysisException {\nif (!Util.showHiddenColumns() && olapTable.hasDeleteSign()) {\nSlotRef deleteSignSlot = new SlotRef(desc.getAliasAsName(), Column.DELETE_SIGN);\ndeleteSignSlot.analyze(analyzer);\ndeleteSignSlot.getDesc().setIsMaterialized(true);\nExpr conjunct = new BinaryPredicate(BinaryPredicate.Operator.EQ, deleteSignSlot, new 
IntLiteral(0));\nconjunct.analyze(analyzer);\nconjuncts.add(conjunct);\n}\n}\n/*\nAlthough sometimes the scan range only involves one instance,\nthe data distribution cannot be set to UNPARTITIONED here.\nThe reason is that @coordinator will not set the scan range for the fragment,\nwhen data partition of fragment is UNPARTITIONED.\n*/\npublic DataPartition constructInputPartitionByDistributionInfo() throws UserException {\nColocateTableIndex colocateTableIndex = Catalog.getCurrentColocateIndex();\nif ((colocateTableIndex.isColocateTable(olapTable.getId())\n&& !colocateTableIndex.isGroupUnstable(colocateTableIndex.getGroup(olapTable.getId())))\n|| olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED\n|| olapTable.getPartitions().size() == 1) {\nDistributionInfo distributionInfo = olapTable.getDefaultDistributionInfo();\nif (!(distributionInfo instanceof HashDistributionInfo)) {\nreturn DataPartition.RANDOM;\n}\nList distributeColumns = ((HashDistributionInfo) distributionInfo).getDistributionColumns();\nList dataDistributeExprs = Lists.newArrayList();\nfor (Column column : distributeColumns) {\nSlotRef slotRef = new SlotRef(desc.getRef().getName(), column.getName());\ndataDistributeExprs.add(slotRef);\n}\nreturn DataPartition.hashPartitioned(dataDistributeExprs);\n} else {\nreturn DataPartition.RANDOM;\n}\n}\n}", + "context_after": "class OlapScanNode extends ScanNode {\nprivate static final Logger LOG = LogManager.getLogger(OlapScanNode.class);\nprivate final static int COMPRESSION_RATIO = 5;\nprivate List result = new ArrayList<>();\n/*\n* When the field value is ON, the storage engine can return the data directly without pre-aggregation.\n* When the field value is OFF, the storage engine needs to aggregate the data before returning to scan node.\n* For example:\n* Aggregate table: k1, k2, v1 sum\n* Field value is ON\n* Query1: select k1, sum(v1) from table group by k1\n* This aggregation function in query is same as the schema.\n* So the field value is ON while the query can scan data directly.\n*\n* Field value is OFF\n* Query1: select k1 , k2 from table\n* This aggregation info is null.\n* Query2: select k1, min(v1) from table group by k1\n* This aggregation function in query is min which different from the schema.\n* So the data stored in storage engine need to be merged firstly before returning to scan node.\n*\n* There are currently two places to modify this variable:\n* 1. The turnOffPreAgg() method of SingleNodePlanner.\n* This method will only be called on the left deepest OlapScanNode the plan tree,\n* while other nodes are false by default (because the Aggregation operation is executed after Join,\n* we cannot judge whether other OlapScanNodes can close the pre-aggregation).\n* So even the Duplicate key table, if it is not the left deepest node, it will remain false too.\n*\n* 2. After MaterializedViewSelector selects the materialized view, the updateScanRangeInfoByNewMVSelector()\\\n* method of OlapScanNode may be called to update this variable.\n* This call will be executed on all ScanNodes in the plan tree. 
In this step,\n* for the DuplicateKey table, the variable will be set to true.\n* See comment of \"isPreAggregation\" variable in MaterializedViewSelector for details.\n*/\nprivate boolean isPreAggregation = false;\nprivate String reasonOfPreAggregation = null;\nprivate boolean canTurnOnPreAggr = true;\nprivate boolean forceOpenPreAgg = false;\nprivate OlapTable olapTable = null;\nprivate long selectedTabletsNum = 0;\nprivate long totalTabletsNum = 0;\nprivate long selectedIndexId = -1;\nprivate int selectedPartitionNum = 0;\nprivate Collection selectedPartitionIds = Lists.newArrayList();\nprivate long totalBytes = 0;\nprivate ArrayList scanTabletIds = Lists.newArrayList();\nprivate HashSet scanBackendIds = new HashSet<>();\nprivate Map tabletId2BucketSeq = Maps.newHashMap();\npublic ArrayListMultimap bucketSeq2locations = ArrayListMultimap.create();\npublic OlapScanNode(PlanNodeId id, TupleDescriptor desc, String planNodeName) {\nsuper(id, desc, planNodeName, NodeType.OLAP_SCAN_NODE);\nolapTable = (OlapTable) desc.getTable();\n}\npublic void setIsPreAggregation(boolean isPreAggregation, String reason) {\nthis.isPreAggregation = isPreAggregation;\nthis.reasonOfPreAggregation = reason;\n}\npublic boolean isPreAggregation() {\nreturn isPreAggregation;\n}\npublic boolean getCanTurnOnPreAggr() {\nreturn canTurnOnPreAggr;\n}\npublic void setCanTurnOnPreAggr(boolean canChangePreAggr) {\nthis.canTurnOnPreAggr = canChangePreAggr;\n}\npublic void closePreAggregation(String reason) {\nsetIsPreAggregation(false, reason);\nsetCanTurnOnPreAggr(false);\n}\npublic long getTotalTabletsNum() { return totalTabletsNum; }\npublic boolean getForceOpenPreAgg() {\nreturn forceOpenPreAgg;\n}\npublic void setForceOpenPreAgg(boolean forceOpenPreAgg) {\nthis.forceOpenPreAgg = forceOpenPreAgg;\n}\npublic Integer getSelectedPartitionNum() {\nreturn selectedPartitionNum;\n}\npublic Long getSelectedTabletsNum() {\nreturn selectedTabletsNum;\n}\npublic Collection getSelectedPartitionIds() {\nreturn selectedPartitionIds;\n}\npublic void setSelectedPartitionIds(Collection selectedPartitionIds) {\nthis.selectedPartitionIds = selectedPartitionIds;\n}\n/**\n* The function is used to directly select the index id of the base table as the selectedIndexId.\n* It makes sure that the olap scan node must scan the base data rather than scan the materialized view data.\n*\n* This function is mainly used to update stmt.\n* Update stmt also needs to scan data like normal queries.\n* But its syntax is different from ordinary queries,\n* so planner cannot use the logic of query to automatically match the best index id.\n* So, here it need to manually specify the index id to scan the base table directly.\n*/\npublic void useBaseIndexId() {\nthis.selectedIndexId = olapTable.getBaseIndexId();\n}\n/**\n* This method is mainly used to update scan range info in OlapScanNode by the new materialized selector.\n* Situation1:\n* If the new scan range is same as the old scan range which determined by the old materialized selector,\n* the scan range will not be changed.\n*

\n* Situation2: Scan range is difference. The type of table is duplicated.\n* The new scan range is used directly.\n* The reason is that the old selector does not support SPJ<->SPJG, so the result of old one must be incorrect.\n*
\n* Situation3: Scan range is difference. The type of table is aggregated.\n* The new scan range is different from the old one.\n* If the test_materialized_view is set to true, an error will be reported.\n* The query will be cancelled.\n*
\n* Situation4: Scan range is difference. The type of table is aggregated. `test_materialized_view` is set to false.\n* The result of the old version selector will be selected. Print the warning log\n*\n* @param selectedIndexId\n* @param isPreAggregation\n* @param reasonOfDisable\n* @throws UserException\n*/\npublic void updateScanRangeInfoByNewMVSelector(long selectedIndexId, boolean isPreAggregation, String reasonOfDisable)\nthrows UserException {\nif (selectedIndexId == this.selectedIndexId && isPreAggregation == this.isPreAggregation) {\nreturn;\n}\nStringBuilder stringBuilder = new StringBuilder(\"The new selected index id \")\n.append(selectedIndexId)\n.append(\", pre aggregation tag \").append(isPreAggregation)\n.append(\", reason \").append(reasonOfDisable == null ? \"null\" : reasonOfDisable)\n.append(\". The old selected index id \").append(this.selectedIndexId)\n.append(\" pre aggregation tag \").append(this.isPreAggregation)\n.append(\" reason \").append(this.reasonOfPreAggregation == null ? \"null\" : this.reasonOfPreAggregation);\nString scanRangeInfo = stringBuilder.toString();\nString situation;\nboolean update;\nCHECK:\n{\nif (olapTable.getKeysType() == KeysType.DUP_KEYS) {\nsituation = \"The key type of table is duplicate.\";\nupdate = true;\nbreak CHECK;\n}\nif (ConnectContext.get() == null) {\nsituation = \"Connection context is null\";\nupdate = true;\nbreak CHECK;\n}\nSessionVariable sessionVariable = ConnectContext.get().getSessionVariable();\nif (sessionVariable.getTestMaterializedView()) {\nthrow new AnalysisException(\"The old scan range info is different from the new one when \"\n+ \"test_materialized_view is true. \"\n+ scanRangeInfo);\n}\nsituation = \"The key type of table is aggregated.\";\nupdate = false;\nbreak CHECK;\n}\nif (update) {\nthis.selectedIndexId = selectedIndexId;\nsetIsPreAggregation(isPreAggregation, reasonOfDisable);\nupdateColumnType();\nif (LOG.isDebugEnabled()) {\nLOG.debug(\"Using the new scan range info instead of the old one. {}, {}\", situation ,scanRangeInfo);\n}\n} else {\nif (LOG.isDebugEnabled()) {\nLOG.debug(\"Using the old scan range info instead of the new one. 
{}, {}\", situation, scanRangeInfo);\n}\n}\n}\n/**\n* In some situation, the column type between base and mv is different.\n* If mv selector selects the mv index, the type of column should be changed to the type of mv column.\n* For example:\n* base table: k1 int, k2 int\n* mv table: k1 int, k2 bigint sum\n* The type of `k2` column between base and mv is different.\n* When mv selector selects the mv table to scan, the type of column should be changed to bigint in here.\n* Currently, only `SUM` aggregate type could match this changed.\n*/\nprivate void updateColumnType() {\nif (selectedIndexId == olapTable.getBaseIndexId()) {\nreturn;\n}\nMaterializedIndexMeta meta = olapTable.getIndexMetaByIndexId(selectedIndexId);\nfor (SlotDescriptor slotDescriptor : desc.getSlots()) {\nif (!slotDescriptor.isMaterialized()) {\ncontinue;\n}\nColumn baseColumn = slotDescriptor.getColumn();\nPreconditions.checkNotNull(baseColumn);\nColumn mvColumn = meta.getColumnByName(baseColumn.getName());\nPreconditions.checkNotNull(mvColumn);\nif (mvColumn.getType() != baseColumn.getType()) {\nslotDescriptor.setColumn(mvColumn);\n}\n}\n}\npublic OlapTable getOlapTable() {\nreturn olapTable;\n}\n@Override\nprotected String debugString() {\nMoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this);\nhelper.addValue(super.debugString());\nhelper.addValue(\"olapTable=\" + olapTable.getName());\nreturn helper.toString();\n}\n@Override\npublic void init(Analyzer analyzer) throws UserException {\nsuper.init(analyzer);\nfilterDeletedRows(analyzer);\ncomputeColumnFilter();\ncomputePartitionInfo();\ncomputeTupleState(analyzer);\n/**\n* Compute InAccurate cardinality before mv selector and tablet pruning.\n* - Accurate statistical information relies on the selector of materialized views and bucket reduction.\n* - However, Those both processes occur after the reorder algorithm is completed.\n* - When Join reorder is turned on, the cardinality must be calculated before the reorder algorithm.\n* - So only an inaccurate cardinality can be calculated here.\n*/\nif (analyzer.safeIsEnableJoinReorderBasedCost()) {\nmockRowCountInStatistic();\ncomputeInaccurateCardinality();\n}\n}\n/**\n* Remove the method after statistics collection is working properly\n*/\npublic void mockRowCountInStatistic() {\nlong tableId = desc.getTable().getId();\ncardinality = 0;\nfor (long selectedPartitionId : selectedPartitionIds) {\nfinal Partition partition = olapTable.getPartition(selectedPartitionId);\nfinal MaterializedIndex baseIndex = partition.getBaseIndex();\ncardinality += baseIndex.getRowCount();\n}\nCatalog.getCurrentCatalog().getStatisticsManager().getStatistics().mockTableStatsWithRowCount(tableId, cardinality);\n}\n@Override\npublic void finalize(Analyzer analyzer) throws UserException {\nLOG.debug(\"OlapScanNode get scan range locations. 
Tuple: {}\", desc);\n/**\n* If JoinReorder is turned on, it will be calculated init(), and this value is not accurate.\n* In the following logic, cardinality will be accurately calculated again.\n* So here we need to reset the value of cardinality.\n*/\nif (analyzer.safeIsEnableJoinReorderBasedCost()) {\ncardinality = 0;\n}\ntry {\ngetScanRangeLocations();\n} catch (AnalysisException e) {\nthrow new UserException(e.getMessage());\n}\ncomputeStats(analyzer);\ncomputeNumNodes();\n}\npublic void computeTupleState(Analyzer analyzer) {\nfor (TupleId id : tupleIds) {\nanalyzer.getDescTbl().getTupleDesc(id).computeStat();\n}\n}\n@Override\n@Override\nprotected void computeNumNodes() {\nif (cardinality > 0) {\nnumNodes = scanBackendIds.size();\n}\nnumNodes = numNodes <= 0 ? 1 : numNodes;\n}\nprivate void computeInaccurateCardinality() throws UserException {\nStatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);\ncardinality = statsDeriveResult.getRowCount();\n}\nprivate Collection partitionPrune(PartitionInfo partitionInfo, PartitionNames partitionNames) throws AnalysisException {\nPartitionPruner partitionPruner = null;\nMap keyItemMap;\nif (partitionNames != null) {\nkeyItemMap = Maps.newHashMap();\nfor (String partName : partitionNames.getPartitionNames()) {\nPartition partition = olapTable.getPartition(partName, partitionNames.isTemp());\nif (partition == null) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_NO_SUCH_PARTITION, partName);\n}\nkeyItemMap.put(partition.getId(), partitionInfo.getItem(partition.getId()));\n}\n} else {\nkeyItemMap = partitionInfo.getIdToItem(false);\n}\nif (partitionInfo.getType() == PartitionType.RANGE) {\nif (analyzer.partitionPruneV2Enabled()) {\npartitionPruner = new RangePartitionPrunerV2(keyItemMap,\npartitionInfo.getPartitionColumns(), columnNameToRange);\n} else {\npartitionPruner = new RangePartitionPruner(keyItemMap,\npartitionInfo.getPartitionColumns(), columnFilters);\n}\n} else if (partitionInfo.getType() == PartitionType.LIST) {\nif (analyzer.partitionPruneV2Enabled()) {\npartitionPruner = new ListPartitionPrunerV2(keyItemMap, partitionInfo.getPartitionColumns(),\ncolumnNameToRange);\n} else {\npartitionPruner = new ListPartitionPruner(keyItemMap,\npartitionInfo.getPartitionColumns(), columnFilters);\n}\n}\nreturn partitionPruner.prune();\n}\nprivate Collection distributionPrune(\nMaterializedIndex table,\nDistributionInfo distributionInfo) throws AnalysisException {\nDistributionPruner distributionPruner = null;\nswitch (distributionInfo.getType()) {\ncase HASH: {\nHashDistributionInfo info = (HashDistributionInfo) distributionInfo;\ndistributionPruner = new HashDistributionPruner(table.getTabletIdsInOrder(),\ninfo.getDistributionColumns(),\ncolumnFilters,\ninfo.getBucketNum());\nreturn distributionPruner.prune();\n}\ncase RANDOM: {\nreturn null;\n}\ndefault: {\nreturn null;\n}\n}\n}\nprivate void addScanRangeLocations(Partition partition,\nList tablets) throws UserException {\nlong visibleVersion = partition.getVisibleVersion();\nString visibleVersionStr = String.valueOf(visibleVersion);\nSet allowedTags = Sets.newHashSet();\nboolean needCheckTags = false;\nif (ConnectContext.get() != null) {\nallowedTags = ConnectContext.get().getResourceTags();\nneedCheckTags = ConnectContext.get().isResourceTagsSet();\n}\nfor (Tablet tablet : tablets) {\nlong tabletId = tablet.getId();\nTScanRangeLocations scanRangeLocations = new TScanRangeLocations();\nTPaloScanRange paloRange = new 
TPaloScanRange();\npaloRange.setDbName(\"\");\npaloRange.setSchemaHash(\"\");\npaloRange.setVersion(visibleVersionStr);\npaloRange.setVersionHash(\"\");\npaloRange.setTabletId(tabletId);\nList replicas = tablet.getQueryableReplicas(visibleVersion);\nif (replicas.isEmpty()) {\nLOG.error(\"no queryable replica found in tablet {}. visible version {}\",\ntabletId, visibleVersion);\nif (LOG.isDebugEnabled()) {\nfor (Replica replica : tablet.getReplicas()) {\nLOG.debug(\"tablet {}, replica: {}\", tabletId, replica.toString());\n}\n}\nthrow new UserException(\"Failed to get scan range, no queryable replica found in tablet: \" + tabletId);\n}\nCollections.shuffle(replicas);\nboolean tabletIsNull = true;\nboolean collectedStat = false;\nList errs = Lists.newArrayList();\nfor (Replica replica : replicas) {\nBackend backend = Catalog.getCurrentSystemInfo().getBackend(replica.getBackendId());\nif (backend == null || !backend.isAlive()) {\nLOG.debug(\"backend {} not exists or is not alive for replica {}\",\nreplica.getBackendId(), replica.getId());\nerrs.add(replica.getId() + \"'s backend \" + replica.getBackendId() + \" does not exist or not alive\");\ncontinue;\n}\nif (needCheckTags && !allowedTags.isEmpty() && !allowedTags.contains(backend.getTag())) {\nString err = String.format(\"Replica on backend %d with tag %s, which is not in user's resource tags: %s\",\nbackend.getId(), backend.getTag(), allowedTags);\nif (LOG.isDebugEnabled()) {\nLOG.debug(err);\n}\nerrs.add(err);\ncontinue;\n}\nString ip = backend.getHost();\nint port = backend.getBePort();\nTScanRangeLocation scanRangeLocation = new TScanRangeLocation(new TNetworkAddress(ip, port));\nscanRangeLocation.setBackendId(replica.getBackendId());\nscanRangeLocations.addToLocations(scanRangeLocation);\npaloRange.addToHosts(new TNetworkAddress(ip, port));\ntabletIsNull = false;\nif (!collectedStat && replica.getRowCount() != -1) {\ncardinality += replica.getRowCount();\ntotalBytes += replica.getDataSize();\ncollectedStat = true;\n}\nscanBackendIds.add(backend.getId());\n}\nif (tabletIsNull) {\nthrow new UserException(tabletId + \" have no queryable replicas. 
err: \" + Joiner.on(\", \").join(errs));\n}\nTScanRange scanRange = new TScanRange();\nscanRange.setPaloScanRange(paloRange);\nscanRangeLocations.setScanRange(scanRange);\nbucketSeq2locations.put(tabletId2BucketSeq.get(tabletId), scanRangeLocations);\nresult.add(scanRangeLocations);\n}\nif (tablets.size() == 0) {\ndesc.setCardinality(0);\n} else {\ndesc.setCardinality(cardinality);\n}\n}\nprivate void computePartitionInfo() throws AnalysisException {\nlong start = System.currentTimeMillis();\nPartitionNames partitionNames = ((BaseTableRef) desc.getRef()).getPartitionNames();\nPartitionInfo partitionInfo = olapTable.getPartitionInfo();\nif (partitionInfo.getType() == PartitionType.RANGE || partitionInfo.getType() == PartitionType.LIST) {\nselectedPartitionIds = partitionPrune(partitionInfo, partitionNames);\n} else {\nselectedPartitionIds = null;\n}\nif (selectedPartitionIds == null) {\nselectedPartitionIds = Lists.newArrayList();\nfor (Partition partition : olapTable.getPartitions()) {\nif (!partition.hasData()) {\ncontinue;\n}\nselectedPartitionIds.add(partition.getId());\n}\n} else {\nselectedPartitionIds = selectedPartitionIds.stream()\n.filter(id -> olapTable.getPartition(id).hasData())\n.collect(Collectors.toList());\n}\nselectedPartitionNum = selectedPartitionIds.size();\nfor(long id : selectedPartitionIds){\nPartition partition = olapTable.getPartition(id);\nif(partition.getState() == PartitionState.RESTORE){\nErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_PARTITION_STATE, partition.getName(), \"RESTORING\");\n}\n}\nLOG.debug(\"partition prune cost: {} ms, partitions: {}\",\n(System.currentTimeMillis() - start), selectedPartitionIds);\n}\npublic void selectBestRollupByRollupSelector(Analyzer analyzer) throws UserException {\nlong start = System.currentTimeMillis();\nif (olapTable.getKeysType() == KeysType.DUP_KEYS) {\nselectedIndexId = olapTable.getBaseIndexId();\nLOG.debug(\"The best index will be selected later in mv selector\");\nreturn;\n}\nfinal RollupSelector rollupSelector = new RollupSelector(analyzer, desc, olapTable);\nselectedIndexId = rollupSelector.selectBestRollup(selectedPartitionIds, conjuncts, isPreAggregation);\nLOG.debug(\"select best roll up cost: {} ms, best index id: {}\",\n(System.currentTimeMillis() - start), selectedIndexId);\n}\nprivate void getScanRangeLocations() throws UserException {\nif (selectedPartitionIds.size() == 0) {\ndesc.setCardinality(0);\nreturn;\n}\nPreconditions.checkState(selectedIndexId != -1);\nlong start = System.currentTimeMillis();\ncomputeTabletInfo();\nLOG.debug(\"distribution prune cost: {} ms\", (System.currentTimeMillis() - start));\n}\nprivate void computeTabletInfo() throws UserException {\n/**\n* The tablet info could be computed only once.\n* So the scanBackendIds should be empty in the beginning.\n*/\nPreconditions.checkState(scanBackendIds.size() == 0);\nPreconditions.checkState(scanTabletIds.size() == 0);\nfor (Long partitionId : selectedPartitionIds) {\nfinal Partition partition = olapTable.getPartition(partitionId);\nfinal MaterializedIndex selectedTable = partition.getIndex(selectedIndexId);\nfinal List tablets = Lists.newArrayList();\nfinal Collection tabletIds = distributionPrune(selectedTable, partition.getDistributionInfo());\nLOG.debug(\"distribution prune tablets: {}\", tabletIds);\nList allTabletIds = selectedTable.getTabletIdsInOrder();\nif (tabletIds != null) {\nfor (Long id : tabletIds) {\ntablets.add(selectedTable.getTablet(id));\n}\nscanTabletIds.addAll(tabletIds);\n} else 
{\ntablets.addAll(selectedTable.getTablets());\nscanTabletIds.addAll(allTabletIds);\n}\nfor (int i = 0; i < allTabletIds.size(); i++) {\ntabletId2BucketSeq.put(allTabletIds.get(i), i);\n}\ntotalTabletsNum += selectedTable.getTablets().size();\nselectedTabletsNum += tablets.size();\naddScanRangeLocations(partition, tablets);\n}\n}\n/**\n* We query Palo Meta to get request's data location\n* extra result info will pass to backend ScanNode\n*/\n@Override\npublic List getScanRangeLocations(long maxScanRangeLength) {\nreturn result;\n}\n@Override\npublic String getNodeExplainString(String prefix, TExplainLevel detailLevel) {\nStringBuilder output = new StringBuilder();\noutput.append(prefix).append(\"TABLE: \").append(olapTable.getName()).append(\"\\n\");\nif (detailLevel == TExplainLevel.BRIEF) {\nreturn output.toString();\n}\nif (null != sortColumn) {\noutput.append(prefix).append(\"SORT COLUMN: \").append(sortColumn).append(\"\\n\");\n}\nif (isPreAggregation) {\noutput.append(prefix).append(\"PREAGGREGATION: ON\").append(\"\\n\");\n} else {\noutput.append(prefix).append(\"PREAGGREGATION: OFF. Reason: \").append(reasonOfPreAggregation).append(\"\\n\");\n}\nif (!conjuncts.isEmpty()) {\noutput.append(prefix).append(\"PREDICATES: \").append(\ngetExplainString(conjuncts)).append(\"\\n\");\n}\nif (!runtimeFilters.isEmpty()) {\noutput.append(prefix).append(\"runtime filters: \");\noutput.append(getRuntimeFilterExplainString(false));\n}\noutput.append(prefix).append(String.format(\n\"partitions=%s/%s\",\nselectedPartitionNum,\nolapTable.getPartitions().size()));\nString indexName = olapTable.getIndexNameById(selectedIndexId);\noutput.append(\"\\n\").append(prefix).append(String.format(\"rollup: %s\", indexName));\noutput.append(\"\\n\");\noutput.append(prefix).append(String.format(\n\"tabletRatio=%s/%s\", selectedTabletsNum, totalTabletsNum));\noutput.append(\"\\n\");\nif (scanTabletIds.size() > 10) {\nList firstTenTabletIds = scanTabletIds.subList(0, 10);\noutput.append(prefix).append(String.format(\"tabletList=%s ...\", Joiner.on(\",\").join(firstTenTabletIds)));\n} else {\noutput.append(prefix).append(String.format(\"tabletList=%s\", Joiner.on(\",\").join(scanTabletIds)));\n}\noutput.append(\"\\n\");\noutput.append(prefix).append(String.format(\n\"cardinality=%s\", cardinality));\noutput.append(\"\\n\");\noutput.append(prefix).append(String.format(\n\"avgRowSize=%s\", avgRowSize));\noutput.append(\"\\n\");\noutput.append(prefix).append(String.format(\n\"numNodes=%s\", numNodes));\noutput.append(\"\\n\");\nreturn output.toString();\n}\n@Override\npublic int getNumInstances() {\nreturn result.size();\n}\n@Override\nprotected void toThrift(TPlanNode msg) {\nList keyColumnNames = new ArrayList();\nList keyColumnTypes = new ArrayList();\nif (selectedIndexId != -1) {\nfor (Column col : olapTable.getSchemaByIndexId(selectedIndexId)) {\nif (!col.isKey()) {\nbreak;\n}\nkeyColumnNames.add(col.getName());\nkeyColumnTypes.add(col.getDataType().toThrift());\n}\n}\nmsg.node_type = TPlanNodeType.OLAP_SCAN_NODE;\nmsg.olap_scan_node =\nnew TOlapScanNode(desc.getId().asInt(), keyColumnNames, keyColumnTypes, isPreAggregation);\nif (null != sortColumn) {\nmsg.olap_scan_node.setSortColumn(sortColumn);\n}\nmsg.olap_scan_node.setKeyType(olapTable.getKeysType().toThrift());\n}\npublic static OlapScanNode createOlapScanNodeByLocation(\nPlanNodeId id, TupleDescriptor desc, String planNodeName, List locationsList) {\nOlapScanNode olapScanNode = new OlapScanNode(id, desc, planNodeName);\nolapScanNode.numInstances = 
1;\nolapScanNode.selectedIndexId = olapScanNode.olapTable.getBaseIndexId();\nolapScanNode.selectedPartitionNum = 1;\nolapScanNode.selectedTabletsNum = 1;\nolapScanNode.totalTabletsNum = 1;\nolapScanNode.setIsPreAggregation(false, \"Export job\");\nolapScanNode.result.addAll(locationsList);\nreturn olapScanNode;\n}\npublic void collectColumns(Analyzer analyzer, Set equivalenceColumns, Set unequivalenceColumns) {\nfor (Expr expr : conjuncts) {\nif (!isPredicateUsedForPrefixIndex(expr, false)) {\ncontinue;\n}\nfor (SlotDescriptor slot : desc.getMaterializedSlots()) {\nif (expr.isBound(slot.getId())) {\nif (!isEquivalenceExpr(expr)) {\nunequivalenceColumns.add(slot.getColumn().getName());\n} else {\nequivalenceColumns.add(slot.getColumn().getName());\n}\nbreak;\n}\n}\n}\nList eqJoinPredicate = analyzer.getEqJoinConjuncts(desc.getId());\nfor (Expr expr : eqJoinPredicate) {\nif (!isPredicateUsedForPrefixIndex(expr, true)) {\ncontinue;\n}\nfor (SlotDescriptor slot : desc.getMaterializedSlots()) {\nPreconditions.checkState(expr.getChildren().size() == 2);\nfor (Expr child : expr.getChildren()) {\nif (child.isBound(slot.getId())) {\nequivalenceColumns.add(slot.getColumn().getName());\nbreak;\n}\n}\n}\n}\n}\npublic TupleId getTupleId() {\nPreconditions.checkNotNull(desc);\nreturn desc.getId();\n}\nprivate boolean isEquivalenceExpr(Expr expr) {\nif (expr instanceof InPredicate) {\nreturn true;\n}\nif (expr instanceof BinaryPredicate) {\nfinal BinaryPredicate predicate = (BinaryPredicate) expr;\nif (predicate.getOp().isEquivalence()) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate boolean isPredicateUsedForPrefixIndex(Expr expr, boolean isJoinConjunct) {\nif (!(expr instanceof InPredicate)\n&& !(expr instanceof BinaryPredicate)) {\nreturn false;\n}\nif (expr instanceof InPredicate) {\nreturn isInPredicateUsedForPrefixIndex((InPredicate) expr);\n} else if (expr instanceof BinaryPredicate) {\nif (isJoinConjunct) {\nreturn isEqualJoinConjunctUsedForPrefixIndex((BinaryPredicate) expr);\n} else {\nreturn isBinaryPredicateUsedForPrefixIndex((BinaryPredicate) expr);\n}\n}\nreturn true;\n}\nprivate boolean isEqualJoinConjunctUsedForPrefixIndex(BinaryPredicate expr) {\nPreconditions.checkArgument(expr.getOp().isEquivalence());\nif (expr.isAuxExpr()) {\nreturn false;\n}\nfor (Expr child : expr.getChildren()) {\nfor (SlotDescriptor slot : desc.getMaterializedSlots()) {\nif (child.isBound(slot.getId()) && isSlotRefNested(child)) {\nreturn true;\n}\n}\n}\nreturn false;\n}\nprivate boolean isBinaryPredicateUsedForPrefixIndex(BinaryPredicate expr) {\nif (expr.isAuxExpr() || expr.getOp().isUnequivalence()) {\nreturn false;\n}\nreturn (isSlotRefNested(expr.getChild(0)) && expr.getChild(1).isConstant())\n|| (isSlotRefNested(expr.getChild(1)) && expr.getChild(0).isConstant());\n}\nprivate boolean isInPredicateUsedForPrefixIndex(InPredicate expr) {\nif (expr.isNotIn()) {\nreturn false;\n}\nreturn isSlotRefNested(expr.getChild(0)) && expr.isLiteralChildren();\n}\nprivate boolean isSlotRefNested(Expr expr) {\nwhile (expr instanceof CastExpr) {\nexpr = expr.getChild(0);\n}\nreturn expr instanceof SlotRef;\n}\nprivate void filterDeletedRows(Analyzer analyzer) throws AnalysisException {\nif (!Util.showHiddenColumns() && olapTable.hasDeleteSign()) {\nSlotRef deleteSignSlot = new SlotRef(desc.getAliasAsName(), Column.DELETE_SIGN);\ndeleteSignSlot.analyze(analyzer);\ndeleteSignSlot.getDesc().setIsMaterialized(true);\nExpr conjunct = new BinaryPredicate(BinaryPredicate.Operator.EQ, deleteSignSlot, new 
IntLiteral(0));\nconjunct.analyze(analyzer);\nconjuncts.add(conjunct);\n}\n}\n/*\nAlthough sometimes the scan range only involves one instance,\nthe data distribution cannot be set to UNPARTITIONED here.\nThe reason is that @coordinator will not set the scan range for the fragment,\nwhen data partition of fragment is UNPARTITIONED.\n*/\npublic DataPartition constructInputPartitionByDistributionInfo() throws UserException {\nColocateTableIndex colocateTableIndex = Catalog.getCurrentColocateIndex();\nif ((colocateTableIndex.isColocateTable(olapTable.getId())\n&& !colocateTableIndex.isGroupUnstable(colocateTableIndex.getGroup(olapTable.getId())))\n|| olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED\n|| olapTable.getPartitions().size() == 1) {\nDistributionInfo distributionInfo = olapTable.getDefaultDistributionInfo();\nif (!(distributionInfo instanceof HashDistributionInfo)) {\nreturn DataPartition.RANDOM;\n}\nList distributeColumns = ((HashDistributionInfo) distributionInfo).getDistributionColumns();\nList dataDistributeExprs = Lists.newArrayList();\nfor (Column column : distributeColumns) {\nSlotRef slotRef = new SlotRef(desc.getRef().getName(), column.getName());\ndataDistributeExprs.add(slotRef);\n}\nreturn DataPartition.hashPartitioned(dataDistributeExprs);\n} else {\nreturn DataPartition.RANDOM;\n}\n}\n}" + }, + { + "comment": "so, if a task' state updated, we insert a new tuple into this table?", + "method_body": "public CreateTableStmt buildAnalysisJobTblStmt() throws UserException {\nTableName tableName = new TableName(\"\",\nFeConstants.INTERNAL_DB_NAME, StatisticConstants.ANALYSIS_JOB_TABLE);\nList columnDefs = new ArrayList<>();\ncolumnDefs.add(new ColumnDef(\"job_id\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"task_id\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"catalog_name\", TypeDef.createVarchar(1024)));\ncolumnDefs.add(new ColumnDef(\"db_name\", TypeDef.createVarchar(1024)));\ncolumnDefs.add(new ColumnDef(\"tbl_name\", TypeDef.createVarchar(1024)));\ncolumnDefs.add(new ColumnDef(\"col_name\", TypeDef.createVarchar(1024)));\ncolumnDefs.add(new ColumnDef(\"index_id\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"job_type\", TypeDef.createVarchar(32)));\ncolumnDefs.add(new ColumnDef(\"analysis_type\", TypeDef.createVarchar(32)));\ncolumnDefs.add(new ColumnDef(\"message\", TypeDef.createVarchar(1024)));\ncolumnDefs.add(new ColumnDef(\"last_exec_time_in_ms\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"state\", TypeDef.createVarchar(32)));\ncolumnDefs.add(new ColumnDef(\"schedule_type\", TypeDef.createVarchar(32)));\nString engineName = \"olap\";\nKeysDesc keysDesc = new KeysDesc(KeysType.DUP_KEYS,\nLists.newArrayList(\"job_id\", \"task_id\"));\nDistributionDesc distributionDesc = new HashDistributionDesc(\nStatisticConstants.STATISTIC_TABLE_BUCKET_COUNT,\nLists.newArrayList(\"job_id\"));\nMap properties = new HashMap() {\n{\nput(\"replication_num\", String.valueOf(Config.statistic_internal_table_replica_num));\n}\n};\nCreateTableStmt createTableStmt = new CreateTableStmt(true, false,\ntableName, columnDefs, engineName, keysDesc, null, distributionDesc,\nproperties, null, \"Doris internal statistics table, don't modify it\", null);\nStatisticsUtil.analyze(createTableStmt);\nreturn createTableStmt;\n}", + "target_code": "KeysDesc keysDesc = new KeysDesc(KeysType.DUP_KEYS,", + "method_body_after": "public CreateTableStmt buildAnalysisJobTblStmt() 
throws UserException {\nTableName tableName = new TableName(\"\",\nFeConstants.INTERNAL_DB_NAME, StatisticConstants.ANALYSIS_JOB_TABLE);\nList columnDefs = new ArrayList<>();\ncolumnDefs.add(new ColumnDef(\"job_id\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"task_id\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"catalog_name\", TypeDef.createVarchar(1024)));\ncolumnDefs.add(new ColumnDef(\"db_name\", TypeDef.createVarchar(1024)));\ncolumnDefs.add(new ColumnDef(\"tbl_name\", TypeDef.createVarchar(1024)));\ncolumnDefs.add(new ColumnDef(\"col_name\", TypeDef.createVarchar(1024)));\ncolumnDefs.add(new ColumnDef(\"index_id\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"job_type\", TypeDef.createVarchar(32)));\ncolumnDefs.add(new ColumnDef(\"analysis_type\", TypeDef.createVarchar(32)));\ncolumnDefs.add(new ColumnDef(\"message\", TypeDef.createVarchar(1024)));\ncolumnDefs.add(new ColumnDef(\"last_exec_time_in_ms\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"state\", TypeDef.createVarchar(32)));\ncolumnDefs.add(new ColumnDef(\"schedule_type\", TypeDef.createVarchar(32)));\nString engineName = \"olap\";\nKeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS,\nLists.newArrayList(\"job_id\"));\nDistributionDesc distributionDesc = new HashDistributionDesc(\nStatisticConstants.STATISTIC_TABLE_BUCKET_COUNT,\nLists.newArrayList(\"job_id\"));\nMap properties = new HashMap() {\n{\nput(\"replication_num\", String.valueOf(Config.statistic_internal_table_replica_num));\n}\n};\nCreateTableStmt createTableStmt = new CreateTableStmt(true, false,\ntableName, columnDefs, engineName, keysDesc, null, distributionDesc,\nproperties, null, \"Doris internal statistics table, don't modify it\", null);\nStatisticsUtil.analyze(createTableStmt);\nreturn createTableStmt;\n}", + "context_before": "class InternalSchemaInitializer extends Thread {\nprivate static final Logger LOG = LogManager.getLogger(InternalSchemaInitializer.class);\n/**\n* If internal table creation failed, will retry after below seconds.\n*/\npublic static final int TABLE_CREATION_RETRY_INTERVAL_IN_SECONDS = 1;\npublic void run() {\nif (FeConstants.runningUnitTest) {\nreturn;\n}\nwhile (!created()) {\nFrontendNodeType feType = Env.getCurrentEnv().getFeType();\nif (feType.equals(FrontendNodeType.INIT) || feType.equals(FrontendNodeType.UNKNOWN)) {\nLOG.warn(\"FE is not ready\");\ncontinue;\n}\ntry {\nThread.currentThread()\n.join(TABLE_CREATION_RETRY_INTERVAL_IN_SECONDS * 1000L);\ncreateDB();\ncreateTbl();\n} catch (Throwable e) {\nLOG.warn(\"Statistics storage initiated failed, will try again later\", e);\n}\n}\nLOG.info(\"Internal schema initiated\");\n}\nprivate void createTbl() throws UserException {\nEnv.getCurrentEnv().getInternalCatalog().createTable(buildStatisticsTblStmt());\nEnv.getCurrentEnv().getInternalCatalog().createTable(buildAnalysisJobTblStmt());\n}\n@VisibleForTesting\npublic static void createDB() {\nCreateDbStmt createDbStmt = new CreateDbStmt(true,\nClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, FeConstants.INTERNAL_DB_NAME),\nnull);\ncreateDbStmt.setClusterName(SystemInfoService.DEFAULT_CLUSTER);\ntry {\nEnv.getCurrentEnv().createDb(createDbStmt);\n} catch (DdlException e) {\nLOG.warn(\"Failed to create database: {}, will try again later\",\nFeConstants.INTERNAL_DB_NAME, e);\n}\n}\n@VisibleForTesting\npublic CreateTableStmt buildStatisticsTblStmt() throws UserException {\nTableName tableName = new 
TableName(\"\",\nFeConstants.INTERNAL_DB_NAME, StatisticConstants.STATISTIC_TBL_NAME);\nList columnDefs = new ArrayList<>();\ncolumnDefs.add(new ColumnDef(\"id\", TypeDef.createVarchar(StatisticConstants.ID_LEN)));\ncolumnDefs.add(new ColumnDef(\"catalog_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\ncolumnDefs.add(new ColumnDef(\"db_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\ncolumnDefs.add(new ColumnDef(\"tbl_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\ncolumnDefs.add(new ColumnDef(\"col_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\nColumnDef partId = new ColumnDef(\"part_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN));\npartId.setAllowNull(true);\ncolumnDefs.add(partId);\ncolumnDefs.add(new ColumnDef(\"count\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"ndv\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"null_count\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"min\", TypeDef.createVarchar(ScalarType.MAX_VARCHAR_LENGTH)));\ncolumnDefs.add(new ColumnDef(\"max\", TypeDef.createVarchar(ScalarType.MAX_VARCHAR_LENGTH)));\ncolumnDefs.add(new ColumnDef(\"data_size_in_bytes\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"update_time\", TypeDef.create(PrimitiveType.DATETIME)));\nString engineName = \"olap\";\nKeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS,\nLists.newArrayList(\"id\"));\nDistributionDesc distributionDesc = new HashDistributionDesc(\nStatisticConstants.STATISTIC_TABLE_BUCKET_COUNT,\nLists.newArrayList(\"id\"));\nMap properties = new HashMap() {\n{\nput(\"replication_num\", String.valueOf(Config.statistic_internal_table_replica_num));\n}\n};\nCreateTableStmt createTableStmt = new CreateTableStmt(true, false,\ntableName, columnDefs, engineName, keysDesc, null, distributionDesc,\nproperties, null, \"Doris internal statistics table, don't modify it\", null);\nStatisticsUtil.analyze(createTableStmt);\nreturn createTableStmt;\n}\n@VisibleForTesting\nprivate boolean created() {\nOptional optionalDatabase =\nEnv.getCurrentEnv().getInternalCatalog()\n.getDb(SystemInfoService.DEFAULT_CLUSTER + \":\" + FeConstants.INTERNAL_DB_NAME);\nif (!optionalDatabase.isPresent()) {\nreturn false;\n}\nDatabase db = optionalDatabase.get();\nreturn db.getTable(StatisticConstants.STATISTIC_TBL_NAME).isPresent()\n&& db.getTable(StatisticConstants.ANALYSIS_JOB_TABLE).isPresent();\n}\n}", + "context_after": "class InternalSchemaInitializer extends Thread {\nprivate static final Logger LOG = LogManager.getLogger(InternalSchemaInitializer.class);\n/**\n* If internal table creation failed, will retry after below seconds.\n*/\npublic static final int TABLE_CREATION_RETRY_INTERVAL_IN_SECONDS = 1;\npublic void run() {\nif (FeConstants.runningUnitTest) {\nreturn;\n}\nwhile (!created()) {\nFrontendNodeType feType = Env.getCurrentEnv().getFeType();\nif (feType.equals(FrontendNodeType.INIT) || feType.equals(FrontendNodeType.UNKNOWN)) {\nLOG.warn(\"FE is not ready\");\ncontinue;\n}\ntry {\nThread.currentThread()\n.join(TABLE_CREATION_RETRY_INTERVAL_IN_SECONDS * 1000L);\ncreateDB();\ncreateTbl();\n} catch (Throwable e) {\nLOG.warn(\"Statistics storage initiated failed, will try again later\", e);\n}\n}\nLOG.info(\"Internal schema initiated\");\n}\nprivate void createTbl() throws UserException 
{\nEnv.getCurrentEnv().getInternalCatalog().createTable(buildStatisticsTblStmt());\nEnv.getCurrentEnv().getInternalCatalog().createTable(buildAnalysisJobTblStmt());\n}\n@VisibleForTesting\npublic static void createDB() {\nCreateDbStmt createDbStmt = new CreateDbStmt(true,\nClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, FeConstants.INTERNAL_DB_NAME),\nnull);\ncreateDbStmt.setClusterName(SystemInfoService.DEFAULT_CLUSTER);\ntry {\nEnv.getCurrentEnv().createDb(createDbStmt);\n} catch (DdlException e) {\nLOG.warn(\"Failed to create database: {}, will try again later\",\nFeConstants.INTERNAL_DB_NAME, e);\n}\n}\n@VisibleForTesting\npublic CreateTableStmt buildStatisticsTblStmt() throws UserException {\nTableName tableName = new TableName(\"\",\nFeConstants.INTERNAL_DB_NAME, StatisticConstants.STATISTIC_TBL_NAME);\nList columnDefs = new ArrayList<>();\ncolumnDefs.add(new ColumnDef(\"id\", TypeDef.createVarchar(StatisticConstants.ID_LEN)));\ncolumnDefs.add(new ColumnDef(\"catalog_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\ncolumnDefs.add(new ColumnDef(\"db_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\ncolumnDefs.add(new ColumnDef(\"tbl_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\ncolumnDefs.add(new ColumnDef(\"col_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\nColumnDef partId = new ColumnDef(\"part_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN));\npartId.setAllowNull(true);\ncolumnDefs.add(partId);\ncolumnDefs.add(new ColumnDef(\"count\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"ndv\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"null_count\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"min\", TypeDef.createVarchar(ScalarType.MAX_VARCHAR_LENGTH)));\ncolumnDefs.add(new ColumnDef(\"max\", TypeDef.createVarchar(ScalarType.MAX_VARCHAR_LENGTH)));\ncolumnDefs.add(new ColumnDef(\"data_size_in_bytes\", TypeDef.create(PrimitiveType.BIGINT)));\ncolumnDefs.add(new ColumnDef(\"update_time\", TypeDef.create(PrimitiveType.DATETIME)));\nString engineName = \"olap\";\nKeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS,\nLists.newArrayList(\"id\"));\nDistributionDesc distributionDesc = new HashDistributionDesc(\nStatisticConstants.STATISTIC_TABLE_BUCKET_COUNT,\nLists.newArrayList(\"id\"));\nMap properties = new HashMap() {\n{\nput(\"replication_num\", String.valueOf(Config.statistic_internal_table_replica_num));\n}\n};\nCreateTableStmt createTableStmt = new CreateTableStmt(true, false,\ntableName, columnDefs, engineName, keysDesc, null, distributionDesc,\nproperties, null, \"Doris internal statistics table, don't modify it\", null);\nStatisticsUtil.analyze(createTableStmt);\nreturn createTableStmt;\n}\n@VisibleForTesting\nprivate boolean created() {\nOptional optionalDatabase =\nEnv.getCurrentEnv().getInternalCatalog()\n.getDb(SystemInfoService.DEFAULT_CLUSTER + \":\" + FeConstants.INTERNAL_DB_NAME);\nif (!optionalDatabase.isPresent()) {\nreturn false;\n}\nDatabase db = optionalDatabase.get();\nreturn db.getTable(StatisticConstants.STATISTIC_TBL_NAME).isPresent()\n&& db.getTable(StatisticConstants.ANALYSIS_JOB_TABLE).isPresent();\n}\n}" + }, + { + "comment": "Question, should we have this changed for everything using mini cluster, or only for our tests that are using unaligned checkpoints? 
\ud83e\udd14 Maybe for example because of benchmarks, and overall running times (costs of our CI), we should keep using memory storage if not needed? ", + "method_body": "private void startMiniCluster() throws Exception {\nfinal Configuration configuration =\nnew Configuration(miniClusterResourceConfiguration.getConfiguration());\nconfiguration.setString(\nCoreOptions.TMP_DIRS, temporaryFolder.newFolder().getAbsolutePath());\nconfiguration.set(\nCheckpointingOptions.CHECKPOINTS_DIRECTORY,\ntemporaryFolder.newFolder().toURI().toString());\nif (!configuration.contains(CoreOptions.FILESYTEM_DEFAULT_OVERRIDE)) {\nconfiguration.setBoolean(CoreOptions.FILESYTEM_DEFAULT_OVERRIDE, true);\n}\nif (!configuration.contains(TaskManagerOptions.MANAGED_MEMORY_SIZE)) {\nconfiguration.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, DEFAULT_MANAGED_MEMORY_SIZE);\n}\nconfiguration.setInteger(JobManagerOptions.PORT, 0);\nif (!(configuration.contains(RestOptions.BIND_PORT)\n|| configuration.contains(RestOptions.PORT))) {\nconfiguration.setString(RestOptions.BIND_PORT, \"0\");\n}\nrandomizeConfiguration(configuration);\nfinal MiniClusterConfiguration miniClusterConfiguration =\nnew MiniClusterConfiguration.Builder()\n.setConfiguration(configuration)\n.setNumTaskManagers(\nminiClusterResourceConfiguration.getNumberTaskManagers())\n.setNumSlotsPerTaskManager(\nminiClusterResourceConfiguration.getNumberSlotsPerTaskManager())\n.setRpcServiceSharing(\nminiClusterResourceConfiguration.getRpcServiceSharing())\n.setHaServices(miniClusterResourceConfiguration.getHaServices())\n.build();\nminiCluster =\nnew MiniCluster(miniClusterConfiguration, () -> Reference.borrowed(rpcSystem));\nminiCluster.start();\nfinal URI restAddress = miniCluster.getRestAddress().get();\ncreateClientConfiguration(restAddress);\n}", + "target_code": "temporaryFolder.newFolder().toURI().toString());", + "method_body_after": "private void startMiniCluster() throws Exception {\nfinal Configuration configuration =\nnew Configuration(miniClusterResourceConfiguration.getConfiguration());\nconfiguration.setString(\nCoreOptions.TMP_DIRS, temporaryFolder.newFolder().getAbsolutePath());\nif (!configuration.contains(CheckpointingOptions.CHECKPOINTS_DIRECTORY)) {\nconfiguration.set(\nCheckpointingOptions.CHECKPOINTS_DIRECTORY,\ntemporaryFolder.newFolder().toURI().toString());\n}\nif (!configuration.contains(CoreOptions.FILESYTEM_DEFAULT_OVERRIDE)) {\nconfiguration.setBoolean(CoreOptions.FILESYTEM_DEFAULT_OVERRIDE, true);\n}\nif (!configuration.contains(TaskManagerOptions.MANAGED_MEMORY_SIZE)) {\nconfiguration.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, DEFAULT_MANAGED_MEMORY_SIZE);\n}\nconfiguration.setInteger(JobManagerOptions.PORT, 0);\nif (!(configuration.contains(RestOptions.BIND_PORT)\n|| configuration.contains(RestOptions.PORT))) {\nconfiguration.setString(RestOptions.BIND_PORT, \"0\");\n}\nrandomizeConfiguration(configuration);\nfinal MiniClusterConfiguration miniClusterConfiguration =\nnew MiniClusterConfiguration.Builder()\n.setConfiguration(configuration)\n.setNumTaskManagers(\nminiClusterResourceConfiguration.getNumberTaskManagers())\n.setNumSlotsPerTaskManager(\nminiClusterResourceConfiguration.getNumberSlotsPerTaskManager())\n.setRpcServiceSharing(\nminiClusterResourceConfiguration.getRpcServiceSharing())\n.setHaServices(miniClusterResourceConfiguration.getHaServices())\n.build();\nminiCluster =\nnew MiniCluster(miniClusterConfiguration, () -> Reference.borrowed(rpcSystem));\nminiCluster.start();\nfinal URI restAddress = 
miniCluster.getRestAddress().get();\ncreateClientConfiguration(restAddress);\n}", + "context_before": "class MiniClusterResource extends ExternalResource {\nprivate static final boolean RANDOMIZE_BUFFER_DEBLOAT_CONFIG =\nBoolean.parseBoolean(System.getProperty(\"buffer-debloat.randomization\", \"false\"));\nprivate static final MemorySize DEFAULT_MANAGED_MEMORY_SIZE = MemorySize.parse(\"80m\");\nprotected final Logger log = LoggerFactory.getLogger(getClass());\nprivate final TemporaryFolder temporaryFolder = new TemporaryFolder();\nprivate final MiniClusterResourceConfiguration miniClusterResourceConfiguration;\nprivate MiniCluster miniCluster = null;\nprivate int numberSlots = -1;\nprivate UnmodifiableConfiguration restClusterClientConfig;\nprivate static final RpcSystem rpcSystem = RpcSystem.load();\nstatic {\nRuntime.getRuntime().addShutdownHook(new Thread(() -> rpcSystem.close()));\n}\npublic MiniClusterResource(\nfinal MiniClusterResourceConfiguration miniClusterResourceConfiguration) {\nthis.miniClusterResourceConfiguration =\nPreconditions.checkNotNull(miniClusterResourceConfiguration);\n}\npublic int getNumberSlots() {\nreturn numberSlots;\n}\npublic MiniCluster getMiniCluster() {\nreturn miniCluster;\n}\npublic UnmodifiableConfiguration getClientConfiguration() {\nreturn restClusterClientConfig;\n}\n/** @deprecated use {@link\n@Deprecated\npublic URI getRestAddres() {\nreturn getRestAddress();\n}\npublic URI getRestAddress() {\nreturn miniCluster.getRestAddress().join();\n}\n@Override\npublic void before() throws Exception {\ntemporaryFolder.create();\nstartMiniCluster();\nnumberSlots =\nminiClusterResourceConfiguration.getNumberSlotsPerTaskManager()\n* miniClusterResourceConfiguration.getNumberTaskManagers();\n}\npublic void cancelAllJobsAndWaitUntilSlotsAreFreed() {\nfinal long heartbeatTimeout =\nminiCluster.getConfiguration().get(HeartbeatManagerOptions.HEARTBEAT_INTERVAL);\nfinal long shutdownTimeout =\nminiClusterResourceConfiguration.getShutdownTimeout().toMilliseconds();\nPreconditions.checkState(\nheartbeatTimeout < shutdownTimeout,\n\"Heartbeat timeout (%d) needs to be lower than the shutdown timeout (%d) in order to ensure reliable job cancellation and resource cleanup.\",\nheartbeatTimeout,\nshutdownTimeout);\ncancelAllJobs(true);\n}\npublic void cancelAllJobs() {\ncancelAllJobs(false);\n}\nprivate void cancelAllJobs(boolean waitUntilSlotsAreFreed) {\ntry {\nfinal List> jobCancellationFutures =\nminiCluster.listJobs().get().stream()\n.filter(status -> !status.getJobState().isGloballyTerminalState())\n.map(status -> miniCluster.cancelJob(status.getJobId()))\n.collect(Collectors.toList());\nFutureUtils.waitForAll(jobCancellationFutures).get();\nCommonTestUtils.waitUntilCondition(\n() -> {\nfinal long unfinishedJobs =\nminiCluster.listJobs().get().stream()\n.filter(\nstatus ->\n!status.getJobState()\n.isGloballyTerminalState())\n.count();\nreturn unfinishedJobs == 0;\n});\nif (waitUntilSlotsAreFreed) {\nCommonTestUtils.waitUntilCondition(\n() -> {\nfinal ResourceOverview resourceOverview =\nminiCluster.getResourceOverview().get();\nreturn resourceOverview.getNumberRegisteredSlots()\n== resourceOverview.getNumberFreeSlots();\n});\n}\n} catch (Exception e) {\nlog.warn(\"Exception while shutting down remaining jobs.\", e);\n}\n}\n@Override\npublic void after() {\nException exception = null;\nif (miniCluster != null) {\ncancelAllJobs();\nfinal CompletableFuture terminationFuture = miniCluster.closeAsync();\ntry 
{\nterminationFuture.get(\nminiClusterResourceConfiguration.getShutdownTimeout().toMilliseconds(),\nTimeUnit.MILLISECONDS);\n} catch (Exception e) {\nexception = ExceptionUtils.firstOrSuppressed(e, exception);\n}\nminiCluster = null;\n}\nif (exception != null) {\nlog.warn(\"Could not properly shut down the MiniClusterResource.\", exception);\n}\ntemporaryFolder.delete();\n}\n/**\n* This is the place for randomization the configuration that relates to task execution such as\n* TaskManagerConf. Configurations which relates to streaming should be randomized in\n* TestStreamEnvironment\n*/\nprivate static void randomizeConfiguration(Configuration configuration) {\nif (RANDOMIZE_BUFFER_DEBLOAT_CONFIG\n&& !configuration.contains(TaskManagerOptions.BUFFER_DEBLOAT_ENABLED)) {\nrandomize(configuration, TaskManagerOptions.BUFFER_DEBLOAT_ENABLED, true, false);\n}\n}\nprivate void createClientConfiguration(URI restAddress) {\nConfiguration restClientConfig = new Configuration();\nrestClientConfig.setString(JobManagerOptions.ADDRESS, restAddress.getHost());\nrestClientConfig.setInteger(RestOptions.PORT, restAddress.getPort());\nthis.restClusterClientConfig = new UnmodifiableConfiguration(restClientConfig);\n}\n}", + "context_after": "class MiniClusterResource extends ExternalResource {\nprivate static final boolean RANDOMIZE_BUFFER_DEBLOAT_CONFIG =\nBoolean.parseBoolean(System.getProperty(\"buffer-debloat.randomization\", \"false\"));\nprivate static final MemorySize DEFAULT_MANAGED_MEMORY_SIZE = MemorySize.parse(\"80m\");\nprotected final Logger log = LoggerFactory.getLogger(getClass());\nprivate final TemporaryFolder temporaryFolder = new TemporaryFolder();\nprivate final MiniClusterResourceConfiguration miniClusterResourceConfiguration;\nprivate MiniCluster miniCluster = null;\nprivate int numberSlots = -1;\nprivate UnmodifiableConfiguration restClusterClientConfig;\nprivate static final RpcSystem rpcSystem = RpcSystem.load();\nstatic {\nRuntime.getRuntime().addShutdownHook(new Thread(() -> rpcSystem.close()));\n}\npublic MiniClusterResource(\nfinal MiniClusterResourceConfiguration miniClusterResourceConfiguration) {\nthis.miniClusterResourceConfiguration =\nPreconditions.checkNotNull(miniClusterResourceConfiguration);\n}\npublic int getNumberSlots() {\nreturn numberSlots;\n}\npublic MiniCluster getMiniCluster() {\nreturn miniCluster;\n}\npublic UnmodifiableConfiguration getClientConfiguration() {\nreturn restClusterClientConfig;\n}\n/** @deprecated use {@link\n@Deprecated\npublic URI getRestAddres() {\nreturn getRestAddress();\n}\npublic URI getRestAddress() {\nreturn miniCluster.getRestAddress().join();\n}\n@Override\npublic void before() throws Exception {\ntemporaryFolder.create();\nstartMiniCluster();\nnumberSlots =\nminiClusterResourceConfiguration.getNumberSlotsPerTaskManager()\n* miniClusterResourceConfiguration.getNumberTaskManagers();\n}\npublic void cancelAllJobsAndWaitUntilSlotsAreFreed() {\nfinal long heartbeatTimeout =\nminiCluster.getConfiguration().get(HeartbeatManagerOptions.HEARTBEAT_INTERVAL);\nfinal long shutdownTimeout =\nminiClusterResourceConfiguration.getShutdownTimeout().toMilliseconds();\nPreconditions.checkState(\nheartbeatTimeout < shutdownTimeout,\n\"Heartbeat timeout (%d) needs to be lower than the shutdown timeout (%d) in order to ensure reliable job cancellation and resource cleanup.\",\nheartbeatTimeout,\nshutdownTimeout);\ncancelAllJobs(true);\n}\npublic void cancelAllJobs() {\ncancelAllJobs(false);\n}\nprivate void cancelAllJobs(boolean waitUntilSlotsAreFreed) 
{\ntry {\nfinal List> jobCancellationFutures =\nminiCluster.listJobs().get().stream()\n.filter(status -> !status.getJobState().isGloballyTerminalState())\n.map(status -> miniCluster.cancelJob(status.getJobId()))\n.collect(Collectors.toList());\nFutureUtils.waitForAll(jobCancellationFutures).get();\nCommonTestUtils.waitUntilCondition(\n() -> {\nfinal long unfinishedJobs =\nminiCluster.listJobs().get().stream()\n.filter(\nstatus ->\n!status.getJobState()\n.isGloballyTerminalState())\n.count();\nreturn unfinishedJobs == 0;\n});\nif (waitUntilSlotsAreFreed) {\nCommonTestUtils.waitUntilCondition(\n() -> {\nfinal ResourceOverview resourceOverview =\nminiCluster.getResourceOverview().get();\nreturn resourceOverview.getNumberRegisteredSlots()\n== resourceOverview.getNumberFreeSlots();\n});\n}\n} catch (Exception e) {\nlog.warn(\"Exception while shutting down remaining jobs.\", e);\n}\n}\n@Override\npublic void after() {\nException exception = null;\nif (miniCluster != null) {\ncancelAllJobs();\nfinal CompletableFuture terminationFuture = miniCluster.closeAsync();\ntry {\nterminationFuture.get(\nminiClusterResourceConfiguration.getShutdownTimeout().toMilliseconds(),\nTimeUnit.MILLISECONDS);\n} catch (Exception e) {\nexception = ExceptionUtils.firstOrSuppressed(e, exception);\n}\nminiCluster = null;\n}\nif (exception != null) {\nlog.warn(\"Could not properly shut down the MiniClusterResource.\", exception);\n}\ntemporaryFolder.delete();\n}\n/**\n* This is the place for randomization the configuration that relates to task execution such as\n* TaskManagerConf. Configurations which relates to streaming should be randomized in\n* TestStreamEnvironment\n*/\nprivate static void randomizeConfiguration(Configuration configuration) {\nif (RANDOMIZE_BUFFER_DEBLOAT_CONFIG\n&& !configuration.contains(TaskManagerOptions.BUFFER_DEBLOAT_ENABLED)) {\nrandomize(configuration, TaskManagerOptions.BUFFER_DEBLOAT_ENABLED, true, false);\n}\n}\nprivate void createClientConfiguration(URI restAddress) {\nConfiguration restClientConfig = new Configuration();\nrestClientConfig.setString(JobManagerOptions.ADDRESS, restAddress.getHost());\nrestClientConfig.setInteger(RestOptions.PORT, restAddress.getPort());\nthis.restClusterClientConfig = new UnmodifiableConfiguration(restClientConfig);\n}\n}" + }, + { + "comment": "Good point - @mmusgrov do know if there is any property we don't want to set at build time?", + "method_body": "public void setProperties(NarayanaJtaRecorder recorder) {\nProperties defaultProperties = PropertiesFactory.getDefaultProperties();\nfor (Object i : System.getProperties().keySet()) {\ndefaultProperties.remove(i);\n}\nrecorder.setDefaultProperties(defaultProperties);\n}", + "target_code": "recorder.setDefaultProperties(defaultProperties);", + "method_body_after": "public void setProperties(NarayanaJtaRecorder recorder) {\nProperties defaultProperties = PropertiesFactory.getDefaultProperties();\nfor (Object i : System.getProperties().keySet()) {\ndefaultProperties.remove(i);\n}\nrecorder.setDefaultProperties(defaultProperties);\n}", + "context_before": "class NarayanaJtaProcessor {\nprivate static final String TEST_TRANSACTION = \"io.quarkus.test.TestTransaction\";\n@BuildStep\npublic NativeImageSystemPropertyBuildItem nativeImageSystemPropertyBuildItem() {\nreturn new NativeImageSystemPropertyBuildItem(\"CoordinatorEnvironmentBean.transactionStatusManagerEnable\", \"false\");\n}\n@BuildStep\n@Record(RUNTIME_INIT)\n@Produce(NarayanaInitBuildItem.class)\npublic void build(NarayanaJtaRecorder 
recorder,\nCombinedIndexBuildItem indexBuildItem,\nBuildProducer additionalBeans,\nBuildProducer reflectiveClass,\nBuildProducer runtimeInit,\nBuildProducer feature,\nTransactionManagerConfiguration transactions, ShutdownContextBuildItem shutdownContextBuildItem) {\nrecorder.handleShutdown(shutdownContextBuildItem, transactions);\nfeature.produce(new FeatureBuildItem(Feature.NARAYANA_JTA));\nadditionalBeans.produce(new AdditionalBeanBuildItem(NarayanaJtaProducers.class));\nadditionalBeans.produce(AdditionalBeanBuildItem.unremovableOf(\"io.quarkus.narayana.jta.RequestScopedTransaction\"));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(\n\"com.arjuna.ats.internal.jta.resources.arjunacore.CommitMarkableResourceRecord\"));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(SocketProcessId.class.getName()));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(CommitMarkableResourceRecordRecoveryModule.class.getName()));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(RecoverConnectableAtomicAction.class.getName()));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(TransactionStatusConnectionManager.class.getName()));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(JTAActionStatusServiceXAResourceOrphanFilter.class.getName()));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(AtomicActionExpiryScanner.class.getName()));\nindexBuildItem.getIndex().getAllKnownSubclasses(JDBCImple_driver.class).stream()\n.map(impl -> ReflectiveClassBuildItem.builder(impl.name().toString()).build())\n.forEach(reflectiveClass::produce);\nreflectiveClass.produce(ReflectiveClassBuildItem.builder(JTAEnvironmentBean.class,\nUserTransactionImple.class,\nCheckedActionFactoryImple.class,\nTransactionManagerImple.class,\nTransactionSynchronizationRegistryImple.class,\nObjectStoreEnvironmentBean.class,\nShadowNoFileLockStore.class,\nJDBCStore.class,\nSocketProcessId.class,\nAtomicActionRecoveryModule.class,\nXARecoveryModule.class,\nXAResourceRecord.class,\nJTATransactionLogXAResourceOrphanFilter.class,\nJTANodeNameXAResourceOrphanFilter.class,\nJTAActionStatusServiceXAResourceOrphanFilter.class,\nExpiredTransactionStatusManagerScanner.class).build());\nAdditionalBeanBuildItem.Builder builder = AdditionalBeanBuildItem.builder();\nbuilder.addBeanClass(TransactionalInterceptorSupports.class);\nbuilder.addBeanClass(TransactionalInterceptorNever.class);\nbuilder.addBeanClass(TransactionalInterceptorRequired.class);\nbuilder.addBeanClass(TransactionalInterceptorRequiresNew.class);\nbuilder.addBeanClass(TransactionalInterceptorMandatory.class);\nbuilder.addBeanClass(TransactionalInterceptorNotSupported.class);\nadditionalBeans.produce(builder.build());\nrecorder.disableTransactionStatusManager();\nrecorder.setNodeName(transactions);\nrecorder.setDefaultTimeout(transactions);\nrecorder.setConfig(transactions);\n}\n@BuildStep\n@Record(STATIC_INIT)\n@BuildStep\n@Record(RUNTIME_INIT)\n@Consume(NarayanaInitBuildItem.class)\n@Consume(SyntheticBeansRuntimeInitBuildItem.class)\npublic void startRecoveryService(NarayanaJtaRecorder recorder,\nList jdbcDataSourceBuildItems, TransactionManagerConfiguration transactions) {\nMap configuredDataSourcesConfigKeys = jdbcDataSourceBuildItems.stream()\n.map(j -> j.getName())\n.collect(Collectors.toMap(Function.identity(),\nn -> DataSourceUtil.dataSourcePropertyKey(n, \"jdbc.transactions\")));\nSet dataSourcesWithTransactionIntegration = jdbcDataSourceBuildItems.stream()\n.filter(j -> j.isTransactionIntegrationEnabled())\n.map(j -> 
j.getName())\n.collect(Collectors.toSet());\nrecorder.startRecoveryService(transactions, configuredDataSourcesConfigKeys, dataSourcesWithTransactionIntegration);\n}\n@BuildStep(onlyIf = IsTest.class)\nvoid testTx(BuildProducer generatedBeanBuildItemBuildProducer,\nBuildProducer additionalBeans) {\ntry (ClassCreator c = ClassCreator.builder()\n.classOutput(new GeneratedBeanGizmoAdaptor(generatedBeanBuildItemBuildProducer)).className(\nTestTransactionInterceptor.class.getName() + \"Generated\")\n.superClass(TestTransactionInterceptor.class).build()) {\nc.addAnnotation(TEST_TRANSACTION);\nc.addAnnotation(Interceptor.class.getName());\nc.addAnnotation(Priority.class).addValue(\"value\", Interceptor.Priority.PLATFORM_BEFORE + 200);\n}\nadditionalBeans.produce(AdditionalBeanBuildItem.builder().addBeanClass(TestTransactionInterceptor.class)\n.addBeanClass(TEST_TRANSACTION).build());\n}\n@BuildStep\npublic ContextConfiguratorBuildItem transactionContext(ContextRegistrationPhaseBuildItem contextRegistrationPhase) {\nreturn new ContextConfiguratorBuildItem(contextRegistrationPhase.getContext()\n.configure(TransactionScoped.class).normal().contextClass(TransactionContext.class));\n}\n@BuildStep\npublic CustomScopeBuildItem registerScope() {\nreturn new CustomScopeBuildItem(TransactionScoped.class);\n}\n@BuildStep\nvoid unremovableBean(BuildProducer unremovableBeans) {\nunremovableBeans.produce(UnremovableBeanBuildItem.beanClassNames(JtaContextProvider.LifecycleManager.class.getName()));\nunremovableBeans.produce(UnremovableBeanBuildItem.beanTypes(TransactionManager.class));\n}\n@BuildStep\nvoid logCleanupFilters(BuildProducer logCleanupFilters) {\nlogCleanupFilters.produce(new LogCleanupFilterBuildItem(\"com.arjuna.ats.jbossatx\", \"ARJUNA032010:\", \"ARJUNA032013:\"));\n}\n}", + "context_after": "class NarayanaJtaProcessor {\nprivate static final String TEST_TRANSACTION = \"io.quarkus.test.TestTransaction\";\n@BuildStep\npublic NativeImageSystemPropertyBuildItem nativeImageSystemPropertyBuildItem() {\nreturn new NativeImageSystemPropertyBuildItem(\"CoordinatorEnvironmentBean.transactionStatusManagerEnable\", \"false\");\n}\n@BuildStep\n@Record(RUNTIME_INIT)\n@Produce(NarayanaInitBuildItem.class)\npublic void build(NarayanaJtaRecorder recorder,\nCombinedIndexBuildItem indexBuildItem,\nBuildProducer additionalBeans,\nBuildProducer reflectiveClass,\nBuildProducer runtimeInit,\nBuildProducer feature,\nTransactionManagerConfiguration transactions, ShutdownContextBuildItem shutdownContextBuildItem) {\nrecorder.handleShutdown(shutdownContextBuildItem, transactions);\nfeature.produce(new FeatureBuildItem(Feature.NARAYANA_JTA));\nadditionalBeans.produce(new AdditionalBeanBuildItem(NarayanaJtaProducers.class));\nadditionalBeans.produce(AdditionalBeanBuildItem.unremovableOf(\"io.quarkus.narayana.jta.RequestScopedTransaction\"));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(\n\"com.arjuna.ats.internal.jta.resources.arjunacore.CommitMarkableResourceRecord\"));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(SocketProcessId.class.getName()));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(CommitMarkableResourceRecordRecoveryModule.class.getName()));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(RecoverConnectableAtomicAction.class.getName()));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(TransactionStatusConnectionManager.class.getName()));\nruntimeInit.produce(new 
RuntimeInitializedClassBuildItem(JTAActionStatusServiceXAResourceOrphanFilter.class.getName()));\nruntimeInit.produce(new RuntimeInitializedClassBuildItem(AtomicActionExpiryScanner.class.getName()));\nindexBuildItem.getIndex().getAllKnownSubclasses(JDBCImple_driver.class).stream()\n.map(impl -> ReflectiveClassBuildItem.builder(impl.name().toString()).build())\n.forEach(reflectiveClass::produce);\nreflectiveClass.produce(ReflectiveClassBuildItem.builder(JTAEnvironmentBean.class,\nUserTransactionImple.class,\nCheckedActionFactoryImple.class,\nTransactionManagerImple.class,\nTransactionSynchronizationRegistryImple.class,\nObjectStoreEnvironmentBean.class,\nShadowNoFileLockStore.class,\nJDBCStore.class,\nSocketProcessId.class,\nAtomicActionRecoveryModule.class,\nXARecoveryModule.class,\nXAResourceRecord.class,\nJTATransactionLogXAResourceOrphanFilter.class,\nJTANodeNameXAResourceOrphanFilter.class,\nJTAActionStatusServiceXAResourceOrphanFilter.class,\nExpiredTransactionStatusManagerScanner.class).build());\nAdditionalBeanBuildItem.Builder builder = AdditionalBeanBuildItem.builder();\nbuilder.addBeanClass(TransactionalInterceptorSupports.class);\nbuilder.addBeanClass(TransactionalInterceptorNever.class);\nbuilder.addBeanClass(TransactionalInterceptorRequired.class);\nbuilder.addBeanClass(TransactionalInterceptorRequiresNew.class);\nbuilder.addBeanClass(TransactionalInterceptorMandatory.class);\nbuilder.addBeanClass(TransactionalInterceptorNotSupported.class);\nadditionalBeans.produce(builder.build());\nrecorder.disableTransactionStatusManager();\nrecorder.setNodeName(transactions);\nrecorder.setDefaultTimeout(transactions);\nrecorder.setConfig(transactions);\n}\n@BuildStep\n@Record(STATIC_INIT)\n@BuildStep\n@Record(RUNTIME_INIT)\n@Consume(NarayanaInitBuildItem.class)\n@Consume(SyntheticBeansRuntimeInitBuildItem.class)\npublic void startRecoveryService(NarayanaJtaRecorder recorder,\nList jdbcDataSourceBuildItems, TransactionManagerConfiguration transactions) {\nMap configuredDataSourcesConfigKeys = jdbcDataSourceBuildItems.stream()\n.map(j -> j.getName())\n.collect(Collectors.toMap(Function.identity(),\nn -> DataSourceUtil.dataSourcePropertyKey(n, \"jdbc.transactions\")));\nSet dataSourcesWithTransactionIntegration = jdbcDataSourceBuildItems.stream()\n.filter(j -> j.isTransactionIntegrationEnabled())\n.map(j -> j.getName())\n.collect(Collectors.toSet());\nrecorder.startRecoveryService(transactions, configuredDataSourcesConfigKeys, dataSourcesWithTransactionIntegration);\n}\n@BuildStep(onlyIf = IsTest.class)\nvoid testTx(BuildProducer generatedBeanBuildItemBuildProducer,\nBuildProducer additionalBeans) {\ntry (ClassCreator c = ClassCreator.builder()\n.classOutput(new GeneratedBeanGizmoAdaptor(generatedBeanBuildItemBuildProducer)).className(\nTestTransactionInterceptor.class.getName() + \"Generated\")\n.superClass(TestTransactionInterceptor.class).build()) {\nc.addAnnotation(TEST_TRANSACTION);\nc.addAnnotation(Interceptor.class.getName());\nc.addAnnotation(Priority.class).addValue(\"value\", Interceptor.Priority.PLATFORM_BEFORE + 200);\n}\nadditionalBeans.produce(AdditionalBeanBuildItem.builder().addBeanClass(TestTransactionInterceptor.class)\n.addBeanClass(TEST_TRANSACTION).build());\n}\n@BuildStep\npublic ContextConfiguratorBuildItem transactionContext(ContextRegistrationPhaseBuildItem contextRegistrationPhase) {\nreturn new 
ContextConfiguratorBuildItem(contextRegistrationPhase.getContext()\n.configure(TransactionScoped.class).normal().contextClass(TransactionContext.class));\n}\n@BuildStep\npublic CustomScopeBuildItem registerScope() {\nreturn new CustomScopeBuildItem(TransactionScoped.class);\n}\n@BuildStep\nvoid unremovableBean(BuildProducer unremovableBeans) {\nunremovableBeans.produce(UnremovableBeanBuildItem.beanClassNames(JtaContextProvider.LifecycleManager.class.getName()));\nunremovableBeans.produce(UnremovableBeanBuildItem.beanTypes(TransactionManager.class));\n}\n@BuildStep\nvoid logCleanupFilters(BuildProducer logCleanupFilters) {\nlogCleanupFilters.produce(new LogCleanupFilterBuildItem(\"com.arjuna.ats.jbossatx\", \"ARJUNA032010:\", \"ARJUNA032013:\"));\n}\n}" + }, + { + "comment": "@aloubyansky any formula is interesting to test in isolation IMHO to make sur the calculation is done right. Even more when there is casts and different types. For example, if registryPreference or platformPreference type changes, that could make it fail again, while with a strong typed test on `computeScore(int, int, int, int)` (or whatever) make sure it is used right and valid in common cases. This is just a suggestion.. I am not blocking the PR ;)", + "method_body": "private double calculateScore(OriginCombination s) {\ndouble combinationScore = 0;\nfor (OriginWithPreference o : s.getCollectedOrigins()) {\ncombinationScore += Math.pow(extOrigins.size(),\nhighestRegistryPreference + 1 - o.getPreference().registryPreference)\n* (((double) Integer.MAX_VALUE + 1 - o.getPreference().platformPreference) / Integer.MAX_VALUE);\n}\nreturn combinationScore;\n}", + "target_code": "* (((double) Integer.MAX_VALUE + 1 - o.getPreference().platformPreference) / Integer.MAX_VALUE);", + "method_body_after": "private double calculateScore(OriginCombination s) {\ndouble combinationScore = 0;\nfor (OriginWithPreference o : s.getCollectedOrigins()) {\ncombinationScore += Math.pow(extOrigins.size(),\nhighestRegistryPreference + 1 - o.getPreference().registryPreference)\n* ((((double) Integer.MAX_VALUE) + 1 - o.getPreference().platformPreference) / Integer.MAX_VALUE);\n}\nreturn combinationScore;\n}", + "context_before": "class OriginSelector {\nprivate final List extOrigins;\nprivate final List completeCombinations = new ArrayList<>();\nprivate int highestRegistryPreference;\npublic OriginSelector(List extOrigins) {\nthis.extOrigins = extOrigins;\n}\npublic void calculateCompatibleCombinations() {\nif (extOrigins.isEmpty()) {\nreturn;\n}\nselect(0, new OriginCombination());\n}\npublic OriginCombination getRecommendedCombination() {\nif (completeCombinations.isEmpty()) {\nreturn null;\n}\nif (completeCombinations.size() == 1) {\nreturn completeCombinations.get(0);\n}\ndouble highestScore = 0;\nOriginCombination recommended = null;\nfor (OriginCombination combination : completeCombinations) {\nfinal double score = calculateScore(combination);\nif (score > highestScore) {\nhighestScore = score;\nrecommended = combination;\n}\n}\nreturn recommended;\n}\nprivate void select(int extIndex, OriginCombination combination) {\nif (extIndex >= extOrigins.size()) {\nthrow new IllegalArgumentException(\n\"Extension index \" + extIndex + \" exceeded the total number of extensions \" + extOrigins.size());\n}\nfinal ExtensionOrigins eo = extOrigins.get(extIndex);\nfor (OriginWithPreference o : eo.getOrigins()) {\nhighestRegistryPreference = Math.max(highestRegistryPreference, o.getPreference().registryPreference);\nfinal OriginCombination 
augmentedCombination = combination.add(eo.getExtensionKey(), o);\nif (augmentedCombination == null) {\ncontinue;\n}\nif (extOrigins.size() == augmentedCombination.size()) {\ncompleteCombinations.add(augmentedCombination);\ncontinue;\n}\nif (extIndex + 1 == extOrigins.size()) {\nreturn;\n}\nselect(extIndex + 1, augmentedCombination);\n}\n}\n}", + "context_after": "class OriginSelector {\nprivate final List extOrigins;\nprivate final List completeCombinations = new ArrayList<>();\nprivate int highestRegistryPreference;\npublic OriginSelector(List extOrigins) {\nthis.extOrigins = extOrigins;\n}\npublic void calculateCompatibleCombinations() {\nif (extOrigins.isEmpty()) {\nreturn;\n}\nselect(0, new OriginCombination());\n}\npublic OriginCombination getRecommendedCombination() {\nif (completeCombinations.isEmpty()) {\nreturn null;\n}\nif (completeCombinations.size() == 1) {\nreturn completeCombinations.get(0);\n}\ndouble highestScore = 0;\nOriginCombination recommended = null;\nfor (OriginCombination combination : completeCombinations) {\nfinal double score = calculateScore(combination);\nif (score > highestScore) {\nhighestScore = score;\nrecommended = combination;\n}\n}\nreturn recommended;\n}\nprivate void select(int extIndex, OriginCombination combination) {\nif (extIndex >= extOrigins.size()) {\nthrow new IllegalArgumentException(\n\"Extension index \" + extIndex + \" exceeded the total number of extensions \" + extOrigins.size());\n}\nfinal ExtensionOrigins eo = extOrigins.get(extIndex);\nfor (OriginWithPreference o : eo.getOrigins()) {\nhighestRegistryPreference = Math.max(highestRegistryPreference, o.getPreference().registryPreference);\nfinal OriginCombination augmentedCombination = combination.add(eo.getExtensionKey(), o);\nif (augmentedCombination == null) {\ncontinue;\n}\nif (extOrigins.size() == augmentedCombination.size()) {\ncompleteCombinations.add(augmentedCombination);\ncontinue;\n}\nif (extIndex + 1 == extOrigins.size()) {\nreturn;\n}\nselect(extIndex + 1, augmentedCombination);\n}\n}\n}" + }, + { + "comment": "For now, external table only support one alter table operation, that is modify table property. 
Also added Precondition check.", + "method_body": "public void processAlterTable(AlterTableStmt stmt) throws UserException {\nTableName dbTableName = stmt.getTbl();\nString ctlName = dbTableName.getCtl();\nString dbName = dbTableName.getDb();\nString tableName = dbTableName.getTbl();\nDatabaseIf dbIf = Env.getCurrentEnv().getCatalogMgr().getCatalog(ctlName).getDbOrDdlException(dbName);\nTableIf tableIf = dbIf.getTableOrDdlException(tableName);\nList alterClauses = Lists.newArrayList();\nboolean needProcessOutsideTableLock = false;\nswitch (tableIf.getType()) {\ncase MATERIALIZED_VIEW:\ncase OLAP:\nOlapTable olapTable = (OlapTable) tableIf;\nneedProcessOutsideTableLock = processAlterOlapTable(stmt, olapTable, alterClauses, (Database) dbIf);\nbreak;\ncase ODBC:\ncase JDBC:\ncase HIVE:\ncase MYSQL:\ncase ELASTICSEARCH:\nprocessAlterExternalTable(stmt, (Table) tableIf, (Database) dbIf);\nreturn;\ncase HMS_EXTERNAL_TABLE:\nalterClauses.addAll(stmt.getOps());\nsetHMSExternalTableAutoAnalyzePolicy((HMSExternalTable) tableIf, alterClauses);\nreturn;\ndefault:\nthrow new DdlException(\"Do not support alter \"\n+ tableIf.getType().toString() + \" table[\" + tableName + \"]\");\n}\nDatabase db = (Database) dbIf;\nif (needProcessOutsideTableLock) {\nPreconditions.checkState(alterClauses.size() == 1);\nAlterClause alterClause = alterClauses.get(0);\nif (alterClause instanceof AddPartitionClause) {\nif (!((AddPartitionClause) alterClause).isTempPartition()) {\nDynamicPartitionUtil.checkAlterAllowed(\n(OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP));\n}\nEnv.getCurrentEnv().addPartition(db, tableName, (AddPartitionClause) alterClause, false, 0, true);\n} else if (alterClause instanceof AddPartitionLikeClause) {\nif (!((AddPartitionLikeClause) alterClause).getIsTempPartition()) {\nDynamicPartitionUtil.checkAlterAllowed(\n(OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP));\n}\nEnv.getCurrentEnv().addPartitionLike(db, tableName, (AddPartitionLikeClause) alterClause);\n} else if (alterClause instanceof ModifyPartitionClause) {\nModifyPartitionClause clause = ((ModifyPartitionClause) alterClause);\nMap properties = clause.getProperties();\nList partitionNames = clause.getPartitionNames();\n((SchemaChangeHandler) schemaChangeHandler).updatePartitionsProperties(\ndb, tableName, partitionNames, properties);\nOlapTable olapTable = (OlapTable) tableIf;\nolapTable.writeLockOrDdlException();\ntry {\nmodifyPartitionsProperty(db, olapTable, partitionNames, properties, clause.isTempPartition());\n} finally {\nolapTable.writeUnlock();\n}\n} else if (alterClause instanceof ModifyTablePropertiesClause) {\nMap properties = alterClause.getProperties();\n((SchemaChangeHandler) schemaChangeHandler).updateTableProperties(db, tableName, properties);\n} else if (alterClause instanceof AlterMultiPartitionClause) {\nif (!((AlterMultiPartitionClause) alterClause).isTempPartition()) {\nDynamicPartitionUtil.checkAlterAllowed(\n(OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP));\n}\nEnv.getCurrentEnv().addMultiPartitions(db, tableName, (AlterMultiPartitionClause) alterClause);\n} else {\nthrow new DdlException(\"Invalid alter operation: \" + alterClause.getOpType());\n}\n}\n}", + "target_code": "(OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP));", + "method_body_after": "public void processAlterTable(AlterTableStmt stmt) throws UserException {\nTableName dbTableName = stmt.getTbl();\nString ctlName = dbTableName.getCtl();\nString dbName = dbTableName.getDb();\nString 
tableName = dbTableName.getTbl();\nDatabaseIf dbIf = Env.getCurrentEnv().getCatalogMgr()\n.getCatalogOrException(ctlName, catalog -> new DdlException(\"Unknown catalog \" + catalog))\n.getDbOrDdlException(dbName);\nTableIf tableIf = dbIf.getTableOrDdlException(tableName);\nList alterClauses = Lists.newArrayList();\nboolean needProcessOutsideTableLock = false;\nswitch (tableIf.getType()) {\ncase MATERIALIZED_VIEW:\ncase OLAP:\nOlapTable olapTable = (OlapTable) tableIf;\nneedProcessOutsideTableLock = processAlterOlapTable(stmt, olapTable, alterClauses, (Database) dbIf);\nbreak;\ncase ODBC:\ncase JDBC:\ncase HIVE:\ncase MYSQL:\ncase ELASTICSEARCH:\nprocessAlterExternalTable(stmt, (Table) tableIf, (Database) dbIf);\nreturn;\ncase HMS_EXTERNAL_TABLE:\ncase JDBC_EXTERNAL_TABLE:\ncase ICEBERG_EXTERNAL_TABLE:\ncase PAIMON_EXTERNAL_TABLE:\ncase MAX_COMPUTE_EXTERNAL_TABLE:\ncase HUDI_EXTERNAL_TABLE:\ncase TRINO_CONNECTOR_EXTERNAL_TABLE:\nalterClauses.addAll(stmt.getOps());\nsetExternalTableAutoAnalyzePolicy((ExternalTable) tableIf, alterClauses);\nreturn;\ndefault:\nthrow new DdlException(\"Do not support alter \"\n+ tableIf.getType().toString() + \" table[\" + tableName + \"]\");\n}\nDatabase db = (Database) dbIf;\nif (needProcessOutsideTableLock) {\nPreconditions.checkState(alterClauses.size() == 1);\nAlterClause alterClause = alterClauses.get(0);\nif (alterClause instanceof AddPartitionClause) {\nif (!((AddPartitionClause) alterClause).isTempPartition()) {\nDynamicPartitionUtil.checkAlterAllowed(\n(OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP));\n}\nEnv.getCurrentEnv().addPartition(db, tableName, (AddPartitionClause) alterClause, false, 0, true);\n} else if (alterClause instanceof AddPartitionLikeClause) {\nif (!((AddPartitionLikeClause) alterClause).getIsTempPartition()) {\nDynamicPartitionUtil.checkAlterAllowed(\n(OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP));\n}\nEnv.getCurrentEnv().addPartitionLike(db, tableName, (AddPartitionLikeClause) alterClause);\n} else if (alterClause instanceof ModifyPartitionClause) {\nModifyPartitionClause clause = ((ModifyPartitionClause) alterClause);\nMap properties = clause.getProperties();\nList partitionNames = clause.getPartitionNames();\n((SchemaChangeHandler) schemaChangeHandler).updatePartitionsProperties(\ndb, tableName, partitionNames, properties);\nOlapTable olapTable = (OlapTable) tableIf;\nolapTable.writeLockOrDdlException();\ntry {\nmodifyPartitionsProperty(db, olapTable, partitionNames, properties, clause.isTempPartition());\n} finally {\nolapTable.writeUnlock();\n}\n} else if (alterClause instanceof ModifyTablePropertiesClause) {\nMap properties = alterClause.getProperties();\n((SchemaChangeHandler) schemaChangeHandler).updateTableProperties(db, tableName, properties);\n} else if (alterClause instanceof AlterMultiPartitionClause) {\nif (!((AlterMultiPartitionClause) alterClause).isTempPartition()) {\nDynamicPartitionUtil.checkAlterAllowed(\n(OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP));\n}\nEnv.getCurrentEnv().addMultiPartitions(db, tableName, (AlterMultiPartitionClause) alterClause);\n} else {\nthrow new DdlException(\"Invalid alter operation: \" + alterClause.getOpType());\n}\n}\n}", + "context_before": "class Alter {\nprivate static final Logger LOG = LogManager.getLogger(Alter.class);\nprivate AlterHandler schemaChangeHandler;\nprivate AlterHandler materializedViewHandler;\nprivate SystemHandler clusterHandler;\npublic Alter() {\nschemaChangeHandler = Config.isCloudMode() ? 
new CloudSchemaChangeHandler() : new SchemaChangeHandler();\nmaterializedViewHandler = new MaterializedViewHandler();\nclusterHandler = new SystemHandler();\n}\npublic void start() {\nschemaChangeHandler.start();\nmaterializedViewHandler.start();\nclusterHandler.start();\n}\npublic void processCreateMaterializedView(CreateMaterializedViewStmt stmt)\nthrows DdlException, AnalysisException, MetaNotFoundException {\nString tableName = stmt.getBaseIndexName();\nString dbName = stmt.getDBName();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrDdlException(dbName);\nEnv.getCurrentInternalCatalog().checkAvailableCapacity(db);\nOlapTable olapTable = (OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP);\n((MaterializedViewHandler) materializedViewHandler).processCreateMaterializedView(stmt, db, olapTable);\n}\npublic void processDropMaterializedView(DropMaterializedViewStmt stmt) throws DdlException, MetaNotFoundException {\nTableName tableName = stmt.getTableName();\nString dbName = tableName.getDb();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrDdlException(dbName);\nString name = tableName.getTbl();\nOlapTable olapTable = (OlapTable) db.getTableOrMetaException(name, TableType.OLAP);\n((MaterializedViewHandler) materializedViewHandler).processDropMaterializedView(stmt, db, olapTable);\n}\nprivate boolean processAlterOlapTable(AlterTableStmt stmt, OlapTable olapTable, List alterClauses,\nDatabase db) throws UserException {\nif (olapTable.getDataSortInfo() != null\n&& olapTable.getDataSortInfo().getSortType() == TSortType.ZORDER) {\nthrow new UserException(\"z-order table can not support schema change!\");\n}\nstmt.rewriteAlterClause(olapTable);\nalterClauses.addAll(stmt.getOps());\nAlterOperations currentAlterOps = new AlterOperations();\ncurrentAlterOps.checkConflict(alterClauses);\nfor (AlterClause clause : alterClauses) {\nMap properties = null;\ntry {\nproperties = clause.getProperties();\n} catch (Exception e) {\ncontinue;\n}\nif (properties != null && !properties.isEmpty()) {\ncheckNoForceProperty(properties);\n}\n}\nif (olapTable instanceof MTMV) {\ncurrentAlterOps.checkMTMVAllow(alterClauses);\n}\nif (currentAlterOps.needCheckCapacity()) {\nEnv.getCurrentInternalCatalog().checkAvailableCapacity(db);\n}\nolapTable.checkNormalStateForAlter();\nboolean needProcessOutsideTableLock = false;\nif (currentAlterOps.checkTableStoragePolicy(alterClauses)) {\nString tableStoragePolicy = olapTable.getStoragePolicy();\nString currentStoragePolicy = currentAlterOps.getTableStoragePolicy(alterClauses);\nif (!Env.getCurrentEnv().getPolicyMgr()\n.checkStoragePolicyIfSameResource(tableStoragePolicy, currentStoragePolicy)\n&& !tableStoragePolicy.isEmpty()) {\nfor (Partition partition : olapTable.getAllPartitions()) {\nif (Partition.PARTITION_INIT_VERSION < partition.getVisibleVersion()) {\nthrow new DdlException(\"Do not support alter table's storage policy , this table [\"\n+ olapTable.getName() + \"] has storage policy \" + tableStoragePolicy\n+ \", the table need to be empty.\");\n}\n}\n}\nEnv.getCurrentEnv().getPolicyMgr().checkStoragePolicyExist(currentStoragePolicy);\nolapTable.setStoragePolicy(currentStoragePolicy);\nneedProcessOutsideTableLock = true;\n} else if (currentAlterOps.checkIsBeingSynced(alterClauses)) {\nolapTable.setIsBeingSynced(currentAlterOps.isBeingSynced(alterClauses));\nneedProcessOutsideTableLock = true;\n} else if (currentAlterOps.checkMinLoadReplicaNum(alterClauses)) {\nPreconditions.checkState(alterClauses.size() == 1);\nAlterClause alterClause = 
alterClauses.get(0);\nprocessModifyMinLoadReplicaNum(db, olapTable, alterClause);\n} else if (currentAlterOps.checkBinlogConfigChange(alterClauses)) {\nif (!Config.enable_feature_binlog) {\nthrow new DdlException(\"Binlog feature is not enabled\");\n}\n((SchemaChangeHandler) schemaChangeHandler).updateBinlogConfig(db, olapTable, alterClauses);\n} else if (currentAlterOps.hasSchemaChangeOp()) {\nschemaChangeHandler.process(stmt.toSql(), alterClauses, db, olapTable);\n} else if (currentAlterOps.hasRollupOp()) {\nmaterializedViewHandler.process(alterClauses, db, olapTable);\n} else if (currentAlterOps.hasPartitionOp()) {\nPreconditions.checkState(!alterClauses.isEmpty());\nfor (AlterClause alterClause : alterClauses) {\nolapTable.writeLockOrDdlException();\ntry {\nif (alterClause instanceof DropPartitionClause) {\nif (!((DropPartitionClause) alterClause).isTempPartition()) {\nDynamicPartitionUtil.checkAlterAllowed(olapTable);\n}\nEnv.getCurrentEnv().dropPartition(db, olapTable, ((DropPartitionClause) alterClause));\n} else if (alterClause instanceof ReplacePartitionClause) {\nEnv.getCurrentEnv().replaceTempPartition(db, olapTable, (ReplacePartitionClause) alterClause);\n} else if (alterClause instanceof ModifyPartitionClause) {\nModifyPartitionClause clause = ((ModifyPartitionClause) alterClause);\nif (clause.isNeedExpand()) {\nList partitionNames = clause.getPartitionNames();\npartitionNames.clear();\nfor (Partition partition : olapTable.getPartitions()) {\npartitionNames.add(partition.getName());\n}\n}\nMap properties = clause.getProperties();\nif (properties.containsKey(PropertyAnalyzer.PROPERTIES_INMEMORY)) {\nboolean isInMemory =\nBoolean.parseBoolean(properties.get(PropertyAnalyzer.PROPERTIES_INMEMORY));\nif (isInMemory) {\nthrow new UserException(\"Not support set 'in_memory'='true' now!\");\n}\nneedProcessOutsideTableLock = true;\n} else {\nList partitionNames = clause.getPartitionNames();\nif (!properties.containsKey(PropertyAnalyzer.PROPERTIES_STORAGE_POLICY)) {\nmodifyPartitionsProperty(db, olapTable, partitionNames, properties,\nclause.isTempPartition());\n} else {\nneedProcessOutsideTableLock = true;\n}\n}\n} else if (alterClause instanceof DropPartitionFromIndexClause) {\n} else if (alterClause instanceof AddPartitionClause\n|| alterClause instanceof AddPartitionLikeClause\n|| alterClause instanceof AlterMultiPartitionClause) {\nneedProcessOutsideTableLock = true;\n} else {\nthrow new DdlException(\"Invalid alter operation: \" + alterClause.getOpType());\n}\n} finally {\nolapTable.writeUnlock();\n}\n}\n} else if (currentAlterOps.hasRenameOp()) {\nprocessRename(db, olapTable, alterClauses);\n} else if (currentAlterOps.hasReplaceTableOp()) {\nprocessReplaceTable(db, olapTable, alterClauses);\n} else if (currentAlterOps.contains(AlterOpType.MODIFY_TABLE_PROPERTY_SYNC)) {\nneedProcessOutsideTableLock = true;\n} else if (currentAlterOps.contains(AlterOpType.MODIFY_DISTRIBUTION)) {\nPreconditions.checkState(alterClauses.size() == 1);\nAlterClause alterClause = alterClauses.get(0);\nEnv.getCurrentEnv()\n.modifyDefaultDistributionBucketNum(db, olapTable, (ModifyDistributionClause) alterClause);\n} else if (currentAlterOps.contains(AlterOpType.MODIFY_COLUMN_COMMENT)) {\nprocessModifyColumnComment(db, olapTable, alterClauses);\n} else if (currentAlterOps.contains(AlterOpType.MODIFY_TABLE_COMMENT)) {\nPreconditions.checkState(alterClauses.size() == 1);\nAlterClause alterClause = alterClauses.get(0);\nprocessModifyTableComment(db, olapTable, alterClause);\n} else {\nthrow new 
DdlException(\"Invalid alter operations: \" + currentAlterOps);\n}\nif (needChangeMTMVState(alterClauses)) {\nEnv.getCurrentEnv().getMtmvService().alterTable(olapTable);\n}\nreturn needProcessOutsideTableLock;\n}\nprivate void setHMSExternalTableAutoAnalyzePolicy(HMSExternalTable table, List alterClauses) {\nPreconditions.checkState(alterClauses.size() == 1);\nAlterClause alterClause = alterClauses.get(0);\nPreconditions.checkState(alterClause instanceof ModifyTablePropertiesClause);\nMap properties = alterClause.getProperties();\nPreconditions.checkState(properties.size() == 1);\nPreconditions.checkState(properties.containsKey(PropertyAnalyzer.PROPERTIES_AUTO_ANALYZE_POLICY));\nString value = properties.get(PropertyAnalyzer.PROPERTIES_AUTO_ANALYZE_POLICY);\nPreconditions.checkState(PropertyAnalyzer.ENABLE_AUTO_ANALYZE_POLICY.equalsIgnoreCase(value)\n|| PropertyAnalyzer.DISABLE_AUTO_ANALYZE_POLICY.equalsIgnoreCase(value)\n|| PropertyAnalyzer.NONE_AUTO_ANALYZE_POLICY.equalsIgnoreCase(value));\nvalue = value.equalsIgnoreCase(PropertyAnalyzer.NONE_AUTO_ANALYZE_POLICY) ? null : value;\ntable.setAutoAnalyzePolicy(value);\nModifyTablePropertyOperationLog info = new ModifyTablePropertyOperationLog(table.getCatalog().getName(),\ntable.getDatabase().getFullName(), table.getName(), properties);\nEnv.getCurrentEnv().getEditLog().logModifyTableProperties(info);\n}\nprivate boolean needChangeMTMVState(List alterClauses) {\nfor (AlterClause alterClause : alterClauses) {\nif (alterClause.needChangeMTMVState()) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate void processModifyTableComment(Database db, OlapTable tbl, AlterClause alterClause)\nthrows DdlException {\ntbl.writeLockOrDdlException();\ntry {\nModifyTableCommentClause clause = (ModifyTableCommentClause) alterClause;\ntbl.setComment(clause.getComment());\nModifyCommentOperationLog op = ModifyCommentOperationLog\n.forTable(db.getId(), tbl.getId(), clause.getComment());\nEnv.getCurrentEnv().getEditLog().logModifyComment(op);\n} finally {\ntbl.writeUnlock();\n}\n}\nprivate void processModifyColumnComment(Database db, OlapTable tbl, List alterClauses)\nthrows DdlException {\ntbl.writeLockOrDdlException();\ntry {\nMap colToComment = Maps.newHashMap();\nfor (AlterClause alterClause : alterClauses) {\nPreconditions.checkState(alterClause instanceof ModifyColumnCommentClause);\nModifyColumnCommentClause clause = (ModifyColumnCommentClause) alterClause;\nString colName = clause.getColName();\nif (tbl.getColumn(colName) == null) {\nthrow new DdlException(\"Unknown column: \" + colName);\n}\nif (colToComment.containsKey(colName)) {\nthrow new DdlException(\"Duplicate column: \" + colName);\n}\ncolToComment.put(colName, clause.getComment());\n}\nfor (Map.Entry entry : colToComment.entrySet()) {\nColumn col = tbl.getColumn(entry.getKey());\ncol.setComment(entry.getValue());\n}\nModifyCommentOperationLog op = ModifyCommentOperationLog.forColumn(db.getId(), tbl.getId(), colToComment);\nEnv.getCurrentEnv().getEditLog().logModifyComment(op);\n} finally {\ntbl.writeUnlock();\n}\n}\npublic void replayModifyComment(ModifyCommentOperationLog operation) throws MetaNotFoundException {\nlong dbId = operation.getDbId();\nlong tblId = operation.getTblId();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrMetaException(dbId);\nTable tbl = db.getTableOrMetaException(tblId);\ntbl.writeLock();\ntry {\nModifyCommentOperationLog.Type type = operation.getType();\nswitch (type) {\ncase TABLE:\ntbl.setComment(operation.getTblComment());\nbreak;\ncase COLUMN:\nfor (Map.Entry 
entry : operation.getColToComment().entrySet()) {\ntbl.getColumn(entry.getKey()).setComment(entry.getValue());\n}\nbreak;\ndefault:\nbreak;\n}\n} finally {\ntbl.writeUnlock();\n}\n}\nprivate void processAlterExternalTable(AlterTableStmt stmt, Table externalTable, Database db) throws UserException {\nstmt.checkExternalTableOperationAllow(externalTable);\nList alterClauses = stmt.getOps();\nAlterOperations currentAlterOps = new AlterOperations();\ncurrentAlterOps.checkConflict(alterClauses);\nif (currentAlterOps.hasRenameOp()) {\nprocessRename(db, externalTable, alterClauses);\n} else if (currentAlterOps.hasSchemaChangeOp()) {\nschemaChangeHandler.processExternalTable(alterClauses, db, externalTable);\n} else if (currentAlterOps.contains(AlterOpType.MODIFY_ENGINE)) {\nModifyEngineClause modifyEngineClause = (ModifyEngineClause) alterClauses.get(0);\nprocessModifyEngine(db, externalTable, modifyEngineClause);\n}\n}\npublic void processModifyEngine(Database db, Table externalTable, ModifyEngineClause clause) throws DdlException {\nexternalTable.writeLockOrDdlException();\ntry {\nif (externalTable.getType() != TableType.MYSQL) {\nthrow new DdlException(\"Only support modify table engine from MySQL to ODBC\");\n}\nprocessModifyEngineInternal(db, externalTable, clause.getProperties(), false);\n} finally {\nexternalTable.writeUnlock();\n}\nLOG.info(\"modify table {}'s engine from MySQL to ODBC\", externalTable.getName());\n}\npublic void replayProcessModifyEngine(ModifyTableEngineOperationLog log) {\nDatabase db = Env.getCurrentInternalCatalog().getDbNullable(log.getDbId());\nif (db == null) {\nreturn;\n}\nMysqlTable mysqlTable = (MysqlTable) db.getTableNullable(log.getTableId());\nif (mysqlTable == null) {\nreturn;\n}\nmysqlTable.writeLock();\ntry {\nprocessModifyEngineInternal(db, mysqlTable, log.getProperties(), true);\n} finally {\nmysqlTable.writeUnlock();\n}\n}\nprivate void processModifyEngineInternal(Database db, Table externalTable,\nMap prop, boolean isReplay) {\nMysqlTable mysqlTable = (MysqlTable) externalTable;\nMap newProp = Maps.newHashMap(prop);\nnewProp.put(OdbcTable.ODBC_HOST, mysqlTable.getHost());\nnewProp.put(OdbcTable.ODBC_PORT, mysqlTable.getPort());\nnewProp.put(OdbcTable.ODBC_USER, mysqlTable.getUserName());\nnewProp.put(OdbcTable.ODBC_PASSWORD, mysqlTable.getPasswd());\nnewProp.put(OdbcTable.ODBC_DATABASE, mysqlTable.getMysqlDatabaseName());\nnewProp.put(OdbcTable.ODBC_TABLE, mysqlTable.getMysqlTableName());\nnewProp.put(OdbcTable.ODBC_TYPE, TOdbcTableType.MYSQL.name());\nOdbcTable odbcTable = null;\ntry {\nodbcTable = new OdbcTable(mysqlTable.getId(), mysqlTable.getName(), mysqlTable.getBaseSchema(), newProp);\n} catch (DdlException e) {\nLOG.warn(\"Should not happen\", e);\nreturn;\n}\nodbcTable.writeLock();\ntry {\ndb.unregisterTable(mysqlTable.getName());\ndb.registerTable(odbcTable);\nif (!isReplay) {\nModifyTableEngineOperationLog log = new ModifyTableEngineOperationLog(db.getId(),\nexternalTable.getId(), prop);\nEnv.getCurrentEnv().getEditLog().logModifyTableEngine(log);\n}\n} finally {\nodbcTable.writeUnlock();\n}\n}\nprivate void processReplaceTable(Database db, OlapTable origTable, List alterClauses)\nthrows UserException {\nReplaceTableClause clause = (ReplaceTableClause) alterClauses.get(0);\nString newTblName = clause.getTblName();\nboolean swapTable = clause.isSwapTable();\nprocessReplaceTable(db, origTable, newTblName, swapTable);\n}\npublic void processReplaceTable(Database db, OlapTable origTable, String newTblName, boolean swapTable)\nthrows 
UserException {\ndb.writeLockOrDdlException();\ntry {\nList tableTypes = Lists.newArrayList(TableType.OLAP, TableType.MATERIALIZED_VIEW);\nTable newTbl = db.getTableOrMetaException(newTblName, tableTypes);\nOlapTable olapNewTbl = (OlapTable) newTbl;\nList tableList = Lists.newArrayList(origTable, newTbl);\ntableList.sort((Comparator.comparing(Table::getId)));\nMetaLockUtils.writeLockTablesOrMetaException(tableList);\ntry {\nString oldTblName = origTable.getName();\nolapNewTbl.checkAndSetName(oldTblName, true);\nif (swapTable) {\norigTable.checkAndSetName(newTblName, true);\n}\nreplaceTableInternal(db, origTable, olapNewTbl, swapTable, false);\nReplaceTableOperationLog log = new ReplaceTableOperationLog(db.getId(),\norigTable.getId(), olapNewTbl.getId(), swapTable);\nEnv.getCurrentEnv().getEditLog().logReplaceTable(log);\nLOG.info(\"finish replacing table {} with table {}, is swap: {}\", oldTblName, newTblName, swapTable);\n} finally {\nMetaLockUtils.writeUnlockTables(tableList);\n}\n} finally {\ndb.writeUnlock();\n}\n}\npublic void replayReplaceTable(ReplaceTableOperationLog log) throws MetaNotFoundException {\nlong dbId = log.getDbId();\nlong origTblId = log.getOrigTblId();\nlong newTblId = log.getNewTblId();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrMetaException(dbId);\nList tableTypes = Lists.newArrayList(TableType.OLAP, TableType.MATERIALIZED_VIEW);\nOlapTable origTable = (OlapTable) db.getTableOrMetaException(origTblId, tableTypes);\nOlapTable newTbl = (OlapTable) db.getTableOrMetaException(newTblId, tableTypes);\nList
tableList = Lists.newArrayList(origTable, newTbl);\ntableList.sort((Comparator.comparing(Table::getId)));\nMetaLockUtils.writeLockTablesOrMetaException(tableList);\ntry {\nreplaceTableInternal(db, origTable, newTbl, log.isSwapTable(), true);\n} catch (DdlException e) {\nLOG.warn(\"should not happen\", e);\n} finally {\nMetaLockUtils.writeUnlockTables(tableList);\n}\nLOG.info(\"finish replay replacing table {} with table {}, is swap: {}\", origTblId, newTblId, log.isSwapTable());\n}\n/**\n* The replace table operation works as follow:\n* For example, REPLACE TABLE A WITH TABLE B.\n*
\n* 1. If \"swapTable\" is true, A will be renamed to B, and B will be renamed to A\n* 1.1 check if A can be renamed to B (checking name conflict, etc...)\n* 1.2 check if B can be renamed to A (checking name conflict, etc...)\n* 1.3 rename B to A, drop old A, and add new A to database.\n* 1.4 rename A to B, drop old B, and add new B to database.\n*
\n* 2. If \"swapTable\" is false, A will be dropped, and B will be renamed to A\n* 1.1 check if B can be renamed to A (checking name conflict, etc...)\n* 1.2 rename B to A, drop old A, and add new A to database.\n*/\nprivate void replaceTableInternal(Database db, OlapTable origTable, OlapTable newTbl, boolean swapTable,\nboolean isReplay)\nthrows DdlException {\nString oldTblName = origTable.getName();\nString newTblName = newTbl.getName();\ndb.unregisterTable(oldTblName);\ndb.unregisterTable(newTblName);\nnewTbl.checkAndSetName(oldTblName, false);\ndb.registerTable(newTbl);\nif (swapTable) {\norigTable.checkAndSetName(newTblName, false);\ndb.registerTable(origTable);\n} else {\nEnv.getCurrentEnv().onEraseOlapTable(origTable, isReplay);\nif (origTable.getType() == TableType.MATERIALIZED_VIEW) {\nEnv.getCurrentEnv().getMtmvService().deregisterMTMV((MTMV) origTable);\n}\n}\n}\npublic void processAlterView(AlterViewStmt stmt, ConnectContext ctx) throws UserException {\nTableName dbTableName = stmt.getTbl();\nString dbName = dbTableName.getDb();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrDdlException(dbName);\nString tableName = dbTableName.getTbl();\nView view = (View) db.getTableOrMetaException(tableName, TableType.VIEW);\nmodifyViewDef(db, view, stmt.getInlineViewDef(), ctx.getSessionVariable().getSqlMode(), stmt.getColumns());\n}\nprivate void modifyViewDef(Database db, View view, String inlineViewDef, long sqlMode,\nList newFullSchema) throws DdlException {\ndb.writeLockOrDdlException();\ntry {\nview.writeLockOrDdlException();\ntry {\nview.setInlineViewDefWithSqlMode(inlineViewDef, sqlMode);\ntry {\nview.init();\n} catch (UserException e) {\nthrow new DdlException(\"failed to init view stmt, reason=\" + e.getMessage());\n}\nview.setNewFullSchema(newFullSchema);\nString viewName = view.getName();\ndb.unregisterTable(viewName);\ndb.registerTable(view);\nAlterViewInfo alterViewInfo = new AlterViewInfo(db.getId(), view.getId(),\ninlineViewDef, newFullSchema, sqlMode);\nEnv.getCurrentEnv().getEditLog().logModifyViewDef(alterViewInfo);\nLOG.info(\"modify view[{}] definition to {}\", viewName, inlineViewDef);\n} finally {\nview.writeUnlock();\n}\n} finally {\ndb.writeUnlock();\n}\n}\npublic void replayModifyViewDef(AlterViewInfo alterViewInfo) throws MetaNotFoundException, DdlException {\nlong dbId = alterViewInfo.getDbId();\nlong tableId = alterViewInfo.getTableId();\nString inlineViewDef = alterViewInfo.getInlineViewDef();\nList newFullSchema = alterViewInfo.getNewFullSchema();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrMetaException(dbId);\nView view = (View) db.getTableOrMetaException(tableId, TableType.VIEW);\ndb.writeLock();\nview.writeLock();\ntry {\nString viewName = view.getName();\nview.setInlineViewDefWithSqlMode(inlineViewDef, alterViewInfo.getSqlMode());\ntry {\nview.init();\n} catch (UserException e) {\nthrow new DdlException(\"failed to init view stmt, reason=\" + e.getMessage());\n}\nview.setNewFullSchema(newFullSchema);\ndb.unregisterTable(viewName);\ndb.registerTable(view);\nLOG.info(\"replay modify view[{}] definition to {}\", viewName, inlineViewDef);\n} finally {\nview.writeUnlock();\ndb.writeUnlock();\n}\n}\npublic void processAlterCluster(AlterSystemStmt stmt) throws UserException {\nclusterHandler.process(Collections.singletonList(stmt.getAlterClause()), null, null);\n}\nprivate void processRename(Database db, OlapTable table, List alterClauses) throws DdlException {\nfor (AlterClause alterClause : alterClauses) {\nif (alterClause instanceof 
TableRenameClause) {\nEnv.getCurrentEnv().renameTable(db, table, (TableRenameClause) alterClause);\nbreak;\n} else {\nif (alterClause instanceof RollupRenameClause) {\nEnv.getCurrentEnv().renameRollup(db, table, (RollupRenameClause) alterClause);\nbreak;\n} else if (alterClause instanceof PartitionRenameClause) {\nEnv.getCurrentEnv().renamePartition(db, table, (PartitionRenameClause) alterClause);\nbreak;\n} else if (alterClause instanceof ColumnRenameClause) {\nEnv.getCurrentEnv().renameColumn(db, table, (ColumnRenameClause) alterClause);\nbreak;\n} else {\nPreconditions.checkState(false);\n}\n}\n}\n}\nprivate void processRename(Database db, Table table, List alterClauses) throws DdlException {\nfor (AlterClause alterClause : alterClauses) {\nif (alterClause instanceof TableRenameClause) {\nEnv.getCurrentEnv().renameTable(db, table, (TableRenameClause) alterClause);\nbreak;\n} else {\nPreconditions.checkState(false);\n}\n}\n}\n/**\n* Batch update partitions' properties\n* caller should hold the table lock\n*/\npublic void modifyPartitionsProperty(Database db,\nOlapTable olapTable,\nList partitionNames,\nMap properties,\nboolean isTempPartition)\nthrows DdlException, AnalysisException {\ncheckNoForceProperty(properties);\nPreconditions.checkArgument(olapTable.isWriteLockHeldByCurrentThread());\nList modifyPartitionInfos = Lists.newArrayList();\nolapTable.checkNormalStateForAlter();\nfor (String partitionName : partitionNames) {\nPartition partition = olapTable.getPartition(partitionName, isTempPartition);\nif (partition == null) {\nthrow new DdlException(\n\"Partition[\" + partitionName + \"] does not exist in table[\" + olapTable.getName() + \"]\");\n}\n}\nboolean hasInMemory = false;\nif (properties.containsKey(PropertyAnalyzer.PROPERTIES_INMEMORY)) {\nhasInMemory = true;\n}\nReplicaAllocation replicaAlloc = PropertyAnalyzer.analyzeReplicaAllocation(properties, \"\");\nif (!replicaAlloc.isNotSet()) {\nolapTable.checkChangeReplicaAllocation();\n}\nEnv.getCurrentSystemInfo().checkReplicaAllocation(replicaAlloc);\nboolean newInMemory = PropertyAnalyzer.analyzeBooleanProp(properties,\nPropertyAnalyzer.PROPERTIES_INMEMORY, false);\nTTabletType tTabletType =\nPropertyAnalyzer.analyzeTabletType(properties);\nPartitionInfo partitionInfo = olapTable.getPartitionInfo();\nfor (String partitionName : partitionNames) {\nPartition partition = olapTable.getPartition(partitionName, isTempPartition);\nDataProperty dataProperty = partitionInfo.getDataProperty(partition.getId());\nMap modifiedProperties = Maps.newHashMap();\nmodifiedProperties.putAll(properties);\nString currentStoragePolicy = PropertyAnalyzer.analyzeStoragePolicy(properties);\nif (!currentStoragePolicy.equals(\"\")) {\nEnv.getCurrentEnv().getPolicyMgr().checkStoragePolicyExist(currentStoragePolicy);\npartitionInfo.setStoragePolicy(partition.getId(), currentStoragePolicy);\n} else {\nif (partition.getRemoteDataSize() > 0) {\nthrow new AnalysisException(\n\"Cannot cancel storage policy for partition which is already on cold storage.\");\n}\nStoragePolicy checkedPolicyCondition = StoragePolicy.ofCheck(dataProperty.getStoragePolicy());\nStoragePolicy policy = (StoragePolicy) Env.getCurrentEnv().getPolicyMgr()\n.getPolicy(checkedPolicyCondition);\nif (policy != null) {\nlong latestTime = policy.getCooldownTimestampMs() > 0 ? 
policy.getCooldownTimestampMs()\n: Long.MAX_VALUE;\nif (policy.getCooldownTtl() > 0) {\nlatestTime = Math.min(latestTime,\npartition.getVisibleVersionTime() + policy.getCooldownTtl() * 1000);\n}\nif (latestTime < System.currentTimeMillis() + 20 * 1000) {\nthrow new AnalysisException(\n\"Cannot cancel storage policy for partition which already be cooldown\"\n+ \" or will be cooldown soon later\");\n}\n}\npartitionInfo.setStoragePolicy(partition.getId(), \"\");\n}\nDataProperty newDataProperty = PropertyAnalyzer.analyzeDataProperty(modifiedProperties, dataProperty);\nif (newDataProperty != null) {\npartitionInfo.setDataProperty(partition.getId(), newDataProperty);\n}\nif (!replicaAlloc.isNotSet()) {\npartitionInfo.setReplicaAllocation(partition.getId(), replicaAlloc);\n}\nboolean oldInMemory = partitionInfo.getIsInMemory(partition.getId());\nif (hasInMemory && (newInMemory != oldInMemory)) {\npartitionInfo.setIsInMemory(partition.getId(), newInMemory);\n}\nif (tTabletType != partitionInfo.getTabletType(partition.getId())) {\npartitionInfo.setTabletType(partition.getId(), tTabletType);\n}\nModifyPartitionInfo info = new ModifyPartitionInfo(db.getId(), olapTable.getId(), partition.getId(),\nnewDataProperty, replicaAlloc, hasInMemory ? newInMemory : oldInMemory, currentStoragePolicy,\nMaps.newHashMap());\nmodifyPartitionInfos.add(info);\n}\nBatchModifyPartitionsInfo info = new BatchModifyPartitionsInfo(modifyPartitionInfos);\nEnv.getCurrentEnv().getEditLog().logBatchModifyPartition(info);\n}\npublic void checkNoForceProperty(Map properties) throws DdlException {\nfor (RewriteProperty property : PropertyAnalyzer.getInstance().getForceProperties()) {\nif (properties.containsKey(property.key())) {\nthrow new DdlException(\"Cann't modify property '\" + property.key() + \"'\"\n+ (Config.isCloudMode() ? 
\" in cloud mode\" : \"\") + \".\");\n}\n}\n}\npublic void replayModifyPartition(ModifyPartitionInfo info) throws MetaNotFoundException {\nDatabase db = Env.getCurrentInternalCatalog().getDbOrMetaException(info.getDbId());\nOlapTable olapTable = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);\nolapTable.writeLock();\ntry {\nPartitionInfo partitionInfo = olapTable.getPartitionInfo();\nif (info.getDataProperty() != null) {\npartitionInfo.setDataProperty(info.getPartitionId(), info.getDataProperty());\n}\nif (!info.getReplicaAlloc().isNotSet()) {\npartitionInfo.setReplicaAllocation(info.getPartitionId(), info.getReplicaAlloc());\n}\nOptional.ofNullable(info.getStoragePolicy()).filter(p -> !p.isEmpty())\n.ifPresent(p -> partitionInfo.setStoragePolicy(info.getPartitionId(), p));\npartitionInfo.setIsInMemory(info.getPartitionId(), info.isInMemory());\nMap tblProperties = info.getTblProperties();\nif (tblProperties != null && !tblProperties.isEmpty()) {\nolapTable.setReplicaAllocation(tblProperties);\n}\n} finally {\nolapTable.writeUnlock();\n}\n}\nprivate void processModifyMinLoadReplicaNum(Database db, OlapTable olapTable, AlterClause alterClause)\nthrows DdlException {\nMap properties = alterClause.getProperties();\nshort minLoadReplicaNum = -1;\ntry {\nminLoadReplicaNum = PropertyAnalyzer.analyzeMinLoadReplicaNum(properties);\n} catch (AnalysisException e) {\nthrow new DdlException(e.getMessage());\n}\nReplicaAllocation replicaAlloc = olapTable.getDefaultReplicaAllocation();\nif (minLoadReplicaNum > replicaAlloc.getTotalReplicaNum()) {\nthrow new DdlException(\"Failed to check min load replica num [\" + minLoadReplicaNum + \"] <= \"\n+ \"default replica num [\" + replicaAlloc.getTotalReplicaNum() + \"]\");\n}\nif (olapTable.dynamicPartitionExists()) {\nreplicaAlloc = olapTable.getTableProperty().getDynamicPartitionProperty().getReplicaAllocation();\nif (!replicaAlloc.isNotSet() && minLoadReplicaNum > replicaAlloc.getTotalReplicaNum()) {\nthrow new DdlException(\"Failed to check min load replica num [\" + minLoadReplicaNum + \"] <= \"\n+ \"dynamic partition replica num [\" + replicaAlloc.getTotalReplicaNum() + \"]\");\n}\n}\nproperties.put(PropertyAnalyzer.PROPERTIES_MIN_LOAD_REPLICA_NUM, Short.toString(minLoadReplicaNum));\nolapTable.setMinLoadReplicaNum(minLoadReplicaNum);\nolapTable.writeLockOrDdlException();\ntry {\nEnv.getCurrentEnv().modifyTableProperties(db, olapTable, properties);\n} finally {\nolapTable.writeUnlock();\n}\n}\npublic Set getUnfinishedAlterTableIds() {\nSet unfinishedTableIds = Sets.newHashSet();\nfor (AlterJobV2 job : schemaChangeHandler.getAlterJobsV2().values()) {\nif (!job.isDone()) {\nunfinishedTableIds.add(job.getTableId());\n}\n}\nfor (IndexChangeJob job : ((SchemaChangeHandler) schemaChangeHandler).getIndexChangeJobs().values()) {\nif (!job.isDone()) {\nunfinishedTableIds.add(job.getTableId());\n}\n}\nfor (AlterJobV2 job : materializedViewHandler.getAlterJobsV2().values()) {\nif (!job.isDone()) {\nunfinishedTableIds.add(job.getTableId());\n}\n}\nreturn unfinishedTableIds;\n}\npublic AlterHandler getSchemaChangeHandler() {\nreturn schemaChangeHandler;\n}\npublic AlterHandler getMaterializedViewHandler() {\nreturn materializedViewHandler;\n}\npublic AlterHandler getClusterHandler() {\nreturn clusterHandler;\n}\npublic void processAlterMTMV(AlterMTMV alterMTMV, boolean isReplay) {\nTableNameInfo tbl = alterMTMV.getMvName();\nMTMV mtmv = null;\ntry {\nDatabase db = Env.getCurrentInternalCatalog().getDbOrDdlException(tbl.getDb());\nmtmv = 
(MTMV) db.getTableOrMetaException(tbl.getTbl(), TableType.MATERIALIZED_VIEW);\nmtmv.writeMvLock();\nswitch (alterMTMV.getOpType()) {\ncase ALTER_REFRESH_INFO:\nmtmv.alterRefreshInfo(alterMTMV.getRefreshInfo());\nbreak;\ncase ALTER_STATUS:\nmtmv.alterStatus(alterMTMV.getStatus());\nbreak;\ncase ALTER_PROPERTY:\nmtmv.alterMvProperties(alterMTMV.getMvProperties());\nbreak;\ncase ADD_TASK:\nmtmv.addTaskResult(alterMTMV.getTask(), alterMTMV.getRelation(), alterMTMV.getPartitionSnapshots());\nEnv.getCurrentEnv().getMtmvService()\n.refreshComplete(mtmv, alterMTMV.getRelation(), alterMTMV.getTask());\nbreak;\ndefault:\nthrow new RuntimeException(\"Unknown type value: \" + alterMTMV.getOpType());\n}\nif (!isReplay) {\nEnv.getCurrentEnv().getMtmvService().alterMTMV(mtmv, alterMTMV);\nEnv.getCurrentEnv().getEditLog().logAlterMTMV(alterMTMV);\n}\n} catch (UserException e) {\nLOG.warn(e);\n} finally {\nif (mtmv != null) {\nmtmv.writeMvUnlock();\n}\n}\n}\n}", + "context_after": "class Alter {\nprivate static final Logger LOG = LogManager.getLogger(Alter.class);\nprivate AlterHandler schemaChangeHandler;\nprivate AlterHandler materializedViewHandler;\nprivate SystemHandler clusterHandler;\npublic Alter() {\nschemaChangeHandler = Config.isCloudMode() ? new CloudSchemaChangeHandler() : new SchemaChangeHandler();\nmaterializedViewHandler = new MaterializedViewHandler();\nclusterHandler = new SystemHandler();\n}\npublic void start() {\nschemaChangeHandler.start();\nmaterializedViewHandler.start();\nclusterHandler.start();\n}\npublic void processCreateMaterializedView(CreateMaterializedViewStmt stmt)\nthrows DdlException, AnalysisException, MetaNotFoundException {\nString tableName = stmt.getBaseIndexName();\nString dbName = stmt.getDBName();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrDdlException(dbName);\nEnv.getCurrentInternalCatalog().checkAvailableCapacity(db);\nOlapTable olapTable = (OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP);\n((MaterializedViewHandler) materializedViewHandler).processCreateMaterializedView(stmt, db, olapTable);\n}\npublic void processDropMaterializedView(DropMaterializedViewStmt stmt) throws DdlException, MetaNotFoundException {\nTableName tableName = stmt.getTableName();\nString dbName = tableName.getDb();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrDdlException(dbName);\nString name = tableName.getTbl();\nOlapTable olapTable = (OlapTable) db.getTableOrMetaException(name, TableType.OLAP);\n((MaterializedViewHandler) materializedViewHandler).processDropMaterializedView(stmt, db, olapTable);\n}\nprivate boolean processAlterOlapTable(AlterTableStmt stmt, OlapTable olapTable, List alterClauses,\nDatabase db) throws UserException {\nif (olapTable.getDataSortInfo() != null\n&& olapTable.getDataSortInfo().getSortType() == TSortType.ZORDER) {\nthrow new UserException(\"z-order table can not support schema change!\");\n}\nstmt.rewriteAlterClause(olapTable);\nalterClauses.addAll(stmt.getOps());\nAlterOperations currentAlterOps = new AlterOperations();\ncurrentAlterOps.checkConflict(alterClauses);\nfor (AlterClause clause : alterClauses) {\nMap properties = null;\ntry {\nproperties = clause.getProperties();\n} catch (Exception e) {\ncontinue;\n}\nif (properties != null && !properties.isEmpty()) {\ncheckNoForceProperty(properties);\n}\n}\nif (olapTable instanceof MTMV) {\ncurrentAlterOps.checkMTMVAllow(alterClauses);\n}\nif (currentAlterOps.needCheckCapacity()) 
{\nEnv.getCurrentInternalCatalog().checkAvailableCapacity(db);\n}\nolapTable.checkNormalStateForAlter();\nboolean needProcessOutsideTableLock = false;\nString oldTableName = olapTable.getName();\nif (currentAlterOps.checkTableStoragePolicy(alterClauses)) {\nString tableStoragePolicy = olapTable.getStoragePolicy();\nString currentStoragePolicy = currentAlterOps.getTableStoragePolicy(alterClauses);\nif (!Env.getCurrentEnv().getPolicyMgr()\n.checkStoragePolicyIfSameResource(tableStoragePolicy, currentStoragePolicy)\n&& !tableStoragePolicy.isEmpty()) {\nfor (Partition partition : olapTable.getAllPartitions()) {\nif (Partition.PARTITION_INIT_VERSION < partition.getVisibleVersion()) {\nthrow new DdlException(\"Do not support alter table's storage policy , this table [\"\n+ olapTable.getName() + \"] has storage policy \" + tableStoragePolicy\n+ \", the table need to be empty.\");\n}\n}\n}\nEnv.getCurrentEnv().getPolicyMgr().checkStoragePolicyExist(currentStoragePolicy);\nboolean enableUniqueKeyMergeOnWrite;\nolapTable.readLock();\ntry {\nenableUniqueKeyMergeOnWrite = olapTable.getEnableUniqueKeyMergeOnWrite();\n} finally {\nolapTable.readUnlock();\n}\nif (enableUniqueKeyMergeOnWrite && !Strings.isNullOrEmpty(currentStoragePolicy)) {\nthrow new UserException(\n\"Can not set UNIQUE KEY table that enables Merge-On-write\"\n+ \" with storage policy(\" + currentStoragePolicy + \")\");\n}\nolapTable.setStoragePolicy(currentStoragePolicy);\nneedProcessOutsideTableLock = true;\n} else if (currentAlterOps.checkIsBeingSynced(alterClauses)) {\nolapTable.setIsBeingSynced(currentAlterOps.isBeingSynced(alterClauses));\nneedProcessOutsideTableLock = true;\n} else if (currentAlterOps.checkMinLoadReplicaNum(alterClauses)) {\nPreconditions.checkState(alterClauses.size() == 1);\nAlterClause alterClause = alterClauses.get(0);\nprocessModifyMinLoadReplicaNum(db, olapTable, alterClause);\n} else if (currentAlterOps.checkBinlogConfigChange(alterClauses)) {\nif (!Config.enable_feature_binlog) {\nthrow new DdlException(\"Binlog feature is not enabled\");\n}\n((SchemaChangeHandler) schemaChangeHandler).updateBinlogConfig(db, olapTable, alterClauses);\n} else if (currentAlterOps.hasSchemaChangeOp()) {\nschemaChangeHandler.process(stmt.toSql(), alterClauses, db, olapTable);\n} else if (currentAlterOps.hasRollupOp()) {\nmaterializedViewHandler.process(alterClauses, db, olapTable);\n} else if (currentAlterOps.hasPartitionOp()) {\nPreconditions.checkState(!alterClauses.isEmpty());\nfor (AlterClause alterClause : alterClauses) {\nolapTable.writeLockOrDdlException();\ntry {\nif (alterClause instanceof DropPartitionClause) {\nif (!((DropPartitionClause) alterClause).isTempPartition()) {\nDynamicPartitionUtil.checkAlterAllowed(olapTable);\n}\nEnv.getCurrentEnv().dropPartition(db, olapTable, ((DropPartitionClause) alterClause));\n} else if (alterClause instanceof ReplacePartitionClause) {\nEnv.getCurrentEnv().replaceTempPartition(db, olapTable, (ReplacePartitionClause) alterClause);\n} else if (alterClause instanceof ModifyPartitionClause) {\nModifyPartitionClause clause = ((ModifyPartitionClause) alterClause);\nif (clause.isNeedExpand()) {\nList partitionNames = clause.getPartitionNames();\npartitionNames.clear();\nfor (Partition partition : olapTable.getPartitions()) {\npartitionNames.add(partition.getName());\n}\n}\nMap properties = clause.getProperties();\nif (properties.containsKey(PropertyAnalyzer.PROPERTIES_INMEMORY)) {\nboolean isInMemory =\nBoolean.parseBoolean(properties.get(PropertyAnalyzer.PROPERTIES_INMEMORY));\nif 
(isInMemory) {\nthrow new UserException(\"Not support set 'in_memory'='true' now!\");\n}\nneedProcessOutsideTableLock = true;\n} else {\nList partitionNames = clause.getPartitionNames();\nif (!properties.containsKey(PropertyAnalyzer.PROPERTIES_STORAGE_POLICY)) {\nmodifyPartitionsProperty(db, olapTable, partitionNames, properties,\nclause.isTempPartition());\n} else {\nneedProcessOutsideTableLock = true;\n}\n}\n} else if (alterClause instanceof DropPartitionFromIndexClause) {\n} else if (alterClause instanceof AddPartitionClause\n|| alterClause instanceof AddPartitionLikeClause\n|| alterClause instanceof AlterMultiPartitionClause) {\nneedProcessOutsideTableLock = true;\n} else {\nthrow new DdlException(\"Invalid alter operation: \" + alterClause.getOpType());\n}\n} finally {\nolapTable.writeUnlock();\n}\n}\n} else if (currentAlterOps.hasRenameOp()) {\nprocessRename(db, olapTable, alterClauses);\n} else if (currentAlterOps.hasReplaceTableOp()) {\nprocessReplaceTable(db, olapTable, alterClauses);\n} else if (currentAlterOps.contains(AlterOpType.MODIFY_TABLE_PROPERTY_SYNC)) {\nneedProcessOutsideTableLock = true;\n} else if (currentAlterOps.contains(AlterOpType.MODIFY_DISTRIBUTION)) {\nPreconditions.checkState(alterClauses.size() == 1);\nAlterClause alterClause = alterClauses.get(0);\nEnv.getCurrentEnv()\n.modifyDefaultDistributionBucketNum(db, olapTable, (ModifyDistributionClause) alterClause);\n} else if (currentAlterOps.contains(AlterOpType.MODIFY_COLUMN_COMMENT)) {\nprocessModifyColumnComment(db, olapTable, alterClauses);\n} else if (currentAlterOps.contains(AlterOpType.MODIFY_TABLE_COMMENT)) {\nPreconditions.checkState(alterClauses.size() == 1);\nAlterClause alterClause = alterClauses.get(0);\nprocessModifyTableComment(db, olapTable, alterClause);\n} else {\nthrow new DdlException(\"Invalid alter operations: \" + currentAlterOps);\n}\nif (needChangeMTMVState(alterClauses)) {\nEnv.getCurrentEnv().getMtmvService().alterTable(olapTable, oldTableName);\n}\nreturn needProcessOutsideTableLock;\n}\nprivate void setExternalTableAutoAnalyzePolicy(ExternalTable table, List alterClauses) {\nPreconditions.checkState(alterClauses.size() == 1);\nAlterClause alterClause = alterClauses.get(0);\nPreconditions.checkState(alterClause instanceof ModifyTablePropertiesClause);\nMap properties = alterClause.getProperties();\nPreconditions.checkState(properties.size() == 1);\nPreconditions.checkState(properties.containsKey(PropertyAnalyzer.PROPERTIES_AUTO_ANALYZE_POLICY));\nString value = properties.get(PropertyAnalyzer.PROPERTIES_AUTO_ANALYZE_POLICY);\nPreconditions.checkState(PropertyAnalyzer.ENABLE_AUTO_ANALYZE_POLICY.equalsIgnoreCase(value)\n|| PropertyAnalyzer.DISABLE_AUTO_ANALYZE_POLICY.equalsIgnoreCase(value)\n|| PropertyAnalyzer.USE_CATALOG_AUTO_ANALYZE_POLICY.equalsIgnoreCase(value));\nvalue = value.equalsIgnoreCase(PropertyAnalyzer.USE_CATALOG_AUTO_ANALYZE_POLICY) ? 
null : value;\ntable.getCatalog().setAutoAnalyzePolicy(table.getDatabase().getFullName(), table.getName(), value);\nModifyTablePropertyOperationLog info = new ModifyTablePropertyOperationLog(table.getCatalog().getName(),\ntable.getDatabase().getFullName(), table.getName(), properties);\nEnv.getCurrentEnv().getEditLog().logModifyTableProperties(info);\n}\nprivate boolean needChangeMTMVState(List alterClauses) {\nfor (AlterClause alterClause : alterClauses) {\nif (alterClause.needChangeMTMVState()) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate void processModifyTableComment(Database db, OlapTable tbl, AlterClause alterClause)\nthrows DdlException {\ntbl.writeLockOrDdlException();\ntry {\nModifyTableCommentClause clause = (ModifyTableCommentClause) alterClause;\ntbl.setComment(clause.getComment());\nModifyCommentOperationLog op = ModifyCommentOperationLog\n.forTable(db.getId(), tbl.getId(), clause.getComment());\nEnv.getCurrentEnv().getEditLog().logModifyComment(op);\n} finally {\ntbl.writeUnlock();\n}\n}\nprivate void processModifyColumnComment(Database db, OlapTable tbl, List alterClauses)\nthrows DdlException {\ntbl.writeLockOrDdlException();\ntry {\nMap colToComment = Maps.newHashMap();\nfor (AlterClause alterClause : alterClauses) {\nPreconditions.checkState(alterClause instanceof ModifyColumnCommentClause);\nModifyColumnCommentClause clause = (ModifyColumnCommentClause) alterClause;\nString colName = clause.getColName();\nif (tbl.getColumn(colName) == null) {\nthrow new DdlException(\"Unknown column: \" + colName);\n}\nif (colToComment.containsKey(colName)) {\nthrow new DdlException(\"Duplicate column: \" + colName);\n}\ncolToComment.put(colName, clause.getComment());\n}\nfor (Map.Entry entry : colToComment.entrySet()) {\nColumn col = tbl.getColumn(entry.getKey());\ncol.setComment(entry.getValue());\n}\nModifyCommentOperationLog op = ModifyCommentOperationLog.forColumn(db.getId(), tbl.getId(), colToComment);\nEnv.getCurrentEnv().getEditLog().logModifyComment(op);\n} finally {\ntbl.writeUnlock();\n}\n}\npublic void replayModifyComment(ModifyCommentOperationLog operation) throws MetaNotFoundException {\nlong dbId = operation.getDbId();\nlong tblId = operation.getTblId();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrMetaException(dbId);\nTable tbl = db.getTableOrMetaException(tblId);\ntbl.writeLock();\ntry {\nModifyCommentOperationLog.Type type = operation.getType();\nswitch (type) {\ncase TABLE:\ntbl.setComment(operation.getTblComment());\nbreak;\ncase COLUMN:\nfor (Map.Entry entry : operation.getColToComment().entrySet()) {\ntbl.getColumn(entry.getKey()).setComment(entry.getValue());\n}\nbreak;\ndefault:\nbreak;\n}\n} finally {\ntbl.writeUnlock();\n}\n}\nprivate void processAlterExternalTable(AlterTableStmt stmt, Table externalTable, Database db) throws UserException {\nstmt.checkExternalTableOperationAllow(externalTable);\nList alterClauses = stmt.getOps();\nAlterOperations currentAlterOps = new AlterOperations();\ncurrentAlterOps.checkConflict(alterClauses);\nif (currentAlterOps.hasRenameOp()) {\nprocessRename(db, externalTable, alterClauses);\n} else if (currentAlterOps.hasSchemaChangeOp()) {\nschemaChangeHandler.processExternalTable(alterClauses, db, externalTable);\n} else if (currentAlterOps.contains(AlterOpType.MODIFY_ENGINE)) {\nModifyEngineClause modifyEngineClause = (ModifyEngineClause) alterClauses.get(0);\nprocessModifyEngine(db, externalTable, modifyEngineClause);\n}\n}\npublic void processModifyEngine(Database db, Table externalTable, ModifyEngineClause clause) 
throws DdlException {\nexternalTable.writeLockOrDdlException();\ntry {\nif (externalTable.getType() != TableType.MYSQL) {\nthrow new DdlException(\"Only support modify table engine from MySQL to ODBC\");\n}\nprocessModifyEngineInternal(db, externalTable, clause.getProperties(), false);\n} finally {\nexternalTable.writeUnlock();\n}\nLOG.info(\"modify table {}'s engine from MySQL to ODBC\", externalTable.getName());\n}\npublic void replayProcessModifyEngine(ModifyTableEngineOperationLog log) {\nDatabase db = Env.getCurrentInternalCatalog().getDbNullable(log.getDbId());\nif (db == null) {\nreturn;\n}\nMysqlTable mysqlTable = (MysqlTable) db.getTableNullable(log.getTableId());\nif (mysqlTable == null) {\nreturn;\n}\nmysqlTable.writeLock();\ntry {\nprocessModifyEngineInternal(db, mysqlTable, log.getProperties(), true);\n} finally {\nmysqlTable.writeUnlock();\n}\n}\nprivate void processModifyEngineInternal(Database db, Table externalTable,\nMap prop, boolean isReplay) {\nMysqlTable mysqlTable = (MysqlTable) externalTable;\nMap newProp = Maps.newHashMap(prop);\nnewProp.put(OdbcTable.ODBC_HOST, mysqlTable.getHost());\nnewProp.put(OdbcTable.ODBC_PORT, mysqlTable.getPort());\nnewProp.put(OdbcTable.ODBC_USER, mysqlTable.getUserName());\nnewProp.put(OdbcTable.ODBC_PASSWORD, mysqlTable.getPasswd());\nnewProp.put(OdbcTable.ODBC_DATABASE, mysqlTable.getMysqlDatabaseName());\nnewProp.put(OdbcTable.ODBC_TABLE, mysqlTable.getMysqlTableName());\nnewProp.put(OdbcTable.ODBC_TYPE, TOdbcTableType.MYSQL.name());\nOdbcTable odbcTable = null;\ntry {\nodbcTable = new OdbcTable(mysqlTable.getId(), mysqlTable.getName(), mysqlTable.getBaseSchema(), newProp);\n} catch (DdlException e) {\nLOG.warn(\"Should not happen\", e);\nreturn;\n}\nodbcTable.writeLock();\ntry {\ndb.unregisterTable(mysqlTable.getName());\ndb.registerTable(odbcTable);\nif (!isReplay) {\nModifyTableEngineOperationLog log = new ModifyTableEngineOperationLog(db.getId(),\nexternalTable.getId(), prop);\nEnv.getCurrentEnv().getEditLog().logModifyTableEngine(log);\n}\n} finally {\nodbcTable.writeUnlock();\n}\n}\nprivate void processReplaceTable(Database db, OlapTable origTable, List alterClauses)\nthrows UserException {\nReplaceTableClause clause = (ReplaceTableClause) alterClauses.get(0);\nString newTblName = clause.getTblName();\nboolean swapTable = clause.isSwapTable();\nprocessReplaceTable(db, origTable, newTblName, swapTable);\n}\npublic void processReplaceTable(Database db, OlapTable origTable, String newTblName, boolean swapTable)\nthrows UserException {\ndb.writeLockOrDdlException();\ntry {\nList tableTypes = Lists.newArrayList(TableType.OLAP, TableType.MATERIALIZED_VIEW);\nTable newTbl = db.getTableOrMetaException(newTblName, tableTypes);\nOlapTable olapNewTbl = (OlapTable) newTbl;\nList
<Table>
tableList = Lists.newArrayList(origTable, newTbl);\ntableList.sort((Comparator.comparing(Table::getId)));\nMetaLockUtils.writeLockTablesOrMetaException(tableList);\ntry {\nString oldTblName = origTable.getName();\nolapNewTbl.checkAndSetName(oldTblName, true);\nif (swapTable) {\norigTable.checkAndSetName(newTblName, true);\n}\nreplaceTableInternal(db, origTable, olapNewTbl, swapTable, false);\nReplaceTableOperationLog log = new ReplaceTableOperationLog(db.getId(),\norigTable.getId(), olapNewTbl.getId(), swapTable);\nEnv.getCurrentEnv().getEditLog().logReplaceTable(log);\nLOG.info(\"finish replacing table {} with table {}, is swap: {}\", oldTblName, newTblName, swapTable);\n} finally {\nMetaLockUtils.writeUnlockTables(tableList);\n}\n} finally {\ndb.writeUnlock();\n}\n}\npublic void replayReplaceTable(ReplaceTableOperationLog log) throws MetaNotFoundException {\nlong dbId = log.getDbId();\nlong origTblId = log.getOrigTblId();\nlong newTblId = log.getNewTblId();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrMetaException(dbId);\nList<TableType> tableTypes = Lists.newArrayList(TableType.OLAP, TableType.MATERIALIZED_VIEW);\nOlapTable origTable = (OlapTable) db.getTableOrMetaException(origTblId, tableTypes);\nOlapTable newTbl = (OlapTable) db.getTableOrMetaException(newTblId, tableTypes);\nList<Table>
tableList = Lists.newArrayList(origTable, newTbl);\ntableList.sort((Comparator.comparing(Table::getId)));\nMetaLockUtils.writeLockTablesOrMetaException(tableList);\ntry {\nreplaceTableInternal(db, origTable, newTbl, log.isSwapTable(), true);\n} catch (DdlException e) {\nLOG.warn(\"should not happen\", e);\n} finally {\nMetaLockUtils.writeUnlockTables(tableList);\n}\nLOG.info(\"finish replay replacing table {} with table {}, is swap: {}\", origTblId, newTblId, log.isSwapTable());\n}\n/**\n* The replace table operation works as follow:\n* For example, REPLACE TABLE A WITH TABLE B.\n*
<p>
\n* 1. If \"swapTable\" is true, A will be renamed to B, and B will be renamed to A\n* 1.1 check if A can be renamed to B (checking name conflict, etc...)\n* 1.2 check if B can be renamed to A (checking name conflict, etc...)\n* 1.3 rename B to A, drop old A, and add new A to database.\n* 1.4 rename A to B, drop old B, and add new B to database.\n*
<p>
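* A usage sketch covering both cases (assuming Doris's documented ALTER TABLE ... REPLACE syntax with the 'swap' property; the statements are illustrative only):\n* <pre>\n* -- swapTable == true (case 1 above): A and B exchange names and data\n* ALTER TABLE A REPLACE WITH TABLE B PROPERTIES('swap' = 'true');\n* -- swapTable == false (case 2 below): B is renamed to A and the old A is dropped\n* ALTER TABLE A REPLACE WITH TABLE B PROPERTIES('swap' = 'false');\n* </pre>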
\n* 2. If \"swapTable\" is false, A will be dropped, and B will be renamed to A\n* 1.1 check if B can be renamed to A (checking name conflict, etc...)\n* 1.2 rename B to A, drop old A, and add new A to database.\n*/\nprivate void replaceTableInternal(Database db, OlapTable origTable, OlapTable newTbl, boolean swapTable,\nboolean isReplay)\nthrows DdlException {\nString oldTblName = origTable.getName();\nString newTblName = newTbl.getName();\ndb.unregisterTable(oldTblName);\ndb.unregisterTable(newTblName);\nnewTbl.checkAndSetName(oldTblName, false);\ndb.registerTable(newTbl);\nif (swapTable) {\norigTable.checkAndSetName(newTblName, false);\ndb.registerTable(origTable);\n} else {\nEnv.getCurrentEnv().onEraseOlapTable(origTable, isReplay);\nif (origTable.getType() == TableType.MATERIALIZED_VIEW) {\nEnv.getCurrentEnv().getMtmvService().deregisterMTMV((MTMV) origTable);\n}\n}\n}\npublic void processAlterView(AlterViewStmt stmt, ConnectContext ctx) throws UserException {\nTableName dbTableName = stmt.getTbl();\nString dbName = dbTableName.getDb();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrDdlException(dbName);\nString tableName = dbTableName.getTbl();\nView view = (View) db.getTableOrMetaException(tableName, TableType.VIEW);\nmodifyViewDef(db, view, stmt.getInlineViewDef(), ctx.getSessionVariable().getSqlMode(), stmt.getColumns());\n}\nprivate void modifyViewDef(Database db, View view, String inlineViewDef, long sqlMode,\nList newFullSchema) throws DdlException {\ndb.writeLockOrDdlException();\ntry {\nview.writeLockOrDdlException();\ntry {\nview.setInlineViewDefWithSqlMode(inlineViewDef, sqlMode);\ntry {\nview.init();\n} catch (UserException e) {\nthrow new DdlException(\"failed to init view stmt, reason=\" + e.getMessage());\n}\nview.setNewFullSchema(newFullSchema);\nString viewName = view.getName();\ndb.unregisterTable(viewName);\ndb.registerTable(view);\nAlterViewInfo alterViewInfo = new AlterViewInfo(db.getId(), view.getId(),\ninlineViewDef, newFullSchema, sqlMode);\nEnv.getCurrentEnv().getEditLog().logModifyViewDef(alterViewInfo);\nLOG.info(\"modify view[{}] definition to {}\", viewName, inlineViewDef);\n} finally {\nview.writeUnlock();\n}\n} finally {\ndb.writeUnlock();\n}\n}\npublic void replayModifyViewDef(AlterViewInfo alterViewInfo) throws MetaNotFoundException, DdlException {\nlong dbId = alterViewInfo.getDbId();\nlong tableId = alterViewInfo.getTableId();\nString inlineViewDef = alterViewInfo.getInlineViewDef();\nList newFullSchema = alterViewInfo.getNewFullSchema();\nDatabase db = Env.getCurrentInternalCatalog().getDbOrMetaException(dbId);\nView view = (View) db.getTableOrMetaException(tableId, TableType.VIEW);\ndb.writeLock();\nview.writeLock();\ntry {\nString viewName = view.getName();\nview.setInlineViewDefWithSqlMode(inlineViewDef, alterViewInfo.getSqlMode());\ntry {\nview.init();\n} catch (UserException e) {\nthrow new DdlException(\"failed to init view stmt, reason=\" + e.getMessage());\n}\nview.setNewFullSchema(newFullSchema);\ndb.unregisterTable(viewName);\ndb.registerTable(view);\nLOG.info(\"replay modify view[{}] definition to {}\", viewName, inlineViewDef);\n} finally {\nview.writeUnlock();\ndb.writeUnlock();\n}\n}\npublic void processAlterCluster(AlterSystemStmt stmt) throws UserException {\nclusterHandler.process(Collections.singletonList(stmt.getAlterClause()), null, null);\n}\nprivate void processRename(Database db, OlapTable table, List alterClauses) throws DdlException {\nfor (AlterClause alterClause : alterClauses) {\nif (alterClause instanceof 
TableRenameClause) {\nEnv.getCurrentEnv().renameTable(db, table, (TableRenameClause) alterClause);\nbreak;\n} else {\nif (alterClause instanceof RollupRenameClause) {\nEnv.getCurrentEnv().renameRollup(db, table, (RollupRenameClause) alterClause);\nbreak;\n} else if (alterClause instanceof PartitionRenameClause) {\nEnv.getCurrentEnv().renamePartition(db, table, (PartitionRenameClause) alterClause);\nbreak;\n} else if (alterClause instanceof ColumnRenameClause) {\nEnv.getCurrentEnv().renameColumn(db, table, (ColumnRenameClause) alterClause);\nbreak;\n} else {\nPreconditions.checkState(false);\n}\n}\n}\n}\nprivate void processRename(Database db, Table table, List alterClauses) throws DdlException {\nfor (AlterClause alterClause : alterClauses) {\nif (alterClause instanceof TableRenameClause) {\nEnv.getCurrentEnv().renameTable(db, table, (TableRenameClause) alterClause);\nbreak;\n} else {\nPreconditions.checkState(false);\n}\n}\n}\n/**\n* Batch update partitions' properties\n* caller should hold the table lock\n*/\npublic void modifyPartitionsProperty(Database db,\nOlapTable olapTable,\nList partitionNames,\nMap properties,\nboolean isTempPartition)\nthrows DdlException, AnalysisException {\ncheckNoForceProperty(properties);\nPreconditions.checkArgument(olapTable.isWriteLockHeldByCurrentThread());\nList modifyPartitionInfos = Lists.newArrayList();\nolapTable.checkNormalStateForAlter();\nfor (String partitionName : partitionNames) {\nPartition partition = olapTable.getPartition(partitionName, isTempPartition);\nif (partition == null) {\nthrow new DdlException(\n\"Partition[\" + partitionName + \"] does not exist in table[\" + olapTable.getName() + \"]\");\n}\n}\nboolean hasInMemory = false;\nif (properties.containsKey(PropertyAnalyzer.PROPERTIES_INMEMORY)) {\nhasInMemory = true;\n}\nReplicaAllocation replicaAlloc = PropertyAnalyzer.analyzeReplicaAllocation(properties, \"\");\nif (!replicaAlloc.isNotSet()) {\nolapTable.checkChangeReplicaAllocation();\n}\nEnv.getCurrentSystemInfo().checkReplicaAllocation(replicaAlloc);\nboolean newInMemory = PropertyAnalyzer.analyzeBooleanProp(properties,\nPropertyAnalyzer.PROPERTIES_INMEMORY, false);\nTTabletType tTabletType =\nPropertyAnalyzer.analyzeTabletType(properties);\nPartitionInfo partitionInfo = olapTable.getPartitionInfo();\nfor (String partitionName : partitionNames) {\nPartition partition = olapTable.getPartition(partitionName, isTempPartition);\nDataProperty dataProperty = partitionInfo.getDataProperty(partition.getId());\nMap modifiedProperties = Maps.newHashMap();\nmodifiedProperties.putAll(properties);\nString currentStoragePolicy = PropertyAnalyzer.analyzeStoragePolicy(properties);\nif (!currentStoragePolicy.equals(\"\")) {\nEnv.getCurrentEnv().getPolicyMgr().checkStoragePolicyExist(currentStoragePolicy);\npartitionInfo.setStoragePolicy(partition.getId(), currentStoragePolicy);\n} else {\nif (partition.getRemoteDataSize() > 0) {\nthrow new AnalysisException(\n\"Cannot cancel storage policy for partition which is already on cold storage.\");\n}\nStoragePolicy checkedPolicyCondition = StoragePolicy.ofCheck(dataProperty.getStoragePolicy());\nStoragePolicy policy = (StoragePolicy) Env.getCurrentEnv().getPolicyMgr()\n.getPolicy(checkedPolicyCondition);\nif (policy != null) {\nlong latestTime = policy.getCooldownTimestampMs() > 0 ? 
policy.getCooldownTimestampMs()\n: Long.MAX_VALUE;\nif (policy.getCooldownTtl() > 0) {\nlatestTime = Math.min(latestTime,\npartition.getVisibleVersionTime() + policy.getCooldownTtl() * 1000);\n}\nif (latestTime < System.currentTimeMillis() + 20 * 1000) {\nthrow new AnalysisException(\n\"Cannot cancel storage policy for partition which already be cooldown\"\n+ \" or will be cooldown soon later\");\n}\n}\npartitionInfo.setStoragePolicy(partition.getId(), \"\");\n}\nDataProperty newDataProperty = PropertyAnalyzer.analyzeDataProperty(modifiedProperties, dataProperty);\nif (newDataProperty != null) {\npartitionInfo.setDataProperty(partition.getId(), newDataProperty);\n}\nif (!replicaAlloc.isNotSet()) {\npartitionInfo.setReplicaAllocation(partition.getId(), replicaAlloc);\n}\nboolean oldInMemory = partitionInfo.getIsInMemory(partition.getId());\nif (hasInMemory && (newInMemory != oldInMemory)) {\npartitionInfo.setIsInMemory(partition.getId(), newInMemory);\n}\nif (tTabletType != partitionInfo.getTabletType(partition.getId())) {\npartitionInfo.setTabletType(partition.getId(), tTabletType);\n}\nModifyPartitionInfo info = new ModifyPartitionInfo(db.getId(), olapTable.getId(), partition.getId(),\nnewDataProperty, replicaAlloc, hasInMemory ? newInMemory : oldInMemory, currentStoragePolicy,\nMaps.newHashMap());\nmodifyPartitionInfos.add(info);\n}\nBatchModifyPartitionsInfo info = new BatchModifyPartitionsInfo(modifyPartitionInfos);\nEnv.getCurrentEnv().getEditLog().logBatchModifyPartition(info);\n}\npublic void checkNoForceProperty(Map properties) throws DdlException {\nfor (RewriteProperty property : PropertyAnalyzer.getInstance().getForceProperties()) {\nif (properties.containsKey(property.key())) {\nthrow new DdlException(\"Cann't modify property '\" + property.key() + \"'\"\n+ (Config.isCloudMode() ? 
\" in cloud mode\" : \"\") + \".\");\n}\n}\n}\npublic void replayModifyPartition(ModifyPartitionInfo info) throws MetaNotFoundException {\nDatabase db = Env.getCurrentInternalCatalog().getDbOrMetaException(info.getDbId());\nOlapTable olapTable = (OlapTable) db.getTableOrMetaException(info.getTableId(), TableType.OLAP);\nolapTable.writeLock();\ntry {\nPartitionInfo partitionInfo = olapTable.getPartitionInfo();\nif (info.getDataProperty() != null) {\npartitionInfo.setDataProperty(info.getPartitionId(), info.getDataProperty());\n}\nif (!info.getReplicaAlloc().isNotSet()) {\npartitionInfo.setReplicaAllocation(info.getPartitionId(), info.getReplicaAlloc());\n}\nOptional.ofNullable(info.getStoragePolicy()).filter(p -> !p.isEmpty())\n.ifPresent(p -> partitionInfo.setStoragePolicy(info.getPartitionId(), p));\npartitionInfo.setIsInMemory(info.getPartitionId(), info.isInMemory());\nMap tblProperties = info.getTblProperties();\nif (tblProperties != null && !tblProperties.isEmpty()) {\nolapTable.setReplicaAllocation(tblProperties);\n}\n} finally {\nolapTable.writeUnlock();\n}\n}\nprivate void processModifyMinLoadReplicaNum(Database db, OlapTable olapTable, AlterClause alterClause)\nthrows DdlException {\nMap properties = alterClause.getProperties();\nshort minLoadReplicaNum = -1;\ntry {\nminLoadReplicaNum = PropertyAnalyzer.analyzeMinLoadReplicaNum(properties);\n} catch (AnalysisException e) {\nthrow new DdlException(e.getMessage());\n}\nReplicaAllocation replicaAlloc = olapTable.getDefaultReplicaAllocation();\nif (minLoadReplicaNum > replicaAlloc.getTotalReplicaNum()) {\nthrow new DdlException(\"Failed to check min load replica num [\" + minLoadReplicaNum + \"] <= \"\n+ \"default replica num [\" + replicaAlloc.getTotalReplicaNum() + \"]\");\n}\nif (olapTable.dynamicPartitionExists()) {\nreplicaAlloc = olapTable.getTableProperty().getDynamicPartitionProperty().getReplicaAllocation();\nif (!replicaAlloc.isNotSet() && minLoadReplicaNum > replicaAlloc.getTotalReplicaNum()) {\nthrow new DdlException(\"Failed to check min load replica num [\" + minLoadReplicaNum + \"] <= \"\n+ \"dynamic partition replica num [\" + replicaAlloc.getTotalReplicaNum() + \"]\");\n}\n}\nproperties.put(PropertyAnalyzer.PROPERTIES_MIN_LOAD_REPLICA_NUM, Short.toString(minLoadReplicaNum));\nolapTable.setMinLoadReplicaNum(minLoadReplicaNum);\nolapTable.writeLockOrDdlException();\ntry {\nEnv.getCurrentEnv().modifyTableProperties(db, olapTable, properties);\n} finally {\nolapTable.writeUnlock();\n}\n}\npublic Set getUnfinishedAlterTableIds() {\nSet unfinishedTableIds = Sets.newHashSet();\nfor (AlterJobV2 job : schemaChangeHandler.getAlterJobsV2().values()) {\nif (!job.isDone()) {\nunfinishedTableIds.add(job.getTableId());\n}\n}\nfor (IndexChangeJob job : ((SchemaChangeHandler) schemaChangeHandler).getIndexChangeJobs().values()) {\nif (!job.isDone()) {\nunfinishedTableIds.add(job.getTableId());\n}\n}\nfor (AlterJobV2 job : materializedViewHandler.getAlterJobsV2().values()) {\nif (!job.isDone()) {\nunfinishedTableIds.add(job.getTableId());\n}\n}\nreturn unfinishedTableIds;\n}\npublic AlterHandler getSchemaChangeHandler() {\nreturn schemaChangeHandler;\n}\npublic AlterHandler getMaterializedViewHandler() {\nreturn materializedViewHandler;\n}\npublic AlterHandler getClusterHandler() {\nreturn clusterHandler;\n}\npublic void processAlterMTMV(AlterMTMV alterMTMV, boolean isReplay) {\nTableNameInfo tbl = alterMTMV.getMvName();\nMTMV mtmv = null;\ntry {\nDatabase db = Env.getCurrentInternalCatalog().getDbOrDdlException(tbl.getDb());\nmtmv = 
(MTMV) db.getTableOrMetaException(tbl.getTbl(), TableType.MATERIALIZED_VIEW);\nmtmv.writeMvLock();\nswitch (alterMTMV.getOpType()) {\ncase ALTER_REFRESH_INFO:\nmtmv.alterRefreshInfo(alterMTMV.getRefreshInfo());\nbreak;\ncase ALTER_STATUS:\nmtmv.alterStatus(alterMTMV.getStatus());\nbreak;\ncase ALTER_PROPERTY:\nmtmv.alterMvProperties(alterMTMV.getMvProperties());\nbreak;\ncase ADD_TASK:\nmtmv.addTaskResult(alterMTMV.getTask(), alterMTMV.getRelation(), alterMTMV.getPartitionSnapshots());\nEnv.getCurrentEnv().getMtmvService()\n.refreshComplete(mtmv, alterMTMV.getRelation(), alterMTMV.getTask());\nbreak;\ndefault:\nthrow new RuntimeException(\"Unknown type value: \" + alterMTMV.getOpType());\n}\nif (!isReplay) {\nEnv.getCurrentEnv().getMtmvService().alterMTMV(mtmv, alterMTMV);\nEnv.getCurrentEnv().getEditLog().logAlterMTMV(alterMTMV);\n}\n} catch (UserException e) {\nLOG.warn(e);\n} finally {\nif (mtmv != null) {\nmtmv.writeMvUnlock();\n}\n}\n}\n}" + }, + { + "comment": "```suggestion } checkState(!iterator.hasNext(), \"Value state should be empty.\"); ```", + "method_body": "public void initializeState(FunctionInitializationContext context) throws Exception {\nsuper.initializeState(context);\nIterator iterator = valueState.get().iterator();\nif (id > 0) {\ncheckState(iterator.hasNext(), \"Value state can not be empty.\");\nInteger state = iterator.next();\ncheckState(\nid == state,\nString.format(\"Value state(%s) should be equal to id(%s).\", state, id));\n} else {\ncheckState(!iterator.hasNext(), \"Value state should be empty.\");\n}\n}", + "target_code": "}", + "method_body_after": "public void initializeState(FunctionInitializationContext context) throws Exception {\nsuper.initializeState(context);\nIterator iterator = valueState.get().iterator();\nif (id > 0) {\ncheckState(iterator.hasNext(), \"Value state can not be empty.\");\nInteger state = iterator.next();\ncheckState(\nid == state,\nString.format(\"Value state(%s) should be equal to id(%s).\", state, id));\n}\ncheckState(!iterator.hasNext(), \"Value state should be empty.\");\n}", + "context_before": "class StringMap extends AbstractMap {\nprivate StringMap(int id) {\nsuper(id);\n}\n@Override\npublic String map(String value) throws Exception {\nreturn String.valueOf(calculate(Integer.parseInt(value)));\n}\n@Override\n}", + "context_after": "class StringMap extends AbstractMap {\nprivate StringMap(int id) {\nsuper(id);\n}\n@Override\npublic String map(String value) throws Exception {\nreturn String.valueOf(calculate(Integer.parseInt(value)));\n}\n@Override\n}" + }, + { + "comment": "A Table-level SAS is supported by Cosmos - or are those separate tests?", + "method_body": "public void canUseSasTokenToCreateValidTableClient() {\nAssumptions.assumeFalse(IS_COSMOS_TEST, \"SAS Tokens are not supported for Cosmos endpoints.\");\nfinal OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nfinal TableSasPermission permissions = TableSasPermission.parse(\"a\");\nfinal TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;\nfinal TableSasSignatureValues sasSignatureValues =\nnew TableSasSignatureValues(expiryTime, permissions)\n.setProtocol(protocol)\n.setVersion(TableServiceVersion.V2019_02_02.getVersion());\nfinal String sas = tableClient.generateSas(sasSignatureValues);\nfinal TableClientBuilder tableClientBuilder = new TableClientBuilder()\n.httpLogOptions(new 
HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))\n.endpoint(tableClient.getTableEndpoint())\n.sasToken(sas)\n.tableName(tableClient.getTableName());\nif (interceptorManager.isPlaybackMode()) {\ntableClientBuilder.httpClient(playbackClient);\n} else {\ntableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);\nif (!interceptorManager.isLiveMode()) {\ntableClientBuilder.addPolicy(recordPolicy);\n}\ntableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),\nDuration.ofSeconds(100))));\n}\nfinal TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient();\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 204;\nStepVerifier.create(tableAsyncClient.createEntityWithResponse(entity))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}", + "target_code": "Assumptions.assumeFalse(IS_COSMOS_TEST, \"SAS Tokens are not supported for Cosmos endpoints.\");", + "method_body_after": "public void canUseSasTokenToCreateValidTableClient() {\nAssumptions.assumeFalse(IS_COSMOS_TEST, \"Skipping Cosmos test.\");\nfinal OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nfinal TableSasPermission permissions = TableSasPermission.parse(\"a\");\nfinal TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;\nfinal TableSasSignatureValues sasSignatureValues =\nnew TableSasSignatureValues(expiryTime, permissions)\n.setProtocol(protocol)\n.setVersion(TableServiceVersion.V2019_02_02.getVersion());\nfinal String sas = tableClient.generateSas(sasSignatureValues);\nfinal TableClientBuilder tableClientBuilder = new TableClientBuilder()\n.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))\n.endpoint(tableClient.getTableEndpoint())\n.sasToken(sas)\n.tableName(tableClient.getTableName());\nif (interceptorManager.isPlaybackMode()) {\ntableClientBuilder.httpClient(playbackClient);\n} else {\ntableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);\nif (!interceptorManager.isLiveMode()) {\ntableClientBuilder.addPolicy(recordPolicy);\n}\ntableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),\nDuration.ofSeconds(100))));\n}\nfinal TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient();\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 204;\nStepVerifier.create(tableAsyncClient.createEntityWithResponse(entity))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}", + "context_before": "class TableAsyncClientTest extends TestBase {\nprivate static final Duration TIMEOUT = Duration.ofSeconds(100);\nprivate static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();\nprivate static final boolean IS_COSMOS_TEST = System.getenv(\"AZURE_TABLES_CONNECTION_STRING\") != null\n&& System.getenv(\"AZURE_TABLES_CONNECTION_STRING\").contains(\"cosmos.azure.com\");\nprivate TableAsyncClient tableClient;\nprivate HttpPipelinePolicy recordPolicy;\nprivate HttpClient 
playbackClient;\nprivate TableClientBuilder getClientBuilder(String tableName, String connectionString) {\nfinal TableClientBuilder builder = new TableClientBuilder()\n.connectionString(connectionString)\n.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))\n.tableName(tableName);\nif (interceptorManager.isPlaybackMode()) {\nplaybackClient = interceptorManager.getPlaybackClient();\nbuilder.httpClient(playbackClient);\n} else {\nbuilder.httpClient(DEFAULT_HTTP_CLIENT);\nif (!interceptorManager.isLiveMode()) {\nrecordPolicy = interceptorManager.getRecordPolicy();\nbuilder.addPolicy(recordPolicy);\n}\n}\nreturn builder;\n}\n@BeforeAll\nstatic void beforeAll() {\nStepVerifier.setDefaultTimeout(TIMEOUT);\n}\n@AfterAll\nstatic void afterAll() {\nStepVerifier.resetDefaultTimeout();\n}\n@Override\nprotected void beforeTest() {\nfinal String tableName = testResourceNamer.randomName(\"tableName\", 20);\nfinal String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());\ntableClient = getClientBuilder(tableName, connectionString).buildAsyncClient();\ntableClient.createTable().block(TIMEOUT);\n}\n@Test\nvoid createTableAsync() {\nfinal String tableName2 = testResourceNamer.randomName(\"tableName\", 20);\nfinal String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());\nfinal TableAsyncClient tableClient2 = getClientBuilder(tableName2, connectionString).buildAsyncClient();\nStepVerifier.create(tableClient2.createTable())\n.assertNext(Assertions::assertNotNull)\n.expectComplete()\n.verify();\n}\n@Test\nvoid createTableWithResponseAsync() {\nfinal String tableName2 = testResourceNamer.randomName(\"tableName\", 20);\nfinal String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());\nfinal TableAsyncClient tableClient2 = getClientBuilder(tableName2, connectionString).buildAsyncClient();\nfinal int expectedStatusCode = 204;\nStepVerifier.create(tableClient2.createTableWithResponse())\n.assertNext(response -> {\nassertEquals(expectedStatusCode, response.getStatusCode());\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid createEntityAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\nStepVerifier.create(tableClient.createEntity(tableEntity))\n.expectComplete()\n.verify();\n}\n@Test\nvoid createEntityWithResponseAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 204;\nStepVerifier.create(tableClient.createEntityWithResponse(entity))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}\n@Test\nvoid createEntityWithAllSupportedDataTypesAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal boolean booleanValue = true;\nfinal byte[] binaryValue = \"Test value\".getBytes();\nfinal Date dateValue = new Date();\nfinal OffsetDateTime offsetDateTimeValue = OffsetDateTime.now();\nfinal double 
doubleValue = 2.0d;\nfinal UUID guidValue = UUID.randomUUID();\nfinal int int32Value = 1337;\nfinal long int64Value = 1337L;\nfinal String stringValue = \"This is table entity\";\ntableEntity.addProperty(\"BinaryTypeProperty\", binaryValue);\ntableEntity.addProperty(\"BooleanTypeProperty\", booleanValue);\ntableEntity.addProperty(\"DateTypeProperty\", dateValue);\ntableEntity.addProperty(\"OffsetDateTimeTypeProperty\", offsetDateTimeValue);\ntableEntity.addProperty(\"DoubleTypeProperty\", doubleValue);\ntableEntity.addProperty(\"GuidTypeProperty\", guidValue);\ntableEntity.addProperty(\"Int32TypeProperty\", int32Value);\ntableEntity.addProperty(\"Int64TypeProperty\", int64Value);\ntableEntity.addProperty(\"StringTypeProperty\", stringValue);\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))\n.assertNext(response -> {\nfinal TableEntity entity = response.getValue();\nfinal Map properties = entity.getProperties();\nassertTrue(properties.get(\"BinaryTypeProperty\") instanceof byte[]);\nassertTrue(properties.get(\"BooleanTypeProperty\") instanceof Boolean);\nassertTrue(properties.get(\"DateTypeProperty\") instanceof OffsetDateTime);\nassertTrue(properties.get(\"OffsetDateTimeTypeProperty\") instanceof OffsetDateTime);\nassertTrue(properties.get(\"DoubleTypeProperty\") instanceof Double);\nassertTrue(properties.get(\"GuidTypeProperty\") instanceof UUID);\nassertTrue(properties.get(\"Int32TypeProperty\") instanceof Integer);\nassertTrue(properties.get(\"Int64TypeProperty\") instanceof Long);\nassertTrue(properties.get(\"StringTypeProperty\") instanceof String);\n})\n.expectComplete()\n.verify();\n}\n/*@Test\nvoid createEntitySubclassAsync() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nbyte[] bytes = new byte[]{1, 2, 3};\nboolean b = true;\nOffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);\ndouble d = 1.23D;\nUUID uuid = UUID.fromString(\"11111111-2222-3333-4444-555555555555\");\nint i = 123;\nlong l = 123L;\nString s = \"Test\";\nSampleEntity.Color color = SampleEntity.Color.GREEN;\nSampleEntity tableEntity = new SampleEntity(partitionKeyValue, rowKeyValue);\ntableEntity.setByteField(bytes);\ntableEntity.setBooleanField(b);\ntableEntity.setDateTimeField(dateTime);\ntableEntity.setDoubleField(d);\ntableEntity.setUuidField(uuid);\ntableEntity.setIntField(i);\ntableEntity.setLongField(l);\ntableEntity.setStringField(s);\ntableEntity.setEnumField(color);\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))\n.assertNext(response -> {\nTableEntity entity = response.getValue();\nassertArrayEquals((byte[]) entity.getProperties().get(\"ByteField\"), bytes);\nassertEquals(entity.getProperties().get(\"BooleanField\"), b);\nassertTrue(dateTime.isEqual((OffsetDateTime) entity.getProperties().get(\"DateTimeField\")));\nassertEquals(entity.getProperties().get(\"DoubleField\"), d);\nassertEquals(0, uuid.compareTo((UUID) entity.getProperties().get(\"UuidField\")));\nassertEquals(entity.getProperties().get(\"IntField\"), i);\nassertEquals(entity.getProperties().get(\"LongField\"), l);\nassertEquals(entity.getProperties().get(\"StringField\"), s);\nassertEquals(entity.getProperties().get(\"EnumField\"), color.name());\n})\n.expectComplete()\n.verify();\n}*/\n@Test\nvoid 
deleteTableAsync() {\nStepVerifier.create(tableClient.deleteTable())\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteNonExistingTableAsync() {\ntableClient.deleteTable().block();\nStepVerifier.create(tableClient.deleteTable())\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteTableWithResponseAsync() {\nfinal int expectedStatusCode = 204;\nStepVerifier.create(tableClient.deleteTableWithResponse())\n.assertNext(response -> {\nassertEquals(expectedStatusCode, response.getStatusCode());\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteNonExistingTableWithResponseAsync() {\nfinal int expectedStatusCode = 404;\ntableClient.deleteTableWithResponse().block();\nStepVerifier.create(tableClient.deleteTableWithResponse())\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteEntityAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nfinal TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);\nassertNotNull(createdEntity, \"'createdEntity' should not be null.\");\nassertNotNull(createdEntity.getETag(), \"'eTag' should not be null.\");\nStepVerifier.create(tableClient.deleteEntity(partitionKeyValue, rowKeyValue))\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteNonExistingEntityAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nStepVerifier.create(tableClient.deleteEntity(partitionKeyValue, rowKeyValue))\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteEntityWithResponseAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 204;\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nfinal TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);\nassertNotNull(createdEntity, \"'createdEntity' should not be null.\");\nassertNotNull(createdEntity.getETag(), \"'eTag' should not be null.\");\nStepVerifier.create(tableClient.deleteEntityWithResponse(createdEntity, false))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteNonExistingEntityWithResponseAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 404;\nStepVerifier.create(tableClient.deleteEntityWithResponse(entity, false))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteEntityWithResponseMatchETagAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode 
= 204;\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nfinal TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);\nassertNotNull(createdEntity, \"'createdEntity' should not be null.\");\nassertNotNull(createdEntity.getETag(), \"'eTag' should not be null.\");\nStepVerifier.create(tableClient.deleteEntityWithResponse(createdEntity, true))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}\n@Test\nvoid getEntityWithResponseAsync() {\ngetEntityWithResponseAsyncImpl(this.tableClient, this.testResourceNamer);\n}\nstatic void getEntityWithResponseAsyncImpl(TableAsyncClient tableClient, TestResourceNamer testResourceNamer) {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 200;\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))\n.assertNext(response -> {\nfinal TableEntity entity = response.getValue();\nassertEquals(expectedStatusCode, response.getStatusCode());\nassertNotNull(entity);\nassertEquals(tableEntity.getPartitionKey(), entity.getPartitionKey());\nassertEquals(tableEntity.getRowKey(), entity.getRowKey());\nassertNotNull(entity.getTimestamp());\nassertNotNull(entity.getETag());\nassertNotNull(entity.getProperties());\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid getEntityWithResponseWithSelectAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\ntableEntity.addProperty(\"Test\", \"Value\");\nfinal int expectedStatusCode = 200;\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nList propertyList = new ArrayList<>();\npropertyList.add(\"Test\");\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, propertyList))\n.assertNext(response -> {\nfinal TableEntity entity = response.getValue();\nassertEquals(expectedStatusCode, response.getStatusCode());\nassertNotNull(entity);\nassertNull(entity.getPartitionKey());\nassertNull(entity.getRowKey());\nassertNull(entity.getTimestamp());\nassertNotNull(entity.getETag());\nassertEquals(entity.getProperties().get(\"Test\"), \"Value\");\n})\n.expectComplete()\n.verify();\n}\n/*@Test\nvoid getEntityWithResponseSubclassAsync() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nbyte[] bytes = new byte[]{1, 2, 3};\nboolean b = true;\nOffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);\ndouble d = 1.23D;\nUUID uuid = UUID.fromString(\"11111111-2222-3333-4444-555555555555\");\nint i = 123;\nlong l = 123L;\nString s = \"Test\";\nSampleEntity.Color color = SampleEntity.Color.GREEN;\nfinal Map props = new HashMap<>();\nprops.put(\"ByteField\", bytes);\nprops.put(\"BooleanField\", b);\nprops.put(\"DateTimeField\", dateTime);\nprops.put(\"DoubleField\", d);\nprops.put(\"UuidField\", uuid);\nprops.put(\"IntField\", i);\nprops.put(\"LongField\", l);\nprops.put(\"StringField\", s);\nprops.put(\"EnumField\", color);\nTableEntity tableEntity = new 
TableEntity(partitionKeyValue, rowKeyValue);\ntableEntity.setProperties(props);\nint expectedStatusCode = 200;\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, SampleEntity.class))\n.assertNext(response -> {\nSampleEntity entity = response.getValue();\nassertEquals(expectedStatusCode, response.getStatusCode());\nassertNotNull(entity);\nassertEquals(tableEntity.getPartitionKey(), entity.getPartitionKey());\nassertEquals(tableEntity.getRowKey(), entity.getRowKey());\nassertNotNull(entity.getTimestamp());\nassertNotNull(entity.getETag());\nassertArrayEquals(bytes, entity.getByteField());\nassertEquals(b, entity.getBooleanField());\nassertTrue(dateTime.isEqual(entity.getDateTimeField()));\nassertEquals(d, entity.getDoubleField());\nassertEquals(0, uuid.compareTo(entity.getUuidField()));\nassertEquals(i, entity.getIntField());\nassertEquals(l, entity.getLongField());\nassertEquals(s, entity.getStringField());\nassertEquals(color, entity.getEnumField());\n})\n.expectComplete()\n.verify();\n}*/\n@Test\nvoid updateEntityWithResponseReplaceAsync() {\nupdateEntityWithResponseAsync(TableEntityUpdateMode.REPLACE);\n}\n@Test\nvoid updateEntityWithResponseMergeAsync() {\nupdateEntityWithResponseAsync(TableEntityUpdateMode.MERGE);\n}\n/**\n* In the case of {@link TableEntityUpdateMode\n* In the case of {@link TableEntityUpdateMode\n*/\nvoid updateEntityWithResponseAsync(TableEntityUpdateMode mode) {\nfinal boolean expectOldProperty = mode == TableEntityUpdateMode.MERGE;\nfinal String partitionKeyValue = testResourceNamer.randomName(\"APartitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"ARowKey\", 20);\nfinal int expectedStatusCode = 204;\nfinal String oldPropertyKey = \"propertyA\";\nfinal String newPropertyKey = \"propertyB\";\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue)\n.addProperty(oldPropertyKey, \"valueA\");\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nfinal TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);\nassertNotNull(createdEntity, \"'createdEntity' should not be null.\");\nassertNotNull(createdEntity.getETag(), \"'eTag' should not be null.\");\ncreatedEntity.getProperties().remove(oldPropertyKey);\ncreatedEntity.addProperty(newPropertyKey, \"valueB\");\nStepVerifier.create(tableClient.updateEntityWithResponse(createdEntity, mode, true))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\nStepVerifier.create(tableClient.getEntity(partitionKeyValue, rowKeyValue))\n.assertNext(entity -> {\nfinal Map properties = entity.getProperties();\nassertTrue(properties.containsKey(newPropertyKey));\nassertEquals(expectOldProperty, properties.containsKey(oldPropertyKey));\n})\n.verifyComplete();\n}\n/*@Test\nvoid updateEntityWithResponseSubclassAsync() {\nString partitionKeyValue = testResourceNamer.randomName(\"APartitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"ARowKey\", 20);\nint expectedStatusCode = 204;\nSingleFieldEntity tableEntity = new SingleFieldEntity(partitionKeyValue, rowKeyValue);\ntableEntity.setSubclassProperty(\"InitialValue\");\ntableClient.createEntity(tableEntity).block(TIMEOUT);\ntableEntity.setSubclassProperty(\"UpdatedValue\");\nStepVerifier.create(tableClient.updateEntityWithResponse(tableEntity, TableEntityUpdateMode.REPLACE, true))\n.assertNext(response -> 
assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\nStepVerifier.create(tableClient.getEntity(partitionKeyValue, rowKeyValue))\n.assertNext(entity -> {\nfinal Map properties = entity.getProperties();\nassertTrue(properties.containsKey(\"SubclassProperty\"));\nassertEquals(\"UpdatedValue\", properties.get(\"SubclassProperty\"));\n})\n.verifyComplete();\n}*/\n@Test\nvoid listEntitiesAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal String rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2)).block(TIMEOUT);\nStepVerifier.create(tableClient.listEntities())\n.expectNextCount(2)\n.thenConsumeWhile(x -> true)\n.expectComplete()\n.verify();\n}\n@Test\nvoid listEntitiesWithFilterAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal String rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\nListEntitiesOptions options = new ListEntitiesOptions().setFilter(\"RowKey eq '\" + rowKeyValue + \"'\");\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2)).block(TIMEOUT);\nStepVerifier.create(tableClient.listEntities(options))\n.assertNext(returnEntity -> {\nassertEquals(partitionKeyValue, returnEntity.getPartitionKey());\nassertEquals(rowKeyValue, returnEntity.getRowKey());\n})\n.expectNextCount(0)\n.thenConsumeWhile(x -> true)\n.expectComplete()\n.verify();\n}\n@Test\nvoid listEntitiesWithSelectAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue)\n.addProperty(\"propertyC\", \"valueC\")\n.addProperty(\"propertyD\", \"valueD\");\nList propertyList = new ArrayList<>();\npropertyList.add(\"propertyC\");\nListEntitiesOptions options = new ListEntitiesOptions()\n.setSelect(propertyList);\ntableClient.createEntity(entity).block(TIMEOUT);\nStepVerifier.create(tableClient.listEntities(options))\n.assertNext(returnEntity -> {\nassertNull(returnEntity.getRowKey());\nassertNull(returnEntity.getPartitionKey());\nassertEquals(\"valueC\", returnEntity.getProperties().get(\"propertyC\"));\nassertNull(returnEntity.getProperties().get(\"propertyD\"));\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid listEntitiesWithTopAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal String rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\nfinal String rowKeyValue3 = testResourceNamer.randomName(\"rowKey\", 20);\nListEntitiesOptions options = new ListEntitiesOptions().setTop(2);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, 
rowKeyValue3)).block(TIMEOUT);\nStepVerifier.create(tableClient.listEntities(options))\n.expectNextCount(2)\n.thenConsumeWhile(x -> true)\n.expectComplete()\n.verify();\n}\n/*@Test\nvoid listEntitiesSubclassAsync() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2)).block(TIMEOUT);\nStepVerifier.create(tableClient.listEntities(SampleEntity.class))\n.expectNextCount(2)\n.thenConsumeWhile(x -> true)\n.expectComplete()\n.verify();\n}*/\n@Test\nvoid submitTransactionAsync() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\nint expectedBatchStatusCode = 202;\nint expectedOperationStatusCode = 204;\nList transactionalBatch = new ArrayList<>();\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue2)));\nfinal Response result =\ntableClient.submitTransactionWithResponse(transactionalBatch).block(TIMEOUT);\nassertNotNull(result);\nassertEquals(expectedBatchStatusCode, result.getStatusCode());\nassertEquals(transactionalBatch.size(), result.getValue().getTransactionActionResponses().size());\nassertEquals(expectedOperationStatusCode,\nresult.getValue().getTransactionActionResponses().get(0).getStatusCode());\nassertEquals(expectedOperationStatusCode,\nresult.getValue().getTransactionActionResponses().get(1).getStatusCode());\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))\n.assertNext(response -> {\nfinal TableEntity entity = response.getValue();\nassertNotNull(entity);\nassertEquals(partitionKeyValue, entity.getPartitionKey());\nassertEquals(rowKeyValue, entity.getRowKey());\nassertNotNull(entity.getTimestamp());\nassertNotNull(entity.getETag());\nassertNotNull(entity.getProperties());\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid submitTransactionAsyncAllActions() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValueCreate = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueUpsertInsert = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueUpsertMerge = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueUpsertReplace = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueUpdateMerge = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueUpdateReplace = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueDelete = testResourceNamer.randomName(\"rowKey\", 20);\nint expectedBatchStatusCode = 202;\nint expectedOperationStatusCode = 204;\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, 
rowKeyValueUpdateReplace)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueDelete)).block(TIMEOUT);\nTableEntity toUpsertMerge = new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge);\ntoUpsertMerge.addProperty(\"Test\", \"MergedValue\");\nTableEntity toUpsertReplace = new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace);\ntoUpsertReplace.addProperty(\"Test\", \"ReplacedValue\");\nTableEntity toUpdateMerge = new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge);\ntoUpdateMerge.addProperty(\"Test\", \"MergedValue\");\nTableEntity toUpdateReplace = new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace);\ntoUpdateReplace.addProperty(\"Test\", \"MergedValue\");\nList transactionalBatch = new ArrayList<>();\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,\nnew TableEntity(partitionKeyValue, rowKeyValueCreate)));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE,\nnew TableEntity(partitionKeyValue, rowKeyValueUpsertInsert)));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE, toUpsertMerge));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_REPLACE, toUpsertReplace));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_MERGE, toUpdateMerge));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_REPLACE, toUpdateReplace));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,\nnew TableEntity(partitionKeyValue, rowKeyValueDelete)));\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.assertNext(response -> {\nassertNotNull(response);\nassertEquals(expectedBatchStatusCode, response.getStatusCode());\nTableTransactionResult result = response.getValue();\nassertEquals(transactionalBatch.size(), result.getTransactionActionResponses().size());\nfor (TableTransactionActionResponse subResponse : result.getTransactionActionResponses()) {\nassertEquals(expectedOperationStatusCode, subResponse.getStatusCode());\n}\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid submitTransactionAsyncWithFailingAction() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\nList transactionalBatch = new ArrayList<>();\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,\nnew TableEntity(partitionKeyValue, rowKeyValue)));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,\nnew TableEntity(partitionKeyValue, rowKeyValue2)));\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.expectErrorMatches(e -> e instanceof TableTransactionFailedException\n&& e.getMessage().contains(\"An action within the operation failed\")\n&& e.getMessage().contains(\"The failed operation was\")\n&& e.getMessage().contains(\"DeleteEntity\")\n&& e.getMessage().contains(\"partitionKey='\" + partitionKeyValue)\n&& e.getMessage().contains(\"rowKey='\" + rowKeyValue2))\n.verify();\n}\n@Test\nvoid submitTransactionAsyncWithSameRowKeys() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nList transactionalBatch = new 
ArrayList<>();\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));\nif (IS_COSMOS_TEST) {\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.expectErrorMatches(e -> e instanceof TableServiceException\n&& e.getMessage().contains(\"Status code 400\")\n&& e.getMessage().contains(\"InvalidDuplicateRow\")\n&& e.getMessage().contains(\"The batch request contains multiple changes with same row key.\")\n&& e.getMessage().contains(\"An entity can appear only once in a batch request.\"))\n.verify();\n} else {\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.expectErrorMatches(e -> e instanceof TableTransactionFailedException\n&& e.getMessage().contains(\"An action within the operation failed\")\n&& e.getMessage().contains(\"The failed operation was\")\n&& e.getMessage().contains(\"CreateEntity\")\n&& e.getMessage().contains(\"partitionKey='\" + partitionKeyValue)\n&& e.getMessage().contains(\"rowKey='\" + rowKeyValue))\n.verify();\n}\n}\n@Test\nvoid submitTransactionAsyncWithDifferentPartitionKeys() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString partitionKeyValue2 = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\nList transactionalBatch = new ArrayList<>();\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue2, rowKeyValue2)));\nif (IS_COSMOS_TEST) {\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.expectErrorMatches(e -> e instanceof TableTransactionFailedException\n&& e.getMessage().contains(\"An action within the operation failed\")\n&& e.getMessage().contains(\"The failed operation was\")\n&& e.getMessage().contains(\"CreateEntity\")\n&& e.getMessage().contains(\"partitionKey='\" + partitionKeyValue)\n&& e.getMessage().contains(\"rowKey='\" + rowKeyValue))\n.verify();\n} else {\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.expectErrorMatches(e -> e instanceof TableTransactionFailedException\n&& e.getMessage().contains(\"An action within the operation failed\")\n&& e.getMessage().contains(\"The failed operation was\")\n&& e.getMessage().contains(\"CreateEntity\")\n&& e.getMessage().contains(\"partitionKey='\" + partitionKeyValue2)\n&& e.getMessage().contains(\"rowKey='\" + rowKeyValue2))\n.verify();\n}\n}\n@Test\npublic void generateSasTokenWithMinimumParameters() {\nfinal OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nfinal TableSasPermission permissions = TableSasPermission.parse(\"r\");\nfinal TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;\nfinal TableSasSignatureValues sasSignatureValues =\nnew TableSasSignatureValues(expiryTime, permissions)\n.setProtocol(protocol)\n.setVersion(TableServiceVersion.V2019_02_02.getVersion());\nfinal String sas = tableClient.generateSas(sasSignatureValues);\nassertTrue(\nsas.startsWith(\n\"sv=2019-02-02\"\n+ \"&se=2021-12-12T00%3A00%3A00Z\"\n+ \"&tn=\" + 
tableClient.getTableName()\n+ \"&sp=r\"\n+ \"&spr=https\"\n+ \"&sig=\"\n)\n);\n}\n@Test\npublic void generateSasTokenWithAllParameters() {\nfinal OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nfinal TableSasPermission permissions = TableSasPermission.parse(\"raud\");\nfinal TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;\nfinal OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);\nfinal TableSasIpRange ipRange = TableSasIpRange.parse(\"a-b\");\nfinal String startPartitionKey = \"startPartitionKey\";\nfinal String startRowKey = \"startRowKey\";\nfinal String endPartitionKey = \"endPartitionKey\";\nfinal String endRowKey = \"endRowKey\";\nfinal TableSasSignatureValues sasSignatureValues =\nnew TableSasSignatureValues(expiryTime, permissions)\n.setProtocol(protocol)\n.setVersion(TableServiceVersion.V2019_02_02.getVersion())\n.setStartTime(startTime)\n.setSasIpRange(ipRange)\n.setStartPartitionKey(startPartitionKey)\n.setStartRowKey(startRowKey)\n.setEndPartitionKey(endPartitionKey)\n.setEndRowKey(endRowKey);\nfinal String sas = tableClient.generateSas(sasSignatureValues);\nassertTrue(\nsas.startsWith(\n\"sv=2019-02-02\"\n+ \"&st=2015-01-01T00%3A00%3A00Z\"\n+ \"&se=2021-12-12T00%3A00%3A00Z\"\n+ \"&tn=\" + tableClient.getTableName()\n+ \"&sp=raud\"\n+ \"&spk=startPartitionKey\"\n+ \"&srk=startRowKey\"\n+ \"&epk=endPartitionKey\"\n+ \"&erk=endRowKey\"\n+ \"&sip=a-b\"\n+ \"&spr=https%2Chttp\"\n+ \"&sig=\"\n)\n);\n}\n@Test\npublic void setAndListAccessPolicies() {\nAssumptions.assumeFalse(IS_COSMOS_TEST,\n\"Setting and listing access policies is not supported on Cosmos endpoints.\");\nOffsetDateTime startTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nOffsetDateTime expiryTime = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nString permissions = \"r\";\nTableAccessPolicy tableAccessPolicy = new TableAccessPolicy()\n.setStartsOn(startTime)\n.setExpiresOn(expiryTime)\n.setPermissions(permissions);\nString id = \"testPolicy\";\nTableSignedIdentifier tableSignedIdentifier = new TableSignedIdentifier(id).setAccessPolicy(tableAccessPolicy);\nStepVerifier.create(tableClient.setAccessPoliciesWithResponse(Collections.singletonList(tableSignedIdentifier)))\n.assertNext(response -> assertEquals(204, response.getStatusCode()))\n.expectComplete()\n.verify();\nStepVerifier.create(tableClient.getAccessPolicies())\n.assertNext(tableAccessPolicies -> {\nassertNotNull(tableAccessPolicies);\nassertNotNull(tableAccessPolicies.getIdentifiers());\nTableSignedIdentifier signedIdentifier = tableAccessPolicies.getIdentifiers().get(0);\nassertNotNull(signedIdentifier);\nTableAccessPolicy accessPolicy = signedIdentifier.getAccessPolicy();\nassertNotNull(accessPolicy);\nassertEquals(startTime, accessPolicy.getStartsOn());\nassertEquals(expiryTime, accessPolicy.getExpiresOn());\nassertEquals(permissions, accessPolicy.getPermissions());\nassertEquals(id, signedIdentifier.getId());\n})\n.expectComplete()\n.verify();\n}\n@Test\npublic void setAndListMultipleAccessPolicies() {\nAssumptions.assumeFalse(IS_COSMOS_TEST,\n\"Setting and listing access policies is not supported on Cosmos endpoints.\");\nOffsetDateTime startTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nOffsetDateTime expiryTime = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nString permissions = \"r\";\nTableAccessPolicy tableAccessPolicy = new 
TableAccessPolicy()\n.setStartsOn(startTime)\n.setExpiresOn(expiryTime)\n.setPermissions(permissions);\nString id1 = \"testPolicy1\";\nString id2 = \"testPolicy2\";\nList tableSignedIdentifiers = new ArrayList<>();\ntableSignedIdentifiers.add(new TableSignedIdentifier(id1).setAccessPolicy(tableAccessPolicy));\ntableSignedIdentifiers.add(new TableSignedIdentifier(id2).setAccessPolicy(tableAccessPolicy));\nStepVerifier.create(tableClient.setAccessPoliciesWithResponse(tableSignedIdentifiers))\n.assertNext(response -> assertEquals(204, response.getStatusCode()))\n.expectComplete()\n.verify();\nStepVerifier.create(tableClient.getAccessPolicies())\n.assertNext(tableAccessPolicies -> {\nassertNotNull(tableAccessPolicies);\nassertNotNull(tableAccessPolicies.getIdentifiers());\nassertEquals(2, tableAccessPolicies.getIdentifiers().size());\nassertEquals(id1, tableAccessPolicies.getIdentifiers().get(0).getId());\nassertEquals(id2, tableAccessPolicies.getIdentifiers().get(1).getId());\nfor (TableSignedIdentifier signedIdentifier : tableAccessPolicies.getIdentifiers()) {\nassertNotNull(signedIdentifier);\nTableAccessPolicy accessPolicy = signedIdentifier.getAccessPolicy();\nassertNotNull(accessPolicy);\nassertEquals(startTime, accessPolicy.getStartsOn());\nassertEquals(expiryTime, accessPolicy.getExpiresOn());\nassertEquals(permissions, accessPolicy.getPermissions());\n}\n})\n.expectComplete()\n.verify();\n}\n}", + "context_after": "class TableAsyncClientTest extends TestBase {\nprivate static final Duration TIMEOUT = Duration.ofSeconds(100);\nprivate static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();\nprivate static final boolean IS_COSMOS_TEST = System.getenv(\"AZURE_TABLES_CONNECTION_STRING\") != null\n&& System.getenv(\"AZURE_TABLES_CONNECTION_STRING\").contains(\"cosmos.azure.com\");\nprivate TableAsyncClient tableClient;\nprivate HttpPipelinePolicy recordPolicy;\nprivate HttpClient playbackClient;\nprivate TableClientBuilder getClientBuilder(String tableName, String connectionString) {\nfinal TableClientBuilder builder = new TableClientBuilder()\n.connectionString(connectionString)\n.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))\n.tableName(tableName);\nif (interceptorManager.isPlaybackMode()) {\nplaybackClient = interceptorManager.getPlaybackClient();\nbuilder.httpClient(playbackClient);\n} else {\nbuilder.httpClient(DEFAULT_HTTP_CLIENT);\nif (!interceptorManager.isLiveMode()) {\nrecordPolicy = interceptorManager.getRecordPolicy();\nbuilder.addPolicy(recordPolicy);\n}\n}\nreturn builder;\n}\n@BeforeAll\nstatic void beforeAll() {\nStepVerifier.setDefaultTimeout(TIMEOUT);\n}\n@AfterAll\nstatic void afterAll() {\nStepVerifier.resetDefaultTimeout();\n}\n@Override\nprotected void beforeTest() {\nfinal String tableName = testResourceNamer.randomName(\"tableName\", 20);\nfinal String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());\ntableClient = getClientBuilder(tableName, connectionString).buildAsyncClient();\ntableClient.createTable().block(TIMEOUT);\n}\n@Test\nvoid createTableAsync() {\nfinal String tableName2 = testResourceNamer.randomName(\"tableName\", 20);\nfinal String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());\nfinal TableAsyncClient tableClient2 = getClientBuilder(tableName2, connectionString).buildAsyncClient();\nStepVerifier.create(tableClient2.createTable())\n.assertNext(Assertions::assertNotNull)\n.expectComplete()\n.verify();\n}\n@Test\nvoid 
createTableWithResponseAsync() {\nfinal String tableName2 = testResourceNamer.randomName(\"tableName\", 20);\nfinal String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());\nfinal TableAsyncClient tableClient2 = getClientBuilder(tableName2, connectionString).buildAsyncClient();\nfinal int expectedStatusCode = 204;\nStepVerifier.create(tableClient2.createTableWithResponse())\n.assertNext(response -> {\nassertEquals(expectedStatusCode, response.getStatusCode());\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid createEntityAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\nStepVerifier.create(tableClient.createEntity(tableEntity))\n.expectComplete()\n.verify();\n}\n@Test\nvoid createEntityWithResponseAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 204;\nStepVerifier.create(tableClient.createEntityWithResponse(entity))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}\n@Test\nvoid createEntityWithAllSupportedDataTypesAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal boolean booleanValue = true;\nfinal byte[] binaryValue = \"Test value\".getBytes();\nfinal Date dateValue = new Date();\nfinal OffsetDateTime offsetDateTimeValue = OffsetDateTime.now();\nfinal double doubleValue = 2.0d;\nfinal UUID guidValue = UUID.randomUUID();\nfinal int int32Value = 1337;\nfinal long int64Value = 1337L;\nfinal String stringValue = \"This is table entity\";\ntableEntity.addProperty(\"BinaryTypeProperty\", binaryValue);\ntableEntity.addProperty(\"BooleanTypeProperty\", booleanValue);\ntableEntity.addProperty(\"DateTypeProperty\", dateValue);\ntableEntity.addProperty(\"OffsetDateTimeTypeProperty\", offsetDateTimeValue);\ntableEntity.addProperty(\"DoubleTypeProperty\", doubleValue);\ntableEntity.addProperty(\"GuidTypeProperty\", guidValue);\ntableEntity.addProperty(\"Int32TypeProperty\", int32Value);\ntableEntity.addProperty(\"Int64TypeProperty\", int64Value);\ntableEntity.addProperty(\"StringTypeProperty\", stringValue);\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))\n.assertNext(response -> {\nfinal TableEntity entity = response.getValue();\nfinal Map properties = entity.getProperties();\nassertTrue(properties.get(\"BinaryTypeProperty\") instanceof byte[]);\nassertTrue(properties.get(\"BooleanTypeProperty\") instanceof Boolean);\nassertTrue(properties.get(\"DateTypeProperty\") instanceof OffsetDateTime);\nassertTrue(properties.get(\"OffsetDateTimeTypeProperty\") instanceof OffsetDateTime);\nassertTrue(properties.get(\"DoubleTypeProperty\") instanceof Double);\nassertTrue(properties.get(\"GuidTypeProperty\") instanceof UUID);\nassertTrue(properties.get(\"Int32TypeProperty\") instanceof Integer);\nassertTrue(properties.get(\"Int64TypeProperty\") instanceof 
Long);\nassertTrue(properties.get(\"StringTypeProperty\") instanceof String);\n})\n.expectComplete()\n.verify();\n}\n/*@Test\nvoid createEntitySubclassAsync() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nbyte[] bytes = new byte[]{1, 2, 3};\nboolean b = true;\nOffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);\ndouble d = 1.23D;\nUUID uuid = UUID.fromString(\"11111111-2222-3333-4444-555555555555\");\nint i = 123;\nlong l = 123L;\nString s = \"Test\";\nSampleEntity.Color color = SampleEntity.Color.GREEN;\nSampleEntity tableEntity = new SampleEntity(partitionKeyValue, rowKeyValue);\ntableEntity.setByteField(bytes);\ntableEntity.setBooleanField(b);\ntableEntity.setDateTimeField(dateTime);\ntableEntity.setDoubleField(d);\ntableEntity.setUuidField(uuid);\ntableEntity.setIntField(i);\ntableEntity.setLongField(l);\ntableEntity.setStringField(s);\ntableEntity.setEnumField(color);\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))\n.assertNext(response -> {\nTableEntity entity = response.getValue();\nassertArrayEquals((byte[]) entity.getProperties().get(\"ByteField\"), bytes);\nassertEquals(entity.getProperties().get(\"BooleanField\"), b);\nassertTrue(dateTime.isEqual((OffsetDateTime) entity.getProperties().get(\"DateTimeField\")));\nassertEquals(entity.getProperties().get(\"DoubleField\"), d);\nassertEquals(0, uuid.compareTo((UUID) entity.getProperties().get(\"UuidField\")));\nassertEquals(entity.getProperties().get(\"IntField\"), i);\nassertEquals(entity.getProperties().get(\"LongField\"), l);\nassertEquals(entity.getProperties().get(\"StringField\"), s);\nassertEquals(entity.getProperties().get(\"EnumField\"), color.name());\n})\n.expectComplete()\n.verify();\n}*/\n@Test\nvoid deleteTableAsync() {\nStepVerifier.create(tableClient.deleteTable())\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteNonExistingTableAsync() {\ntableClient.deleteTable().block();\nStepVerifier.create(tableClient.deleteTable())\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteTableWithResponseAsync() {\nfinal int expectedStatusCode = 204;\nStepVerifier.create(tableClient.deleteTableWithResponse())\n.assertNext(response -> {\nassertEquals(expectedStatusCode, response.getStatusCode());\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteNonExistingTableWithResponseAsync() {\nfinal int expectedStatusCode = 404;\ntableClient.deleteTableWithResponse().block();\nStepVerifier.create(tableClient.deleteTableWithResponse())\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteEntityAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nfinal TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);\nassertNotNull(createdEntity, \"'createdEntity' should not be null.\");\nassertNotNull(createdEntity.getETag(), \"'eTag' should not be null.\");\nStepVerifier.create(tableClient.deleteEntity(partitionKeyValue, rowKeyValue))\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteNonExistingEntityAsync() {\nfinal String 
partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nStepVerifier.create(tableClient.deleteEntity(partitionKeyValue, rowKeyValue))\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteEntityWithResponseAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 204;\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nfinal TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);\nassertNotNull(createdEntity, \"'createdEntity' should not be null.\");\nassertNotNull(createdEntity.getETag(), \"'eTag' should not be null.\");\nStepVerifier.create(tableClient.deleteEntityWithResponse(createdEntity, false))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteNonExistingEntityWithResponseAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 404;\nStepVerifier.create(tableClient.deleteEntityWithResponse(entity, false))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}\n@Test\nvoid deleteEntityWithResponseMatchETagAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 204;\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nfinal TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);\nassertNotNull(createdEntity, \"'createdEntity' should not be null.\");\nassertNotNull(createdEntity.getETag(), \"'eTag' should not be null.\");\nStepVerifier.create(tableClient.deleteEntityWithResponse(createdEntity, true))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\n}\n@Test\nvoid getEntityWithResponseAsync() {\ngetEntityWithResponseAsyncImpl(this.tableClient, this.testResourceNamer);\n}\nstatic void getEntityWithResponseAsyncImpl(TableAsyncClient tableClient, TestResourceNamer testResourceNamer) {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\nfinal int expectedStatusCode = 200;\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))\n.assertNext(response -> {\nfinal TableEntity entity = response.getValue();\nassertEquals(expectedStatusCode, response.getStatusCode());\nassertNotNull(entity);\nassertEquals(tableEntity.getPartitionKey(), entity.getPartitionKey());\nassertEquals(tableEntity.getRowKey(), 
entity.getRowKey());\nassertNotNull(entity.getTimestamp());\nassertNotNull(entity.getETag());\nassertNotNull(entity.getProperties());\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid getEntityWithResponseWithSelectAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\ntableEntity.addProperty(\"Test\", \"Value\");\nfinal int expectedStatusCode = 200;\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nList propertyList = new ArrayList<>();\npropertyList.add(\"Test\");\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, propertyList))\n.assertNext(response -> {\nfinal TableEntity entity = response.getValue();\nassertEquals(expectedStatusCode, response.getStatusCode());\nassertNotNull(entity);\nassertNull(entity.getPartitionKey());\nassertNull(entity.getRowKey());\nassertNull(entity.getTimestamp());\nassertNotNull(entity.getETag());\nassertEquals(entity.getProperties().get(\"Test\"), \"Value\");\n})\n.expectComplete()\n.verify();\n}\n/*@Test\nvoid getEntityWithResponseSubclassAsync() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nbyte[] bytes = new byte[]{1, 2, 3};\nboolean b = true;\nOffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);\ndouble d = 1.23D;\nUUID uuid = UUID.fromString(\"11111111-2222-3333-4444-555555555555\");\nint i = 123;\nlong l = 123L;\nString s = \"Test\";\nSampleEntity.Color color = SampleEntity.Color.GREEN;\nfinal Map props = new HashMap<>();\nprops.put(\"ByteField\", bytes);\nprops.put(\"BooleanField\", b);\nprops.put(\"DateTimeField\", dateTime);\nprops.put(\"DoubleField\", d);\nprops.put(\"UuidField\", uuid);\nprops.put(\"IntField\", i);\nprops.put(\"LongField\", l);\nprops.put(\"StringField\", s);\nprops.put(\"EnumField\", color);\nTableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);\ntableEntity.setProperties(props);\nint expectedStatusCode = 200;\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, SampleEntity.class))\n.assertNext(response -> {\nSampleEntity entity = response.getValue();\nassertEquals(expectedStatusCode, response.getStatusCode());\nassertNotNull(entity);\nassertEquals(tableEntity.getPartitionKey(), entity.getPartitionKey());\nassertEquals(tableEntity.getRowKey(), entity.getRowKey());\nassertNotNull(entity.getTimestamp());\nassertNotNull(entity.getETag());\nassertArrayEquals(bytes, entity.getByteField());\nassertEquals(b, entity.getBooleanField());\nassertTrue(dateTime.isEqual(entity.getDateTimeField()));\nassertEquals(d, entity.getDoubleField());\nassertEquals(0, uuid.compareTo(entity.getUuidField()));\nassertEquals(i, entity.getIntField());\nassertEquals(l, entity.getLongField());\nassertEquals(s, entity.getStringField());\nassertEquals(color, entity.getEnumField());\n})\n.expectComplete()\n.verify();\n}*/\n@Test\nvoid updateEntityWithResponseReplaceAsync() {\nupdateEntityWithResponseAsync(TableEntityUpdateMode.REPLACE);\n}\n@Test\nvoid updateEntityWithResponseMergeAsync() {\nupdateEntityWithResponseAsync(TableEntityUpdateMode.MERGE);\n}\n/**\n* In the case of {@link TableEntityUpdateMode\n* In the case of {@link TableEntityUpdateMode\n*/\nvoid 
updateEntityWithResponseAsync(TableEntityUpdateMode mode) {\nfinal boolean expectOldProperty = mode == TableEntityUpdateMode.MERGE;\nfinal String partitionKeyValue = testResourceNamer.randomName(\"APartitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"ARowKey\", 20);\nfinal int expectedStatusCode = 204;\nfinal String oldPropertyKey = \"propertyA\";\nfinal String newPropertyKey = \"propertyB\";\nfinal TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue)\n.addProperty(oldPropertyKey, \"valueA\");\ntableClient.createEntity(tableEntity).block(TIMEOUT);\nfinal TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);\nassertNotNull(createdEntity, \"'createdEntity' should not be null.\");\nassertNotNull(createdEntity.getETag(), \"'eTag' should not be null.\");\ncreatedEntity.getProperties().remove(oldPropertyKey);\ncreatedEntity.addProperty(newPropertyKey, \"valueB\");\nStepVerifier.create(tableClient.updateEntityWithResponse(createdEntity, mode, true))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\nStepVerifier.create(tableClient.getEntity(partitionKeyValue, rowKeyValue))\n.assertNext(entity -> {\nfinal Map properties = entity.getProperties();\nassertTrue(properties.containsKey(newPropertyKey));\nassertEquals(expectOldProperty, properties.containsKey(oldPropertyKey));\n})\n.verifyComplete();\n}\n/*@Test\nvoid updateEntityWithResponseSubclassAsync() {\nString partitionKeyValue = testResourceNamer.randomName(\"APartitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"ARowKey\", 20);\nint expectedStatusCode = 204;\nSingleFieldEntity tableEntity = new SingleFieldEntity(partitionKeyValue, rowKeyValue);\ntableEntity.setSubclassProperty(\"InitialValue\");\ntableClient.createEntity(tableEntity).block(TIMEOUT);\ntableEntity.setSubclassProperty(\"UpdatedValue\");\nStepVerifier.create(tableClient.updateEntityWithResponse(tableEntity, TableEntityUpdateMode.REPLACE, true))\n.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))\n.expectComplete()\n.verify();\nStepVerifier.create(tableClient.getEntity(partitionKeyValue, rowKeyValue))\n.assertNext(entity -> {\nfinal Map properties = entity.getProperties();\nassertTrue(properties.containsKey(\"SubclassProperty\"));\nassertEquals(\"UpdatedValue\", properties.get(\"SubclassProperty\"));\n})\n.verifyComplete();\n}*/\n@Test\nvoid listEntitiesAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal String rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2)).block(TIMEOUT);\nStepVerifier.create(tableClient.listEntities())\n.expectNextCount(2)\n.thenConsumeWhile(x -> true)\n.expectComplete()\n.verify();\n}\n@Test\nvoid listEntitiesWithFilterAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal String rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\nListEntitiesOptions options = new ListEntitiesOptions().setFilter(\"RowKey eq '\" + rowKeyValue + \"'\");\ntableClient.createEntity(new TableEntity(partitionKeyValue, 
rowKeyValue)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2)).block(TIMEOUT);\nStepVerifier.create(tableClient.listEntities(options))\n.assertNext(returnEntity -> {\nassertEquals(partitionKeyValue, returnEntity.getPartitionKey());\nassertEquals(rowKeyValue, returnEntity.getRowKey());\n})\n.expectNextCount(0)\n.thenConsumeWhile(x -> true)\n.expectComplete()\n.verify();\n}\n@Test\nvoid listEntitiesWithSelectAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue)\n.addProperty(\"propertyC\", \"valueC\")\n.addProperty(\"propertyD\", \"valueD\");\nList propertyList = new ArrayList<>();\npropertyList.add(\"propertyC\");\nListEntitiesOptions options = new ListEntitiesOptions()\n.setSelect(propertyList);\ntableClient.createEntity(entity).block(TIMEOUT);\nStepVerifier.create(tableClient.listEntities(options))\n.assertNext(returnEntity -> {\nassertNull(returnEntity.getRowKey());\nassertNull(returnEntity.getPartitionKey());\nassertEquals(\"valueC\", returnEntity.getProperties().get(\"propertyC\"));\nassertNull(returnEntity.getProperties().get(\"propertyD\"));\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid listEntitiesWithTopAsync() {\nfinal String partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nfinal String rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nfinal String rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\nfinal String rowKeyValue3 = testResourceNamer.randomName(\"rowKey\", 20);\nListEntitiesOptions options = new ListEntitiesOptions().setTop(2);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue3)).block(TIMEOUT);\nStepVerifier.create(tableClient.listEntities(options))\n.expectNextCount(2)\n.thenConsumeWhile(x -> true)\n.expectComplete()\n.verify();\n}\n/*@Test\nvoid listEntitiesSubclassAsync() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2)).block(TIMEOUT);\nStepVerifier.create(tableClient.listEntities(SampleEntity.class))\n.expectNextCount(2)\n.thenConsumeWhile(x -> true)\n.expectComplete()\n.verify();\n}*/\n@Test\nvoid submitTransactionAsync() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\nint expectedBatchStatusCode = 202;\nint expectedOperationStatusCode = 204;\nList transactionalBatch = new ArrayList<>();\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue2)));\nfinal Response result 
=\ntableClient.submitTransactionWithResponse(transactionalBatch).block(TIMEOUT);\nassertNotNull(result);\nassertEquals(expectedBatchStatusCode, result.getStatusCode());\nassertEquals(transactionalBatch.size(), result.getValue().getTransactionActionResponses().size());\nassertEquals(expectedOperationStatusCode,\nresult.getValue().getTransactionActionResponses().get(0).getStatusCode());\nassertEquals(expectedOperationStatusCode,\nresult.getValue().getTransactionActionResponses().get(1).getStatusCode());\nStepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))\n.assertNext(response -> {\nfinal TableEntity entity = response.getValue();\nassertNotNull(entity);\nassertEquals(partitionKeyValue, entity.getPartitionKey());\nassertEquals(rowKeyValue, entity.getRowKey());\nassertNotNull(entity.getTimestamp());\nassertNotNull(entity.getETag());\nassertNotNull(entity.getProperties());\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid submitTransactionAsyncAllActions() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValueCreate = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueUpsertInsert = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueUpsertMerge = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueUpsertReplace = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueUpdateMerge = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueUpdateReplace = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValueDelete = testResourceNamer.randomName(\"rowKey\", 20);\nint expectedBatchStatusCode = 202;\nint expectedOperationStatusCode = 204;\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace)).block(TIMEOUT);\ntableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueDelete)).block(TIMEOUT);\nTableEntity toUpsertMerge = new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge);\ntoUpsertMerge.addProperty(\"Test\", \"MergedValue\");\nTableEntity toUpsertReplace = new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace);\ntoUpsertReplace.addProperty(\"Test\", \"ReplacedValue\");\nTableEntity toUpdateMerge = new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge);\ntoUpdateMerge.addProperty(\"Test\", \"MergedValue\");\nTableEntity toUpdateReplace = new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace);\ntoUpdateReplace.addProperty(\"Test\", \"MergedValue\");\nList transactionalBatch = new ArrayList<>();\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,\nnew TableEntity(partitionKeyValue, rowKeyValueCreate)));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE,\nnew TableEntity(partitionKeyValue, rowKeyValueUpsertInsert)));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE, toUpsertMerge));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_REPLACE, toUpsertReplace));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_MERGE, toUpdateMerge));\ntransactionalBatch.add(new 
TableTransactionAction(TableTransactionActionType.UPDATE_REPLACE, toUpdateReplace));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,\nnew TableEntity(partitionKeyValue, rowKeyValueDelete)));\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.assertNext(response -> {\nassertNotNull(response);\nassertEquals(expectedBatchStatusCode, response.getStatusCode());\nTableTransactionResult result = response.getValue();\nassertEquals(transactionalBatch.size(), result.getTransactionActionResponses().size());\nfor (TableTransactionActionResponse subResponse : result.getTransactionActionResponses()) {\nassertEquals(expectedOperationStatusCode, subResponse.getStatusCode());\n}\n})\n.expectComplete()\n.verify();\n}\n@Test\nvoid submitTransactionAsyncWithFailingAction() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\nList transactionalBatch = new ArrayList<>();\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,\nnew TableEntity(partitionKeyValue, rowKeyValue)));\ntransactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,\nnew TableEntity(partitionKeyValue, rowKeyValue2)));\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.expectErrorMatches(e -> e instanceof TableTransactionFailedException\n&& e.getMessage().contains(\"An action within the operation failed\")\n&& e.getMessage().contains(\"The failed operation was\")\n&& e.getMessage().contains(\"DeleteEntity\")\n&& e.getMessage().contains(\"partitionKey='\" + partitionKeyValue)\n&& e.getMessage().contains(\"rowKey='\" + rowKeyValue2))\n.verify();\n}\n@Test\nvoid submitTransactionAsyncWithSameRowKeys() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nList transactionalBatch = new ArrayList<>();\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));\nif (IS_COSMOS_TEST) {\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.expectErrorMatches(e -> e instanceof TableServiceException\n&& e.getMessage().contains(\"Status code 400\")\n&& e.getMessage().contains(\"InvalidDuplicateRow\")\n&& e.getMessage().contains(\"The batch request contains multiple changes with same row key.\")\n&& e.getMessage().contains(\"An entity can appear only once in a batch request.\"))\n.verify();\n} else {\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.expectErrorMatches(e -> e instanceof TableTransactionFailedException\n&& e.getMessage().contains(\"An action within the operation failed\")\n&& e.getMessage().contains(\"The failed operation was\")\n&& e.getMessage().contains(\"CreateEntity\")\n&& e.getMessage().contains(\"partitionKey='\" + partitionKeyValue)\n&& e.getMessage().contains(\"rowKey='\" + rowKeyValue))\n.verify();\n}\n}\n@Test\nvoid submitTransactionAsyncWithDifferentPartitionKeys() {\nString partitionKeyValue = testResourceNamer.randomName(\"partitionKey\", 20);\nString partitionKeyValue2 = testResourceNamer.randomName(\"partitionKey\", 
20);\nString rowKeyValue = testResourceNamer.randomName(\"rowKey\", 20);\nString rowKeyValue2 = testResourceNamer.randomName(\"rowKey\", 20);\nList transactionalBatch = new ArrayList<>();\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));\ntransactionalBatch.add(new TableTransactionAction(\nTableTransactionActionType.CREATE, new TableEntity(partitionKeyValue2, rowKeyValue2)));\nif (IS_COSMOS_TEST) {\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.expectErrorMatches(e -> e instanceof TableTransactionFailedException\n&& e.getMessage().contains(\"An action within the operation failed\")\n&& e.getMessage().contains(\"The failed operation was\")\n&& e.getMessage().contains(\"CreateEntity\")\n&& e.getMessage().contains(\"partitionKey='\" + partitionKeyValue)\n&& e.getMessage().contains(\"rowKey='\" + rowKeyValue))\n.verify();\n} else {\nStepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))\n.expectErrorMatches(e -> e instanceof TableTransactionFailedException\n&& e.getMessage().contains(\"An action within the operation failed\")\n&& e.getMessage().contains(\"The failed operation was\")\n&& e.getMessage().contains(\"CreateEntity\")\n&& e.getMessage().contains(\"partitionKey='\" + partitionKeyValue2)\n&& e.getMessage().contains(\"rowKey='\" + rowKeyValue2))\n.verify();\n}\n}\n@Test\npublic void generateSasTokenWithMinimumParameters() {\nfinal OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nfinal TableSasPermission permissions = TableSasPermission.parse(\"r\");\nfinal TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;\nfinal TableSasSignatureValues sasSignatureValues =\nnew TableSasSignatureValues(expiryTime, permissions)\n.setProtocol(protocol)\n.setVersion(TableServiceVersion.V2019_02_02.getVersion());\nfinal String sas = tableClient.generateSas(sasSignatureValues);\nassertTrue(\nsas.startsWith(\n\"sv=2019-02-02\"\n+ \"&se=2021-12-12T00%3A00%3A00Z\"\n+ \"&tn=\" + tableClient.getTableName()\n+ \"&sp=r\"\n+ \"&spr=https\"\n+ \"&sig=\"\n)\n);\n}\n@Test\npublic void generateSasTokenWithAllParameters() {\nfinal OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nfinal TableSasPermission permissions = TableSasPermission.parse(\"raud\");\nfinal TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;\nfinal OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);\nfinal TableSasIpRange ipRange = TableSasIpRange.parse(\"a-b\");\nfinal String startPartitionKey = \"startPartitionKey\";\nfinal String startRowKey = \"startRowKey\";\nfinal String endPartitionKey = \"endPartitionKey\";\nfinal String endRowKey = \"endRowKey\";\nfinal TableSasSignatureValues sasSignatureValues =\nnew TableSasSignatureValues(expiryTime, permissions)\n.setProtocol(protocol)\n.setVersion(TableServiceVersion.V2019_02_02.getVersion())\n.setStartTime(startTime)\n.setSasIpRange(ipRange)\n.setStartPartitionKey(startPartitionKey)\n.setStartRowKey(startRowKey)\n.setEndPartitionKey(endPartitionKey)\n.setEndRowKey(endRowKey);\nfinal String sas = tableClient.generateSas(sasSignatureValues);\nassertTrue(\nsas.startsWith(\n\"sv=2019-02-02\"\n+ \"&st=2015-01-01T00%3A00%3A00Z\"\n+ \"&se=2021-12-12T00%3A00%3A00Z\"\n+ \"&tn=\" + tableClient.getTableName()\n+ \"&sp=raud\"\n+ \"&spk=startPartitionKey\"\n+ \"&srk=startRowKey\"\n+ \"&epk=endPartitionKey\"\n+ \"&erk=endRowKey\"\n+ 
\"&sip=a-b\"\n+ \"&spr=https%2Chttp\"\n+ \"&sig=\"\n)\n);\n}\n@Test\n@Test\npublic void setAndListAccessPolicies() {\nAssumptions.assumeFalse(IS_COSMOS_TEST,\n\"Setting and listing access policies is not supported on Cosmos endpoints.\");\nOffsetDateTime startTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nOffsetDateTime expiryTime = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nString permissions = \"r\";\nTableAccessPolicy tableAccessPolicy = new TableAccessPolicy()\n.setStartsOn(startTime)\n.setExpiresOn(expiryTime)\n.setPermissions(permissions);\nString id = \"testPolicy\";\nTableSignedIdentifier tableSignedIdentifier = new TableSignedIdentifier(id).setAccessPolicy(tableAccessPolicy);\nStepVerifier.create(tableClient.setAccessPoliciesWithResponse(Collections.singletonList(tableSignedIdentifier)))\n.assertNext(response -> assertEquals(204, response.getStatusCode()))\n.expectComplete()\n.verify();\nStepVerifier.create(tableClient.getAccessPolicies())\n.assertNext(tableAccessPolicies -> {\nassertNotNull(tableAccessPolicies);\nassertNotNull(tableAccessPolicies.getIdentifiers());\nTableSignedIdentifier signedIdentifier = tableAccessPolicies.getIdentifiers().get(0);\nassertNotNull(signedIdentifier);\nTableAccessPolicy accessPolicy = signedIdentifier.getAccessPolicy();\nassertNotNull(accessPolicy);\nassertEquals(startTime, accessPolicy.getStartsOn());\nassertEquals(expiryTime, accessPolicy.getExpiresOn());\nassertEquals(permissions, accessPolicy.getPermissions());\nassertEquals(id, signedIdentifier.getId());\n})\n.expectComplete()\n.verify();\n}\n@Test\npublic void setAndListMultipleAccessPolicies() {\nAssumptions.assumeFalse(IS_COSMOS_TEST,\n\"Setting and listing access policies is not supported on Cosmos endpoints\");\nOffsetDateTime startTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nOffsetDateTime expiryTime = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);\nString permissions = \"r\";\nTableAccessPolicy tableAccessPolicy = new TableAccessPolicy()\n.setStartsOn(startTime)\n.setExpiresOn(expiryTime)\n.setPermissions(permissions);\nString id1 = \"testPolicy1\";\nString id2 = \"testPolicy2\";\nList tableSignedIdentifiers = new ArrayList<>();\ntableSignedIdentifiers.add(new TableSignedIdentifier(id1).setAccessPolicy(tableAccessPolicy));\ntableSignedIdentifiers.add(new TableSignedIdentifier(id2).setAccessPolicy(tableAccessPolicy));\nStepVerifier.create(tableClient.setAccessPoliciesWithResponse(tableSignedIdentifiers))\n.assertNext(response -> assertEquals(204, response.getStatusCode()))\n.expectComplete()\n.verify();\nStepVerifier.create(tableClient.getAccessPolicies())\n.assertNext(tableAccessPolicies -> {\nassertNotNull(tableAccessPolicies);\nassertNotNull(tableAccessPolicies.getIdentifiers());\nassertEquals(2, tableAccessPolicies.getIdentifiers().size());\nassertEquals(id1, tableAccessPolicies.getIdentifiers().get(0).getId());\nassertEquals(id2, tableAccessPolicies.getIdentifiers().get(1).getId());\nfor (TableSignedIdentifier signedIdentifier : tableAccessPolicies.getIdentifiers()) {\nassertNotNull(signedIdentifier);\nTableAccessPolicy accessPolicy = signedIdentifier.getAccessPolicy();\nassertNotNull(accessPolicy);\nassertEquals(startTime, accessPolicy.getStartsOn());\nassertEquals(expiryTime, accessPolicy.getExpiresOn());\nassertEquals(permissions, accessPolicy.getPermissions());\n}\n})\n.expectComplete()\n.verify();\n}\n}" + }, + { + "comment": "I have followed the writing style of other methods in this class. 
Although I did consider using refresh materialized view instead, that style would not be uniform with the rest of the class, and I am also worried it could introduce other problems.", + "method_body": "public void testRangePartitionWithJDBCTableUseStr2DateWithMaxValue() throws Exception {\nMockedMetadataMgr metadataMgr = (MockedMetadataMgr) connectContext.getGlobalStateMgr().getMetadataMgr();\nMockedJDBCMetadata mockedJDBCMetadata =\n(MockedJDBCMetadata) metadataMgr.getOptionalMetadata(MockedJDBCMetadata.MOCKED_JDBC_CATALOG_NAME).get();\nmockedJDBCMetadata.initPartitions();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv5\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(3, partitions.size());\nAssert.assertNotNull(materializedView.getPartition(\"p20230801\"));\nAssert.assertNotNull(materializedView.getPartition(\"p20230802\"));\nAssert.assertNotNull(materializedView.getPartition(\"pmaxvalue\"));\n}", + "target_code": "taskRunProperties.put(PARTITION_START, \"20230801\");", + "method_body_after": "public void testRangePartitionWithJDBCTableUseStr2DateWithMaxValue() throws Exception {\nMockedMetadataMgr metadataMgr = (MockedMetadataMgr) connectContext.getGlobalStateMgr().getMetadataMgr();\nMockedJDBCMetadata mockedJDBCMetadata =\n(MockedJDBCMetadata) metadataMgr.getOptionalMetadata(MockedJDBCMetadata.MOCKED_JDBC_CATALOG_NAME).get();\nmockedJDBCMetadata.initPartitions();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv5\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(3, partitions.size());\nAssert.assertNotNull(materializedView.getPartition(\"p20230801\"));\nAssert.assertNotNull(materializedView.getPartition(\"p20230802\"));\nAssert.assertNotNull(materializedView.getPartition(\"pmaxvalue\"));\n}", + "context_before": "class PartitionBasedMvRefreshProcessorTest {\nprivate static ConnectContext connectContext;\nprivate static StarRocksAssert starRocksAssert;\n@BeforeClass\npublic static void beforeClass() throws Exception {\nFeConstants.runningUnitTest = true;\nConfig.enable_experimental_mv = true;\nUtFrameUtils.createMinStarRocksCluster();\nconnectContext = UtFrameUtils.createDefaultCtx();\nConnectorPlanTestBase.mockCatalog(connectContext);\nstarRocksAssert = new StarRocksAssert(connectContext);\nif (!starRocksAssert.databaseExist(\"_statistics_\")) {\nStatisticsMetaManager m = new 
StatisticsMetaManager();\nm.createStatisticsTablesForTest();\n}\nstarRocksAssert.withDatabase(\"test\");\nstarRocksAssert.useDatabase(\"test\");\nstarRocksAssert.withTable(\"CREATE TABLE test.tbl1\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(k1)\\n\" +\n\"(\\n\" +\n\" PARTITION p0 values [('2021-12-01'),('2022-01-01')),\\n\" +\n\" PARTITION p1 values [('2022-01-01'),('2022-02-01')),\\n\" +\n\" PARTITION p2 values [('2022-02-01'),('2022-03-01')),\\n\" +\n\" PARTITION p3 values [('2022-03-01'),('2022-04-01')),\\n\" +\n\" PARTITION p4 values [('2022-04-01'),('2022-05-01'))\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withTable(\"CREATE TABLE test.tbl2\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(k1)\\n\" +\n\"(\\n\" +\n\" PARTITION p1 values less than('2022-02-01'),\\n\" +\n\" PARTITION p2 values less than('2022-03-01')\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withTable(\"CREATE TABLE test.tbl3\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withTable(\"CREATE TABLE test.tbl4\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(k1)\\n\" +\n\"(\\n\" +\n\" PARTITION p0 values [('2021-12-01'),('2022-01-01')),\\n\" +\n\" PARTITION p1 values [('2022-01-01'),('2022-02-01')),\\n\" +\n\" PARTITION p2 values [('2022-02-01'),('2022-03-01')),\\n\" +\n\" PARTITION p3 values [('2022-03-01'),('2022-04-01')),\\n\" +\n\" PARTITION p4 values [('2022-04-01'),('2022-05-01'))\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withTable(\"CREATE TABLE test.tbl5\\n\" +\n\"(\\n\" +\n\" dt date,\\n\" +\n\" k1 datetime,\\n\" +\n\" k2 int,\\n\" +\n\" k3 bigint\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(dt)\\n\" +\n\"(\\n\" +\n\" PARTITION p0 values [('2021-12-01'),('2022-01-01')),\\n\" +\n\" PARTITION p1 values [('2022-01-01'),('2022-02-01')),\\n\" +\n\" PARTITION p2 values [('2022-02-01'),('2022-03-01')),\\n\" +\n\" PARTITION p3 values [('2022-03-01'),('2022-04-01')),\\n\" +\n\" PARTITION p4 values [('2022-04-01'),('2022-05-01'))\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withTable(\"CREATE TABLE test.tbl6\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(k1)\\n\" +\n\"(\\n\" +\n\" PARTITION p0 values [('2021-12-01'),('2022-01-01')),\\n\" +\n\" PARTITION p1 values [('2022-01-01'),('2022-02-01')),\\n\" +\n\" PARTITION p2 values [('2022-02-01'),('2022-03-01')),\\n\" +\n\" PARTITION p3 values [('2022-03-01'),('2022-04-01')),\\n\" +\n\" PARTITION p4 values [('2022-04-01'),('2022-05-01'))\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withMaterializedView(\"create materialized view test.union_all_mv\\n\" +\n\"partition by dt \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh manual\\n\" +\n\"properties('replication_num' = '1')\\n\" +\n\"as select dt, -1 as k2 from tbl5 where k2 is null union all select dt, k2 from tbl5;\")\n.withMaterializedView(\"create materialized view test.mv1\\n\" +\n\"partition by 
date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1')\\n\" +\n\"as select tbl1.k1, tbl2.k2 from tbl1 join tbl2 on tbl1.k2 = tbl2.k2;\")\n.withMaterializedView(\"create materialized view test.mv2\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1')\\n\" +\n\"as select tbl4.k1, tbl4.k2 from tbl4;\")\n.withMaterializedView(\"create materialized view test.mv_inactive\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1')\\n\" +\n\"as select tbl1.k1, tbl2.k2 from tbl1 join tbl2 on tbl1.k2 = tbl2.k2;\")\n.withMaterializedView(\"create materialized view test.mv_without_partition\\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1')\\n\" +\n\"as select k2, sum(v1) as total_sum from tbl3 group by k2;\")\n.withTable(\"CREATE TABLE test.base\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(k1)\\n\" +\n\"(\\n\" +\n\" PARTITION p0 values [('2021-12-01'),('2022-01-01')),\\n\" +\n\" PARTITION p1 values [('2022-01-01'),('2022-02-01')),\\n\" +\n\" PARTITION p2 values [('2022-02-01'),('2022-03-01')),\\n\" +\n\" PARTITION p3 values [('2022-03-01'),('2022-04-01')),\\n\" +\n\" PARTITION p4 values [('2022-04-01'),('2022-05-01'))\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withMaterializedView(\"create materialized view test.mv_with_test_refresh\\n\" +\n\"partition by k1\\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"as select k1, k2, sum(v1) as total_sum from base group by k1, k2;\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_parttbl_mv`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a;\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_mul_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate`, sum(l_extendedprice) as total_price FROM \" +\n\"`hive0`.`partitioned_db`.`lineitem_mul_par` as a group by `l_orderkey`, `l_suppkey`, `l_shipdate`;\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_mul_parttbl_mv2`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_date`)\\n\" +\n\"DISTRIBUTED BY HASH(`par_col`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT c1, c2, par_date, par_col FROM `hive0`.`partitioned_db`.`t1_par`;\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW 
`test`.`hive_join_mv`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_date`)\\n\" +\n\"DISTRIBUTED BY HASH(`c1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t1.c1, t1.c2, t1_par.par_col, t1_par.par_date FROM `hive0`.`partitioned_db`.`t1` join \" +\n\"`hive0`.`partitioned_db`.`t1_par` using (par_col)\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`jdbc_parttbl_mv0`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`d`)\\n\" +\n\"DISTRIBUTED BY HASH(`a`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `a`, `b`, `c`, `d` FROM `jdbc0`.`partitioned_db0`.`tbl0`;\")\n.withMaterializedView(\"create materialized view jdbc_parttbl_mv1 \" +\n\"partition by ss \" +\n\"distributed by hash(a) buckets 10 \" +\n\"REFRESH DEFERRED MANUAL \" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\") \" +\n\"as select str2date(d,'%Y%m%d') ss, a, b, c from jdbc0.partitioned_db0.tbl1;\")\n.withMaterializedView(\"create materialized view jdbc_parttbl_mv2 \" +\n\"partition by ss \" +\n\"distributed by hash(a) buckets 10 \" +\n\"REFRESH DEFERRED MANUAL \" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\") \" +\n\"as select str2date(d,'%Y%m%d') ss, a, b, c from jdbc0.partitioned_db0.tbl2;\")\n.withMaterializedView(\"create materialized view jdbc_parttbl_mv3 \" +\n\"partition by str2date(d,'%Y%m%d') \" +\n\"distributed by hash(a) buckets 10 \" +\n\"REFRESH DEFERRED MANUAL \" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\") \" +\n\"as select a, b, c, d from jdbc0.partitioned_db0.tbl1;\")\n.withMaterializedView(\"create materialized view jdbc_parttbl_mv5 \" +\n\"partition by str2date(d,'%Y%m%d') \" +\n\"distributed by hash(a) buckets 10 \" +\n\"REFRESH DEFERRED MANUAL \" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\") \" +\n\"as select a, b, c, d from jdbc0.partitioned_db0.tbl3;\");\nnew MockUp() {\n@Mock\npublic void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {\nif (stmt instanceof InsertStmt) {\nInsertStmt insertStmt = (InsertStmt) stmt;\nTableName tableName = insertStmt.getTableName();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nOlapTable tbl = ((OlapTable) testDb.getTable(tableName.getTbl()));\nif (tbl != null) {\nfor (Partition partition : tbl.getPartitions()) {\nif (insertStmt.getTargetPartitionIds().contains(partition.getId())) {\nsetPartitionVersion(partition, partition.getVisibleVersion() + 1);\n}\n}\n}\n}\n}\n};\n}\nprotected void assertPlanContains(ExecPlan execPlan, String... 
explain) throws Exception {\nString explainString = execPlan.getExplainString(TExplainLevel.NORMAL);\nfor (String expected : explain) {\nAssert.assertTrue(\"expected is: \" + expected + \" but plan is \\n\" + explainString,\nStringUtils.containsIgnoreCase(explainString.toLowerCase(), expected));\n}\n}\n@Test\npublic void testUnionAllMvWithPartition() {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"union_all_mv\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntry {\nString insertSql = \"insert into tbl5 partition(p4) values('2022-04-01', '2021-04-01 00:02:11', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertFalse(plan.contains(\"partitions=5/5\"));\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(e.getMessage());\n}\n}\n@Test\npublic void testWithPartition() {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntry {\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(5, partitions.size());\nString addPartitionSql = \"ALTER TABLE test.tbl1 ADD\\n\" +\n\"PARTITION p5 VALUES [('2022-05-01'),('2022-06-01'))\";\nnew StmtExecutor(connectContext, addPartitionSql).execute();\ntaskRun.executeTaskRun();\npartitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nString dropPartitionSql = \"ALTER TABLE test.tbl1 DROP PARTITION p5\\n\";\nnew StmtExecutor(connectContext, dropPartitionSql).execute();\ntaskRun.executeTaskRun();\npartitions = materializedView.getPartitions();\nAssert.assertEquals(5, partitions.size());\naddPartitionSql = \"ALTER TABLE test.tbl2 ADD PARTITION p3 values less than('2022-04-01')\";\nnew StmtExecutor(connectContext, addPartitionSql).execute();\ntaskRun.executeTaskRun();\npartitions = materializedView.getPartitions();\nAssert.assertEquals(5, partitions.size());\ndropPartitionSql = \"ALTER TABLE test.tbl2 DROP PARTITION p3\";\nnew StmtExecutor(connectContext, dropPartitionSql).execute();\ntaskRun.executeTaskRun();\npartitions = materializedView.getPartitions();\nAssert.assertEquals(5, partitions.size());\ntestBaseTablePartitionInsertData(testDb, materializedView, taskRun);\ntestBaseTablePartitionReplace(testDb, materializedView, taskRun);\ntestBaseTableAddPartitionWhileSync(testDb, materializedView, taskRun);\ntestBaseTableAddPartitionWhileRefresh(testDb, materializedView, taskRun);\ntestBaseTableDropPartitionWhileSync(testDb, materializedView, taskRun);\ntestBaseTableDropPartitionWhileRefresh(testDb, materializedView, taskRun);\ntestBaseTablePartitionRename(taskRun);\ntestRefreshWithFailure(testDb, materializedView, taskRun);\n} catch (Exception e) 
{\ne.printStackTrace();\nAssert.fail(e.getMessage());\n}\n}\n@Test\npublic void testInactive() {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_inactive\"));\nmaterializedView.setInactiveAndReason(\"\");\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntry {\ntaskRun.executeTaskRun();\nAssert.fail(\"should not be here. executeTaskRun will throw exception\");\n} catch (Exception e) {\nAssert.assertTrue(e.getMessage().contains(\"is not active, skip sync partition and data with base tables\"));\n}\n}\n@Test\npublic void testMvWithoutPartition() {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_without_partition\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntry {\ntaskRun.executeTaskRun();\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(\"refresh failed\");\n}\n}\n@Test\npublic void testRangePartitionRefresh() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv2\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"2022-01-03\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"2022-02-05\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nString insertSql = \"insert into tbl4 partition(p1) values('2022-01-02',2,10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nAssert.assertEquals(1, materializedView.getPartition(\"p202112_202201\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202201_202202\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202202_202203\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202203_202204\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202204_202205\").getVisibleVersion());\ntaskRunProperties.put(PARTITION_START, \"2021-12-03\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"2022-04-05\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\ntaskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nAssert.assertEquals(1, materializedView.getPartition(\"p202112_202201\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202201_202202\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202202_202203\").getVisibleVersion());\nAssert.assertEquals(1, 
materializedView.getPartition(\"p202203_202204\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202204_202205\").getVisibleVersion());\ntaskRunProperties.put(PARTITION_START, \"2021-12-03\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"2022-03-01\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\ninsertSql = \"insert into tbl4 partition(p3) values('2022-03-02',21,102);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ninsertSql = \"insert into tbl4 partition(p0) values('2021-12-02',81,182);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nAssert.assertEquals(2, materializedView.getPartition(\"p202112_202201\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202201_202202\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202202_202203\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202203_202204\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202204_202205\").getVisibleVersion());\ntaskRunProperties.put(PARTITION_START, \"2021-12-03\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"2022-05-06\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(true));\ntaskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nAssert.assertEquals(3, materializedView.getPartition(\"p202112_202201\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p202201_202202\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202202_202203\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202203_202204\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p202204_202205\").getVisibleVersion());\n}\n@Test\npublic void testRefreshPriority() throws Exception {\nnew MockUp() {\n@Mock\npublic void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {\nif (stmt instanceof InsertStmt) {\nInsertStmt insertStmt = (InsertStmt) stmt;\nTableName tableName = insertStmt.getTableName();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nOlapTable tbl = ((OlapTable) testDb.getTable(tableName.getTbl()));\nfor (Partition partition : tbl.getPartitions()) {\nif (insertStmt.getTargetPartitionIds().contains(partition.getId())) {\nsetPartitionVersion(partition, partition.getVisibleVersion() + 1);\n}\n}\n}\n}\n};\nString mvName = \"mv_refresh_priority\";\nstarRocksAssert.withMaterializedView(\"create materialized view test.mv_refresh_priority\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1', 'partition_refresh_number'='1')\\n\" +\n\"as select k1, k2 from tbl6;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(mvName));\nTaskManager tm = GlobalStateMgr.getCurrentState().getTaskManager();\nTaskRunManager trm = tm.getTaskRunManager();\nString insertSql = \"insert into tbl6 partition(p1) values('2022-01-02',2,10);\";\nnew StmtExecutor(connectContext, 
insertSql).execute();\ninsertSql = \"insert into tbl6 partition(p2) values('2022-02-02',2,10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(true));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nlong taskId = tm.getTask(TaskBuilder.getMvTaskName(materializedView.getId())).getId();\nTaskRun run = tm.getTaskRunManager().getRunnableTaskRun(taskId);\nAssert.assertEquals(Constants.TaskRunPriority.HIGHEST.value(), run.getStatus().getPriority());\nwhile (MapUtils.isNotEmpty(trm.getRunningTaskRunMap())) {\nThread.sleep(100);\n}\n}\n@Test\npublic void testAutoRefreshPartitionLimitWithHiveTable() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nmaterializedView.getTableProperty().setAutoRefreshPartitionsLimit(2);\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\ntask.setType(Constants.TaskType.PERIODICAL);\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nAssert.assertEquals(1, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\ntask.setType(Constants.TaskType.MANUAL);\ntaskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), 
System.currentTimeMillis());\ntaskRun.executeTaskRun();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\n@Test\npublic void testRefreshWithHiveTableJoin() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_join_mv\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"4:HASH JOIN\"));\n}\n@Test\npublic void testAutoPartitionRefreshWithPartitionChanged() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.addPartition(\"partitioned_db\", \"lineitem_par\", \"l_shipdate=1998-01-06\");\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=1/7\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(7, partitions.size());\nmockedHiveMetadata.dropPartition(\"partitioned_db\", \"lineitem_par\", \"l_shipdate=1998-01-06\");\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\n@Test\npublic void 
testAutoPartitionRefreshWithHiveTableJoin1() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_join_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_date`)\\n\" +\n\"DISTRIBUTED BY HASH(`c1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t1.c1, t1.c2, t1_par.par_col, t1_par.par_date FROM `hive0`.`partitioned_db`.`t1` join \" +\n\"`hive0`.`partitioned_db`.`t1_par` using (par_col)\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_join_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=7/7\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"t1_par\",\nImmutableList.of(\"par_col=0/par_date=2020-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"par_date >= '2020-01-03', 9: par_date < '2020-01-04'\", \"partitions=2/7\");\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"t1\",\nImmutableList.of(\"par_col=0\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=7/7\", \"partitions=3/3\");\n}\n@Test\npublic void testAutoPartitionRefreshWithHiveTableJoin2() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_join_mv2`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate`,`o_custkey` FROM `hive0`.`partitioned_db`.`lineitem_par` \" +\n\"as a join `hive0`.`tpch`.`orders` on l_orderkey = o_orderkey\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_join_mv2\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = 
mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-04\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"l_shipdate >= '1998-01-04', 16: l_shipdate < '1998-01-05'\",\n\"partitions=1/6\");\nmockedHiveMetadata.updateTable(\"tpch\", \"orders\");\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\", \"partitions=1/1\");\n}\n@Test\npublic void testAutoPartitionRefreshWithUnPartitionedHiveTable() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_tbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`n_nationkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `n_nationkey`, `n_name`, `n_comment` FROM `hive0`.`tpch`.`nation`;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_tbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=1/1\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updateTable(\"tpch\", \"nation\");\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=1/1\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(1, partitions.size());\nAssert.assertEquals(3, materializedView.getPartition(\"hive_tbl_mv1\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_tbl_mv1\");\n}\n@Test\npublic void testAutoPartitionRefreshWithPartitionedHiveTable1() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as 
a;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\", \"PARTITION PREDICATES: (16: l_shipdate < '1998-01-06') \" +\n\"OR (16: l_shipdate IS NULL)\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"PARTITION PREDICATES: 16: l_shipdate >= '1998-01-02', 16: l_shipdate < '1998-01-04'\",\n\"partitions=2/6\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\n@Test\npublic void testAutoPartitionRefreshWithPartitionedHiveTable2() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_tbl_mv2`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_tbl_mv2\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", 
\"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(1, partitions.size());\nAssert.assertEquals(3, materializedView.getPartition(\"hive_tbl_mv2\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_tbl_mv2\");\n}\n@Test\npublic void testAutoPartitionRefreshWithPartitionedHiveTableJoinInternalTable() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\n\"CREATE MATERIALIZED VIEW `hive_join_internal_mv`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a\" +\n\" join test.tbl1 b on a.l_suppkey=b.k2;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_join_internal_mv\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(1, partitions.size());\nAssert.assertEquals(3, materializedView.getPartition(\"hive_join_internal_mv\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_join_internal_mv\");\n}\n@Test\npublic void testPartitionRefreshWithUpperCaseTable() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`LINEITEM_PAR` as \" +\n\"`LINEITEM_PAR_ALIAS`;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) 
testDb.getTable(\"hive_parttbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"PARTITION PREDICATES: 16: l_shipdate >= '1998-01-02', 16: l_shipdate < '1998-01-04'\",\n\"partitions=2/6\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\npublic void testPartitionRefreshWithUpperCaseDb() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_DB`.`LINEITEM_PAR` as \" +\n\"`LINEITEM_PAR_ALIAS`;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) 
taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"PARTITION PREDICATES: 16: l_shipdate >= '1998-01-02', 16: l_shipdate < '1998-01-04'\",\n\"partitions=2/6\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\n@Test\npublic void testPartitionRefreshWithLowerCase() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\n\"CREATE MATERIALIZED VIEW `test`.`hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_col`)\\n\" +\n\"DISTRIBUTED BY HASH(`c1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT c1, c2, par_col FROM `hive0`.`partitioned_db2`.`t2`;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=3/3\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db2\", \"t2\",\nImmutableList.of(\"par_col=0\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"par_col >= 0, 4: par_col < 1\", \"partitions=1/3\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(3, partitions.size());\nAssert.assertEquals(3, materializedView.getPartition(\"p0\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p1\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p2\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\n@Test\npublic void testRangePartitionRefreshWithHiveTable() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"1998-01-01\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"1998-01-03\");\ntaskRunProperties.put(TaskRun.FORCE, 
Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"PARTITION PREDICATES: 16: l_shipdate >= '1998-01-01', \" +\n\"16: l_shipdate < '1998-01-03'\"));\nAssert.assertTrue(plan.contains(\"partitions=2/6\"));\n}\n@Test\npublic void testRefreshPartitionWithMulParColumnsHiveTable1() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_mul_parttbl_mv1\"));\nMap mvProperties = Maps.newHashMap();\nmvProperties.put(PARTITION_START, \"1998-01-01\");\nmvProperties.put(PARTITION_END, \"1998-01-03\");\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(mvProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"PARTITION PREDICATES: 15: l_shipdate >= '1998-01-01', 15: l_shipdate < '1998-01-03'\"));\nAssert.assertTrue(plan.contains(\"partitions=5/8\"));\n}\n@Test\npublic void testRefreshPartitionWithMulParColumnsHiveTable2() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_mul_parttbl_mv2\"));\nMap mvProperties = Maps.newHashMap();\nmvProperties.put(PARTITION_START, \"2020-01-01\");\nmvProperties.put(PARTITION_END, \"2020-01-03\");\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(mvProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"PARTITION PREDICATES: 5: par_date >= '2020-01-01', 5: par_date < 
'2020-01-03'\"));\nAssert.assertTrue(plan.contains(\"partitions=3/7\"));\n}\n@Test\npublic void testRangePartitionChangeWithJDBCTable() throws Exception {\nMockedMetadataMgr metadataMgr = (MockedMetadataMgr) connectContext.getGlobalStateMgr().getMetadataMgr();\nMockedJDBCMetadata mockedJDBCMetadata =\n(MockedJDBCMetadata) metadataMgr.getOptionalMetadata(MockedJDBCMetadata.MOCKED_JDBC_CATALOG_NAME).get();\nmockedJDBCMetadata.initPartitions();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv0\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(3, partitions.size());\nAssert.assertNotNull(materializedView.getPartition(\"P20230801\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230802\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230803\"));\nmockedJDBCMetadata.addPartitions();\ntaskRun.executeTaskRun();\nCollection incrementalPartitions = materializedView.getPartitions();\nAssert.assertEquals(4, incrementalPartitions.size());\nAssert.assertNotNull(materializedView.getPartition(\"P20230802\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230803\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230804\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230805\"));\n}\n@Test\npublic void testRangePartitionRefreshWithJDBCTable() throws Exception {\nMockedMetadataMgr metadataMgr = (MockedMetadataMgr) connectContext.getGlobalStateMgr().getMetadataMgr();\nMockedJDBCMetadata mockedJDBCMetadata =\n(MockedJDBCMetadata) metadataMgr.getOptionalMetadata(MockedJDBCMetadata.MOCKED_JDBC_CATALOG_NAME).get();\nmockedJDBCMetadata.initPartitions();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv0\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nlong refreshBeforeVersionTime = materializedView.getPartition(\"P20230803\").getVisibleVersionTime();\nmockedJDBCMetadata.refreshPartitions();\ntaskRun.executeTaskRun();\nlong refreshAfterVersionTime = materializedView.getPartition(\"P20230803\").getVisibleVersionTime();\nAssert.assertNotEquals(refreshBeforeVersionTime, refreshAfterVersionTime);\n}\n@Test\npublic void testRangePartitionWithJDBCTableUseStr2Date() throws Exception {\nMockedMetadataMgr metadataMgr = (MockedMetadataMgr) connectContext.getGlobalStateMgr().getMetadataMgr();\nMockedJDBCMetadata mockedJDBCMetadata =\n(MockedJDBCMetadata) 
metadataMgr.getOptionalMetadata(MockedJDBCMetadata.MOCKED_JDBC_CATALOG_NAME).get();\nmockedJDBCMetadata.initPartitions();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv1\"));\nHashMap<String, String> taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection<Partition> partitions = materializedView.getPartitions();\nAssert.assertEquals(3, partitions.size());\nAssert.assertNotNull(materializedView.getPartition(\"p20230801\"));\nAssert.assertNotNull(materializedView.getPartition(\"p20230802\"));\nAssert.assertNotNull(materializedView.getPartition(\"p20230803\"));\n}\n@Test\npublic void testRangePartitionWithJDBCTableUseStr2DateForError() {\ntry {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv2\"));\nHashMap<String, String> taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\n} catch (Exception e) {\nAssert.assertTrue(e.getMessage().contains(\"Text '1234567' could not be parsed\"));\n}\n}\n@Test\npublic void testRangePartitionWithJDBCTableUseStr2Date2() throws Exception {\nMockedMetadataMgr metadataMgr = (MockedMetadataMgr) connectContext.getGlobalStateMgr().getMetadataMgr();\nMockedJDBCMetadata mockedJDBCMetadata =\n(MockedJDBCMetadata) metadataMgr.getOptionalMetadata(MockedJDBCMetadata.MOCKED_JDBC_CATALOG_NAME).get();\nmockedJDBCMetadata.initPartitions();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv3\"));\nHashMap<String, String> taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection<Partition> partitions = materializedView.getPartitions();\nAssert.assertEquals(3, partitions.size());\nAssert.assertNotNull(materializedView.getPartition(\"P20230801\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230802\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230803\"));\n}\n@Test\npublic void testMvWithoutPartitionRefreshTwice() throws Exception {\nfinal AtomicInteger taskRunCounter = new AtomicInteger();\nnew MockUp<StmtExecutor>() {\n@Mock\npublic void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws 
Exception {\ntaskRunCounter.incrementAndGet();\n}\n};\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_without_partition\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\nString insertSql = \"insert into tbl3 values('2021-12-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntry {\nfor (int i = 0; i < 2; i++) {\ntaskRun.executeTaskRun();\n}\nAssert.assertEquals(1, taskRunCounter.get());\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(\"refresh failed\");\n}\n}\n@Test\npublic void testClearQueryInfo() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_without_partition\"));\nnew MockUp<StmtExecutor>() {\n@Mock\npublic void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {\nUUID uuid = UUID.randomUUID();\nTUniqueId loadId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());\nSystem.out.println(\"register query id: \" + DebugUtil.printId(connectContext.getExecutionId()));\nLoadPlanner loadPlanner = new LoadPlanner(1, loadId, 1, 1, materializedView,\nfalse, \"UTC\", 10, System.currentTimeMillis(),\nfalse, connectContext, null, 10,\n10, null, null, null, 1);\nDefaultCoordinator coordinator = new DefaultCoordinator.Factory().createBrokerLoadScheduler(loadPlanner);\n}\n};\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\nString insertSql = \"insert into tbl3 values('2021-12-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nSystem.out.println(\"unregister query id: \" + DebugUtil.printId(connectContext.getExecutionId()));\nAssert.assertNull(QeProcessorImpl.INSTANCE.getCoordinator(connectContext.getExecutionId()));\n}\nprivate void testBaseTablePartitionInsertData(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nString insertSql = \"insert into tbl1 partition(p0) values('2021-12-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ninsertSql = \"insert into tbl1 partition(p1) values('2022-01-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\ntaskRun.executeTaskRun();\nMap<Long, Map<String, MaterializedView.BasePartitionInfo>> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nMaterializedView.BasePartitionInfo basePartitionInfo = baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p0\");\nAssert.assertEquals(2, basePartitionInfo.getVersion());\ninsertSql = \"insert into tbl1 partition(p0) values('2021-12-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap<Long, Map<String, MaterializedView.BasePartitionInfo>> baseTableVisibleVersionMap2 =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nMaterializedView.BasePartitionInfo newP0PartitionInfo = baseTableVisibleVersionMap2.get(tbl1.getId()).get(\"p0\");\nAssert.assertEquals(3, newP0PartitionInfo.getVersion());\nMaterializedView.BasePartitionInfo p1PartitionInfo = 
baseTableVisibleVersionMap2.get(tbl1.getId()).get(\"p1\");\nAssert.assertEquals(2, p1PartitionInfo.getVersion());\n}\nprivate void testRefreshWithFailure(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nString insertSql = \"insert into tbl1 partition(p0) values('2021-12-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\nnew MockUp<PartitionBasedMvRefreshProcessor>() {\n@Mock\npublic void processTaskRun(TaskRunContext context) throws Exception {\nthrow new RuntimeException(\"new exception\");\n}\n};\ntry {\ntaskRun.executeTaskRun();\n} catch (Exception e) {\ne.printStackTrace();\n}\nMap<Long, Map<String, MaterializedView.BasePartitionInfo>> baseTableVisibleVersionMap2 =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nMaterializedView.BasePartitionInfo newP0PartitionInfo = baseTableVisibleVersionMap2.get(tbl1.getId()).get(\"p0\");\nAssert.assertEquals(3, newP0PartitionInfo.getVersion());\n}\npublic void testBaseTablePartitionRename(TaskRun taskRun)\nthrows Exception {\nnew MockUp<PartitionBasedMvRefreshProcessor>() {\n@Mock\nprivate Map<Long, Pair<BaseTableInfo, Table>> collectBaseTables(MaterializedView materializedView) {\nMap<Long, Pair<BaseTableInfo, Table>> olapTables = Maps.newHashMap();\nList<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();\nfor (BaseTableInfo baseTableInfo : baseTableInfos) {\nif (!baseTableInfo.getTable().isOlapTable()) {\ncontinue;\n}\nDatabase baseDb = GlobalStateMgr.getCurrentState().getDb(baseTableInfo.getDbId());\nif (baseDb == null) {\nthrow new SemanticException(\"Materialized view base db: \" +\nbaseTableInfo.getDbId() + \" not exist.\");\n}\nOlapTable olapTable = (OlapTable) baseDb.getTable(baseTableInfo.getTableId());\nif (olapTable == null) {\nthrow new SemanticException(\"Materialized view base table: \" +\nbaseTableInfo.getTableId() + \" not exist.\");\n}\nOlapTable copied = new OlapTable();\nif (!DeepCopy.copy(olapTable, copied, OlapTable.class)) {\nthrow new SemanticException(\"Failed to copy olap table: \" + olapTable.getName());\n}\nolapTables.put(olapTable.getId(), Pair.create(baseTableInfo, copied));\n}\nString renamePartitionSql = \"ALTER TABLE test.tbl1 RENAME PARTITION p1 p1_1\";\ntry {\nnew StmtExecutor(connectContext, renamePartitionSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nreturn olapTables;\n}\n};\nString insertSql = \"insert into tbl1 partition(p1) values('2022-01-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntry {\ntaskRun.executeTaskRun();\n} catch (Exception e) {\nAssert.assertTrue(e.getMessage().contains(\"is not active, skip sync partition and data with base tables\"));\n}\n}\nprivate void testBaseTablePartitionReplace(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nnew MockUp<PartitionBasedMvRefreshProcessor>() {\n@Mock\npublic Map<Long, Pair<BaseTableInfo, Table>> collectBaseTables(MaterializedView materializedView) {\nMap<Long, Pair<BaseTableInfo, Table>> olapTables = Maps.newHashMap();\nList<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();\nfor (BaseTableInfo baseTableInfo : baseTableInfos) {\nif (!baseTableInfo.getTable().isOlapTable()) {\ncontinue;\n}\nDatabase baseDb = GlobalStateMgr.getCurrentState().getDb(baseTableInfo.getDbId());\nif (baseDb == null) {\nthrow new SemanticException(\"Materialized view base db: \" +\nbaseTableInfo.getDbId() + \" not exist.\");\n}\nOlapTable olapTable = (OlapTable) baseDb.getTable(baseTableInfo.getTableId());\nif (olapTable == null) {\nthrow new SemanticException(\"Materialized view base table: \" +\nbaseTableInfo.getTableId() + \" not 
exist.\");\n}\nOlapTable copied = new OlapTable();\nif (!DeepCopy.copy(olapTable, copied, OlapTable.class)) {\nthrow new SemanticException(\"Failed to copy olap table: \" + olapTable.getName());\n}\nolapTables.put(olapTable.getId(), Pair.create(baseTableInfo, olapTable));\n}\ntry {\nString replacePartitionSql = \"ALTER TABLE test.tbl1 REPLACE PARTITION (p3) WITH TEMPORARY PARTITION (tp3)\\n\" +\n\"PROPERTIES (\\n\" +\n\" \\\"strict_range\\\" = \\\"false\\\",\\n\" +\n\" \\\"use_temp_partition_name\\\" = \\\"false\\\"\\n\" +\n\");\";\nnew StmtExecutor(connectContext, replacePartitionSql).execute();\nString insertSql = \"insert into tbl1 partition(p3) values('2021-03-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nreturn olapTables;\n}\n};\nPartition partition = tbl1.getPartition(\"p3\");\nString createTempPartitionSql =\n\"ALTER TABLE test.tbl1 ADD TEMPORARY PARTITION tp3 values [('2022-03-01'),('2022-04-01'))\";\nnew StmtExecutor(connectContext, createTempPartitionSql).execute();\nString insertSql = \"insert into tbl1 partition(p3) values('2021-03-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nMaterializedView.BasePartitionInfo basePartitionInfo = baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p3\");\nAssert.assertNotEquals(partition.getId(), basePartitionInfo.getId());\n}\npublic void testBaseTableAddPartitionWhileSync(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nnew MockUp() {\n@Mock\npublic Map> collectBaseTables(MaterializedView materializedView) {\nMap> olapTables = Maps.newHashMap();\nList baseTableInfos = materializedView.getBaseTableInfos();\nfor (BaseTableInfo baseTableInfo : baseTableInfos) {\nif (!baseTableInfo.getTable().isOlapTable()) {\ncontinue;\n}\nDatabase baseDb = GlobalStateMgr.getCurrentState().getDb(baseTableInfo.getDbId());\nif (baseDb == null) {\nthrow new SemanticException(\"Materialized view base db: \" +\nbaseTableInfo.getDbId() + \" not exist.\");\n}\nOlapTable olapTable = (OlapTable) baseDb.getTable(baseTableInfo.getTableId());\nif (olapTable == null) {\nthrow new SemanticException(\"Materialized view base table: \" +\nbaseTableInfo.getTableId() + \" not exist.\");\n}\nOlapTable copied = new OlapTable();\nif (!DeepCopy.copy(olapTable, copied, OlapTable.class)) {\nthrow new SemanticException(\"Failed to copy olap table: \" + olapTable.getName());\n}\nolapTables.put(olapTable.getId(), Pair.create(baseTableInfo, copied));\n}\nString addPartitionSql = \"ALTER TABLE test.tbl1 ADD PARTITION p99 VALUES [('9999-03-01'),('9999-04-01'))\";\ntry {\nnew StmtExecutor(connectContext, addPartitionSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nString insertSql = \"insert into tbl1 partition(p99) values('9999-03-01', 2, 10);\";\ntry {\nnew StmtExecutor(connectContext, insertSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nreturn olapTables;\n}\n};\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nAssert.assertEquals(3, 
baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p3\").getVersion());\nAssert.assertNotNull(baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p99\"));\nAssert.assertEquals(2, baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p99\").getVersion());\n}\npublic void testBaseTableAddPartitionWhileRefresh(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nnew MockUp<PartitionBasedMvRefreshProcessor>() {\n@Mock\npublic void refreshMaterializedView(MvTaskRunContext mvContext, ExecPlan execPlan,\nInsertStmt insertStmt) throws Exception {\nString addPartitionSql = \"ALTER TABLE test.tbl1 ADD PARTITION p100 VALUES [('9999-04-01'),('9999-05-01'))\";\nString insertSql = \"insert into tbl1 partition(p100) values('9999-04-01', 3, 10);\";\ntry {\nnew StmtExecutor(connectContext, addPartitionSql).execute();\nnew StmtExecutor(connectContext, insertSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nConnectContext ctx = mvContext.getCtx();\nStmtExecutor executor = new StmtExecutor(ctx, insertStmt);\nctx.setExecutor(executor);\nctx.setThreadLocalInfo();\nctx.setStmtId(new AtomicInteger().incrementAndGet());\nctx.setExecutionId(UUIDUtil.toTUniqueId(ctx.getQueryId()));\nexecutor.handleDMLStmt(execPlan, insertStmt);\n}\n};\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap<Long, Map<String, MaterializedView.BasePartitionInfo>> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nAssert.assertEquals(4, baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p3\").getVersion());\nAssert.assertNull(baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p100\"));\n}\npublic void testBaseTableDropPartitionWhileSync(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nnew MockUp<PartitionBasedMvRefreshProcessor>() {\n@Mock\nprivate Map<Long, Pair<BaseTableInfo, Table>> collectBaseTables(MaterializedView materializedView) {\nMap<Long, Pair<BaseTableInfo, Table>> olapTables = Maps.newHashMap();\nList<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();\nfor (BaseTableInfo baseTableInfo : baseTableInfos) {\nif (!baseTableInfo.getTable().isOlapTable()) {\ncontinue;\n}\nDatabase baseDb = GlobalStateMgr.getCurrentState().getDb(baseTableInfo.getDbId());\nif (baseDb == null) {\nthrow new SemanticException(\"Materialized view base db: \" +\nbaseTableInfo.getDbId() + \" not exist.\");\n}\nOlapTable olapTable = (OlapTable) baseDb.getTable(baseTableInfo.getTableId());\nif (olapTable == null) {\nthrow new SemanticException(\"Materialized view base table: \" +\nbaseTableInfo.getTableId() + \" not exist.\");\n}\nOlapTable copied = new OlapTable();\nif (!DeepCopy.copy(olapTable, copied, OlapTable.class)) {\nthrow new SemanticException(\"Failed to copy olap table: \" + olapTable.getName());\n}\nolapTables.put(olapTable.getId(), Pair.create(baseTableInfo, copied));\n}\nString dropPartitionSql = \"ALTER TABLE test.tbl1 DROP PARTITION p4\";\ntry {\nnew StmtExecutor(connectContext, dropPartitionSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nreturn olapTables;\n}\n};\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap<Long, Map<String, MaterializedView.BasePartitionInfo>> baseTableVisibleVersionMap 
=\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nAssert.assertNull(baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p4\"));\n}\npublic void testBaseTableDropPartitionWhileRefresh(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nnew MockUp<PartitionBasedMvRefreshProcessor>() {\n@Mock\npublic void refreshMaterializedView(MvTaskRunContext mvContext, ExecPlan execPlan,\nInsertStmt insertStmt) throws Exception {\nString dropPartitionSql = \"ALTER TABLE test.tbl1 DROP PARTITION p100\";\ntry {\nnew StmtExecutor(connectContext, dropPartitionSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nConnectContext ctx = mvContext.getCtx();\nStmtExecutor executor = new StmtExecutor(ctx, insertStmt);\nctx.setExecutor(executor);\nctx.setThreadLocalInfo();\nctx.setStmtId(new AtomicInteger().incrementAndGet());\nctx.setExecutionId(UUIDUtil.toTUniqueId(ctx.getQueryId()));\nexecutor.handleDMLStmt(execPlan, insertStmt);\n}\n};\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap<Long, Map<String, MaterializedView.BasePartitionInfo>> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nAssert.assertNotNull(baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p100\"));\nAssert.assertEquals(3, baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p100\").getVersion());\n}\nprivate static void setPartitionVersion(Partition partition, long version) {\npartition.setVisibleVersion(version, System.currentTimeMillis());\nMaterializedIndex baseIndex = partition.getBaseIndex();\nList<Tablet> tablets = baseIndex.getTablets();\nfor (Tablet tablet : tablets) {\nList<Replica> replicas = ((LocalTablet) tablet).getImmutableReplicas();\nfor (Replica replica : replicas) {\nreplica.updateVersionInfo(version, -1, version);\n}\n}\n}\n@Test\npublic void testFilterPartitionByRefreshNumber() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_with_test_refresh\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nmaterializedView.getTableProperty().setPartitionRefreshNumber(3);\nPartitionBasedMvRefreshProcessor processor = new PartitionBasedMvRefreshProcessor();\nMvTaskRunContext mvContext = new MvTaskRunContext(new TaskRunContext());\nprocessor.setMvContext(mvContext);\nprocessor.filterPartitionByRefreshNumber(materializedView.getPartitionNames(), materializedView);\nmvContext = processor.getMvContext();\nAssert.assertEquals(\"2022-03-01\", mvContext.getNextPartitionStart());\nAssert.assertEquals(\"2022-05-01\", mvContext.getNextPartitionEnd());\ntaskRun.executeTaskRun();\nprocessor.filterPartitionByRefreshNumber(Sets.newHashSet(), materializedView);\nmvContext = processor.getMvContext();\nAssert.assertNull(mvContext.getNextPartitionStart());\nAssert.assertNull(mvContext.getNextPartitionEnd());\n}\n@Test\npublic void testRefreshMaterializedViewDefaultConfig1() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"create materialized view test.mv_config1\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh 
deferred manual\\n\" +\n\"properties(\" +\n\"'replication_num' = '1',\\n\" +\n\"'storage_medium' = 'SSD'\" +\n\")\\n\" +\n\"as select k1, k2 from tbl1;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_config1\"));\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertTrue(execPlan.getConnectContext().getSessionVariable().getEnableSpill());\n}\n{\nConfig.enable_materialized_view_spill = false;\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertFalse(execPlan.getConnectContext().getSessionVariable().getEnableSpill());\nConfig.enable_materialized_view_spill = true;\n}\n}\n@Test\npublic void testRefreshMaterializedViewDefaultConfig2() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"create materialized view test.mv_config2\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties(\" +\n\"'replication_num' = '1',\\n\" +\n\"'session.enable_spill' = 'false'\" +\n\")\\n\" +\n\"as select k1, k2 from tbl1;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_config2\"));\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertFalse(execPlan.getConnectContext().getSessionVariable().getEnableSpill());\n}\n@Test\npublic void testSyncPartitionWithSsdStorage() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"create materialized view test.mv_with_ssd\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1',\\n\" +\n\"'storage_medium' = 'SSD')\\n\" +\n\"as select tbl1.k1, tbl2.k2 from tbl1 join tbl2 on tbl1.k2 = tbl2.k2;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_with_ssd\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = 
TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\ntry {\ntaskRun.executeTaskRun();\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(\"refresh failed\");\n}\n}\n@Test\npublic void testSyncPartitionWithSsdStorageAndCooldownTime() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\n\"create materialized view test.mv_use_ssd_and_cooldown\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1',\\n\" +\n\"'storage_medium' = 'SSD',\\n\" +\n\"'storage_cooldown_time' = '2222-04-21 20:45:11')\\n\" +\n\"as select tbl1.k1, tbl2.k2 from tbl1 join tbl2 on tbl1.k2 = tbl2.k2;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_use_ssd_and_cooldown\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntry {\ntaskRun.executeTaskRun();\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(\"refresh failed\");\n}\n}\n@Test\npublic void testCreateMaterializedViewOnListPartitionTables1() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nString createSQL = \"CREATE TABLE test.list_partition_tbl1 (\\n\" +\n\" id BIGINT,\\n\" +\n\" age SMALLINT,\\n\" +\n\" dt VARCHAR(10),\\n\" +\n\" province VARCHAR(64) not null\\n\" +\n\")\\n\" +\n\"ENGINE=olap\\n\" +\n\"DUPLICATE KEY(id)\\n\" +\n\"PARTITION BY LIST (province) (\\n\" +\n\" PARTITION p1 VALUES IN (\\\"beijing\\\",\\\"chongqing\\\") ,\\n\" +\n\" PARTITION p2 VALUES IN (\\\"guangdong\\\") \\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(id) BUCKETS 10\\n\" +\n\"PROPERTIES (\\n\" +\n\" \\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\")\";\nstarRocksAssert.withTable(createSQL);\nString sql = \"create materialized view list_partition_mv1 \" +\n\"distributed by hash(dt, province) buckets 10 \" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\" +\n\") \" +\n\"as select dt, province, avg(age) from list_partition_tbl1 group by dt, province;\";\nstarRocksAssert.withMaterializedView(sql);\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"list_partition_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\nconnectContext.getSessionVariable().setOptimizerExecuteTimeout(300000);\n{\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertTrue(execPlan == null);\n}\n{\nString insertSql = \"INSERT INTO list_partition_tbl1 VALUES (1, 1, '2023-08-15', 'beijing');\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = 
execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=2/2\\n\" +\n\" rollup: list_partition_tbl1\"));\n}\n{\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertTrue(execPlan == null);\n}\nstarRocksAssert.dropMaterializedView(\"list_partition_mv1\");\n}\n@Test\npublic void testPartitionPruneNonRefBaseTable1() throws Exception {\nstarRocksAssert.useDatabase(\"test\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`partition_prune_non_ref_tables1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`k1`)\\n\" +\n\"DISTRIBUTED BY HASH(`k1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t1.k1, sum(t1.k2) as sum1, avg(t2.k2) as avg1 FROM tbl4 as t1 join \" +\n\"tbl5 t2 on t1.k1=t2.dt group by t1.k1\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"partition_prune_non_ref_tables1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\n}\n{\nString insertSql = \"insert into tbl4 partition(p4) values('2022-04-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertFalse(plan.contains(\"partitions=5/5\"));\nAssert.assertTrue(plan.contains(\"partitions=1/5\\n\" +\n\" rollup: tbl5\"));\nAssert.assertTrue(plan.contains(\"partitions=1/5\\n\" +\n\" rollup: tbl4\"));\n}\n{\nString insertSql = \"insert into tbl5 partition(p4) values('2022-04-01', '2021-04-01 00:02:11', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: tbl5\"));\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: tbl4\"));\n}\nstarRocksAssert.dropMaterializedView(\"partition_prune_non_ref_tables1\");\n}\n@Test\npublic void testPartitionPruneNonRefBaseTable2() throws Exception {\nstarRocksAssert.useDatabase(\"test\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`partition_prune_non_ref_tables2`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`k11`)\\n\" +\n\"DISTRIBUTED BY HASH(`k11`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t1.k1 as k11, sum(t1.k2) as sum1, avg(t2.k2) as avg1 FROM tbl4 as t1 join \" +\n\"tbl5 t2 on t1.k1=t2.dt group by 
t1.k1\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"partition_prune_non_ref_tables2\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\n}\n{\nString insertSql = \"insert into tbl4 partition(p4) values('2022-04-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertFalse(plan.contains(\"partitions=5/5\"));\nAssert.assertTrue(plan.contains(\"partitions=1/5\\n\" +\n\" rollup: tbl5\"));\nAssert.assertTrue(plan.contains(\"partitions=1/5\\n\" +\n\" rollup: tbl4\"));\n}\n{\nString insertSql = \"insert into tbl5 partition(p4) values('2022-04-01', '2021-04-01 00:02:11', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: tbl5\"));\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: tbl4\"));\n}\nstarRocksAssert.dropMaterializedView(\"partition_prune_non_ref_tables2\");\n}\n@Test\npublic void testPartitionPruneNonRefBaseTable3() throws Exception {\nstarRocksAssert.useDatabase(\"test\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`partition_prune_non_ref_tables1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`k1`)\\n\" +\n\"DISTRIBUTED BY HASH(`k1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t1.k1, sum(t1.k2) as sum1, avg(t2.k2) as avg1 FROM tbl4 as t1 join \" +\n\"tbl5 t2 on t1.k1=t2.dt where t1.k1>'2022-01-01' and t1.k2>0 group by t1.k1\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"partition_prune_non_ref_tables1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\n}\n{\nString insertSql = \"insert into tbl4 partition(p4) values('2022-04-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nSystem.out.println(plan);\nAssert.assertFalse(plan.contains(\"partitions=5/5\"));\nAssert.assertTrue(plan.contains(\"PREDICATES: 2: k2 > 0\\n\" +\n\" partitions=1/5\\n\" +\n\" rollup: 
tbl4\"));\nAssert.assertTrue(plan.contains(\"partitions=1/5\\n\" +\n\" rollup: tbl5\"));\n}\n{\nString insertSql = \"insert into tbl5 partition(p4) values('2022-04-01', '2021-04-01 00:02:11', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nSystem.out.println(plan);\nAssert.assertTrue(plan.contains(\"k1 > '2022-01-01', 2: k2 > 0\\n\" +\n\" partitions=4/5\\n\" +\n\" rollup: tbl4\"));\nAssert.assertTrue(plan.contains(\"4: dt > '2022-01-01'\\n\" +\n\" partitions=4/5\\n\" +\n\" rollup: tbl5\"));\n}\nstarRocksAssert.dropMaterializedView(\"partition_prune_non_ref_tables1\");\n}\n@Test\npublic void testHivePartitionPruneNonRefBaseTable1() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nstarRocksAssert.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_partition_prune_non_ref_tables2`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_date`)\\n\" +\n\"DISTRIBUTED BY HASH(`c1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT part_tbl1.c1, part_tbl2.c2, part_tbl1.par_date FROM `hive0`.`partitioned_db`.`part_tbl1` join \" +\n\"`hive0`.`partitioned_db`.`part_tbl2` using (par_date)\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_partition_prune_non_ref_tables2\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl1\\n\" +\n\" PARTITION PREDICATES: 4: par_date >= '2020-01-01', 4: par_date < '2020-01-05'\\n\" +\n\" partitions=4/4\"));\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl2\\n\" +\n\" PARTITION PREDICATES: 8: par_date >= '2020-01-01', 8: par_date < '2020-01-05'\\n\" +\n\" partitions=4/4\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.addPartition(\"partitioned_db\", \"part_tbl1\", \"par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl1\\n\" +\n\" PARTITION PREDICATES: 4: par_date >= '2020-01-05', 4: par_date < '2020-01-06'\\n\" +\n\" partitions=1/5\"));\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl2\\n\" +\n\" PARTITION PREDICATES: 8: par_date >= '2020-01-05', 8: par_date < '2020-01-06'\\n\" +\n\" 
partitions=0/4\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.addPartition(\"partitioned_db\", \"part_tbl2\", \"par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl1\\n\" +\n\" PARTITION PREDICATES: 4: par_date >= '2020-01-01', 4: par_date < '2020-01-06'\\n\" +\n\" partitions=5/5\"));\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl2\\n\" +\n\" PARTITION PREDICATES: 8: par_date >= '2020-01-01', 8: par_date < '2020-01-06'\\n\" +\n\" partitions=5/5\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.dropPartition(\"partitioned_db\", \"part_tbl1\", \"par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertTrue(execPlan == null);\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.dropPartition(\"partitioned_db\", \"part_tbl2\", \"par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl1\\n\" +\n\" PARTITION PREDICATES: 4: par_date >= '2020-01-01', 4: par_date < '2020-01-05'\\n\" +\n\" partitions=4/4\"));\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl2\\n\" +\n\" PARTITION PREDICATES: 8: par_date >= '2020-01-01', 8: par_date < '2020-01-05'\\n\" +\n\" partitions=4/4\"));\n}\nstarRocksAssert.dropMaterializedView(\"hive_partition_prune_non_ref_tables2\");\n}\n@Test\npublic void testHivePartitionPruneNonRefBaseTable2() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nstarRocksAssert.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_partition_prune_non_ref_tables1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_date`)\\n\" +\n\"DISTRIBUTED BY HASH(`c1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t2_par.c1, t2_par.c2, t1_par.par_col, t1_par.par_date FROM `hive0`.`partitioned_db`.`t2_par` join \" +\n\"`hive0`.`partitioned_db`.`t1_par` using (par_col)\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_partition_prune_non_ref_tables1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = 
TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: t1_par\\n\" +\n\" PARTITION PREDICATES: 10: par_date >= '2020-01-01', 10: par_date < '2020-01-05'\\n\" +\n\" partitions=6/6\"));\nAssert.assertTrue(plan.contains(\"TABLE: t2_par\\n\" +\n\" PARTITION PREDICATES: 4: par_col IS NOT NULL\\n\" +\n\" partitions=6/6\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.addPartition(\"partitioned_db\", \"t1_par\", \"par_col=4/par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: t1_par\\n\" +\n\" PARTITION PREDICATES: 10: par_date >= '2020-01-05', 10: par_date < '2020-01-06'\\n\" +\n\" partitions=1/7\"));\nAssert.assertTrue(plan.contains(\"TABLE: t2_par\\n\" +\n\" PARTITION PREDICATES: 4: par_col IS NOT NULL\\n\" +\n\" partitions=6/6\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.addPartition(\"partitioned_db\", \"t2_par\", \"par_col=4/par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: t1_par\\n\" +\n\" PARTITION PREDICATES: 10: par_date >= '2020-01-01', 10: par_date < '2020-01-06'\\n\" +\n\" partitions=7/7\"));\nAssert.assertTrue(plan.contains(\"TABLE: t2_par\\n\" +\n\" PARTITION PREDICATES: 4: par_col IS NOT NULL\\n\" +\n\" partitions=7/7\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.dropPartition(\"partitioned_db\", \"t1_par\", \"par_col=3/par_date=2020-01-05\");\nmockedHiveMetadata.dropPartition(\"partitioned_db\", \"t2_par\", \"par_col=3/par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertTrue(execPlan == null);\n}\nstarRocksAssert.dropMaterializedView(\"hive_partition_prune_non_ref_tables1\");\n}\n@Test\npublic void testHivePartitionPruneNonRefBaseTable3() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nstarRocksAssert.withTable(\"CREATE TABLE `test_partition_prune_tbl1` (\\n\" +\n\"`k1` date,\\n\" +\n\"`k2` 
int,\\n\" +\n\"`k3` int\\n\" +\n\")\\n\" +\n\"DUPLICATE KEY(`k1`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"PARTITION BY RANGE (k1) (\\n\" +\n\"START (\\\"2020-10-01\\\") END (\\\"2020-12-01\\\") EVERY (INTERVAL 15 day)\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(`k1`) BUCKETS 3 \" +\n\"PROPERTIES('replication_num' = '1');\");\nstarRocksAssert.withTable(\"CREATE TABLE `test_partition_prune_tbl2` (\\n\" +\n\"`k1` date,\\n\" +\n\"`k2` int,\\n\" +\n\"`k3` int\\n\" +\n\")\\n\" +\n\"DUPLICATE KEY(`k1`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`k1`) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\");\nstarRocksAssert.withMaterializedView(\"CREATE MATERIALIZED VIEW partition_prune_mv1 \\n\" +\n\"PARTITION BY k3\\n\" +\n\"DISTRIBUTED BY HASH(k1) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL \\n\" +\n\"PROPERTIES('replication_num' = '1') \\n\" +\n\"AS \" +\n\" SELECT test_partition_prune_tbl2.k1 as k1, test_partition_prune_tbl2.k2 as k2, \" +\n\" test_partition_prune_tbl1.k1 as k3, test_partition_prune_tbl1.k2 as k4\\n\" +\n\" FROM test_partition_prune_tbl1 join test_partition_prune_tbl2 on \" +\n\" test_partition_prune_tbl1.k1=test_partition_prune_tbl2.k1;\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"partition_prune_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\nconnectContext.getSessionVariable().setOptimizerExecuteTimeout(300000);\n{\nString insertSql = \"INSERT INTO test_partition_prune_tbl1 VALUES (\\\"2020-11-10\\\",1,1);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: test_partition_prune_tbl1\"));\nAssert.assertTrue(plan.contains(\"PREDICATES: 4: k1 >= '2020-10-01', 4: k1 < '2020-12-15'\\n\" +\n\" partitions=1/1\\n\" +\n\" rollup: test_partition_prune_tbl2\"));\n}\n{\nString insertSql = \"INSERT INTO test_partition_prune_tbl2 VALUES (\\\"2020-11-10\\\",1,1);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: test_partition_prune_tbl1\"));\nAssert.assertTrue(plan.contains(\"PREDICATES: 4: k1 >= '2020-10-01', 4: k1 < '2020-12-15'\\n\" +\n\" partitions=1/1\\n\" +\n\" rollup: test_partition_prune_tbl2\"));\n}\n{\nString insertSql = \"INSERT INTO test_partition_prune_tbl1 VALUES (\\\"2020-11-10\\\",1,1);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" 
rollup: test_partition_prune_tbl1\"));\nAssert.assertTrue(plan.contains(\"PREDICATES: 4: k1 >= '2020-10-01', 4: k1 < '2020-12-15'\\n\" +\n\" partitions=1/1\\n\" +\n\" rollup: test_partition_prune_tbl2\"));\n}\nstarRocksAssert.dropMaterializedView(\"partition_prune_mv1\");\n}\n}", + "context_after": "class PartitionBasedMvRefreshProcessorTest {\nprivate static ConnectContext connectContext;\nprivate static StarRocksAssert starRocksAssert;\n@BeforeClass\npublic static void beforeClass() throws Exception {\nFeConstants.runningUnitTest = true;\nConfig.enable_experimental_mv = true;\nUtFrameUtils.createMinStarRocksCluster();\nconnectContext = UtFrameUtils.createDefaultCtx();\nConnectorPlanTestBase.mockCatalog(connectContext);\nstarRocksAssert = new StarRocksAssert(connectContext);\nif (!starRocksAssert.databaseExist(\"_statistics_\")) {\nStatisticsMetaManager m = new StatisticsMetaManager();\nm.createStatisticsTablesForTest();\n}\nstarRocksAssert.withDatabase(\"test\");\nstarRocksAssert.useDatabase(\"test\");\nstarRocksAssert.withTable(\"CREATE TABLE test.tbl1\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(k1)\\n\" +\n\"(\\n\" +\n\" PARTITION p0 values [('2021-12-01'),('2022-01-01')),\\n\" +\n\" PARTITION p1 values [('2022-01-01'),('2022-02-01')),\\n\" +\n\" PARTITION p2 values [('2022-02-01'),('2022-03-01')),\\n\" +\n\" PARTITION p3 values [('2022-03-01'),('2022-04-01')),\\n\" +\n\" PARTITION p4 values [('2022-04-01'),('2022-05-01'))\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withTable(\"CREATE TABLE test.tbl2\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(k1)\\n\" +\n\"(\\n\" +\n\" PARTITION p1 values less than('2022-02-01'),\\n\" +\n\" PARTITION p2 values less than('2022-03-01')\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withTable(\"CREATE TABLE test.tbl3\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withTable(\"CREATE TABLE test.tbl4\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(k1)\\n\" +\n\"(\\n\" +\n\" PARTITION p0 values [('2021-12-01'),('2022-01-01')),\\n\" +\n\" PARTITION p1 values [('2022-01-01'),('2022-02-01')),\\n\" +\n\" PARTITION p2 values [('2022-02-01'),('2022-03-01')),\\n\" +\n\" PARTITION p3 values [('2022-03-01'),('2022-04-01')),\\n\" +\n\" PARTITION p4 values [('2022-04-01'),('2022-05-01'))\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withTable(\"CREATE TABLE test.tbl5\\n\" +\n\"(\\n\" +\n\" dt date,\\n\" +\n\" k1 datetime,\\n\" +\n\" k2 int,\\n\" +\n\" k3 bigint\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(dt)\\n\" +\n\"(\\n\" +\n\" PARTITION p0 values [('2021-12-01'),('2022-01-01')),\\n\" +\n\" PARTITION p1 values [('2022-01-01'),('2022-02-01')),\\n\" +\n\" PARTITION p2 values [('2022-02-01'),('2022-03-01')),\\n\" +\n\" PARTITION p3 values [('2022-03-01'),('2022-04-01')),\\n\" +\n\" PARTITION p4 values [('2022-04-01'),('2022-05-01'))\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withTable(\"CREATE TABLE test.tbl6\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" 
+\n\")\\n\" +\n\"PARTITION BY RANGE(k1)\\n\" +\n\"(\\n\" +\n\" PARTITION p0 values [('2021-12-01'),('2022-01-01')),\\n\" +\n\" PARTITION p1 values [('2022-01-01'),('2022-02-01')),\\n\" +\n\" PARTITION p2 values [('2022-02-01'),('2022-03-01')),\\n\" +\n\" PARTITION p3 values [('2022-03-01'),('2022-04-01')),\\n\" +\n\" PARTITION p4 values [('2022-04-01'),('2022-05-01'))\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withMaterializedView(\"create materialized view test.union_all_mv\\n\" +\n\"partition by dt \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh manual\\n\" +\n\"properties('replication_num' = '1')\\n\" +\n\"as select dt, -1 as k2 from tbl5 where k2 is null union all select dt, k2 from tbl5;\")\n.withMaterializedView(\"create materialized view test.mv1\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1')\\n\" +\n\"as select tbl1.k1, tbl2.k2 from tbl1 join tbl2 on tbl1.k2 = tbl2.k2;\")\n.withMaterializedView(\"create materialized view test.mv2\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1')\\n\" +\n\"as select tbl4.k1, tbl4.k2 from tbl4;\")\n.withMaterializedView(\"create materialized view test.mv_inactive\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1')\\n\" +\n\"as select tbl1.k1, tbl2.k2 from tbl1 join tbl2 on tbl1.k2 = tbl2.k2;\")\n.withMaterializedView(\"create materialized view test.mv_without_partition\\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1')\\n\" +\n\"as select k2, sum(v1) as total_sum from tbl3 group by k2;\")\n.withTable(\"CREATE TABLE test.base\\n\" +\n\"(\\n\" +\n\" k1 date,\\n\" +\n\" k2 int,\\n\" +\n\" v1 int sum\\n\" +\n\")\\n\" +\n\"PARTITION BY RANGE(k1)\\n\" +\n\"(\\n\" +\n\" PARTITION p0 values [('2021-12-01'),('2022-01-01')),\\n\" +\n\" PARTITION p1 values [('2022-01-01'),('2022-02-01')),\\n\" +\n\" PARTITION p2 values [('2022-02-01'),('2022-03-01')),\\n\" +\n\" PARTITION p3 values [('2022-03-01'),('2022-04-01')),\\n\" +\n\" PARTITION p4 values [('2022-04-01'),('2022-05-01'))\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(k2) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\")\n.withMaterializedView(\"create materialized view test.mv_with_test_refresh\\n\" +\n\"partition by k1\\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"as select k1, k2, sum(v1) as total_sum from base group by k1, k2;\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_parttbl_mv`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a;\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_mul_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH 
DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate`, sum(l_extendedprice) as total_price FROM \" +\n\"`hive0`.`partitioned_db`.`lineitem_mul_par` as a group by `l_orderkey`, `l_suppkey`, `l_shipdate`;\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_mul_parttbl_mv2`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_date`)\\n\" +\n\"DISTRIBUTED BY HASH(`par_col`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT c1, c2, par_date, par_col FROM `hive0`.`partitioned_db`.`t1_par`;\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_join_mv`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_date`)\\n\" +\n\"DISTRIBUTED BY HASH(`c1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t1.c1, t1.c2, t1_par.par_col, t1_par.par_date FROM `hive0`.`partitioned_db`.`t1` join \" +\n\"`hive0`.`partitioned_db`.`t1_par` using (par_col)\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`jdbc_parttbl_mv0`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`d`)\\n\" +\n\"DISTRIBUTED BY HASH(`a`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `a`, `b`, `c`, `d` FROM `jdbc0`.`partitioned_db0`.`tbl0`;\")\n.withMaterializedView(\"create materialized view jdbc_parttbl_mv1 \" +\n\"partition by ss \" +\n\"distributed by hash(a) buckets 10 \" +\n\"REFRESH DEFERRED MANUAL \" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\") \" +\n\"as select str2date(d,'%Y%m%d') ss, a, b, c from jdbc0.partitioned_db0.tbl1;\")\n.withMaterializedView(\"create materialized view jdbc_parttbl_mv2 \" +\n\"partition by ss \" +\n\"distributed by hash(a) buckets 10 \" +\n\"REFRESH DEFERRED MANUAL \" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\") \" +\n\"as select str2date(d,'%Y%m%d') ss, a, b, c from jdbc0.partitioned_db0.tbl2;\")\n.withMaterializedView(\"create materialized view jdbc_parttbl_mv3 \" +\n\"partition by str2date(d,'%Y%m%d') \" +\n\"distributed by hash(a) buckets 10 \" +\n\"REFRESH DEFERRED MANUAL \" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\") \" +\n\"as select a, b, c, d from jdbc0.partitioned_db0.tbl1;\")\n.withMaterializedView(\"create materialized view jdbc_parttbl_mv5 \" +\n\"partition by str2date(d,'%Y%m%d') \" +\n\"distributed by hash(a) buckets 10 \" +\n\"REFRESH DEFERRED MANUAL \" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\") \" +\n\"as select a, b, c, d from jdbc0.partitioned_db0.tbl3;\");\nnew MockUp() {\n@Mock\npublic void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {\nif (stmt instanceof InsertStmt) {\nInsertStmt insertStmt = (InsertStmt) stmt;\nTableName tableName = insertStmt.getTableName();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nOlapTable tbl = ((OlapTable) testDb.getTable(tableName.getTbl()));\nif (tbl != null) {\nfor (Partition partition : tbl.getPartitions()) {\nif 
(insertStmt.getTargetPartitionIds().contains(partition.getId())) {\nsetPartitionVersion(partition, partition.getVisibleVersion() + 1);\n}\n}\n}\n}\n}\n};\n}\nprotected void assertPlanContains(ExecPlan execPlan, String... explain) throws Exception {\nString explainString = execPlan.getExplainString(TExplainLevel.NORMAL);\nfor (String expected : explain) {\nAssert.assertTrue(\"expected is: \" + expected + \" but plan is \\n\" + explainString,\nStringUtils.containsIgnoreCase(explainString.toLowerCase(), expected));\n}\n}\n@Test\npublic void testUnionAllMvWithPartition() {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"union_all_mv\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntry {\nString insertSql = \"insert into tbl5 partition(p4) values('2022-04-01', '2021-04-01 00:02:11', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertFalse(plan.contains(\"partitions=5/5\"));\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(e.getMessage());\n}\n}\n@Test\npublic void testWithPartition() {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntry {\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(5, partitions.size());\nString addPartitionSql = \"ALTER TABLE test.tbl1 ADD\\n\" +\n\"PARTITION p5 VALUES [('2022-05-01'),('2022-06-01'))\";\nnew StmtExecutor(connectContext, addPartitionSql).execute();\ntaskRun.executeTaskRun();\npartitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nString dropPartitionSql = \"ALTER TABLE test.tbl1 DROP PARTITION p5\\n\";\nnew StmtExecutor(connectContext, dropPartitionSql).execute();\ntaskRun.executeTaskRun();\npartitions = materializedView.getPartitions();\nAssert.assertEquals(5, partitions.size());\naddPartitionSql = \"ALTER TABLE test.tbl2 ADD PARTITION p3 values less than('2022-04-01')\";\nnew StmtExecutor(connectContext, addPartitionSql).execute();\ntaskRun.executeTaskRun();\npartitions = materializedView.getPartitions();\nAssert.assertEquals(5, partitions.size());\ndropPartitionSql = \"ALTER TABLE test.tbl2 DROP PARTITION p3\";\nnew StmtExecutor(connectContext, dropPartitionSql).execute();\ntaskRun.executeTaskRun();\npartitions = materializedView.getPartitions();\nAssert.assertEquals(5, partitions.size());\ntestBaseTablePartitionInsertData(testDb, materializedView, taskRun);\ntestBaseTablePartitionReplace(testDb, materializedView, taskRun);\ntestBaseTableAddPartitionWhileSync(testDb, materializedView, taskRun);\ntestBaseTableAddPartitionWhileRefresh(testDb, materializedView, taskRun);\ntestBaseTableDropPartitionWhileSync(testDb, materializedView, 
taskRun);\ntestBaseTableDropPartitionWhileRefresh(testDb, materializedView, taskRun);\ntestBaseTablePartitionRename(taskRun);\ntestRefreshWithFailure(testDb, materializedView, taskRun);\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(e.getMessage());\n}\n}\n@Test\npublic void testInactive() {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_inactive\"));\nmaterializedView.setInactiveAndReason(\"\");\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntry {\ntaskRun.executeTaskRun();\nAssert.fail(\"should not be here. executeTaskRun will throw exception\");\n} catch (Exception e) {\nAssert.assertTrue(e.getMessage().contains(\"is not active, skip sync partition and data with base tables\"));\n}\n}\n@Test\npublic void testMvWithoutPartition() {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_without_partition\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntry {\ntaskRun.executeTaskRun();\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(\"refresh failed\");\n}\n}\n@Test\npublic void testRangePartitionRefresh() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv2\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"2022-01-03\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"2022-02-05\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nString insertSql = \"insert into tbl4 partition(p1) values('2022-01-02',2,10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nAssert.assertEquals(1, materializedView.getPartition(\"p202112_202201\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202201_202202\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202202_202203\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202203_202204\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202204_202205\").getVisibleVersion());\ntaskRunProperties.put(PARTITION_START, \"2021-12-03\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"2022-04-05\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\ntaskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nAssert.assertEquals(1, materializedView.getPartition(\"p202112_202201\").getVisibleVersion());\nAssert.assertEquals(2, 
materializedView.getPartition(\"p202201_202202\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202202_202203\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202203_202204\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202204_202205\").getVisibleVersion());\ntaskRunProperties.put(PARTITION_START, \"2021-12-03\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"2022-03-01\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\ninsertSql = \"insert into tbl4 partition(p3) values('2022-03-02',21,102);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ninsertSql = \"insert into tbl4 partition(p0) values('2021-12-02',81,182);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nAssert.assertEquals(2, materializedView.getPartition(\"p202112_202201\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202201_202202\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202202_202203\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p202203_202204\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202204_202205\").getVisibleVersion());\ntaskRunProperties.put(PARTITION_START, \"2021-12-03\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"2022-05-06\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(true));\ntaskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nAssert.assertEquals(3, materializedView.getPartition(\"p202112_202201\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p202201_202202\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202202_202203\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p202203_202204\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p202204_202205\").getVisibleVersion());\n}\n@Test\npublic void testRefreshPriority() throws Exception {\nnew MockUp() {\n@Mock\npublic void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {\nif (stmt instanceof InsertStmt) {\nInsertStmt insertStmt = (InsertStmt) stmt;\nTableName tableName = insertStmt.getTableName();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nOlapTable tbl = ((OlapTable) testDb.getTable(tableName.getTbl()));\nfor (Partition partition : tbl.getPartitions()) {\nif (insertStmt.getTargetPartitionIds().contains(partition.getId())) {\nsetPartitionVersion(partition, partition.getVisibleVersion() + 1);\n}\n}\n}\n}\n};\nString mvName = \"mv_refresh_priority\";\nstarRocksAssert.withMaterializedView(\"create materialized view test.mv_refresh_priority\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1', 'partition_refresh_number'='1')\\n\" +\n\"as select k1, k2 from tbl6;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(mvName));\nTaskManager tm = 
GlobalStateMgr.getCurrentState().getTaskManager();\nTaskRunManager trm = tm.getTaskRunManager();\nString insertSql = \"insert into tbl6 partition(p1) values('2022-01-02',2,10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ninsertSql = \"insert into tbl6 partition(p2) values('2022-02-02',2,10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(true));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nlong taskId = tm.getTask(TaskBuilder.getMvTaskName(materializedView.getId())).getId();\nTaskRun run = tm.getTaskRunManager().getRunnableTaskRun(taskId);\nAssert.assertEquals(Constants.TaskRunPriority.HIGHEST.value(), run.getStatus().getPriority());\nwhile (MapUtils.isNotEmpty(trm.getRunningTaskRunMap())) {\nThread.sleep(100);\n}\n}\n@Test\npublic void testAutoRefreshPartitionLimitWithHiveTable() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nmaterializedView.getTableProperty().setAutoRefreshPartitionsLimit(2);\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\ntask.setType(Constants.TaskType.PERIODICAL);\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nAssert.assertEquals(1, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, 
materializedView.getPartition(\"p19980105\").getVisibleVersion());\ntask.setType(Constants.TaskType.MANUAL);\ntaskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\n@Test\npublic void testRefreshWithHiveTableJoin() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_join_mv\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"4:HASH JOIN\"));\n}\n@Test\npublic void testAutoPartitionRefreshWithPartitionChanged() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.addPartition(\"partitioned_db\", \"lineitem_par\", \"l_shipdate=1998-01-06\");\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=1/7\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(7, 
partitions.size());\nmockedHiveMetadata.dropPartition(\"partitioned_db\", \"lineitem_par\", \"l_shipdate=1998-01-06\");\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\n@Test\npublic void testAutoPartitionRefreshWithHiveTableJoin1() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_join_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_date`)\\n\" +\n\"DISTRIBUTED BY HASH(`c1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t1.c1, t1.c2, t1_par.par_col, t1_par.par_date FROM `hive0`.`partitioned_db`.`t1` join \" +\n\"`hive0`.`partitioned_db`.`t1_par` using (par_col)\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_join_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=7/7\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"t1_par\",\nImmutableList.of(\"par_col=0/par_date=2020-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"par_date >= '2020-01-03', 9: par_date < '2020-01-04'\", \"partitions=2/7\");\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"t1\",\nImmutableList.of(\"par_col=0\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=7/7\", \"partitions=3/3\");\n}\n@Test\npublic void testAutoPartitionRefreshWithHiveTableJoin2() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_join_mv2`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate`,`o_custkey` FROM `hive0`.`partitioned_db`.`lineitem_par` \" +\n\"as a join `hive0`.`tpch`.`orders` on l_orderkey = o_orderkey\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_join_mv2\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), 
System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-04\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"l_shipdate >= '1998-01-04', 16: l_shipdate < '1998-01-05'\",\n\"partitions=1/6\");\nmockedHiveMetadata.updateTable(\"tpch\", \"orders\");\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\", \"partitions=1/1\");\n}\n@Test\npublic void testAutoPartitionRefreshWithUnPartitionedHiveTable() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_tbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`n_nationkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `n_nationkey`, `n_name`, `n_comment` FROM `hive0`.`tpch`.`nation`;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_tbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=1/1\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updateTable(\"tpch\", \"nation\");\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=1/1\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(1, partitions.size());\nAssert.assertEquals(3, materializedView.getPartition(\"hive_tbl_mv1\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_tbl_mv1\");\n}\n@Test\npublic void testAutoPartitionRefreshWithPartitionedHiveTable1() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" 
+\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\", \"PARTITION PREDICATES: (16: l_shipdate < '1998-01-06') \" +\n\"OR (16: l_shipdate IS NULL)\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"PARTITION PREDICATES: 16: l_shipdate >= '1998-01-02', 16: l_shipdate < '1998-01-04'\",\n\"partitions=2/6\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\n@Test\npublic void testAutoPartitionRefreshWithPartitionedHiveTable2() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_tbl_mv2`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_tbl_mv2\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, 
\"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(1, partitions.size());\nAssert.assertEquals(3, materializedView.getPartition(\"hive_tbl_mv2\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_tbl_mv2\");\n}\n@Test\npublic void testAutoPartitionRefreshWithPartitionedHiveTableJoinInternalTable() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\n\"CREATE MATERIALIZED VIEW `hive_join_internal_mv`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`lineitem_par` as a\" +\n\" join test.tbl1 b on a.l_suppkey=b.k2;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_join_internal_mv\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(1, partitions.size());\nAssert.assertEquals(3, materializedView.getPartition(\"hive_join_internal_mv\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_join_internal_mv\");\n}\n@Test\npublic void testPartitionRefreshWithUpperCaseTable() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT 
`l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_db`.`LINEITEM_PAR` as \" +\n\"`LINEITEM_PAR_ALIAS`;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"PARTITION PREDICATES: 16: l_shipdate >= '1998-01-02', 16: l_shipdate < '1998-01-04'\",\n\"partitions=2/6\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\npublic void testPartitionRefreshWithUpperCaseDb() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"CREATE MATERIALIZED VIEW `hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`l_shipdate`)\\n\" +\n\"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT `l_orderkey`, `l_suppkey`, `l_shipdate` FROM `hive0`.`partitioned_DB`.`LINEITEM_PAR` as \" +\n\"`LINEITEM_PAR_ALIAS`;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=6/6\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) 
connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db\", \"lineitem_par\",\nImmutableList.of(\"l_shipdate=1998-01-02\", \"l_shipdate=1998-01-03\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"PARTITION PREDICATES: 16: l_shipdate >= '1998-01-02', 16: l_shipdate < '1998-01-04'\",\n\"partitions=2/6\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(3, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\n@Test\npublic void testPartitionRefreshWithLowerCase() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\n\"CREATE MATERIALIZED VIEW `test`.`hive_parttbl_mv1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_col`)\\n\" +\n\"DISTRIBUTED BY HASH(`c1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT c1, c2, par_col FROM `hive0`.`partitioned_db2`.`t2`;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"partitions=3/3\");\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.updatePartitions(\"partitioned_db2\", \"t2\",\nImmutableList.of(\"par_col=0\"));\ntaskRun.executeTaskRun();\nprocessor = (PartitionBasedMvRefreshProcessor) taskRun.getProcessor();\nmvContext = processor.getMvContext();\nexecPlan = mvContext.getExecPlan();\nassertPlanContains(execPlan, \"par_col >= 0, 4: par_col < 1\", \"partitions=1/3\");\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(3, partitions.size());\nAssert.assertEquals(3, materializedView.getPartition(\"p0\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p1\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p2\").getVisibleVersion());\nstarRocksAssert.useDatabase(\"test\").dropMaterializedView(\"hive_parttbl_mv1\");\n}\n@Test\npublic void testRangePartitionRefreshWithHiveTable() throws Exception {\nDatabase testDb = 
GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_parttbl_mv\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"1998-01-01\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"1998-01-03\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(6, partitions.size());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980101\").getVisibleVersion());\nAssert.assertEquals(2, materializedView.getPartition(\"p19980102\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980103\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980104\").getVisibleVersion());\nAssert.assertEquals(1, materializedView.getPartition(\"p19980105\").getVisibleVersion());\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"PARTITION PREDICATES: 16: l_shipdate >= '1998-01-01', \" +\n\"16: l_shipdate < '1998-01-03'\"));\nAssert.assertTrue(plan.contains(\"partitions=2/6\"));\n}\n@Test\npublic void testRefreshPartitionWithMulParColumnsHiveTable1() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_mul_parttbl_mv1\"));\nMap mvProperties = Maps.newHashMap();\nmvProperties.put(PARTITION_START, \"1998-01-01\");\nmvProperties.put(PARTITION_END, \"1998-01-03\");\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(mvProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"PARTITION PREDICATES: 15: l_shipdate >= '1998-01-01', 15: l_shipdate < '1998-01-03'\"));\nAssert.assertTrue(plan.contains(\"partitions=5/8\"));\n}\n@Test\npublic void testRefreshPartitionWithMulParColumnsHiveTable2() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_mul_parttbl_mv2\"));\nMap mvProperties = Maps.newHashMap();\nmvProperties.put(PARTITION_START, \"2020-01-01\");\nmvProperties.put(PARTITION_END, \"2020-01-03\");\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(mvProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = 
(PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"PARTITION PREDICATES: 5: par_date >= '2020-01-01', 5: par_date < '2020-01-03'\"));\nAssert.assertTrue(plan.contains(\"partitions=3/7\"));\n}\n@Test\npublic void testRangePartitionChangeWithJDBCTable() throws Exception {\nMockedMetadataMgr metadataMgr = (MockedMetadataMgr) connectContext.getGlobalStateMgr().getMetadataMgr();\nMockedJDBCMetadata mockedJDBCMetadata =\n(MockedJDBCMetadata) metadataMgr.getOptionalMetadata(MockedJDBCMetadata.MOCKED_JDBC_CATALOG_NAME).get();\nmockedJDBCMetadata.initPartitions();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv0\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(3, partitions.size());\nAssert.assertNotNull(materializedView.getPartition(\"P20230801\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230802\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230803\"));\nmockedJDBCMetadata.addPartitions();\ntaskRun.executeTaskRun();\nCollection incrementalPartitions = materializedView.getPartitions();\nAssert.assertEquals(4, incrementalPartitions.size());\nAssert.assertNotNull(materializedView.getPartition(\"P20230802\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230803\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230804\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230805\"));\n}\n@Test\npublic void testRangePartitionRefreshWithJDBCTable() throws Exception {\nMockedMetadataMgr metadataMgr = (MockedMetadataMgr) connectContext.getGlobalStateMgr().getMetadataMgr();\nMockedJDBCMetadata mockedJDBCMetadata =\n(MockedJDBCMetadata) metadataMgr.getOptionalMetadata(MockedJDBCMetadata.MOCKED_JDBC_CATALOG_NAME).get();\nmockedJDBCMetadata.initPartitions();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv0\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nlong refreshBeforeVersionTime = materializedView.getPartition(\"P20230803\").getVisibleVersionTime();\nmockedJDBCMetadata.refreshPartitions();\ntaskRun.executeTaskRun();\nlong refreshAfterVersionTime = 
materializedView.getPartition(\"P20230803\").getVisibleVersionTime();\nAssert.assertNotEquals(refreshBeforeVersionTime, refreshAfterVersionTime);\n}\n@Test\npublic void testRangePartitionWithJDBCTableUseStr2Date() throws Exception {\nMockedMetadataMgr metadataMgr = (MockedMetadataMgr) connectContext.getGlobalStateMgr().getMetadataMgr();\nMockedJDBCMetadata mockedJDBCMetadata =\n(MockedJDBCMetadata) metadataMgr.getOptionalMetadata(MockedJDBCMetadata.MOCKED_JDBC_CATALOG_NAME).get();\nmockedJDBCMetadata.initPartitions();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv1\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(3, partitions.size());\nAssert.assertNotNull(materializedView.getPartition(\"p20230801\"));\nAssert.assertNotNull(materializedView.getPartition(\"p20230802\"));\nAssert.assertNotNull(materializedView.getPartition(\"p20230803\"));\n}\n@Test\npublic void testRangePartitionWithJDBCTableUseStr2DateForError() {\ntry {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv2\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\n} catch (Exception e) {\nAssert.assertTrue(e.getMessage().contains(\"Text '1234567' could not be parsed\"));\n}\n}\n@Test\npublic void testRangePartitionWithJDBCTableUseStr2Date2() throws Exception {\nMockedMetadataMgr metadataMgr = (MockedMetadataMgr) connectContext.getGlobalStateMgr().getMetadataMgr();\nMockedJDBCMetadata mockedJDBCMetadata =\n(MockedJDBCMetadata) metadataMgr.getOptionalMetadata(MockedJDBCMetadata.MOCKED_JDBC_CATALOG_NAME).get();\nmockedJDBCMetadata.initPartitions();\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"jdbc_parttbl_mv3\"));\nHashMap taskRunProperties = new HashMap<>();\ntaskRunProperties.put(PARTITION_START, \"20230801\");\ntaskRunProperties.put(TaskRun.PARTITION_END, \"20230805\");\ntaskRunProperties.put(TaskRun.FORCE, Boolean.toString(false));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).properties(taskRunProperties).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nCollection partitions = materializedView.getPartitions();\nAssert.assertEquals(3, 
partitions.size());\nAssert.assertNotNull(materializedView.getPartition(\"P20230801\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230802\"));\nAssert.assertNotNull(materializedView.getPartition(\"P20230803\"));\n}\n@Test\npublic void testMvWithoutPartitionRefreshTwice() throws Exception {\nfinal AtomicInteger taskRunCounter = new AtomicInteger();\nnew MockUp<StmtExecutor>() {\n@Mock\npublic void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {\ntaskRunCounter.incrementAndGet();\n}\n};\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_without_partition\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\nString insertSql = \"insert into tbl3 values('2021-12-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntry {\nfor (int i = 0; i < 2; i++) {\ntaskRun.executeTaskRun();\n}\nAssert.assertEquals(1, taskRunCounter.get());\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(\"refresh failed\");\n}\n}\n@Test\npublic void testClearQueryInfo() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_without_partition\"));\nnew MockUp<StmtExecutor>() {\n@Mock\npublic void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {\nUUID uuid = UUID.randomUUID();\nTUniqueId loadId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());\nSystem.out.println(\"register query id: \" + DebugUtil.printId(connectContext.getExecutionId()));\nLoadPlanner loadPlanner = new LoadPlanner(1, loadId, 1, 1, materializedView,\nfalse, \"UTC\", 10, System.currentTimeMillis(),\nfalse, connectContext, null, 10,\n10, null, null, null, 1);\nDefaultCoordinator coordinator = new DefaultCoordinator.Factory().createBrokerLoadScheduler(loadPlanner);\n}\n};\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\nString insertSql = \"insert into tbl3 values('2021-12-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nSystem.out.println(\"unregister query id: \" + DebugUtil.printId(connectContext.getExecutionId()));\nAssert.assertNull(QeProcessorImpl.INSTANCE.getCoordinator(connectContext.getExecutionId()));\n}\nprivate void testBaseTablePartitionInsertData(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nString insertSql = \"insert into tbl1 partition(p0) values('2021-12-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ninsertSql = \"insert into tbl1 partition(p1) values('2022-01-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\ntaskRun.executeTaskRun();\nMap<Long, Map<String, MaterializedView.BasePartitionInfo>> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nMaterializedView.BasePartitionInfo basePartitionInfo = baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p0\");\nAssert.assertEquals(2, basePartitionInfo.getVersion());\ninsertSql = \"insert into tbl1 partition(p0) values('2021-12-01', 2, 10);\";\nnew 
StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap<Long, Map<String, MaterializedView.BasePartitionInfo>> baseTableVisibleVersionMap2 =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nMaterializedView.BasePartitionInfo newP0PartitionInfo = baseTableVisibleVersionMap2.get(tbl1.getId()).get(\"p0\");\nAssert.assertEquals(3, newP0PartitionInfo.getVersion());\nMaterializedView.BasePartitionInfo p1PartitionInfo = baseTableVisibleVersionMap2.get(tbl1.getId()).get(\"p1\");\nAssert.assertEquals(2, p1PartitionInfo.getVersion());\n}\nprivate void testRefreshWithFailure(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nString insertSql = \"insert into tbl1 partition(p0) values('2021-12-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\nnew MockUp<PartitionBasedMvRefreshProcessor>() {\n@Mock\npublic void processTaskRun(TaskRunContext context) throws Exception {\nthrow new RuntimeException(\"new exception\");\n}\n};\ntry {\ntaskRun.executeTaskRun();\n} catch (Exception e) {\ne.printStackTrace();\n}\nMap<Long, Map<String, MaterializedView.BasePartitionInfo>> baseTableVisibleVersionMap2 =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nMaterializedView.BasePartitionInfo newP0PartitionInfo = baseTableVisibleVersionMap2.get(tbl1.getId()).get(\"p0\");\nAssert.assertEquals(3, newP0PartitionInfo.getVersion());\n}\npublic void testBaseTablePartitionRename(TaskRun taskRun)\nthrows Exception {\nnew MockUp<PartitionBasedMvRefreshProcessor>() {\n@Mock\nprivate Map<Long, Pair<BaseTableInfo, OlapTable>> collectBaseTables(MaterializedView materializedView) {\nMap<Long, Pair<BaseTableInfo, OlapTable>> olapTables = Maps.newHashMap();\nList<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();\nfor (BaseTableInfo baseTableInfo : baseTableInfos) {\nif (!baseTableInfo.getTable().isOlapTable()) {\ncontinue;\n}\nDatabase baseDb = GlobalStateMgr.getCurrentState().getDb(baseTableInfo.getDbId());\nif (baseDb == null) {\nthrow new SemanticException(\"Materialized view base db: \" +\nbaseTableInfo.getDbId() + \" not exist.\");\n}\nOlapTable olapTable = (OlapTable) baseDb.getTable(baseTableInfo.getTableId());\nif (olapTable == null) {\nthrow new SemanticException(\"Materialized view base table: \" +\nbaseTableInfo.getTableId() + \" not exist.\");\n}\nOlapTable copied = new OlapTable();\nif (!DeepCopy.copy(olapTable, copied, OlapTable.class)) {\nthrow new SemanticException(\"Failed to copy olap table: \" + olapTable.getName());\n}\nolapTables.put(olapTable.getId(), Pair.create(baseTableInfo, copied));\n}\nString renamePartitionSql = \"ALTER TABLE test.tbl1 RENAME PARTITION p1 p1_1\";\ntry {\nnew StmtExecutor(connectContext, renamePartitionSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nreturn olapTables;\n}\n};\nString insertSql = \"insert into tbl1 partition(p1) values('2022-01-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntry {\ntaskRun.executeTaskRun();\n} catch (Exception e) {\nAssert.assertTrue(e.getMessage().contains(\"is not active, skip sync partition and data with base tables\"));\n}\n}\nprivate void testBaseTablePartitionReplace(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nnew MockUp<PartitionBasedMvRefreshProcessor>() {\n@Mock\npublic Map<Long, Pair<BaseTableInfo, OlapTable>> collectBaseTables(MaterializedView materializedView) {\nMap<Long, Pair<BaseTableInfo, OlapTable>> olapTables = Maps.newHashMap();\nList<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();\nfor (BaseTableInfo baseTableInfo : baseTableInfos) {\nif (!baseTableInfo.getTable().isOlapTable()) 
{\ncontinue;\n}\nDatabase baseDb = GlobalStateMgr.getCurrentState().getDb(baseTableInfo.getDbId());\nif (baseDb == null) {\nthrow new SemanticException(\"Materialized view base db: \" +\nbaseTableInfo.getDbId() + \" not exist.\");\n}\nOlapTable olapTable = (OlapTable) baseDb.getTable(baseTableInfo.getTableId());\nif (olapTable == null) {\nthrow new SemanticException(\"Materialized view base table: \" +\nbaseTableInfo.getTableId() + \" not exist.\");\n}\nOlapTable copied = new OlapTable();\nif (!DeepCopy.copy(olapTable, copied, OlapTable.class)) {\nthrow new SemanticException(\"Failed to copy olap table: \" + olapTable.getName());\n}\nolapTables.put(olapTable.getId(), Pair.create(baseTableInfo, olapTable));\n}\ntry {\nString replacePartitionSql = \"ALTER TABLE test.tbl1 REPLACE PARTITION (p3) WITH TEMPORARY PARTITION (tp3)\\n\" +\n\"PROPERTIES (\\n\" +\n\" \\\"strict_range\\\" = \\\"false\\\",\\n\" +\n\" \\\"use_temp_partition_name\\\" = \\\"false\\\"\\n\" +\n\");\";\nnew StmtExecutor(connectContext, replacePartitionSql).execute();\nString insertSql = \"insert into tbl1 partition(p3) values('2021-03-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nreturn olapTables;\n}\n};\nPartition partition = tbl1.getPartition(\"p3\");\nString createTempPartitionSql =\n\"ALTER TABLE test.tbl1 ADD TEMPORARY PARTITION tp3 values [('2022-03-01'),('2022-04-01'))\";\nnew StmtExecutor(connectContext, createTempPartitionSql).execute();\nString insertSql = \"insert into tbl1 partition(p3) values('2021-03-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap<Long, Map<String, MaterializedView.BasePartitionInfo>> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nMaterializedView.BasePartitionInfo basePartitionInfo = baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p3\");\nAssert.assertNotEquals(partition.getId(), basePartitionInfo.getId());\n}\npublic void testBaseTableAddPartitionWhileSync(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nnew MockUp<PartitionBasedMvRefreshProcessor>() {\n@Mock\npublic Map<Long, Pair<BaseTableInfo, OlapTable>> collectBaseTables(MaterializedView materializedView) {\nMap<Long, Pair<BaseTableInfo, OlapTable>> olapTables = Maps.newHashMap();\nList<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();\nfor (BaseTableInfo baseTableInfo : baseTableInfos) {\nif (!baseTableInfo.getTable().isOlapTable()) {\ncontinue;\n}\nDatabase baseDb = GlobalStateMgr.getCurrentState().getDb(baseTableInfo.getDbId());\nif (baseDb == null) {\nthrow new SemanticException(\"Materialized view base db: \" +\nbaseTableInfo.getDbId() + \" not exist.\");\n}\nOlapTable olapTable = (OlapTable) baseDb.getTable(baseTableInfo.getTableId());\nif (olapTable == null) {\nthrow new SemanticException(\"Materialized view base table: \" +\nbaseTableInfo.getTableId() + \" not exist.\");\n}\nOlapTable copied = new OlapTable();\nif (!DeepCopy.copy(olapTable, copied, OlapTable.class)) {\nthrow new SemanticException(\"Failed to copy olap table: \" + olapTable.getName());\n}\nolapTables.put(olapTable.getId(), Pair.create(baseTableInfo, copied));\n}\nString addPartitionSql = \"ALTER TABLE test.tbl1 ADD PARTITION p99 VALUES [('9999-03-01'),('9999-04-01'))\";\ntry {\nnew StmtExecutor(connectContext, addPartitionSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nString insertSql = \"insert into tbl1 partition(p99) values('9999-03-01', 2, 10);\";\ntry {\nnew 
StmtExecutor(connectContext, insertSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nreturn olapTables;\n}\n};\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 2, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nAssert.assertEquals(3, baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p3\").getVersion());\nAssert.assertNotNull(baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p99\"));\nAssert.assertEquals(2, baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p99\").getVersion());\n}\npublic void testBaseTableAddPartitionWhileRefresh(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nnew MockUp() {\n@Mock\npublic void refreshMaterializedView(MvTaskRunContext mvContext, ExecPlan execPlan,\nInsertStmt insertStmt) throws Exception {\nString addPartitionSql = \"ALTER TABLE test.tbl1 ADD PARTITION p100 VALUES [('9999-04-01'),('9999-05-01'))\";\nString insertSql = \"insert into tbl1 partition(p100) values('9999-04-01', 3, 10);\";\ntry {\nnew StmtExecutor(connectContext, addPartitionSql).execute();\nnew StmtExecutor(connectContext, insertSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nConnectContext ctx = mvContext.getCtx();\nStmtExecutor executor = new StmtExecutor(ctx, insertStmt);\nctx.setExecutor(executor);\nctx.setThreadLocalInfo();\nctx.setStmtId(new AtomicInteger().incrementAndGet());\nctx.setExecutionId(UUIDUtil.toTUniqueId(ctx.getQueryId()));\nexecutor.handleDMLStmt(execPlan, insertStmt);\n}\n};\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nAssert.assertEquals(4, baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p3\").getVersion());\nAssert.assertNull(baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p100\"));\n}\npublic void testBaseTableDropPartitionWhileSync(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nnew MockUp() {\n@Mock\nprivate Map> collectBaseTables(MaterializedView materializedView) {\nMap> olapTables = Maps.newHashMap();\nList baseTableInfos = materializedView.getBaseTableInfos();\nfor (BaseTableInfo baseTableInfo : baseTableInfos) {\nif (!baseTableInfo.getTable().isOlapTable()) {\ncontinue;\n}\nDatabase baseDb = GlobalStateMgr.getCurrentState().getDb(baseTableInfo.getDbId());\nif (baseDb == null) {\nthrow new SemanticException(\"Materialized view base db: \" +\nbaseTableInfo.getDbId() + \" not exist.\");\n}\nOlapTable olapTable = (OlapTable) baseDb.getTable(baseTableInfo.getTableId());\nif (olapTable == null) {\nthrow new SemanticException(\"Materialized view base table: \" +\nbaseTableInfo.getTableId() + \" not exist.\");\n}\nOlapTable copied = new OlapTable();\nif (!DeepCopy.copy(olapTable, copied, OlapTable.class)) {\nthrow new SemanticException(\"Failed to copy olap table: \" + olapTable.getName());\n}\nolapTables.put(olapTable.getId(), Pair.create(baseTableInfo, copied));\n}\nString dropPartitionSql = \"ALTER TABLE test.tbl1 DROP PARTITION p4\";\ntry {\nnew StmtExecutor(connectContext, 
dropPartitionSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nreturn olapTables;\n}\n};\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nAssert.assertNull(baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p4\"));\n}\npublic void testBaseTableDropPartitionWhileRefresh(Database testDb, MaterializedView materializedView, TaskRun taskRun)\nthrows Exception {\nOlapTable tbl1 = ((OlapTable) testDb.getTable(\"tbl1\"));\nnew MockUp() {\n@Mock\npublic void refreshMaterializedView(MvTaskRunContext mvContext, ExecPlan execPlan,\nInsertStmt insertStmt) throws Exception {\nString dropPartitionSql = \"ALTER TABLE test.tbl1 DROP PARTITION p100\";\ntry {\nnew StmtExecutor(connectContext, dropPartitionSql).execute();\n} catch (Exception e) {\ne.printStackTrace();\n}\nConnectContext ctx = mvContext.getCtx();\nStmtExecutor executor = new StmtExecutor(ctx, insertStmt);\nctx.setExecutor(executor);\nctx.setThreadLocalInfo();\nctx.setStmtId(new AtomicInteger().incrementAndGet());\nctx.setExecutionId(UUIDUtil.toTUniqueId(ctx.getQueryId()));\nexecutor.handleDMLStmt(execPlan, insertStmt);\n}\n};\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nMap> baseTableVisibleVersionMap =\nmaterializedView.getRefreshScheme().getAsyncRefreshContext().getBaseTableVisibleVersionMap();\nAssert.assertNotNull(baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p100\"));\nAssert.assertEquals(3, baseTableVisibleVersionMap.get(tbl1.getId()).get(\"p100\").getVersion());\n}\nprivate static void setPartitionVersion(Partition partition, long version) {\npartition.setVisibleVersion(version, System.currentTimeMillis());\nMaterializedIndex baseIndex = partition.getBaseIndex();\nList tablets = baseIndex.getTablets();\nfor (Tablet tablet : tablets) {\nList replicas = ((LocalTablet) tablet).getImmutableReplicas();\nfor (Replica replica : replicas) {\nreplica.updateVersionInfo(version, -1, version);\n}\n}\n}\n@Test\npublic void testFilterPartitionByRefreshNumber() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_with_test_refresh\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nmaterializedView.getTableProperty().setPartitionRefreshNumber(3);\nPartitionBasedMvRefreshProcessor processor = new PartitionBasedMvRefreshProcessor();\nMvTaskRunContext mvContext = new MvTaskRunContext(new TaskRunContext());\nprocessor.setMvContext(mvContext);\nprocessor.filterPartitionByRefreshNumber(materializedView.getPartitionNames(), materializedView);\nmvContext = processor.getMvContext();\nAssert.assertEquals(\"2022-03-01\", mvContext.getNextPartitionStart());\nAssert.assertEquals(\"2022-05-01\", mvContext.getNextPartitionEnd());\ntaskRun.executeTaskRun();\nprocessor.filterPartitionByRefreshNumber(Sets.newHashSet(), materializedView);\nmvContext = 
processor.getMvContext();\nAssert.assertNull(mvContext.getNextPartitionStart());\nAssert.assertNull(mvContext.getNextPartitionEnd());\n}\n@Test\npublic void testRefreshMaterializedViewDefaultConfig1() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"create materialized view test.mv_config1\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties(\" +\n\"'replication_num' = '1',\\n\" +\n\"'storage_medium' = 'SSD'\" +\n\")\\n\" +\n\"as select k1, k2 from tbl1;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_config1\"));\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertTrue(execPlan.getConnectContext().getSessionVariable().getEnableSpill());\n}\n{\nConfig.enable_materialized_view_spill = false;\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertFalse(execPlan.getConnectContext().getSessionVariable().getEnableSpill());\nConfig.enable_materialized_view_spill = true;\n}\n}\n@Test\npublic void testRefreshMaterializedViewDefaultConfig2() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"create materialized view test.mv_config2\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties(\" +\n\"'replication_num' = '1',\\n\" +\n\"'session.enable_spill' = 'false'\" +\n\")\\n\" +\n\"as select k1, k2 from tbl1;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_config2\"));\nString insertSql = \"insert into tbl1 partition(p3) values('2022-03-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertFalse(execPlan.getConnectContext().getSessionVariable().getEnableSpill());\n}\n@Test\npublic void testSyncPartitionWithSsdStorage() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\"create materialized view test.mv_with_ssd\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" 
+\n\"properties('replication_num' = '1',\\n\" +\n\"'storage_medium' = 'SSD')\\n\" +\n\"as select tbl1.k1, tbl2.k2 from tbl1 join tbl2 on tbl1.k2 = tbl2.k2;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_with_ssd\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntaskRun.executeTaskRun();\ntry {\ntaskRun.executeTaskRun();\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(\"refresh failed\");\n}\n}\n@Test\npublic void testSyncPartitionWithSsdStorageAndCooldownTime() throws Exception {\nstarRocksAssert.useDatabase(\"test\").withMaterializedView(\n\"create materialized view test.mv_use_ssd_and_cooldown\\n\" +\n\"partition by date_trunc('month',k1) \\n\" +\n\"distributed by hash(k2) buckets 10\\n\" +\n\"refresh deferred manual\\n\" +\n\"properties('replication_num' = '1',\\n\" +\n\"'storage_medium' = 'SSD',\\n\" +\n\"'storage_cooldown_time' = '2222-04-21 20:45:11')\\n\" +\n\"as select tbl1.k1, tbl2.k2 from tbl1 join tbl2 on tbl1.k2 = tbl2.k2;\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"mv_use_ssd_and_cooldown\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\ntry {\ntaskRun.executeTaskRun();\n} catch (Exception e) {\ne.printStackTrace();\nAssert.fail(\"refresh failed\");\n}\n}\n@Test\npublic void testCreateMaterializedViewOnListPartitionTables1() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nString createSQL = \"CREATE TABLE test.list_partition_tbl1 (\\n\" +\n\" id BIGINT,\\n\" +\n\" age SMALLINT,\\n\" +\n\" dt VARCHAR(10),\\n\" +\n\" province VARCHAR(64) not null\\n\" +\n\")\\n\" +\n\"ENGINE=olap\\n\" +\n\"DUPLICATE KEY(id)\\n\" +\n\"PARTITION BY LIST (province) (\\n\" +\n\" PARTITION p1 VALUES IN (\\\"beijing\\\",\\\"chongqing\\\") ,\\n\" +\n\" PARTITION p2 VALUES IN (\\\"guangdong\\\") \\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(id) BUCKETS 10\\n\" +\n\"PROPERTIES (\\n\" +\n\" \\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\")\";\nstarRocksAssert.withTable(createSQL);\nString sql = \"create materialized view list_partition_mv1 \" +\n\"distributed by hash(dt, province) buckets 10 \" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\" +\n\") \" +\n\"as select dt, province, avg(age) from list_partition_tbl1 group by dt, province;\";\nstarRocksAssert.withMaterializedView(sql);\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"list_partition_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\nconnectContext.getSessionVariable().setOptimizerExecuteTimeout(300000);\n{\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertTrue(execPlan == null);\n}\n{\nString insertSql = \"INSERT INTO list_partition_tbl1 
VALUES (1, 1, '2023-08-15', 'beijing');\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=2/2\\n\" +\n\" rollup: list_partition_tbl1\"));\n}\n{\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertTrue(execPlan == null);\n}\nstarRocksAssert.dropMaterializedView(\"list_partition_mv1\");\n}\n@Test\npublic void testPartitionPruneNonRefBaseTable1() throws Exception {\nstarRocksAssert.useDatabase(\"test\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`partition_prune_non_ref_tables1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`k1`)\\n\" +\n\"DISTRIBUTED BY HASH(`k1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t1.k1, sum(t1.k2) as sum1, avg(t2.k2) as avg1 FROM tbl4 as t1 join \" +\n\"tbl5 t2 on t1.k1=t2.dt group by t1.k1\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"partition_prune_non_ref_tables1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\n}\n{\nString insertSql = \"insert into tbl4 partition(p4) values('2022-04-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertFalse(plan.contains(\"partitions=5/5\"));\nAssert.assertTrue(plan.contains(\"partitions=1/5\\n\" +\n\" rollup: tbl5\"));\nAssert.assertTrue(plan.contains(\"partitions=1/5\\n\" +\n\" rollup: tbl4\"));\n}\n{\nString insertSql = \"insert into tbl5 partition(p4) values('2022-04-01', '2021-04-01 00:02:11', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: tbl5\"));\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: tbl4\"));\n}\nstarRocksAssert.dropMaterializedView(\"partition_prune_non_ref_tables1\");\n}\n@Test\npublic void testPartitionPruneNonRefBaseTable2() throws Exception {\nstarRocksAssert.useDatabase(\"test\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`partition_prune_non_ref_tables2`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`k11`)\\n\" 
+\n\"DISTRIBUTED BY HASH(`k11`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t1.k1 as k11, sum(t1.k2) as sum1, avg(t2.k2) as avg1 FROM tbl4 as t1 join \" +\n\"tbl5 t2 on t1.k1=t2.dt group by t1.k1\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"partition_prune_non_ref_tables2\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\n}\n{\nString insertSql = \"insert into tbl4 partition(p4) values('2022-04-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertFalse(plan.contains(\"partitions=5/5\"));\nAssert.assertTrue(plan.contains(\"partitions=1/5\\n\" +\n\" rollup: tbl5\"));\nAssert.assertTrue(plan.contains(\"partitions=1/5\\n\" +\n\" rollup: tbl4\"));\n}\n{\nString insertSql = \"insert into tbl5 partition(p4) values('2022-04-01', '2021-04-01 00:02:11', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: tbl5\"));\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: tbl4\"));\n}\nstarRocksAssert.dropMaterializedView(\"partition_prune_non_ref_tables2\");\n}\n@Test\npublic void testPartitionPruneNonRefBaseTable3() throws Exception {\nstarRocksAssert.useDatabase(\"test\")\n.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`partition_prune_non_ref_tables1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`k1`)\\n\" +\n\"DISTRIBUTED BY HASH(`k1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t1.k1, sum(t1.k2) as sum1, avg(t2.k2) as avg1 FROM tbl4 as t1 join \" +\n\"tbl5 t2 on t1.k1=t2.dt where t1.k1>'2022-01-01' and t1.k2>0 group by t1.k1\");\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"partition_prune_non_ref_tables1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\n}\n{\nString insertSql = \"insert into tbl4 partition(p4) values('2022-04-01', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = 
processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nSystem.out.println(plan);\nAssert.assertFalse(plan.contains(\"partitions=5/5\"));\nAssert.assertTrue(plan.contains(\"PREDICATES: 2: k2 > 0\\n\" +\n\" partitions=1/5\\n\" +\n\" rollup: tbl4\"));\nAssert.assertTrue(plan.contains(\"partitions=1/5\\n\" +\n\" rollup: tbl5\"));\n}\n{\nString insertSql = \"insert into tbl5 partition(p4) values('2022-04-01', '2021-04-01 00:02:11', 3, 10);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nSystem.out.println(plan);\nAssert.assertTrue(plan.contains(\"k1 > '2022-01-01', 2: k2 > 0\\n\" +\n\" partitions=4/5\\n\" +\n\" rollup: tbl4\"));\nAssert.assertTrue(plan.contains(\"4: dt > '2022-01-01'\\n\" +\n\" partitions=4/5\\n\" +\n\" rollup: tbl5\"));\n}\nstarRocksAssert.dropMaterializedView(\"partition_prune_non_ref_tables1\");\n}\n@Test\npublic void testHivePartitionPruneNonRefBaseTable1() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nstarRocksAssert.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_partition_prune_non_ref_tables2`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_date`)\\n\" +\n\"DISTRIBUTED BY HASH(`c1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT part_tbl1.c1, part_tbl2.c2, part_tbl1.par_date FROM `hive0`.`partitioned_db`.`part_tbl1` join \" +\n\"`hive0`.`partitioned_db`.`part_tbl2` using (par_date)\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_partition_prune_non_ref_tables2\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl1\\n\" +\n\" PARTITION PREDICATES: 4: par_date >= '2020-01-01', 4: par_date < '2020-01-05'\\n\" +\n\" partitions=4/4\"));\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl2\\n\" +\n\" PARTITION PREDICATES: 8: par_date >= '2020-01-01', 8: par_date < '2020-01-05'\\n\" +\n\" partitions=4/4\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.addPartition(\"partitioned_db\", \"part_tbl1\", \"par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = 
execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl1\\n\" +\n\" PARTITION PREDICATES: 4: par_date >= '2020-01-05', 4: par_date < '2020-01-06'\\n\" +\n\" partitions=1/5\"));\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl2\\n\" +\n\" PARTITION PREDICATES: 8: par_date >= '2020-01-05', 8: par_date < '2020-01-06'\\n\" +\n\" partitions=0/4\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.addPartition(\"partitioned_db\", \"part_tbl2\", \"par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl1\\n\" +\n\" PARTITION PREDICATES: 4: par_date >= '2020-01-01', 4: par_date < '2020-01-06'\\n\" +\n\" partitions=5/5\"));\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl2\\n\" +\n\" PARTITION PREDICATES: 8: par_date >= '2020-01-01', 8: par_date < '2020-01-06'\\n\" +\n\" partitions=5/5\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.dropPartition(\"partitioned_db\", \"part_tbl1\", \"par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertTrue(execPlan == null);\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.dropPartition(\"partitioned_db\", \"part_tbl2\", \"par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl1\\n\" +\n\" PARTITION PREDICATES: 4: par_date >= '2020-01-01', 4: par_date < '2020-01-05'\\n\" +\n\" partitions=4/4\"));\nAssert.assertTrue(plan.contains(\"TABLE: part_tbl2\\n\" +\n\" PARTITION PREDICATES: 8: par_date >= '2020-01-01', 8: par_date < '2020-01-05'\\n\" +\n\" partitions=4/4\"));\n}\nstarRocksAssert.dropMaterializedView(\"hive_partition_prune_non_ref_tables2\");\n}\n@Test\npublic void testHivePartitionPruneNonRefBaseTable2() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nstarRocksAssert.withMaterializedView(\"CREATE MATERIALIZED VIEW `test`.`hive_partition_prune_non_ref_tables1`\\n\" +\n\"COMMENT \\\"MATERIALIZED_VIEW\\\"\\n\" +\n\"PARTITION BY (`par_date`)\\n\" +\n\"DISTRIBUTED BY HASH(`c1`) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"storage_medium\\\" = \\\"HDD\\\"\\n\" +\n\")\\n\" +\n\"AS SELECT t2_par.c1, t2_par.c2, t1_par.par_col, t1_par.par_date FROM 
`hive0`.`partitioned_db`.`t2_par` join \" +\n\"`hive0`.`partitioned_db`.`t1_par` using (par_col)\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"hive_partition_prune_non_ref_tables1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\n{\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: t1_par\\n\" +\n\" PARTITION PREDICATES: 10: par_date >= '2020-01-01', 10: par_date < '2020-01-05'\\n\" +\n\" partitions=6/6\"));\nAssert.assertTrue(plan.contains(\"TABLE: t2_par\\n\" +\n\" PARTITION PREDICATES: 4: par_col IS NOT NULL\\n\" +\n\" partitions=6/6\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.addPartition(\"partitioned_db\", \"t1_par\", \"par_col=4/par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: t1_par\\n\" +\n\" PARTITION PREDICATES: 10: par_date >= '2020-01-05', 10: par_date < '2020-01-06'\\n\" +\n\" partitions=1/7\"));\nAssert.assertTrue(plan.contains(\"TABLE: t2_par\\n\" +\n\" PARTITION PREDICATES: 4: par_col IS NOT NULL\\n\" +\n\" partitions=6/6\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.addPartition(\"partitioned_db\", \"t2_par\", \"par_col=4/par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"TABLE: t1_par\\n\" +\n\" PARTITION PREDICATES: 10: par_date >= '2020-01-01', 10: par_date < '2020-01-06'\\n\" +\n\" partitions=7/7\"));\nAssert.assertTrue(plan.contains(\"TABLE: t2_par\\n\" +\n\" PARTITION PREDICATES: 4: par_col IS NOT NULL\\n\" +\n\" partitions=7/7\"));\n}\n{\nMockedHiveMetadata mockedHiveMetadata = (MockedHiveMetadata) connectContext.getGlobalStateMgr().getMetadataMgr().\ngetOptionalMetadata(MockedHiveMetadata.MOCKED_HIVE_CATALOG_NAME).get();\nmockedHiveMetadata.dropPartition(\"partitioned_db\", \"t1_par\", \"par_col=3/par_date=2020-01-05\");\nmockedHiveMetadata.dropPartition(\"partitioned_db\", \"t2_par\", \"par_col=3/par_date=2020-01-05\");\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nAssert.assertTrue(execPlan == 
null);\n}\nstarRocksAssert.dropMaterializedView(\"hive_partition_prune_non_ref_tables1\");\n}\n@Test\npublic void testHivePartitionPruneNonRefBaseTable3() throws Exception {\nDatabase testDb = GlobalStateMgr.getCurrentState().getDb(\"test\");\nstarRocksAssert.withTable(\"CREATE TABLE `test_partition_prune_tbl1` (\\n\" +\n\"`k1` date,\\n\" +\n\"`k2` int,\\n\" +\n\"`k3` int\\n\" +\n\")\\n\" +\n\"DUPLICATE KEY(`k1`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"PARTITION BY RANGE (k1) (\\n\" +\n\"START (\\\"2020-10-01\\\") END (\\\"2020-12-01\\\") EVERY (INTERVAL 15 day)\\n\" +\n\")\\n\" +\n\"DISTRIBUTED BY HASH(`k1`) BUCKETS 3 \" +\n\"PROPERTIES('replication_num' = '1');\");\nstarRocksAssert.withTable(\"CREATE TABLE `test_partition_prune_tbl2` (\\n\" +\n\"`k1` date,\\n\" +\n\"`k2` int,\\n\" +\n\"`k3` int\\n\" +\n\")\\n\" +\n\"DUPLICATE KEY(`k1`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`k1`) BUCKETS 3\\n\" +\n\"PROPERTIES('replication_num' = '1');\");\nstarRocksAssert.withMaterializedView(\"CREATE MATERIALIZED VIEW partition_prune_mv1 \\n\" +\n\"PARTITION BY k3\\n\" +\n\"DISTRIBUTED BY HASH(k1) BUCKETS 10\\n\" +\n\"REFRESH DEFERRED MANUAL \\n\" +\n\"PROPERTIES('replication_num' = '1') \\n\" +\n\"AS \" +\n\" SELECT test_partition_prune_tbl2.k1 as k1, test_partition_prune_tbl2.k2 as k2, \" +\n\" test_partition_prune_tbl1.k1 as k3, test_partition_prune_tbl1.k2 as k4\\n\" +\n\" FROM test_partition_prune_tbl1 join test_partition_prune_tbl2 on \" +\n\" test_partition_prune_tbl1.k1=test_partition_prune_tbl2.k1;\");\nMaterializedView materializedView = ((MaterializedView) testDb.getTable(\"partition_prune_mv1\"));\nTask task = TaskBuilder.buildMvTask(materializedView, testDb.getFullName());\nTaskRun taskRun = TaskRunBuilder.newBuilder(task).build();\ntaskRun.initStatus(UUIDUtil.genUUID().toString(), System.currentTimeMillis());\nconnectContext.getSessionVariable().setOptimizerExecuteTimeout(300000);\n{\nString insertSql = \"INSERT INTO test_partition_prune_tbl1 VALUES (\\\"2020-11-10\\\",1,1);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: test_partition_prune_tbl1\"));\nAssert.assertTrue(plan.contains(\"PREDICATES: 4: k1 >= '2020-10-01', 4: k1 < '2020-12-15'\\n\" +\n\" partitions=1/1\\n\" +\n\" rollup: test_partition_prune_tbl2\"));\n}\n{\nString insertSql = \"INSERT INTO test_partition_prune_tbl2 VALUES (\\\"2020-11-10\\\",1,1);\";\nnew StmtExecutor(connectContext, insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: test_partition_prune_tbl1\"));\nAssert.assertTrue(plan.contains(\"PREDICATES: 4: k1 >= '2020-10-01', 4: k1 < '2020-12-15'\\n\" +\n\" partitions=1/1\\n\" +\n\" rollup: test_partition_prune_tbl2\"));\n}\n{\nString insertSql = \"INSERT INTO test_partition_prune_tbl1 VALUES (\\\"2020-11-10\\\",1,1);\";\nnew StmtExecutor(connectContext, 
insertSql).execute();\ntaskRun.executeTaskRun();\nPartitionBasedMvRefreshProcessor processor = (PartitionBasedMvRefreshProcessor)\ntaskRun.getProcessor();\nMvTaskRunContext mvContext = processor.getMvContext();\nExecPlan execPlan = mvContext.getExecPlan();\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nAssert.assertTrue(plan.contains(\"partitions=5/5\\n\" +\n\" rollup: test_partition_prune_tbl1\"));\nAssert.assertTrue(plan.contains(\"PREDICATES: 4: k1 >= '2020-10-01', 4: k1 < '2020-12-15'\\n\" +\n\" partitions=1/1\\n\" +\n\" rollup: test_partition_prune_tbl2\"));\n}\nstarRocksAssert.dropMaterializedView(\"partition_prune_mv1\");\n}\n}"
},
{
"comment": "@jmartisk - a proper fix for this would be in SmallRye, so that all the rules on whether we should display the message etc. apply. For now I added it in the extension, with a TODO to remove it once we have a new release of SmallRye. I'll do another release of SmallRye before Quarkus 2. Let me know.",
"method_body": "protected void doHandle(final RoutingContext ctx) {\nif (ctx.request().headers().contains(HttpHeaders.UPGRADE, HttpHeaders.WEBSOCKET, true)) {\nctx.request().toWebSocket(new Handler<AsyncResult<ServerWebSocket>>() {\n@Override\npublic void handle(AsyncResult<ServerWebSocket> event) {\nif (event.succeeded()) {\nServerWebSocket socket = event.result();\nsocket.textMessageHandler(new Handler<String>() {\n@Override\npublic void handle(String message) {\nfinal AtomicReference<Subscription> subscriptionRef = new AtomicReference<>();\nJsonObject jsonInput = inputToJsonObject(message);\nExecutionResponse executionResponse = getExecutionService()\n.execute(jsonInput);\nExecutionResult executionResult = executionResponse.getExecutionResult();\nif (executionResult != null) {\nif (executionResult.getErrors() != null && !executionResult.getErrors().isEmpty()) {\nsocket.writeTextMessage(executionResponse.getExecutionResultAsString());\nsocket.close();\n} else {\nPublisher<ExecutionResult> stream = executionResponse.getExecutionResult().getData();\nif (stream != null) {\nstream.subscribe(new Subscriber<ExecutionResult>() {\n@Override\npublic void onSubscribe(Subscription s) {\nsubscriptionRef.set(s);\ns.request(1);\n}\n@Override\npublic void onNext(ExecutionResult er) {\nObject response = er.getData();\nif (!socket.isClosed()) {\nsocket.writeTextMessage(JSONB.toJson(response));\n}\nSubscription s = subscriptionRef.get();\ns.request(1);\n}\n@Override\npublic void onError(Throwable t) {\nif (!socket.isClosed()) {\nsocket.writeTextMessage(t.getMessage());\n}\n}\n@Override\npublic void onComplete() {\nif (!socket.isClosed()) {\nsocket.close();\n}\n}\n});\n}\n}\n} else {\nif (!socket.isClosed()) {\nsocket.close();\n}\n}\n}\n});\n}\n}\n});\n} else {\nctx.next();\n}\n}",
"target_code": "@Override",
"method_body_after": "protected void doHandle(final RoutingContext ctx) {\nif (ctx.request().headers().contains(HttpHeaders.UPGRADE, HttpHeaders.WEBSOCKET, true) && !ctx.request().isEnded()) {\nctx.request().toWebSocket(new SmallRyeWebSocketHandler());\n} else {\nctx.next();\n}\n}",
"context_before": "class SmallRyeGraphQLSubscriptionHandler extends SmallRyeGraphQLAbstractHandler {\nprivate static final Jsonb JSONB = JsonbBuilder.create(new JsonbConfig().withNullValues(true));\nprivate final ExecutionErrorsService executionErrorsService;\npublic SmallRyeGraphQLSubscriptionHandler(Config config, CurrentIdentityAssociation currentIdentityAssociation,\nCurrentVertxRequest currentVertxRequest) {\nsuper(currentIdentityAssociation, currentVertxRequest);\nthis.executionErrorsService = new ExecutionErrorsService(config);\n}\n@Override\n}",
"context_after": "class SmallRyeGraphQLSubscriptionHandler extends SmallRyeGraphQLAbstractHandler {\nprivate static final Logger log = Logger.getLogger(SmallRyeGraphQLSubscriptionHandler.class);\nprivate final ExecutionErrorsService executionErrorsService;\nprivate final Config config;\nprivate final AtomicReference subscriptionRef = new AtomicReference<>();\npublic SmallRyeGraphQLSubscriptionHandler(Config config, CurrentIdentityAssociation currentIdentityAssociation,\nCurrentVertxRequest currentVertxRequest) {\nsuper(currentIdentityAssociation, currentVertxRequest);\nthis.config = config;\nthis.executionErrorsService = new ExecutionErrorsService(config);\n}\n@Override\nprivate class SmallRyeWebSocketHandler implements Handler> {\n@Override\npublic void handle(AsyncResult event) {\nif (event.succeeded()) {\nServerWebSocket serverWebSocket = event.result();\nserverWebSocket.closeHandler(new CloseHandler());\nserverWebSocket.endHandler(new EndHandler());\nserverWebSocket.exceptionHandler(new ExceptionHandler());\nserverWebSocket.textMessageHandler(new TextMessageHandler(serverWebSocket));\n}\n}\n}\nprivate class CloseHandler implements Handler {\n@Override\npublic void handle(Void e) {\nunsubscribe();\n}\n}\nprivate class EndHandler implements Handler {\n@Override\npublic void handle(Void e) {\nunsubscribe();\n}\n}\nprivate class ExceptionHandler implements Handler {\n@Override\npublic void handle(Throwable e) {\nlog.error(e.getMessage());\nunsubscribe();\n}\n}\npublic void unsubscribe() {\nif (subscriptionRef.get() != null) {\nSubscriptions.cancel(subscriptionRef);\nsubscriptionRef.set(null);\n}\n}\nprivate class TextMessageHandler implements Handler {\nprivate final SmallRyeGraphQLSubscriptionSubscriber smallRyeGraphQLSubscriptionSubscriber;\nTextMessageHandler(final ServerWebSocket serverWebSocket) {\nthis.smallRyeGraphQLSubscriptionSubscriber = new SmallRyeGraphQLSubscriptionSubscriber(serverWebSocket);\n}\n@Override\npublic void handle(String message) {\nJsonObject jsonInput = inputToJsonObject(message);\nExecutionResponse executionResponse = getExecutionService()\n.execute(jsonInput);\nExecutionResult executionResult = executionResponse.getExecutionResult();\nif (executionResult != null) {\nif (executionResult.getErrors() != null && !executionResult.getErrors().isEmpty()) {\nsmallRyeGraphQLSubscriptionSubscriber.onNext(executionResult);\nsmallRyeGraphQLSubscriptionSubscriber.closeWebSocket();\n} else {\nPublisher stream = executionResponse.getExecutionResult()\n.getData();\nif (stream != null) {\nMulti multiStream = Multi.createFrom().publisher(stream);\nmultiStream.onFailure().recoverWithItem(failure -> {\nreturn new ExecutionResultImpl(GraphqlErrorBuilder.newError()\n.message(failure.getMessage())\n.build());\n}).subscribe(smallRyeGraphQLSubscriptionSubscriber);\n}\n}\n}\n}\n}\nprivate class SmallRyeGraphQLSubscriptionSubscriber implements Subscriber {\nprivate final ServerWebSocket serverWebSocket;\npublic SmallRyeGraphQLSubscriptionSubscriber(ServerWebSocket serverWebSocket) {\nthis.serverWebSocket = serverWebSocket;\n}\n@Override\npublic void onSubscribe(Subscription s) {\nif (subscriptionRef.compareAndSet(null, s)) {\ns.request(1);\n} else {\ns.cancel();\n}\n}\n@Override\npublic void onNext(ExecutionResult executionResult) {\nif (serverWebSocket != null && !serverWebSocket.isClosed()) {\nExecutionResponse executionResponse = new ExecutionResponse(executionResult, config);\nserverWebSocket.writeTextMessage(executionResponse.getExecutionResultAsString());\nSubscription s = 
subscriptionRef.get();\ns.request(1);\n} else {\nunsubscribe();\n}\n}\n@Override\npublic void onError(Throwable thrwbl) {\nlog.error(\"Error in GraphQL Subscription Websocket\", thrwbl);\nunsubscribe();\ncloseWebSocket();\n}\n@Override\npublic void onComplete() {\nunsubscribe();\ncloseWebSocket();\n}\npublic void closeWebSocket() {\nif (!serverWebSocket.isClosed()) {\nserverWebSocket.close();\n}\n}\n}\n}"
},
{
"comment": "Yes, almost. If you analyze today's partition, you will get the table-level column statistics (**aggregated over all partitions' column statistics**). As I said above, `spark does not have single partition-level column statistics information. It only has table-level statistics information.`, so **analyzing partition column statistics** is equivalent to **analyzing table column statistics**.",
"method_body": "public HivePartitionStats getTableStatistics(String dbName, String tblName) {\norg.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName);\nHiveCommonStats commonStats = toHiveCommonStats(table.getParameters());\nlong totalRowNums = commonStats.getRowNums();\nif (totalRowNums == -1) {\nreturn HivePartitionStats.empty();\n}\nList<String> dataColumns = table.getSd().getCols().stream()\n.map(FieldSchema::getName)\n.collect(toImmutableList());\nList<ColumnStatisticsObj> statisticsObjs = client.getTableColumnStats(dbName, tblName, dataColumns);\nif (statisticsObjs.isEmpty()) {\nstatisticsObjs = HiveMetastoreApiConverter.getColStatsFromSparkParams(table);\n}\nMap<String, HiveColumnStats> columnStatistics =\nHiveMetastoreApiConverter.toSinglePartitionColumnStats(statisticsObjs, totalRowNums);\nreturn new HivePartitionStats(commonStats, columnStatistics);\n}",
"target_code": "if (statisticsObjs.isEmpty()) {",
"method_body_after": "public HivePartitionStats getTableStatistics(String dbName, String tblName) {\norg.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName);\nHiveCommonStats commonStats = toHiveCommonStats(table.getParameters());\nlong totalRowNums = commonStats.getRowNums();\nif (totalRowNums == -1) {\nreturn HivePartitionStats.empty();\n}\nList<String> dataColumns = table.getSd().getCols().stream()\n.map(FieldSchema::getName)\n.collect(toImmutableList());\nList<ColumnStatisticsObj> statisticsObjs = client.getTableColumnStats(dbName, tblName, dataColumns);\nif (statisticsObjs.isEmpty() && Config.enable_reuse_spark_column_statistics) {\ntry {\nif (table.getParameters().keySet().stream().anyMatch(k -> k.startsWith(\"spark.sql.statistics.colStats.\"))) {\nstatisticsObjs = HiveMetastoreApiConverter.getColStatsFromSparkParams(table);\n}\n} catch (Exception e) {\nLOG.warn(\"Failed to get column stats from table [{}.{}]\", dbName, tblName);\n}\n}\nMap<String, HiveColumnStats> columnStatistics =\nHiveMetastoreApiConverter.toSinglePartitionColumnStats(statisticsObjs, totalRowNums);\nreturn new HivePartitionStats(commonStats, columnStatistics);\n}",
"context_before": "class HiveMetastore implements IHiveMetastore {\nprivate static final Logger LOG = LogManager.getLogger(CachingHiveMetastore.class);\nprivate final HiveMetaClient client;\nprivate final String catalogName;\nprivate final MetastoreType metastoreType;\npublic HiveMetastore(HiveMetaClient client, String catalogName, MetastoreType metastoreType) {\nthis.client = client;\nthis.catalogName = catalogName;\nthis.metastoreType = metastoreType;\n}\n@Override\npublic List<String> getAllDatabaseNames() {\nreturn client.getAllDatabaseNames();\n}\n@Override\npublic void createDb(String dbName, Map<String, String> properties) {\nString location = properties.getOrDefault(LOCATION_PROPERTY, \"\");\nlong 
dbId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt();\nDatabase database = new Database(dbId, dbName, location);\nclient.createDatabase(HiveMetastoreApiConverter.toMetastoreApiDatabase(database));\n}\n@Override\npublic void dropDb(String dbName, boolean deleteData) {\nclient.dropDatabase(dbName, deleteData);\n}\n@Override\npublic List<String> getAllTableNames(String dbName) {\nreturn client.getAllTableNames(dbName);\n}\n@Override\npublic Database getDb(String dbName) {\norg.apache.hadoop.hive.metastore.api.Database db = client.getDb(dbName);\nreturn HiveMetastoreApiConverter.toDatabase(db);\n}\n@Override\npublic void createTable(String dbName, Table table) {\norg.apache.hadoop.hive.metastore.api.Table hiveTable = toMetastoreApiTable((HiveTable) table);\nclient.createTable(hiveTable);\n}\n@Override\npublic void dropTable(String dbName, String tableName) {\nclient.dropTable(dbName, tableName);\n}\npublic Table getTable(String dbName, String tableName) {\norg.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tableName);\nStorageDescriptor sd = table.getSd();\nif (sd == null) {\nthrow new StarRocksConnectorException(\"Table is missing storage descriptor\");\n}\nif (!HiveMetastoreApiConverter.isHudiTable(table.getSd().getInputFormat())) {\nvalidateHiveTableType(table.getTableType());\nif (AcidUtils.isFullAcidTable(table)) {\nthrow new StarRocksConnectorException(\nString.format(\"%s.%s is a hive transactional table(full acid), sr didn't support it yet\", dbName,\ntableName));\n}\nif (table.getTableType().equalsIgnoreCase(\"VIRTUAL_VIEW\")) {\nreturn HiveMetastoreApiConverter.toHiveView(table, catalogName);\n} else {\nreturn HiveMetastoreApiConverter.toHiveTable(table, catalogName);\n}\n} else {\nreturn HiveMetastoreApiConverter.toHudiTable(table, catalogName);\n}\n}\n@Override\npublic boolean tableExists(String dbName, String tableName) {\nreturn client.tableExists(dbName, tableName);\n}\n@Override\npublic List<String> getPartitionKeysByValue(String dbName, String tableName, List<Optional<String>> partitionValues) {\nif (partitionValues.isEmpty()) {\nreturn client.getPartitionKeys(dbName, tableName);\n} else {\nList<String> partitionValuesStr = partitionValues.stream()\n.map(v -> v.orElse(\"\")).collect(Collectors.toList());\nreturn client.getPartitionKeysByValue(dbName, tableName, partitionValuesStr);\n}\n}\n@Override\npublic boolean partitionExists(Table table, List<String> partitionValues) {\nHiveTable hiveTable = (HiveTable) table;\nString dbName = hiveTable.getDbName();\nString tableName = hiveTable.getTableName();\nif (metastoreType == MetastoreType.GLUE && hiveTable.hasBooleanTypePartitionColumn()) {\nList<String> allPartitionNames = client.getPartitionKeys(dbName, tableName);\nString hivePartitionName = toHivePartitionName(hiveTable.getPartitionColumnNames(), partitionValues);\nreturn allPartitionNames.contains(hivePartitionName);\n} else {\nreturn !client.getPartitionKeysByValue(dbName, tableName, partitionValues).isEmpty();\n}\n}\n@Override\npublic Partition getPartition(String dbName, String tblName, List<String> partitionValues) {\nStorageDescriptor sd;\nMap<String, String> params;\nif (partitionValues.size() > 0) {\norg.apache.hadoop.hive.metastore.api.Partition partition =\nclient.getPartition(dbName, tblName, partitionValues);\nsd = partition.getSd();\nparams = partition.getParameters();\n} else {\norg.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName);\nsd = table.getSd();\nparams = table.getParameters();\n}\nreturn HiveMetastoreApiConverter.toPartition(sd, params);\n}\npublic Map<String, Partition> 
getPartitionsByNames(String dbName, String tblName, List<String> partitionNames) {\nList<org.apache.hadoop.hive.metastore.api.Partition> partitions = new ArrayList<>();\nfor (int start = 0; start < partitionNames.size(); start += Config.max_hive_partitions_per_rpc) {\nint end = Math.min(start + Config.max_hive_partitions_per_rpc, partitionNames.size());\nList<String> namesPerRPC = partitionNames.subList(start, end);\nList<org.apache.hadoop.hive.metastore.api.Partition> partsPerRPC =\nclient.getPartitionsByNames(dbName, tblName, namesPerRPC);\npartitions.addAll(partsPerRPC);\n}\nMap<String, List<String>> partitionNameToPartitionValues = partitionNames.stream()\n.collect(Collectors.toMap(Function.identity(), PartitionUtil::toPartitionValues));\nMap<List<String>, Partition> partitionValuesToPartition = partitions.stream()\n.collect(Collectors.toMap(\norg.apache.hadoop.hive.metastore.api.Partition::getValues,\npartition -> HiveMetastoreApiConverter.toPartition(partition.getSd(), partition.getParameters())));\nImmutableMap.Builder<String, Partition> resultBuilder = ImmutableMap.builder();\nfor (Map.Entry<String, List<String>> entry : partitionNameToPartitionValues.entrySet()) {\nPartition partition = partitionValuesToPartition.get(entry.getValue());\nresultBuilder.put(entry.getKey(), partition);\n}\nreturn resultBuilder.build();\n}\n@Override\npublic void addPartitions(String dbName, String tableName, List<HivePartitionWithStats> partitions) {\nList<org.apache.hadoop.hive.metastore.api.Partition> hivePartitions = partitions.stream()\n.map(HiveMetastoreApiConverter::toMetastoreApiPartition)\n.collect(Collectors.toList());\nclient.addPartitions(dbName, tableName, hivePartitions);\n}\n@Override\npublic void dropPartition(String dbName, String tableName, List<String> partValues, boolean deleteData) {\nclient.dropPartition(dbName, tableName, partValues, deleteData);\n}\npublic void updateTableStatistics(String dbName, String tableName, Function<HivePartitionStats, HivePartitionStats> update) {\norg.apache.hadoop.hive.metastore.api.Table originTable = client.getTable(dbName, tableName);\nif (originTable == null) {\nthrow new StarRocksConnectorException(\"Table '%s.%s' not found\", dbName, tableName);\n}\norg.apache.hadoop.hive.metastore.api.Table newTable = originTable.deepCopy();\nHiveCommonStats curCommonStats = toHiveCommonStats(originTable.getParameters());\nHivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>());\nHivePartitionStats updatedStats = update.apply(curPartitionStats);\nHiveCommonStats commonStats = updatedStats.getCommonStats();\nMap<String, String> originParams = newTable.getParameters();\noriginParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000));\nnewTable.setParameters(updateStatisticsParameters(originParams, commonStats));\nclient.alterTable(dbName, tableName, newTable);\n}\npublic void updatePartitionStatistics(String dbName, String tableName, String partitionName,\nFunction<HivePartitionStats, HivePartitionStats> update) {\nList<org.apache.hadoop.hive.metastore.api.Partition> partitions = client.getPartitionsByNames(\ndbName, tableName, ImmutableList.of(partitionName));\nif (partitions.size() != 1) {\nthrow new StarRocksConnectorException(\"Metastore returned multiple partitions for name: \" + partitionName);\n}\norg.apache.hadoop.hive.metastore.api.Partition originPartition = getOnlyElement(partitions);\nHiveCommonStats curCommonStats = toHiveCommonStats(originPartition.getParameters());\nHivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>());\nHivePartitionStats updatedStats = update.apply(curPartitionStats);\norg.apache.hadoop.hive.metastore.api.Partition modifiedPartition = originPartition.deepCopy();\nHiveCommonStats commonStats = updatedStats.getCommonStats();\nMap<String, String> originParams = modifiedPartition.getParameters();\noriginParams.put(TRANSIENT_LAST_DDL_TIME, 
String.valueOf(System.currentTimeMillis() / 1000));\nmodifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), commonStats));\nclient.alterPartition(dbName, tableName, modifiedPartition);\n}\npublic Map<String, HivePartitionStats> getPartitionStatistics(Table table, List<String> partitionNames) {\nHiveMetaStoreTable hmsTbl = (HiveMetaStoreTable) table;\nString dbName = hmsTbl.getDbName();\nString tblName = hmsTbl.getTableName();\nList<String> dataColumns = hmsTbl.getDataColumnNames();\nMap<String, Partition> partitions = getPartitionsByNames(hmsTbl.getDbName(), hmsTbl.getTableName(), partitionNames);\nMap<String, HiveCommonStats> partitionCommonStats = partitions.entrySet().stream()\n.collect(toImmutableMap(Map.Entry::getKey, entry -> toHiveCommonStats(entry.getValue().getParameters())));\nMap<String, Long> partitionRowNums = partitionCommonStats.entrySet().stream()\n.collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().getRowNums()));\nImmutableMap.Builder<String, HivePartitionStats> resultBuilder = ImmutableMap.builder();\nMap<String, List<ColumnStatisticsObj>> partitionNameToColumnStatsObj =\nclient.getPartitionColumnStats(dbName, tblName, partitionNames, dataColumns);\nMap<String, Map<String, HiveColumnStats>> partitionColumnStats = HiveMetastoreApiConverter\n.toPartitionColumnStatistics(partitionNameToColumnStatsObj, partitionRowNums);\nfor (String partitionName : partitionCommonStats.keySet()) {\nHiveCommonStats commonStats = partitionCommonStats.get(partitionName);\nMap<String, HiveColumnStats> columnStatistics = partitionColumnStats\n.getOrDefault(partitionName, ImmutableMap.of());\nresultBuilder.put(partitionName, new HivePartitionStats(commonStats, columnStatistics));\n}\nreturn resultBuilder.build();\n}\npublic long getCurrentEventId() {\nreturn client.getCurrentNotificationEventId().getEventId();\n}\npublic NotificationEventResponse getNextEventResponse(long lastSyncedEventId, String catalogName,\nfinal boolean getAllEvents)\nthrows MetastoreNotificationFetchException {\ntry {\nint batchSize = getAllEvents ? -1 : Config.hms_events_batch_size_per_rpc;\nNotificationEventResponse response = client.getNextNotification(lastSyncedEventId, batchSize, null);\nif (response.getEvents().size() == 0) {\nLOG.info(\"Event size is 0 when pulling events on catalog [{}]\", catalogName);\nreturn null;\n}\nLOG.info(String.format(\"Received %d events. Start event id : %d. Last synced id : %d on catalog : %s\",\nresponse.getEvents().size(), response.getEvents().get(0).getEventId(),\nlastSyncedEventId, catalogName));\nreturn response;\n} catch (MetastoreNotificationFetchException e) {\nLOG.error(\"Unable to fetch notifications from metastore. Last synced event id is {}\", lastSyncedEventId, e);\nthrow new MetastoreNotificationFetchException(\"Unable to fetch notifications from metastore. 
\" +\n\"Last synced event id is \" + lastSyncedEventId, e);\n}\n}\n}", + "context_after": "class HiveMetastore implements IHiveMetastore {\nprivate static final Logger LOG = LogManager.getLogger(CachingHiveMetastore.class);\nprivate final HiveMetaClient client;\nprivate final String catalogName;\nprivate final MetastoreType metastoreType;\npublic HiveMetastore(HiveMetaClient client, String catalogName, MetastoreType metastoreType) {\nthis.client = client;\nthis.catalogName = catalogName;\nthis.metastoreType = metastoreType;\n}\n@Override\npublic List getAllDatabaseNames() {\nreturn client.getAllDatabaseNames();\n}\n@Override\npublic void createDb(String dbName, Map properties) {\nString location = properties.getOrDefault(LOCATION_PROPERTY, \"\");\nlong dbId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt();\nDatabase database = new Database(dbId, dbName, location);\nclient.createDatabase(HiveMetastoreApiConverter.toMetastoreApiDatabase(database));\n}\n@Override\npublic void dropDb(String dbName, boolean deleteData) {\nclient.dropDatabase(dbName, deleteData);\n}\n@Override\npublic List getAllTableNames(String dbName) {\nreturn client.getAllTableNames(dbName);\n}\n@Override\npublic Database getDb(String dbName) {\norg.apache.hadoop.hive.metastore.api.Database db = client.getDb(dbName);\nreturn HiveMetastoreApiConverter.toDatabase(db);\n}\n@Override\npublic void createTable(String dbName, Table table) {\norg.apache.hadoop.hive.metastore.api.Table hiveTable = toMetastoreApiTable((HiveTable) table);\nclient.createTable(hiveTable);\n}\n@Override\npublic void dropTable(String dbName, String tableName) {\nclient.dropTable(dbName, tableName);\n}\npublic Table getTable(String dbName, String tableName) {\norg.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tableName);\nStorageDescriptor sd = table.getSd();\nif (sd == null) {\nthrow new StarRocksConnectorException(\"Table is missing storage descriptor\");\n}\nif (!HiveMetastoreApiConverter.isHudiTable(table.getSd().getInputFormat())) {\nvalidateHiveTableType(table.getTableType());\nif (AcidUtils.isFullAcidTable(table)) {\nthrow new StarRocksConnectorException(\nString.format(\"%s.%s is a hive transactional table(full acid), sr didn't support it yet\", dbName,\ntableName));\n}\nif (table.getTableType().equalsIgnoreCase(\"VIRTUAL_VIEW\")) {\nreturn HiveMetastoreApiConverter.toHiveView(table, catalogName);\n} else {\nreturn HiveMetastoreApiConverter.toHiveTable(table, catalogName);\n}\n} else {\nreturn HiveMetastoreApiConverter.toHudiTable(table, catalogName);\n}\n}\n@Override\npublic boolean tableExists(String dbName, String tableName) {\nreturn client.tableExists(dbName, tableName);\n}\n@Override\npublic List getPartitionKeysByValue(String dbName, String tableName, List> partitionValues) {\nif (partitionValues.isEmpty()) {\nreturn client.getPartitionKeys(dbName, tableName);\n} else {\nList partitionValuesStr = partitionValues.stream()\n.map(v -> v.orElse(\"\")).collect(Collectors.toList());\nreturn client.getPartitionKeysByValue(dbName, tableName, partitionValuesStr);\n}\n}\n@Override\npublic boolean partitionExists(Table table, List partitionValues) {\nHiveTable hiveTable = (HiveTable) table;\nString dbName = hiveTable.getDbName();\nString tableName = hiveTable.getTableName();\nif (metastoreType == MetastoreType.GLUE && hiveTable.hasBooleanTypePartitionColumn()) {\nList allPartitionNames = client.getPartitionKeys(dbName, tableName);\nString hivePartitionName = toHivePartitionName(hiveTable.getPartitionColumnNames(), 
partitionValues);\nreturn allPartitionNames.contains(hivePartitionName);\n} else {\nreturn !client.getPartitionKeysByValue(dbName, tableName, partitionValues).isEmpty();\n}\n}\n@Override\npublic Partition getPartition(String dbName, String tblName, List partitionValues) {\nStorageDescriptor sd;\nMap params;\nif (partitionValues.size() > 0) {\norg.apache.hadoop.hive.metastore.api.Partition partition =\nclient.getPartition(dbName, tblName, partitionValues);\nsd = partition.getSd();\nparams = partition.getParameters();\n} else {\norg.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName);\nsd = table.getSd();\nparams = table.getParameters();\n}\nreturn HiveMetastoreApiConverter.toPartition(sd, params);\n}\npublic Map getPartitionsByNames(String dbName, String tblName, List partitionNames) {\nList partitions = new ArrayList<>();\nfor (int start = 0; start < partitionNames.size(); start += Config.max_hive_partitions_per_rpc) {\nint end = Math.min(start + Config.max_hive_partitions_per_rpc, partitionNames.size());\nList namesPerRPC = partitionNames.subList(start, end);\nList partsPerRPC =\nclient.getPartitionsByNames(dbName, tblName, namesPerRPC);\npartitions.addAll(partsPerRPC);\n}\nMap> partitionNameToPartitionValues = partitionNames.stream()\n.collect(Collectors.toMap(Function.identity(), PartitionUtil::toPartitionValues));\nMap, Partition> partitionValuesToPartition = partitions.stream()\n.collect(Collectors.toMap(\norg.apache.hadoop.hive.metastore.api.Partition::getValues,\npartition -> HiveMetastoreApiConverter.toPartition(partition.getSd(), partition.getParameters())));\nImmutableMap.Builder resultBuilder = ImmutableMap.builder();\nfor (Map.Entry> entry : partitionNameToPartitionValues.entrySet()) {\nPartition partition = partitionValuesToPartition.get(entry.getValue());\nresultBuilder.put(entry.getKey(), partition);\n}\nreturn resultBuilder.build();\n}\n@Override\npublic void addPartitions(String dbName, String tableName, List partitions) {\nList hivePartitions = partitions.stream()\n.map(HiveMetastoreApiConverter::toMetastoreApiPartition)\n.collect(Collectors.toList());\nclient.addPartitions(dbName, tableName, hivePartitions);\n}\n@Override\npublic void dropPartition(String dbName, String tableName, List partValues, boolean deleteData) {\nclient.dropPartition(dbName, tableName, partValues, deleteData);\n}\npublic void updateTableStatistics(String dbName, String tableName, Function update) {\norg.apache.hadoop.hive.metastore.api.Table originTable = client.getTable(dbName, tableName);\nif (originTable == null) {\nthrow new StarRocksConnectorException(\"Table '%s.%s' not found\", dbName, tableName);\n}\norg.apache.hadoop.hive.metastore.api.Table newTable = originTable.deepCopy();\nHiveCommonStats curCommonStats = toHiveCommonStats(originTable.getParameters());\nHivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>());\nHivePartitionStats updatedStats = update.apply(curPartitionStats);\nHiveCommonStats commonStats = updatedStats.getCommonStats();\nMap originParams = newTable.getParameters();\noriginParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000));\nnewTable.setParameters(updateStatisticsParameters(originParams, commonStats));\nclient.alterTable(dbName, tableName, newTable);\n}\npublic void updatePartitionStatistics(String dbName, String tableName, String partitionName,\nFunction update) {\nList partitions = client.getPartitionsByNames(\ndbName, tableName, 
ImmutableList.of(partitionName));\nif (partitions.size() != 1) {\nthrow new StarRocksConnectorException(\"Metastore returned multiple partitions for name: \" + partitionName);\n}\norg.apache.hadoop.hive.metastore.api.Partition originPartition = getOnlyElement(partitions);\nHiveCommonStats curCommonStats = toHiveCommonStats(originPartition.getParameters());\nHivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>());\nHivePartitionStats updatedStats = update.apply(curPartitionStats);\norg.apache.hadoop.hive.metastore.api.Partition modifiedPartition = originPartition.deepCopy();\nHiveCommonStats commonStats = updatedStats.getCommonStats();\nMap originParams = modifiedPartition.getParameters();\noriginParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000));\nmodifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), commonStats));\nclient.alterPartition(dbName, tableName, modifiedPartition);\n}\npublic Map getPartitionStatistics(Table table, List partitionNames) {\nHiveMetaStoreTable hmsTbl = (HiveMetaStoreTable) table;\nString dbName = hmsTbl.getDbName();\nString tblName = hmsTbl.getTableName();\nList dataColumns = hmsTbl.getDataColumnNames();\nMap partitions = getPartitionsByNames(hmsTbl.getDbName(), hmsTbl.getTableName(), partitionNames);\nMap partitionCommonStats = partitions.entrySet().stream()\n.collect(toImmutableMap(Map.Entry::getKey, entry -> toHiveCommonStats(entry.getValue().getParameters())));\nMap partitionRowNums = partitionCommonStats.entrySet().stream()\n.collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().getRowNums()));\nImmutableMap.Builder resultBuilder = ImmutableMap.builder();\nMap> partitionNameToColumnStatsObj =\nclient.getPartitionColumnStats(dbName, tblName, partitionNames, dataColumns);\nMap> partitionColumnStats = HiveMetastoreApiConverter\n.toPartitionColumnStatistics(partitionNameToColumnStatsObj, partitionRowNums);\nfor (String partitionName : partitionCommonStats.keySet()) {\nHiveCommonStats commonStats = partitionCommonStats.get(partitionName);\nMap columnStatistics = partitionColumnStats\n.getOrDefault(partitionName, ImmutableMap.of());\nresultBuilder.put(partitionName, new HivePartitionStats(commonStats, columnStatistics));\n}\nreturn resultBuilder.build();\n}\npublic long getCurrentEventId() {\nreturn client.getCurrentNotificationEventId().getEventId();\n}\npublic NotificationEventResponse getNextEventResponse(long lastSyncedEventId, String catalogName,\nfinal boolean getAllEvents)\nthrows MetastoreNotificationFetchException {\ntry {\nint batchSize = getAllEvents ? -1 : Config.hms_events_batch_size_per_rpc;\nNotificationEventResponse response = client.getNextNotification(lastSyncedEventId, batchSize, null);\nif (response.getEvents().size() == 0) {\nLOG.info(\"Event size is 0 when pulling events on catalog [{}]\", catalogName);\nreturn null;\n}\nLOG.info(String.format(\"Received %d events. Start event id : %d. Last synced id : %d on catalog : %s\",\nresponse.getEvents().size(), response.getEvents().get(0).getEventId(),\nlastSyncedEventId, catalogName));\nreturn response;\n} catch (MetastoreNotificationFetchException e) {\nLOG.error(\"Unable to fetch notifications from metastore. Last synced event id is {}\", lastSyncedEventId, e);\nthrow new MetastoreNotificationFetchException(\"Unable to fetch notifications from metastore. \" +\n\"Last synced event id is \" + lastSyncedEventId, e);\n}\n}\n}" + }, + { + "comment": "yea flattening. yes we can. 
will add that fix. ", + "method_body": "private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {\nswitch (peek(lookahead + 1).kind) {\ncase IDENTIFIER_TOKEN:\nSyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;\nswitch (tokenAfterIdentifier) {\ncase ON_KEYWORD:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\ncase QUESTION_MARK_TOKEN:\nreturn false;\ndefault:\nreturn false;\n}\ncase ON_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse listener declaration, given the qualifier.\n*
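* For illustration (hypothetical example), a declaration matched by the listener-decl rule below:\n* public listener http:Listener securedEP = new (9090);\n*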
\n* \n* listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;\n* \n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @return Parsed node\n*/\nprivate STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.LISTENER_DECL);\nSTNode listenerKeyword = parseListenerKeyword();\nif (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode listenerDecl =\nparseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);\nendContext();\nreturn listenerDecl;\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse listener keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseListenerKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LISTENER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse constant declaration, given the qualifier.\n*
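* For illustration (hypothetical example), a declaration matched by the module-const-decl rule below:\n* public const int MAX_COUNT = 100;\n*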
\n* module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @return Parsed node\n*/\nprivate STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.CONSTANT_DECL);\nSTNode constKeyword = parseConstantKeyword();\nSTNode constDecl = parseConstDecl(metadata, qualifier, constKeyword);\nendContext();\nreturn constDecl;\n}\n/**\n* Parse the components that follows after the const keyword of a constant declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {\nSTToken nextToken = peek();\nreturn parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword);\n}\nprivate STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode keyword) {\nswitch (nextTokenKind) {\ncase ANNOTATION_KEYWORD:\nswitchContext(ParserRuleContext.ANNOTATION_DECL);\nreturn parseAnnotationDeclaration(metadata, qualifier, keyword);\ncase IDENTIFIER_TOKEN:\nreturn parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, keyword, false);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, keyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseConstDeclFromType(solution.tokenKind, metadata, qualifier, keyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\nprivate STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nboolean isListener) {\nSTNode varNameOrTypeName = parseStatementStartIdentifier();\nSTNode constDecl =\nparseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);\nreturn constDecl;\n}\n/**\n* Parse the component that follows the first identifier in a const decl. 
The identifier\n* can be either the type-name (a user defined type) or the var-name where the type-name\n* is not present.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param keyword Keyword\n* @param typeOrVarName Identifier that follows the const-keyword\n* @return Parsed node\n*/\nprivate STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,\nSTNode typeOrVarName, boolean isListener) {\nif (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode type = typeOrVarName;\nSTNode variableName = parseVariableName();\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nSTToken token = peek();\nreturn parseConstantOrListenerDeclRhs(token.kind, metadata, qualifier, keyword, typeOrVarName, isListener);\n}\nprivate STNode parseConstantOrListenerDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode keyword, STNode typeOrVarName, boolean isListener) {\nSTNode type;\nSTNode variableName;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ntype = typeOrVarName;\nvariableName = parseVariableName();\nbreak;\ncase EQUAL_TOKEN:\nvariableName = ((STSimpleNameReferenceNode) typeOrVarName).name;\ntype = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword,\ntypeOrVarName, isListener);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseConstantOrListenerDeclRhs(solution.tokenKind, metadata, qualifier, keyword, typeOrVarName,\nisListener);\n}\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nprivate STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,\nSTNode type, STNode variableName) {\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nif (isListener) {\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse const keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseConstantKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONST_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONST_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse nil type descriptor.\n*
\n* nil-type-descriptor := ( ) \n*
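* For illustration (hypothetical example), the nil type can appear in source as\n* function reset() returns () { }\n*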
\n*\n* @return Parsed node\n*/\nprivate STNode parseNilTypeDescriptor() {\nstartContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken);\n}\n/**\n* Parse typeof expression.\n*
\n* \n* typeof-expr := typeof expression\n* \n*\n* @param isRhsExpr Whether this is a RHS expression or not\n* @return Typeof expression node\n*/\nprivate STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode typeofKeyword = parseTypeofKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);\n}\n/**\n* Parse typeof-keyword.\n*\n* @return Typeof-keyword node\n*/\nprivate STNode parseTypeofKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPEOF_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse optional type descriptor.\n*
\n* optional-type-descriptor := type-descriptor ? \n*
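* For illustration (hypothetical example): int? count = ();\n*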
\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {\nstartContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);\nSTNode questionMarkToken = parseQuestionMark();\nendContext();\nreturn STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken);\n}\n/**\n* Parse unary expression.\n*
\n* \n* unary-expr := + expression | - expression | ~ expression | ! expression\n* \n*\n* @param isRhsExpr\n* @return Unary expression node\n*/\nprivate STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode unaryOperator = parseUnaryOperator();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);\n}\n/**\n* Parse unary operator.\n* UnaryOperator := + | - | ~ | !\n*\n* @return Parsed node\n*/\nprivate STNode parseUnaryOperator() {\nSTToken token = peek();\nif (isUnaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.UNARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a unary operator.\n*\n* @param kind STToken kind\n* @return true if the token kind refers to a unary operator. false otherwise\n*/\nprivate boolean isUnaryOperator(SyntaxKind kind) {\nswitch (kind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse array type descriptor.\n*
\n* \n* array-type-descriptor := member-type-descriptor [ [ array-length ] ]\n* member-type-descriptor := type-descriptor\n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* inferred-array-length := *\n* \n*
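* For illustration (hypothetical examples): int[3], string[*], and json[] are all\n* matched by this rule.\n*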
\n*\n* @param memberTypeDesc Member type descriptor of the array\n*\n* @return Parsed node\n*/\nprivate STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {\nstartContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);\nSTNode openBracketToken = parseOpenBracket();\nSTNode arrayLengthNode = parseArrayLength();\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode,\ncloseBracketToken);\n}\n/**\n* Parse array length.\n*
\n* \n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* constant-reference-expr := variable-reference-expr\n* \n*
\n*\n* @return Parsed array length\n*/\nprivate STNode parseArrayLength() {\nSTToken token = peek();\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);\ndefault:\nSolution sol = recover(token, ParserRuleContext.ARRAY_LENGTH);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse annotations.\n*
\n* Note: In the ballerina spec ({@link https:\n* annotations-list is specified as one-or-more annotations, and its usage is marked as\n* an optional annotations-list. However, for the consistency of the tree, here we model\n* the annotations-list as zero-or-more annotations, and make its usage non-optional.\n*
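* In effect, parseAnnotations() always returns a node list (possibly empty), so\n* callers never need a separate null-check for absent annotations.\n*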
\n* annots := annotation*\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotations() {\nSTToken nextToken = peek();\nreturn parseAnnotations(nextToken.kind);\n}\nprivate STNode parseAnnotations(SyntaxKind nextTokenKind) {\nstartContext(ParserRuleContext.ANNOTATIONS);\nList<STNode> annotList = new ArrayList<>();\nwhile (nextTokenKind == SyntaxKind.AT_TOKEN) {\nannotList.add(parseAnnotation());\nnextTokenKind = peek().kind;\n}\nendContext();\nreturn STNodeFactory.createNodeList(annotList);\n}\n/**\n* Parse annotation attachment.\n*
\n* annotation := @ annot-tag-reference annot-value\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotation() {\nSTNode atToken = parseAtToken();\nSTNode annotReference;\nif (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {\nannotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n} else {\nannotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);\n}\nSTNode annotValue;\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nannotValue = parseMappingConstructorExpr();\n} else {\nannotValue = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);\n}\n/**\n* Parse '@' token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAtToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.AT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.AT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse metadata. Metadata consists of an optional doc string and\n* an annotations list.\n*
\n* metadata := [DocumentationString] annots\n*\n* @return Parse node\n*/\nprivate STNode parseMetaData(SyntaxKind nextTokenKind) {\nSTNode docString;\nSTNode annotations;\nswitch (nextTokenKind) {\ncase DOCUMENTATION_STRING:\ndocString = parseMarkdownDocumentation();\nannotations = parseAnnotations();\nbreak;\ncase AT_TOKEN:\ndocString = STNodeFactory.createEmptyNode();\nannotations = parseAnnotations(nextTokenKind);\nbreak;\ndefault:\nreturn createEmptyMetadata();\n}\nreturn STNodeFactory.createMetadataNode(docString, annotations);\n}\n/**\n* Create empty metadata node.\n*\n* @return A metadata node with no doc string and no annotations\n*/\nprivate STNode createEmptyMetadata() {\nreturn STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createEmptyNodeList());\n}\n/**\n* Parse is expression.\n* \n* is-expr := expression is type-descriptor\n* \n*\n* @param lhsExpr Preceding expression of the is expression\n* @return Is expression node\n*/\nprivate STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode isKeyword = parseIsKeyword();\nSTNode typeDescriptor =\nparseTypeDescriptorInExpression(ParserRuleContext.TYPE_DESC_IN_EXPRESSION, isInConditionalExpr);\nreturn STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor);\n}\n/**\n* Parse is-keyword.\n*\n* @return Is-keyword node\n*/\nprivate STNode parseIsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse local type definition statement statement.\n* ocal-type-defn-stmt := [annots] type identifier type-descriptor ;\n*\n* @return local type definition statement statement\n*/\nprivate STNode parseLocalTypeDefinitionStatement(STNode annots) {\nstartContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);\nSTNode typeKeyword = parseTypeKeyword();\nSTNode typeName = parseTypeName();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,\nsemicolon);\n}\n/**\n* Parse statement which is only consists of an action or expression.\n*\n* @param annots Annotations\n* @param nextTokenKind Next token kind\n* @return Statement node\n*/\nprivate STNode parseExpressionStatement(SyntaxKind nextTokenKind, STNode annots) {\nstartContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode expression = parseActionOrExpressionInLhs(nextTokenKind, annots);\nreturn getExpressionAsStatement(expression);\n}\n/**\n* Parse statements that starts with an expression.\n*\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExpr(STNode annots) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode expr = parseActionOrExpressionInLhs(peek().kind, annots);\nreturn parseStatementStartWithExprRhs(expr);\n}\n/**\n* Parse rhs of statements that starts with an expression.\n*\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExprRhs(STNode expression) {\nSTToken nextToken = peek();\nreturn parseStatementStartWithExprRhs(nextToken.kind, expression);\n}\n/**\n* Parse the component followed by the expression, at the beginning of a statement.\n*\n* @param nextTokenKind Kind of the next token\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExprRhs(SyntaxKind nextTokenKind, STNode expression) 
{\nswitch (nextTokenKind) {\ncase EQUAL_TOKEN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(expression);\ncase SEMICOLON_TOKEN:\nreturn getExpressionAsStatement(expression);\ncase IDENTIFIER_TOKEN:\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn parseCompoundAssignmentStmtRhs(expression);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, expression);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatementStartWithExprRhs(solution.tokenKind, expression);\n}\n}\nprivate STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {\nSTNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);\nSTNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;\nif (lengthExprs.isEmpty()) {\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, indexedExpr.openBracket,\nSTNodeFactory.createEmptyNode(), indexedExpr.closeBracket);\n}\nSTNode lengthExpr = lengthExprs.get(0);\nswitch (lengthExpr.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ndefault:\nSTNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(\nindexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\nindexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);\nlengthExpr = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, indexedExpr.openBracket, lengthExpr,\nindexedExpr.closeBracket);\n}\nprivate STNode getExpressionAsStatement(STNode expression) {\nswitch (expression.kind) {\ncase METHOD_CALL:\ncase FUNCTION_CALL:\ncase CHECK_EXPRESSION:\nreturn parseCallStatement(expression);\ncase REMOTE_METHOD_CALL_ACTION:\ncase CHECK_ACTION:\ncase BRACED_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\nreturn parseActionStatement(expression);\ndefault:\nSTNode semicolon = parseSemicolon();\nendContext();\nSTNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,\nexpression, semicolon);\nexprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);\nreturn exprStmt;\n}\n}\n/**\n*
\n* Parse call statement, given the call expression.\n*
\n* \n* call-stmt := call-expr ;\n*
\n* call-expr := function-call-expr | method-call-expr | checking-keyword call-expr\n*
\n*\n* @param expression Call expression associated with the call statement\n* @return Call statement node\n*/\nprivate STNode parseCallStatement(STNode expression) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);\n}\n/**\n* Check whether a node is a missing node.\n*\n* @param node Node to check\n* @return true if the node is a missing node. false otherwise\n*/\nprivate boolean isMissingNode(STNode node) {\nif (node.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn isMissingNode(((STSimpleNameReferenceNode) node).name);\n}\nreturn node instanceof STMissingToken;\n}\nprivate STNode parseActionStatement(STNode action) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);\n}\n/**\n* Parse remote method call action, given the starting expression.\n*
\n* \n* remote-method-call-action := expression -> method-name ( arg-list )\n*
\n* async-send-action := expression -> peer-worker ;\n*
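* For illustration (hypothetical examples): clientEP->get(\"/greet\") is a remote\n* method call, and msg -> w1; is an async send to worker w1.\n*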
\n*\n* @param isRhsExpr Is this an RHS action\n* @param expression LHS expression\n* @return\n*/\nprivate STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {\nSTNode rightArrow = parseRightArrow();\nreturn parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n}\nprivate STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {\nreturn parseRemoteCallOrAsyncSendActionRhs(peek().kind, expression, isRhsExpr, rightArrow);\n}\nprivate STNode parseRemoteCallOrAsyncSendActionRhs(SyntaxKind nextTokenKind, STNode expression, boolean isRhsExpr,\nSTNode rightArrow) {\nSTNode name;\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\nname = parseDefaultKeyword();\nreturn parseAsyncSendAction(expression, rightArrow, name);\ncase IDENTIFIER_TOKEN:\nname = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());\nbreak;\ncase CONTINUE_KEYWORD:\ncase COMMIT_KEYWORD:\nname = getKeywordAsSimpleNameRef();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression,\nisRhsExpr, rightArrow);\nif (solution.action == Action.REMOVE) {\nname = solution.recoveredNode;\nbreak;\n}\nreturn parseRemoteCallOrAsyncSendActionRhs(solution.tokenKind, expression, isRhsExpr, rightArrow);\n}\nreturn parseRemoteCallOrAsyncSendEnd(peek().kind, expression, rightArrow, name);\n}\nprivate STNode parseRemoteCallOrAsyncSendEnd(SyntaxKind nextTokenKind, STNode expression, STNode rightArrow,\nSTNode name) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseRemoteMethodCallAction(expression, rightArrow, name);\ncase SEMICOLON_TOKEN:\nreturn parseAsyncSendAction(expression, rightArrow, name);\ndefault:\nSTToken token = peek();\nSolution solution =\nrecover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRemoteCallOrAsyncSendEnd(solution.tokenKind, expression, rightArrow, name);\n}\n}\n/**\n* Parse default keyword.\n*\n* @return default keyword node\n*/\nprivate STNode parseDefaultKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DEFAULT_KEYWORD) {\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\n} else {\nSolution sol = recover(token, ParserRuleContext.DEFAULT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {\nreturn STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);\n}\nprivate STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {\nSTNode openParenToken = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode arguments = parseArgsList();\nSTNode closeParenToken = parseCloseParenthesis();\nreturn STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,\ncloseParenToken);\n}\n/**\n* Parse right arrow (->) token.\n*\n* @return Parsed node\n*/\nprivate STNode parseRightArrow() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse parameterized type descriptor.\n* parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter\n*\n* @return Parsed node\n*/\nprivate STNode 
parseParameterizedTypeDescriptor() {\nSTNode parameterizedTypeKeyword = parseParameterizedTypeKeyword();\nSTNode ltToken = parseLTToken();\nSTNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, ltToken, typeNode,\ngtToken);\n}\n/**\n* Parse map or future keyword token.\n*\n* @return Parsed node\n*/\nprivate STNode parseParameterizedTypeKeyword() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\nreturn consume();\ndefault:\nSolution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse < token.\n*\n* @return Parsed node\n*/\nprivate STNode parseGTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.GT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.GT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse > token.\n*\n* @return Parsed node\n*/\nprivate STNode parseLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.LT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse nil literal. Here nil literal is only referred to ( ).\n*\n* @return Parsed node\n*/\nprivate STNode parseNilLiteral() {\nstartContext(ParserRuleContext.NIL_LITERAL);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);\n}\n/**\n* Parse annotation declaration, given the qualifier.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {\nstartContext(ParserRuleContext.ANNOTATION_DECL);\nSTNode annotationKeyword = parseAnnotationKeyword();\nSTNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\nendContext();\nreturn annotDecl;\n}\n/**\n* Parse annotation keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ANNOTATION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse the components that follows after the annotation keyword of a annotation declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @param annotationKeyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTToken nextToken = peek();\nreturn parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword);\n}\nprivate STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode constKeyword, STNode annotationKeyword) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nbreak;\n}\nSTToken token = 
peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier,\nconstKeyword, annotationKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword,\nannotationKeyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\n/**\n* Parse annotation tag.\n*
\n* annot-tag := identifier\n*\n* @return\n*/\nprivate STNode parseAnnotationTag() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.ANNOTATION_TAG);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);\nif (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag, annotTag);\n}\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {\nSTNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,\nParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nSTNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;\nreturn parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);\n}\n/**\n* Parse the component that follows the first identifier in an annotation decl. The identifier\n* can be either the type-name (a user defined type) or the annot-tag, where the type-name\n* is not present.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation decl\n* @param constKeyword Const keyword\n* @param annotationKeyword Annotation keyword\n* @param typeDescOrAnnotTag Identifier that follows the annotation-keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDescOrAnnotTag) {\nSTToken token = peek();\nreturn parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag);\n}\nprivate STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) {\nSTNode typeDesc;\nSTNode annotTag;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ntypeDesc = typeDescOrAnnotTag;\nannotTag = parseAnnotationTag();\nbreak;\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ntypeDesc = STNodeFactory.createEmptyNode();\nannotTag = typeDescOrAnnotTag;\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword,\nannotationKeyword, typeDescOrAnnotTag);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag);\n}\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nprivate STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDesc, STNode annotTag) {\nSTToken nextToken = peek();\nreturn parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDesc, annotTag);\n}\nprivate STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode 
constKeyword, STNode annotationKeyword, STNode typeDesc,\nSTNode annotTag) {\nSTNode onKeyword;\nSTNode attachPoints;\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nonKeyword = STNodeFactory.createEmptyNode();\nattachPoints = STNodeFactory.createEmptyNodeList();\nbreak;\ncase ON_KEYWORD:\nonKeyword = parseOnKeyword();\nattachPoints = parseAnnotationAttachPoints();\nonKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier,\nconstKeyword, annotationKeyword, typeDesc, annotTag);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword,\nannotationKeyword, typeDesc, annotTag);\n}\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDesc, annotTag, onKeyword, attachPoints, semicolonToken);\n}\n/**\n* Parse annotation attach points.\n*
\n* \n* annot-attach-points := annot-attach-point (, annot-attach-point)*\n*
\n* annot-attach-point := dual-attach-point | source-only-attach-point\n*
\n* dual-attach-point := [source] dual-attach-point-ident\n*
\n* dual-attach-point-ident :=\n* [object] type\n* | [object|resource] function\n* | parameter\n* | return\n* | service\n* | [object|record] field\n*
\n* source-only-attach-point := source source-only-attach-point-ident\n*
\n* source-only-attach-point-ident :=\n* annotation\n* | external\n* | var\n* | const\n* | listener\n* | worker\n*
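* For illustration (hypothetical example): annotation Conf on function, source var;\n* declares an annotation usable on functions and, in source form, on variables.\n*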
\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoints() {\nstartContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);\nList attachPoints = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndAnnotAttachPointList(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode attachPoint = parseAnnotationAttachPoint();\nattachPoints.add(attachPoint);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndAnnotAttachPointList(nextToken.kind)) {\nleadingComma = parseAttachPointEnd();\nif (leadingComma == null) {\nbreak;\n}\nattachPoints.add(leadingComma);\nattachPoint = parseAnnotationAttachPoint();\nif (attachPoint == null) {\nattachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nattachPoints.add(attachPoint);\nbreak;\n}\nattachPoints.add(attachPoint);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(attachPoints);\n}\n/**\n* Parse annotation attach point end.\n*\n* @return Parsed node\n*/\nprivate STNode parseAttachPointEnd() {\nSTToken nextToken = peek();\nreturn parseAttachPointEnd(nextToken.kind);\n}\nprivate STNode parseAttachPointEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nreturn null;\ncase COMMA_TOKEN:\nreturn consume();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null;\n}\n}\nprivate boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse annotation attach point.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoint() {\nreturn parseAnnotationAttachPoint(peek().kind);\n}\nprivate STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\nreturn null;\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\ncase SOURCE_KEYWORD:\nSTNode sourceKeyword = parseSourceKeyword();\nreturn parseAttachPointIdent(sourceKeyword);\ncase OBJECT_KEYWORD:\ncase TYPE_KEYWORD:\ncase RESOURCE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ncase RECORD_KEYWORD:\nsourceKeyword = STNodeFactory.createEmptyNode();\nSTNode firstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ATTACH_POINT);\nreturn solution.recoveredNode;\n}\n}\n/**\n* Parse source keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseSourceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SOURCE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SOURCE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse attach point ident gievn.\n*
\n* \n* source-only-attach-point-ident := annotation | external | var | const | listener | worker\n*
\n* dual-attach-point-ident := [object] type | [object|resource] function | parameter\n* | return | service | [object|record] field\n*
\n*\n* @param sourceKeyword Source keyword\n* @return Parsed node\n*/\nprivate STNode parseAttachPointIdent(STNode sourceKeyword) {\nreturn parseAttachPointIdent(peek().kind, sourceKeyword);\n}\nprivate STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) {\nswitch (nextTokenKind) {\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\nSTNode firstIdent = consume();\nSTNode secondIdent = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);\ncase OBJECT_KEYWORD:\ncase RESOURCE_KEYWORD:\ncase RECORD_KEYWORD:\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\nfirstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nfirstIdent = solution.recoveredNode;\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\n}\n}\n/**\n* Parse dual-attach-point ident.\n*\n* @param sourceKeyword Source keyword\n* @param firstIdent first part of the dual attach-point\n* @return Parsed node\n*/\nprivate STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {\nSTNode secondIdent;\nswitch (firstIdent.kind) {\ncase OBJECT_KEYWORD:\nsecondIdent = parseIdentAfterObjectIdent();\nbreak;\ncase RESOURCE_KEYWORD:\nsecondIdent = parseFunctionIdent();\nbreak;\ncase RECORD_KEYWORD:\nsecondIdent = parseFieldIdent();\nbreak;\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ndefault:\nsecondIdent = STNodeFactory.createEmptyNode();\nbreak;\n}\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);\n}\n/**\n* Parse the idents that are supported after object-ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseIdentAfterObjectIdent() {\nSTToken token = peek();\nswitch (token.kind) {\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase FIELD_KEYWORD:\nreturn consume();\ndefault:\nSolution sol = recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse function ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNCTION_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FIELD_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FIELD_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse XML namespace declaration.\n*
\n* xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;\n*
\n* xml-namespace-uri := simple-const-expr\n*
\n* xml-namespace-prefix := identifier\n*
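* For illustration (hypothetical example): xmlns \"http://example.com/ns\" as ns0;\n*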
\n*\n* @return\n*/\nprivate STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {\nstartContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);\nSTNode xmlnsKeyword = parseXMLNSKeyword();\nSTNode namespaceUri = parseXMLNamespaceUri();\nSTNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\nendContext();\nreturn xmlnsDecl;\n}\n/**\n* Parse xmlns keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNSKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XMLNS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.XMLNS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse namespace uri.\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamespaceUri() {\nSTNode expr = parseSimpleConstExpr();\nswitch (expr.kind) {\ncase STRING_LITERAL:\ncase IDENTIFIER_TOKEN:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ndefault:\nexpr = SyntaxErrors.addDiagnostic(expr, DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);\n}\nreturn expr;\n}\nprivate STNode parseSimpleConstExpr() {\nstartContext(ParserRuleContext.CONSTANT_EXPRESSION);\nSTNode expr = parseSimpleConstExprInternal();\nendContext();\nreturn expr;\n}\nprivate STNode parseSimpleConstExprInternal() {\nSTToken nextToken = peek();\nreturn parseConstExprInternal(nextToken.kind);\n}\n/**\n* Parse constants expr.\n*\n* @return Parsed node\n*/\nprivate STNode parseConstExprInternal(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase STRING_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nreturn parseBasicLiteral();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn parseSignedIntOrFloat();\ncase OPEN_PAREN_TOKEN:\nreturn parseNilLiteral();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);\nreturn solution.recoveredNode;\n}\n}\n/**\n* Parse the portion after the namsepsace-uri of an XML declaration.\n*\n* @param xmlnsKeyword XMLNS keyword\n* @param namespaceUri Namespace URI\n* @return Parsed node\n*/\nprivate STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {\nreturn parseXMLDeclRhs(peek().kind, xmlnsKeyword, namespaceUri, isModuleVar);\n}\nprivate STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri,\nboolean isModuleVar) {\nSTNode asKeyword = STNodeFactory.createEmptyNode();\nSTNode namespacePrefix = STNodeFactory.createEmptyNode();\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\nasKeyword = parseAsKeyword();\nnamespacePrefix = parseNamespacePrefix();\nbreak;\ncase SEMICOLON_TOKEN:\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword,\nnamespaceUri, isModuleVar);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri, isModuleVar);\n}\nSTNode semicolon = parseSemicolon();\nif (isModuleVar) {\nreturn STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,\nnamespacePrefix, semicolon);\n}\nreturn STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,\nsemicolon);\n}\n/**\n* Parse import prefix.\n*\n* @return Parsed node\n*/\nprivate STNode 
parseNamespacePrefix() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse named worker declaration.\n*
\n* named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }\n*\n* @param annots Annotations attached to the worker decl\n* @return Parsed node\n*/\nprivate STNode parseNamedWorkerDeclaration(STNode annots) {\nstartContext(ParserRuleContext.NAMED_WORKER_DECL);\nSTNode workerKeyword = parseWorkerKeyword();\nSTNode workerName = parseWorkerName();\nSTNode returnTypeDesc = parseReturnTypeDescriptor();\nSTNode workerBody = parseBlockNode();\nendContext();\nreturn STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc,\nworkerBody);\n}\nprivate STNode parseReturnTypeDescriptor() {\nSTToken token = peek();\nif (token.kind != SyntaxKind.RETURNS_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode returnsKeyword = consume();\nSTNode annot = parseAnnotations();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\nreturn STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n}\n/**\n* Parse worker keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse worker name.\n*
\n* worker-name := identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerName() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.WORKER_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse lock statement.\n* lock-stmt := lock block-stmt ;\n*\n* @return Lock statement\n*/\nprivate STNode parseLockStatement() {\nstartContext(ParserRuleContext.LOCK_STMT);\nSTNode lockKeyword = parseLockKeyword();\nSTNode blockStatement = parseBlockNode();\nendContext();\nreturn STNodeFactory.createLockStatementNode(lockKeyword, blockStatement);\n}\n/**\n* Parse lock-keyword.\n*\n* @return lock-keyword node\n*/\nprivate STNode parseLockKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LOCK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LOCK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse union type descriptor.\n* union-type-descriptor := type-descriptor | type-descriptor\n*\n* @param leftTypeDesc Type desc in the LHS os the union type desc.\n* @param context Current context.\n* @return parsed union type desc node\n*/\nprivate STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeToken = parsePipeToken();\nSTNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);\nreturn STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);\n}\n/**\n* Parse pipe token.\n*\n* @return parsed pipe token node\n*/\nprivate STNode parsePipeToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PIPE);\nreturn sol.recoveredNode;\n}\n}\nprivate boolean isTypeStartingToken(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase IDENTIFIER_TOKEN:\ncase SERVICE_KEYWORD:\ncase RECORD_KEYWORD:\ncase OBJECT_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CLIENT_KEYWORD:\ncase OPEN_PAREN_TOKEN:\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TABLE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase DISTINCT_KEYWORD:\nreturn true;\ndefault:\nif (isSingletonTypeDescStart(nodeKind, true)) {\nreturn true;\n}\nreturn isSimpleType(nodeKind);\n}\n}\nstatic boolean isSimpleType(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase INT_KEYWORD:\ncase FLOAT_KEYWORD:\ncase DECIMAL_KEYWORD:\ncase BOOLEAN_KEYWORD:\ncase STRING_KEYWORD:\ncase BYTE_KEYWORD:\ncase XML_KEYWORD:\ncase JSON_KEYWORD:\ncase HANDLE_KEYWORD:\ncase ANY_KEYWORD:\ncase ANYDATA_KEYWORD:\ncase NEVER_KEYWORD:\ncase SERVICE_KEYWORD:\ncase VAR_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase READONLY_KEYWORD:\ncase DISTINCT_KEYWORD:\nreturn true;\ncase TYPE_DESC:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {\nswitch (typeKeyword) {\ncase INT_KEYWORD:\nreturn SyntaxKind.INT_TYPE_DESC;\ncase FLOAT_KEYWORD:\nreturn SyntaxKind.FLOAT_TYPE_DESC;\ncase DECIMAL_KEYWORD:\nreturn SyntaxKind.DECIMAL_TYPE_DESC;\ncase BOOLEAN_KEYWORD:\nreturn SyntaxKind.BOOLEAN_TYPE_DESC;\ncase STRING_KEYWORD:\nreturn SyntaxKind.STRING_TYPE_DESC;\ncase BYTE_KEYWORD:\nreturn SyntaxKind.BYTE_TYPE_DESC;\ncase XML_KEYWORD:\nreturn SyntaxKind.XML_TYPE_DESC;\ncase JSON_KEYWORD:\nreturn SyntaxKind.JSON_TYPE_DESC;\ncase HANDLE_KEYWORD:\nreturn 
SyntaxKind.HANDLE_TYPE_DESC;\ncase ANY_KEYWORD:\nreturn SyntaxKind.ANY_TYPE_DESC;\ncase ANYDATA_KEYWORD:\nreturn SyntaxKind.ANYDATA_TYPE_DESC;\ncase READONLY_KEYWORD:\nreturn SyntaxKind.READONLY_TYPE_DESC;\ncase NEVER_KEYWORD:\nreturn SyntaxKind.NEVER_TYPE_DESC;\ncase SERVICE_KEYWORD:\nreturn SyntaxKind.SERVICE_TYPE_DESC;\ncase VAR_KEYWORD:\nreturn SyntaxKind.VAR_TYPE_DESC;\ndefault:\nreturn SyntaxKind.TYPE_DESC;\n}\n}\n/**\n* Parse fork-keyword.\n*\n* @return Fork-keyword node\n*/\nprivate STNode parseForkKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FORK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FORK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse fork statement.\n* fork-stmt := fork { named-worker-decl+ }\n*\n* @return Fork statement\n*/\nprivate STNode parseForkStatement() {\nstartContext(ParserRuleContext.FORK_STMT);\nSTNode forkKeyword = parseForkKeyword();\nSTNode openBrace = parseOpenBrace();\nArrayList workers = new ArrayList<>();\nwhile (!isEndOfStatements()) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nswitch (stmt.kind) {\ncase NAMED_WORKER_DECLARATION:\nworkers.add(stmt);\nbreak;\ndefault:\nif (workers.isEmpty()) {\nopenBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n} else {\nupdateLastNodeInListWithInvalidNode(workers, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n}\n}\n}\nSTNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(namedWorkerDeclarations, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);\nreturn STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);\n}\n/**\n* Parse trap expression.\n*

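Aside: nearly every `parseXKeyword` method in this class (`parseLockKeyword` and `parseForkKeyword` above, `parseTrapKeyword` below) repeats one shape: peek at the next token, consume it on a match, otherwise hand control to the recovery machinery. A minimal standalone sketch of that shape, with plain strings standing in for the real `STToken` and `Solution` types (an illustration only, not parser code):

```java
import java.util.List;

// Toy illustration of the peek/consume/recover shape; recovery here is
// simplified to "synthesize the missing keyword" so parsing can continue.
class KeywordParseDemo {
    private final List<String> tokens;
    private int pos = 0;

    KeywordParseDemo(List<String> tokens) { this.tokens = tokens; }

    String parseKeyword(String expected) {
        String next = pos < tokens.size() ? tokens.get(pos) : "<eof>"; // peek()
        if (next.equals(expected)) {
            pos++;                                                     // consume()
            return next;
        }
        // recover(): rather than aborting, return a synthetic node that a
        // diagnostic can later be attached to.
        return "<missing:" + expected + ">";
    }

    public static void main(String[] args) {
        KeywordParseDemo p = new KeywordParseDemo(List.of("lock", "{", "}"));
        System.out.println(p.parseKeyword("lock")); // lock
        System.out.println(p.parseKeyword("fork")); // <missing:fork>
    }
}
```

Keeping every keyword parser on this one shape is what lets the recovery engine treat "missing keyword" uniformly across contexts.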
\n* \n* trap-expr := trap expression\n* \n*\n* @param isRhsExpr Whether this is a RHS expression or not\n* @param allowActions Whether actions are allowed as the sub-expression\n* @param isInConditionalExpr Whether this expression occurs within a conditional expression\n* @return Trap expression node\n*/\nprivate STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode trapKeyword = parseTrapKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);\n}\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);\n}\n/**\n* Parse trap-keyword.\n*\n* @return Trap-keyword node\n*/\nprivate STNode parseTrapKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRAP_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TRAP_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse list constructor expression.\n*

\n* \n* list-constructor-expr := [ [ expr-list ] ]\n*
\n* expr-list := expression (, expression)*\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseListConstructorExpr() {\nstartContext(ParserRuleContext.LIST_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode expressions = parseOptionalExpressionsList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);\n}\n/**\n* Parse optional expression list.\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalExpressionsList() {\nList expressions = new ArrayList<>();\nif (isEndOfListConstructor(peek().kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode expr = parseExpression();\nexpressions.add(expr);\nreturn parseOptionalExpressionsList(expressions);\n}\nprivate STNode parseOptionalExpressionsList(List expressions) {\nSTNode listConstructorMemberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nlistConstructorMemberEnd = parseListConstructorMemberEnd();\nif (listConstructorMemberEnd == null) {\nbreak;\n}\nexpressions.add(listConstructorMemberEnd);\nSTNode expr = parseExpression();\nexpressions.add(expr);\n}\nreturn STNodeFactory.createNodeList(expressions);\n}\nprivate boolean isEndOfListConstructor(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseListConstructorMemberEnd() {\nreturn parseListConstructorMemberEnd(peek().kind);\n}\nprivate STNode parseListConstructorMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListConstructorMemberEnd(solution.tokenKind);\n}\n}\n/**\n* Parse foreach statement.\n* foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt\n*\n* @return foreach statement\n*/\nprivate STNode parseForEachStatement() {\nstartContext(ParserRuleContext.FOREACH_STMT);\nSTNode forEachKeyword = parseForEachKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);\nSTNode inKeyword = parseInKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nSTNode blockStatement = parseBlockNode();\nendContext();\nreturn STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,\nblockStatement);\n}\n/**\n* Parse foreach-keyword.\n*\n* @return ForEach-keyword node\n*/\nprivate STNode parseForEachKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FOREACH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FOREACH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse in-keyword.\n*\n* @return In-keyword node\n*/\nprivate STNode parseInKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse type cast expression.\n*

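The loop in `parseOptionalExpressionsList` above keeps the expressions and their comma separators in one flat node list, with `isEndOfListConstructor` deciding when to stop. A toy standalone version of that member/separator loop (it assumes well-formed input; the real code falls back to recovery instead):

```java
import java.util.ArrayList;
import java.util.List;

// Toy version of the member/separator loop: elements and commas go into one
// flat list, mirroring how the syntax-tree node lists interleave members
// with separator tokens.
class SeparatedListDemo {
    public static void main(String[] args) {
        String[] tokens = {"1", ",", "2", ",", "3", "]"};
        List<String> flat = new ArrayList<>();
        int pos = 0;
        flat.add(tokens[pos++]);                // first expression
        while (!tokens[pos].equals("]")) {      // end-of-list check
            flat.add(tokens[pos++]);            // comma separator node
            flat.add(tokens[pos++]);            // next expression
        }
        System.out.println(flat);               // [1, ,, 2, ,, 3]
    }
}
```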
\n* \n* type-cast-expr := < type-cast-param > expression\n*
\n* type-cast-param := [annots] type-descriptor | annots\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nstartContext(ParserRuleContext.TYPE_CAST);\nSTNode ltToken = parseLTToken();\nSTNode typeCastParam = parseTypeCastParam();\nSTNode gtToken = parseGTToken();\nendContext();\nSTNode expression =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nreturn STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);\n}\nprivate STNode parseTypeCastParam() {\nSTNode annot;\nSTNode type;\nSTToken token = peek();\nswitch (token.kind) {\ncase AT_TOKEN:\nannot = parseAnnotations();\ntoken = peek();\nif (isTypeStartingToken(token.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n} else {\ntype = STNodeFactory.createEmptyNode();\n}\nbreak;\ndefault:\nannot = STNodeFactory.createEmptyNode();\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nbreak;\n}\nreturn STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);\n}\n/**\n* Parse table constructor expression.\n*

\n* \n* table-constructor-expr-rhs := [ [row-list] ]\n* \n*\n* @param tableKeyword tableKeyword that precedes this rhs\n* @param keySpecifier keySpecifier that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {\nswitchContext(ParserRuleContext.TABLE_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode rowList = parseRowList();\nSTNode closeBracket = parseCloseBracket();\nreturn STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,\ncloseBracket);\n}\n/**\n* Parse table-keyword.\n*\n* @return Table-keyword node\n*/\nprivate STNode parseTableKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TABLE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TABLE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse table rows.\n*

\n* row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseRowList() {\nSTToken nextToken = peek();\nif (isEndOfTableRowList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList mappings = new ArrayList<>();\nSTNode mapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfTableRowList(nextToken.kind)) {\nleadingComma = parseComma();\nmappings.add(leadingComma);\nmapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(mappings);\n}\nprivate boolean isEndOfTableRowList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ncase COMMA_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn false;\ndefault:\nreturn isEndOfMappingConstructor(tokenKind);\n}\n}\n/**\n* Parse key specifier.\n*

\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier() {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode keyKeyword = parseKeyKeyword();\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode fieldNames = parseFieldNames();\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);\n}\n/**\n* Parse key-keyword.\n*\n* @return Key-keyword node\n*/\nprivate STNode parseKeyKeyword() {\nSTToken token = peek();\nif (isKeyKeyword(token)) {\nreturn getKeyKeyword(consume());\n} else {\nSolution sol = recover(token, ParserRuleContext.KEY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nstatic boolean isKeyKeyword(STToken token) {\nreturn token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());\n}\nprivate STNode getKeyKeyword(STToken token) {\nreturn STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),\ntoken.diagnostics());\n}\n/**\n* Parse field names.\n*

\n* field-name-list := [ field-name (, field-name)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldNames() {\nSTToken nextToken = peek();\nif (isEndOfFieldNamesList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList fieldNames = new ArrayList<>();\nSTNode fieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfFieldNamesList(nextToken.kind)) {\nleadingComma = parseComma();\nfieldNames.add(leadingComma);\nfieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(fieldNames);\n}\nprivate boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase IDENTIFIER_TOKEN:\nreturn false;\ndefault:\nreturn true;\n}\n}\n/**\n* Parse error type descriptor.\n*

\n* error-type-descriptor := error [error-type-param]\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*

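The `error-type-param` rule above needs only a single token of lookahead after `<`: an asterisk selects the inferred branch, anything else is parsed as a full detail type descriptor (see `parseErrorTypeParamsNode` below). A toy classifier showing just that branch choice:

```java
// Toy one-token lookahead mirroring parseErrorTypeParamsNode's branch.
class ErrorTypeParamDemo {
    static String classify(String tokenAfterLt) {
        return "*".equals(tokenAfterLt) ? "inferred-type-descriptor"
                                        : "detail-type-descriptor";
    }

    public static void main(String[] args) {
        System.out.println(classify("*"));      // inferred-type-descriptor
        System.out.println(classify("string")); // detail-type-descriptor
    }
}
```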
\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeDescriptor() {\nSTNode errorKeywordToken = parseErrorKeyword();\nSTNode errorTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nerrorTypeParamsNode = parseErrorTypeParamsNode();\n} else {\nerrorTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);\n}\n/**\n* Parse error type param node.\n*

\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*

\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nSTNode parameter;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nparameter = consume();\n} else {\nparameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n}\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken);\n}\n/**\n* Parse error-keyword.\n*\n* @return Parsed error-keyword node\n*/\nprivate STNode parseErrorKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ERROR_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ERROR_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse typedesc type descriptor.\n* typedesc-type-descriptor := typedesc type-parameter\n*\n* @return Parsed typedesc type node\n*/\nprivate STNode parseTypedescTypeDescriptor() {\nSTNode typedescKeywordToken = parseTypedescKeyword();\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse typedesc-keyword.\n*\n* @return Parsed typedesc-keyword node\n*/\nprivate STNode parseTypedescKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPEDESC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPEDESC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse stream type descriptor.\n*

\n* stream-type-descriptor := stream [stream-type-parameters]\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*

\n*\n* @return Parsed stream type descriptor node\n*/\nprivate STNode parseStreamTypeDescriptor() {\nSTNode streamKeywordToken = parseStreamKeyword();\nSTNode streamTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nstreamTypeParamsNode = parseStreamTypeParamsNode();\n} else {\nstreamTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);\n}\n/**\n* Parse xml type descriptor.\n* xml-type-descriptor := xml type-parameter\n*\n* @return Parsed typedesc type node\n*/\nprivate STNode parseXmlTypeDescriptor() {\nSTNode xmlKeywordToken = parseXMLKeyword();\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse stream type params node.\n*

\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*

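After the first type parameter, `parseStreamTypeParamsNode` below decides between `stream<T>` and `stream<T, C>` on a single token: a comma means a completion type follows, `>` means it is absent, and anything else goes to recovery. A toy version of that decision:

```java
// Toy version of the comma/'>' branch in parseStreamTypeParamsNode.
class StreamParamsDemo {
    static String afterFirstParam(String nextToken) {
        switch (nextToken) {
            case ",": return "parse completion type, then '>'";
            case ">": return "no completion type";
            default:  return "recover"; // hand off to error recovery
        }
    }

    public static void main(String[] args) {
        System.out.println(afterFirstParam(",")); // parse completion type, then '>'
        System.out.println(afterFirstParam(">")); // no completion type
    }
}
```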
\n*\n* @return Parsed stream type params node\n*/\nprivate STNode parseStreamTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nstartContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\nSTNode leftTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false);\nSTNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\nendContext();\nreturn streamTypedesc;\n}\nprivate STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {\nreturn parseStreamTypeParamsNode(peek().kind, ltToken, leftTypeDescNode);\n}\nprivate STNode parseStreamTypeParamsNode(SyntaxKind nextTokenKind, STNode ltToken, STNode leftTypeDescNode) {\nSTNode commaToken, rightTypeDescNode, gtToken;\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\ncommaToken = parseComma();\nrightTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false);\nbreak;\ncase GT_TOKEN:\ncommaToken = STNodeFactory.createEmptyNode();\nrightTypeDescNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution =\nrecover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStreamTypeParamsNode(solution.tokenKind, ltToken, leftTypeDescNode);\n}\ngtToken = parseGTToken();\nreturn STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,\ngtToken);\n}\n/**\n* Parse stream-keyword.\n*\n* @return Parsed stream-keyword node\n*/\nprivate STNode parseStreamKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STREAM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STREAM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let expression.\n*

\n* \n* let-expr := let let-var-decl [, let-var-decl]* in expression\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetExpression(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);\nSTNode inKeyword = parseInKeyword();\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);\n}\n/**\n* Parse let-keyword.\n*\n* @return Let-keyword node\n*/\nprivate STNode parseLetKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LET_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LET_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let variable declarations.\n*

\n* let-var-decl-list := let-var-decl [, let-var-decl]*\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {\nstartContext(context);\nList varDecls = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfLetVarDeclarations(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode varDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfLetVarDeclarations(nextToken.kind)) {\nleadingComma = parseComma();\nvarDecls.add(leadingComma);\nvarDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(varDecls);\n}\nprivate boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase AT_TOKEN:\nreturn false;\ncase IN_KEYWORD:\nreturn true;\ndefault:\nreturn !isTypeStartingToken(tokenKind);\n}\n}\n/**\n* Parse let variable declaration.\n*

\n* let-var-decl := [annots] typed-binding-pattern = expression\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDecl(boolean isRhsExpr) {\nSTNode annot = parseAnnotations();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);\nSTNode assign = parseAssignOp();\nSTNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);\nreturn STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);\n}\n/**\n* Parse raw backtick string template expression.\n*

\n* BacktickString := `expression`\n*\n* @return Template expression node\n*/\nprivate STNode parseTemplateExpression() {\nSTNode type = STNodeFactory.createEmptyNode();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\nprivate STNode parseTemplateContent() {\nList<STNode> items = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nitems.add(contentItem);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\nprivate boolean isEndOfBacktickContent(SyntaxKind kind) {\nswitch (kind) {\ncase EOF_TOKEN:\ncase BACKTICK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTemplateItem() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn parseInterpolation();\n}\nreturn consume();\n}\n/**\n* Parse string template expression.\n*

\n* string-template-expr := string ` expression `\n*\n* @return String template expression node\n*/\nprivate STNode parseStringTemplateExpression() {\nSTNode type = parseStringKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\n/**\n* Parse string keyword.\n*\n* @return string keyword node\n*/\nprivate STNode parseStringKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STRING_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STRING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse XML template expression.\n*

\n* xml-template-expr := xml BacktickString\n*\n* @return XML template expression\n*/\nprivate STNode parseXMLTemplateExpression() {\nSTNode xmlKeyword = parseXMLKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContentAsXML();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,\nstartingBackTick, content, endingBackTick);\n}\n/**\n* Parse xml keyword.\n*\n* @return xml keyword node\n*/\nprivate STNode parseXMLKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XML_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.XML_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse the content of the template string as XML. This method first reads the\n* input in the same way as the raw-backtick-template (BacktickString). Then\n* it parses the content as XML.\n*\n* @return XML node\n*/\nprivate STNode parseTemplateContentAsXML() {\nArrayDeque<STNode> expressions = new ArrayDeque<>();\nStringBuilder xmlStringBuilder = new StringBuilder();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nif (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {\nxmlStringBuilder.append(((STToken) contentItem).text());\n} else {\nxmlStringBuilder.append(\"${}\");\nexpressions.add(contentItem);\n}\nnextToken = peek();\n}\nTextDocument textDocument = TextDocuments.from(xmlStringBuilder.toString());\nAbstractTokenReader tokenReader = new TokenReader(new XMLLexer(textDocument.getCharacterReader()));\nXMLParser xmlParser = new XMLParser(tokenReader, expressions);\nreturn xmlParser.parse();\n}\n/**\n* Parse interpolation of a back-tick string.\n*

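The key trick in `parseTemplateContentAsXML` above is that each interpolation is queued and replaced by a fixed `${}` marker, so the XML lexer sees a plain, well-formed string while the queued expressions are re-attached later in order. A toy standalone version of that substitution:

```java
import java.util.ArrayDeque;

// Toy version of the "${}" placeholder trick: literal chunks are appended
// as-is; interpolated expressions are queued and replaced by a marker.
class XmlTemplateDemo {
    public static void main(String[] args) {
        // Alternating literal chunks and interpolated expressions; a
        // one-element String[] marks an interpolation in this toy model.
        Object[] items = {"<x>", new String[]{"1 + 2"}, "</x>"};
        StringBuilder xml = new StringBuilder();
        ArrayDeque<String> exprs = new ArrayDeque<>();
        for (Object item : items) {
            if (item instanceof String) {
                xml.append((String) item);
            } else {
                xml.append("${}");                 // placeholder marker
                exprs.add(((String[]) item)[0]);   // queue the expression
            }
        }
        System.out.println(xml);   // <x>${}</x>
        System.out.println(exprs); // [1 + 2]
    }
}
```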
\n* \n* interpolation := ${ expression }\n* \n*\n* @return Interpolation node\n*/\nprivate STNode parseInterpolation() {\nstartContext(ParserRuleContext.INTERPOLATION);\nSTNode interpolStart = parseInterpolationStart();\nSTNode expr = parseExpression();\nwhile (true) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.EOF_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nbreak;\n} else {\nnextToken = consume();\nexpr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,\nDiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());\n}\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);\n}\n/**\n* Parse interpolation start token.\n*

\n* interpolation-start := ${\n*\n* @return Interpolation start token\n*/\nprivate STNode parseInterpolationStart() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse back-tick token.\n*\n* @return Back-tick token\n*/\nprivate STNode parseBacktickToken(ParserRuleContext ctx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ctx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse table type descriptor.\n*

\n* table-type-descriptor := table row-type-parameter [key-constraint]\n* row-type-parameter := type-parameter\n* key-constraint := key-specifier | key-type-constraint\n* key-specifier := key ( [ field-name (, field-name)* ] )\n* key-type-constraint := key type-parameter\n*

\n*\n* @return Parsed table type desc node.\n*/\nprivate STNode parseTableTypeDescriptor() {\nSTNode tableKeywordToken = parseTableKeyword();\nSTNode rowTypeParameterNode = parseRowTypeParameter();\nSTNode keyConstraintNode;\nSTToken nextToken = peek();\nif (isKeyKeyword(nextToken)) {\nSTNode keyKeywordToken = getKeyKeyword(consume());\nkeyConstraintNode = parseKeyConstraint(keyKeywordToken);\n} else {\nkeyConstraintNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);\n}\n/**\n* Parse row type parameter node.\n*

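Note that `key` is not a reserved word: `isKeyKeyword` (defined with `parseKeyKeyword` earlier) accepts an identifier token whose text is `key`, and `parseTableTypeDescriptor` above peeks for it to decide whether a key constraint follows the row type parameter. A toy version of that contextual-keyword check:

```java
// Toy contextual-keyword check mirroring isKeyKeyword: 'key' is an ordinary
// identifier that only acts as a keyword in this position.
class ContextualKeyDemo {
    static boolean isKeyKeyword(String tokenKind, String tokenText) {
        return "IDENTIFIER_TOKEN".equals(tokenKind) && "key".equals(tokenText);
    }

    public static void main(String[] args) {
        System.out.println(isKeyKeyword("IDENTIFIER_TOKEN", "key"));  // true
        System.out.println(isKeyKeyword("IDENTIFIER_TOKEN", "keys")); // false
    }
}
```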
\n* row-type-parameter := type-parameter\n*

\n*\n* @return Parsed node.\n*/\nprivate STNode parseRowTypeParameter() {\nstartContext(ParserRuleContext.ROW_TYPE_PARAM);\nSTNode rowTypeParameterNode = parseTypeParameter();\nendContext();\nreturn rowTypeParameterNode;\n}\n/**\n* Parse type parameter node.\n*

\n* type-parameter := < type-descriptor >\n*

\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeParameter() {\nSTNode ltToken = parseLTToken();\nSTNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);\n}\n/**\n* Parse key constraint.\n*

\n* key-constraint := key-specifier | key-type-constraint\n*

\n*\n* @return Parsed node.\n*/\nprivate STNode parseKeyConstraint(STNode keyKeywordToken) {\nreturn parseKeyConstraint(peek().kind, keyKeywordToken);\n}\nprivate STNode parseKeyConstraint(SyntaxKind nextTokenKind, STNode keyKeywordToken) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseKeySpecifier(keyKeywordToken);\ncase LT_TOKEN:\nreturn parseKeyTypeConstraint(keyKeywordToken);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseKeyConstraint(solution.tokenKind, keyKeywordToken);\n}\n}\n/**\n* Parse key specifier given parsed key keyword token.\n*

\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier(STNode keyKeywordToken) {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode openParenToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode fieldNamesNode = parseFieldNames();\nSTNode closeParenToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);\n}\n/**\n* Parse key type constraint.\n*

\n* key-type-constraint := key type-parameter\n*

\n*\n* @return Parsed node\n*/\nprivate STNode parseKeyTypeConstraint(STNode keyKeywordToken) {\nSTNode typeParameterNode = parseTypeParameter();\nreturn STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);\n}\n/**\n* Parse function type descriptor.\n*

\n* function-type-descriptor := function function-signature\n*\n* @return Function type descriptor node\n*/\nprivate STNode parseFunctionTypeDesc() {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode signature = parseFuncSignature(true);\nendContext();\nreturn STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature);\n}\n/**\n* Parse explicit anonymous function expression.\n*

\n* explicit-anonymous-function-expr := [annots] function function-signature anon-func-body\n*\n* @param annots Annotations.\n* @param isRhsExpr Is expression in rhs context\n* @return Anonymous function expression node\n*/\nprivate STNode parseExplicitFunctionExpression(STNode annots, boolean isRhsExpr) {\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nSTNode funcKeyword = parseFunctionKeyword();\nSTNode funcSignature = parseFuncSignature(false);\nSTNode funcBody = parseAnonFuncBody(isRhsExpr);\nreturn STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature,\nfuncBody);\n}\n/**\n* Parse anonymous function body.\n*

\n* anon-func-body := block-function-body | expr-function-body\n*\n* @param isRhsExpr Is expression in rhs context\n* @return Anon function body node\n*/\nprivate STNode parseAnonFuncBody(boolean isRhsExpr) {\nreturn parseAnonFuncBody(peek().kind, isRhsExpr);\n}\nprivate STNode parseAnonFuncBody(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\ncase EOF_TOKEN:\nSTNode body = parseFunctionBodyBlock(true);\nendContext();\nreturn body;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nendContext();\nreturn parseExpressionFuncBody(true, isRhsExpr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnonFuncBody(solution.tokenKind, isRhsExpr);\n}\n}\n/**\n* Parse expression function body.\n*

\n* expr-function-body := => expression\n*\n* @param isAnon Is anonymous function.\n* @param isRhsExpr Is expression in rhs context\n* @return Expression function body node\n*/\nprivate STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTNode semiColon;\nif (isAnon) {\nsemiColon = STNodeFactory.createEmptyNode();\n} else {\nsemiColon = parseSemicolon();\n}\nreturn STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);\n}\n/**\n* Parse '=>' token.\n*\n* @return Double right arrow token\n*/\nprivate STNode parseDoubleRightArrow() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {\nswitch (params.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase INFER_PARAM_LIST:\nbreak;\ncase BRACED_EXPRESSION:\nparams = getAnonFuncParam((STBracedExpressionNode) params);\nbreak;\ndefault:\nparams = SyntaxErrors.addDiagnostic(params,\nDiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);\n}\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);\n}\n/**\n* Create a new anon-func-param node from a braced expression.\n*\n* @param params Braced expression\n* @return Anon-func param node\n*/\nprivate STNode getAnonFuncParam(STBracedExpressionNode params) {\nList paramList = new ArrayList<>();\nparamList.add(params.expression);\nreturn STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen,\nSTNodeFactory.createNodeList(paramList), params.closeParen);\n}\n/**\n* Parse implicit anon function expression.\n*\n* @param openParen Open parenthesis token\n* @param firstParam First parameter\n* @param isRhsExpr Is expression in rhs context\n* @return Implicit anon function expression node\n*/\nprivate STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {\nList paramList = new ArrayList<>();\nparamList.add(firstParam);\nSTToken nextToken = peek();\nSTNode paramEnd;\nSTNode param;\nwhile (!isEndOfAnonFuncParametersList(nextToken.kind)) {\nparamEnd = parseImplicitAnonFuncParamEnd(nextToken.kind);\nif (paramEnd == null) {\nbreak;\n}\nparamList.add(paramEnd);\nparam = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);\nparam = STNodeFactory.createSimpleNameReferenceNode(param);\nparamList.add(param);\nnextToken = peek();\n}\nSTNode params = STNodeFactory.createNodeList(paramList);\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nSTNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nreturn parseImplicitAnonFunc(inferedParams, isRhsExpr);\n}\nprivate STNode parseImplicitAnonFuncParamEnd() {\nreturn parseImplicitAnonFuncParamEnd(peek().kind);\n}\nprivate STNode parseImplicitAnonFuncParamEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);\nif (solution.action == Action.REMOVE) {\nreturn 
solution.recoveredNode;\n}\nreturn parseImplicitAnonFuncParamEnd(solution.tokenKind);\n}\n}\nprivate boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse tuple type descriptor.\n*

\n* tuple-type-descriptor := [ tuple-member-type-descriptors ]\n*

\n* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]\n* | [ tuple-rest-descriptor ]\n*

\n* tuple-rest-descriptor := type-descriptor ...\n*
\n*\n* @return Parsed tuple type descriptor node\n*/\nprivate STNode parseTupleTypeDesc() {\nSTNode openBracket = parseOpenBracket();\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode memberTypeDesc = parseTupleMemberTypeDescList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nopenBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,\nDiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);\n}\n/**\n* Parse tuple member type descriptors.\n*\n* @return Parsed node\n*/\nprivate STNode parseTupleMemberTypeDescList() {\nList<STNode> typeDescList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfTypeList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode typeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nreturn parseTupleTypeMembers(typeDesc, typeDescList);\n}\nprivate STNode parseTupleTypeMembers(STNode typeDesc, List<STNode> typeDescList) {\nSTToken nextToken;\nnextToken = peek();\nSTNode tupleMemberRhs;\nwhile (!isEndOfTypeList(nextToken.kind)) {\ntupleMemberRhs = parseTupleMemberRhs(nextToken.kind);\nif (tupleMemberRhs == null) {\nbreak;\n}\nif (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) {\ntypeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);\nbreak;\n}\ntypeDescList.add(typeDesc);\ntypeDescList.add(tupleMemberRhs);\ntypeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nnextToken = peek();\n}\ntypeDescList.add(typeDesc);\nreturn STNodeFactory.createNodeList(typeDescList);\n}\nprivate STNode parseTupleMemberRhs() {\nreturn parseTupleMemberRhs(peek().kind);\n}\nprivate STNode parseTupleMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ncase ELLIPSIS_TOKEN:\nreturn parseEllipsis();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTupleMemberRhs(solution.tokenKind);\n}\n}\nprivate boolean isEndOfTypeList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse table constructor or query expression.\n*

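In `parseTupleTypeMembers` above, an ellipsis does not start a new member: it wraps the pending member into a rest descriptor and terminates the loop. A toy standalone version of that branch:

```java
import java.util.ArrayList;
import java.util.List;

// Toy version of the ellipsis branch in parseTupleTypeMembers: '...' wraps
// the pending member into a rest descriptor and ends the member list.
class TupleRestDemo {
    public static void main(String[] args) {
        String[] tokens = {"int", ",", "string", "...", "]"};
        List<String> members = new ArrayList<>();
        String pending = tokens[0];
        for (int pos = 1; !tokens[pos].equals("]"); pos++) {
            if (tokens[pos].equals("...")) {
                pending = pending + "...";   // rest-descriptor node
                break;
            }
            if (tokens[pos].equals(",")) {
                members.add(pending);
                pending = tokens[++pos];     // next member type
            }
        }
        members.add(pending);
        System.out.println(members);         // [int, string...]
    }
}
```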
\n* \n* table-constructor-or-query-expr := table-constructor-expr | query-expr\n*
\n* table-constructor-expr := table [key-specifier] [ [row-list] ]\n*
\n* query-expr := [query-construct-type] query-pipeline select-clause\n* [query-construct-type] query-pipeline select-clause on-conflict-clause? limit-clause?\n*
\n* query-construct-type := table key-specifier | stream\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorOrQuery(boolean isRhsExpr) {\nstartContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);\nSTNode tableOrQueryExpr = parseTableConstructorOrQuery(peek().kind, isRhsExpr);\nendContext();\nreturn tableOrQueryExpr;\n}\nprivate STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nSTNode queryConstructType;\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nqueryConstructType = STNodeFactory.createEmptyNode();\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase STREAM_KEYWORD:\nqueryConstructType = parseQueryConstructType(parseStreamKeyword(), null);\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase TABLE_KEYWORD:\nSTNode tableKeyword = parseTableKeyword();\nreturn parseTableConstructorOrQuery(tableKeyword, isRhsExpr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQuery(solution.tokenKind, isRhsExpr);\n}\n}\nprivate STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseTableConstructorOrQuery(nextToken.kind, nextToken, tableKeyword, isRhsExpr);\n}\nprivate STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, STToken nextToken, STNode tableKeyword,\nboolean isRhsExpr) {\nSTNode keySpecifier;\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nkeySpecifier = STNodeFactory.createEmptyNode();\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ncase KEY_KEYWORD:\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\ncase IDENTIFIER_TOKEN:\nif (isKeyKeyword(nextToken)) {\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n}\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQuery(solution.tokenKind, null, tableKeyword, isRhsExpr);\n}\n}\nprivate STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {\nreturn parseTableConstructorOrQueryRhs(peek().kind, tableKeyword, keySpecifier, isRhsExpr);\n}\nprivate STNode parseTableConstructorOrQueryRhs(SyntaxKind nextTokenKind, STNode tableKeyword, STNode keySpecifier,\nboolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nreturn parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword,\nkeySpecifier, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQueryRhs(solution.tokenKind, tableKeyword, keySpecifier, isRhsExpr);\n}\n}\n/**\n* Parse query construct type.\n*

\n* query-construct-type := table key-specifier | stream\n*\n* @return Parsed node\n*/\nprivate STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {\nreturn STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);\n}\n/**\n* Parse query expression.\n*

\n* \n* query-expr-rhs := query-pipeline select-clause\n* query-pipeline select-clause on-conflict-clause? limit-clause?\n*
\n* query-pipeline := from-clause intermediate-clause*\n*
\n*\n* @param queryConstructType queryConstructType that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {\nswitchContext(ParserRuleContext.QUERY_EXPRESSION);\nSTNode fromClause = parseFromClause(isRhsExpr);\nList clauses = new ArrayList<>();\nSTNode intermediateClause;\nSTNode selectClause = null;\nwhile (!isEndOfIntermediateClause(peek().kind, SyntaxKind.NONE)) {\nintermediateClause = parseIntermediateClause(isRhsExpr);\nif (intermediateClause == null) {\nbreak;\n}\nif (selectClause != null) {\nselectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,\nDiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);\ncontinue;\n}\nif (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {\nselectClause = intermediateClause;\n} else {\nclauses.add(intermediateClause);\n}\n}\nif (peek().kind == SyntaxKind.DO_KEYWORD) {\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nreturn parseQueryAction(queryPipeline, selectClause, isRhsExpr);\n}\nif (selectClause == null) {\nSTNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);\nSTNode expr = STNodeFactory\n.createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\nselectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);\nif (clauses.isEmpty()) {\nfromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\n} else {\nint lastIndex = clauses.size() - 1;\nSTNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),\nDiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\nclauses.set(lastIndex, intClauseWithDiagnostic);\n}\n}\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nSTNode onConflictClause = parseOnConflictClause(isRhsExpr);\nSTNode limitClause = parseLimitClause(isRhsExpr);\nreturn STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,\nonConflictClause, limitClause);\n}\n/**\n* Parse limit keyword.\n*\n* @return Limit keyword node\n*/\nprivate STNode parseLimitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LIMIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LIMIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse an intermediate clause.\n*

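The clause loop in `parseQueryExprRhs` above accumulates intermediate clauses until the select clause appears; anything parsed after select is attached as an error rather than silently kept in the pipeline. A toy standalone version of that bookkeeping:

```java
import java.util.ArrayList;
import java.util.List;

// Toy version of the clause loop: clauses before select build the pipeline,
// select is captured once, and later clauses become diagnostics.
class QueryClauseDemo {
    public static void main(String[] args) {
        String[] clauses = {"from", "where", "select", "where"};
        List<String> pipeline = new ArrayList<>();
        List<String> errors = new ArrayList<>();
        String select = null;
        for (String c : clauses) {
            if (select != null) {
                errors.add("clause after select: " + c);
            } else if (c.equals("select")) {
                select = c;
            } else {
                pipeline.add(c);
            }
        }
        // [from, where] select [clause after select: where]
        System.out.println(pipeline + " " + select + " " + errors);
    }
}
```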
\n* \n* intermediate-clause := from-clause | where-clause | let-clause\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseIntermediateClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseIntermediateClause(nextToken.kind, isRhsExpr);\n}\nprivate STNode parseIntermediateClause(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nreturn parseFromClause(isRhsExpr);\ncase WHERE_KEYWORD:\nreturn parseWhereClause(isRhsExpr);\ncase LET_KEYWORD:\nreturn parseLetClause(isRhsExpr);\ncase SELECT_KEYWORD:\nreturn parseSelectClause(isRhsExpr);\ncase JOIN_KEYWORD:\ncase OUTER_KEYWORD:\nreturn parseJoinClause(isRhsExpr);\ncase DO_KEYWORD:\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ncase CONFLICT_KEYWORD:\ncase LIMIT_KEYWORD:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseIntermediateClause(solution.tokenKind, isRhsExpr);\n}\n}\n/**\n* Parse join-keyword.\n*\n* @return Join-keyword node\n*/\nprivate STNode parseJoinKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.JOIN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.JOIN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse outer-keyword.\n*\n* @return Outer-keyword node\n*/\nprivate STNode parseOuterKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OUTER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OUTER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate boolean isEndOfIntermediateClause(SyntaxKind tokenKind, SyntaxKind precedingNodeKind) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase EOF_TOKEN:\ncase RESOURCE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase DOCUMENTATION_STRING:\ncase PRIVATE_KEYWORD:\ncase RETURNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase TYPE_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase DO_KEYWORD:\nreturn true;\ndefault:\nreturn isValidExprRhsStart(tokenKind, precedingNodeKind);\n}\n}\n/**\n* Parse from clause.\n*

\n* from-clause := from typed-binding-pattern in expression\n*\n* @return Parsed node\n*/\nprivate STNode parseFromClause(boolean isRhsExpr) {\nSTNode fromKeyword = parseFromKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);\n}\n/**\n* Parse from-keyword.\n*\n* @return From-keyword node\n*/\nprivate STNode parseFromKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FROM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FROM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse where clause.\n*

\n* where-clause := where expression\n*\n* @return Parsed node\n*/\nprivate STNode parseWhereClause(boolean isRhsExpr) {\nSTNode whereKeyword = parseWhereKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createWhereClauseNode(whereKeyword, expression);\n}\n/**\n* Parse where-keyword.\n*\n* @return Where-keyword node\n*/\nprivate STNode parseWhereKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WHERE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WHERE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let clause.\n*

\n* let-clause := let let-var-decl [, let-var-decl]* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetClause(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nreturn STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);\n}\n/**\n* Parse select clause.\n*

\n* select-clause := select expression\n*\n* @return Parsed node\n*/\nprivate STNode parseSelectClause(boolean isRhsExpr) {\nSTNode selectKeyword = parseSelectKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createSelectClauseNode(selectKeyword, expression);\n}\n/**\n* Parse select-keyword.\n*\n* @return Select-keyword node\n*/\nprivate STNode parseSelectKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SELECT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SELECT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse on-conflict clause.\n*

\n* \n* on-conflict-clause := on conflict expression\n* \n*\n* @return On conflict clause node\n*/\nprivate STNode parseOnConflictClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ON_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode onKeyword = parseOnKeyword();\nSTNode conflictKeyword = parseConflictKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);\n}\n/**\n* Parse conflict keyword.\n*\n* @return Conflict keyword node\n*/\nprivate STNode parseConflictKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONFLICT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONFLICT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse limit clause.\n*

\n* limit-clause := limit expression\n*\n* @return Limit expression node\n*/\nprivate STNode parseLimitClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.LIMIT_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode limitKeyword = parseLimitKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLimitClauseNode(limitKeyword, expr);\n}\n/**\n* Parse join clause.\n*

\n* \n* join-clause := (join-var-decl | outer-join-var-decl) in expression\n*
\n* join-var-decl := join (typeName | var) bindingPattern\n*
\n* outer-join-var-decl := outer join var binding-pattern\n*
\n*\n* @return Join clause\n*/\nprivate STNode parseJoinClause(boolean isRhsExpr) {\nSTNode outerKeyword;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {\nouterKeyword = parseOuterKeyword();\n} else {\nouterKeyword = STNodeFactory.createEmptyNode();\n}\nSTNode joinKeyword = parseJoinKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode onCondition;\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nnextToken = peek();\nif (nextToken.kind == SyntaxKind.ON_KEYWORD) {\nonCondition = parseOnClause(isRhsExpr);\n} else {\nonCondition = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression,\nonCondition);\n}\n/**\n* Parse on clause.\n*

\n* on-clause := on expression\n*\n* @return On clause node\n*/\nprivate STNode parseOnClause(boolean isRhsExpr) {\nSTNode onKeyword = parseOnKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnClauseNode(onKeyword, expression);\n}\n/**\n* Parse start action.\n*

\n* start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)\n*\n* @return Start action node\n*/\nprivate STNode parseStartAction(STNode annots) {\nSTNode startKeyword = parseStartKeyword();\nSTNode expr = parseActionOrExpression();\nswitch (expr.kind) {\ncase FUNCTION_CALL:\ncase METHOD_CALL:\ncase REMOTE_METHOD_CALL_ACTION:\nbreak;\ndefault:\nif (!isMissingNode(expr)) {\nexpr = SyntaxErrors.addDiagnostic(expr,\nDiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);\n}\n}\nreturn STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);\n}\n/**\n* Parse start keyword.\n*\n* @return Start keyword node\n*/\nprivate STNode parseStartKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.START_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.START_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse flush action.\n*

\n* flush-action := flush [peer-worker]\n*\n* @return flush action node\n*/\nprivate STNode parseFlushAction() {\nSTNode flushKeyword = parseFlushKeyword();\nSTNode peerWorker = parseOptionalPeerWorkerName();\nreturn STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);\n}\n/**\n* Parse flush keyword.\n*\n* @return flush keyword node\n*/\nprivate STNode parseFlushKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FLUSH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FLUSH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse peer worker.\n*

\n* peer-worker := worker-name | default\n*\n* @return peer worker name node\n*/\nprivate STNode parseOptionalPeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase DEFAULT_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nreturn STNodeFactory.createEmptyNode();\n}\n}\n/**\n* Parse intersection type descriptor.\n*

\n* intersection-type-descriptor := type-descriptor & type-descriptor\n*

\n*\n* @return Parsed node\n*/\nprivate STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode bitwiseAndToken = consume();\nSTNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n}\n/**\n* Parse singleton type descriptor.\n*

\n* singleton-type-descriptor := simple-const-expr\n* simple-const-expr :=\n* nil-literal\n* | boolean-literal\n* | [Sign] int-literal\n* | [Sign] floating-point-literal\n* | string-literal\n* | constant-reference-expr\n*

\n*/\nprivate STNode parseSingletonTypeDesc() {\nSTNode simpleContExpr = parseSimpleConstExpr();\nreturn STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);\n}\nprivate STNode parseSignedIntOrFloat() {\nSTNode operator = parseUnaryOperator();\nSTNode literal;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nliteral = parseBasicLiteral();\nbreak;\ndefault:\nliteral = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL);\nliteral = STNodeFactory.createBasicLiteralNode(literal.kind, literal);\n}\nreturn STNodeFactory.createUnaryExpressionNode(operator, literal);\n}\nprivate boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {\nSTToken nextNextToken = getNextNextToken(tokenKind);\nswitch (tokenKind) {\ncase STRING_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nif (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {\nreturn true;\n}\nreturn false;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isIntOrFloat(nextNextToken);\ndefault:\nreturn false;\n}\n}\nstatic boolean isIntOrFloat(STToken token) {\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the parser reached to a valid expression start.\n*\n* @param nextTokenKind Kind of the next immediate token.\n* @param nextTokenIndex Index to the next token.\n* @return true if this is a start of a valid expression. 
false otherwise\n*/\nprivate boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {\nnextTokenIndex++;\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;\nreturn nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||\nnextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||\nisValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase IDENTIFIER_TOKEN:\nreturn isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase OPEN_PAREN_TOKEN:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase TYPEOF_KEYWORD:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\ncase TRAP_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase LT_TOKEN:\ncase FROM_KEYWORD:\ncase LET_KEYWORD:\ncase BACKTICK_TOKEN:\ncase NEW_KEYWORD:\ncase LEFT_ARROW_TOKEN:\nreturn true;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);\ncase FUNCTION_KEYWORD:\ncase TABLE_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;\ncase STREAM_KEYWORD:\nSTToken nextNextToken = peek(nextTokenIndex);\nreturn nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||\nnextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||\nnextNextToken.kind == SyntaxKind.FROM_KEYWORD;\ncase ERROR_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;\ncase SERVICE_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.OPEN_BRACE_TOKEN;\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase WAIT_KEYWORD:\ndefault:\nreturn false;\n}\n}\n/**\n* Parse sync send action.\n*

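In `isSingletonTypeDescStart` above, a leading `+` or `-` starts a singleton type only when the token after it is a numeric literal (the `isIntOrFloat` check). A toy two-token lookahead making the same call:

```java
// Toy two-token lookahead mirroring the PLUS/MINUS branch of
// isSingletonTypeDescStart: a sign only starts a singleton type when a
// numeric literal follows it.
class SignLookaheadDemo {
    static boolean isSignedLiteralStart(String token, String nextToken) {
        boolean sign = token.equals("+") || token.equals("-");
        return sign && !nextToken.isEmpty()
                && nextToken.chars().allMatch(Character::isDigit);
    }

    public static void main(String[] args) {
        System.out.println(isSignedLiteralStart("-", "42"));  // true
        System.out.println(isSignedLiteralStart("-", "foo")); // false
    }
}
```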
\n* sync-send-action := expression ->> peer-worker\n*\n* @param expression LHS expression of the sync send action\n* @return Sync send action node\n*/\nprivate STNode parseSyncSendAction(STNode expression) {\nSTNode syncSendToken = parseSyncSendToken();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);\n}\n/**\n* Parse peer worker.\n*

\n* peer-worker := worker-name | default\n*\n* @return peer worker name node\n*/\nprivate STNode parsePeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase DEFAULT_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nSolution sol = recover(token, ParserRuleContext.PEER_WORKER_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse sync send token.\n*
\n* sync-send-token := ->> \n*\n* @return sync send token\n*/\nprivate STNode parseSyncSendToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SYNC_SEND_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse receive action.\n*
\n* receive-action := single-receive-action | multiple-receive-action\n*\n* @return Receive action\n*/\nprivate STNode parseReceiveAction() {\nSTNode leftArrow = parseLeftArrowToken();\nSTNode receiveWorkers = parseReceiveWorkers();\nreturn STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);\n}\nprivate STNode parseReceiveWorkers() {\nreturn parseReceiveWorkers(peek().kind);\n}\nprivate STNode parseReceiveWorkers(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\ncase IDENTIFIER_TOKEN:\nreturn parsePeerWorkerName();\ncase OPEN_BRACE_TOKEN:\nreturn parseMultipleReceiveWorkers();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_WORKERS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseReceiveWorkers(solution.tokenKind);\n}\n}\n/**\n* Parse multiple worker receivers.\n*
\n* { receive-field (, receive-field)* }\n*\n* @return Multiple worker receiver node\n*/\nprivate STNode parseMultipleReceiveWorkers() {\nstartContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);\nSTNode openBrace = parseOpenBrace();\nSTNode receiveFields = parseReceiveFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);\nreturn STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);\n}\nprivate STNode parseReceiveFields() {\nList receiveFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfReceiveFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode receiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\nSTNode recieveFieldEnd;\nwhile (!isEndOfReceiveFields(nextToken.kind)) {\nrecieveFieldEnd = parseReceiveFieldEnd(nextToken.kind);\nif (recieveFieldEnd == null) {\nbreak;\n}\nreceiveFields.add(recieveFieldEnd);\nreceiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(receiveFields);\n}\nprivate boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseReceiveFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseReceiveFieldEnd(solution.tokenKind);\n}\n}\nprivate STNode parseReceiveField() {\nreturn parseReceiveField(peek().kind);\n}\n/**\n* Parse receive field.\n*
\n* receive-field := peer-worker | field-name : peer-worker\n*\n* @param nextTokenKind Kind of the next token\n* @return Receiver field node\n*/\nprivate STNode parseReceiveField(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\nreturn parseDefaultKeyword();\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);\nreturn createQualifiedReceiveField(identifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.tokenKind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn createQualifiedReceiveField(solution.recoveredNode);\n}\nreturn solution.recoveredNode;\n}\n}\nprivate STNode createQualifiedReceiveField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);\n}\n/**\n*\n* Parse left arrow (<-) token.\n*\n* @return left arrow token\n*/\nprivate STNode parseLeftArrowToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse signed right shift token (>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseSignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTToken endLGToken = consume();\nSTNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),\nendLGToken.trailingMinutiae());\nif (!validateRightShiftOperatorWS(openGTToken)) {\ndoubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);\n}\nreturn doubleGTToken;\n}\n/**\n* Parse unsigned right shift token (>>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseUnsignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTNode middleGTToken = consume();\nSTNode endLGToken = consume();\nSTNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,\nopenGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());\nboolean validOpenGTToken = validateRightShiftOperatorWS(openGTToken);\nboolean validMiddleGTToken = validateRightShiftOperatorWS(middleGTToken);\nif (validOpenGTToken && validMiddleGTToken) {\nreturn unsignedRightShiftToken;\n}\nunsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);\nreturn unsignedRightShiftToken;\n}\n/**\n* Validate the whitespace between '>' tokens of right shift operators.\n*\n* @param node Preceding node\n* @return the validated node\n*/\nprivate boolean validateRightShiftOperatorWS(STNode node) {\nint diff = node.widthWithTrailingMinutiae() - node.width();\nreturn diff == 0;\n}\n/**\n* Parse wait action.\n*
\n* wait-action := single-wait-action | multiple-wait-action | alternate-wait-action \n*\n* @return Wait action node\n*/\nprivate STNode parseWaitAction() {\nSTNode waitKeyword = parseWaitKeyword();\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nreturn parseMultiWaitAction(waitKeyword);\n}\nreturn parseSingleOrAlternateWaitAction(waitKeyword);\n}\n/**\n* Parse wait keyword.\n*\n* @return wait keyword\n*/\nprivate STNode parseWaitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WAIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WAIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse single or alternate wait actions.\n*
\n* \n* alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+\n*\n* wait-future-expr := expression but not mapping-constructor-expr\n*
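\n* For example (an illustrative snippet, assuming futures f1 and f2 are in scope): wait f1 | f2\n*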
\n*\n* @param waitKeyword wait keyword\n* @return Single or alternate wait action node\n*/\nprivate STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);\nSTToken nextToken = peek();\nif (isEndOfWaitFutureExprList(nextToken.kind)) {\nendContext();\nSTNode waitFutureExprs = STNodeFactory.createEmptyNodeList();\nwaitKeyword = cloneWithDiagnosticIfListEmpty(waitFutureExprs, waitKeyword,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);\n}\nList waitFutureExprList = new ArrayList<>();\nSTNode waitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\nSTNode waitFutureExprEnd;\nwhile (!isEndOfWaitFutureExprList(nextToken.kind)) {\nwaitFutureExprEnd = parseWaitFutureExprEnd(nextToken.kind, 1);\nif (waitFutureExprEnd == null) {\nbreak;\n}\nwaitFutureExprList.add(waitFutureExprEnd);\nwaitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));\n}\nprivate boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFutureExpr() {\nSTNode waitFutureExpr = parseExpression();\nif (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {\nwaitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,\nDiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);\n}\nreturn waitFutureExpr;\n}\nprivate STNode parseWaitFutureExprEnd(int nextTokenIndex) {\nreturn parseWaitFutureExprEnd(peek().kind, 1);\n}\nprivate STNode parseWaitFutureExprEnd(SyntaxKind nextTokenKind, int nextTokenIndex) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ndefault:\nif (isEndOfWaitFutureExprList(nextTokenKind) ||\n!isValidExpressionStart(nextTokenKind, nextTokenIndex)) {\nreturn null;\n}\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END, nextTokenIndex);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitFutureExprEnd(solution.tokenKind, 0);\n}\n}\n/**\n* Parse multiple wait action.\n*
\n* multiple-wait-action := wait { wait-field (, wait-field)* }\n*\n* @param waitKeyword Wait keyword\n* @return Multiple wait action node\n*/\nprivate STNode parseMultiWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.MULTI_WAIT_FIELDS);\nSTNode openBrace = parseOpenBrace();\nSTNode waitFields = parseWaitFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);\nSTNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);\n}\nprivate STNode parseWaitFields() {\nList waitFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfWaitFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode waitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\nSTNode waitFieldEnd;\nwhile (!isEndOfWaitFields(nextToken.kind)) {\nwaitFieldEnd = parseWaitFieldEnd(nextToken.kind);\nif (waitFieldEnd == null) {\nbreak;\n}\nwaitFields.add(waitFieldEnd);\nwaitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(waitFields);\n}\nprivate boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFieldEnd() {\nreturn parseWaitFieldEnd(peek().kind);\n}\nprivate STNode parseWaitFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitFieldEnd(solution.tokenKind);\n}\n}\nprivate STNode parseWaitField() {\nreturn parseWaitField(peek().kind);\n}\n/**\n* Parse wait field.\n*
\n* wait-field := variable-name | field-name : wait-future-expr\n*\n* @param nextTokenKind Kind of the next token\n* @return Receiver field node\n*/\nprivate STNode parseWaitField(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);\nidentifier = STNodeFactory.createSimpleNameReferenceNode(identifier);\nreturn createQualifiedWaitField(identifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitField(solution.tokenKind);\n}\n}\nprivate STNode createQualifiedWaitField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode waitFutureExpr = parseWaitFutureExpr();\nreturn STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);\n}\n/**\n* Parse annot access expression.\n*
\n* \n* annot-access-expr := expression .@ annot-tag-reference\n*\n* annot-tag-reference := qualified-identifier | identifier\n*
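\n* For example (illustrative, assuming a typedesc value t and an annotation tag v1): t.@v1\n*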
\n*\n* @param lhsExpr Preceding expression of the annot access access\n* @return Parsed node\n*/\nprivate STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode annotAccessToken = parseAnnotChainingToken();\nSTNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);\n}\n/**\n* Parse annot-chaining-token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field access identifier.\n*
\n* field-access-identifier := qualified-identifier | identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {\nreturn parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);\n}\n/**\n* Parse query action.\n*
\n* query-action := query-pipeline do-clause\n*\n* do-clause := do block-stmt\n*
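\n* For example (illustrative, assuming a list students and an int variable count): from var s in students do { count += 1; }\n*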
\n*\n* @param queryPipeline Query pipeline\n* @param selectClause Select clause if any This is only for validation.\n* @return Query action node\n*/\nprivate STNode parseQueryAction(STNode queryPipeline, STNode selectClause, boolean isRhsExpr) {\nif (selectClause != null) {\nqueryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,\nDiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);\n}\nstartContext(ParserRuleContext.DO_CLAUSE);\nSTNode doKeyword = parseDoKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nSTNode limitClause = parseLimitClause(isRhsExpr);\nreturn STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt, limitClause);\n}\n/**\n* Parse 'do' keyword.\n*\n* @return do keyword node\n*/\nprivate STNode parseDoKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DO_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DO_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse optional field access or xml optional attribute access expression.\n*
\n* \n* optional-field-access-expr := expression ?. field-name\n*\n* xml-optional-attribute-access-expr := expression ?. xml-attribute-name\n*\n* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n*\n* xml-qualified-name := xml-namespace-prefix : identifier\n*\n* xml-namespace-prefix := identifier\n*
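\n* For example (illustrative, assuming a record value person with an optional field name): person?.name\n*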
\n*\n* @param lhsExpr Preceding expression of the optional access\n* @return Parsed node\n*/\nprivate STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode optionalFieldAccessToken = parseOptionalChainingToken();\nSTNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);\n}\n/**\n* Parse optional chaining token.\n*\n* @return parsed node\n*/\nprivate STNode parseOptionalChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse conditional expression.\n*
\n* conditional-expr := expression ? expression : expression\n*\n* @param lhsExpr Preceding expression of the question mark\n* @return Parsed node\n*/\nprivate STNode parseConditionalExpression(STNode lhsExpr) {\nstartContext(ParserRuleContext.CONDITIONAL_EXPRESSION);\nSTNode questionMark = parseQuestionMark();\nSTNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);\nSTNode nextToken = peek();\nSTNode endExpr;\nSTNode colon;\nif (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;\nmiddleExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.modulePrefix);\ncolon = qualifiedNameRef.colon;\nendContext();\nendExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);\n} else {\ncolon = parseColon();\nendContext();\nendExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);\n}\nreturn STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);\n}\n/**\n* Parse enum declaration.\n*
\n* module-enum-decl :=\n* metadata\n* [public] enum identifier { enum-member (, enum-member)* }\n* enum-member := metadata identifier [= const-expr]\n*
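\n* For example (an illustrative declaration): public enum Color { RED, GREEN = \"green\", BLUE }\n*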
\n*\n* @param metadata\n* @param qualifier\n*\n* @return Parsed enum node.\n*/\nprivate STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.MODULE_ENUM_DECLARATION);\nSTNode enumKeywordToken = parseEnumKeyword();\nSTNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);\nSTNode openBraceToken = parseOpenBrace();\nSTNode enumMemberList = parseEnumMemberList();\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nopenBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,\nDiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);\nreturn STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,\nopenBraceToken, enumMemberList, closeBraceToken);\n}\n/**\n* Parse 'enum' keyword.\n*\n* @return enum keyword node\n*/\nprivate STNode parseEnumKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ENUM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ENUM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse enum member list.\n*
\n* enum-member := metadata identifier [= const-expr]\n*
\n*\n* @return enum member list node.\n*/\nprivate STNode parseEnumMemberList() {\nstartContext(ParserRuleContext.ENUM_MEMBER_LIST);\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList enumMemberList = new ArrayList<>();\nSTNode enumMember = parseEnumMember();\nnextToken = peek();\nSTNode enumMemberRhs;\nwhile (nextToken.kind != SyntaxKind.CLOSE_BRACE_TOKEN) {\nenumMemberRhs = parseEnumMemberEnd(nextToken.kind);\nif (enumMemberRhs == null) {\nbreak;\n}\nenumMemberList.add(enumMember);\nenumMemberList.add(enumMemberRhs);\nenumMember = parseEnumMember();\nnextToken = peek();\n}\nenumMemberList.add(enumMember);\nendContext();\nreturn STNodeFactory.createNodeList(enumMemberList);\n}\n/**\n* Parse enum member.\n*
\n* enum-member := metadata identifier [= const-expr]\n*
\n*\n* @return Parsed enum member node.\n*/\nprivate STNode parseEnumMember() {\nSTToken nextToken = peek();\nSTNode metadata;\nswitch (nextToken.kind) {\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextToken.kind);\nbreak;\ndefault:\nmetadata = STNodeFactory.createEmptyNode();\n}\nSTNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);\nreturn parseEnumMemberRhs(metadata, identifierNode);\n}\nprivate STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {\nreturn parseEnumMemberRhs(peek().kind, metadata, identifierNode);\n}\nprivate STNode parseEnumMemberRhs(SyntaxKind nextToken, STNode metadata, STNode identifierNode) {\nSTNode equalToken, constExprNode;\nswitch (nextToken) {\ncase EQUAL_TOKEN:\nequalToken = parseAssignOp();\nconstExprNode = parseExpression();\nbreak;\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nequalToken = STNodeFactory.createEmptyNode();\nconstExprNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseEnumMemberRhs(solution.tokenKind, metadata, identifierNode);\n}\nreturn STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);\n}\nprivate STNode parseEnumMemberEnd() {\nreturn parseEnumMemberEnd(peek().kind);\n}\nprivate STNode parseEnumMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseEnumMemberEnd(solution.tokenKind);\n}\n}\n/**\n* Parse transaction statement.\n*
\n* transaction-stmt := \"transaction\" block-stmt ;\n*\n* @return Transaction statement node\n*/\nprivate STNode parseTransactionStatement() {\nstartContext(ParserRuleContext.TRANSACTION_STMT);\nSTNode transactionKeyword = parseTransactionKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nreturn STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt);\n}\n/**\n* Parse transaction keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseTransactionKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TRANSACTION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse commit action.\n*
\n* commit-action := \"commit\"\n*\n* @return Commit action node\n*/\nprivate STNode parseCommitAction() {\nSTNode commitKeyword = parseCommitKeyword();\nreturn STNodeFactory.createCommitActionNode(commitKeyword);\n}\n/**\n* Parse commit keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseCommitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COMMIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMMIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse retry statement.\n*
\n* \n* retry-stmt := \"retry\" retry-spec block-stmt\n*\n* retry-spec := [type-parameter] [ \"(\" arg-list \")\" ]\n*
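\n* For example (illustrative, assuming a retry manager type MyRetryManager and a function doWork): retry<MyRetryManager>(3) { doWork(); }\n*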
\n*\n* @return Retry statement node\n*/\nprivate STNode parseRetryStatement() {\nstartContext(ParserRuleContext.RETRY_STMT);\nSTNode retryKeyword = parseRetryKeyword();\nSTNode retryStmt = parseRetryKeywordRhs(retryKeyword);\nendContext();\nreturn retryStmt;\n}\nprivate STNode parseRetryKeywordRhs(STNode retryKeyword) {\nreturn parseRetryKeywordRhs(peek().kind, retryKeyword);\n}\nprivate STNode parseRetryKeywordRhs(SyntaxKind nextTokenKind, STNode retryKeyword) {\nswitch (nextTokenKind) {\ncase LT_TOKEN:\nSTNode typeParam = parseTypeParameter();\nreturn parseRetryTypeParamRhs(retryKeyword, typeParam);\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\ntypeParam = STNodeFactory.createEmptyNode();\nreturn parseRetryTypeParamRhs(nextTokenKind, retryKeyword, typeParam);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRetryKeywordRhs(solution.tokenKind, retryKeyword);\n}\n}\nprivate STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {\nreturn parseRetryTypeParamRhs(peek().kind, retryKeyword, typeParam);\n}\nprivate STNode parseRetryTypeParamRhs(SyntaxKind nextTokenKind, STNode retryKeyword, STNode typeParam) {\nSTNode args;\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nargs = parseParenthesizedArgList();\nbreak;\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\nargs = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);\nreturn parseRetryTypeParamRhs(solution.tokenKind, retryKeyword, typeParam);\n}\nSTNode blockStmt = parseRetryBody();\nreturn STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt);\n}\nprivate STNode parseRetryBody() {\nreturn parseRetryBody(peek().kind);\n}\nprivate STNode parseRetryBody(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\nreturn parseBlockNode();\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionStatement();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_BODY);\nreturn parseRetryBody(solution.tokenKind);\n}\n}\n/**\n* Parse retry keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseRetryKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETRY_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETRY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse transaction statement.\n*
\n* rollback-stmt := \"rollback\" [expression] \";\"\n*\n* @return Rollback statement node\n*/\nprivate STNode parseRollbackStatement() {\nstartContext(ParserRuleContext.ROLLBACK_STMT);\nSTNode rollbackKeyword = parseRollbackKeyword();\nSTNode expression;\nif (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {\nexpression = STNodeFactory.createEmptyNode();\n} else {\nexpression = parseExpression();\n}\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);\n}\n/**\n* Parse rollback keyword.\n*\n* @return Rollback keyword node\n*/\nprivate STNode parseRollbackKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ROLLBACK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse transactional expression.\n*
\n* transactional-expr := \"transactional\"\n*\n* @return Transactional expression node\n*/\nprivate STNode parseTransactionalExpression() {\nSTNode transactionalKeyword = parseTransactionalKeyword();\nreturn STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);\n}\n/**\n* Parse transactional keyword.\n*\n* @return Transactional keyword node\n*/\nprivate STNode parseTransactionalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ROLLBACK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse service-constructor-expr.\n*
\n* \n* service-constructor-expr := [annots] service service-body-block\n*\n* service-body-block := { service-method-defn* }\n*\n* service-method-defn := metadata [resource] function identifier function-signature method-defn-body\n*
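\n* For example (illustrative): service { resource function sayHello() { } }\n*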
\n*\n* @param annots Annotations\n* @return Service constructor expression node\n*/\nprivate STNode parseServiceConstructorExpression(STNode annots) {\nstartContext(ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION);\nSTNode serviceKeyword = parseServiceKeyword();\nSTNode serviceBody = parseServiceBody();\nendContext();\nreturn STNodeFactory.createServiceConstructorExpressionNode(annots, serviceKeyword, serviceBody);\n}\n/**\n* Parse base16 literal.\n*
\n* \n* byte-array-literal := Base16Literal | Base64Literal\n*\n* Base16Literal := base16 WS ` HexGroup* WS `\n*\n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*
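\n* For example (illustrative): base16 `AA BB CC` or base64 `ABCD pqrs`\n*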
\n*\n* @param kind byte array literal kind\n* @return parsed node\n*/\nprivate STNode parseByteArrayLiteral(SyntaxKind kind) {\nSTNode type;\nif (kind == SyntaxKind.BASE16_KEYWORD) {\ntype = parseBase16Keyword();\n} else {\ntype = parseBase64Keyword();\n}\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseByteArrayContent(kind);\nreturn parseByteArrayLiteral(kind, type, startingBackTick, content);\n}\n/**\n* Parse byte array literal.\n*\n* @param baseKind indicates the SyntaxKind base16 or base64\n* @param typeKeyword keyword token, possible values are `base16` and `base64`\n* @param startingBackTick starting backtick token\n* @param byteArrayContent byte array literal content to be validated\n* @return parsed byte array literal node\n*/\nprivate STNode parseByteArrayLiteral(SyntaxKind baseKind, STNode typeKeyword, STNode startingBackTick,\nSTNode byteArrayContent) {\nSTNode content = STNodeFactory.createEmptyNode();\nSTNode newStartingBackTick = startingBackTick;\nSTNodeList items = (STNodeList) byteArrayContent;\nif (items.size() == 1) {\nSTNode item = items.get(0);\nif (baseKind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (baseKind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (item.kind != SyntaxKind.TEMPLATE_STRING) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else {\ncontent = item;\n}\n} else if (items.size() > 1) {\nSTNode clonedStartingBackTick = startingBackTick;\nfor (int index = 0; index < items.size(); index++) {\nSTNode item = items.get(index);\nclonedStartingBackTick =\nSyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);\n}\nnewStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n}\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);\n}\n/**\n* Parse base16 keyword.\n*\n* @return base16 keyword node\n*/\nprivate STNode parseBase16Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE16_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BASE16_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse base64 keyword.\n*\n* @return base64 keyword node\n*/\nprivate STNode parseBase64Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE64_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BASE64_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Validate and parse byte array literal content.\n* An error is reported, if the content is invalid.\n*\n* @param kind byte array literal kind\n* @return parsed node\n*/\nprivate STNode parseByteArrayContent(SyntaxKind kind) {\nSTToken nextToken = peek();\nList items = new ArrayList<>();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode content = 
parseTemplateItem();\nitems.add(content);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\n/**\n* Validate base16 literal content.\n*\n* \n* Base16Literal := base16 WS ` HexGroup* WS `\n*\n* HexGroup := WS HexDigit WS HexDigit\n*\n* WS := WhiteSpaceChar*\n*\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
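\n* For example, \"AABB CC\" is valid content (an even number of hex digits; whitespace is ignored), while \"AAB\" is not.\n*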
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase16LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint hexDigitCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ndefault:\nif (isHexDigit(c)) {\nhexDigitCount++;\n} else {\nreturn false;\n}\nbreak;\n}\n}\nreturn hexDigitCount % 2 == 0;\n}\n/**\n* Validate base64 literal content.\n*
\n* \n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*\n* Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char\n*\n* PaddedBase64Group :=\n* WS Base64Char WS Base64Char WS Base64Char WS PaddingChar\n* | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar\n*\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*\n* PaddingChar := =\n*\n* WS := WhiteSpaceChar*\n*\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
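\n* For example, \"ABCD\" and \"AB==\" are valid content, while \"ABC\" (not a multiple of four characters) and \"A===\" (more than two padding characters) are not.\n*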
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase64LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint base64CharCount = 0;\nint paddingCharCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ncase LexerTerminals.EQUAL:\npaddingCharCount++;\nbreak;\ndefault:\nif (isBase64Char(c)) {\nif (paddingCharCount == 0) {\nbase64CharCount++;\n} else {\nreturn false;\n}\n} else {\nreturn false;\n}\nbreak;\n}\n}\nif (paddingCharCount > 2) {\nreturn false;\n} else if (paddingCharCount == 0) {\nreturn base64CharCount % 4 == 0;\n} else {\nreturn base64CharCount % 4 == 4 - paddingCharCount;\n}\n}\n/**\n*
\n* Check whether a given char is a base64 char.\n*
\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*\n* @param c character to check\n* @return true, if the character represents a base64 char. false otherwise.\n*/\nstatic boolean isBase64Char(int c) {\nif ('a' <= c && c <= 'z') {\nreturn true;\n}\nif ('A' <= c && c <= 'Z') {\nreturn true;\n}\nif (c == '+' || c == '/') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isHexDigit(int c) {\nif ('a' <= c && c <= 'f') {\nreturn true;\n}\nif ('A' <= c && c <= 'F') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isDigit(int c) {\nreturn ('0' <= c && c <= '9');\n}\n/**\n* Parse xml filter expression.\n*
\n* xml-filter-expr := expression .< xml-name-pattern >\n*\n* @param lhsExpr Preceding expression of .< token\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpression(STNode lhsExpr) {\nSTNode xmlNamePatternChain = parseXMLFilterExpressionRhs();\nreturn STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);\n}\n/**\n* Parse xml filter expression rhs.\n*\n* filter-expression-rhs := .< xml-name-pattern >\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpressionRhs() {\nSTNode dotLTToken = parseDotLTToken();\nreturn parseXMLNamePatternChain(dotLTToken);\n}\n/**\n* Parse xml name pattern chain.\n*\n* \n* xml-name-pattern-chain := filter-expression-rhs | xml-element-children-step | xml-element-descendants-step\n*\n* filter-expression-rhs := .< xml-name-pattern >\n*\n* xml-element-children-step := /< xml-name-pattern >\n*\n* xml-element-descendants-step := /**\/\n*
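\n* For example (illustrative, assuming an xml value x): x.<foo>, x/<item>, or x/**\/<name>\n*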
\n*\n* @param startToken Preceding token of xml name pattern\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePatternChain(STNode startToken) {\nstartContext(ParserRuleContext.XML_NAME_PATTERN);\nSTNode xmlNamePattern = parseXMLNamePattern();\nSTNode gtToken = parseGTToken();\nendContext();\nstartToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,\nDiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);\nreturn STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);\n}\n/**\n* Parse .< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDotLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse xml name pattern.\n*
\n* xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePattern() {\nList xmlAtomicNamePatternList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfXMLNamePattern(nextToken.kind)) {\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nSTNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\nSTNode separator;\nwhile (!isEndOfXMLNamePattern(peek().kind)) {\nseparator = parseXMLNamePatternSeparator();\nif (separator == null) {\nbreak;\n}\nxmlAtomicNamePatternList.add(separator);\nxmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\n}\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nprivate boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ncase IDENTIFIER_TOKEN:\ncase ASTERISK_TOKEN:\ncase COLON_TOKEN:\ndefault:\nreturn false;\n}\n}\nprivate STNode parseXMLNamePatternSeparator() {\nSTToken token = peek();\nswitch (token.kind) {\ncase PIPE_TOKEN:\nreturn consume();\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution sol = recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);\nif (sol.tokenKind == SyntaxKind.GT_TOKEN) {\nreturn null;\n}\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse xml atomic name pattern.\n*
\n* \n* xml-atomic-name-pattern :=\n* *\n* | identifier\n* | xml-namespace-prefix : identifier\n* | xml-namespace-prefix : *\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseXMLAtomicNamePattern() {\nstartContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);\nSTNode atomicNamePattern = parseXMLAtomicNamePatternBody();\nendContext();\nreturn atomicNamePattern;\n}\nprivate STNode parseXMLAtomicNamePatternBody() {\nSTToken token = peek();\nSTNode identifier;\nswitch (token.kind) {\ncase ASTERISK_TOKEN:\nreturn consume();\ncase IDENTIFIER_TOKEN:\nidentifier = consume();\nbreak;\ndefault:\nSolution sol = recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nif (sol.recoveredNode.kind == SyntaxKind.ASTERISK_TOKEN) {\nreturn sol.recoveredNode;\n}\nidentifier = sol.recoveredNode;\nbreak;\n}\nreturn parseXMLAtomicNameIdentifier(identifier);\n}\nprivate STNode parseXMLAtomicNameIdentifier(STNode identifier) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COLON_TOKEN) {\nSTNode colon = consume();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nSTToken endToken = consume();\nreturn STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);\n}\n}\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\n/**\n* Parse xml step expression.\n*
\n* xml-step-expr := expression xml-step-start\n*\n* @param lhsExpr Preceding expression of /*, /<, or /**\/< token\n* @return Parsed node\n*/\nprivate STNode parseXMLStepExpression(STNode lhsExpr) {\nSTNode xmlStepStart = parseXMLStepStart();\nreturn STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);\n}\n/**\n* Parse xml step start.\n*\n* \n* xml-step-start :=\n* xml-all-children-step\n* | xml-element-children-step\n* | xml-element-descendants-step\n*\n* xml-all-children-step := /*\n*
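\n* For example (illustrative, assuming an xml value x): x/* selects all children of x.\n*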
\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLStepStart() {\nSTToken token = peek();\nSTNode startToken;\nswitch (token.kind) {\ncase SLASH_ASTERISK_TOKEN:\nreturn consume();\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\nstartToken = parseDoubleSlashDoubleAsteriskLTToken();\nbreak;\ncase SLASH_LT_TOKEN:\ndefault:\nstartToken = parseSlashLTToken();\nbreak;\n}\nreturn parseXMLNamePatternChain(startToken);\n}\n/**\n* Parse /< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseSlashLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse /< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDoubleSlashDoubleAsteriskLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse match statement.\n*
\n* match-stmt := match action-or-expr { match-clause+ }\n*\n* @return Match statement\n*/\nprivate STNode parseMatchStatement() {\nstartContext(ParserRuleContext.MATCH_STMT);\nSTNode matchKeyword = parseMatchKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nstartContext(ParserRuleContext.MATCH_BODY);\nSTNode openBrace = parseOpenBrace();\nSTNode matchClauses = parseMatchClauses();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nendContext();\nreturn STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace);\n}\n/**\n* Parse match keyword.\n*\n* @return Match keyword node\n*/\nprivate STNode parseMatchKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.MATCH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse match clauses list.\n*\n* @return Match clauses list\n*/\nprivate STNode parseMatchClauses() {\nList matchClauses = new ArrayList<>();\nwhile (!isEndOfMatchClauses(peek().kind)) {\nSTNode clause = parseMatchClause();\nmatchClauses.add(clause);\n}\nreturn STNodeFactory.createNodeList(matchClauses);\n}\nprivate boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse a single match match clause.\n*
\n* \n* match-clause := match-pattern-list [match-guard] => block-stmt\n*\n* match-guard := if expression\n*
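\n* For example (illustrative, assuming the io module is imported): 0 => { io:println(\"zero\"); }\n*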
\n*\n* @return A match clause\n*/\nprivate STNode parseMatchClause() {\nSTNode matchPatterns = parseMatchPatternList();\nSTNode matchGuard = parseMatchGuard();\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode blockStmt = parseBlockNode();\nreturn STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);\n}\n/**\n* Parse match guard.\n*
\n* match-guard := if expression\n*\n* @return Match guard\n*/\nprivate STNode parseMatchGuard() {\nSTToken nextToken = peek();\nreturn parseMatchGuard(nextToken.kind);\n}\nprivate STNode parseMatchGuard(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IF_KEYWORD:\nSTNode ifKeyword = parseIfKeyword();\nSTNode expr = parseExpression(peek().kind, DEFAULT_OP_PRECEDENCE, true, false, true, false);\nreturn STNodeFactory.createMatchGuardNode(ifKeyword, expr);\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchGuard(solution.tokenKind);\n}\n}\n/**\n* Parse match patterns list.\n*
\n* match-pattern-list := match-pattern (| match-pattern)*\n*\n* @return Match patterns list\n*/\nprivate STNode parseMatchPatternList() {\nstartContext(ParserRuleContext.MATCH_PATTERN);\nList matchClauses = new ArrayList<>();\nwhile (!isEndOfMatchPattern(peek().kind)) {\nSTNode clause = parseMatchPattern();\nif (clause == null) {\nbreak;\n}\nmatchClauses.add(clause);\nSTNode seperator = parseMatchPatternEnd();\nif (seperator == null) {\nbreak;\n}\nmatchClauses.add(seperator);\n}\nendContext();\nreturn STNodeFactory.createNodeList(matchClauses);\n}\nprivate boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase RIGHT_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse match pattern.\n*
\n* \n* match-pattern := var binding-pattern\n* | wildcard-match-pattern\n* | const-pattern\n* | list-match-pattern\n* | mapping-match-pattern\n* | functional-match-pattern\n* \n*\n* @return Match pattern\n*/\nprivate STNode parseMatchPattern() {\nSTToken nextToken = peek();\nreturn parseMatchPattern(nextToken.kind);\n}\nprivate STNode parseMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_LITERAL:\nreturn parseSimpleConstExpr();\ncase IDENTIFIER_TOKEN:\nSTNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);\nreturn parseFunctionalMatchPatternOrConsPattern(typeRefOrConstExpr);\ncase VAR_KEYWORD:\nreturn parseVarTypedBindingPattern();\ncase OPEN_BRACKET_TOKEN:\nreturn parseListMatchPattern();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingMatchPattern();\ncase ERROR_KEYWORD:\nreturn parseFunctionalMatchPattern(consume());\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MATCH_PATTERN_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchPattern(solution.tokenKind);\n}\n}\nprivate STNode parseMatchPatternEnd() {\nSTToken nextToken = peek();\nreturn parseMatchPatternEnd(nextToken.kind);\n}\nprivate STNode parseMatchPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ncase IF_KEYWORD:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MATCH_PATTERN_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchPatternEnd(solution.tokenKind);\n}\n}\n/**\n* Parse var typed binding pattern.\n*
\n* var binding-pattern\n*
\n*\n* @return Parsed typed binding pattern node\n*/\nprivate STNode parseVarTypedBindingPattern() {\nSTNode varKeyword = parseVarKeyword();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createTypedBindingPatternNode(varKeyword, bindingPattern);\n}\n/**\n* Parse var keyword.\n*\n* @return Var keyword node\n*/\nprivate STNode parseVarKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.VAR_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.VAR_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse list match pattern.\n*
\n* \n* list-match-pattern := [ list-member-match-patterns ]\n* list-member-match-patterns :=\n* match-pattern (, match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n* \n*
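\n* For example (illustrative): [1, 2] or [1, ...var rest]\n*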
\n*\n* @return Parsed list match pattern node\n*/\nprivate STNode parseListMatchPattern() {\nstartContext(ParserRuleContext.LIST_MATCH_PATTERN);\nSTNode openBracketToken = parseOpenBracket();\nList matchPatternList = new ArrayList<>();\nSTNode restMatchPattern = null;\nwhile (!isEndOfListMatchPattern()) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {\nrestMatchPattern = parseRestMatchPattern();\nbreak;\n}\nSTNode matchPatternListMember = parseMatchPattern();\nmatchPatternList.add(matchPatternListMember);\nSTNode matchPatternMemberRhs = parseListMatchPatternMemberRhs();\nif (matchPatternMemberRhs != null) {\nmatchPatternList.add(matchPatternMemberRhs);\n} else {\nbreak;\n}\n}\nif (restMatchPattern == null) {\nrestMatchPattern = STNodeFactory.createEmptyNode();\n}\nSTNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, restMatchPattern,\ncloseBracketToken);\n}\npublic boolean isEndOfListMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse rest match pattern.\n*
\n* \n* rest-match-pattern := ... var variable-name\n* \n*
\n*\n* @return Parsed rest match pattern node\n*/\nprivate STNode parseRestMatchPattern() {\nstartContext(ParserRuleContext.REST_MATCH_PATTERN);\nSTNode ellipsisToken = parseEllipsis();\nSTNode varKeywordToken = parseVarKeyword();\nSTNode variableName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);\nreturn STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);\n}\nprivate STNode parseListMatchPatternMemberRhs() {\nreturn parseListMatchPatternMemberRhs(peek().kind);\n}\nprivate STNode parseListMatchPatternMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListMatchPatternMemberRhs(solution.tokenKind);\n}\n}\n/**\n* Parse mapping match pattern.\n*
\n* mapping-match-pattern := { field-match-patterns }\n*\n* field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n*\n* field-match-pattern := field-name : match-pattern\n*\n* rest-match-pattern := ... var variable-name\n*
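\n* For example (illustrative): {x: 10, y: var b} or {x: 10, ...var rest}\n*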
\n*\n* @return Parsed Node.\n*/\nprivate STNode parseMappingMatchPattern() {\nstartContext(ParserRuleContext.MAPPING_MATCH_PATTERN);\nSTNode openBraceToken = parseOpenBrace();\nList fieldMatchPatternList = new ArrayList<>();\nSTNode restMatchPattern = null;\nboolean isEndOfFields = false;\nwhile (!isEndOfMappingMatchPattern()) {\nSTNode fieldMatchPatternMember = parseFieldMatchPatternMember();\nif (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {\nrestMatchPattern = fieldMatchPatternMember;\nisEndOfFields = true;\nbreak;\n}\nfieldMatchPatternList.add(fieldMatchPatternMember);\nSTNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();\nif (fieldMatchPatternRhs != null) {\nfieldMatchPatternList.add(fieldMatchPatternRhs);\n} else {\nbreak;\n}\n}\nSTNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();\nwhile (isEndOfFields && fieldMatchPatternRhs != null) {\nSTNode invalidField = parseFieldMatchPatternMember();\nrestMatchPattern =\nSyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, fieldMatchPatternRhs);\nrestMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, invalidField);\nrestMatchPattern = SyntaxErrors.addDiagnostic(restMatchPattern,\nDiagnosticErrorCode.ERROR_MORE_FIELD_MATCH_PATTERNS_AFTER_REST_FIELD);\nfieldMatchPatternRhs = parseFieldMatchPatternRhs();\n}\nif (restMatchPattern == null) {\nrestMatchPattern = STNodeFactory.createEmptyNode();\n}\nSTNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, restMatchPattern,\ncloseBraceToken);\n}\nprivate STNode parseFieldMatchPatternMember() {\nreturn parseFieldMatchPatternMember(peek().kind);\n}\nprivate STNode parseFieldMatchPatternMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseFieldMatchPattern();\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldMatchPatternMember(solution.tokenKind);\n}\n}\n/**\n* Parse filed match pattern.\n*
\n* field-match-pattern := field-name : match-pattern\n*
\n*\n* @return Parsed field match pattern node\n*/\npublic STNode parseFieldMatchPattern() {\nSTNode fieldNameNode = parseVariableName();\nSTNode colonToken = parseColon();\nSTNode matchPattern = parseMatchPattern();\nreturn STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);\n}\npublic boolean isEndOfMappingMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseFieldMatchPatternRhs() {\nreturn parseFieldMatchPatternRhs(peek().kind);\n}\nprivate STNode parseFieldMatchPatternRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldMatchPatternRhs(solution.tokenKind);\n}\n}\nprivate STNode parseFunctionalMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {\nreturn parseFunctionalMatchPatternOrConsPattern(peek().kind, typeRefOrConstExpr);\n}\nprivate STNode parseFunctionalMatchPatternOrConsPattern(SyntaxKind nextToken, STNode typeRefOrConstExpr) {\nswitch (nextToken) {\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalMatchPattern(typeRefOrConstExpr);\ndefault:\nif (isMatchPatternEnd(peek().kind)) {\nreturn typeRefOrConstExpr;\n}\nSolution solution = recover(peek(), ParserRuleContext.FUNC_MATCH_PATTERN_OR_CONST_PATTERN,\ntypeRefOrConstExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFunctionalMatchPatternOrConsPattern(solution.tokenKind, typeRefOrConstExpr);\n}\n}\nprivate boolean isMatchPatternEnd(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse functional match pattern.\n*
\n* functional-match-pattern := functionally-constructible-type-reference ( arg-list-match-pattern )\n*\n* functionally-constructible-type-reference := error | type-reference\n*\n* type-reference := identifier | qualified-identifier\n*\n* arg-list-match-pattern := positional-arg-match-patterns [, other-arg-match-patterns]\n* | other-arg-match-patterns\n*
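\n* For example (illustrative): error(\"NotFound\", code = 404)\n*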
\n*\n* @return Parsed functional match pattern node.\n*/\nprivate STNode parseFunctionalMatchPattern(STNode typeRef) {\nstartContext(ParserRuleContext.FUNCTIONAL_MATCH_PATTERN);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode argListMatchPatternNode = parseArgListMatchPatterns();\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createFunctionalMatchPatternNode(typeRef, openParenthesisToken, argListMatchPatternNode,\ncloseParenthesisToken);\n}\nprivate STNode parseArgListMatchPatterns() {\nList argListMatchPatterns = new ArrayList<>();\nSyntaxKind lastValidArgKind = SyntaxKind.IDENTIFIER_TOKEN;\nwhile (!isEndOfFunctionalMatchPattern()) {\nSTNode currentArg = parseArgMatchPattern();\nDiagnosticErrorCode errorCode = validateArgMatchPatternOrder(lastValidArgKind, currentArg.kind);\nif (errorCode == null) {\nargListMatchPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);\n}\nSTNode argRhs = parseArgMatchPatternRhs();\nif (argRhs == null) {\nbreak;\n}\nif (errorCode == null) {\nargListMatchPatterns.add(argRhs);\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, argRhs, null);\n}\n}\nreturn STNodeFactory.createNodeList(argListMatchPatterns);\n}\nprivate boolean isEndOfFunctionalMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse arg match patterns.\n* \n* arg-match-pattern := match-pattern | named-arg-match-pattern | rest-match-pattern\n* \n*
\n*\n* @return parsed arg match pattern node.\n*/\nprivate STNode parseArgMatchPattern() {\nreturn parseArgMatchPattern(peek().kind);\n}\nprivate STNode parseArgMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseNamedOrPositionalArgMatchPattern();\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_LITERAL:\ncase VAR_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseMatchPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgMatchPattern(solution.tokenKind);\n}\n}\nprivate STNode parseNamedOrPositionalArgMatchPattern() {\nSTNode identifier = parseIdentifier(ParserRuleContext.MATCH_PATTERN_START);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nreturn parseNamedArgMatchPattern(identifier);\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalMatchPattern(identifier);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn identifier;\n}\n}\n/**\n* Parses the next named arg match pattern.\n*
\n* named-arg-match-pattern := arg-name = match-pattern\n*
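\n* e.g. (illustrative) detail = var d, where detail is the arg-name and\n* var d is the match-pattern.\n*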
\n*\n* @return arg match pattern list node added the new arg match pattern\n*/\nprivate STNode parseNamedArgMatchPattern(STNode identifier) {\nstartContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);\nSTNode equalToken = parseAssignOp();\nSTNode matchPattern = parseMatchPattern();\nendContext();\nreturn STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);\n}\nprivate STNode parseArgMatchPatternRhs() {\nreturn parseArgMatchPatternRhs(peek().kind);\n}\nprivate STNode parseArgMatchPatternRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgMatchPatternRhs(solution.tokenKind);\n}\n}\nprivate DiagnosticErrorCode validateArgMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase NAMED_ARG_MATCH_PATTERN:\nif (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&\ncurrentArgKind != SyntaxKind.REST_MATCH_PATTERN) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_MATCH_PATTERN:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nbreak;\n}\nreturn errorCode;\n}\n/**\n* Parse markdown documentation.\n*\n* @return markdown documentation node\n*/\nprivate STNode parseMarkdownDocumentation() {\nList markdownDocLineList = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {\nSTToken documentationString = consume();\nSTNode markdownDocLines = parseDocumentationString(documentationString);\nmarkdownDocLineList.add(markdownDocLines);\nnextToken = peek();\n}\nSTNode arrangedMarkdownDocLines = rearrangeMarkdownDocumentationLines(markdownDocLineList);\nreturn STNodeFactory.createMarkdownDocumentationNode(arrangedMarkdownDocLines);\n}\n/**\n* Parse documentation string.\n*\n* @return markdown documentation line list node\n*/\nprivate STNode parseDocumentationString(STToken documentationStringToken) {\nList leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());\nTextDocument textDocument = TextDocuments.from(documentationStringToken.text());\nDocumentationLexer documentationLexer = new DocumentationLexer(textDocument.getCharacterReader(),\nleadingTriviaList);\nAbstractTokenReader tokenReader = new TokenReader(documentationLexer);\nDocumentationParser documentationParser = new DocumentationParser(tokenReader);\nreturn documentationParser.parse();\n}\nprivate List getLeadingTriviaList(STNode leadingMinutiaeNode) {\nList leadingTriviaList = new ArrayList<>();\nint bucketCount = leadingMinutiaeNode.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nleadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));\n}\nreturn leadingTriviaList;\n}\nprivate STNode rearrangeMarkdownDocumentationLines(List markdownDocLineList) {\nList arrangedDocLines = new ArrayList<>();\nfor (STNode markdownDocLines : markdownDocLineList) {\nint bucketCount = markdownDocLines.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nSTNode markdownDocLine = markdownDocLines.childInBucket(i);\narrangedDocLines.add(markdownDocLine);\n}\n}\nreturn STNodeFactory.createNodeList(arrangedDocLines);\n}\n/**\n* Parse any statement that starts with a token that has ambiguity between 
being\n* a type-desc or an expression.\n*\n* @param annots Annotations\n* @return Statement node\n*/\nprivate STNode parseStmtStartsWithTypeOrExpr(SyntaxKind nextTokenKind, STNode annots) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode typeOrExpr = parseTypedBindingPatternOrExpr(nextTokenKind, true);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);\n}\nprivate STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {\nif (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, finalKeyword, typedBindingPatternOrExpr, false);\n}\nSTNode expr = getExpression(typedBindingPatternOrExpr);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrExpr(nextToken.kind, allowAssignment);\n}\nprivate STNode parseTypedBindingPatternOrExpr(SyntaxKind nextTokenKind, boolean allowAssignment) {\nSTNode typeOrExpr;\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseTypedBPOrExprStartsWithOpenParenthesis();\ncase FUNCTION_KEYWORD:\nreturn parseAnonFuncExprOrTypedBPWithFuncType();\ncase IDENTIFIER_TOKEN:\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase OPEN_BRACKET_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseActionOrExpressionInLhs(nextTokenKind, null);\n}\nreturn parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);\n}\n}\n/**\n* Parse the component after the ambiguous starting node. Ambiguous node could be either an expr\n* or a type-desc. The component followed by this ambiguous node could be the binding-pattern or\n* the expression-rhs.\n*\n* @param typeOrExpr Type desc or the expression\n* @param allowAssignment Flag indicating whether to allow assignment. 
i.e.: whether this is a\n* valid lvalue expression\n* @return Typed-binding-pattern node or an expression node\n*/\nprivate STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrExprRhs(nextToken.kind, typeOrExpr, allowAssignment);\n}\nprivate STNode parseTypedBindingPatternOrExprRhs(SyntaxKind nextTokenKind, STNode typeOrExpr,\nboolean allowAssignment) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc =\nSTNodeFactory.createUnionTypeDescriptorNode(typeOrExpr, pipe, typedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypedBPOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(typeOrExpr, ampersand,\ntypedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypedBPOrExpr);\ncase SEMICOLON_TOKEN:\nif (isDefiniteExpr(typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\nif (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase EQUAL_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,\nParserRuleContext.AMBIGUOUS_STMT);\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextTokenKind, typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nSTToken token = peek();\nSolution solution =\nrecover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternOrExprRhs(solution.tokenKind, typeOrExpr, allowAssignment);\n}\n}\nprivate STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) 
{\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\ntypeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);\nendContext();\nreturn parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\nprivate STNode parseTypedBPOrExprStartsWithOpenParenthesis() {\nSTNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();\nif (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);\n}\nreturn parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);\n}\nprivate boolean isDefiniteTypeDesc(SyntaxKind kind) {\nreturn kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;\n}\nprivate boolean isDefiniteExpr(SyntaxKind kind) {\nif (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn false;\n}\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n}\n/**\n* Parse type or expression that starts with open parenthesis. Possible options are:\n* 1) () - nil type-desc or nil-literal\n* 2) (T) - Parenthesized type-desc\n* 3) (expr) - Parenthesized expression\n* 4) (param, param, ..) - Anon function params\n*\n* @return Type-desc or expression node\n*/\nprivate STNode parseTypedDescOrExprStartsWithOpenParenthesis() {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nSTNode closeParen = parseCloseParenthesis();\nreturn parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);\n}\nSTNode typeOrExpr = parseTypeDescOrExpr();\nif (isAction(typeOrExpr)) {\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,\ncloseParen);\n}\nif (isExpression(typeOrExpr.kind)) {\nstartContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\nreturn parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, typeOrExpr, false);\n}\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeOrExpr, closeParen);\n}\n/**\n* Parse type-desc or expression. 
This method does not handle binding patterns.\n*\n* @return Type-desc node or expression node\n*/\nprivate STNode parseTypeDescOrExpr() {\nSTToken nextToken = peek();\nSTNode typeOrExpr;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();\nbreak;\ncase FUNCTION_KEYWORD:\ntypeOrExpr = parseAnonFuncExprOrFuncTypeDesc();\nbreak;\ncase IDENTIFIER_TOKEN:\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypeDescOrExprRhs(typeOrExpr);\ncase OPEN_BRACKET_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();\nbreak;\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypeDescOrExprRhs(basicLiteral);\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreturn parseActionOrExpressionInLhs(nextToken.kind, null);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind)) {\nreturn parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n}\nreturn parseTypeDescOrExprRhs(typeOrExpr);\n}\nprivate boolean isExpression(SyntaxKind kind) {\nswitch (kind) {\ncase BASIC_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n}\n}\n/**\n* Parse statement that starts with an empty parenthesis. 
Empty parenthesis can be\n* 1) Nil literal\n* 2) Nil type-desc\n* 3) Anon-function params\n*\n* @param openParen Open parenthesis\n* @param closeParen Close parenthesis\n* @return Parsed node\n*/\nprivate STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nSTNode params = STNodeFactory.createNodeList();\nSTNode anonFuncParam =\nSTNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nendContext();\nreturn anonFuncParam;\ndefault:\nreturn STNodeFactory.createNilLiteralNode(openParen, closeParen);\n}\n}\nprivate STNode parseAnonFuncExprOrTypedBPWithFuncType() {\nSTNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc();\nif (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {\nreturn exprOrTypeDesc;\n}\nreturn parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\n/**\n* Parse anon-func-expr or function-type-desc, by resolving the ambiguity.\n*\n* @return Anon-func-expr or function-type-desc\n*/\nprivate STNode parseAnonFuncExprOrFuncTypeDesc() {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcSignature = parseFuncSignature(true);\nendContext();\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nfuncSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\nSTNode funcBody = parseAnonFuncBody(false);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nSTNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, functionKeyword,\nfuncSignature, funcBody);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);\ncase IDENTIFIER_TOKEN:\ndefault:\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\ntrue);\n}\n}\nprivate STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {\nSyntaxKind nextTokenKind = peek().kind;\nreturn parseTypeDescOrExprRhs(nextTokenKind, typeOrExpr);\n}\nprivate STNode parseTypeDescOrExprRhs(SyntaxKind nextTokenKind, STNode typeOrExpr) {\nSTNode typeDesc;\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn STNodeFactory.createUnionTypeDescriptorNode(typeDesc, pipe, rhsTypeDescOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind != SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = 
getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(typeDesc, ampersand, rhsTypeDescOrExpr);\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\ntypeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\nfalse);\nendContext();\nreturn typeDesc;\ncase SEMICOLON_TOKEN:\nreturn getTypeDescFromExpr(typeOrExpr);\ncase EQUAL_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\ncase COMMA_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,\nParserRuleContext.AMBIGUOUS_STMT);\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nreturn STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextTokenKind, typeOrExpr.kind)) {\nreturn parseExpressionRhs(nextTokenKind, DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false,\nfalse);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypeDescOrExprRhs(solution.tokenKind, typeOrExpr);\n}\n}\nprivate boolean isAmbiguous(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase BRACKETED_LIST:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\nbinaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase INDEXED_EXPRESSION:\nSTIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;\nif (!isAmbiguous(indexExpr.containerExpression)) {\nreturn false;\n}\nSTNode keys = indexExpr.keyExpression;\nfor (int i = 0; i < keys.bucketCount(); i++) {\nSTNode item = keys.childInBucket(i);\nif (item.kind == SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAmbiguous(item)) {\nreturn false;\n}\n}\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isAllBasicLiterals(STNode node) {\nswitch (node.kind) {\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\nbinaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase BRACKETED_LIST:\nSTAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;\nfor (STNode member : list.members) {\nif (member.kind == 
SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAllBasicLiterals(member)) {\nreturn false;\n}\n}\nreturn true;\ncase UNARY_EXPRESSION:\nSTUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;\nif (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&\nunaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {\nreturn false;\n}\nreturn isNumericLiteral(unaryExpr.expression);\ndefault:\nreturn false;\n}\n}\nprivate boolean isNumericLiteral(STNode node) {\nswitch (node.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTypedDescOrExprStartsWithOpenBracket() {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList members = new ArrayList<>();\nSTNode memberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nSTNode expr = parseTypeDescOrExpr();\nmembers.add(expr);\nmemberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmembers.add(memberEnd);\n}\nSTNode memberNodes = STNodeFactory.createNodeList(members);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);\n}\n/**\n* Parse binding-patterns.\n*
\n* \n* binding-pattern := capture-binding-pattern\n* | wildcard-binding-pattern\n* | list-binding-pattern\n* | mapping-binding-pattern\n* | functional-binding-pattern\n*
\n*\n* capture-binding-pattern := variable-name\n* variable-name := identifier\n*
\n*\n* wildcard-binding-pattern := _\n* list-binding-pattern := [ list-member-binding-patterns ]\n*
\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
\n*\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
\n* field-binding-pattern := field-name : binding-pattern | variable-name\n*
\n* rest-binding-pattern := ... variable-name\n*\n*
\n* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )\n*
\n* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]\n* | other-arg-binding-patterns\n*
\n* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*\n*
\n* positional-arg-binding-pattern := binding-pattern\n*
\n* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]\n* | [rest-binding-pattern]\n*
\n* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*\n*
\n* named-arg-binding-pattern := arg-name = binding-pattern\n*
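\n* e.g. (illustrative) x is a capture-binding-pattern, _ a wildcard-binding-pattern,\n* [a, b] a list-binding-pattern, and {name: n} a mapping-binding-pattern.\n*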
\n*\n* @return binding-pattern node\n*/\nprivate STNode parseBindingPattern() {\nSTToken token = peek();\nreturn parseBindingPattern(token.kind);\n}\nprivate STNode parseBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseListBindingPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseBindingPatternStartsWithIdentifier();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPattern();\ncase ERROR_KEYWORD:\nreturn parseErrorBindingPattern();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.BINDING_PATTERN);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn parseBindingPattern(sol.tokenKind);\n}\n}\nprivate STNode parseBindingPatternStartsWithIdentifier() {\nSTNode argNameOrBindingPattern =\nparseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\nSTToken secondToken = peek();\nif (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nstartContext(ParserRuleContext.FUNCTIONAL_BINDING_PATTERN);\nreturn parseFunctionalBindingPattern(argNameOrBindingPattern);\n}\nif (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {\nSTNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN);\nidentifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);\nreturn createCaptureOrWildcardBP(identifier);\n}\nreturn createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);\n}\nprivate STNode createCaptureOrWildcardBP(STNode varName) {\nSTNode bindingPattern;\nif (isWildcardBP(varName)) {\nbindingPattern = getWildcardBindingPattern(varName);\n} else {\nbindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);\n}\nreturn bindingPattern;\n}\n/**\n* Parse list-binding-patterns.\n*
\n* \n* list-binding-pattern := [ list-member-binding-patterns ]\n*
\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
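\n* e.g. (illustrative) [a, [b, c], ...rest] binds the first member to a, the second\n* member (itself a list-binding-pattern) to b and c, and any remaining members to rest.\n*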
\n*\n* @return list-binding-pattern node\n*/\nprivate STNode parseListBindingPattern() {\nstartContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode openBracket = parseOpenBracket();\nList bindingPatternsList = new ArrayList<>();\nSTNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);\nendContext();\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, List bindingPatternsList) {\nSTNode listBindingPatternMember = parseListBindingPatternMember();\nbindingPatternsList.add(listBindingPatternMember);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List bindingPatterns) {\nSTNode member = firstMember;\nSTToken token = peek();\nSTNode listBindingPatternRhs = null;\nwhile (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\nlistBindingPatternRhs = parseListBindingPatternMemberRhs(token.kind);\nif (listBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(listBindingPatternRhs);\nmember = parseListBindingPatternMember();\nbindingPatterns.add(member);\ntoken = peek();\n}\nSTNode restBindingPattern;\nif (member.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = bindingPatterns.remove(bindingPatterns.size() - 1);\n} else {\nrestBindingPattern = STNodeFactory.createEmptyNode();\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,\ncloseBracket);\n}\nprivate STNode parseListBindingPatternMemberRhs() {\nreturn parseListBindingPatternMemberRhs(peek().kind);\n}\nprivate STNode parseListBindingPatternMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternMemberRhs(solution.tokenKind);\n}\n}\nprivate boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse list-binding-pattern entry.\n*
\n* \n* list-binding-pattern := [ list-member-binding-patterns ]\n*
\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
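\n* e.g. (illustrative) in [a, ...rest] the members are the binding-pattern a and the\n* rest-binding-pattern ...rest, which may only appear as the last member.\n*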
\n*\n* @return rest-binding-pattern node\n*/\nprivate STNode parseListBindingPatternMember() {\nSTToken token = peek();\nreturn parseListBindingPatternMember(token.kind);\n}\nprivate STNode parseListBindingPatternMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase OPEN_BRACKET_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn parseListBindingPatternMember(sol.tokenKind);\n}\n}\nprivate STNode parseRestBindingPattern() {\nstartContext(ParserRuleContext.REST_BINDING_PATTERN);\nSTNode ellipsis = parseEllipsis();\nSTNode varName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);\nreturn STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);\n}\n/**\n* Parse Typed-binding-pattern.\n*
\n* \n* typed-binding-pattern := inferable-type-descriptor binding-pattern\n*
\n* inferable-type-descriptor := type-descriptor | var\n*
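\n* e.g. (illustrative) int[] [a, b] uses an explicit type-descriptor, whereas\n* var {name: n} leaves the type to be inferred.\n*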
\n*\n* @return Typed binding pattern node\n*/\nprivate STNode parseTypedBindingPattern(ParserRuleContext context) {\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);\nSTNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);\nreturn typeBindingPattern;\n}\n/**\n* Parse mapping-binding-patterns.\n*
\n* \n* mapping-binding-pattern := { field-binding-patterns }\n*
\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
\n* field-binding-pattern := field-name : binding-pattern | variable-name\n*
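\n* e.g. (illustrative) {name: n, ...rest} binds the name field to n and all\n* remaining fields to rest.\n*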
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPattern() {\nstartContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nSTNode openBrace = parseOpenBrace();\nSTToken token = peek();\nif (isEndOfMappingBindingPattern(token.kind)) {\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,\ncloseBrace);\n}\nList bindingPatterns = new ArrayList<>();\nSTNode prevMember = parseMappingBindingPatternMember();\nbindingPatterns.add(prevMember);\nreturn parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);\n}\nprivate STNode parseMappingBindingPattern(STNode openBrace, List bindingPatterns, STNode member) {\nSTToken token = peek();\nSTNode mappingBindingPatternRhs = null;\nwhile (!isEndOfMappingBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\nmappingBindingPatternRhs = parseMappingBindingPatternEnd(token.kind);\nif (mappingBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(mappingBindingPatternRhs);\nmember = parseMappingBindingPatternMember();\nif (member.kind == SyntaxKind.REST_BINDING_PATTERN) {\nbreak;\n}\nbindingPatterns.add(member);\ntoken = peek();\n}\nSTNode restBindingPattern;\nif (member.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = member;\n} else {\nrestBindingPattern = STNodeFactory.createEmptyNode();\n}\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,\ncloseBrace);\n}\n/**\n* Parse mapping-binding-pattern entry.\n*
\n* \n* mapping-binding-pattern := { field-binding-patterns }\n*
\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
\n* field-binding-pattern := field-name : binding-pattern\n* | variable-name\n*
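\n* e.g. (illustrative) {name: n} uses the field-name : binding-pattern form, while\n* {name} is the variable-name shorthand binding the field to a variable of the same name.\n*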
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPatternMember() {\nSTToken token = peek();\nswitch (token.kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ndefault:\nreturn parseFieldBindingPattern();\n}\n}\nprivate STNode parseMappingBindingPatternEnd() {\nreturn parseMappingBindingPatternEnd(peek().kind);\n}\nprivate STNode parseMappingBindingPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingBindingPatternEnd(solution.tokenKind);\n}\n}\nprivate STNode parseFieldBindingPattern() {\nreturn parseFieldBindingPattern(peek().kind);\n}\n/**\n* Parse field-binding-pattern.\n* field-binding-pattern := field-name : binding-pattern | varname\n*\n* @return field-binding-pattern node\n*/\nprivate STNode parseFieldBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nSTNode fieldBindingPattern = parseFieldBindingPattern(identifier);\nreturn fieldBindingPattern;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldBindingPattern(solution.tokenKind);\n}\n}\nprivate STNode parseFieldBindingPattern(STNode identifier) {\nSTNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);\n}\nSTNode colon = parseColon();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);\n}\nprivate boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseErrorBindingPattern() {\nstartContext(ParserRuleContext.FUNCTIONAL_BINDING_PATTERN);\nSTNode typeDesc = parseErrorKeyword();\nreturn parseFunctionalBindingPattern(typeDesc);\n}\nprivate STNode parseFunctionalBindingPattern(STNode typeDesc) {\nSTNode openParenthesis = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode argListBindingPatterns = parseArgListBindingPatterns();\nSTNode closeParenthesis = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createFunctionalBindingPatternNode(typeDesc, openParenthesis, argListBindingPatterns,\ncloseParenthesis);\n}\nprivate STNode parseArgListBindingPatterns() {\nList argListBindingPatterns = new ArrayList<>();\nSyntaxKind lastValidArgKind = SyntaxKind.CAPTURE_BINDING_PATTERN;\nSTToken nextToken = peek();\nwhile (!isEndOfParametersList(nextToken.kind)) {\nSTNode currentArg = parseArgBindingPattern(nextToken.kind);\nDiagnosticErrorCode errorCode = validateArgBindingPatternOrder(lastValidArgKind, currentArg.kind);\nif (errorCode == null) {\nargListBindingPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);\n}\nnextToken = peek();\nSTNode argEnd = parseArgsBindingPatternEnd(nextToken.kind);\nif (argEnd == null) {\nbreak;\n}\nif (errorCode == null) 
{\nargListBindingPatterns.add(argEnd);\n} else {\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);\n}\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nprivate STNode parseArgsBindingPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_BINDING_PATTERN_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgsBindingPatternEnd(solution.tokenKind);\n}\n}\nprivate STNode parseArgBindingPattern(SyntaxKind kind) {\nswitch (kind) {\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseNamedOrPositionalArgBindingPattern(kind);\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_BINDING_PATTERN);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgBindingPattern(solution.tokenKind);\n}\n}\nprivate STNode parseNamedOrPositionalArgBindingPattern(SyntaxKind nextTokenKind) {\nSTNode argNameOrBindingPattern = parseQualifiedIdentifier(ParserRuleContext.ARG_BINDING_PATTERN_START_IDENT);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nSTNode equal = parseAssignOp();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createNamedArgBindingPatternNode(argNameOrBindingPattern, equal, bindingPattern);\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalBindingPattern(argNameOrBindingPattern);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn createCaptureOrWildcardBP(argNameOrBindingPattern);\n}\n}\nprivate DiagnosticErrorCode validateArgBindingPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\nbreak;\ncase NAMED_ARG_BINDING_PATTERN:\nif (currentArgKind != SyntaxKind.NAMED_ARG_BINDING_PATTERN &&\ncurrentArgKind != SyntaxKind.REST_BINDING_PATTERN) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_BINDING_PATTERN:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nthrow new IllegalStateException(\"Invalid SyntaxKind in an argument\");\n}\nreturn errorCode;\n}\n/*\n* This parses Typed binding patterns and deals with ambiguity between types,\n* and binding patterns. 
An example is 'T[a]'.\n* The ambiguity lies in between:\n* 1) Array Type\n* 2) List binding pattern\n* 3) Member access expression.\n*/\n/**\n* Parse the component after the type-desc, of a typed-binding-pattern.\n*\n* @param typeDesc Starting type-desc of the typed-binding-pattern\n* @return Typed-binding pattern\n*/\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternTypeRhs(nextToken.kind, typeDesc, context, true);\n}\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternTypeRhs(nextToken.kind, typeDesc, context, isRoot);\n}\nprivate STNode parseTypedBindingPatternTypeRhs(SyntaxKind nextTokenKind, STNode typeDesc, ParserRuleContext context,\nboolean isRoot) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode bindingPattern = parseBindingPattern(nextTokenKind);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase OPEN_BRACKET_TOKEN:\nSTNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);\nassert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;\nreturn typedBindingPattern;\ncase CLOSE_PAREN_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nif (!isRoot) {\nreturn typeDesc;\n}\ndefault:\nSolution solution =\nrecover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternTypeRhs(solution.tokenKind, typeDesc, context, isRoot);\n}\n}\n/**\n* Parse typed-binding pattern with list, array-type-desc, or member-access-expr.\n*\n* @param typeDescOrExpr Type desc or the expression at the start\n* @param isTypedBindingPattern Is this is a typed-binding-pattern.\n* @return Parsed node\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nif (isBracketedListEnd(peek().kind)) {\nreturn parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);\n}\nSTNode member = parseBracketedListMember(isTypedBindingPattern);\nSyntaxKind currentNodeType = getBracketedListNodeType(member);\nswitch (currentNodeType) {\ncase ARRAY_TYPE_DESC:\nSTNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);\nreturn typedBindingPattern;\ncase LIST_BINDING_PATTERN:\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase INDEXED_EXPRESSION:\nreturn parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);\ncase NONE:\ndefault:\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd != null) {\nList memberList = new ArrayList<>();\nmemberList.add(member);\nmemberList.add(memberEnd);\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, memberList);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\nSTNode closeBracket = 
parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,\nisTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {\nmember = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode keyExpr = STNodeFactory.createNodeList(member);\nSTNode memberAccessExpr =\nSTNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);\n}\nprivate boolean isBracketedListEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseBracketedListMember(boolean isTypedBindingPattern) {\nreturn parseBracketedListMember(peek().kind, isTypedBindingPattern);\n}\n/**\n* Parse a member of an ambiguous bracketed list. This member could be:\n* 1) Array length\n* 2) Key expression of a member-access-expr\n* 3) A member-binding pattern of a list-binding-pattern.\n*\n* @param nextTokenKind Kind of the next token\n* @param isTypedBindingPattern Is this in a definite typed-binding pattern\n* @return Parsed member node\n*/\nprivate STNode parseBracketedListMember(SyntaxKind nextTokenKind, boolean isTypedBindingPattern) {\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\ncase STRING_LITERAL:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\ncase ELLIPSIS_TOKEN:\ncase OPEN_BRACKET_TOKEN:\nreturn parseStatementStartBracketedListMember();\ncase IDENTIFIER_TOKEN:\nif (isTypedBindingPattern) {\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseListBindingPatternMember();\n}\nreturn identifier;\n}\nbreak;\ndefault:\nif (!isTypedBindingPattern && isValidExpressionStart(nextTokenKind, 1)) {\nbreak;\n}\nParserRuleContext recoverContext =\nisTypedBindingPattern ? 
ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH\n: ParserRuleContext.BRACKETED_LIST_MEMBER;\nSolution solution = recover(peek(), recoverContext, isTypedBindingPattern);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseBracketedListMember(solution.tokenKind, isTypedBindingPattern);\n}\nSTNode expr = parseExpression();\nif (isWildcardBP(expr)) {\nreturn getWildcardBindingPattern(expr);\n}\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE || expr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseListBindingPatternMember();\n}\n}\nreturn expr;\n}\n/**\n* Treat the current node as an array, and parse the remainder of the binding pattern.\n*\n* @param typeDesc Type-desc\n* @param openBracket Open bracket\n* @param member Member\n* @return Parsed node\n*/\nprivate STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {\ntypeDesc = getTypeDescFromExpr(typeDesc);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,\ncontext);\n}\nprivate STNode parseBracketedListMemberEnd() {\nreturn parseBracketedListMemberEnd(peek().kind);\n}\nprivate STNode parseBracketedListMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseBracketedListMemberEnd(solution.tokenKind);\n}\n}\n/**\n* We reach here to break ambiguity of T[a]. 
This could be:\n* 1) Array Type Desc\n* 2) Member access on LHS\n* 3) Typed-binding-pattern\n*\n* @param typeDescOrExpr Type name or the expr that precede the open-bracket.\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Open bracket\n* @param isTypedBindingPattern Is this is a typed-binding-pattern.\n* @return Specific node that matches to T[a], after solving ambiguity.\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrMemberAccessRhs(nextToken.kind, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode parseTypedBindingPatternOrMemberAccessRhs(SyntaxKind nextTokenKind, STNode typeDescOrExpr,\nSTNode openBracket, STNode member, STNode closeBracket,\nboolean isTypedBindingPattern, boolean allowAssignment,\nParserRuleContext context) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nSTNode arrayTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\ncase OPEN_BRACKET_TOKEN:\nif (isTypedBindingPattern) {\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc =\nSTNodeFactory.createArrayTypeDescriptorNode(typeDesc, openBracket, member, closeBracket);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\n}\nSTNode keyExpr = STNodeFactory.createNodeList(member);\nSTNode expr =\nSTNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);\ncase QUESTION_MARK_TOKEN:\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\ntypeDesc = parseComplexTypeDescriptor(arrayTypeDesc,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nreturn parseTypedBindingPatternTypeRhs(typeDesc, context);\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\nreturn parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket,\ncontext, isTypedBindingPattern);\ncase IN_KEYWORD:\nif (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase EQUAL_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nif (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\n}\nkeyExpr = STNodeFactory.createNodeList(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\ncase SEMICOLON_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase CLOSE_BRACE_TOKEN:\ncase COMMA_TOKEN:\nif (context == ParserRuleContext.AMBIGUOUS_STMT) {\nkeyExpr = STNodeFactory.createNodeList(member);\nreturn 
STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(nextTokenKind, closeBracket.kind)) {\nkeyExpr = STNodeFactory.createNodeList(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternOrMemberAccessRhs(solution.tokenKind, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket) {\nSTNode bindingPatterns;\nif (isEmpty(member)) {\nbindingPatterns = STNodeFactory.createEmptyNodeList();\n} else {\nSTNode bindingPattern = getBindingPattern(member);\nbindingPatterns = STNodeFactory.createNodeList(bindingPattern);\n}\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns,\nrestBindingPattern, closeBracket);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\n/**\n* Parse a union or intersection type-desc/binary-expression that involves ambiguous\n* bracketed list in lhs.\n*
\n* e.g.: (T[a] & R...) or (T[a] | R...)\n*
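\n* For instance (illustrative), in T[a] | R[b] c; the whole of T[a] | R[b] is the\n* type-desc and c is the binding pattern, whereas in T[a] | R[b] = expr; the\n* type-desc is T[a] | R and [b] is the binding pattern.\n*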
\n* Complexity occurs in scenarios such as T[a] |/& R[b]. If the token after this\n* is another binding-pattern, then (T[a] |/& R[b]) becomes the type-desc. However,\n* if the token follows this is an equal or semicolon, then (T[a] |/& R) becomes\n* the type-desc, and [b] becomes the binding pattern.\n*\n* @param typeDescOrExpr Type desc or the expression\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Close bracket\n* @param context COntext in which the typed binding pattern occurs\n* @return Parsed node\n*/\nprivate STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeOrAndToken = parseUnionOrIntersectionToken();\nSTNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);\nif (isTypedBindingPattern || typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);\nlhsTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);\nSTTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;\nSTNode newTypeDesc;\nif (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {\nnewTypeDesc = STNodeFactory.createUnionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken,\nrhsTypedBindingPattern.typeDescriptor);\n} else {\nnewTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken,\nrhsTypedBindingPattern.typeDescriptor);\n}\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);\n} else {\nSTNode keyExpr = getExpression(member);\nSTNode containerExpr = getExpression(typeDescOrExpr);\nSTNode lhsExpr =\nSTNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,\ntypedBindingPatternOrExpr);\n}\n}\nprivate STNode createArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {\nif (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\nSTUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);\nlhsTypeDesc = STNodeFactory.createUnionTypeDescriptorNode(unionTypeDesc.leftTypeDesc,\nunionTypeDesc.pipeToken, middleTypeDesc);\n} else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\nSTIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc =\ncreateArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);\nlhsTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(intersectionTypeDesc.leftTypeDesc,\nintersectionTypeDesc.bitwiseAndToken, middleTypeDesc);\n} else {\nlhsTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(lhsTypeDesc, openBracket, member, closeBracket);\n}\nreturn lhsTypeDesc;\n}\n/**\n* Parse union (|) or intersection (&) type operator.\n*\n* @return pipe or bitwise and token\n*/\nprivate STNode parseUnionOrIntersectionToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* 
Infer the type of the ambiguous bracketed list, based on the type of the member.\n*\n* @param memberNode Member node\n* @return Inferred type of the bracketed list\n*/\nprivate SyntaxKind getBracketedListNodeType(STNode memberNode) {\nif (isEmpty(memberNode)) {\nreturn SyntaxKind.NONE;\n}\nif (isDefiniteTypeDesc(memberNode.kind)) {\nreturn SyntaxKind.TUPLE_TYPE_DESC;\n}\nswitch (memberNode.kind) {\ncase ASTERISK_TOKEN:\nreturn SyntaxKind.ARRAY_TYPE_DESC;\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase QUALIFIED_NAME_REFERENCE:\ncase REST_TYPE:\nreturn SyntaxKind.TUPLE_TYPE_DESC;\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase SIMPLE_NAME_REFERENCE:\ncase BRACKETED_LIST:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.NONE;\ndefault:\nreturn SyntaxKind.INDEXED_EXPRESSION;\n}\n}\n/*\n* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.\n* The ambiguity lies in between:\n* 1) Assignment that starts with list binding pattern\n* 2) Var-decl statement that starts with tuple type\n* 3) Statement that starts with list constructor, such as sync-send, etc.\n*/\n/**\n* Parse any statement that starts with an open-bracket.\n*\n* @param annots Annotations attached to the statement.\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {\nstartContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);\nreturn parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);\n}\nprivate STNode parseMemberBracketedList(boolean possibleMappingField) {\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStatementStartsWithOpenBracket(annots, false, possibleMappingField);\n}\n/**\n* The bracketed list at the start of a statement can be one of the following.\n* 1) List binding pattern\n* 2) Tuple type\n* 3) List constructor\n*\n* @param isRoot Is this the root of the list\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {\nstartContext(ParserRuleContext.STMT_START_BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nwhile (!isBracketedListEnd(peek().kind)) {\nSTNode member = parseStatementStartBracketedListMember();\nSyntaxKind currentNodeType = getStmtStartBracketedListType(member);\nswitch (currentNodeType) {\ncase TUPLE_TYPE_DESC:\nreturn parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\ncase LIST_BINDING_PATTERN:\nreturn parseAsListBindingPattern(openBracket, memberList, member, isRoot);\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nreturn parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);\ncase NONE:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bracketedList = parseStatementStartBracketedList(annots, openBracket, memberList, closeBracket, isRoot,\npossibleMappingField);\nreturn bracketedList;\n}\nprivate STNode parseStatementStartBracketedListMember() {\nSTToken nextToken = peek();\nreturn parseStatementStartBracketedListMember(nextToken.kind);\n}\n/**\n* 
Parse a member of a list-binding-pattern, tuple-type-desc, or\n* list-constructor-expr, when the parent is ambiguous.\n*\n* @param nextTokenKind Kind of the next token.\n* @return Parsed node\n*/\nprivate STNode parseStatementStartBracketedListMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseMemberBracketedList(false);\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (isWildcardBP(identifier)) {\nSTNode varName = ((STSimpleNameReferenceNode) identifier).name;\nreturn getWildcardBindingPattern(varName);\n}\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) {\nSTNode ellipsis = parseEllipsis();\nreturn STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPatterOrMappingConstructor();\ncase ERROR_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseErrorConstructorExpr();\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase ELLIPSIS_TOKEN:\nreturn parseListBindingPatternMember();\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseExpression(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.LT_TOKEN) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nreturn parseExpression(false);\ncase OPEN_PAREN_TOKEN:\nreturn parseTypeDescOrExpr();\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseExpression(false);\n}\nif (isTypeStartingToken(nextTokenKind)) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nSolution solution = recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatementStartBracketedListMember(solution.tokenKind);\n}\n}\nprivate STNode parseAsTupleTypeDesc(STNode annots, STNode openBracket, List memberList, STNode member,\nboolean isRoot) {\nmemberList = getTypeDescList(memberList);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode tupleTypeMembers = parseTupleTypeMembers(member, memberList);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode tupleType = STNodeFactory.createTupleTypeDescriptorNode(openBracket, tupleTypeMembers, closeBracket);\nSTNode typeDesc =\nparseComplexTypeDescriptor(tupleType, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nendContext();\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT, isRoot);\nif (!isRoot) {\nreturn typedBindingPattern;\n}\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, STNodeFactory.createEmptyNode(), typedBindingPattern, false);\n}\nprivate STNode parseAsListBindingPattern(STNode openBracket, List memberList, STNode member,\nboolean isRoot) {\nmemberList = getBindingPatternsList(memberList);\nmemberList.add(member);\nswitchContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, member, memberList);\nendContext();\nif (!isRoot) {\nreturn listBindingPattern;\n}\nreturn parseAssignmentStmtRhs(listBindingPattern);\n}\nprivate STNode 
parseAsListBindingPattern(STNode openBracket, List memberList) {\nmemberList = getBindingPatternsList(memberList);\nswitchContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, memberList);\nendContext();\nreturn listBindingPattern;\n}\nprivate STNode parseAsListBindingPatternOrListConstructor(STNode openBracket, List memberList,\nSTNode member, boolean isRoot) {\nmemberList.add(member);\nSTNode memberEnd = parseBracketedListMemberEnd();\nSTNode listBindingPatternOrListCons;\nif (memberEnd == null) {\nSTNode closeBracket = parseCloseBracket();\nlistBindingPatternOrListCons =\nparseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot);\n} else {\nmemberList.add(memberEnd);\nlistBindingPatternOrListCons = parseListBindingPatternOrListConstructor(openBracket, memberList, isRoot);\n}\nreturn listBindingPatternOrListCons;\n}\nprivate SyntaxKind getStmtStartBracketedListType(STNode memberNode) {\nif (memberNode.kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 &&\nmemberNode.kind.compareTo(SyntaxKind.TYPEDESC_TYPE_DESC) <= 0) {\nreturn SyntaxKind.TUPLE_TYPE_DESC;\n}\nswitch (memberNode.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\nreturn SyntaxKind.ARRAY_TYPE_DESC;\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase QUALIFIED_NAME_REFERENCE:\ncase REST_TYPE:\nreturn SyntaxKind.TUPLE_TYPE_DESC;\ncase LIST_CONSTRUCTOR:\ncase MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_CONSTRUCTOR;\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR;\ncase SIMPLE_NAME_REFERENCE:\ncase BRACKETED_LIST:\nreturn SyntaxKind.NONE;\ncase FUNCTION_CALL:\nif (isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) memberNode)) {\nreturn SyntaxKind.NONE;\n}\nreturn SyntaxKind.LIST_CONSTRUCTOR;\ndefault:\nif (isExpression(memberNode.kind) && !isAllBasicLiterals(memberNode) && !isAmbiguous(memberNode)) {\nreturn SyntaxKind.LIST_CONSTRUCTOR;\n}\nreturn SyntaxKind.NONE;\n}\n}\nprivate boolean isPosibleFunctionalBindingPattern(STFunctionCallExpressionNode funcCall) {\nSTNode args = funcCall.arguments;\nint size = args.bucketCount();\nfor (int i = 0; i < size; i++) {\nSTNode arg = args.childInBucket(i);\nif (arg.kind != SyntaxKind.NAMED_ARG && arg.kind != SyntaxKind.POSITIONAL_ARG &&\narg.kind != SyntaxKind.REST_ARG) {\ncontinue;\n}\nif (!isPosibleArgBindingPattern((STFunctionArgumentNode) arg)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean isPosibleArgBindingPattern(STFunctionArgumentNode arg) {\nswitch (arg.kind) {\ncase POSITIONAL_ARG:\nSTNode expr = ((STPositionalArgumentNode) arg).expression;\nreturn isPosibleBindingPattern(expr);\ncase NAMED_ARG:\nexpr = ((STNamedArgumentNode) arg).expression;\nreturn isPosibleBindingPattern(expr);\ncase REST_ARG:\nexpr = ((STRestArgumentNode) arg).expression;\nreturn expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE;\ndefault:\nreturn false;\n}\n}\nprivate boolean isPosibleBindingPattern(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn true;\ncase LIST_CONSTRUCTOR:\nSTListConstructorExpressionNode listConstructor = (STListConstructorExpressionNode) node;\nfor (int i = 0; i < listConstructor.bucketCount(); i++) {\nSTNode expr = listConstructor.childInBucket(i);\nif (!isPosibleBindingPattern(expr)) {\nreturn false;\n}\n}\nreturn true;\ncase 
MAPPING_CONSTRUCTOR:\nSTMappingConstructorExpressionNode mappingConstructor = (STMappingConstructorExpressionNode) node;\nfor (int i = 0; i < mappingConstructor.bucketCount(); i++) {\nSTNode expr = mappingConstructor.childInBucket(i);\nif (!isPosibleBindingPattern(expr)) {\nreturn false;\n}\n}\nreturn true;\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode specificField = (STSpecificFieldNode) node;\nif (specificField.readonlyKeyword != null) {\nreturn false;\n}\nif (specificField.valueExpr == null) {\nreturn true;\n}\nreturn isPosibleBindingPattern(specificField.valueExpr);\ncase FUNCTION_CALL:\nreturn isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) node);\ndefault:\nreturn false;\n}\n}\nprivate STNode parseStatementStartBracketedList(STNode annots, STNode openBracket, List members,\nSTNode closeBracket, boolean isRoot, boolean possibleMappingField) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase EQUAL_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nSTNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(members));\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode listBindingPattern = STNodeFactory.createListBindingPatternNode(openBracket,\nmemberBindingPatterns, restBindingPattern, closeBracket);\nendContext();\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(listBindingPattern);\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nif (members.isEmpty()) {\nopenBracket =\nSyntaxErrors.addDiagnostic(openBracket, DiagnosticErrorCode.ERROR_MISSING_TUPLE_MEMBER);\n}\nswitchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\nSTNode tupleTypeDesc =\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\nendContext();\nSTNode typeDesc = parseComplexTypeDescriptor(tupleTypeDesc,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nendContext();\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, typedBindingPattern);\ncase OPEN_BRACKET_TOKEN:\nif (!isRoot) {\nmemberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\ntupleTypeDesc =\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\nendContext();\ntypeDesc = parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nreturn typeDesc;\n}\nSTAmbiguousCollectionNode list =\nnew STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\nendContext();\nSTNode tpbOrExpr = parseTypedBindingPatternOrExprRhs(list, true);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, tpbOrExpr);\ncase COLON_TOKEN:\nif (possibleMappingField && members.size() == 1) {\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode colon = parseColon();\nSTNode fieldNameExpr = getExpression(members.get(0));\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon,\nvalueExpr);\n}\ndefault:\nendContext();\nif (!isRoot) {\nreturn new 
STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nlist = new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\nSTNode exprOrTPB = parseTypedBindingPatternOrExprRhs(list, false);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, exprOrTPB);\n}\n}\nprivate boolean isWildcardBP(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTToken nameToken = (STToken) ((STSimpleNameReferenceNode) node).name;\nreturn isUnderscoreToken(nameToken);\ncase IDENTIFIER_TOKEN:\nreturn isUnderscoreToken((STToken) node);\ndefault:\nreturn false;\n}\n}\nprivate boolean isUnderscoreToken(STToken token) {\nreturn \"_\".equals(token.text());\n}\nprivate STNode getWildcardBindingPattern(STNode identifier) {\nswitch (identifier.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTNode varName = ((STSimpleNameReferenceNode) identifier).name;\nreturn STNodeFactory.createWildcardBindingPatternNode(varName);\ncase IDENTIFIER_TOKEN:\nreturn STNodeFactory.createWildcardBindingPatternNode(identifier);\ndefault:\nthrow new IllegalStateException();\n}\n}\n/*\n* This section tries to break the ambiguity in parsing a statement that starts with a open-brace.\n*/\n/**\n* Parse statements that starts with open-brace. It could be a:\n* 1) Block statement\n* 2) Var-decl with mapping binding pattern.\n* 3) Statement that starts with mapping constructor expression.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBrace() {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode openBrace = parseOpenBrace();\nif (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nSTNode closeBrace = parseCloseBrace();\nswitch (peek().kind) {\ncase EQUAL_TOKEN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nSTNode fields = STNodeFactory.createEmptyNodeList();\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode bindingPattern = STNodeFactory.createMappingBindingPatternNode(openBrace, fields,\nrestBindingPattern, closeBrace);\nreturn parseAssignmentStmtRhs(bindingPattern);\ncase RIGHT_ARROW_TOKEN:\ncase SYNC_SEND_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nfields = STNodeFactory.createEmptyNodeList();\nSTNode expr = STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ndefault:\nSTNode statements = STNodeFactory.createEmptyNodeList();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace);\n}\n}\nSTNode member = parseStatementStartingBracedListFirstMember();\nSyntaxKind nodeType = getBracedListType(member);\nSTNode stmt;\nswitch (nodeType) {\ncase MAPPING_BINDING_PATTERN:\nreturn parseStmtAsMappingBindingPatternStart(openBrace, member);\ncase MAPPING_CONSTRUCTOR:\nreturn parseStmtAsMappingConstructorStart(openBrace, member);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn parseStmtAsMappingBPOrMappingConsStart(openBrace, member);\ncase BLOCK_STATEMENT:\nSTNode closeBrace = parseCloseBrace();\nstmt = STNodeFactory.createBlockStatementNode(openBrace, member, closeBrace);\nendContext();\nreturn stmt;\ndefault:\nArrayList stmts = new ArrayList<>();\nstmts.add(member);\nSTNode statements = parseStatements(stmts);\ncloseBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace);\n}\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping 
binding pattern.\n*\n* @param openBrace Open brace\n* @param firstMappingField First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingBindingPatternStart(STNode openBrace, STNode firstMappingField) {\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nstartContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nList bindingPatterns = new ArrayList<>();\nif (firstMappingField.kind != SyntaxKind.REST_BINDING_PATTERN) {\nbindingPatterns.add(getBindingPattern(firstMappingField));\n}\nSTNode mappingBP = parseMappingBindingPattern(openBrace, bindingPatterns, firstMappingField);\nreturn parseAssignmentStmtRhs(mappingBP);\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param firstMember First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingConstructorStart(STNode openBrace, STNode firstMember) {\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nList members = new ArrayList<>();\nSTNode mappingCons = parseAsMappingConstructor(openBrace, members, firstMember);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, mappingCons, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\n/**\n* Parse the braced-list as a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param members members list\n* @param member Most recently parsed member\n* @return Parsed node\n*/\nprivate STNode parseAsMappingConstructor(STNode openBrace, List members, STNode member) {\nmembers.add(member);\nmembers = getExpressionList(members);\nswitchContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode fields = parseMappingConstructorFields(members);\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping binding pattern\n* or a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param member First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingBPOrMappingConsStart(STNode openBrace, STNode member) {\nstartContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);\nList members = new ArrayList<>();\nmembers.add(member);\nSTNode bpOrConstructor;\nSTNode memberEnd = parseMappingFieldEnd();\nif (memberEnd == null) {\nSTNode closeBrace = parseCloseBrace();\nbpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members, closeBrace);\n} else {\nmembers.add(memberEnd);\nbpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members);;\n}\nswitch (bpOrConstructor.kind) {\ncase MAPPING_CONSTRUCTOR:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, bpOrConstructor, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ncase MAPPING_BINDING_PATTERN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nSTNode bindingPattern = getBindingPattern(bpOrConstructor);\nreturn parseAssignmentStmtRhs(bindingPattern);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nif (peek().kind == SyntaxKind.EQUAL_TOKEN) {\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nbindingPattern = getBindingPattern(bpOrConstructor);\nreturn parseAssignmentStmtRhs(bindingPattern);\n}\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nexpr = getExpression(bpOrConstructor);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, 
true);\nreturn parseStatementStartWithExprRhs(expr);\n}\n}\n/**\n* Parse a member of a braced-list that occurs at the start of a statement.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartingBracedListFirstMember() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase READONLY_KEYWORD:\nSTNode readonlyKeyword = parseReadonlyKeyword();\nreturn bracedListMemberStartsWithReadonly(readonlyKeyword);\ncase IDENTIFIER_TOKEN:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseIdentifierRhsInStmtStartingBrace(readonlyKeyword);\ncase STRING_LITERAL:\nSTNode key = parseStringLiteral();\nif (peek().kind == SyntaxKind.COLON_TOKEN) {\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, key, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ncase OPEN_BRACKET_TOKEN:\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStatementStartsWithOpenBracket(annots, true);\ncase OPEN_BRACE_TOKEN:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nreturn parseStatementStartsWithOpenBrace();\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nreturn parseStatements();\n}\n}\nprivate STNode bracedListMemberStartsWithReadonly(STNode readonlyKeyword) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseIdentifierRhsInStmtStartingBrace(readonlyKeyword);\ncase STRING_LITERAL:\nif (peek(2).kind == SyntaxKind.COLON_TOKEN) {\nSTNode key = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\nSTNode typeDesc = parseComplexTypeDescriptor(readonlyKeyword,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nendContext();\nSTNode metadata = STNodeFactory.createEmptyNode();\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode typedBP = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(metadata, finalKeyword, typedBP, false);\n}\n}\n/**\n* Parse the rhs components of an identifier that follows an open brace,\n* at the start of a statement. 
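The brace-disambiguation above commits as soon as the first member of the braced list is decisive: a binding-pattern member forces a destructuring assignment, a field with a computed value forces a mapping constructor, and a plain statement forces a block statement. A minimal standalone sketch of that commit-on-first-decisive-member idea (all class and token names here are invented for illustration; this is not the Ballerina parser's API):

```java
// Toy model of classifying a statement that starts with "{" by its first member.
public class BracedStartClassifier {

    enum MemberKind { BINDING_PATTERN, FIELD_WITH_EXPR, SPREAD_FIELD, STATEMENT, AMBIGUOUS_NAME }

    enum StmtKind { MAPPING_BINDING_PATTERN, MAPPING_CONSTRUCTOR, BLOCK_STATEMENT, UNDECIDED }

    // A decisive first member settles the interpretation immediately;
    // an ambiguous one (e.g. a bare identifier) leaves the decision open.
    static StmtKind classify(MemberKind firstMember) {
        switch (firstMember) {
            case BINDING_PATTERN:  return StmtKind.MAPPING_BINDING_PATTERN;
            case FIELD_WITH_EXPR:
            case SPREAD_FIELD:     return StmtKind.MAPPING_CONSTRUCTOR;
            case STATEMENT:        return StmtKind.BLOCK_STATEMENT;
            default:               return StmtKind.UNDECIDED; // keep parsing both interpretations
        }
    }

    public static void main(String[] args) {
        // "{ [a, b]: ..." -> first member is a binding pattern -> destructuring assignment
        System.out.println(classify(MemberKind.BINDING_PATTERN));
        // "{ a: 1 + 2, ..." -> field with a non-pattern value -> mapping constructor
        System.out.println(classify(MemberKind.FIELD_WITH_EXPR));
        // "{ return; ..." -> a plain statement -> block statement
        System.out.println(classify(MemberKind.STATEMENT));
    }
}
```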
i.e: \"{foo\".\n*\n* @param readonlyKeyword Readonly keyword\n* @return Parsed node\n*/\nprivate STNode parseIdentifierRhsInStmtStartingBrace(STNode readonlyKeyword) {\nSTNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nSTNode colon = STNodeFactory.createEmptyNode();\nSTNode value = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value);\ncase COLON_TOKEN:\ncolon = parseColon();\nif (!isEmpty(readonlyKeyword)) {\nvalue = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value);\n}\nSyntaxKind nextTokenKind = peek().kind;\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nSTNode bindingPatternOrExpr = parseListBindingPatternOrListConstructor();\nreturn getMappingField(identifier, colon, bindingPatternOrExpr);\ncase OPEN_BRACE_TOKEN:\nbindingPatternOrExpr = parseMappingBindingPatterOrMappingConstructor();\nreturn getMappingField(identifier, colon, bindingPatternOrExpr);\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifierRhsInStmtStartBrace(identifier, colon);\ndefault:\nSTNode expr = parseExpression();\nreturn getMappingField(identifier, colon, expr);\n}\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nif (!isEmpty(readonlyKeyword)) {\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode bindingPattern = STNodeFactory.createCaptureBindingPatternNode(identifier);\nSTNode typedBindingPattern =\nSTNodeFactory.createTypedBindingPatternNode(readonlyKeyword, bindingPattern);\nSTNode metadata = STNodeFactory.createEmptyNode();\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVarDeclRhs(metadata, finalKeyword, typedBindingPattern, false);\n}\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode qualifiedIdentifier = parseQualifiedIdentifier(identifier, false);\nSTNode expr = parseTypedBindingPatternOrExprRhs(qualifiedIdentifier, true);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, expr);\n}\n}\n/**\n* Parse the rhs components of \"{ identifier : identifier\",\n* at the start of a statement. 
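For a statement that begins `{foo:bar`, the token following the qualified name is what picks the interpretation, as the method below works out. A toy decision table for the same cases (token and class names invented for illustration, not the real SyntaxKind values):

```java
// Toy model: after "{ foo : bar" at statement start, one token of lookahead
// selects the interpretation.
public class QualifiedNameRhsDecision {

    enum Token { COMMA, OPEN_BRACE, IDENTIFIER, OPEN_BRACKET, QUESTION_MARK, EQUAL, SEMICOLON, PIPE }

    static String decide(Token next) {
        switch (next) {
            case COMMA:         return "qualified name ref; field stays ambiguous";
            case OPEN_BRACE:
            case IDENTIFIER:    return "var-decl: 'foo:bar' is the type";
            case OPEN_BRACKET:  return "member access, or var-decl with an array type";
            case QUESTION_MARK: return "var-decl with the optional type 'foo:bar?'";
            case EQUAL:
            case SEMICOLON:     return "expression statement on 'foo:bar'";
            default:            return "expression continues (binary operator, etc.)";
        }
    }

    public static void main(String[] args) {
        System.out.println(decide(Token.IDENTIFIER)); // e.g. "{foo:bar baz = ...}"
        System.out.println(decide(Token.EQUAL));      // e.g. "{foo:bar = ...}"
    }
}
```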
i.e: \"{foo:bar\".\n*\n* @return Parsed node\n*/\nprivate STNode parseQualifiedIdentifierRhsInStmtStartBrace(STNode identifier, STNode colon) {\nSTNode secondIdentifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);\nSTNode secondNameRef = STNodeFactory.createSimpleNameReferenceNode(secondIdentifier);\nif (isWildcardBP(secondIdentifier)) {\nreturn getWildcardBindingPattern(secondIdentifier);\n}\nSyntaxKind nextTokenKind = peek().kind;\nSTNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondNameRef);\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn qualifiedNameRef;\ncase OPEN_BRACE_TOKEN:\ncase IDENTIFIER_TOKEN:\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode typeBindingPattern =\nparseTypedBindingPatternTypeRhs(qualifiedNameRef, ParserRuleContext.VAR_DECL_STMT);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false);\ncase OPEN_BRACKET_TOKEN:\nreturn parseMemberRhsInStmtStartWithBrace(identifier, colon, secondNameRef);\ncase QUESTION_MARK_TOKEN:\nSTNode typeDesc = parseComplexTypeDescriptor(qualifiedNameRef,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nfinalKeyword = STNodeFactory.createEmptyNode();\ntypeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nannots = STNodeFactory.createEmptyNodeList();\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false);\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn parseStatementStartWithExprRhs(qualifiedNameRef);\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ndefault:\nreturn parseMemberWithExprInRhs(identifier, colon, secondNameRef, secondNameRef);\n}\n}\nprivate SyntaxKind getBracedListType(STNode member) {\nswitch (member.kind) {\ncase FIELD_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BINDING_PATTERN;\ncase SPECIFIC_FIELD:\nSTNode expr = ((STSpecificFieldNode) member).valueExpr;\nif (expr == null) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nswitch (expr.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\ncase FUNCTION_CALL:\nif (isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) expr)) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ndefault:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\n}\ncase SPREAD_FIELD:\ncase COMPUTED_NAME_FIELD:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ncase REST_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\ncase LIST:\nreturn SyntaxKind.BLOCK_STATEMENT;\ndefault:\nreturn SyntaxKind.NONE;\n}\n}\n/**\n* Parse mapping binding pattern or mapping constructor.\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingBindingPatterOrMappingConstructor() {\nstartContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);\nSTNode openBrace = parseOpenBrace();\nList memberList = new ArrayList<>();\nreturn parseMappingBindingPatternOrMappingConstructor(openBrace, memberList);\n}\nprivate boolean isBracedListEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn 
false;\n}\n}\nprivate STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List memberList) {\nSTToken nextToken = peek();\nwhile (!isBracedListEnd(nextToken.kind)) {\nSTNode member = parseMappingBindingPatterOrMappingConstructorMember(nextToken.kind);\nSyntaxKind currentNodeType = getTypeOfMappingBPOrMappingCons(member);\nswitch (currentNodeType) {\ncase MAPPING_CONSTRUCTOR:\nreturn parseAsMappingConstructor(openBrace, memberList, member);\ncase MAPPING_BINDING_PATTERN:\nreturn parseAsMappingBindingPattern(openBrace, memberList, member);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseMappingFieldEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBrace = parseCloseBrace();\nreturn parseMappingBindingPatternOrMappingConstructor(openBrace, memberList, closeBrace);\n}\nprivate STNode parseMappingBindingPatterOrMappingConstructorMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);\nreturn parseMappingFieldRhs(key);\ncase STRING_LITERAL:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nkey = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseComputedField();\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode expr = parseExpression();\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn STNodeFactory.createRestBindingPatternNode(ellipsis, expr);\n}\nreturn STNodeFactory.createSpreadFieldNode(ellipsis, expr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternOrListConstructorMember(solution.tokenKind);\n}\n}\nprivate STNode parseMappingFieldRhs(STNode key) {\nSTToken nextToken = peek();\nreturn parseMappingFieldRhs(nextToken.kind, key);\n}\nprivate STNode parseMappingFieldRhs(SyntaxKind tokenKind, STNode key) {\nSTNode colon;\nSTNode valueExpr;\nswitch (tokenKind) {\ncase COLON_TOKEN:\ncolon = parseColon();\nreturn parseMappingFieldValue(key, colon);\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, key);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseSpecificFieldRhs(solution.tokenKind, readonlyKeyword, key);\n}\n}\nprivate STNode parseMappingFieldValue(STNode key, STNode colon) {\nSTNode expr;\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\nexpr = parseExpression();\nbreak;\ncase OPEN_BRACKET_TOKEN:\nexpr = parseListBindingPatternOrListConstructor();\nbreak;\ncase OPEN_BRACE_TOKEN:\nexpr = parseMappingBindingPatterOrMappingConstructor();\nbreak;\ndefault:\nexpr = parseExpression();\nbreak;\n}\nif (isBindingPattern(expr.kind)) {\nreturn STNodeFactory.createFieldBindingPatternFullNode(key, colon, expr);\n}\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn 
STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, expr);\n}\nprivate boolean isBindingPattern(SyntaxKind kind) {\nswitch (kind) {\ncase FIELD_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate SyntaxKind getTypeOfMappingBPOrMappingCons(STNode memberNode) {\nswitch (memberNode.kind) {\ncase FIELD_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BINDING_PATTERN;\ncase SPECIFIC_FIELD:\nSTNode expr = ((STSpecificFieldNode) memberNode).valueExpr;\nif (expr == null || expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||\nexpr.kind == SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR ||\nexpr.kind == SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase SPREAD_FIELD:\ncase COMPUTED_NAME_FIELD:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase REST_BINDING_PATTERN:\ndefault:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\n}\nprivate STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List members,\nSTNode closeBrace) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR, openBrace, members,\ncloseBrace);\n}\nprivate STNode parseAsMappingBindingPattern(STNode openBrace, List members, STNode member) {\nmembers.add(member);\nmembers = getBindingPatternsList(members);\nswitchContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nreturn parseMappingBindingPattern(openBrace, members, member);\n}\n/**\n* Parse list binding pattern or list constructor.\n*\n* @return Parsed node\n*/\nprivate STNode parseListBindingPatternOrListConstructor() {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nreturn parseListBindingPatternOrListConstructor(openBracket, memberList, false);\n}\nprivate STNode parseListBindingPatternOrListConstructor(STNode openBracket, List memberList,\nboolean isRoot) {\nSTToken nextToken = peek();\nwhile (!isBracketedListEnd(nextToken.kind)) {\nSTNode member = parseListBindingPatternOrListConstructorMember(nextToken.kind);\nSyntaxKind currentNodeType = getParsingNodeTypeOfListBPOrListCons(member);\nswitch (currentNodeType) {\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase LIST_BINDING_PATTERN:\nreturn parseAsListBindingPattern(openBracket, memberList, member, isRoot);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBracket = parseCloseBracket();\nreturn parseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot);\n}\nprivate STNode parseListBindingPatternOrListConstructorMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseListBindingPatternOrListConstructor();\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (isWildcardBP(identifier)) {\nreturn 
getWildcardBindingPattern(identifier);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPatterOrMappingConstructor();\ncase ELLIPSIS_TOKEN:\nreturn parseListBindingPatternMember();\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseExpression();\n}\nSolution solution = recover(peek(), ParserRuleContext.LIST_BP_OR_LIST_CONSTRUCTOR_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternOrListConstructorMember(solution.tokenKind);\n}\n}\nprivate SyntaxKind getParsingNodeTypeOfListBPOrListCons(STNode memberNode) {\nswitch (memberNode.kind) {\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase SIMPLE_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR;\ndefault:\nreturn SyntaxKind.LIST_CONSTRUCTOR;\n}\n}\nprivate STNode parseAsListConstructor(STNode openBracket, List memberList, STNode member, boolean isRoot) {\nmemberList.add(member);\nmemberList = getExpressionList(memberList);\nswitchContext(ParserRuleContext.LIST_CONSTRUCTOR);\nSTNode expressions = parseOptionalExpressionsList(memberList);\nSTNode closeBracket = parseCloseBracket();\nSTNode listConstructor =\nSTNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);\nendContext();\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, listConstructor, false, false);\nif (!isRoot) {\nreturn expr;\n}\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseListBindingPatternOrListConstructor(STNode openBracket, List members,\nSTNode closeBracket, boolean isRoot) {\nSTNode lbpOrListCons;\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR, openBracket, members,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(peek().kind, closeBracket.kind)) {\nmembers = getExpressionList(members);\nSTNode memberExpressions = STNodeFactory.createNodeList(members);\nlbpOrListCons = STNodeFactory.createListConstructorExpressionNode(openBracket, memberExpressions,\ncloseBracket);\nbreak;\n}\nmembers = getBindingPatternsList(members);\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(members);\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nlbpOrListCons = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode,\nrestBindingPattern, closeBracket);\nbreak;\n}\nendContext();\nif (!isRoot) {\nreturn lbpOrListCons;\n}\nreturn parseStmtStartsWithTypedBPOrExprRhs(null, lbpOrListCons);\n}\nprivate STNode parseMemberRhsInStmtStartWithBrace(STNode identifier, STNode colon, STNode secondIdentifier) {\nSTNode typedBPOrExpr =\nparseTypedBindingPatternOrMemberAccess(secondIdentifier, false, true, ParserRuleContext.AMBIGUOUS_STMT);\nif (isExpression(typedBPOrExpr.kind)) {\nreturn parseMemberWithExprInRhs(identifier, colon, secondIdentifier, typedBPOrExpr);\n}\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode annots = STNodeFactory.createEmptyNode();\nSTNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, 
secondIdentifier);\nSTNode typeDesc = mergeQualifiedNameWithTypeDesc(qualifiedNameRef,\n((STTypedBindingPatternNode) typedBPOrExpr).typeDescriptor);\nreturn parseVarDeclRhs(annots, finalKeyword, typeDesc, false);\n}\n/**\n* Parse a member that starts with \"foo:bar[\", in a statement starting with a brace.\n*\n* @param identifier First identifier of the statement\n* @param colon Colon that follows the first identifier\n* @param secondIdentifier Identifier that follows the colon\n* @param memberAccessExpr Member access expression\n* @return Parsed node\n*/\nprivate STNode parseMemberWithExprInRhs(STNode identifier, STNode colon, STNode secondIdentifier,\nSTNode memberAccessExpr) {\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, true);\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, expr);\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode qualifiedName =\nSTNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier);\nSTNode updatedExpr = mergeQualifiedNameWithExpr(qualifiedName, expr);\nreturn parseStatementStartWithExprRhs(updatedExpr);\n}\n}\n/**\n* Replace the first identifier of an expression, with a given qualified-identifier.\n* Only expressions that can start with \"bar[..]\" can reach here.\n*\n* @param qualifiedName Qualified identifier to replace simple identifier\n* @param exprOrAction Expression or action\n* @return Updated expression\n*/\nprivate STNode mergeQualifiedNameWithExpr(STNode qualifiedName, STNode exprOrAction) {\nswitch (exprOrAction.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn qualifiedName;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) exprOrAction;\nSTNode newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, binaryExpr.lhsExpr);\nreturn STNodeFactory.createBinaryExpressionNode(binaryExpr.kind, newLhsExpr, binaryExpr.operator,\nbinaryExpr.rhsExpr);\ncase FIELD_ACCESS:\nSTFieldAccessExpressionNode fieldAccess = (STFieldAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, fieldAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, fieldAccess.dotToken,\nfieldAccess.fieldName);\ncase INDEXED_EXPRESSION:\nSTIndexedExpressionNode memberAccess = (STIndexedExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, memberAccess.containerExpression);\nreturn STNodeFactory.createIndexedExpressionNode(newLhsExpr, memberAccess.openBracket,\nmemberAccess.keyExpression, memberAccess.closeBracket);\ncase TYPE_TEST_EXPRESSION:\nSTTypeTestExpressionNode typeTest = (STTypeTestExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, typeTest.expression);\nreturn STNodeFactory.createTypeTestExpressionNode(newLhsExpr, typeTest.isKeyword,\ntypeTest.typeDescriptor);\ncase ANNOT_ACCESS:\nSTAnnotAccessExpressionNode annotAccess = (STAnnotAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, annotAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, annotAccess.annotChainingToken,\nannotAccess.annotTagReference);\ncase 
OPTIONAL_FIELD_ACCESS:\nSTOptionalFieldAccessExpressionNode optionalFieldAccess =\n(STOptionalFieldAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, optionalFieldAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr,\noptionalFieldAccess.optionalChainingToken, optionalFieldAccess.fieldName);\ncase CONDITIONAL_EXPRESSION:\nSTConditionalExpressionNode conditionalExpr = (STConditionalExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, conditionalExpr.lhsExpression);\nreturn STNodeFactory.createConditionalExpressionNode(newLhsExpr, conditionalExpr.questionMarkToken,\nconditionalExpr.middleExpression, conditionalExpr.colonToken, conditionalExpr.endExpression);\ncase REMOTE_METHOD_CALL_ACTION:\nSTRemoteMethodCallActionNode remoteCall = (STRemoteMethodCallActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, remoteCall.expression);\nreturn STNodeFactory.createRemoteMethodCallActionNode(newLhsExpr, remoteCall.rightArrowToken,\nremoteCall.methodName, remoteCall.openParenToken, remoteCall.arguments,\nremoteCall.closeParenToken);\ncase ASYNC_SEND_ACTION:\nSTAsyncSendActionNode asyncSend = (STAsyncSendActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, asyncSend.expression);\nreturn STNodeFactory.createAsyncSendActionNode(newLhsExpr, asyncSend.rightArrowToken,\nasyncSend.peerWorker);\ncase SYNC_SEND_ACTION:\nSTSyncSendActionNode syncSend = (STSyncSendActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, syncSend.expression);\nreturn STNodeFactory.createAsyncSendActionNode(newLhsExpr, syncSend.syncSendToken, syncSend.peerWorker);\ndefault:\nreturn exprOrAction;\n}\n}\nprivate STNode mergeQualifiedNameWithTypeDesc(STNode qualifiedName, STNode typeDesc) {\nswitch (typeDesc.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn qualifiedName;\ncase ARRAY_TYPE_DESC:\nSTArrayTypeDescriptorNode arrayTypeDesc = (STArrayTypeDescriptorNode) typeDesc;\nSTNode newMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, arrayTypeDesc.memberTypeDesc);\nreturn STNodeFactory.createArrayTypeDescriptorNode(newMemberType, arrayTypeDesc.openBracket,\narrayTypeDesc.arrayLength, arrayTypeDesc.closeBracket);\ncase UNION_TYPE_DESC:\nSTUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDesc;\nSTNode newlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, unionTypeDesc.leftTypeDesc);\nreturn STNodeFactory.createUnionTypeDescriptorNode(newlhsType, unionTypeDesc.pipeToken,\nunionTypeDesc.rightTypeDesc);\ncase INTERSECTION_TYPE_DESC:\nSTIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDesc;\nnewlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, intersectionTypeDesc.leftTypeDesc);\nreturn STNodeFactory.createUnionTypeDescriptorNode(newlhsType, intersectionTypeDesc.bitwiseAndToken,\nintersectionTypeDesc.rightTypeDesc);\ncase OPTIONAL_TYPE_DESC:\nSTOptionalTypeDescriptorNode optionalType = (STOptionalTypeDescriptorNode) typeDesc;\nnewMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, optionalType.typeDescriptor);\nreturn STNodeFactory.createOptionalTypeDescriptorNode(newMemberType, optionalType.questionMarkToken);\ndefault:\nreturn typeDesc;\n}\n}\nprivate List getTypeDescList(List ambiguousList) {\nList typeDescList = new ArrayList<>();\nfor (STNode item : ambiguousList) {\ntypeDescList.add(getTypeDescFromExpr(item));\n}\nreturn typeDescList;\n}\n/**\n* Create a 
type-desc out of an expression.\n*\n* @param expression Expression\n* @return Type descriptor\n*/\nprivate STNode getTypeDescFromExpr(STNode expression) {\nswitch (expression.kind) {\ncase INDEXED_EXPRESSION:\nreturn parseArrayTypeDescriptorNode((STIndexedExpressionNode) expression);\ncase BASIC_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn STNodeFactory.createSingletonTypeDescriptorNode(expression);\ncase TYPE_REFERENCE_TYPE_DESC:\nreturn ((STTypeReferenceTypeDescNode) expression).typeRef;\ncase BRACED_EXPRESSION:\nSTBracedExpressionNode bracedExpr = (STBracedExpressionNode) expression;\nSTNode typeDesc = getTypeDescFromExpr(bracedExpr.expression);\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(bracedExpr.openParen, typeDesc,\nbracedExpr.closeParen);\ncase NIL_LITERAL:\nSTNilLiteralNode nilLiteral = (STNilLiteralNode) expression;\nreturn STNodeFactory.createNilTypeDescriptorNode(nilLiteral.openParenToken, nilLiteral.closeParenToken);\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) expression;\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(innerList.members));\nreturn STNodeFactory.createTupleTypeDescriptorNode(innerList.collectionStartToken, memberTypeDescs,\ninnerList.collectionEndToken);\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) expression;\nswitch (binaryExpr.operator.kind) {\ncase PIPE_TOKEN:\nSTNode lhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr);\nSTNode rhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr);\nreturn STNodeFactory.createUnionTypeDescriptorNode(lhsTypeDesc, binaryExpr.operator,\nrhsTypeDesc);\ncase BITWISE_AND_TOKEN:\nlhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr);\nrhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(lhsTypeDesc, binaryExpr.operator,\nrhsTypeDesc);\ndefault:\nbreak;\n}\nreturn expression;\ncase UNARY_EXPRESSION:\nreturn STNodeFactory.createSingletonTypeDescriptorNode(expression);\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ndefault:\nreturn expression;\n}\n}\nprivate List getBindingPatternsList(List ambibuousList) {\nList bindingPatterns = new ArrayList();\nfor (STNode item : ambibuousList) {\nbindingPatterns.add(getBindingPattern(item));\n}\nreturn bindingPatterns;\n}\nprivate STNode getBindingPattern(STNode ambiguousNode) {\nif (isEmpty(ambiguousNode)) {\nreturn ambiguousNode;\n}\nswitch (ambiguousNode.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTNode varName = ((STSimpleNameReferenceNode) ambiguousNode).name;\nreturn createCaptureOrWildcardBP(varName);\ncase QUALIFIED_NAME_REFERENCE:\nSTQualifiedNameReferenceNode qualifiedName = (STQualifiedNameReferenceNode) ambiguousNode;\nSTNode fieldName = STNodeFactory.createSimpleNameReferenceNode(qualifiedName.modulePrefix);\nreturn STNodeFactory.createFieldBindingPatternFullNode(fieldName, qualifiedName.colon,\ngetBindingPattern(qualifiedName.identifier));\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode;\nSTNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(innerList.members));\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nreturn 
STNodeFactory.createListBindingPatternNode(innerList.collectionStartToken, memberBindingPatterns,\nrestBindingPattern, innerList.collectionEndToken);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ninnerList = (STAmbiguousCollectionNode) ambiguousNode;\nList bindingPatterns = new ArrayList<>();\nrestBindingPattern = STNodeFactory.createEmptyNode();\nfor (int i = 0; i < innerList.members.size(); i++) {\nSTNode bp = getBindingPattern(innerList.members.get(i));\nif (bp.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = bp;\nbreak;\n}\nbindingPatterns.add(bp);\n}\nmemberBindingPatterns = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createMappingBindingPatternNode(innerList.collectionStartToken,\nmemberBindingPatterns, restBindingPattern, innerList.collectionEndToken);\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode;\nfieldName = STNodeFactory.createSimpleNameReferenceNode(field.fieldName);\nif (field.valueExpr == null) {\nreturn STNodeFactory.createFieldBindingPatternVarnameNode(fieldName);\n}\nreturn STNodeFactory.createFieldBindingPatternFullNode(fieldName, field.colon,\ngetBindingPattern(field.valueExpr));\ncase FUNCTION_CALL:\nSTFunctionCallExpressionNode funcCall = (STFunctionCallExpressionNode) ambiguousNode;\nSTNode args = funcCall.arguments;\nint size = args.bucketCount();\nbindingPatterns = new ArrayList<>();\nfor (int i = 0; i < size; i++) {\nSTNode arg = args.childInBucket(i);\nbindingPatterns.add(getBindingPattern(arg));\n}\nSTNode argListBindingPatterns = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createFunctionalBindingPatternNode(funcCall.functionName, funcCall.openParenToken,\nargListBindingPatterns, funcCall.closeParenToken);\ncase POSITIONAL_ARG:\nSTPositionalArgumentNode positionalArg = (STPositionalArgumentNode) ambiguousNode;\nreturn getBindingPattern(positionalArg.expression);\ncase NAMED_ARG:\nSTNamedArgumentNode namedArg = (STNamedArgumentNode) ambiguousNode;\nreturn STNodeFactory.createNamedArgBindingPatternNode(namedArg.argumentName, namedArg.equalsToken,\ngetBindingPattern(namedArg.expression));\ncase REST_ARG:\nSTRestArgumentNode restArg = (STRestArgumentNode) ambiguousNode;\nreturn STNodeFactory.createRestBindingPatternNode(restArg.ellipsis, restArg.expression);\ndefault:\nreturn ambiguousNode;\n}\n}\nprivate List getExpressionList(List ambibuousList) {\nList exprList = new ArrayList();\nfor (STNode item : ambibuousList) {\nexprList.add(getExpression(item));\n}\nreturn exprList;\n}\nprivate STNode getExpression(STNode ambiguousNode) {\nif (isEmpty(ambiguousNode)) {\nreturn ambiguousNode;\n}\nswitch (ambiguousNode.kind) {\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode;\nSTNode memberExprs = STNodeFactory.createNodeList(getExpressionList(innerList.members));\nreturn STNodeFactory.createListConstructorExpressionNode(innerList.collectionStartToken, memberExprs,\ninnerList.collectionEndToken);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ninnerList = (STAmbiguousCollectionNode) ambiguousNode;\nmemberExprs = STNodeFactory.createNodeList(getExpressionList(innerList.members));\nreturn STNodeFactory.createMappingConstructorExpressionNode(innerList.collectionStartToken, memberExprs,\ninnerList.collectionEndToken);\ncase REST_BINDING_PATTERN:\nSTRestBindingPatternNode restBindingPattern = (STRestBindingPatternNode) ambiguousNode;\nreturn 
STNodeFactory.createSpreadFieldNode(restBindingPattern.ellipsisToken,\nrestBindingPattern.variableName);\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode;\nreturn STNodeFactory.createSpecificFieldNode(field.readonlyKeyword, field.fieldName, field.colon,\ngetExpression(field.valueExpr));\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ndefault:\nreturn ambiguousNode;\n}\n}\nprivate STNode getMappingField(STNode identifier, STNode colon, STNode bindingPatternOrExpr) {\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nswitch (bindingPatternOrExpr.kind) {\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\nreturn STNodeFactory.createFieldBindingPatternFullNode(simpleNameRef, colon, bindingPatternOrExpr);\ncase LIST_CONSTRUCTOR:\ncase MAPPING_CONSTRUCTOR:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, simpleNameRef, colon, identifier);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, bindingPatternOrExpr);\n}\n}\n}", + "target_code": "private STNode rearrangeMarkdownDocumentationLines(List markdownDocLineList) {", + "method_body_after": "private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {\nswitch (peek(lookahead + 1).kind) {\ncase IDENTIFIER_TOKEN:\nSyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;\nswitch (tokenAfterIdentifier) {\ncase ON_KEYWORD:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\ncase QUESTION_MARK_TOKEN:\nreturn false;\ndefault:\nreturn false;\n}\ncase ON_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse listener declaration, given the qualifier.\n*
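When none of the members is decisive, the section above parks them in an STAmbiguousCollectionNode and converts the whole list later, via getBindingPatternsList() or getExpressionList(), once the surrounding context resolves the ambiguity. A self-contained sketch of that deferred-resolution pattern (illustrative names only; the string-based "nodes" stand in for real syntax-tree types):

```java
import java.util.List;
import java.util.stream.Collectors;

// Toy model: an ambiguous bracketed list keeps raw members and is converted
// wholesale once context picks one meaning.
public class AmbiguousList {
    private final List<String> rawMembers;

    AmbiguousList(List<String> rawMembers) { this.rawMembers = rawMembers; }

    // Interpretation 1: every member becomes a binding pattern.
    List<String> asBindingPatterns() {
        return rawMembers.stream().map(m -> "bp(" + m + ")").collect(Collectors.toList());
    }

    // Interpretation 2: every member becomes an expression.
    List<String> asExpressions() {
        return rawMembers.stream().map(m -> "expr(" + m + ")").collect(Collectors.toList());
    }

    public static void main(String[] args) {
        AmbiguousList list = new AmbiguousList(List.of("a", "b"));
        System.out.println(list.asBindingPatterns()); // "[a, b] = f();"  -> assignment context
        System.out.println(list.asExpressions());     // "return [a, b];" -> expression context
    }
}
```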
\n* \n* listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;\n* \n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the listener declaration\n* @return Parsed node\n*/\nprivate STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.LISTENER_DECL);\nSTNode listenerKeyword = parseListenerKeyword();\nif (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode listenerDecl =\nparseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);\nendContext();\nreturn listenerDecl;\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse listener keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseListenerKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LISTENER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse constant declaration, given the qualifier.\n*
\n* module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant declaration\n* @return Parsed node\n*/\nprivate STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.CONSTANT_DECL);\nSTNode constKeyword = parseConstantKeyword();\nSTNode constDecl = parseConstDecl(metadata, qualifier, constKeyword);\nendContext();\nreturn constDecl;\n}\n/**\n* Parse the components that follow the const keyword of a constant declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {\nSTToken nextToken = peek();\nreturn parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword);\n}\nprivate STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode keyword) {\nswitch (nextTokenKind) {\ncase ANNOTATION_KEYWORD:\nswitchContext(ParserRuleContext.ANNOTATION_DECL);\nreturn parseAnnotationDeclaration(metadata, qualifier, keyword);\ncase IDENTIFIER_TOKEN:\nreturn parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, keyword, false);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, keyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseConstDeclFromType(solution.tokenKind, metadata, qualifier, keyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode variableName = parseVariableName();\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, typeDesc, variableName,\nequalsToken, initializer, semicolonToken);\n}\nprivate STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nboolean isListener) {\nSTNode varNameOrTypeName = parseStatementStartIdentifier();\nSTNode constDecl =\nparseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);\nreturn constDecl;\n}\n/**\n* Parse the component that follows the first identifier in a const decl. 
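parseConstantOrListenerDeclWithOptionalType above hinges on a single token of lookahead after the first identifier: another identifier means the first one was the type, while `=` means the type was omitted. A tiny sketch of just that decision (names invented for illustration):

```java
// Toy model of the optional-type decision in "const [type-descriptor] identifier = ...".
public class ConstDeclShape {

    enum Token { IDENTIFIER, EQUAL }

    // "const byte b = 1;" -> IDENTIFIER follows -> first identifier is the type
    // "const b = 1;"      -> EQUAL follows      -> type-descriptor was omitted
    static String decide(Token afterFirstIdentifier) {
        switch (afterFirstIdentifier) {
            case IDENTIFIER: return "first identifier is the type-descriptor";
            case EQUAL:      return "no type-descriptor; the identifier is the variable name";
            default:         throw new IllegalStateException("unexpected token: recover and retry");
        }
    }

    public static void main(String[] args) {
        System.out.println(decide(Token.IDENTIFIER));
        System.out.println(decide(Token.EQUAL));
    }
}
```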
The identifier\n* can be either the type-name (a user defined type) or the var-name when the type-name\n* is not present.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param keyword Const or listener keyword\n* @param typeOrVarName Identifier that follows the const-keyword\n* @param isListener Whether this is a listener declaration\n* @return Parsed node\n*/\nprivate STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,\nSTNode typeOrVarName, boolean isListener) {\nif (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode type = typeOrVarName;\nSTNode variableName = parseVariableName();\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nSTToken token = peek();\nreturn parseConstantOrListenerDeclRhs(token.kind, metadata, qualifier, keyword, typeOrVarName, isListener);\n}\nprivate STNode parseConstantOrListenerDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode keyword, STNode typeOrVarName, boolean isListener) {\nSTNode type;\nSTNode variableName;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ntype = typeOrVarName;\nvariableName = parseVariableName();\nbreak;\ncase EQUAL_TOKEN:\nvariableName = ((STSimpleNameReferenceNode) typeOrVarName).name;\ntype = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword,\ntypeOrVarName, isListener);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseConstantOrListenerDeclRhs(solution.tokenKind, metadata, qualifier, keyword, typeOrVarName,\nisListener);\n}\nreturn parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n}\nprivate STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,\nSTNode type, STNode variableName) {\nSTNode equalsToken = parseAssignOp();\nSTNode initializer = parseExpression();\nSTNode semicolonToken = parseSemicolon();\nif (isListener) {\nreturn STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\nreturn STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,\nequalsToken, initializer, semicolonToken);\n}\n/**\n* Parse const keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseConstantKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONST_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONST_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse nil type descriptor.\n*
\n* nil-type-descriptor := ( ) \n*
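\n* For instance (illustrative): () nilValue = (); declares a variable of the nil type.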
\n*\n* @return Parsed node\n*/\nprivate STNode parseNilTypeDescriptor() {\nstartContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken);\n}\n/**\n* Parse typeof expression.\n*
\n* \n* typeof-expr := typeof expression\n* \n*\n* @param isRhsExpr\n* @return Typeof expression node\n*/\nprivate STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode typeofKeyword = parseTypeofKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);\n}\n/**\n* Parse typeof-keyword.\n*\n* @return Typeof-keyword node\n*/\nprivate STNode parseTypeofKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPEOF_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse optional type descriptor.\n*
\n* optional-type-descriptor := type-descriptor ? \n*
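\n* For instance (illustrative): int? result = (); declares a variable of type int or nil.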
\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {\nstartContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);\nSTNode questionMarkToken = parseQuestionMark();\nendContext();\nreturn STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken);\n}\n/**\n* Parse unary expression.\n*
\n* \n* unary-expr := + expression | - expression | ~ expression | ! expression\n* \n*\n* @param isRhsExpr\n* @return Unary expression node\n*/\nprivate STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\nSTNode unaryOperator = parseUnaryOperator();\nSTNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\nreturn STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);\n}\n/**\n* Parse unary operator.\n* UnaryOperator := + | - | ~ | !\n*\n* @return Parsed node\n*/\nprivate STNode parseUnaryOperator() {\nSTToken token = peek();\nif (isUnaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.UNARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a unary operator.\n*\n* @param kind STToken kind\n* @return true if the token kind refers to a unary operator. false otherwise\n*/\nprivate boolean isUnaryOperator(SyntaxKind kind) {\nswitch (kind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse array type descriptor.\n*
\n* \n* array-type-descriptor := member-type-descriptor [ [ array-length ] ]\n* member-type-descriptor := type-descriptor\n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* inferred-array-length := *\n* \n*
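\n* Illustrative examples: int[] a; int[4] b; int[*] c = [1, 2];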
\n*\n* @param memberTypeDesc\n*\n* @return Parsed Node\n*/\nprivate STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {\nstartContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);\nSTNode openBracketToken = parseOpenBracket();\nSTNode arrayLengthNode = parseArrayLength();\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode,\ncloseBracketToken);\n}\n/**\n* Parse array length.\n*
\n* \n* array-length :=\n* int-literal\n* | constant-reference-expr\n* | inferred-array-length\n* constant-reference-expr := variable-reference-expr\n* \n*
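\n* For instance (illustrative), given const int LEN = 4; both int[4] and int[LEN] are valid array lengths.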
\n*\n* @return Parsed array length\n*/\nprivate STNode parseArrayLength() {\nSTToken token = peek();\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);\ndefault:\nSolution sol = recover(token, ParserRuleContext.ARRAY_LENGTH);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse annotations.\n*
\n* Note: In the Ballerina spec ({@link https:\n* annotations-list is specified as one-or-more annotations, and its usage is marked as an\n* optional annotations-list. However, for the consistency of the tree, we model the\n* annotations-list here as zero-or-more annotations, and make its usage non-optional.\n*
\n* annots := annotation*\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotations() {\nSTToken nextToken = peek();\nreturn parseAnnotations(nextToken.kind);\n}\nprivate STNode parseAnnotations(SyntaxKind nextTokenKind) {\nstartContext(ParserRuleContext.ANNOTATIONS);\nList annotList = new ArrayList<>();\nwhile (nextTokenKind == SyntaxKind.AT_TOKEN) {\nannotList.add(parseAnnotation());\nnextTokenKind = peek().kind;\n}\nendContext();\nreturn STNodeFactory.createNodeList(annotList);\n}\n/**\n* Parse annotation attachment.\n*
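\n* For instance (illustrative, hypothetical http module): @http:ServiceConfig {basePath: "/hello"} attaches an annotation whose value is a mapping constructor, matching the production below.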
\n* annotation := @ annot-tag-reference annot-value\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotation() {\nSTNode atToken = parseAtToken();\nSTNode annotReference;\nif (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {\nannotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n} else {\nannotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);\n}\nSTNode annotValue;\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nannotValue = parseMappingConstructorExpr();\n} else {\nannotValue = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);\n}\n/**\n* Parse '@' token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAtToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.AT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.AT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse metadata. Meta data consist of optional doc string and\n* an annotations list.\n*
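\n* For instance (illustrative, hypothetical myMod module), a declaration may be preceded by a doc line such as "# Adds two integers." and an annotation such as @myMod:config {}, matching the production below.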
\n* metadata := [DocumentationString] annots\n*\n* @return Parsed node\n*/\nprivate STNode parseMetaData(SyntaxKind nextTokenKind) {\nSTNode docString;\nSTNode annotations;\nswitch (nextTokenKind) {\ncase DOCUMENTATION_STRING:\ndocString = parseMarkdownDocumentation();\nannotations = parseAnnotations();\nbreak;\ncase AT_TOKEN:\ndocString = STNodeFactory.createEmptyNode();\nannotations = parseAnnotations(nextTokenKind);\nbreak;\ndefault:\nreturn createEmptyMetadata();\n}\nreturn STNodeFactory.createMetadataNode(docString, annotations);\n}\n/**\n* Create empty metadata node.\n*\n* @return A metadata node with no doc string and no annotations\n*/\nprivate STNode createEmptyMetadata() {\nreturn STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createEmptyNodeList());\n}\n/**\n* Parse is expression.\n* \n* is-expr := expression is type-descriptor\n* \n*\n* @param lhsExpr Preceding expression of the is expression\n* @return Is expression node\n*/\nprivate STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode isKeyword = parseIsKeyword();\nSTNode typeDescriptor =\nparseTypeDescriptorInExpression(ParserRuleContext.TYPE_DESC_IN_EXPRESSION, isInConditionalExpr);\nreturn STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor);\n}\n/**\n* Parse is-keyword.\n*\n* @return Is-keyword node\n*/\nprivate STNode parseIsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse local type definition statement.\n* local-type-defn-stmt := [annots] type identifier type-descriptor ;\n*\n* @return Local type definition statement\n*/\nprivate STNode parseLocalTypeDefinitionStatement(STNode annots) {\nstartContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);\nSTNode typeKeyword = parseTypeKeyword();\nSTNode typeName = parseTypeName();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,\nsemicolon);\n}\n/**\n* Parse a statement that consists only of an action or expression.\n*\n* @param annots Annotations\n* @param nextTokenKind Next token kind\n* @return Statement node\n*/\nprivate STNode parseExpressionStatement(SyntaxKind nextTokenKind, STNode annots) {\nstartContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode expression = parseActionOrExpressionInLhs(nextTokenKind, annots);\nreturn getExpressionAsStatement(expression);\n}\n/**\n* Parse statements that start with an expression.\n*\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExpr(STNode annots) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode expr = parseActionOrExpressionInLhs(peek().kind, annots);\nreturn parseStatementStartWithExprRhs(expr);\n}\n/**\n* Parse the rhs of statements that start with an expression.\n*\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExprRhs(STNode expression) {\nSTToken nextToken = peek();\nreturn parseStatementStartWithExprRhs(nextToken.kind, expression);\n}\n/**\n* Parse the component that follows the expression, at the beginning of a statement.\n*\n* @param nextTokenKind Kind of the next token\n* @return Statement node\n*/\nprivate STNode parseStatementStartWithExprRhs(SyntaxKind nextTokenKind, STNode expression) 
{\nswitch (nextTokenKind) {\ncase EQUAL_TOKEN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(expression);\ncase SEMICOLON_TOKEN:\nreturn getExpressionAsStatement(expression);\ncase IDENTIFIER_TOKEN:\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn parseCompoundAssignmentStmtRhs(expression);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, expression);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatementStartWithExprRhs(solution.tokenKind, expression);\n}\n}\nprivate STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {\nSTNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);\nSTNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;\nif (lengthExprs.isEmpty()) {\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, indexedExpr.openBracket,\nSTNodeFactory.createEmptyNode(), indexedExpr.closeBracket);\n}\nSTNode lengthExpr = lengthExprs.get(0);\nswitch (lengthExpr.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ndefault:\nSTNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(\nindexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\nindexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);\nlengthExpr = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, indexedExpr.openBracket, lengthExpr,\nindexedExpr.closeBracket);\n}\nprivate STNode getExpressionAsStatement(STNode expression) {\nswitch (expression.kind) {\ncase METHOD_CALL:\ncase FUNCTION_CALL:\ncase CHECK_EXPRESSION:\nreturn parseCallStatement(expression);\ncase REMOTE_METHOD_CALL_ACTION:\ncase CHECK_ACTION:\ncase BRACED_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\nreturn parseActionStatement(expression);\ndefault:\nSTNode semicolon = parseSemicolon();\nendContext();\nSTNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,\nexpression, semicolon);\nexprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);\nreturn exprStmt;\n}\n}\n/**\n*
\n* Parse call statement, given the call expression.\n*
\n* \n* call-stmt := call-expr ;\n*
\n* call-expr := function-call-expr | method-call-expr | checking-keyword call-expr\n*
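\n* Illustrative statements matched by this rule: foo(); obj.bar(x); check foo();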
\n*\n* @param expression Call expression associated with the call statement\n* @return Call statement node\n*/\nprivate STNode parseCallStatement(STNode expression) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);\n}\n/**\n* Check whether a node is a missing node.\n*\n* @param node Node to check\n* @return true if the node is a missing node. false otherwise\n*/\nprivate boolean isMissingNode(STNode node) {\nif (node.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn isMissingNode(((STSimpleNameReferenceNode) node).name);\n}\nreturn node instanceof STMissingToken;\n}\nprivate STNode parseActionStatement(STNode action) {\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);\n}\n/**\n* Parse remote method call action, given the starting expression.\n*
\n* \n* remote-method-call-action := expression -> method-name ( arg-list )\n*
\n* async-send-action := expression -> peer-worker ;\n*
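\n* Illustrative (hypothetical clientEp and worker w1): clientEp->get("/users") is a remote method call, while msg -> w1; is an async send.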
\n*\n* @param isRhsExpr Is this an RHS action\n* @param expression LHS expression\n* @return\n*/\nprivate STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {\nSTNode rightArrow = parseRightArrow();\nreturn parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n}\nprivate STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {\nreturn parseRemoteCallOrAsyncSendActionRhs(peek().kind, expression, isRhsExpr, rightArrow);\n}\nprivate STNode parseRemoteCallOrAsyncSendActionRhs(SyntaxKind nextTokenKind, STNode expression, boolean isRhsExpr,\nSTNode rightArrow) {\nSTNode name;\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\nname = parseDefaultKeyword();\nreturn parseAsyncSendAction(expression, rightArrow, name);\ncase IDENTIFIER_TOKEN:\nname = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());\nbreak;\ncase CONTINUE_KEYWORD:\ncase COMMIT_KEYWORD:\nname = getKeywordAsSimpleNameRef();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression,\nisRhsExpr, rightArrow);\nif (solution.action == Action.REMOVE) {\nname = solution.recoveredNode;\nbreak;\n}\nreturn parseRemoteCallOrAsyncSendActionRhs(solution.tokenKind, expression, isRhsExpr, rightArrow);\n}\nreturn parseRemoteCallOrAsyncSendEnd(peek().kind, expression, rightArrow, name);\n}\nprivate STNode parseRemoteCallOrAsyncSendEnd(SyntaxKind nextTokenKind, STNode expression, STNode rightArrow,\nSTNode name) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseRemoteMethodCallAction(expression, rightArrow, name);\ncase SEMICOLON_TOKEN:\nreturn parseAsyncSendAction(expression, rightArrow, name);\ndefault:\nSTToken token = peek();\nSolution solution =\nrecover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRemoteCallOrAsyncSendEnd(solution.tokenKind, expression, rightArrow, name);\n}\n}\n/**\n* Parse default keyword.\n*\n* @return default keyword node\n*/\nprivate STNode parseDefaultKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DEFAULT_KEYWORD) {\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\n} else {\nSolution sol = recover(token, ParserRuleContext.DEFAULT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {\nreturn STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);\n}\nprivate STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {\nSTNode openParenToken = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode arguments = parseArgsList();\nSTNode closeParenToken = parseCloseParenthesis();\nreturn STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,\ncloseParenToken);\n}\n/**\n* Parse right arrow (->) token.\n*\n* @return Parsed node\n*/\nprivate STNode parseRightArrow() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse parameterized type descriptor.\n* parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter\n*\n* @return Parsed node\n*/\nprivate STNode 
parseParameterizedTypeDescriptor() {\nSTNode parameterizedTypeKeyword = parseParameterizedTypeKeyword();\nSTNode ltToken = parseLTToken();\nSTNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, ltToken, typeNode,\ngtToken);\n}\n/**\n* Parse map or future keyword token.\n*\n* @return Parsed node\n*/\nprivate STNode parseParameterizedTypeKeyword() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\nreturn consume();\ndefault:\nSolution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse > token.\n*\n* @return Parsed node\n*/\nprivate STNode parseGTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.GT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.GT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse < token.\n*\n* @return Parsed node\n*/\nprivate STNode parseLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.LT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse nil literal. A nil literal is written as ( ).\n*\n* @return Parsed node\n*/\nprivate STNode parseNilLiteral() {\nstartContext(ParserRuleContext.NIL_LITERAL);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);\n}\n/**\n* Parse annotation declaration, given the qualifier.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation declaration\n* @param constKeyword Const keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {\nstartContext(ParserRuleContext.ANNOTATION_DECL);\nSTNode annotationKeyword = parseAnnotationKeyword();\nSTNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\nendContext();\nreturn annotDecl;\n}\n/**\n* Parse annotation keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ANNOTATION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse the components that follow the annotation keyword of an annotation declaration.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the constant decl\n* @param constKeyword Const keyword\n* @param annotationKeyword Annotation keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTToken nextToken = peek();\nreturn parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword);\n}\nprivate STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode constKeyword, STNode annotationKeyword) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nbreak;\n}\nSTToken token = 
peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier,\nconstKeyword, annotationKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword,\nannotationKeyword);\n}\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\n/**\n* Parse annotation tag.\n*
\n* annot-tag := identifier\n*\n* @return\n*/\nprivate STNode parseAnnotationTag() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.ANNOTATION_TAG);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword) {\nSTNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);\nif (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag, annotTag);\n}\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {\nSTNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,\nParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);\nSTNode annotTag = parseAnnotationTag();\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nSTNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;\nreturn parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);\n}\n/**\n* Parse the component that follows the first identifier in an annotation decl. The identifier\n* can be either the type-name (a user defined type) or the annot-tag, where the type-name\n* is not present.\n*\n* @param metadata Metadata\n* @param qualifier Qualifier that precedes the annotation decl\n* @param constKeyword Const keyword\n* @param annotationKeyword Annotation keyword\n* @param typeDescOrAnnotTag Identifier that follows the annotation-keyword\n* @return Parsed node\n*/\nprivate STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDescOrAnnotTag) {\nSTToken token = peek();\nreturn parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag);\n}\nprivate STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) {\nSTNode typeDesc;\nSTNode annotTag;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ntypeDesc = typeDescOrAnnotTag;\nannotTag = parseAnnotationTag();\nbreak;\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ntypeDesc = STNodeFactory.createEmptyNode();\nannotTag = typeDescOrAnnotTag;\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword,\nannotationKeyword, typeDescOrAnnotTag);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDescOrAnnotTag);\n}\nreturn parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\nannotTag);\n}\nprivate STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,\nSTNode annotationKeyword, STNode typeDesc, STNode annotTag) {\nSTToken nextToken = peek();\nreturn parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword,\ntypeDesc, annotTag);\n}\nprivate STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,\nSTNode 
constKeyword, STNode annotationKeyword, STNode typeDesc,\nSTNode annotTag) {\nSTNode onKeyword;\nSTNode attachPoints;\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nonKeyword = STNodeFactory.createEmptyNode();\nattachPoints = STNodeFactory.createEmptyNodeList();\nbreak;\ncase ON_KEYWORD:\nonKeyword = parseOnKeyword();\nattachPoints = parseAnnotationAttachPoints();\nonKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier,\nconstKeyword, annotationKeyword, typeDesc, annotTag);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword,\nannotationKeyword, typeDesc, annotTag);\n}\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,\ntypeDesc, annotTag, onKeyword, attachPoints, semicolonToken);\n}\n/**\n* Parse annotation attach points.\n*
\n* \n* annot-attach-points := annot-attach-point (, annot-attach-point)*\n*
\n* annot-attach-point := dual-attach-point | source-only-attach-point\n*
\n* dual-attach-point := [source] dual-attach-point-ident\n*
\n* dual-attach-point-ident :=\n* [object] type\n* | [object|resource] function\n* | parameter\n* | return\n* | service\n* | [object|record] field\n*
\n* source-only-attach-point := source source-only-attach-point-ident\n*
\n* source-only-attach-point-ident :=\n* annotation\n* | external\n* | var\n* | const\n* | listener\n* | worker\n*
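\n* For instance (illustrative), an annotation declaration may end with: on source var; or on object function, field;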
\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoints() {\nstartContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);\nList attachPoints = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndAnnotAttachPointList(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode attachPoint = parseAnnotationAttachPoint();\nattachPoints.add(attachPoint);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndAnnotAttachPointList(nextToken.kind)) {\nleadingComma = parseAttachPointEnd();\nif (leadingComma == null) {\nbreak;\n}\nattachPoints.add(leadingComma);\nattachPoint = parseAnnotationAttachPoint();\nif (attachPoint == null) {\nattachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\nattachPoints.add(attachPoint);\nbreak;\n}\nattachPoints.add(attachPoint);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(attachPoints);\n}\n/**\n* Parse annotation attach point end.\n*\n* @return Parsed node\n*/\nprivate STNode parseAttachPointEnd() {\nSTToken nextToken = peek();\nreturn parseAttachPointEnd(nextToken.kind);\n}\nprivate STNode parseAttachPointEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nreturn null;\ncase COMMA_TOKEN:\nreturn consume();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null;\n}\n}\nprivate boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse annotation attach point.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotationAttachPoint() {\nreturn parseAnnotationAttachPoint(peek().kind);\n}\nprivate STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\nreturn null;\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\ncase SOURCE_KEYWORD:\nSTNode sourceKeyword = parseSourceKeyword();\nreturn parseAttachPointIdent(sourceKeyword);\ncase OBJECT_KEYWORD:\ncase TYPE_KEYWORD:\ncase RESOURCE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ncase RECORD_KEYWORD:\nsourceKeyword = STNodeFactory.createEmptyNode();\nSTNode firstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ATTACH_POINT);\nreturn solution.recoveredNode;\n}\n}\n/**\n* Parse source keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseSourceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SOURCE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SOURCE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse attach point ident gievn.\n*
\n* \n* source-only-attach-point-ident := annotation | external | var | const | listener | worker\n*
\n* dual-attach-point-ident := [object] type | [object|resource] function | parameter\n* | return | service | [object|record] field\n*
\n*\n* @param sourceKeyword Source keyword\n* @return Parsed node\n*/\nprivate STNode parseAttachPointIdent(STNode sourceKeyword) {\nreturn parseAttachPointIdent(peek().kind, sourceKeyword);\n}\nprivate STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) {\nswitch (nextTokenKind) {\ncase ANNOTATION_KEYWORD:\ncase EXTERNAL_KEYWORD:\ncase VAR_KEYWORD:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase WORKER_KEYWORD:\nSTNode firstIdent = consume();\nSTNode secondIdent = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);\ncase OBJECT_KEYWORD:\ncase RESOURCE_KEYWORD:\ncase RECORD_KEYWORD:\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\nfirstIdent = consume();\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nfirstIdent = solution.recoveredNode;\nreturn parseDualAttachPointIdent(sourceKeyword, firstIdent);\n}\n}\n/**\n* Parse dual-attach-point ident.\n*\n* @param sourceKeyword Source keyword\n* @param firstIdent first part of the dual attach-point\n* @return Parsed node\n*/\nprivate STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {\nSTNode secondIdent;\nswitch (firstIdent.kind) {\ncase OBJECT_KEYWORD:\nsecondIdent = parseIdentAfterObjectIdent();\nbreak;\ncase RESOURCE_KEYWORD:\nsecondIdent = parseFunctionIdent();\nbreak;\ncase RECORD_KEYWORD:\nsecondIdent = parseFieldIdent();\nbreak;\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase PARAMETER_KEYWORD:\ncase RETURN_KEYWORD:\ncase SERVICE_KEYWORD:\ncase FIELD_KEYWORD:\ndefault:\nsecondIdent = STNodeFactory.createEmptyNode();\nbreak;\n}\nreturn STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);\n}\n/**\n* Parse the idents that are supported after object-ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseIdentAfterObjectIdent() {\nSTToken token = peek();\nswitch (token.kind) {\ncase TYPE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase FIELD_KEYWORD:\nreturn consume();\ndefault:\nSolution sol = recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse function ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNCTION_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field ident.\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldIdent() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FIELD_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FIELD_IDENT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse XML namespace declaration.\n*
\n* xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;\n*
\n* xml-namespace-uri := simple-const-expr\n*
\n* xml-namespace-prefix := identifier\n*
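\n* For instance (illustrative URI): xmlns "http://example.com/ns" as ex;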
\n*\n* @return\n*/\nprivate STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {\nstartContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);\nSTNode xmlnsKeyword = parseXMLNSKeyword();\nSTNode namespaceUri = parseXMLNamespaceUri();\nSTNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\nendContext();\nreturn xmlnsDecl;\n}\n/**\n* Parse xmlns keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNSKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XMLNS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.XMLNS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse namespace uri.\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamespaceUri() {\nSTNode expr = parseSimpleConstExpr();\nswitch (expr.kind) {\ncase STRING_LITERAL:\ncase IDENTIFIER_TOKEN:\ncase QUALIFIED_NAME_REFERENCE:\nbreak;\ndefault:\nexpr = SyntaxErrors.addDiagnostic(expr, DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);\n}\nreturn expr;\n}\nprivate STNode parseSimpleConstExpr() {\nstartContext(ParserRuleContext.CONSTANT_EXPRESSION);\nSTNode expr = parseSimpleConstExprInternal();\nendContext();\nreturn expr;\n}\nprivate STNode parseSimpleConstExprInternal() {\nSTToken nextToken = peek();\nreturn parseConstExprInternal(nextToken.kind);\n}\n/**\n* Parse constants expr.\n*\n* @return Parsed node\n*/\nprivate STNode parseConstExprInternal(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase STRING_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nreturn parseBasicLiteral();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn parseSignedIntOrFloat();\ncase OPEN_PAREN_TOKEN:\nreturn parseNilLiteral();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);\nreturn solution.recoveredNode;\n}\n}\n/**\n* Parse the portion after the namsepsace-uri of an XML declaration.\n*\n* @param xmlnsKeyword XMLNS keyword\n* @param namespaceUri Namespace URI\n* @return Parsed node\n*/\nprivate STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {\nreturn parseXMLDeclRhs(peek().kind, xmlnsKeyword, namespaceUri, isModuleVar);\n}\nprivate STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri,\nboolean isModuleVar) {\nSTNode asKeyword = STNodeFactory.createEmptyNode();\nSTNode namespacePrefix = STNodeFactory.createEmptyNode();\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\nasKeyword = parseAsKeyword();\nnamespacePrefix = parseNamespacePrefix();\nbreak;\ncase SEMICOLON_TOKEN:\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword,\nnamespaceUri, isModuleVar);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri, isModuleVar);\n}\nSTNode semicolon = parseSemicolon();\nif (isModuleVar) {\nreturn STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,\nnamespacePrefix, semicolon);\n}\nreturn STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,\nsemicolon);\n}\n/**\n* Parse import prefix.\n*\n* @return Parsed node\n*/\nprivate STNode 
parseNamespacePrefix() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse named worker declaration.\n*
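\n* For instance (illustrative): worker w1 returns int { return 1; } matches the production below.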
\n* named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }\n*\n* @param annots Annotations attached to the worker decl\n* @return Parsed node\n*/\nprivate STNode parseNamedWorkerDeclaration(STNode annots) {\nstartContext(ParserRuleContext.NAMED_WORKER_DECL);\nSTNode workerKeyword = parseWorkerKeyword();\nSTNode workerName = parseWorkerName();\nSTNode returnTypeDesc = parseReturnTypeDescriptor();\nSTNode workerBody = parseBlockNode();\nendContext();\nreturn STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc,\nworkerBody);\n}\nprivate STNode parseReturnTypeDescriptor() {\nSTToken token = peek();\nif (token.kind != SyntaxKind.RETURNS_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode returnsKeyword = consume();\nSTNode annot = parseAnnotations();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\nreturn STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n}\n/**\n* Parse worker keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse worker name.\n*
\n* worker-name := identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseWorkerName() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.WORKER_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse lock statement.\n* lock-stmt := lock block-stmt ;\n*\n* @return Lock statement\n*/\nprivate STNode parseLockStatement() {\nstartContext(ParserRuleContext.LOCK_STMT);\nSTNode lockKeyword = parseLockKeyword();\nSTNode blockStatement = parseBlockNode();\nendContext();\nreturn STNodeFactory.createLockStatementNode(lockKeyword, blockStatement);\n}\n/**\n* Parse lock-keyword.\n*\n* @return lock-keyword node\n*/\nprivate STNode parseLockKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LOCK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LOCK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse union type descriptor.\n* union-type-descriptor := type-descriptor | type-descriptor\n*\n* @param leftTypeDesc Type desc in the LHS os the union type desc.\n* @param context Current context.\n* @return parsed union type desc node\n*/\nprivate STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeToken = parsePipeToken();\nSTNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);\nreturn STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);\n}\n/**\n* Parse pipe token.\n*\n* @return parsed pipe token node\n*/\nprivate STNode parsePipeToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PIPE);\nreturn sol.recoveredNode;\n}\n}\nprivate boolean isTypeStartingToken(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase IDENTIFIER_TOKEN:\ncase SERVICE_KEYWORD:\ncase RECORD_KEYWORD:\ncase OBJECT_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CLIENT_KEYWORD:\ncase OPEN_PAREN_TOKEN:\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TABLE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase DISTINCT_KEYWORD:\nreturn true;\ndefault:\nif (isSingletonTypeDescStart(nodeKind, true)) {\nreturn true;\n}\nreturn isSimpleType(nodeKind);\n}\n}\nstatic boolean isSimpleType(SyntaxKind nodeKind) {\nswitch (nodeKind) {\ncase INT_KEYWORD:\ncase FLOAT_KEYWORD:\ncase DECIMAL_KEYWORD:\ncase BOOLEAN_KEYWORD:\ncase STRING_KEYWORD:\ncase BYTE_KEYWORD:\ncase XML_KEYWORD:\ncase JSON_KEYWORD:\ncase HANDLE_KEYWORD:\ncase ANY_KEYWORD:\ncase ANYDATA_KEYWORD:\ncase NEVER_KEYWORD:\ncase SERVICE_KEYWORD:\ncase VAR_KEYWORD:\ncase ERROR_KEYWORD:\ncase STREAM_KEYWORD:\ncase TYPEDESC_KEYWORD:\ncase READONLY_KEYWORD:\ncase DISTINCT_KEYWORD:\nreturn true;\ncase TYPE_DESC:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {\nswitch (typeKeyword) {\ncase INT_KEYWORD:\nreturn SyntaxKind.INT_TYPE_DESC;\ncase FLOAT_KEYWORD:\nreturn SyntaxKind.FLOAT_TYPE_DESC;\ncase DECIMAL_KEYWORD:\nreturn SyntaxKind.DECIMAL_TYPE_DESC;\ncase BOOLEAN_KEYWORD:\nreturn SyntaxKind.BOOLEAN_TYPE_DESC;\ncase STRING_KEYWORD:\nreturn SyntaxKind.STRING_TYPE_DESC;\ncase BYTE_KEYWORD:\nreturn SyntaxKind.BYTE_TYPE_DESC;\ncase XML_KEYWORD:\nreturn SyntaxKind.XML_TYPE_DESC;\ncase JSON_KEYWORD:\nreturn SyntaxKind.JSON_TYPE_DESC;\ncase HANDLE_KEYWORD:\nreturn 
SyntaxKind.HANDLE_TYPE_DESC;\ncase ANY_KEYWORD:\nreturn SyntaxKind.ANY_TYPE_DESC;\ncase ANYDATA_KEYWORD:\nreturn SyntaxKind.ANYDATA_TYPE_DESC;\ncase READONLY_KEYWORD:\nreturn SyntaxKind.READONLY_TYPE_DESC;\ncase NEVER_KEYWORD:\nreturn SyntaxKind.NEVER_TYPE_DESC;\ncase SERVICE_KEYWORD:\nreturn SyntaxKind.SERVICE_TYPE_DESC;\ncase VAR_KEYWORD:\nreturn SyntaxKind.VAR_TYPE_DESC;\ndefault:\nreturn SyntaxKind.TYPE_DESC;\n}\n}\n/**\n* Parse fork-keyword.\n*\n* @return Fork-keyword node\n*/\nprivate STNode parseForkKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FORK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FORK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse fork statement.\n* fork-stmt := fork { named-worker-decl+ }\n*\n* @return Fork statement\n*/\nprivate STNode parseForkStatement() {\nstartContext(ParserRuleContext.FORK_STMT);\nSTNode forkKeyword = parseForkKeyword();\nSTNode openBrace = parseOpenBrace();\nArrayList workers = new ArrayList<>();\nwhile (!isEndOfStatements()) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nswitch (stmt.kind) {\ncase NAMED_WORKER_DECLARATION:\nworkers.add(stmt);\nbreak;\ndefault:\nif (workers.isEmpty()) {\nopenBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n} else {\nupdateLastNodeInListWithInvalidNode(workers, stmt,\nDiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n}\n}\n}\nSTNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(namedWorkerDeclarations, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);\nreturn STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);\n}\n/**\n* Parse trap expression.\n*
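\n* For instance (illustrative, hypothetical mayPanic()): int|error result = trap mayPanic(); matches the production below.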
\n* \n* trap-expr := trap expression\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Whether this is a RHS expression or not\n* @return Trap expression node\n*/\nprivate STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode trapKeyword = parseTrapKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);\n}\nreturn STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);\n}\n/**\n* Parse trap-keyword.\n*\n* @return Trap-keyword node\n*/\nprivate STNode parseTrapKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRAP_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TRAP_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse list constructor expression.\n*
\n* \n* list-constructor-expr := [ [ expr-list ] ]\n*
\n* expr-list := expression (, expression)*\n*
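\n* For instance (illustrative): int[] xs = [1, 2, 3]; uses a list constructor with a three-member expr-list.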
\n*\n* @return Parsed node\n*/\nprivate STNode parseListConstructorExpr() {\nstartContext(ParserRuleContext.LIST_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode expressions = parseOptionalExpressionsList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);\n}\n/**\n* Parse optional expression list.\n*\n* @return Parsed node\n*/\nprivate STNode parseOptionalExpressionsList() {\nList expressions = new ArrayList<>();\nif (isEndOfListConstructor(peek().kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode expr = parseExpression();\nexpressions.add(expr);\nreturn parseOptionalExpressionsList(expressions);\n}\nprivate STNode parseOptionalExpressionsList(List expressions) {\nSTNode listConstructorMemberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nlistConstructorMemberEnd = parseListConstructorMemberEnd();\nif (listConstructorMemberEnd == null) {\nbreak;\n}\nexpressions.add(listConstructorMemberEnd);\nSTNode expr = parseExpression();\nexpressions.add(expr);\n}\nreturn STNodeFactory.createNodeList(expressions);\n}\nprivate boolean isEndOfListConstructor(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseListConstructorMemberEnd() {\nreturn parseListConstructorMemberEnd(peek().kind);\n}\nprivate STNode parseListConstructorMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListConstructorMemberEnd(solution.tokenKind);\n}\n}\n/**\n* Parse foreach statement.\n* foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt\n*\n* @return foreach statement\n*/\nprivate STNode parseForEachStatement() {\nstartContext(ParserRuleContext.FOREACH_STMT);\nSTNode forEachKeyword = parseForEachKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);\nSTNode inKeyword = parseInKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nSTNode blockStatement = parseBlockNode();\nendContext();\nreturn STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,\nblockStatement);\n}\n/**\n* Parse foreach-keyword.\n*\n* @return ForEach-keyword node\n*/\nprivate STNode parseForEachKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FOREACH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FOREACH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse in-keyword.\n*\n* @return In-keyword node\n*/\nprivate STNode parseInKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse type cast expression.\n*
\n* \n* type-cast-expr := < type-cast-param > expression\n*
\n* type-cast-param := [annots] type-descriptor | annots\n*
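\n* Illustrative casts: <int>someValue with a type-descriptor param, or <@untainted>someValue with an annots-only param (illustrative names).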
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nstartContext(ParserRuleContext.TYPE_CAST);\nSTNode ltToken = parseLTToken();\nSTNode typeCastParam = parseTypeCastParam();\nSTNode gtToken = parseGTToken();\nendContext();\nSTNode expression =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nreturn STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);\n}\nprivate STNode parseTypeCastParam() {\nSTNode annot;\nSTNode type;\nSTToken token = peek();\nswitch (token.kind) {\ncase AT_TOKEN:\nannot = parseAnnotations();\ntoken = peek();\nif (isTypeStartingToken(token.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n} else {\ntype = STNodeFactory.createEmptyNode();\n}\nbreak;\ndefault:\nannot = STNodeFactory.createEmptyNode();\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nbreak;\n}\nreturn STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);\n}\n/**\n* Parse table constructor expression.\n*
\n* \n* table-constructor-expr-rhs := [ [row-list] ]\n* \n*\n* @param tableKeyword tableKeyword that precedes this rhs\n* @param keySpecifier keySpecifier that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {\nswitchContext(ParserRuleContext.TABLE_CONSTRUCTOR);\nSTNode openBracket = parseOpenBracket();\nSTNode rowList = parseRowList();\nSTNode closeBracket = parseCloseBracket();\nreturn STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,\ncloseBracket);\n}\n/**\n* Parse table-keyword.\n*\n* @return Table-keyword node\n*/\nprivate STNode parseTableKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TABLE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TABLE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse table rows.\n*
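\n* For instance (illustrative): table [{id: 1}, {id: 2}] supplies a two-row row-list, matching the production below.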
\n* row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseRowList() {\nSTToken nextToken = peek();\nif (isEndOfTableRowList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList mappings = new ArrayList<>();\nSTNode mapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfTableRowList(nextToken.kind)) {\nleadingComma = parseComma();\nmappings.add(leadingComma);\nmapExpr = parseMappingConstructorExpr();\nmappings.add(mapExpr);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(mappings);\n}\nprivate boolean isEndOfTableRowList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ncase COMMA_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn false;\ndefault:\nreturn isEndOfMappingConstructor(tokenKind);\n}\n}\n/**\n* Parse key specifier.\n*
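\n* For instance (illustrative): key(id, name), as in table key(id, name) [{id: 1, name: "a"}], matches the production below.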
\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier() {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode keyKeyword = parseKeyKeyword();\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode fieldNames = parseFieldNames();\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);\n}\n/**\n* Parse key-keyword.\n*\n* @return Key-keyword node\n*/\nprivate STNode parseKeyKeyword() {\nSTToken token = peek();\nif (isKeyKeyword(token)) {\nreturn getKeyKeyword(consume());\n} else {\nSolution sol = recover(token, ParserRuleContext.KEY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nstatic boolean isKeyKeyword(STToken token) {\nreturn token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());\n}\nprivate STNode getKeyKeyword(STToken token) {\nreturn STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),\ntoken.diagnostics());\n}\n/**\n* Parse field names.\n*
\n* field-name-list := [ field-name (, field-name)* ]\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldNames() {\nSTToken nextToken = peek();\nif (isEndOfFieldNamesList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList fieldNames = new ArrayList<>();\nSTNode fieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfFieldNamesList(nextToken.kind)) {\nleadingComma = parseComma();\nfieldNames.add(leadingComma);\nfieldName = parseVariableName();\nfieldNames.add(fieldName);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(fieldNames);\n}\nprivate boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase IDENTIFIER_TOKEN:\nreturn false;\ndefault:\nreturn true;\n}\n}\n/**\n* Parse error type descriptor.\n*
\n* error-type-descriptor := error [error-type-param]\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*
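\n* Illustrative error type descriptors: error, error<*> (inferred), and error<Detail> (hypothetical Detail type).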
\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeDescriptor() {\nSTNode errorKeywordToken = parseErrorKeyword();\nSTNode errorTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nerrorTypeParamsNode = parseErrorTypeParamsNode();\n} else {\nerrorTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);\n}\n/**\n* Parse error type param node.\n*
\n* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >\n* detail-type-descriptor := type-descriptor\n* inferred-type-descriptor := *\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseErrorTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nSTNode parameter;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nparameter = consume();\n} else {\nparameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n}\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken);\n}\n/**\n* Parse error-keyword.\n*\n* @return Parsed error-keyword node\n*/\nprivate STNode parseErrorKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ERROR_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ERROR_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse typedesc type descriptor.\n* typedesc-type-descriptor := typedesc type-parameter\n*\n* @return Parsed typedesc type node\n*/\nprivate STNode parseTypedescTypeDescriptor() {\nSTNode typedescKeywordToken = parseTypedescKeyword();\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse typedesc-keyword.\n*\n* @return Parsed typedesc-keyword node\n*/\nprivate STNode parseTypedescKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPEDESC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPEDESC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse stream type descriptor.\n*
\n* stream-type-descriptor := stream [stream-type-parameters]\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*
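\n* Illustrative stream types: stream<int> and stream<int, error>.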
\n*\n* @return Parsed stream type descriptor node\n*/\nprivate STNode parseStreamTypeDescriptor() {\nSTNode streamKeywordToken = parseStreamKeyword();\nSTNode streamTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\nstreamTypeParamsNode = parseStreamTypeParamsNode();\n} else {\nstreamTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);\n}\n/**\n* Parse xml type descriptor.\n* xml-type-descriptor := xml type-parameter\n*\n* @return Parsed typedesc type node\n*/\nprivate STNode parseXmlTypeDescriptor() {\nSTNode xmlKeywordToken = parseXMLKeyword();\nSTNode typedescTypeParamsNode;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.LT_TOKEN) {\ntypedescTypeParamsNode = parseTypeParameter();\n} else {\ntypedescTypeParamsNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode);\n}\n/**\n* Parse stream type params node.\n*
\n* stream-type-parameters := < type-descriptor [, type-descriptor]>\n*
\n*\n* @return Parsed stream type params node\n*/\nprivate STNode parseStreamTypeParamsNode() {\nSTNode ltToken = parseLTToken();\nstartContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\nSTNode leftTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false);\nSTNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\nendContext();\nreturn streamTypedesc;\n}\nprivate STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {\nreturn parseStreamTypeParamsNode(peek().kind, ltToken, leftTypeDescNode);\n}\nprivate STNode parseStreamTypeParamsNode(SyntaxKind nextTokenKind, STNode ltToken, STNode leftTypeDescNode) {\nSTNode commaToken, rightTypeDescNode, gtToken;\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\ncommaToken = parseComma();\nrightTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false);\nbreak;\ncase GT_TOKEN:\ncommaToken = STNodeFactory.createEmptyNode();\nrightTypeDescNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution =\nrecover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStreamTypeParamsNode(solution.tokenKind, ltToken, leftTypeDescNode);\n}\ngtToken = parseGTToken();\nreturn STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,\ngtToken);\n}\n/**\n* Parse stream-keyword.\n*\n* @return Parsed stream-keyword node\n*/\nprivate STNode parseStreamKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STREAM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STREAM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let expression.\n*
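Aside: after the first type parameter, parseStreamTypeParamsNode() above decides between stream&lt;T&gt; and stream&lt;T, E&gt; purely on the next token: a comma means a second type descriptor follows, a '>' means it is absent. A toy sketch of that decision (hypothetical names, not the real parser API):

```java
class StreamTypeParamsSketch {
    // Models the COMMA_TOKEN / GT_TOKEN switch in parseStreamTypeParamsNode().
    static String parse(String... tokensAfterLt) {
        String lhs = tokensAfterLt[0];
        if (",".equals(tokensAfterLt[1])) {
            return "stream<" + lhs + ", " + tokensAfterLt[2] + ">";
        }
        return "stream<" + lhs + ">";
    }

    public static void main(String[] args) {
        System.out.println(parse("int", ",", "error", ">")); // stream<int, error>
        System.out.println(parse("int", ">"));               // stream<int>
    }
}
```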
\n* \n* let-expr := let let-var-decl [, let-var-decl]* in expression\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetExpression(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);\nSTNode inKeyword = parseInKeyword();\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);\n}\n/**\n* Parse let-keyword.\n*\n* @return Let-keyword node\n*/\nprivate STNode parseLetKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LET_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LET_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let variable declarations.\n*
\n* let-var-decl-list := let-var-decl [, let-var-decl]*\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {\nstartContext(context);\nList varDecls = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfLetVarDeclarations(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode varDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\nSTNode leadingComma;\nwhile (!isEndOfLetVarDeclarations(nextToken.kind)) {\nleadingComma = parseComma();\nvarDecls.add(leadingComma);\nvarDec = parseLetVarDecl(isRhsExpr);\nvarDecls.add(varDec);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(varDecls);\n}\nprivate boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\ncase AT_TOKEN:\nreturn false;\ncase IN_KEYWORD:\nreturn true;\ndefault:\nreturn !isTypeStartingToken(tokenKind);\n}\n}\n/**\n* Parse let variable declaration.\n*
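Aside: parseLetVarDeclarations() above uses the separated-list loop that recurs throughout this parser: items and their separating commas are collected into one flat list, mirroring how the syntax tree keeps separator tokens as real children. A minimal sketch of that loop, with a plain string array standing in for the token stream:

```java
import java.util.ArrayList;
import java.util.List;

class SeparatedListSketch {
    public static void main(String[] args) {
        String[] tokens = {"a", ",", "b", ",", "c", "in"};
        List<String> flat = new ArrayList<>();
        int pos = 0;
        flat.add(tokens[pos++]);            // first item
        while (tokens[pos].equals(",")) {   // stands in for !isEndOfLetVarDeclarations(...)
            flat.add(tokens[pos++]);        // separator token is kept in the list
            flat.add(tokens[pos++]);        // next item
        }
        System.out.println(flat);           // [a, ,, b, ,, c]
    }
}
```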
\n* let-var-decl := [annots] typed-binding-pattern = expression\n*\n* @return Parsed node\n*/\nprivate STNode parseLetVarDecl(boolean isRhsExpr) {\nSTNode annot = parseAnnotations();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);\nSTNode assign = parseAssignOp();\nSTNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);\nreturn STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);\n}\n/**\n* Parse raw backtick string template expression.\n*
\n* BacktickString := `expression`\n*\n* @return Template expression node\n*/\nprivate STNode parseTemplateExpression() {\nSTNode type = STNodeFactory.createEmptyNode();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\nprivate STNode parseTemplateContent() {\nList items = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nitems.add(contentItem);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\nprivate boolean isEndOfBacktickContent(SyntaxKind kind) {\nswitch (kind) {\ncase EOF_TOKEN:\ncase BACKTICK_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTemplateItem() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn parseInterpolation();\n}\nreturn consume();\n}\n/**\n* Parse string template expression.\n*
\n* string-template-expr := string ` expression `\n*\n* @return String template expression node\n*/\nprivate STNode parseStringTemplateExpression() {\nSTNode type = parseStringKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContent();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,\ncontent, endingBackTick);\n}\n/**\n* Parse string keyword.\n*\n* @return string keyword node\n*/\nprivate STNode parseStringKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STRING_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STRING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse XML template expression.\n*
\n* xml-template-expr := xml BacktickString\n*\n* @return XML template expression\n*/\nprivate STNode parseXMLTemplateExpression() {\nSTNode xmlKeyword = parseXMLKeyword();\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseTemplateContentAsXML();\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,\nstartingBackTick, content, endingBackTick);\n}\n/**\n* Parse xml keyword.\n*\n* @return xml keyword node\n*/\nprivate STNode parseXMLKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.XML_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.XML_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse the content of the template string as XML. This method first read the\n* input in the same way as the raw-backtick-template (BacktickString). Then\n* it parses the content as XML.\n*\n* @return XML node\n*/\nprivate STNode parseTemplateContentAsXML() {\nArrayDeque expressions = new ArrayDeque<>();\nStringBuilder xmlStringBuilder = new StringBuilder();\nSTToken nextToken = peek();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode contentItem = parseTemplateItem();\nif (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {\nxmlStringBuilder.append(((STToken) contentItem).text());\n} else {\nxmlStringBuilder.append(\"${}\");\nexpressions.add(contentItem);\n}\nnextToken = peek();\n}\nTextDocument textDocument = TextDocuments.from(xmlStringBuilder.toString());\nAbstractTokenReader tokenReader = new TokenReader(new XMLLexer(textDocument.getCharacterReader()));\nXMLParser xmlParser = new XMLParser(tokenReader, expressions);\nreturn xmlParser.parse();\n}\n/**\n* Parse interpolation of a back-tick string.\n*
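Aside: parseTemplateContentAsXML() above is a two-phase trick: literal chunks are concatenated with a "${}" placeholder for each interpolation, and the resulting string is handed to a separate XML parser together with the queued interpolation expressions. A self-contained sketch of that phase split (the Object items are stand-ins for template-item nodes):

```java
import java.util.ArrayDeque;

class XmlTemplateSketch {
    public static void main(String[] args) {
        Object[] items = {"<name>", /* interpolation: */ 42, "</name>"};
        StringBuilder xml = new StringBuilder();
        ArrayDeque<Object> expressions = new ArrayDeque<>();
        for (Object item : items) {
            if (item instanceof String s) {
                xml.append(s);            // literal template chunk
            } else {
                xml.append("${}");        // placeholder kept for the XML lexer
                expressions.add(item);    // expression re-attached during XML parsing
            }
        }
        System.out.println(xml);          // <name>${}</name>
        System.out.println(expressions);  // [42]
    }
}
```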
\n* \n* interpolation := ${ expression }\n* \n*\n* @return Interpolation node\n*/\nprivate STNode parseInterpolation() {\nstartContext(ParserRuleContext.INTERPOLATION);\nSTNode interpolStart = parseInterpolationStart();\nSTNode expr = parseExpression();\nwhile (true) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.EOF_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nbreak;\n} else {\nnextToken = consume();\nexpr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,\nDiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());\n}\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);\n}\n/**\n* Parse interpolation start token.\n*
\n* interpolation-start := ${\n*\n* @return Interpolation start token\n*/\nprivate STNode parseInterpolationStart() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse back-tick token.\n*\n* @return Back-tick token\n*/\nprivate STNode parseBacktickToken(ParserRuleContext ctx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ctx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse table type descriptor.\n*
\n* table-type-descriptor := table row-type-parameter [key-constraint]\n* row-type-parameter := type-parameter\n* key-constraint := key-specifier | key-type-constraint\n* key-specifier := key ( [ field-name (, field-name)* ] )\n* key-type-constraint := key type-parameter\n*
\n*\n* @return Parsed table type desc node.\n*/\nprivate STNode parseTableTypeDescriptor() {\nSTNode tableKeywordToken = parseTableKeyword();\nSTNode rowTypeParameterNode = parseRowTypeParameter();\nSTNode keyConstraintNode;\nSTToken nextToken = peek();\nif (isKeyKeyword(nextToken)) {\nSTNode keyKeywordToken = getKeyKeyword(consume());\nkeyConstraintNode = parseKeyConstraint(keyKeywordToken);\n} else {\nkeyConstraintNode = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);\n}\n/**\n* Parse row type parameter node.\n*
\n* row-type-parameter := type-parameter\n*
\n*\n* @return Parsed node.\n*/\nprivate STNode parseRowTypeParameter() {\nstartContext(ParserRuleContext.ROW_TYPE_PARAM);\nSTNode rowTypeParameterNode = parseTypeParameter();\nendContext();\nreturn rowTypeParameterNode;\n}\n/**\n* Parse type parameter node.\n*
\n* type-parameter := < type-descriptor >\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeParameter() {\nSTNode ltToken = parseLTToken();\nSTNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\nSTNode gtToken = parseGTToken();\nreturn STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);\n}\n/**\n* Parse key constraint.\n*
\n* key-constraint := key-specifier | key-type-constraint\n*
\n*\n* @return Parsed node.\n*/\nprivate STNode parseKeyConstraint(STNode keyKeywordToken) {\nreturn parseKeyConstraint(peek().kind, keyKeywordToken);\n}\nprivate STNode parseKeyConstraint(SyntaxKind nextTokenKind, STNode keyKeywordToken) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseKeySpecifier(keyKeywordToken);\ncase LT_TOKEN:\nreturn parseKeyTypeConstraint(keyKeywordToken);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseKeyConstraint(solution.tokenKind, keyKeywordToken);\n}\n}\n/**\n* Parse key specifier given parsed key keyword token.\n*
\n* key-specifier := key ( [ field-name (, field-name)* ] )\n*\n* @return Parsed node\n*/\nprivate STNode parseKeySpecifier(STNode keyKeywordToken) {\nstartContext(ParserRuleContext.KEY_SPECIFIER);\nSTNode openParenToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode fieldNamesNode = parseFieldNames();\nSTNode closeParenToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);\n}\n/**\n* Parse key type constraint.\n*
\n* key-type-constraint := key type-parameter\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseKeyTypeConstraint(STNode keyKeywordToken) {\nSTNode typeParameterNode = parseTypeParameter();\nreturn STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);\n}\n/**\n* Parse function type descriptor.\n*
\n* function-type-descriptor := function function-signature\n*\n* @return Function type descriptor node\n*/\nprivate STNode parseFunctionTypeDesc() {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode signature = parseFuncSignature(true);\nendContext();\nreturn STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature);\n}\n/**\n* Parse explicit anonymous function expression.\n*
\n* explicit-anonymous-function-expr := [annots] function function-signature anon-func-body\n*\n* @param annots Annotations.\n* @param isRhsExpr Is expression in rhs context\n* @return Anonymous function expression node\n*/\nprivate STNode parseExplicitFunctionExpression(STNode annots, boolean isRhsExpr) {\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nSTNode funcKeyword = parseFunctionKeyword();\nSTNode funcSignature = parseFuncSignature(false);\nSTNode funcBody = parseAnonFuncBody(isRhsExpr);\nreturn STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature,\nfuncBody);\n}\n/**\n* Parse anonymous function body.\n*
\n* anon-func-body := block-function-body | expr-function-body\n*\n* @param isRhsExpr Is expression in rhs context\n* @return Anon function body node\n*/\nprivate STNode parseAnonFuncBody(boolean isRhsExpr) {\nreturn parseAnonFuncBody(peek().kind, isRhsExpr);\n}\nprivate STNode parseAnonFuncBody(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\ncase EOF_TOKEN:\nSTNode body = parseFunctionBodyBlock(true);\nendContext();\nreturn body;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nendContext();\nreturn parseExpressionFuncBody(true, isRhsExpr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAnonFuncBody(solution.tokenKind, isRhsExpr);\n}\n}\n/**\n* Parse expression function body.\n*
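Aside: parseAnonFuncBody() above dispatches on a single token of lookahead: '{' starts a block-function-body, '=>' starts an expr-function-body, anything else goes to recovery. A toy sketch of that dispatch (strings stand in for SyntaxKind values):

```java
class AnonFuncBodySketch {
    static String bodyKind(String nextToken) {
        switch (nextToken) {
            case "{":  return "block-function-body";
            case "=>": return "expr-function-body";
            default:   return "recover";   // stands in for the recover(...) call
        }
    }

    public static void main(String[] args) {
        System.out.println(bodyKind("{"));   // block-function-body
        System.out.println(bodyKind("=>"));  // expr-function-body
    }
}
```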
\n* expr-function-body := => expression\n*\n* @param isAnon Is anonymous function.\n* @param isRhsExpr Is expression in rhs context\n* @return Expression function body node\n*/\nprivate STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nSTNode semiColon;\nif (isAnon) {\nsemiColon = STNodeFactory.createEmptyNode();\n} else {\nsemiColon = parseSemicolon();\n}\nreturn STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);\n}\n/**\n* Parse '=>' token.\n*\n* @return Double right arrow token\n*/\nprivate STNode parseDoubleRightArrow() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {\nswitch (params.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase INFER_PARAM_LIST:\nbreak;\ncase BRACED_EXPRESSION:\nparams = getAnonFuncParam((STBracedExpressionNode) params);\nbreak;\ndefault:\nparams = SyntaxErrors.addDiagnostic(params,\nDiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);\n}\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);\n}\n/**\n* Create a new anon-func-param node from a braced expression.\n*\n* @param params Braced expression\n* @return Anon-func param node\n*/\nprivate STNode getAnonFuncParam(STBracedExpressionNode params) {\nList paramList = new ArrayList<>();\nparamList.add(params.expression);\nreturn STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen,\nSTNodeFactory.createNodeList(paramList), params.closeParen);\n}\n/**\n* Parse implicit anon function expression.\n*\n* @param openParen Open parenthesis token\n* @param firstParam First parameter\n* @param isRhsExpr Is expression in rhs context\n* @return Implicit anon function expression node\n*/\nprivate STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {\nList paramList = new ArrayList<>();\nparamList.add(firstParam);\nSTToken nextToken = peek();\nSTNode paramEnd;\nSTNode param;\nwhile (!isEndOfAnonFuncParametersList(nextToken.kind)) {\nparamEnd = parseImplicitAnonFuncParamEnd(nextToken.kind);\nif (paramEnd == null) {\nbreak;\n}\nparamList.add(paramEnd);\nparam = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);\nparam = STNodeFactory.createSimpleNameReferenceNode(param);\nparamList.add(param);\nnextToken = peek();\n}\nSTNode params = STNodeFactory.createNodeList(paramList);\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nSTNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nreturn parseImplicitAnonFunc(inferedParams, isRhsExpr);\n}\nprivate STNode parseImplicitAnonFuncParamEnd() {\nreturn parseImplicitAnonFuncParamEnd(peek().kind);\n}\nprivate STNode parseImplicitAnonFuncParamEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);\nif (solution.action == Action.REMOVE) {\nreturn 
solution.recoveredNode;\n}\nreturn parseImplicitAnonFuncParamEnd(solution.tokenKind);\n}\n}\nprivate boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse tuple type descriptor.\n*
\n* tuple-type-descriptor := [ tuple-member-type-descriptors ]\n*
\n* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]\n* | [ tuple-rest-descriptor ]\n*
\n* tuple-rest-descriptor := type-descriptor ...\n*
\n*\n* @return\n*/\nprivate STNode parseTupleTypeDesc() {\nSTNode openBracket = parseOpenBracket();\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode memberTypeDesc = parseTupleMemberTypeDescList();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nopenBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,\nDiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);\n}\n/**\n* Parse tuple member type descriptors.\n*\n* @return Parsed node\n*/\nprivate STNode parseTupleMemberTypeDescList() {\nList typeDescList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfTypeList(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode typeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nreturn parseTupleTypeMembers(typeDesc, typeDescList);\n}\nprivate STNode parseTupleTypeMembers(STNode typeDesc, List typeDescList) {\nSTToken nextToken;\nnextToken = peek();\nSTNode tupleMemberRhs;\nwhile (!isEndOfTypeList(nextToken.kind)) {\ntupleMemberRhs = parseTupleMemberRhs(nextToken.kind);\nif (tupleMemberRhs == null) {\nbreak;\n}\nif (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) {\ntypeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);\nbreak;\n}\ntypeDescList.add(typeDesc);\ntypeDescList.add(tupleMemberRhs);\ntypeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nnextToken = peek();\n}\ntypeDescList.add(typeDesc);\nreturn STNodeFactory.createNodeList(typeDescList);\n}\nprivate STNode parseTupleMemberRhs() {\nreturn parseTupleMemberRhs(peek().kind);\n}\nprivate STNode parseTupleMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ncase ELLIPSIS_TOKEN:\nreturn parseEllipsis();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTupleMemberRhs(solution.tokenKind);\n}\n}\nprivate boolean isEndOfTypeList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse table constructor or query expression.\n*
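Aside: in parseTupleTypeMembers() above, an ellipsis folds into a rest descriptor: "..." attaches to the preceding member type and ends the member list, so [int, string...] keeps int as a member plus a rest node for string. A minimal sketch of that folding, with strings standing in for type-descriptor nodes:

```java
import java.util.ArrayList;
import java.util.List;

class TupleRestSketch {
    public static void main(String[] args) {
        String[] tokens = {"int", ",", "string", "...", "]"};
        List<String> members = new ArrayList<>();
        String current = tokens[0];
        int pos = 1;
        while (!tokens[pos].equals("]")) {
            if (tokens[pos].equals("...")) {
                current = current + "...";   // rest-descriptor wraps the last type
                break;
            }
            members.add(current);            // finished member
            members.add(tokens[pos++]);      // separating comma kept in the list
            current = tokens[pos++];
        }
        members.add(current);
        System.out.println(members);         // [int, ,, string...]
    }
}
```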
\n* \n* table-constructor-or-query-expr := table-constructor-expr | query-expr\n*
\n* table-constructor-expr := table [key-specifier] [ [row-list] ]\n*
\n* query-expr := [query-construct-type] query-pipeline select-clause\n* | [query-construct-type] query-pipeline select-clause on-conflict-clause? limit-clause?\n*
\n* query-construct-type := table key-specifier | stream\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseTableConstructorOrQuery(boolean isRhsExpr) {\nstartContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);\nSTNode tableOrQueryExpr = parseTableConstructorOrQuery(peek().kind, isRhsExpr);\nendContext();\nreturn tableOrQueryExpr;\n}\nprivate STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nSTNode queryConstructType;\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nqueryConstructType = STNodeFactory.createEmptyNode();\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase STREAM_KEYWORD:\nqueryConstructType = parseQueryConstructType(parseStreamKeyword(), null);\nreturn parseQueryExprRhs(queryConstructType, isRhsExpr);\ncase TABLE_KEYWORD:\nSTNode tableKeyword = parseTableKeyword();\nreturn parseTableConstructorOrQuery(tableKeyword, isRhsExpr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQuery(solution.tokenKind, isRhsExpr);\n}\n}\nprivate STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseTableConstructorOrQuery(nextToken.kind, nextToken, tableKeyword, isRhsExpr);\n}\nprivate STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, STToken nextToken, STNode tableKeyword,\nboolean isRhsExpr) {\nSTNode keySpecifier;\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nkeySpecifier = STNodeFactory.createEmptyNode();\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ncase KEY_KEYWORD:\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\ncase IDENTIFIER_TOKEN:\nif (isKeyKeyword(nextToken)) {\nkeySpecifier = parseKeySpecifier();\nreturn parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n}\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQuery(solution.tokenKind, null, tableKeyword, isRhsExpr);\n}\n}\nprivate STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {\nreturn parseTableConstructorOrQueryRhs(peek().kind, tableKeyword, keySpecifier, isRhsExpr);\n}\nprivate STNode parseTableConstructorOrQueryRhs(SyntaxKind nextTokenKind, STNode tableKeyword, STNode keySpecifier,\nboolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nreturn parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseTableConstructorExprRhs(tableKeyword, keySpecifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword,\nkeySpecifier, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTableConstructorOrQueryRhs(solution.tokenKind, tableKeyword, keySpecifier, isRhsExpr);\n}\n}\n/**\n* Parse query construct type.\n*
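Aside: parseTableConstructorOrQuery() and its Rhs variant above disambiguate with one or two tokens of lookahead after the 'table' keyword: '[' starts a constructor; after a key-specifier, 'from' starts a query while '[' still starts a constructor. A simplified, hedged sketch of that classification (it ignores recovery and identifier-as-'key' handling):

```java
class TableDispatchSketch {
    static String classify(String afterTable, String afterKeySpecifier) {
        if ("[".equals(afterTable)) {
            return "table-constructor-expr";                       // table [ ... ]
        }
        if ("key".equals(afterTable)) {
            return "from".equals(afterKeySpecifier)
                    ? "query-expr"                                 // table key(id) from ...
                    : "table-constructor-expr";                    // table key(id) [ ... ]
        }
        return "recover";                                          // error recovery path
    }

    public static void main(String[] args) {
        System.out.println(classify("[", null));
        System.out.println(classify("key", "from"));
        System.out.println(classify("key", "["));
    }
}
```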
\n* query-construct-type := table key-specifier | stream\n*\n* @return Parsed node\n*/\nprivate STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {\nreturn STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);\n}\n/**\n* Parse query expression.\n*
\n* \n* query-expr-rhs := query-pipeline select-clause\n* | query-pipeline select-clause on-conflict-clause? limit-clause?\n*
\n* query-pipeline := from-clause intermediate-clause*\n*
\n*\n* @param queryConstructType queryConstructType that precedes this rhs\n* @return Parsed node\n*/\nprivate STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {\nswitchContext(ParserRuleContext.QUERY_EXPRESSION);\nSTNode fromClause = parseFromClause(isRhsExpr);\nList clauses = new ArrayList<>();\nSTNode intermediateClause;\nSTNode selectClause = null;\nwhile (!isEndOfIntermediateClause(peek().kind, SyntaxKind.NONE)) {\nintermediateClause = parseIntermediateClause(isRhsExpr);\nif (intermediateClause == null) {\nbreak;\n}\nif (selectClause != null) {\nselectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,\nDiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);\ncontinue;\n}\nif (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {\nselectClause = intermediateClause;\n} else {\nclauses.add(intermediateClause);\n}\n}\nif (peek().kind == SyntaxKind.DO_KEYWORD) {\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nreturn parseQueryAction(queryPipeline, selectClause, isRhsExpr);\n}\nif (selectClause == null) {\nSTNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);\nSTNode expr = STNodeFactory\n.createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\nselectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);\nif (clauses.isEmpty()) {\nfromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\n} else {\nint lastIndex = clauses.size() - 1;\nSTNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),\nDiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\nclauses.set(lastIndex, intClauseWithDiagnostic);\n}\n}\nSTNode intermediateClauses = STNodeFactory.createNodeList(clauses);\nSTNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\nSTNode onConflictClause = parseOnConflictClause(isRhsExpr);\nSTNode limitClause = parseLimitClause(isRhsExpr);\nreturn STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,\nonConflictClause, limitClause);\n}\n/**\n* Parse limit keyword.\n*\n* @return Limit keyword node\n*/\nprivate STNode parseLimitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LIMIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LIMIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse an intermediate clause.\n*
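Aside: the clause bookkeeping in parseQueryExprRhs() above remembers the first select-clause and treats any clause parsed after it as invalid trailing content. A small sketch of that bookkeeping (diagnostics reduced to println; strings stand in for clause nodes):

```java
import java.util.ArrayList;
import java.util.List;

class QueryClauseSketch {
    public static void main(String[] args) {
        String[] clauses = {"from", "where", "select", "where"};
        List<String> pipeline = new ArrayList<>();
        String select = null;
        for (String clause : clauses) {
            if (select != null) {
                System.out.println("error: clause after select: " + clause);
            } else if (clause.equals("select")) {
                select = clause;                 // first select ends the pipeline
            } else {
                pipeline.add(clause);            // intermediate clause
            }
        }
        System.out.println(pipeline + " select=" + select); // [from, where] select=select
    }
}
```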
\n* \n* intermediate-clause := from-clause | where-clause | let-clause | join-clause\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseIntermediateClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseIntermediateClause(nextToken.kind, isRhsExpr);\n}\nprivate STNode parseIntermediateClause(SyntaxKind nextTokenKind, boolean isRhsExpr) {\nswitch (nextTokenKind) {\ncase FROM_KEYWORD:\nreturn parseFromClause(isRhsExpr);\ncase WHERE_KEYWORD:\nreturn parseWhereClause(isRhsExpr);\ncase LET_KEYWORD:\nreturn parseLetClause(isRhsExpr);\ncase SELECT_KEYWORD:\nreturn parseSelectClause(isRhsExpr);\ncase JOIN_KEYWORD:\ncase OUTER_KEYWORD:\nreturn parseJoinClause(isRhsExpr);\ncase DO_KEYWORD:\ncase SEMICOLON_TOKEN:\ncase ON_KEYWORD:\ncase CONFLICT_KEYWORD:\ncase LIMIT_KEYWORD:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseIntermediateClause(solution.tokenKind, isRhsExpr);\n}\n}\n/**\n* Parse join-keyword.\n*\n* @return Join-keyword node\n*/\nprivate STNode parseJoinKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.JOIN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.JOIN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse outer-keyword.\n*\n* @return Outer-keyword node\n*/\nprivate STNode parseOuterKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OUTER_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OUTER_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate boolean isEndOfIntermediateClause(SyntaxKind tokenKind, SyntaxKind precedingNodeKind) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase EOF_TOKEN:\ncase RESOURCE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase DOCUMENTATION_STRING:\ncase PRIVATE_KEYWORD:\ncase RETURNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase TYPE_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase DO_KEYWORD:\nreturn true;\ndefault:\nreturn isValidExprRhsStart(tokenKind, precedingNodeKind);\n}\n}\n/**\n* Parse from clause.\n*
\n* from-clause := from typed-binding-pattern in expression\n*\n* @return Parsed node\n*/\nprivate STNode parseFromClause(boolean isRhsExpr) {\nSTNode fromKeyword = parseFromKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);\n}\n/**\n* Parse from-keyword.\n*\n* @return From-keyword node\n*/\nprivate STNode parseFromKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FROM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FROM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse where clause.\n*
\n* where-clause := where expression\n*\n* @return Parsed node\n*/\nprivate STNode parseWhereClause(boolean isRhsExpr) {\nSTNode whereKeyword = parseWhereKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createWhereClauseNode(whereKeyword, expression);\n}\n/**\n* Parse where-keyword.\n*\n* @return Where-keyword node\n*/\nprivate STNode parseWhereKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WHERE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WHERE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse let clause.\n*
\n* let-clause := let let-var-decl [, let-var-decl]* \n*\n* @return Parsed node\n*/\nprivate STNode parseLetClause(boolean isRhsExpr) {\nSTNode letKeyword = parseLetKeyword();\nSTNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);\nletKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\nDiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\nreturn STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);\n}\n/**\n* Parse select clause.\n*
\n* select-clause := select expression\n*\n* @return Parsed node\n*/\nprivate STNode parseSelectClause(boolean isRhsExpr) {\nSTNode selectKeyword = parseSelectKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createSelectClauseNode(selectKeyword, expression);\n}\n/**\n* Parse select-keyword.\n*\n* @return Select-keyword node\n*/\nprivate STNode parseSelectKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SELECT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SELECT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse on-conflict clause.\n*
\n* \n* on-conflict-clause := on conflict expression\n* \n*\n* @return On conflict clause node\n*/\nprivate STNode parseOnConflictClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ON_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode onKeyword = parseOnKeyword();\nSTNode conflictKeyword = parseConflictKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);\n}\n/**\n* Parse conflict keyword.\n*\n* @return Conflict keyword node\n*/\nprivate STNode parseConflictKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONFLICT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONFLICT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse limit clause.\n*
\n* limit-clause := limit expression\n*\n* @return Limit expression node\n*/\nprivate STNode parseLimitClause(boolean isRhsExpr) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.LIMIT_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode limitKeyword = parseLimitKeyword();\nSTNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createLimitClauseNode(limitKeyword, expr);\n}\n/**\n* Parse join clause.\n*
\n* \n* join-clause := (join-var-decl | outer-join-var-decl) in expression\n*
\n* join-var-decl := join (typeName | var) bindingPattern\n*
\n* outer-join-var-decl := outer join var binding-pattern\n*
\n*\n* @return Join clause\n*/\nprivate STNode parseJoinClause(boolean isRhsExpr) {\nSTNode outerKeyword;\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {\nouterKeyword = parseOuterKeyword();\n} else {\nouterKeyword = STNodeFactory.createEmptyNode();\n}\nSTNode joinKeyword = parseJoinKeyword();\nSTNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);\nSTNode inKeyword = parseInKeyword();\nSTNode onCondition;\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nnextToken = peek();\nif (nextToken.kind == SyntaxKind.ON_KEYWORD) {\nonCondition = parseOnClause(isRhsExpr);\n} else {\nonCondition = STNodeFactory.createEmptyNode();\n}\nreturn STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression,\nonCondition);\n}\n/**\n* Parse on clause.\n*
\n* on-clause := on expression\n*\n* @return On clause node\n*/\nprivate STNode parseOnClause(boolean isRhsExpr) {\nSTNode onKeyword = parseOnKeyword();\nSTNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\nreturn STNodeFactory.createOnClauseNode(onKeyword, expression);\n}\n/**\n* Parse start action.\n*
\n* start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)\n*\n* @return Start action node\n*/\nprivate STNode parseStartAction(STNode annots) {\nSTNode startKeyword = parseStartKeyword();\nSTNode expr = parseActionOrExpression();\nswitch (expr.kind) {\ncase FUNCTION_CALL:\ncase METHOD_CALL:\ncase REMOTE_METHOD_CALL_ACTION:\nbreak;\ndefault:\nif (!isMissingNode(expr)) {\nexpr = SyntaxErrors.addDiagnostic(expr,\nDiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);\n}\n}\nreturn STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);\n}\n/**\n* Parse start keyword.\n*\n* @return Start keyword node\n*/\nprivate STNode parseStartKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.START_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.START_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse flush action.\n*
\n* flush-action := flush [peer-worker]\n*\n* @return flush action node\n*/\nprivate STNode parseFlushAction() {\nSTNode flushKeyword = parseFlushKeyword();\nSTNode peerWorker = parseOptionalPeerWorkerName();\nreturn STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);\n}\n/**\n* Parse flush keyword.\n*\n* @return flush keyword node\n*/\nprivate STNode parseFlushKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FLUSH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FLUSH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse peer worker.\n*
\n* peer-worker := worker-name | default\n*\n* @return peer worker name node\n*/\nprivate STNode parseOptionalPeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase DEFAULT_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nreturn STNodeFactory.createEmptyNode();\n}\n}\n/**\n* Parse intersection type descriptor.\n*
\n* intersection-type-descriptor := type-descriptor & type-descriptor\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode bitwiseAndToken = consume();\nSTNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n}\n/**\n* Parse singleton type descriptor.\n*
\n* singleton-type-descriptor := simple-const-expr\n* simple-const-expr :=\n* nil-literal\n* | boolean-literal\n* | [Sign] int-literal\n* | [Sign] floating-point-literal\n* | string-literal\n* | constant-reference-expr\n*
\n*/\nprivate STNode parseSingletonTypeDesc() {\nSTNode simpleContExpr = parseSimpleConstExpr();\nreturn STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);\n}\nprivate STNode parseSignedIntOrFloat() {\nSTNode operator = parseUnaryOperator();\nSTNode literal;\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nliteral = parseBasicLiteral();\nbreak;\ndefault:\nliteral = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL);\nliteral = STNodeFactory.createBasicLiteralNode(literal.kind, literal);\n}\nreturn STNodeFactory.createUnaryExpressionNode(operator, literal);\n}\nprivate boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {\nSTToken nextNextToken = getNextNextToken(tokenKind);\nswitch (tokenKind) {\ncase STRING_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase NULL_KEYWORD:\nif (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {\nreturn true;\n}\nreturn false;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isIntOrFloat(nextNextToken);\ndefault:\nreturn false;\n}\n}\nstatic boolean isIntOrFloat(STToken token) {\nswitch (token.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the parser reached to a valid expression start.\n*\n* @param nextTokenKind Kind of the next immediate token.\n* @param nextTokenIndex Index to the next token.\n* @return true if this is a start of a valid expression. 
false otherwise\n*/\nprivate boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {\nnextTokenIndex++;\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;\nreturn nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||\nnextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||\nisValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase IDENTIFIER_TOKEN:\nreturn isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);\ncase OPEN_PAREN_TOKEN:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase TYPEOF_KEYWORD:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\ncase TRAP_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase LT_TOKEN:\ncase FROM_KEYWORD:\ncase LET_KEYWORD:\ncase BACKTICK_TOKEN:\ncase NEW_KEYWORD:\ncase LEFT_ARROW_TOKEN:\nreturn true;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);\ncase FUNCTION_KEYWORD:\ncase TABLE_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;\ncase STREAM_KEYWORD:\nSTToken nextNextToken = peek(nextTokenIndex);\nreturn nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||\nnextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||\nnextNextToken.kind == SyntaxKind.FROM_KEYWORD;\ncase ERROR_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;\ncase SERVICE_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.OPEN_BRACE_TOKEN;\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nreturn peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase WAIT_KEYWORD:\ndefault:\nreturn false;\n}\n}\n/**\n* Parse sync send action.\n*
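Aside: isValidExpressionStart() above is a bounded-lookahead predicate; notably, a '+' or '-' sign counts as an expression start only if the token after it does. A tiny sketch of that recursion (string token kinds are stand-ins for SyntaxKind, and bounds checking is omitted for brevity):

```java
class ExprStartSketch {
    static boolean isExprStart(String[] tokens, int i) {
        switch (tokens[i]) {
            case "+":
            case "-":
                return isExprStart(tokens, i + 1);   // recurse past the sign
            case "(": case "[": case "identifier": case "int-literal":
                return true;
            default:
                return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(isExprStart(new String[] {"-", "int-literal"}, 0)); // true
        System.out.println(isExprStart(new String[] {"-", "}"}, 0));           // false
    }
}
```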
\n* sync-send-action := expression ->> peer-worker\n*\n* @param expression LHS expression of the sync send action\n* @return Sync send action node\n*/\nprivate STNode parseSyncSendAction(STNode expression) {\nSTNode syncSendToken = parseSyncSendToken();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);\n}\n/**\n* Parse peer worker.\n*
\n* peer-worker := worker-name | default\n*\n* @return peer worker name node\n*/\nprivate STNode parsePeerWorkerName() {\nSTToken token = peek();\nswitch (token.kind) {\ncase IDENTIFIER_TOKEN:\ncase DEFAULT_KEYWORD:\nreturn STNodeFactory.createSimpleNameReferenceNode(consume());\ndefault:\nSolution sol = recover(token, ParserRuleContext.PEER_WORKER_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse sync send token.\n*
\n* sync-send-token := ->> \n*\n* @return sync send token\n*/\nprivate STNode parseSyncSendToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SYNC_SEND_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse receive action.\n*
\n* receive-action := single-receive-action | multiple-receive-action\n*\n* @return Receive action\n*/\nprivate STNode parseReceiveAction() {\nSTNode leftArrow = parseLeftArrowToken();\nSTNode receiveWorkers = parseReceiveWorkers();\nreturn STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);\n}\nprivate STNode parseReceiveWorkers() {\nreturn parseReceiveWorkers(peek().kind);\n}\nprivate STNode parseReceiveWorkers(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\ncase IDENTIFIER_TOKEN:\nreturn parsePeerWorkerName();\ncase OPEN_BRACE_TOKEN:\nreturn parseMultipleReceiveWorkers();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_WORKERS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseReceiveWorkers(solution.tokenKind);\n}\n}\n/**\n* Parse multiple worker receivers.\n*
\n* { receive-field (, receive-field)* }\n*\n* @return Multiple worker receiver node\n*/\nprivate STNode parseMultipleReceiveWorkers() {\nstartContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);\nSTNode openBrace = parseOpenBrace();\nSTNode receiveFields = parseReceiveFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);\nreturn STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);\n}\nprivate STNode parseReceiveFields() {\nList receiveFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfReceiveFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode receiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\nSTNode recieveFieldEnd;\nwhile (!isEndOfReceiveFields(nextToken.kind)) {\nrecieveFieldEnd = parseReceiveFieldEnd(nextToken.kind);\nif (recieveFieldEnd == null) {\nbreak;\n}\nreceiveFields.add(recieveFieldEnd);\nreceiveField = parseReceiveField();\nreceiveFields.add(receiveField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(receiveFields);\n}\nprivate boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseReceiveFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseReceiveFieldEnd(solution.tokenKind);\n}\n}\nprivate STNode parseReceiveField() {\nreturn parseReceiveField(peek().kind);\n}\n/**\n* Parse receive field.\n*
\n* receive-field := peer-worker | field-name : peer-worker\n*\n* @param nextTokenKind Kind of the next token\n* @return Receiver field node\n*/\nprivate STNode parseReceiveField(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase DEFAULT_KEYWORD:\nreturn parseDefaultKeyword();\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);\nreturn createQualifiedReceiveField(identifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.tokenKind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn createQualifiedReceiveField(solution.recoveredNode);\n}\nreturn solution.recoveredNode;\n}\n}\nprivate STNode createQualifiedReceiveField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode peerWorker = parsePeerWorkerName();\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);\n}\n/**\n*\n* Parse left arrow (<-) token.\n*\n* @return left arrow token\n*/\nprivate STNode parseLeftArrowToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse signed right shift token (>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseSignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTToken endLGToken = consume();\nSTNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),\nendLGToken.trailingMinutiae());\nif (!validateRightShiftOperatorWS(openGTToken)) {\ndoubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);\n}\nreturn doubleGTToken;\n}\n/**\n* Parse unsigned right shift token (>>>).\n*\n* @return Parsed node\n*/\nprivate STNode parseUnsignedRightShiftToken() {\nSTNode openGTToken = consume();\nSTNode middleGTToken = consume();\nSTNode endLGToken = consume();\nSTNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,\nopenGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());\nboolean validOpenGTToken = validateRightShiftOperatorWS(openGTToken);\nboolean validMiddleGTToken = validateRightShiftOperatorWS(middleGTToken);\nif (validOpenGTToken && validMiddleGTToken) {\nreturn unsignedRightShiftToken;\n}\nunsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,\nDiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);\nreturn unsignedRightShiftToken;\n}\n/**\n* Validate the whitespace between '>' tokens of right shift operators.\n*\n* @param node Preceding node\n* @return the validated node\n*/\nprivate boolean validateRightShiftOperatorWS(STNode node) {\nint diff = node.widthWithTrailingMinutiae() - node.width();\nreturn diff == 0;\n}\n/**\n* Parse wait action.\n*
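Aside: parseSignedRightShiftToken() above illustrates why the lexer never emits a '>>' token: nested type parameters such as map&lt;map&lt;int&gt;&gt; must close with two separate '>' tokens, so the parser merges adjacent '>' tokens into a shift operator only when no whitespace sits between them. A minimal sketch of that merge-and-validate step (hypothetical Tok type, not the real minutiae API):

```java
class RightShiftSketch {
    record Tok(String text, boolean trailingSpace) { }

    static String merge(Tok first, Tok second) {
        if (first.trailingSpace()) {
            // Mirrors ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP.
            return "error: no whitespace allowed in >>";
        }
        return first.text() + second.text();
    }

    public static void main(String[] args) {
        System.out.println(merge(new Tok(">", false), new Tok(">", false))); // >>
        System.out.println(merge(new Tok(">", true), new Tok(">", false)));  // error
    }
}
```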
\n* wait-action := single-wait-action | multiple-wait-action | alternate-wait-action \n*\n* @return Wait action node\n*/\nprivate STNode parseWaitAction() {\nSTNode waitKeyword = parseWaitKeyword();\nif (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nreturn parseMultiWaitAction(waitKeyword);\n}\nreturn parseSingleOrAlternateWaitAction(waitKeyword);\n}\n/**\n* Parse wait keyword.\n*\n* @return wait keyword\n*/\nprivate STNode parseWaitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WAIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WAIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse single or alternate wait actions.\n*
\n* \n* alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)*\n*
\n* wait-future-expr := expression but not mapping-constructor-expr\n*
\n*\n* @param waitKeyword wait keyword\n* @return Single or alternate wait action node\n*/\nprivate STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);\nSTToken nextToken = peek();\nif (isEndOfWaitFutureExprList(nextToken.kind)) {\nendContext();\nSTNode waitFutureExprs = STNodeFactory.createEmptyNodeList();\nwaitKeyword = cloneWithDiagnosticIfListEmpty(waitFutureExprs, waitKeyword,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);\n}\nList waitFutureExprList = new ArrayList<>();\nSTNode waitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\nSTNode waitFutureExprEnd;\nwhile (!isEndOfWaitFutureExprList(nextToken.kind)) {\nwaitFutureExprEnd = parseWaitFutureExprEnd(nextToken.kind, 1);\nif (waitFutureExprEnd == null) {\nbreak;\n}\nwaitFutureExprList.add(waitFutureExprEnd);\nwaitField = parseWaitFutureExpr();\nwaitFutureExprList.add(waitField);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));\n}\nprivate boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFutureExpr() {\nSTNode waitFutureExpr = parseExpression();\nif (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {\nwaitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,\nDiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);\n}\nreturn waitFutureExpr;\n}\nprivate STNode parseWaitFutureExprEnd(int nextTokenIndex) {\nreturn parseWaitFutureExprEnd(peek().kind, 1);\n}\nprivate STNode parseWaitFutureExprEnd(SyntaxKind nextTokenKind, int nextTokenIndex) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ndefault:\nif (isEndOfWaitFutureExprList(nextTokenKind) ||\n!isValidExpressionStart(nextTokenKind, nextTokenIndex)) {\nreturn null;\n}\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END, nextTokenIndex);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitFutureExprEnd(solution.tokenKind, 0);\n}\n}\n/**\n* Parse multiple wait action.\n*
\n* multiple-wait-action := wait { wait-field (, wait-field)* }\n*\n* @param waitKeyword Wait keyword\n* @return Multiple wait action node\n*/\nprivate STNode parseMultiWaitAction(STNode waitKeyword) {\nstartContext(ParserRuleContext.MULTI_WAIT_FIELDS);\nSTNode openBrace = parseOpenBrace();\nSTNode waitFields = parseWaitFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nopenBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,\nDiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);\nSTNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);\nreturn STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);\n}\nprivate STNode parseWaitFields() {\nList waitFields = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfWaitFields(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode waitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\nSTNode waitFieldEnd;\nwhile (!isEndOfWaitFields(nextToken.kind)) {\nwaitFieldEnd = parseWaitFieldEnd(nextToken.kind);\nif (waitFieldEnd == null) {\nbreak;\n}\nwaitFields.add(waitFieldEnd);\nwaitField = parseWaitField();\nwaitFields.add(waitField);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(waitFields);\n}\nprivate boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseWaitFieldEnd() {\nreturn parseWaitFieldEnd(peek().kind);\n}\nprivate STNode parseWaitFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitFieldEnd(solution.tokenKind);\n}\n}\nprivate STNode parseWaitField() {\nreturn parseWaitField(peek().kind);\n}\n/**\n* Parse wait field.\n*
\n* wait-field := variable-name | field-name : wait-future-expr\n*\n* @param nextTokenKind Kind of the next token\n* @return Wait field node\n*/\nprivate STNode parseWaitField(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);\nidentifier = STNodeFactory.createSimpleNameReferenceNode(identifier);\nreturn createQualifiedWaitField(identifier);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseWaitField(solution.tokenKind);\n}\n}\nprivate STNode createQualifiedWaitField(STNode identifier) {\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn identifier;\n}\nSTNode colon = parseColon();\nSTNode waitFutureExpr = parseWaitFutureExpr();\nreturn STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);\n}\n/**\n* Parse annot access expression.\n*
\n* \n* annot-access-expr := expression .@ annot-tag-reference\n*
\n* annot-tag-reference := qualified-identifier | identifier\n*
\n*\n* @param lhsExpr Preceding expression of the annot access\n* @return Parsed node\n*/\nprivate STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode annotAccessToken = parseAnnotChainingToken();\nSTNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);\n}\n/**\n* Parse annot-chaining-token.\n*\n* @return Parsed node\n*/\nprivate STNode parseAnnotChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field access identifier.\n*
\n* field-access-identifier := qualified-identifier | identifier\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {\nreturn parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);\n}\n/**\n* Parse query action.\n*
\n* query-action := query-pipeline do-clause\n*\n* do-clause := do block-stmt\n*
\n*\n* @param queryPipeline Query pipeline\n* @param selectClause Select clause if any This is only for validation.\n* @return Query action node\n*/\nprivate STNode parseQueryAction(STNode queryPipeline, STNode selectClause, boolean isRhsExpr) {\nif (selectClause != null) {\nqueryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,\nDiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);\n}\nstartContext(ParserRuleContext.DO_CLAUSE);\nSTNode doKeyword = parseDoKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nSTNode limitClause = parseLimitClause(isRhsExpr);\nreturn STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt, limitClause);\n}\n/**\n* Parse 'do' keyword.\n*\n* @return do keyword node\n*/\nprivate STNode parseDoKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DO_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DO_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse optional field access or xml optional attribute access expression.\n*
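Note how parseQueryAction recovers from an illegal select clause: instead of aborting, it clones the query pipeline with the clause attached as trailing invalid-node minutiae plus a diagnostic. A toy model of that idea, using invented record types rather than the real SyntaxErrors API:

```java
import java.util.ArrayList;
import java.util.List;

// Toy model of "attach the unexpected node to its neighbour and record a
// diagnostic" recovery; every type here is invented for illustration.
public class InvalidNodeSketch {
    record Node(String text, List<String> trailingInvalid, List<String> diagnostics) {}

    static Node cloneWithTrailingInvalid(Node node, String invalid, String diagnostic) {
        List<String> inv = new ArrayList<>(node.trailingInvalid());
        inv.add(invalid);
        List<String> diags = new ArrayList<>(node.diagnostics());
        diags.add(diagnostic);
        return new Node(node.text(), inv, diags);
    }

    public static void main(String[] args) {
        Node pipeline = new Node("from x in xs", new ArrayList<>(), new ArrayList<>());
        System.out.println(cloneWithTrailingInvalid(pipeline, "select x",
                "ERROR_SELECT_CLAUSE_IN_QUERY_ACTION"));
    }
}
```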
\n* \n* optional-field-access-expr := expression ?. field-name\n*\n* xml-optional-attribute-access-expr := expression ?. xml-attribute-name\n*\n* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n*\n* xml-qualified-name := xml-namespace-prefix : identifier\n*\n* xml-namespace-prefix := identifier\n*
\n*\n* @param lhsExpr Preceding expression of the optional access\n* @return Parsed node\n*/\nprivate STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode optionalFieldAccessToken = parseOptionalChainingToken();\nSTNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);\nreturn STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);\n}\n/**\n* Parse optional chaining token.\n*\n* @return parsed node\n*/\nprivate STNode parseOptionalChainingToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse conditional expression.\n*
\n* conditional-expr := expression ? expression : expression\n*\n* @param lhsExpr Preceding expression of the question mark\n* @return Parsed node\n*/\nprivate STNode parseConditionalExpression(STNode lhsExpr) {\nstartContext(ParserRuleContext.CONDITIONAL_EXPRESSION);\nSTNode questionMark = parseQuestionMark();\nSTNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);\nSTNode nextToken = peek();\nSTNode endExpr;\nSTNode colon;\nif (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nSTQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;\nmiddleExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.modulePrefix);\ncolon = qualifiedNameRef.colon;\nendContext();\nendExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);\n} else {\ncolon = parseColon();\nendContext();\nendExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);\n}\nreturn STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);\n}\n/**\n* Parse enum declaration.\n*
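In `cond ? b:c` the middle expression may come back as a single qualified name reference `b:c`; parseConditionalExpression then reuses the reference's colon as the ternary colon and splits the name into middle and end expressions. A self-contained illustration of that split, with hypothetical record types in place of STQualifiedNameReferenceNode:

```java
// Hypothetical illustration: a qualified name "b:c" parsed as the middle of
// "cond ? b:c" is split into middle expr "b", the colon, and end expr "c".
public class ConditionalSplitSketch {
    record Qualified(String modulePrefix, String colon, String identifier) {}
    record Conditional(String lhs, String middle, String end) {}

    static Conditional split(String lhs, Qualified middle) {
        // the ':' inside the qualified name becomes the ternary colon
        return new Conditional(lhs, middle.modulePrefix(), middle.identifier());
    }

    public static void main(String[] args) {
        System.out.println(split("cond", new Qualified("b", ":", "c")));
        // Conditional[lhs=cond, middle=b, end=c]
    }
}
```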
\n* module-enum-decl :=\n* metadata\n* [public] enum identifier { enum-member (, enum-member)* }\n* enum-member := metadata identifier [= const-expr]\n*
\n*\n* @param metadata\n* @param qualifier\n*\n* @return Parsed enum node.\n*/\nprivate STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.MODULE_ENUM_DECLARATION);\nSTNode enumKeywordToken = parseEnumKeyword();\nSTNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);\nSTNode openBraceToken = parseOpenBrace();\nSTNode enumMemberList = parseEnumMemberList();\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nopenBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,\nDiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);\nreturn STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,\nopenBraceToken, enumMemberList, closeBraceToken);\n}\n/**\n* Parse 'enum' keyword.\n*\n* @return enum keyword node\n*/\nprivate STNode parseEnumKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ENUM_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ENUM_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse enum member list.\n*
\n* enum-member := metadata identifier [= const-expr]\n*
\n*\n* @return enum member list node.\n*/\nprivate STNode parseEnumMemberList() {\nstartContext(ParserRuleContext.ENUM_MEMBER_LIST);\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList enumMemberList = new ArrayList<>();\nSTNode enumMember = parseEnumMember();\nnextToken = peek();\nSTNode enumMemberRhs;\nwhile (nextToken.kind != SyntaxKind.CLOSE_BRACE_TOKEN) {\nenumMemberRhs = parseEnumMemberEnd(nextToken.kind);\nif (enumMemberRhs == null) {\nbreak;\n}\nenumMemberList.add(enumMember);\nenumMemberList.add(enumMemberRhs);\nenumMember = parseEnumMember();\nnextToken = peek();\n}\nenumMemberList.add(enumMember);\nendContext();\nreturn STNodeFactory.createNodeList(enumMemberList);\n}\n/**\n* Parse enum member.\n*
\n* enum-member := metadata identifier [= const-expr]\n*
\n*\n* @return Parsed enum member node.\n*/\nprivate STNode parseEnumMember() {\nSTToken nextToken = peek();\nSTNode metadata;\nswitch (nextToken.kind) {\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextToken.kind);\nbreak;\ndefault:\nmetadata = STNodeFactory.createEmptyNode();\n}\nSTNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);\nreturn parseEnumMemberRhs(metadata, identifierNode);\n}\nprivate STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {\nreturn parseEnumMemberRhs(peek().kind, metadata, identifierNode);\n}\nprivate STNode parseEnumMemberRhs(SyntaxKind nextToken, STNode metadata, STNode identifierNode) {\nSTNode equalToken, constExprNode;\nswitch (nextToken) {\ncase EQUAL_TOKEN:\nequalToken = parseAssignOp();\nconstExprNode = parseExpression();\nbreak;\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nequalToken = STNodeFactory.createEmptyNode();\nconstExprNode = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseEnumMemberRhs(solution.tokenKind, metadata, identifierNode);\n}\nreturn STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);\n}\nprivate STNode parseEnumMemberEnd() {\nreturn parseEnumMemberEnd(peek().kind);\n}\nprivate STNode parseEnumMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseEnumMemberEnd(solution.tokenKind);\n}\n}\n/**\n* Parse transaction statement.\n*
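Taken together, the enum helpers above accept `identifier [= const-expr]` members separated by commas. A toy, recovery-free sketch of that member grammar over plain strings (nothing here touches the real tokenizer):

```java
import java.util.ArrayList;
import java.util.List;

// Recovery-free sketch of the enum-member grammar handled above:
// enum-member := identifier [= const-expr], separated by commas.
public class EnumMemberSketch {
    static List<String> parseMembers(String body) {
        List<String> members = new ArrayList<>();
        for (String part : body.split(",")) {
            String[] kv = part.split("=", 2);
            String name = kv[0].trim();
            if (name.isEmpty()) {
                continue; // tolerate a trailing comma
            }
            members.add(kv.length == 2 ? name + " = " + kv[1].trim() : name);
        }
        return members;
    }

    public static void main(String[] args) {
        System.out.println(parseMembers("RED, GREEN = \"g\", BLUE"));
        // [RED, GREEN = "g", BLUE]
    }
}
```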
\n* transaction-stmt := \"transaction\" block-stmt ;\n*\n* @return Transaction statement node\n*/\nprivate STNode parseTransactionStatement() {\nstartContext(ParserRuleContext.TRANSACTION_STMT);\nSTNode transactionKeyword = parseTransactionKeyword();\nSTNode blockStmt = parseBlockNode();\nendContext();\nreturn STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt);\n}\n/**\n* Parse transaction keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseTransactionKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TRANSACTION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse commit action.\n*
\n* commit-action := \"commit\"\n*\n* @return Commit action node\n*/\nprivate STNode parseCommitAction() {\nSTNode commitKeyword = parseCommitKeyword();\nreturn STNodeFactory.createCommitActionNode(commitKeyword);\n}\n/**\n* Parse commit keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseCommitKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COMMIT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMMIT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse retry statement.\n*
\n* \n* retry-stmt := \"retry\" retry-spec block-stmt\n*\n* retry-spec := [type-parameter] [ \"(\" arg-list \")\" ]\n*
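Both bracketed parts of retry-spec are optional, so the retry helpers that follow commit to each component only after seeing its first token: `<` opens a type parameter, `(` an arg list, and `{` or `transaction` means both were omitted. A hedged sketch of that one-token lookahead, with strings standing in for token kinds:

```java
// Sketch of the lookahead dispatch in parseRetryKeywordRhs /
// parseRetryTypeParamRhs; token handling is deliberately simplified.
public class RetrySpecSketch {
    static String classify(String next) {
        switch (next) {
            case "<":           return "type-parameter, then the rest of retry-spec";
            case "(":           return "parenthesized arg list, then the retry body";
            case "{":           return "block-stmt body, spec omitted";
            case "transaction": return "transaction-stmt body, spec omitted";
            default:            return "recover";
        }
    }

    public static void main(String[] args) {
        System.out.println(classify("<"));
        System.out.println(classify("transaction"));
    }
}
```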
\n*\n* @return Retry statement node\n*/\nprivate STNode parseRetryStatement() {\nstartContext(ParserRuleContext.RETRY_STMT);\nSTNode retryKeyword = parseRetryKeyword();\nSTNode retryStmt = parseRetryKeywordRhs(retryKeyword);\nendContext();\nreturn retryStmt;\n}\nprivate STNode parseRetryKeywordRhs(STNode retryKeyword) {\nreturn parseRetryKeywordRhs(peek().kind, retryKeyword);\n}\nprivate STNode parseRetryKeywordRhs(SyntaxKind nextTokenKind, STNode retryKeyword) {\nswitch (nextTokenKind) {\ncase LT_TOKEN:\nSTNode typeParam = parseTypeParameter();\nreturn parseRetryTypeParamRhs(retryKeyword, typeParam);\ncase OPEN_PAREN_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\ntypeParam = STNodeFactory.createEmptyNode();\nreturn parseRetryTypeParamRhs(nextTokenKind, retryKeyword, typeParam);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRetryKeywordRhs(solution.tokenKind, retryKeyword);\n}\n}\nprivate STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {\nreturn parseRetryTypeParamRhs(peek().kind, retryKeyword, typeParam);\n}\nprivate STNode parseRetryTypeParamRhs(SyntaxKind nextTokenKind, STNode retryKeyword, STNode typeParam) {\nSTNode args;\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nargs = parseParenthesizedArgList();\nbreak;\ncase OPEN_BRACE_TOKEN:\ncase TRANSACTION_KEYWORD:\nargs = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);\nreturn parseRetryTypeParamRhs(solution.tokenKind, retryKeyword, typeParam);\n}\nSTNode blockStmt = parseRetryBody();\nreturn STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt);\n}\nprivate STNode parseRetryBody() {\nreturn parseRetryBody(peek().kind);\n}\nprivate STNode parseRetryBody(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\nreturn parseBlockNode();\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionStatement();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.RETRY_BODY);\nreturn parseRetryBody(solution.tokenKind);\n}\n}\n/**\n* Parse retry keyword.\n*\n* @return parsed node\n*/\nprivate STNode parseRetryKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETRY_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETRY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse transaction statement.\n*
\n* rollback-stmt := \"rollback\" [expression] \";\"\n*\n* @return Rollback statement node\n*/\nprivate STNode parseRollbackStatement() {\nstartContext(ParserRuleContext.ROLLBACK_STMT);\nSTNode rollbackKeyword = parseRollbackKeyword();\nSTNode expression;\nif (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {\nexpression = STNodeFactory.createEmptyNode();\n} else {\nexpression = parseExpression();\n}\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);\n}\n/**\n* Parse rollback keyword.\n*\n* @return Rollback keyword node\n*/\nprivate STNode parseRollbackKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ROLLBACK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse transactional expression.\n*
\n* transactional-expr := \"transactional\"\n*\n* @return Transactional expression node\n*/\nprivate STNode parseTransactionalExpression() {\nSTNode transactionalKeyword = parseTransactionalKeyword();\nreturn STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);\n}\n/**\n* Parse transactional keyword.\n*\n* @return Transactional keyword node\n*/\nprivate STNode parseTransactionalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse service-constructor-expr.\n*
\n* \n* service-constructor-expr := [annots] service service-body-block\n*\n* service-body-block := { service-method-defn* }\n*\n* service-method-defn := metadata [resource] function identifier function-signature method-defn-body\n*
\n*\n* @param annots Annotations\n* @return Service constructor expression node\n*/\nprivate STNode parseServiceConstructorExpression(STNode annots) {\nstartContext(ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION);\nSTNode serviceKeyword = parseServiceKeyword();\nSTNode serviceBody = parseServiceBody();\nendContext();\nreturn STNodeFactory.createServiceConstructorExpressionNode(annots, serviceKeyword, serviceBody);\n}\n/**\n* Parse base16 literal.\n*
\n* \n* byte-array-literal := Base16Literal | Base64Literal\n*\n* Base16Literal := base16 WS ` HexGroup* WS `\n*\n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*
\n*\n* @param kind byte array literal kind\n* @return parsed node\n*/\nprivate STNode parseByteArrayLiteral(SyntaxKind kind) {\nSTNode type;\nif (kind == SyntaxKind.BASE16_KEYWORD) {\ntype = parseBase16Keyword();\n} else {\ntype = parseBase64Keyword();\n}\nSTNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\nSTNode content = parseByteArrayContent(kind);\nreturn parseByteArrayLiteral(kind, type, startingBackTick, content);\n}\n/**\n* Parse byte array literal.\n*\n* @param baseKind indicates the SyntaxKind base16 or base64\n* @param typeKeyword keyword token, possible values are `base16` and `base64`\n* @param startingBackTick starting backtick token\n* @param byteArrayContent byte array literal content to be validated\n* @return parsed byte array literal node\n*/\nprivate STNode parseByteArrayLiteral(SyntaxKind baseKind, STNode typeKeyword, STNode startingBackTick,\nSTNode byteArrayContent) {\nSTNode content = STNodeFactory.createEmptyNode();\nSTNode newStartingBackTick = startingBackTick;\nSTNodeList items = (STNodeList) byteArrayContent;\nif (items.size() == 1) {\nSTNode item = items.get(0);\nif (baseKind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (baseKind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else if (item.kind != SyntaxKind.TEMPLATE_STRING) {\nnewStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n} else {\ncontent = item;\n}\n} else if (items.size() > 1) {\nSTNode clonedStartingBackTick = startingBackTick;\nfor (int index = 0; index < items.size(); index++) {\nSTNode item = items.get(index);\nclonedStartingBackTick =\nSyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);\n}\nnewStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,\nDiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);\n}\nSTNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\nreturn STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);\n}\n/**\n* Parse base16 keyword.\n*\n* @return base16 keyword node\n*/\nprivate STNode parseBase16Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE16_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BASE16_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse base64 keyword.\n*\n* @return base64 keyword node\n*/\nprivate STNode parseBase64Keyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BASE64_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BASE64_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Validate and parse byte array literal content.\n* An error is reported, if the content is invalid.\n*\n* @param kind byte array literal kind\n* @return parsed node\n*/\nprivate STNode parseByteArrayContent(SyntaxKind kind) {\nSTToken nextToken = peek();\nList items = new ArrayList<>();\nwhile (!isEndOfBacktickContent(nextToken.kind)) {\nSTNode content = 
parseTemplateItem();\nitems.add(content);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(items);\n}\n/**\n* Validate base16 literal content.\n*
\n* \n* Base16Literal := base16 WS ` HexGroup* WS `\n*\n* HexGroup := WS HexDigit WS HexDigit\n*\n* WS := WhiteSpaceChar*\n*\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase16LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint hexDigitCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ndefault:\nif (isHexDigit(c)) {\nhexDigitCount++;\n} else {\nreturn false;\n}\nbreak;\n}\n}\nreturn hexDigitCount % 2 == 0;\n}\n/**\n* Validate base64 literal content.\n*
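The base16 check above skips the four whitespace characters and requires an even number of hex digits. The same rule re-run standalone, with the LexerTerminals whitespace constants inlined as an assumption:

```java
// Standalone copy of the base16 content rule: whitespace is ignored, every
// other character must be a hex digit, and the digits must pair up.
public class Base16Check {
    static boolean isValid(String content) {
        int hexDigits = 0;
        for (char c : content.toCharArray()) {
            if (c == '\t' || c == '\n' || c == '\r' || c == ' ') {
                continue;
            }
            if (Character.digit(c, 16) < 0) {
                return false;
            }
            hexDigits++;
        }
        return hexDigits % 2 == 0;
    }

    public static void main(String[] args) {
        System.out.println(isValid("aa bb cc"));  // true : three hex pairs
        System.out.println(isValid("aab"));       // false: odd digit count
        System.out.println(isValid("ag"));        // false: 'g' is not hex
    }
}
```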
\n* \n* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n*\n* Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char\n*\n* PaddedBase64Group :=\n* WS Base64Char WS Base64Char WS Base64Char WS PaddingChar\n* | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar\n*\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*\n* PaddingChar := =\n*\n* WS := WhiteSpaceChar*\n*\n* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n*
\n*\n* @param content the string surrounded by the backticks\n* @return true, if the string content is valid. false otherwise.\n*/\nstatic boolean isValidBase64LiteralContent(String content) {\nchar[] charArray = content.toCharArray();\nint base64CharCount = 0;\nint paddingCharCount = 0;\nfor (char c : charArray) {\nswitch (c) {\ncase LexerTerminals.TAB:\ncase LexerTerminals.NEWLINE:\ncase LexerTerminals.CARRIAGE_RETURN:\ncase LexerTerminals.SPACE:\nbreak;\ncase LexerTerminals.EQUAL:\npaddingCharCount++;\nbreak;\ndefault:\nif (isBase64Char(c)) {\nif (paddingCharCount == 0) {\nbase64CharCount++;\n} else {\nreturn false;\n}\n} else {\nreturn false;\n}\nbreak;\n}\n}\nif (paddingCharCount > 2) {\nreturn false;\n} else if (paddingCharCount == 0) {\nreturn base64CharCount % 4 == 0;\n} else {\nreturn base64CharCount % 4 == 4 - paddingCharCount;\n}\n}\n/**\n*
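The final branch above is the base64 length rule: with k padding characters (k = 1 or 2), the non-padding count n must satisfy n % 4 == 4 - k, since a padded group carries 3 or 2 real characters. That arithmetic checked in isolation:

```java
// The length rule enforced by isValidBase64LiteralContent, in isolation:
// n base64 chars plus k '=' chars are well-formed when k == 0 and n % 4 == 0,
// or 1 <= k <= 2 and n % 4 == 4 - k.
public class Base64LengthRule {
    static boolean wellFormed(int base64Chars, int padding) {
        if (padding > 2) {
            return false;
        }
        if (padding == 0) {
            return base64Chars % 4 == 0;
        }
        return base64Chars % 4 == 4 - padding;
    }

    public static void main(String[] args) {
        System.out.println(wellFormed(4, 0)); // true : one full group
        System.out.println(wellFormed(3, 1)); // true : shape of "QUJ="
        System.out.println(wellFormed(2, 2)); // true : shape of "QQ=="
        System.out.println(wellFormed(3, 2)); // false: 3 % 4 != 2
    }
}
```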
\n* Check whether a given char is a base64 char.\n*
\n* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n*\n* @param c character to check\n* @return true, if the character represents a base64 char. false otherwise.\n*/\nstatic boolean isBase64Char(int c) {\nif ('a' <= c && c <= 'z') {\nreturn true;\n}\nif ('A' <= c && c <= 'Z') {\nreturn true;\n}\nif (c == '+' || c == '/') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isHexDigit(int c) {\nif ('a' <= c && c <= 'f') {\nreturn true;\n}\nif ('A' <= c && c <= 'F') {\nreturn true;\n}\nreturn isDigit(c);\n}\nstatic boolean isDigit(int c) {\nreturn ('0' <= c && c <= '9');\n}\n/**\n* Parse xml filter expression.\n*
\n* xml-filter-expr := expression .< xml-name-pattern >\n*\n* @param lhsExpr Preceding expression of .< token\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpression(STNode lhsExpr) {\nSTNode xmlNamePatternChain = parseXMLFilterExpressionRhs();\nreturn STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);\n}\n/**\n* Parse xml filter expression rhs.\n*
\n* filter-expression-rhs := .< xml-name-pattern >\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLFilterExpressionRhs() {\nSTNode dotLTToken = parseDotLTToken();\nreturn parseXMLNamePatternChain(dotLTToken);\n}\n/**\n* Parse xml name pattern chain.\n*
\n* \n* xml-name-pattern-chain := filter-expression-rhs | xml-element-children-step | xml-element-descendants-step\n*\n* filter-expression-rhs := .< xml-name-pattern >\n*\n* xml-element-children-step := /< xml-name-pattern >\n*\n* xml-element-descendants-step := /**\/\n*
\n*\n* @param startToken Preceding token of xml name pattern\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePatternChain(STNode startToken) {\nstartContext(ParserRuleContext.XML_NAME_PATTERN);\nSTNode xmlNamePattern = parseXMLNamePattern();\nSTNode gtToken = parseGTToken();\nendContext();\nstartToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,\nDiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);\nreturn STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);\n}\n/**\n* Parse .< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDotLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse xml name pattern.\n*
\n* xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLNamePattern() {\nList xmlAtomicNamePatternList = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfXMLNamePattern(nextToken.kind)) {\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nSTNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\nSTNode separator;\nwhile (!isEndOfXMLNamePattern(peek().kind)) {\nseparator = parseXMLNamePatternSeparator();\nif (separator == null) {\nbreak;\n}\nxmlAtomicNamePatternList.add(separator);\nxmlAtomicNamePattern = parseXMLAtomicNamePattern();\nxmlAtomicNamePatternList.add(xmlAtomicNamePattern);\n}\nreturn STNodeFactory.createNodeList(xmlAtomicNamePatternList);\n}\nprivate boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ncase IDENTIFIER_TOKEN:\ncase ASTERISK_TOKEN:\ncase COLON_TOKEN:\ndefault:\nreturn false;\n}\n}\nprivate STNode parseXMLNamePatternSeparator() {\nSTToken token = peek();\nswitch (token.kind) {\ncase PIPE_TOKEN:\nreturn consume();\ncase GT_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution sol = recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);\nif (sol.tokenKind == SyntaxKind.GT_TOKEN) {\nreturn null;\n}\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse xml atomic name pattern.\n*
\n* \n* xml-atomic-name-pattern :=\n* *\n* | identifier\n* | xml-namespace-prefix : identifier\n* | xml-namespace-prefix : *\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseXMLAtomicNamePattern() {\nstartContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);\nSTNode atomicNamePattern = parseXMLAtomicNamePatternBody();\nendContext();\nreturn atomicNamePattern;\n}\nprivate STNode parseXMLAtomicNamePatternBody() {\nSTToken token = peek();\nSTNode identifier;\nswitch (token.kind) {\ncase ASTERISK_TOKEN:\nreturn consume();\ncase IDENTIFIER_TOKEN:\nidentifier = consume();\nbreak;\ndefault:\nSolution sol = recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nif (sol.recoveredNode.kind == SyntaxKind.ASTERISK_TOKEN) {\nreturn sol.recoveredNode;\n}\nidentifier = sol.recoveredNode;\nbreak;\n}\nreturn parseXMLAtomicNameIdentifier(identifier);\n}\nprivate STNode parseXMLAtomicNameIdentifier(STNode identifier) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COLON_TOKEN) {\nSTNode colon = consume();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {\nSTToken endToken = consume();\nreturn STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);\n}\n}\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\n/**\n* Parse xml step expression.\n*
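parseXMLAtomicNamePatternBody accepts exactly four shapes: `*`, `name`, `prefix:name`, and `prefix:*`. The same acceptance test restated as a standalone regex predicate; the identifier character class is an assumption here, since the real parser works on lexer tokens:

```java
import java.util.regex.Pattern;

// The four xml-atomic-name-pattern shapes, restated as a regex predicate.
// [A-Za-z_][A-Za-z0-9_]* is an assumed identifier shape, for illustration only.
public class AtomicNamePatternCheck {
    private static final Pattern SHAPE =
            Pattern.compile("\\*|[A-Za-z_][A-Za-z0-9_]*(:([A-Za-z_][A-Za-z0-9_]*|\\*))?");

    static boolean matches(String s) {
        return SHAPE.matcher(s).matches();
    }

    public static void main(String[] args) {
        System.out.println(matches("*"));        // true
        System.out.println(matches("item"));     // true
        System.out.println(matches("ns:item"));  // true
        System.out.println(matches("ns:*"));     // true
        System.out.println(matches(":item"));    // false
    }
}
```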
\n* xml-step-expr := expression xml-step-start\n*\n* @param lhsExpr Preceding expression of /*, /<, or /**\/< token\n* @return Parsed node\n*/\nprivate STNode parseXMLStepExpression(STNode lhsExpr) {\nSTNode xmlStepStart = parseXMLStepStart();\nreturn STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);\n}\n/**\n* Parse xml step start.\n*
\n* \n* xml-step-start :=\n* xml-all-children-step\n* | xml-element-children-step\n* | xml-element-descendants-step\n*\n* xml-all-children-step := /*\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseXMLStepStart() {\nSTToken token = peek();\nSTNode startToken;\nswitch (token.kind) {\ncase SLASH_ASTERISK_TOKEN:\nreturn consume();\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\nstartToken = parseDoubleSlashDoubleAsteriskLTToken();\nbreak;\ncase SLASH_LT_TOKEN:\ndefault:\nstartToken = parseSlashLTToken();\nbreak;\n}\nreturn parseXMLNamePatternChain(startToken);\n}\n/**\n* Parse /< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseSlashLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse /< token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDoubleSlashDoubleAsteriskLTToken() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse match statement.\n*
\n* match-stmt := match action-or-expr { match-clause+ }\n*\n* @return Match statement\n*/\nprivate STNode parseMatchStatement() {\nstartContext(ParserRuleContext.MATCH_STMT);\nSTNode matchKeyword = parseMatchKeyword();\nSTNode actionOrExpr = parseActionOrExpression();\nstartContext(ParserRuleContext.MATCH_BODY);\nSTNode openBrace = parseOpenBrace();\nSTNode matchClauses = parseMatchClauses();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nendContext();\nreturn STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace);\n}\n/**\n* Parse match keyword.\n*\n* @return Match keyword node\n*/\nprivate STNode parseMatchKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.MATCH_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse match clauses list.\n*\n* @return Match clauses list\n*/\nprivate STNode parseMatchClauses() {\nList matchClauses = new ArrayList<>();\nwhile (!isEndOfMatchClauses(peek().kind)) {\nSTNode clause = parseMatchClause();\nmatchClauses.add(clause);\n}\nreturn STNodeFactory.createNodeList(matchClauses);\n}\nprivate boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse a single match match clause.\n*
\n* \n* match-clause := match-pattern-list [match-guard] => block-stmt\n*\n* match-guard := if expression\n*
\n*\n* @return A match clause\n*/\nprivate STNode parseMatchClause() {\nSTNode matchPatterns = parseMatchPatternList();\nSTNode matchGuard = parseMatchGuard();\nSTNode rightDoubleArrow = parseDoubleRightArrow();\nSTNode blockStmt = parseBlockNode();\nreturn STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);\n}\n/**\n* Parse match guard.\n*
\n* match-guard := if expression\n*\n* @return Match guard\n*/\nprivate STNode parseMatchGuard() {\nSTToken nextToken = peek();\nreturn parseMatchGuard(nextToken.kind);\n}\nprivate STNode parseMatchGuard(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IF_KEYWORD:\nSTNode ifKeyword = parseIfKeyword();\nSTNode expr = parseExpression(peek().kind, DEFAULT_OP_PRECEDENCE, true, false, true, false);\nreturn STNodeFactory.createMatchGuardNode(ifKeyword, expr);\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchGuard(solution.tokenKind);\n}\n}\n/**\n* Parse match patterns list.\n*
\n* match-pattern-list := match-pattern (| match-pattern)*\n*\n* @return Match patterns list\n*/\nprivate STNode parseMatchPatternList() {\nstartContext(ParserRuleContext.MATCH_PATTERN);\nList matchClauses = new ArrayList<>();\nwhile (!isEndOfMatchPattern(peek().kind)) {\nSTNode clause = parseMatchPattern();\nif (clause == null) {\nbreak;\n}\nmatchClauses.add(clause);\nSTNode separator = parseMatchPatternEnd();\nif (separator == null) {\nbreak;\n}\nmatchClauses.add(separator);\n}\nendContext();\nreturn STNodeFactory.createNodeList(matchClauses);\n}\nprivate boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse match pattern.\n*
\n* \n* match-pattern := var binding-pattern\n* | wildcard-match-pattern\n* | const-pattern\n* | list-match-pattern\n* | mapping-match-pattern\n* | functional-match-pattern\n* \n*\n* @return Match pattern\n*/\nprivate STNode parseMatchPattern() {\nSTToken nextToken = peek();\nreturn parseMatchPattern(nextToken.kind);\n}\nprivate STNode parseMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_LITERAL:\nreturn parseSimpleConstExpr();\ncase IDENTIFIER_TOKEN:\nSTNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);\nreturn parseFunctionalMatchPatternOrConsPattern(typeRefOrConstExpr);\ncase VAR_KEYWORD:\nreturn parseVarTypedBindingPattern();\ncase OPEN_BRACKET_TOKEN:\nreturn parseListMatchPattern();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingMatchPattern();\ncase ERROR_KEYWORD:\nreturn parseFunctionalMatchPattern(consume());\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MATCH_PATTERN_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchPattern(solution.tokenKind);\n}\n}\nprivate STNode parseMatchPatternEnd() {\nSTToken nextToken = peek();\nreturn parseMatchPatternEnd(nextToken.kind);\n}\nprivate STNode parseMatchPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nreturn parsePipeToken();\ncase IF_KEYWORD:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MATCH_PATTERN_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMatchPatternEnd(solution.tokenKind);\n}\n}\n/**\n* Parse var typed binding pattern.\n*
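parseMatchPattern is a pure first-token dispatch: literals start a const-pattern, `var` a binding pattern, `[` and `{` list and mapping patterns, and `error` a functional pattern. The shape of that dispatch in isolation, with a mock enum in place of the real SyntaxKind values:

```java
// Mock of the first-token dispatch in parseMatchPattern; token kinds and
// pattern names are stand-ins, not the real SyntaxKind constants.
public class MatchPatternDispatch {
    enum Tok { STRING_LITERAL, VAR_KEYWORD, OPEN_BRACKET, OPEN_BRACE, ERROR_KEYWORD, IDENTIFIER }

    static String dispatch(Tok next) {
        switch (next) {
            case STRING_LITERAL: return "const-pattern";
            case VAR_KEYWORD:    return "var binding-pattern";
            case OPEN_BRACKET:   return "list-match-pattern";
            case OPEN_BRACE:     return "mapping-match-pattern";
            case ERROR_KEYWORD:  return "functional-match-pattern";
            default:             return "const-pattern or functional-match-pattern";
        }
    }

    public static void main(String[] args) {
        System.out.println(dispatch(Tok.OPEN_BRACKET)); // list-match-pattern
    }
}
```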
\n* var binding-pattern\n*
\n*\n* @return Parsed typed binding pattern node\n*/\nprivate STNode parseVarTypedBindingPattern() {\nSTNode varKeyword = parseVarKeyword();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createTypedBindingPatternNode(varKeyword, bindingPattern);\n}\n/**\n* Parse var keyword.\n*\n* @return Var keyword node\n*/\nprivate STNode parseVarKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.VAR_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(nextToken, ParserRuleContext.VAR_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse list match pattern.\n*
\n* \n* list-match-pattern := [ list-member-match-patterns ]\n* list-member-match-patterns :=\n* match-pattern (, match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n* \n*
\n*\n* @return Parsed list match pattern node\n*/\nprivate STNode parseListMatchPattern() {\nstartContext(ParserRuleContext.LIST_MATCH_PATTERN);\nSTNode openBracketToken = parseOpenBracket();\nList matchPatternList = new ArrayList<>();\nSTNode restMatchPattern = null;\nwhile (!isEndOfListMatchPattern()) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {\nrestMatchPattern = parseRestMatchPattern();\nbreak;\n}\nSTNode matchPatternListMember = parseMatchPattern();\nmatchPatternList.add(matchPatternListMember);\nSTNode matchPatternMemberRhs = parseListMatchPatternMemberRhs();\nif (matchPatternMemberRhs != null) {\nmatchPatternList.add(matchPatternMemberRhs);\n} else {\nbreak;\n}\n}\nif (restMatchPattern == null) {\nrestMatchPattern = STNodeFactory.createEmptyNode();\n}\nSTNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);\nSTNode closeBracketToken = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, restMatchPattern,\ncloseBracketToken);\n}\npublic boolean isEndOfListMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse rest match pattern.\n*
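In the member loop above, `...` switches parsing into rest-pattern mode and ends member collection. A string-token mock of that control flow, with `... var name` collapsed into two tokens for brevity:

```java
import java.util.ArrayList;
import java.util.List;

// Mock of parseListMatchPattern's loop: members accumulate until ']' or a
// "..." rest pattern, which is captured separately and terminates the loop.
public class ListPatternSketch {
    record Result(List<String> members, String rest) {}

    static Result parse(String[] tokens) {
        List<String> members = new ArrayList<>();
        String rest = null;
        for (int i = 0; i < tokens.length && !tokens[i].equals("]"); i++) {
            if (tokens[i].equals("...")) {
                rest = tokens[i + 1]; // the rest variable name
                break;
            }
            if (!tokens[i].equals(",")) {
                members.add(tokens[i]);
            }
        }
        return new Result(members, rest);
    }

    public static void main(String[] args) {
        System.out.println(parse(new String[] {"1", ",", "2", ",", "...", "r", "]"}));
        // Result[members=[1, 2], rest=r]
    }
}
```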
\n* \n* rest-match-pattern := ... var variable-name\n* \n*
\n*\n* @return Parsed rest match pattern node\n*/\nprivate STNode parseRestMatchPattern() {\nstartContext(ParserRuleContext.REST_MATCH_PATTERN);\nSTNode ellipsisToken = parseEllipsis();\nSTNode varKeywordToken = parseVarKeyword();\nSTNode variableName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);\nreturn STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);\n}\nprivate STNode parseListMatchPatternMemberRhs() {\nreturn parseListMatchPatternMemberRhs(peek().kind);\n}\nprivate STNode parseListMatchPatternMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListMatchPatternMemberRhs(solution.tokenKind);\n}\n}\n/**\n* Parse mapping match pattern.\n*
\n* mapping-match-pattern := { field-match-patterns }\n*\n* field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]\n* | [ rest-match-pattern ]\n*\n* field-match-pattern := field-name : match-pattern\n*\n* rest-match-pattern := ... var variable-name\n*
\n*\n* @return Parsed Node.\n*/\nprivate STNode parseMappingMatchPattern() {\nstartContext(ParserRuleContext.MAPPING_MATCH_PATTERN);\nSTNode openBraceToken = parseOpenBrace();\nList fieldMatchPatternList = new ArrayList<>();\nSTNode restMatchPattern = null;\nboolean isEndOfFields = false;\nwhile (!isEndOfMappingMatchPattern()) {\nSTNode fieldMatchPatternMember = parseFieldMatchPatternMember();\nif (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {\nrestMatchPattern = fieldMatchPatternMember;\nisEndOfFields = true;\nbreak;\n}\nfieldMatchPatternList.add(fieldMatchPatternMember);\nSTNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();\nif (fieldMatchPatternRhs != null) {\nfieldMatchPatternList.add(fieldMatchPatternRhs);\n} else {\nbreak;\n}\n}\nSTNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();\nwhile (isEndOfFields && fieldMatchPatternRhs != null) {\nSTNode invalidField = parseFieldMatchPatternMember();\nrestMatchPattern =\nSyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, fieldMatchPatternRhs);\nrestMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, invalidField);\nrestMatchPattern = SyntaxErrors.addDiagnostic(restMatchPattern,\nDiagnosticErrorCode.ERROR_MORE_FIELD_MATCH_PATTERNS_AFTER_REST_FIELD);\nfieldMatchPatternRhs = parseFieldMatchPatternRhs();\n}\nif (restMatchPattern == null) {\nrestMatchPattern = STNodeFactory.createEmptyNode();\n}\nSTNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);\nSTNode closeBraceToken = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, restMatchPattern,\ncloseBraceToken);\n}\nprivate STNode parseFieldMatchPatternMember() {\nreturn parseFieldMatchPatternMember(peek().kind);\n}\nprivate STNode parseFieldMatchPatternMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseFieldMatchPattern();\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldMatchPatternMember(solution.tokenKind);\n}\n}\n/**\n* Parse filed match pattern.\n*
\n* field-match-pattern := field-name : match-pattern\n*
\n*\n* @return Parsed field match pattern node\n*/\npublic STNode parseFieldMatchPattern() {\nSTNode fieldNameNode = parseVariableName();\nSTNode colonToken = parseColon();\nSTNode matchPattern = parseMatchPattern();\nreturn STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);\n}\npublic boolean isEndOfMappingMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseFieldMatchPatternRhs() {\nreturn parseFieldMatchPatternRhs(peek().kind);\n}\nprivate STNode parseFieldMatchPatternRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldMatchPatternRhs(solution.tokenKind);\n}\n}\nprivate STNode parseFunctionalMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {\nreturn parseFunctionalMatchPatternOrConsPattern(peek().kind, typeRefOrConstExpr);\n}\nprivate STNode parseFunctionalMatchPatternOrConsPattern(SyntaxKind nextToken, STNode typeRefOrConstExpr) {\nswitch (nextToken) {\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalMatchPattern(typeRefOrConstExpr);\ndefault:\nif (isMatchPatternEnd(peek().kind)) {\nreturn typeRefOrConstExpr;\n}\nSolution solution = recover(peek(), ParserRuleContext.FUNC_MATCH_PATTERN_OR_CONST_PATTERN,\ntypeRefOrConstExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFunctionalMatchPatternOrConsPattern(solution.tokenKind, typeRefOrConstExpr);\n}\n}\nprivate boolean isMatchPatternEnd(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase PIPE_TOKEN:\ncase IF_KEYWORD:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse functional match pattern.\n*
\n* functional-match-pattern := functionally-constructible-type-reference ( arg-list-match-pattern )\n*\n* functionally-constructible-type-reference := error | type-reference\n*\n* type-reference := identifier | qualified-identifier\n*\n* arg-list-match-pattern := positional-arg-match-patterns [, other-arg-match-patterns]\n* | other-arg-match-patterns\n*
\n*\n* @return Parsed functional match pattern node.\n*/\nprivate STNode parseFunctionalMatchPattern(STNode typeRef) {\nstartContext(ParserRuleContext.FUNCTIONAL_MATCH_PATTERN);\nSTNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode argListMatchPatternNode = parseArgListMatchPatterns();\nSTNode closeParenthesisToken = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createFunctionalMatchPatternNode(typeRef, openParenthesisToken, argListMatchPatternNode,\ncloseParenthesisToken);\n}\nprivate STNode parseArgListMatchPatterns() {\nList argListMatchPatterns = new ArrayList<>();\nSyntaxKind lastValidArgKind = SyntaxKind.IDENTIFIER_TOKEN;\nwhile (!isEndOfFunctionalMatchPattern()) {\nSTNode currentArg = parseArgMatchPattern();\nDiagnosticErrorCode errorCode = validateArgMatchPatternOrder(lastValidArgKind, currentArg.kind);\nif (errorCode == null) {\nargListMatchPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);\n}\nSTNode argRhs = parseArgMatchPatternRhs();\nif (argRhs == null) {\nbreak;\n}\nif (errorCode == null) {\nargListMatchPatterns.add(argRhs);\n} else {\nupdateLastNodeInListWithInvalidNode(argListMatchPatterns, argRhs, null);\n}\n}\nreturn STNodeFactory.createNodeList(argListMatchPatterns);\n}\nprivate boolean isEndOfFunctionalMatchPattern() {\nswitch (peek().kind) {\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse arg match patterns.\n* \n* arg-match-pattern := match-pattern | named-arg-match-pattern | rest-match-pattern\n* \n*
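parseArgListMatchPatterns above enforces argument order through validateArgMatchPatternOrder, defined further below: once a named arg appears, only named or rest args may follow, and nothing may follow a rest arg; offenders are folded into the previous node as invalid minutiae. The ordering rule by itself:

```java
// The ordering rule applied by validateArgMatchPatternOrder, restated over a
// stand-in enum: positional args, then named args, then at most one rest arg.
public class ArgOrderRule {
    enum Kind { POSITIONAL, NAMED, REST }

    static String validate(Kind prev, Kind current) {
        if (prev == Kind.REST) {
            return "ERROR_ARG_FOLLOWED_BY_REST_ARG";
        }
        if (prev == Kind.NAMED && current == Kind.POSITIONAL) {
            return "ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG";
        }
        return null; // ordering is fine
    }

    public static void main(String[] args) {
        System.out.println(validate(Kind.NAMED, Kind.POSITIONAL)); // rejected
        System.out.println(validate(Kind.POSITIONAL, Kind.NAMED)); // null: fine
    }
}
```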
\n*\n* @return parsed arg match pattern node.\n*/\nprivate STNode parseArgMatchPattern() {\nreturn parseArgMatchPattern(peek().kind);\n}\nprivate STNode parseArgMatchPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseNamedOrPositionalArgMatchPattern();\ncase ELLIPSIS_TOKEN:\nreturn parseRestMatchPattern();\ncase OPEN_PAREN_TOKEN:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_LITERAL:\ncase VAR_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseMatchPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgMatchPattern(solution.tokenKind);\n}\n}\nprivate STNode parseNamedOrPositionalArgMatchPattern() {\nSTNode identifier = parseIdentifier(ParserRuleContext.MATCH_PATTERN_START);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nreturn parseNamedArgMatchPattern(identifier);\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalMatchPattern(identifier);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn identifier;\n}\n}\n/**\n* Parses the next named arg match pattern.\n*
\n* named-arg-match-pattern := arg-name = match-pattern\n*
\n*\n* @return arg match pattern list node added the new arg match pattern\n*/\nprivate STNode parseNamedArgMatchPattern(STNode identifier) {\nstartContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);\nSTNode equalToken = parseAssignOp();\nSTNode matchPattern = parseMatchPattern();\nendContext();\nreturn STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);\n}\nprivate STNode parseArgMatchPatternRhs() {\nreturn parseArgMatchPatternRhs(peek().kind);\n}\nprivate STNode parseArgMatchPatternRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\ncase EOF_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN_RHS);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgMatchPatternRhs(solution.tokenKind);\n}\n}\nprivate DiagnosticErrorCode validateArgMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase NAMED_ARG_MATCH_PATTERN:\nif (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&\ncurrentArgKind != SyntaxKind.REST_MATCH_PATTERN) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_MATCH_PATTERN:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nbreak;\n}\nreturn errorCode;\n}\n/**\n* Parse markdown documentation.\n*\n* @return markdown documentation node\n*/\nprivate STNode parseMarkdownDocumentation() {\nList markdownDocLineList = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {\nSTToken documentationString = consume();\nSTNode markdownDocLines = parseDocumentationString(documentationString);\nmarkdownDocLineList.add(markdownDocLines);\nnextToken = peek();\n}\nSTNode arrangedMarkdownDocLines = rearrangeMarkdownDocumentationLines(markdownDocLineList);\nreturn STNodeFactory.createMarkdownDocumentationNode(arrangedMarkdownDocLines);\n}\n/**\n* Parse documentation string.\n*\n* @return markdown documentation line list node\n*/\nprivate STNode parseDocumentationString(STToken documentationStringToken) {\nList leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());\nTextDocument textDocument = TextDocuments.from(documentationStringToken.text());\nDocumentationLexer documentationLexer = new DocumentationLexer(textDocument.getCharacterReader(),\nleadingTriviaList);\nAbstractTokenReader tokenReader = new TokenReader(documentationLexer);\nDocumentationParser documentationParser = new DocumentationParser(tokenReader);\nreturn documentationParser.parse();\n}\nprivate List getLeadingTriviaList(STNode leadingMinutiaeNode) {\nList leadingTriviaList = new ArrayList<>();\nint bucketCount = leadingMinutiaeNode.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nleadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));\n}\nreturn leadingTriviaList;\n}\nprivate STNode rearrangeMarkdownDocumentationLines(List markdownDocLineList) {\nList arrangedDocLines = new ArrayList<>();\nfor (STNode markdownDocLines : markdownDocLineList) {\nint bucketCount = markdownDocLines.bucketCount();\nfor (int i = 0; i < bucketCount; i++) {\nSTNode markdownDocLine = markdownDocLines.childInBucket(i);\narrangedDocLines.add(markdownDocLine);\n}\n}\nreturn STNodeFactory.createNodeList(arrangedDocLines);\n}\n/**\n* Parse any statement that starts with a token that has ambiguity between 
being\n* a type-desc or an expression.\n*\n* @param annots Annotations\n* @return Statement node\n*/\nprivate STNode parseStmtStartsWithTypeOrExpr(SyntaxKind nextTokenKind, STNode annots) {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode typeOrExpr = parseTypedBindingPatternOrExpr(nextTokenKind, true);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);\n}\nprivate STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {\nif (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, finalKeyword, typedBindingPatternOrExpr, false);\n}\nSTNode expr = getExpression(typedBindingPatternOrExpr);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrExpr(nextToken.kind, allowAssignment);\n}\nprivate STNode parseTypedBindingPatternOrExpr(SyntaxKind nextTokenKind, boolean allowAssignment) {\nSTNode typeOrExpr;\nswitch (nextTokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseTypedBPOrExprStartsWithOpenParenthesis();\ncase FUNCTION_KEYWORD:\nreturn parseAnonFuncExprOrTypedBPWithFuncType();\ncase IDENTIFIER_TOKEN:\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase OPEN_BRACKET_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();\nreturn parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseActionOrExpressionInLhs(nextTokenKind, null);\n}\nreturn parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);\n}\n}\n/**\n* Parse the component after the ambiguous starting node. Ambiguous node could be either an expr\n* or a type-desc. The component followed by this ambiguous node could be the binding-pattern or\n* the expression-rhs.\n*\n* @param typeOrExpr Type desc or the expression\n* @param allowAssignment Flag indicating whether to allow assignment. 
i.e.: whether this is a\n* valid lvalue expression\n* @return Typed-binding-pattern node or an expression node\n*/\nprivate STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrExprRhs(nextToken.kind, typeOrExpr, allowAssignment);\n}\nprivate STNode parseTypedBindingPatternOrExprRhs(SyntaxKind nextTokenKind, STNode typeOrExpr,\nboolean allowAssignment) {\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc =\nSTNodeFactory.createUnionTypeDescriptorNode(typeOrExpr, pipe, typedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypedBPOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\nif (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nSTNode newTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(typeOrExpr, ampersand,\ntypedBP.typeDescriptor);\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n}\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypedBPOrExpr);\ncase SEMICOLON_TOKEN:\nif (isDefiniteExpr(typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\nif (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n}\nreturn typeOrExpr;\ncase EQUAL_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,\nParserRuleContext.AMBIGUOUS_STMT);\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeOrExpr);\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextTokenKind, typeOrExpr.kind)) {\nreturn typeOrExpr;\n}\nSTToken token = peek();\nSolution solution =\nrecover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternOrExprRhs(solution.tokenKind, typeOrExpr, allowAssignment);\n}\n}\nprivate STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) 
{\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\ntypeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);\nendContext();\nreturn parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\nprivate STNode parseTypedBPOrExprStartsWithOpenParenthesis() {\nSTNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();\nif (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {\nreturn parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);\n}\nreturn parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);\n}\nprivate boolean isDefiniteTypeDesc(SyntaxKind kind) {\nreturn kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;\n}\nprivate boolean isDefiniteExpr(SyntaxKind kind) {\nif (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn false;\n}\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n}\n/**\n* Parse type or expression that starts with open parenthesis. Possible options are:\n* 1) () - nil type-desc or nil-literal\n* 2) (T) - Parenthesized type-desc\n* 3) (expr) - Parenthesized expression\n* 4) (param, param, ..) - Anon function params\n*\n* @return Type-desc or expression node\n*/\nprivate STNode parseTypedDescOrExprStartsWithOpenParenthesis() {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nSTNode closeParen = parseCloseParenthesis();\nreturn parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);\n}\nSTNode typeOrExpr = parseTypeDescOrExpr();\nif (isAction(typeOrExpr)) {\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,\ncloseParen);\n}\nif (isExpression(typeOrExpr.kind)) {\nstartContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\nreturn parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, typeOrExpr, false);\n}\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeOrExpr, closeParen);\n}\n/**\n* Parse type-desc or expression. 
This method does not handle binding patterns.\n*\n* @return Type-desc node or expression node\n*/\nprivate STNode parseTypeDescOrExpr() {\nSTToken nextToken = peek();\nSTNode typeOrExpr;\nswitch (nextToken.kind) {\ncase OPEN_PAREN_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();\nbreak;\ncase FUNCTION_KEYWORD:\ntypeOrExpr = parseAnonFuncExprOrFuncTypeDesc();\nbreak;\ncase IDENTIFIER_TOKEN:\ntypeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\nreturn parseTypeDescOrExprRhs(typeOrExpr);\ncase OPEN_BRACKET_TOKEN:\ntypeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();\nbreak;\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nSTNode basicLiteral = parseBasicLiteral();\nreturn parseTypeDescOrExprRhs(basicLiteral);\ndefault:\nif (isValidExpressionStart(nextToken.kind, 1)) {\nreturn parseActionOrExpressionInLhs(nextToken.kind, null);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\n}\nif (isDefiniteTypeDesc(typeOrExpr.kind)) {\nreturn parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n}\nreturn parseTypeDescOrExprRhs(typeOrExpr);\n}\nprivate boolean isExpression(SyntaxKind kind) {\nswitch (kind) {\ncase BASIC_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\nkind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n}\n}\n/**\n* Parse statement that starts with an empty parenthesis. 
Empty parenthesis can be\n* 1) Nil literal\n* 2) Nil type-desc\n* 3) Anon-function params\n*\n* @param openParen Open parenthesis\n* @param closeParen Close parenthesis\n* @return Parsed node\n*/\nprivate STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nSTNode params = STNodeFactory.createNodeList();\nSTNode anonFuncParam =\nSTNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nendContext();\nreturn anonFuncParam;\ndefault:\nreturn STNodeFactory.createNilLiteralNode(openParen, closeParen);\n}\n}\nprivate STNode parseAnonFuncExprOrTypedBPWithFuncType() {\nSTNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc();\nif (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {\nreturn exprOrTypeDesc;\n}\nreturn parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);\n}\n/**\n* Parse anon-func-expr or function-type-desc, by resolving the ambiguity.\n*\n* @return Anon-func-expr or function-type-desc\n*/\nprivate STNode parseAnonFuncExprOrFuncTypeDesc() {\nstartContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcSignature = parseFuncSignature(true);\nendContext();\nswitch (peek().kind) {\ncase OPEN_BRACE_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\nfuncSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\nSTNode funcBody = parseAnonFuncBody(false);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nSTNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, functionKeyword,\nfuncSignature, funcBody);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);\ncase IDENTIFIER_TOKEN:\ndefault:\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);\nreturn parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\ntrue);\n}\n}\nprivate STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {\nSyntaxKind nextTokenKind = peek().kind;\nreturn parseTypeDescOrExprRhs(nextTokenKind, typeOrExpr);\n}\nprivate STNode parseTypeDescOrExprRhs(SyntaxKind nextTokenKind, STNode typeOrExpr) {\nSTNode typeDesc;\nswitch (nextTokenKind) {\ncase PIPE_TOKEN:\nSTToken nextNextToken = peek(2);\nif (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode pipe = parsePipeToken();\nSTNode rhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn STNodeFactory.createUnionTypeDescriptorNode(typeDesc, pipe, rhsTypeDescOrExpr);\ncase BITWISE_AND_TOKEN:\nnextNextToken = peek(2);\nif (nextNextToken.kind != SyntaxKind.EQUAL_TOKEN) {\nreturn typeOrExpr;\n}\nSTNode ampersand = parseBinaryOperator();\nrhsTypeDescOrExpr = parseTypeDescOrExpr();\nif (isExpression(rhsTypeDescOrExpr.kind)) {\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,\nrhsTypeDescOrExpr);\n}\ntypeDesc = getTypeDescFromExpr(typeOrExpr);\nrhsTypeDescOrExpr = 
getTypeDescFromExpr(rhsTypeDescOrExpr);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(typeDesc, ampersand, rhsTypeDescOrExpr);\ncase IDENTIFIER_TOKEN:\ncase QUESTION_MARK_TOKEN:\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\ntypeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\nfalse);\nendContext();\nreturn typeDesc;\ncase SEMICOLON_TOKEN:\nreturn getTypeDescFromExpr(typeOrExpr);\ncase EQUAL_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\ncase COMMA_TOKEN:\nreturn typeOrExpr;\ncase OPEN_BRACKET_TOKEN:\nreturn parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,\nParserRuleContext.AMBIGUOUS_STMT);\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\ntypeOrExpr = getTypeDescFromExpr(typeOrExpr);\nreturn STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);\ndefault:\nif (isCompoundBinaryOperator(nextTokenKind)) {\nreturn typeOrExpr;\n}\nif (isValidExprRhsStart(nextTokenKind, typeOrExpr.kind)) {\nreturn parseExpressionRhs(nextTokenKind, DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false,\nfalse);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypeDescOrExprRhs(solution.tokenKind, typeOrExpr);\n}\n}\nprivate boolean isAmbiguous(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase BRACKETED_LIST:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\nbinaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase INDEXED_EXPRESSION:\nSTIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;\nif (!isAmbiguous(indexExpr.containerExpression)) {\nreturn false;\n}\nSTNode keys = indexExpr.keyExpression;\nfor (int i = 0; i < keys.bucketCount(); i++) {\nSTNode item = keys.childInBucket(i);\nif (item.kind == SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAmbiguous(item)) {\nreturn false;\n}\n}\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate boolean isAllBasicLiterals(STNode node) {\nswitch (node.kind) {\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\nif (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\nbinaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn false;\n}\nreturn isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\ncase BRACED_EXPRESSION:\nreturn isAmbiguous(((STBracedExpressionNode) node).expression);\ncase BRACKETED_LIST:\nSTAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;\nfor (STNode member : list.members) {\nif (member.kind == 
SyntaxKind.COMMA_TOKEN) {\ncontinue;\n}\nif (!isAllBasicLiterals(member)) {\nreturn false;\n}\n}\nreturn true;\ncase UNARY_EXPRESSION:\nSTUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;\nif (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&\nunaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {\nreturn false;\n}\nreturn isNumericLiteral(unaryExpr.expression);\ndefault:\nreturn false;\n}\n}\nprivate boolean isNumericLiteral(STNode node) {\nswitch (node.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseTypedDescOrExprStartsWithOpenBracket() {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList members = new ArrayList<>();\nSTNode memberEnd;\nwhile (!isEndOfListConstructor(peek().kind)) {\nSTNode expr = parseTypeDescOrExpr();\nmembers.add(expr);\nmemberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmembers.add(memberEnd);\n}\nSTNode memberNodes = STNodeFactory.createNodeList(members);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);\n}\n/**\n* Parse binding-patterns.\n*
\n* binding-pattern := capture-binding-pattern\n* | wildcard-binding-pattern\n* | list-binding-pattern\n* | mapping-binding-pattern\n* | functional-binding-pattern\n*\n* capture-binding-pattern := variable-name\n* variable-name := identifier\n*\n* wildcard-binding-pattern := _\n*\n* list-binding-pattern := [ list-member-binding-patterns ]\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* field-binding-pattern := field-name : binding-pattern | variable-name\n*\n* rest-binding-pattern := ... variable-name\n*\n* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )\n* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]\n* | other-arg-binding-patterns\n* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*\n* positional-arg-binding-pattern := binding-pattern\n* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]\n* | [rest-binding-pattern]\n* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*\n* named-arg-binding-pattern := arg-name = binding-pattern\n*
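\n*\n* For example (illustrative samples, added for clarity; not from the grammar spec):\n* 'x' is a capture-binding-pattern, '_' is a wildcard-binding-pattern,\n* '[a, b, ...rest]' is a list-binding-pattern, '{id: i, name}' is a\n* mapping-binding-pattern, and 'error(reason)' is a functional-binding-pattern.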
\n*\n* @return binding-pattern node\n*/\nprivate STNode parseBindingPattern() {\nSTToken token = peek();\nreturn parseBindingPattern(token.kind);\n}\nprivate STNode parseBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseListBindingPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseBindingPatternStartsWithIdentifier();\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPattern();\ncase ERROR_KEYWORD:\nreturn parseErrorBindingPattern();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.BINDING_PATTERN);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn parseBindingPattern(sol.tokenKind);\n}\n}\nprivate STNode parseBindingPatternStartsWithIdentifier() {\nSTNode argNameOrBindingPattern =\nparseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\nSTToken secondToken = peek();\nif (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nstartContext(ParserRuleContext.FUNCTIONAL_BINDING_PATTERN);\nreturn parseFunctionalBindingPattern(argNameOrBindingPattern);\n}\nif (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {\nSTNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN);\nidentifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);\nreturn createCaptureOrWildcardBP(identifier);\n}\nreturn createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);\n}\nprivate STNode createCaptureOrWildcardBP(STNode varName) {\nSTNode bindingPattern;\nif (isWildcardBP(varName)) {\nbindingPattern = getWildcardBindingPattern(varName);\n} else {\nbindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);\n}\nreturn bindingPattern;\n}\n/**\n* Parse list-binding-patterns.\n*
\n* list-binding-pattern := [ list-member-binding-patterns ]\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
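\n*\n* For example (illustrative sample): '[a, [b, c], ...rest]' is a\n* list-binding-pattern whose final member is a rest-binding-pattern.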
\n*\n* @return list-binding-pattern node\n*/\nprivate STNode parseListBindingPattern() {\nstartContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode openBracket = parseOpenBracket();\nList bindingPatternsList = new ArrayList<>();\nSTNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);\nendContext();\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, List bindingPatternsList) {\nSTNode listBindingPatternMember = parseListBindingPatternMember();\nbindingPatternsList.add(listBindingPatternMember);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);\nreturn listBindingPattern;\n}\nprivate STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List bindingPatterns) {\nSTNode member = firstMember;\nSTToken token = peek();\nSTNode listBindingPatternRhs = null;\nwhile (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\nlistBindingPatternRhs = parseListBindingPatternMemberRhs(token.kind);\nif (listBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(listBindingPatternRhs);\nmember = parseListBindingPatternMember();\nbindingPatterns.add(member);\ntoken = peek();\n}\nSTNode restBindingPattern;\nif (member.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = bindingPatterns.remove(bindingPatterns.size() - 1);\n} else {\nrestBindingPattern = STNodeFactory.createEmptyNode();\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,\ncloseBracket);\n}\nprivate STNode parseListBindingPatternMemberRhs() {\nreturn parseListBindingPatternMemberRhs(peek().kind);\n}\nprivate STNode parseListBindingPatternMemberRhs(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternMemberRhs(solution.tokenKind);\n}\n}\nprivate boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACKET_TOKEN:\ncase EOF_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse list-binding-pattern entry.\n*
\n* list-binding-pattern := [ list-member-binding-patterns ]\n* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n*
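\n*\n* For example (illustrative sample): parsing '[a, ...rest]' yields a\n* capture-binding-pattern member followed by a rest-binding-pattern member.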
\n*\n* @return rest-binding-pattern node\n*/\nprivate STNode parseListBindingPatternMember() {\nSTToken token = peek();\nreturn parseListBindingPatternMember(token.kind);\n}\nprivate STNode parseListBindingPatternMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase OPEN_BRACKET_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ndefault:\nSolution sol = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nreturn parseListBindingPatternMember(sol.tokenKind);\n}\n}\nprivate STNode parseRestBindingPattern() {\nstartContext(ParserRuleContext.REST_BINDING_PATTERN);\nSTNode ellipsis = parseEllipsis();\nSTNode varName = parseVariableName();\nendContext();\nSTSimpleNameReferenceNode simpleNameReferenceNode =\n(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);\nreturn STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);\n}\n/**\n* Parse Typed-binding-pattern.\n*
\n* typed-binding-pattern := inferable-type-descriptor binding-pattern\n* inferable-type-descriptor := type-descriptor | var\n*
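\n*\n* For example (illustrative samples): 'int[] nums' uses an explicit\n* type-descriptor, while 'var [a, b]' uses 'var' so the types are inferred.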
\n*\n* @return Typed binding pattern node\n*/\nprivate STNode parseTypedBindingPattern(ParserRuleContext context) {\nSTNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);\nSTNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);\nreturn typeBindingPattern;\n}\n/**\n* Parse mapping-binding-patterns.\n*
\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* field-binding-pattern := field-name : binding-pattern | variable-name\n*
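\n*\n* For example (illustrative sample): '{id: i, name}' binds field 'id' to\n* variable 'i' and uses the variable-name shorthand for 'name'.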
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPattern() {\nstartContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nSTNode openBrace = parseOpenBrace();\nSTToken token = peek();\nif (isEndOfMappingBindingPattern(token.kind)) {\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,\ncloseBrace);\n}\nList bindingPatterns = new ArrayList<>();\nSTNode prevMember = parseMappingBindingPatternMember();\nbindingPatterns.add(prevMember);\nreturn parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);\n}\nprivate STNode parseMappingBindingPattern(STNode openBrace, List bindingPatterns, STNode member) {\nSTToken token = peek();\nSTNode mappingBindingPatternRhs = null;\nwhile (!isEndOfMappingBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\nmappingBindingPatternRhs = parseMappingBindingPatternEnd(token.kind);\nif (mappingBindingPatternRhs == null) {\nbreak;\n}\nbindingPatterns.add(mappingBindingPatternRhs);\nmember = parseMappingBindingPatternMember();\nif (member.kind == SyntaxKind.REST_BINDING_PATTERN) {\nbreak;\n}\nbindingPatterns.add(member);\ntoken = peek();\n}\nSTNode restBindingPattern;\nif (member.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = member;\n} else {\nrestBindingPattern = STNodeFactory.createEmptyNode();\n}\nSTNode closeBrace = parseCloseBrace();\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\nendContext();\nreturn STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,\ncloseBrace);\n}\n/**\n* Parse mapping-binding-pattern entry.\n*
\n* mapping-binding-pattern := { field-binding-patterns }\n* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n* | [ rest-binding-pattern ]\n* field-binding-pattern := field-name : binding-pattern\n* | variable-name\n*
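\n*\n* For example (illustrative sample): 'addr: {city, zip}' nests a\n* mapping-binding-pattern under field 'addr', while 'name' alone uses the\n* variable-name shorthand.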
\n*\n* @return mapping-binding-pattern node\n*/\nprivate STNode parseMappingBindingPatternMember() {\nSTToken token = peek();\nswitch (token.kind) {\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ndefault:\nreturn parseFieldBindingPattern();\n}\n}\nprivate STNode parseMappingBindingPatternEnd() {\nreturn parseMappingBindingPatternEnd(peek().kind);\n}\nprivate STNode parseMappingBindingPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingBindingPatternEnd(solution.tokenKind);\n}\n}\nprivate STNode parseFieldBindingPattern() {\nreturn parseFieldBindingPattern(peek().kind);\n}\n/**\n* Parse field-binding-pattern.\n* field-binding-pattern := field-name : binding-pattern | varname\n*\n* @return field-binding-pattern node\n*/\nprivate STNode parseFieldBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nSTNode fieldBindingPattern = parseFieldBindingPattern(identifier);\nreturn fieldBindingPattern;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldBindingPattern(solution.tokenKind);\n}\n}\nprivate STNode parseFieldBindingPattern(STNode identifier) {\nSTNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);\nif (peek().kind != SyntaxKind.COLON_TOKEN) {\nreturn STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);\n}\nSTNode colon = parseColon();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);\n}\nprivate boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseErrorBindingPattern() {\nstartContext(ParserRuleContext.FUNCTIONAL_BINDING_PATTERN);\nSTNode typeDesc = parseErrorKeyword();\nreturn parseFunctionalBindingPattern(typeDesc);\n}\nprivate STNode parseFunctionalBindingPattern(STNode typeDesc) {\nSTNode openParenthesis = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode argListBindingPatterns = parseArgListBindingPatterns();\nSTNode closeParenthesis = parseCloseParenthesis();\nendContext();\nreturn STNodeFactory.createFunctionalBindingPatternNode(typeDesc, openParenthesis, argListBindingPatterns,\ncloseParenthesis);\n}\nprivate STNode parseArgListBindingPatterns() {\nList argListBindingPatterns = new ArrayList<>();\nSyntaxKind lastValidArgKind = SyntaxKind.CAPTURE_BINDING_PATTERN;\nSTToken nextToken = peek();\nwhile (!isEndOfParametersList(nextToken.kind)) {\nSTNode currentArg = parseArgBindingPattern(nextToken.kind);\nDiagnosticErrorCode errorCode = validateArgBindingPatternOrder(lastValidArgKind, currentArg.kind);\nif (errorCode == null) {\nargListBindingPatterns.add(currentArg);\nlastValidArgKind = currentArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);\n}\nnextToken = peek();\nSTNode argEnd = parseArgsBindingPatternEnd(nextToken.kind);\nif (argEnd == null) {\nbreak;\n}\nif (errorCode == null) 
{\nargListBindingPatterns.add(argEnd);\n} else {\nupdateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);\n}\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(argListBindingPatterns);\n}\nprivate STNode parseArgsBindingPatternEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_BINDING_PATTERN_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgsBindingPatternEnd(solution.tokenKind);\n}\n}\nprivate STNode parseArgBindingPattern(SyntaxKind kind) {\nswitch (kind) {\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ncase IDENTIFIER_TOKEN:\nreturn parseNamedOrPositionalArgBindingPattern(kind);\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn parseBindingPattern();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_BINDING_PATTERN);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgBindingPattern(solution.tokenKind);\n}\n}\nprivate STNode parseNamedOrPositionalArgBindingPattern(SyntaxKind nextTokenKind) {\nSTNode argNameOrBindingPattern = parseQualifiedIdentifier(ParserRuleContext.ARG_BINDING_PATTERN_START_IDENT);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nSTNode equal = parseAssignOp();\nSTNode bindingPattern = parseBindingPattern();\nreturn STNodeFactory.createNamedArgBindingPatternNode(argNameOrBindingPattern, equal, bindingPattern);\ncase OPEN_PAREN_TOKEN:\nreturn parseFunctionalBindingPattern(argNameOrBindingPattern);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ndefault:\nreturn createCaptureOrWildcardBP(argNameOrBindingPattern);\n}\n}\nprivate DiagnosticErrorCode validateArgBindingPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\nbreak;\ncase NAMED_ARG_BINDING_PATTERN:\nif (currentArgKind != SyntaxKind.NAMED_ARG_BINDING_PATTERN &&\ncurrentArgKind != SyntaxKind.REST_BINDING_PATTERN) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_BINDING_PATTERN:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nthrow new IllegalStateException(\"Invalid SyntaxKind in an argument\");\n}\nreturn errorCode;\n}\n/*\n* This parses Typed binding patterns and deals with ambiguity between types,\n* and binding patterns. 
An example is 'T[a]'.\n* The ambiguity lies in between:\n* 1) Array Type\n* 2) List binding pattern\n* 3) Member access expression.\n*/\n/**\n* Parse the component after the type-desc, of a typed-binding-pattern.\n*\n* @param typeDesc Starting type-desc of the typed-binding-pattern\n* @return Typed-binding pattern\n*/\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternTypeRhs(nextToken.kind, typeDesc, context, true);\n}\nprivate STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternTypeRhs(nextToken.kind, typeDesc, context, isRoot);\n}\nprivate STNode parseTypedBindingPatternTypeRhs(SyntaxKind nextTokenKind, STNode typeDesc, ParserRuleContext context,\nboolean isRoot) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode bindingPattern = parseBindingPattern(nextTokenKind);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase OPEN_BRACKET_TOKEN:\nSTNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);\nassert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;\nreturn typedBindingPattern;\ncase CLOSE_PAREN_TOKEN:\ncase COMMA_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nif (!isRoot) {\nreturn typeDesc;\n}\ndefault:\nSolution solution =\nrecover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternTypeRhs(solution.tokenKind, typeDesc, context, isRoot);\n}\n}\n/**\n* Parse typed-binding pattern with list, array-type-desc, or member-access-expr.\n*\n* @param typeDescOrExpr Type desc or the expression at the start\n* @param isTypedBindingPattern Is this is a typed-binding-pattern.\n* @return Parsed node\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nif (isBracketedListEnd(peek().kind)) {\nreturn parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);\n}\nSTNode member = parseBracketedListMember(isTypedBindingPattern);\nSyntaxKind currentNodeType = getBracketedListNodeType(member);\nswitch (currentNodeType) {\ncase ARRAY_TYPE_DESC:\nSTNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);\nreturn typedBindingPattern;\ncase LIST_BINDING_PATTERN:\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\ncase INDEXED_EXPRESSION:\nreturn parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);\ncase NONE:\ndefault:\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd != null) {\nList memberList = new ArrayList<>();\nmemberList.add(member);\nmemberList.add(memberEnd);\nSTNode bindingPattern = parseAsListBindingPattern(openBracket, memberList);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\nSTNode closeBracket = 
parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,\nisTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {\nmember = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode keyExpr = STNodeFactory.createNodeList(member);\nSTNode memberAccessExpr =\nSTNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);\n}\nprivate boolean isBracketedListEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate STNode parseBracketedListMember(boolean isTypedBindingPattern) {\nreturn parseBracketedListMember(peek().kind, isTypedBindingPattern);\n}\n/**\n* Parse a member of an ambiguous bracketed list. This member could be:\n* 1) Array length\n* 2) Key expression of a member-access-expr\n* 3) A member-binding pattern of a list-binding-pattern.\n*\n* @param nextTokenKind Kind of the next token\n* @param isTypedBindingPattern Is this in a definite typed-binding pattern\n* @return Parsed member node\n*/\nprivate STNode parseBracketedListMember(SyntaxKind nextTokenKind, boolean isTypedBindingPattern) {\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\ncase STRING_LITERAL:\nreturn parseBasicLiteral();\ncase CLOSE_BRACKET_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\ncase ELLIPSIS_TOKEN:\ncase OPEN_BRACKET_TOKEN:\nreturn parseStatementStartBracketedListMember();\ncase IDENTIFIER_TOKEN:\nif (isTypedBindingPattern) {\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseListBindingPatternMember();\n}\nreturn identifier;\n}\nbreak;\ndefault:\nif (!isTypedBindingPattern && isValidExpressionStart(nextTokenKind, 1)) {\nbreak;\n}\nParserRuleContext recoverContext =\nisTypedBindingPattern ? 
ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH\n: ParserRuleContext.BRACKETED_LIST_MEMBER;\nSolution solution = recover(peek(), recoverContext, isTypedBindingPattern);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseBracketedListMember(solution.tokenKind, isTypedBindingPattern);\n}\nSTNode expr = parseExpression();\nif (isWildcardBP(expr)) {\nreturn getWildcardBindingPattern(expr);\n}\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE || expr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseListBindingPatternMember();\n}\n}\nreturn expr;\n}\n/**\n* Treat the current node as an array, and parse the remainder of the binding pattern.\n*\n* @param typeDesc Type-desc\n* @param openBracket Open bracket\n* @param member Member\n* @return Parsed node\n*/\nprivate STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {\ntypeDesc = getTypeDescFromExpr(typeDesc);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nreturn parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,\ncontext);\n}\nprivate STNode parseBracketedListMemberEnd() {\nreturn parseBracketedListMemberEnd(peek().kind);\n}\nprivate STNode parseBracketedListMemberEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseBracketedListMemberEnd(solution.tokenKind);\n}\n}\n/**\n* We reach here to break ambiguity of T[a]. 
This could be:\n* 1) Array Type Desc\n* 2) Member access on LHS\n* 3) Typed-binding-pattern\n*\n* @param typeDescOrExpr Type name or the expr that precede the open-bracket.\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Open bracket\n* @param isTypedBindingPattern Is this is a typed-binding-pattern.\n* @return Specific node that matches to T[a], after solving ambiguity.\n*/\nprivate STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, boolean isTypedBindingPattern,\nboolean allowAssignment, ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseTypedBindingPatternOrMemberAccessRhs(nextToken.kind, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode parseTypedBindingPatternOrMemberAccessRhs(SyntaxKind nextTokenKind, STNode typeDescOrExpr,\nSTNode openBracket, STNode member, STNode closeBracket,\nboolean isTypedBindingPattern, boolean allowAssignment,\nParserRuleContext context) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nSTNode arrayTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\ncase OPEN_BRACKET_TOKEN:\nif (isTypedBindingPattern) {\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc =\nSTNodeFactory.createArrayTypeDescriptorNode(typeDesc, openBracket, member, closeBracket);\nreturn parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\n}\nSTNode keyExpr = STNodeFactory.createNodeList(member);\nSTNode expr =\nSTNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\nreturn parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);\ncase QUESTION_MARK_TOKEN:\ntypeDesc = getTypeDescFromExpr(typeDescOrExpr);\narrayTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\ntypeDesc = parseComplexTypeDescriptor(arrayTypeDesc,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nreturn parseTypedBindingPatternTypeRhs(typeDesc, context);\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\nreturn parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket,\ncontext, isTypedBindingPattern);\ncase IN_KEYWORD:\nif (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase EQUAL_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nif (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\n}\nkeyExpr = STNodeFactory.createNodeList(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\ncase SEMICOLON_TOKEN:\nif (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\nbreak;\n}\nreturn createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\ncase CLOSE_BRACE_TOKEN:\ncase COMMA_TOKEN:\nif (context == ParserRuleContext.AMBIGUOUS_STMT) {\nkeyExpr = STNodeFactory.createNodeList(member);\nreturn 
STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(nextTokenKind, closeBracket.kind)) {\nkeyExpr = STNodeFactory.createNodeList(member);\ntypeDescOrExpr = getExpression(typeDescOrExpr);\nreturn STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\ncloseBracket);\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypedBindingPatternOrMemberAccessRhs(solution.tokenKind, typeDescOrExpr, openBracket, member,\ncloseBracket, isTypedBindingPattern, allowAssignment, context);\n}\nprivate STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket) {\nSTNode bindingPatterns;\nif (isEmpty(member)) {\nbindingPatterns = STNodeFactory.createEmptyNodeList();\n} else {\nSTNode bindingPattern = getBindingPattern(member);\nbindingPatterns = STNodeFactory.createNodeList(bindingPattern);\n}\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns,\nrestBindingPattern, closeBracket);\nSTNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\nreturn STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n}\n/**\n* Parse a union or intersection type-desc/binary-expression that involves ambiguous\n* bracketed list in lhs.\n*
\n* e.g. (T[a] & R..) or (T[a] | R.. )\n*\n* Complexity occurs in scenarios such as T[a] |/& R[b]. If the token after this\n* is another binding-pattern, then (T[a] |/& R[b]) becomes the type-desc. However,\n* if the token following this is an equal or a semicolon, then (T[a] |/& R) becomes\n* the type-desc, and [b] becomes the binding pattern.\n*\n* @param typeDescOrExpr Type desc or the expression\n* @param openBracket Open bracket\n* @param member Member\n* @param closeBracket Close bracket\n* @param context Context in which the typed binding pattern occurs\n* @param isTypedBindingPattern Whether this is a definite typed-binding-pattern\n* @return Parsed node\n*/\nprivate STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\nSTNode closeBracket, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTNode pipeOrAndToken = parseUnionOrIntersectionToken();\nSTNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);\nif (isTypedBindingPattern || typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\nSTNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);\nlhsTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);\nSTTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;\nSTNode newTypeDesc;\nif (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {\nnewTypeDesc = STNodeFactory.createUnionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken,\nrhsTypedBindingPattern.typeDescriptor);\n} else {\nnewTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken,\nrhsTypedBindingPattern.typeDescriptor);\n}\nreturn STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);\n} else {\nSTNode keyExpr = getExpression(member);\nSTNode containerExpr = getExpression(typeDescOrExpr);\nSTNode lhsExpr =\nSTNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);\nreturn STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,\ntypedBindingPatternOrExpr);\n}\n}\nprivate STNode createArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {\nif (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\nSTUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc = createArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);\nlhsTypeDesc = STNodeFactory.createUnionTypeDescriptorNode(unionTypeDesc.leftTypeDesc,\nunionTypeDesc.pipeToken, middleTypeDesc);\n} else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\nSTIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;\nSTNode middleTypeDesc =\ncreateArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);\nlhsTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(intersectionTypeDesc.leftTypeDesc,\nintersectionTypeDesc.bitwiseAndToken, middleTypeDesc);\n} else {\nlhsTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(lhsTypeDesc, openBracket, member, closeBracket);\n}\nreturn lhsTypeDesc;\n}\n/**\n* Parse union (|) or intersection (&) type operator.\n*\n* @return pipe or bitwise and token\n*/\nprivate STNode parseUnionOrIntersectionToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);\nreturn sol.recoveredNode;\n}\n}\n/**\n* 
Infer the type of the ambiguous bracketed list, based on the type of the member.\n*\n* @param memberNode Member node\n* @return Inferred type of the bracketed list\n*/\nprivate SyntaxKind getBracketedListNodeType(STNode memberNode) {\nif (isEmpty(memberNode)) {\nreturn SyntaxKind.NONE;\n}\nif (isDefiniteTypeDesc(memberNode.kind)) {\nreturn SyntaxKind.TUPLE_TYPE_DESC;\n}\nswitch (memberNode.kind) {\ncase ASTERISK_TOKEN:\nreturn SyntaxKind.ARRAY_TYPE_DESC;\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase QUALIFIED_NAME_REFERENCE:\ncase REST_TYPE:\nreturn SyntaxKind.TUPLE_TYPE_DESC;\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase SIMPLE_NAME_REFERENCE:\ncase BRACKETED_LIST:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.NONE;\ndefault:\nreturn SyntaxKind.INDEXED_EXPRESSION;\n}\n}\n/*\n* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.\n* The ambiguity lies in between:\n* 1) Assignment that starts with list binding pattern\n* 2) Var-decl statement that starts with tuple type\n* 3) Statement that starts with list constructor, such as sync-send, etc.\n*/\n/**\n* Parse any statement that starts with an open-bracket.\n*\n* @param annots Annotations attached to the statement.\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {\nstartContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);\nreturn parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);\n}\nprivate STNode parseMemberBracketedList(boolean possibleMappingField) {\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStatementStartsWithOpenBracket(annots, false, possibleMappingField);\n}\n/**\n* The bracketed list at the start of a statement can be one of the following.\n* 1) List binding pattern\n* 2) Tuple type\n* 3) List constructor\n*\n* @param isRoot Is this the root of the list\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {\nstartContext(ParserRuleContext.STMT_START_BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nwhile (!isBracketedListEnd(peek().kind)) {\nSTNode member = parseStatementStartBracketedListMember();\nSyntaxKind currentNodeType = getStmtStartBracketedListType(member);\nswitch (currentNodeType) {\ncase TUPLE_TYPE_DESC:\nreturn parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\ncase LIST_BINDING_PATTERN:\nreturn parseAsListBindingPattern(openBracket, memberList, member, isRoot);\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nreturn parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);\ncase NONE:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\n}\nSTNode closeBracket = parseCloseBracket();\nSTNode bracketedList = parseStatementStartBracketedList(annots, openBracket, memberList, closeBracket, isRoot,\npossibleMappingField);\nreturn bracketedList;\n}\nprivate STNode parseStatementStartBracketedListMember() {\nSTToken nextToken = peek();\nreturn parseStatementStartBracketedListMember(nextToken.kind);\n}\n/**\n* 
Parse a member of a list-binding-pattern, tuple-type-desc, or\n* list-constructor-expr, when the parent is ambiguous.\n*\n* @param nextTokenKind Kind of the next token.\n* @return Parsed node\n*/\nprivate STNode parseStatementStartBracketedListMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseMemberBracketedList(false);\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (isWildcardBP(identifier)) {\nSTNode varName = ((STSimpleNameReferenceNode) identifier).name;\nreturn getWildcardBindingPattern(varName);\n}\nnextTokenKind = peek().kind;\nif (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) {\nSTNode ellipsis = parseEllipsis();\nreturn STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPatterOrMappingConstructor();\ncase ERROR_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseErrorConstructorExpr();\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase ELLIPSIS_TOKEN:\nreturn parseListBindingPatternMember();\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseExpression(false);\n}\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\nif (getNextNextToken(nextTokenKind).kind == SyntaxKind.LT_TOKEN) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nreturn parseExpression(false);\ncase OPEN_PAREN_TOKEN:\nreturn parseTypeDescOrExpr();\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseExpression(false);\n}\nif (isTypeStartingToken(nextTokenKind)) {\nreturn parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n}\nSolution solution = recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatementStartBracketedListMember(solution.tokenKind);\n}\n}\nprivate STNode parseAsTupleTypeDesc(STNode annots, STNode openBracket, List memberList, STNode member,\nboolean isRoot) {\nmemberList = getTypeDescList(memberList);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode tupleTypeMembers = parseTupleTypeMembers(member, memberList);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode tupleType = STNodeFactory.createTupleTypeDescriptorNode(openBracket, tupleTypeMembers, closeBracket);\nSTNode typeDesc =\nparseComplexTypeDescriptor(tupleType, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nendContext();\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT, isRoot);\nif (!isRoot) {\nreturn typedBindingPattern;\n}\nswitchContext(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, STNodeFactory.createEmptyNode(), typedBindingPattern, false);\n}\nprivate STNode parseAsListBindingPattern(STNode openBracket, List memberList, STNode member,\nboolean isRoot) {\nmemberList = getBindingPatternsList(memberList);\nmemberList.add(member);\nswitchContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, member, memberList);\nendContext();\nif (!isRoot) {\nreturn listBindingPattern;\n}\nreturn parseAssignmentStmtRhs(listBindingPattern);\n}\nprivate STNode 
parseAsListBindingPattern(STNode openBracket, List memberList) {\nmemberList = getBindingPatternsList(memberList);\nswitchContext(ParserRuleContext.LIST_BINDING_PATTERN);\nSTNode listBindingPattern = parseListBindingPattern(openBracket, memberList);\nendContext();\nreturn listBindingPattern;\n}\nprivate STNode parseAsListBindingPatternOrListConstructor(STNode openBracket, List memberList,\nSTNode member, boolean isRoot) {\nmemberList.add(member);\nSTNode memberEnd = parseBracketedListMemberEnd();\nSTNode listBindingPatternOrListCons;\nif (memberEnd == null) {\nSTNode closeBracket = parseCloseBracket();\nlistBindingPatternOrListCons =\nparseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot);\n} else {\nmemberList.add(memberEnd);\nlistBindingPatternOrListCons = parseListBindingPatternOrListConstructor(openBracket, memberList, isRoot);\n}\nreturn listBindingPatternOrListCons;\n}\nprivate SyntaxKind getStmtStartBracketedListType(STNode memberNode) {\nif (memberNode.kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 &&\nmemberNode.kind.compareTo(SyntaxKind.TYPEDESC_TYPE_DESC) <= 0) {\nreturn SyntaxKind.TUPLE_TYPE_DESC;\n}\nswitch (memberNode.kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase ASTERISK_TOKEN:\nreturn SyntaxKind.ARRAY_TYPE_DESC;\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase QUALIFIED_NAME_REFERENCE:\ncase REST_TYPE:\nreturn SyntaxKind.TUPLE_TYPE_DESC;\ncase LIST_CONSTRUCTOR:\ncase MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_CONSTRUCTOR;\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR;\ncase SIMPLE_NAME_REFERENCE:\ncase BRACKETED_LIST:\nreturn SyntaxKind.NONE;\ncase FUNCTION_CALL:\nif (isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) memberNode)) {\nreturn SyntaxKind.NONE;\n}\nreturn SyntaxKind.LIST_CONSTRUCTOR;\ndefault:\nif (isExpression(memberNode.kind) && !isAllBasicLiterals(memberNode) && !isAmbiguous(memberNode)) {\nreturn SyntaxKind.LIST_CONSTRUCTOR;\n}\nreturn SyntaxKind.NONE;\n}\n}\nprivate boolean isPosibleFunctionalBindingPattern(STFunctionCallExpressionNode funcCall) {\nSTNode args = funcCall.arguments;\nint size = args.bucketCount();\nfor (int i = 0; i < size; i++) {\nSTNode arg = args.childInBucket(i);\nif (arg.kind != SyntaxKind.NAMED_ARG && arg.kind != SyntaxKind.POSITIONAL_ARG &&\narg.kind != SyntaxKind.REST_ARG) {\ncontinue;\n}\nif (!isPosibleArgBindingPattern((STFunctionArgumentNode) arg)) {\nreturn false;\n}\n}\nreturn true;\n}\nprivate boolean isPosibleArgBindingPattern(STFunctionArgumentNode arg) {\nswitch (arg.kind) {\ncase POSITIONAL_ARG:\nSTNode expr = ((STPositionalArgumentNode) arg).expression;\nreturn isPosibleBindingPattern(expr);\ncase NAMED_ARG:\nexpr = ((STNamedArgumentNode) arg).expression;\nreturn isPosibleBindingPattern(expr);\ncase REST_ARG:\nexpr = ((STRestArgumentNode) arg).expression;\nreturn expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE;\ndefault:\nreturn false;\n}\n}\nprivate boolean isPosibleBindingPattern(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn true;\ncase LIST_CONSTRUCTOR:\nSTListConstructorExpressionNode listConstructor = (STListConstructorExpressionNode) node;\nfor (int i = 0; i < listConstructor.bucketCount(); i++) {\nSTNode expr = listConstructor.childInBucket(i);\nif (!isPosibleBindingPattern(expr)) {\nreturn false;\n}\n}\nreturn true;\ncase 
MAPPING_CONSTRUCTOR:\nSTMappingConstructorExpressionNode mappingConstructor = (STMappingConstructorExpressionNode) node;\nfor (int i = 0; i < mappingConstructor.bucketCount(); i++) {\nSTNode expr = mappingConstructor.childInBucket(i);\nif (!isPosibleBindingPattern(expr)) {\nreturn false;\n}\n}\nreturn true;\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode specificField = (STSpecificFieldNode) node;\nif (specificField.readonlyKeyword != null) {\nreturn false;\n}\nif (specificField.valueExpr == null) {\nreturn true;\n}\nreturn isPosibleBindingPattern(specificField.valueExpr);\ncase FUNCTION_CALL:\nreturn isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) node);\ndefault:\nreturn false;\n}\n}\nprivate STNode parseStatementStartBracketedList(STNode annots, STNode openBracket, List members,\nSTNode closeBracket, boolean isRoot, boolean possibleMappingField) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase EQUAL_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nSTNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(members));\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode listBindingPattern = STNodeFactory.createListBindingPatternNode(openBracket,\nmemberBindingPatterns, restBindingPattern, closeBracket);\nendContext();\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nreturn parseAssignmentStmtRhs(listBindingPattern);\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACE_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nif (members.isEmpty()) {\nopenBracket =\nSyntaxErrors.addDiagnostic(openBracket, DiagnosticErrorCode.ERROR_MISSING_TUPLE_MEMBER);\n}\nswitchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\nSTNode tupleTypeDesc =\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\nendContext();\nSTNode typeDesc = parseComplexTypeDescriptor(tupleTypeDesc,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nendContext();\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, typedBindingPattern);\ncase OPEN_BRACKET_TOKEN:\nif (!isRoot) {\nmemberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));\ntupleTypeDesc =\nSTNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\nendContext();\ntypeDesc = parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\nreturn typeDesc;\n}\nSTAmbiguousCollectionNode list =\nnew STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\nendContext();\nSTNode tpbOrExpr = parseTypedBindingPatternOrExprRhs(list, true);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, tpbOrExpr);\ncase COLON_TOKEN:\nif (possibleMappingField && members.size() == 1) {\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode colon = parseColon();\nSTNode fieldNameExpr = getExpression(members.get(0));\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon,\nvalueExpr);\n}\ndefault:\nendContext();\nif (!isRoot) {\nreturn new 
STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\n}\nlist = new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);\nSTNode exprOrTPB = parseTypedBindingPatternOrExprRhs(list, false);\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, exprOrTPB);\n}\n}\nprivate boolean isWildcardBP(STNode node) {\nswitch (node.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTToken nameToken = (STToken) ((STSimpleNameReferenceNode) node).name;\nreturn isUnderscoreToken(nameToken);\ncase IDENTIFIER_TOKEN:\nreturn isUnderscoreToken((STToken) node);\ndefault:\nreturn false;\n}\n}\nprivate boolean isUnderscoreToken(STToken token) {\nreturn \"_\".equals(token.text());\n}\nprivate STNode getWildcardBindingPattern(STNode identifier) {\nswitch (identifier.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTNode varName = ((STSimpleNameReferenceNode) identifier).name;\nreturn STNodeFactory.createWildcardBindingPatternNode(varName);\ncase IDENTIFIER_TOKEN:\nreturn STNodeFactory.createWildcardBindingPatternNode(identifier);\ndefault:\nthrow new IllegalStateException();\n}\n}\n/*\n* This section tries to break the ambiguity in parsing a statement that starts with a open-brace.\n*/\n/**\n* Parse statements that starts with open-brace. It could be a:\n* 1) Block statement\n* 2) Var-decl with mapping binding pattern.\n* 3) Statement that starts with mapping constructor expression.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartsWithOpenBrace() {\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode openBrace = parseOpenBrace();\nif (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nSTNode closeBrace = parseCloseBrace();\nswitch (peek().kind) {\ncase EQUAL_TOKEN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nSTNode fields = STNodeFactory.createEmptyNodeList();\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nSTNode bindingPattern = STNodeFactory.createMappingBindingPatternNode(openBrace, fields,\nrestBindingPattern, closeBrace);\nreturn parseAssignmentStmtRhs(bindingPattern);\ncase RIGHT_ARROW_TOKEN:\ncase SYNC_SEND_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nfields = STNodeFactory.createEmptyNodeList();\nSTNode expr = STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ndefault:\nSTNode statements = STNodeFactory.createEmptyNodeList();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace);\n}\n}\nSTNode member = parseStatementStartingBracedListFirstMember();\nSyntaxKind nodeType = getBracedListType(member);\nSTNode stmt;\nswitch (nodeType) {\ncase MAPPING_BINDING_PATTERN:\nreturn parseStmtAsMappingBindingPatternStart(openBrace, member);\ncase MAPPING_CONSTRUCTOR:\nreturn parseStmtAsMappingConstructorStart(openBrace, member);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn parseStmtAsMappingBPOrMappingConsStart(openBrace, member);\ncase BLOCK_STATEMENT:\nSTNode closeBrace = parseCloseBrace();\nstmt = STNodeFactory.createBlockStatementNode(openBrace, member, closeBrace);\nendContext();\nreturn stmt;\ndefault:\nArrayList stmts = new ArrayList<>();\nstmts.add(member);\nSTNode statements = parseStatements(stmts);\ncloseBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace);\n}\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping 
binding pattern.\n*\n* @param openBrace Open brace\n* @param firstMappingField First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingBindingPatternStart(STNode openBrace, STNode firstMappingField) {\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nstartContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nList bindingPatterns = new ArrayList<>();\nif (firstMappingField.kind != SyntaxKind.REST_BINDING_PATTERN) {\nbindingPatterns.add(getBindingPattern(firstMappingField));\n}\nSTNode mappingBP = parseMappingBindingPattern(openBrace, bindingPatterns, firstMappingField);\nreturn parseAssignmentStmtRhs(mappingBP);\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param firstMember First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingConstructorStart(STNode openBrace, STNode firstMember) {\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nList members = new ArrayList<>();\nSTNode mappingCons = parseAsMappingConstructor(openBrace, members, firstMember);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, mappingCons, false, true);\nreturn parseStatementStartWithExprRhs(expr);\n}\n/**\n* Parse the braced-list as a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param members Members list\n* @param member Most recently parsed member\n* @return Parsed node\n*/\nprivate STNode parseAsMappingConstructor(STNode openBrace, List members, STNode member) {\nmembers.add(member);\nmembers = getExpressionList(members);\nswitchContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode fields = parseMappingConstructorFields(members);\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\n}\n/**\n* Parse the rest of the statement, treating the start as a mapping binding pattern\n* or a mapping constructor expression.\n*\n* @param openBrace Open brace\n* @param member First member\n* @return Parsed node\n*/\nprivate STNode parseStmtAsMappingBPOrMappingConsStart(STNode openBrace, STNode member) {\nstartContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);\nList members = new ArrayList<>();\nmembers.add(member);\nSTNode bpOrConstructor;\nSTNode memberEnd = parseMappingFieldEnd();\nif (memberEnd == null) {\nSTNode closeBrace = parseCloseBrace();\nbpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members, closeBrace);\n} else {\nmembers.add(memberEnd);\nbpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members);\n}\nswitch (bpOrConstructor.kind) {\ncase MAPPING_CONSTRUCTOR:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, bpOrConstructor, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ncase MAPPING_BINDING_PATTERN:\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nSTNode bindingPattern = getBindingPattern(bpOrConstructor);\nreturn parseAssignmentStmtRhs(bindingPattern);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nif (peek().kind == SyntaxKind.EQUAL_TOKEN) {\nswitchContext(ParserRuleContext.ASSIGNMENT_STMT);\nbindingPattern = getBindingPattern(bpOrConstructor);\nreturn parseAssignmentStmtRhs(bindingPattern);\n}\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nexpr = getExpression(bpOrConstructor);\nexpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, 
true);\nreturn parseStatementStartWithExprRhs(expr);\n}\n}\n/**\n* Parse a member of a braced-list that occurs at the start of a statement.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartingBracedListFirstMember() {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase READONLY_KEYWORD:\nSTNode readonlyKeyword = parseReadonlyKeyword();\nreturn bracedListMemberStartsWithReadonly(readonlyKeyword);\ncase IDENTIFIER_TOKEN:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseIdentifierRhsInStmtStartingBrace(readonlyKeyword);\ncase STRING_LITERAL:\nSTNode key = parseStringLiteral();\nif (peek().kind == SyntaxKind.COLON_TOKEN) {\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, key, false, true);\nreturn parseStatementStartWithExprRhs(expr);\ncase OPEN_BRACKET_TOKEN:\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStatementStartsWithOpenBracket(annots, true);\ncase OPEN_BRACE_TOKEN:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nreturn parseStatementStartsWithOpenBrace();\ncase ELLIPSIS_TOKEN:\nreturn parseRestBindingPattern();\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nreturn parseStatements();\n}\n}\nprivate STNode bracedListMemberStartsWithReadonly(STNode readonlyKeyword) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseIdentifierRhsInStmtStartingBrace(readonlyKeyword);\ncase STRING_LITERAL:\nif (peek(2).kind == SyntaxKind.COLON_TOKEN) {\nSTNode key = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nstartContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\nSTNode typeDesc = parseComplexTypeDescriptor(readonlyKeyword,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nendContext();\nSTNode metadata = STNodeFactory.createEmptyNode();\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode typedBP = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(metadata, finalKeyword, typedBP, false);\n}\n}\n/**\n* Parse the rhs components of an identifier that follows an open brace,\n* at the start of a statement. 
i.e: \"{foo\".\n*\n* @param readonlyKeyword Readonly keyword\n* @return Parsed node\n*/\nprivate STNode parseIdentifierRhsInStmtStartingBrace(STNode readonlyKeyword) {\nSTNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);\nswitch (peek().kind) {\ncase COMMA_TOKEN:\nSTNode colon = STNodeFactory.createEmptyNode();\nSTNode value = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value);\ncase COLON_TOKEN:\ncolon = parseColon();\nif (!isEmpty(readonlyKeyword)) {\nvalue = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value);\n}\nSyntaxKind nextTokenKind = peek().kind;\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nSTNode bindingPatternOrExpr = parseListBindingPatternOrListConstructor();\nreturn getMappingField(identifier, colon, bindingPatternOrExpr);\ncase OPEN_BRACE_TOKEN:\nbindingPatternOrExpr = parseMappingBindingPatterOrMappingConstructor();\nreturn getMappingField(identifier, colon, bindingPatternOrExpr);\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifierRhsInStmtStartBrace(identifier, colon);\ndefault:\nSTNode expr = parseExpression();\nreturn getMappingField(identifier, colon, expr);\n}\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nif (!isEmpty(readonlyKeyword)) {\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode bindingPattern = STNodeFactory.createCaptureBindingPatternNode(identifier);\nSTNode typedBindingPattern =\nSTNodeFactory.createTypedBindingPatternNode(readonlyKeyword, bindingPattern);\nSTNode metadata = STNodeFactory.createEmptyNode();\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVarDeclRhs(metadata, finalKeyword, typedBindingPattern, false);\n}\nstartContext(ParserRuleContext.AMBIGUOUS_STMT);\nSTNode qualifiedIdentifier = parseQualifiedIdentifier(identifier, false);\nSTNode expr = parseTypedBindingPatternOrExprRhs(qualifiedIdentifier, true);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseStmtStartsWithTypedBPOrExprRhs(annots, expr);\n}\n}\n/**\n* Parse the rhs components of \"{ identifier : identifier\",\n* at the start of a statement. 
i.e: \"{foo:bar\".\n*\n* @return Parsed node\n*/\nprivate STNode parseQualifiedIdentifierRhsInStmtStartBrace(STNode identifier, STNode colon) {\nSTNode secondIdentifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);\nSTNode secondNameRef = STNodeFactory.createSimpleNameReferenceNode(secondIdentifier);\nif (isWildcardBP(secondIdentifier)) {\nreturn getWildcardBindingPattern(secondIdentifier);\n}\nSyntaxKind nextTokenKind = peek().kind;\nSTNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondNameRef);\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn qualifiedNameRef;\ncase OPEN_BRACE_TOKEN:\ncase IDENTIFIER_TOKEN:\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode typeBindingPattern =\nparseTypedBindingPatternTypeRhs(qualifiedNameRef, ParserRuleContext.VAR_DECL_STMT);\nSTNode annots = STNodeFactory.createEmptyNodeList();\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false);\ncase OPEN_BRACKET_TOKEN:\nreturn parseMemberRhsInStmtStartWithBrace(identifier, colon, secondNameRef);\ncase QUESTION_MARK_TOKEN:\nSTNode typeDesc = parseComplexTypeDescriptor(qualifiedNameRef,\nParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\nfinalKeyword = STNodeFactory.createEmptyNode();\ntypeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nannots = STNodeFactory.createEmptyNodeList();\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false);\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn parseStatementStartWithExprRhs(qualifiedNameRef);\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ndefault:\nreturn parseMemberWithExprInRhs(identifier, colon, secondNameRef, secondNameRef);\n}\n}\nprivate SyntaxKind getBracedListType(STNode member) {\nswitch (member.kind) {\ncase FIELD_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BINDING_PATTERN;\ncase SPECIFIC_FIELD:\nSTNode expr = ((STSpecificFieldNode) member).valueExpr;\nif (expr == null) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nswitch (expr.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\ncase FUNCTION_CALL:\nif (isPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) expr)) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ndefault:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\n}\ncase SPREAD_FIELD:\ncase COMPUTED_NAME_FIELD:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ncase REST_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\ncase LIST:\nreturn SyntaxKind.BLOCK_STATEMENT;\ndefault:\nreturn SyntaxKind.NONE;\n}\n}\n/**\n* Parse mapping binding pattern or mapping constructor.\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingBindingPatterOrMappingConstructor() {\nstartContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);\nSTNode openBrace = parseOpenBrace();\nList memberList = new ArrayList<>();\nreturn parseMappingBindingPatternOrMappingConstructor(openBrace, memberList);\n}\nprivate boolean isBracedListEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn 
false;\n}\n}\nprivate STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List memberList) {\nSTToken nextToken = peek();\nwhile (!isBracedListEnd(nextToken.kind)) {\nSTNode member = parseMappingBindingPatterOrMappingConstructorMember(nextToken.kind);\nSyntaxKind currentNodeType = getTypeOfMappingBPOrMappingCons(member);\nswitch (currentNodeType) {\ncase MAPPING_CONSTRUCTOR:\nreturn parseAsMappingConstructor(openBrace, memberList, member);\ncase MAPPING_BINDING_PATTERN:\nreturn parseAsMappingBindingPattern(openBrace, memberList, member);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseMappingFieldEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBrace = parseCloseBrace();\nreturn parseMappingBindingPatternOrMappingConstructor(openBrace, memberList, closeBrace);\n}\nprivate STNode parseMappingBindingPatterOrMappingConstructorMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);\nreturn parseMappingFieldRhs(key);\ncase STRING_LITERAL:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nkey = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseComputedField();\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode expr = parseExpression();\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nreturn STNodeFactory.createRestBindingPatternNode(ellipsis, expr);\n}\nreturn STNodeFactory.createSpreadFieldNode(ellipsis, expr);\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternOrListConstructorMember(solution.tokenKind);\n}\n}\nprivate STNode parseMappingFieldRhs(STNode key) {\nSTToken nextToken = peek();\nreturn parseMappingFieldRhs(nextToken.kind, key);\n}\nprivate STNode parseMappingFieldRhs(SyntaxKind tokenKind, STNode key) {\nSTNode colon;\nSTNode valueExpr;\nswitch (tokenKind) {\ncase COLON_TOKEN:\ncolon = parseColon();\nreturn parseMappingFieldValue(key, colon);\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, key);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseSpecificFieldRhs(solution.tokenKind, readonlyKeyword, key);\n}\n}\nprivate STNode parseMappingFieldValue(STNode key, STNode colon) {\nSTNode expr;\nswitch (peek().kind) {\ncase IDENTIFIER_TOKEN:\nexpr = parseExpression();\nbreak;\ncase OPEN_BRACKET_TOKEN:\nexpr = parseListBindingPatternOrListConstructor();\nbreak;\ncase OPEN_BRACE_TOKEN:\nexpr = parseMappingBindingPatterOrMappingConstructor();\nbreak;\ndefault:\nexpr = parseExpression();\nbreak;\n}\nif (isBindingPattern(expr.kind)) {\nreturn STNodeFactory.createFieldBindingPatternFullNode(key, colon, expr);\n}\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn 
STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, expr);\n}\nprivate boolean isBindingPattern(SyntaxKind kind) {\nswitch (kind) {\ncase FIELD_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn true;\ndefault:\nreturn false;\n}\n}\nprivate SyntaxKind getTypeOfMappingBPOrMappingCons(STNode memberNode) {\nswitch (memberNode.kind) {\ncase FIELD_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.MAPPING_BINDING_PATTERN;\ncase SPECIFIC_FIELD:\nSTNode expr = ((STSpecificFieldNode) memberNode).valueExpr;\nif (expr == null || expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||\nexpr.kind == SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR ||\nexpr.kind == SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR) {\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase SPREAD_FIELD:\ncase COMPUTED_NAME_FIELD:\nreturn SyntaxKind.MAPPING_CONSTRUCTOR;\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase REST_BINDING_PATTERN:\ndefault:\nreturn SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;\n}\n}\nprivate STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List members,\nSTNode closeBrace) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR, openBrace, members,\ncloseBrace);\n}\nprivate STNode parseAsMappingBindingPattern(STNode openBrace, List members, STNode member) {\nmembers.add(member);\nmembers = getBindingPatternsList(members);\nswitchContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\nreturn parseMappingBindingPattern(openBrace, members, member);\n}\n/**\n* Parse list binding pattern or list constructor.\n*\n* @return Parsed node\n*/\nprivate STNode parseListBindingPatternOrListConstructor() {\nstartContext(ParserRuleContext.BRACKETED_LIST);\nSTNode openBracket = parseOpenBracket();\nList memberList = new ArrayList<>();\nreturn parseListBindingPatternOrListConstructor(openBracket, memberList, false);\n}\nprivate STNode parseListBindingPatternOrListConstructor(STNode openBracket, List memberList,\nboolean isRoot) {\nSTToken nextToken = peek();\nwhile (!isBracketedListEnd(nextToken.kind)) {\nSTNode member = parseListBindingPatternOrListConstructorMember(nextToken.kind);\nSyntaxKind currentNodeType = getParsingNodeTypeOfListBPOrListCons(member);\nswitch (currentNodeType) {\ncase LIST_CONSTRUCTOR:\nreturn parseAsListConstructor(openBracket, memberList, member, isRoot);\ncase LIST_BINDING_PATTERN:\nreturn parseAsListBindingPattern(openBracket, memberList, member, isRoot);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ndefault:\nmemberList.add(member);\nbreak;\n}\nSTNode memberEnd = parseBracketedListMemberEnd();\nif (memberEnd == null) {\nbreak;\n}\nmemberList.add(memberEnd);\nnextToken = peek();\n}\nSTNode closeBracket = parseCloseBracket();\nreturn parseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot);\n}\nprivate STNode parseListBindingPatternOrListConstructorMember(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACKET_TOKEN:\nreturn parseListBindingPatternOrListConstructor();\ncase IDENTIFIER_TOKEN:\nSTNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\nif (isWildcardBP(identifier)) {\nreturn 
getWildcardBindingPattern(identifier);\n}\nreturn parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingBindingPatterOrMappingConstructor();\ncase ELLIPSIS_TOKEN:\nreturn parseListBindingPatternMember();\ndefault:\nif (isValidExpressionStart(nextTokenKind, 1)) {\nreturn parseExpression();\n}\nSolution solution = recover(peek(), ParserRuleContext.LIST_BP_OR_LIST_CONSTRUCTOR_MEMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseListBindingPatternOrListConstructorMember(solution.tokenKind);\n}\n}\nprivate SyntaxKind getParsingNodeTypeOfListBPOrListCons(STNode memberNode) {\nswitch (memberNode.kind) {\ncase CAPTURE_BINDING_PATTERN:\ncase LIST_BINDING_PATTERN:\ncase REST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase WILDCARD_BINDING_PATTERN:\nreturn SyntaxKind.LIST_BINDING_PATTERN;\ncase SIMPLE_NAME_REFERENCE:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\nreturn SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR;\ndefault:\nreturn SyntaxKind.LIST_CONSTRUCTOR;\n}\n}\nprivate STNode parseAsListConstructor(STNode openBracket, List memberList, STNode member, boolean isRoot) {\nmemberList.add(member);\nmemberList = getExpressionList(memberList);\nswitchContext(ParserRuleContext.LIST_CONSTRUCTOR);\nSTNode expressions = parseOptionalExpressionsList(memberList);\nSTNode closeBracket = parseCloseBracket();\nSTNode listConstructor =\nSTNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);\nendContext();\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, listConstructor, false, false);\nif (!isRoot) {\nreturn expr;\n}\nreturn parseStatementStartWithExprRhs(expr);\n}\nprivate STNode parseListBindingPatternOrListConstructor(STNode openBracket, List members,\nSTNode closeBracket, boolean isRoot) {\nSTNode lbpOrListCons;\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\nif (!isRoot) {\nendContext();\nreturn new STAmbiguousCollectionNode(SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR, openBracket, members,\ncloseBracket);\n}\ndefault:\nif (isValidExprRhsStart(peek().kind, closeBracket.kind)) {\nmembers = getExpressionList(members);\nSTNode memberExpressions = STNodeFactory.createNodeList(members);\nlbpOrListCons = STNodeFactory.createListConstructorExpressionNode(openBracket, memberExpressions,\ncloseBracket);\nbreak;\n}\nmembers = getBindingPatternsList(members);\nSTNode bindingPatternsNode = STNodeFactory.createNodeList(members);\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nlbpOrListCons = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode,\nrestBindingPattern, closeBracket);\nbreak;\n}\nendContext();\nif (!isRoot) {\nreturn lbpOrListCons;\n}\nreturn parseStmtStartsWithTypedBPOrExprRhs(null, lbpOrListCons);\n}\nprivate STNode parseMemberRhsInStmtStartWithBrace(STNode identifier, STNode colon, STNode secondIdentifier) {\nSTNode typedBPOrExpr =\nparseTypedBindingPatternOrMemberAccess(secondIdentifier, false, true, ParserRuleContext.AMBIGUOUS_STMT);\nif (isExpression(typedBPOrExpr.kind)) {\nreturn parseMemberWithExprInRhs(identifier, colon, secondIdentifier, typedBPOrExpr);\n}\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nSTNode annots = STNodeFactory.createEmptyNode();\nSTNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, 
secondIdentifier);\nSTNode typeDesc = mergeQualifiedNameWithTypeDesc(qualifiedNameRef,\n((STTypedBindingPatternNode) typedBPOrExpr).typeDescriptor);\nreturn parseVarDeclRhs(annots, finalKeyword, typeDesc, false);\n}\n/**\n* Parse a member that starts with \"foo:bar[\", in a statement starting with a brace.\n*\n* @param identifier First identifier of the statement\n* @param colon Colon that follows the first identifier\n* @param secondIdentifier Identifier that follows the colon\n* @param memberAccessExpr Member access expression\n* @return Parsed node\n*/\nprivate STNode parseMemberWithExprInRhs(STNode identifier, STNode colon, STNode secondIdentifier,\nSTNode memberAccessExpr) {\nSTNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, true);\nswitch (peek().kind) {\ncase COMMA_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nswitchContext(ParserRuleContext.EXPRESSION_STATEMENT);\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, expr);\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\ndefault:\nswitchContext(ParserRuleContext.BLOCK_STMT);\nstartContext(ParserRuleContext.EXPRESSION_STATEMENT);\nSTNode qualifiedName =\nSTNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier);\nSTNode updatedExpr = mergeQualifiedNameWithExpr(qualifiedName, expr);\nreturn parseStatementStartWithExprRhs(updatedExpr);\n}\n}\n/**\n* Replace the first identifier of an expression, with a given qualified-identifier.\n* Only expressions that can start with \"bar[..]\" can reach here.\n*\n* @param qualifiedName Qualified identifier to replace simple identifier\n* @param exprOrAction Expression or action\n* @return Updated expression\n*/\nprivate STNode mergeQualifiedNameWithExpr(STNode qualifiedName, STNode exprOrAction) {\nswitch (exprOrAction.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn qualifiedName;\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) exprOrAction;\nSTNode newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, binaryExpr.lhsExpr);\nreturn STNodeFactory.createBinaryExpressionNode(binaryExpr.kind, newLhsExpr, binaryExpr.operator,\nbinaryExpr.rhsExpr);\ncase FIELD_ACCESS:\nSTFieldAccessExpressionNode fieldAccess = (STFieldAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, fieldAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, fieldAccess.dotToken,\nfieldAccess.fieldName);\ncase INDEXED_EXPRESSION:\nSTIndexedExpressionNode memberAccess = (STIndexedExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, memberAccess.containerExpression);\nreturn STNodeFactory.createIndexedExpressionNode(newLhsExpr, memberAccess.openBracket,\nmemberAccess.keyExpression, memberAccess.closeBracket);\ncase TYPE_TEST_EXPRESSION:\nSTTypeTestExpressionNode typeTest = (STTypeTestExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, typeTest.expression);\nreturn STNodeFactory.createTypeTestExpressionNode(newLhsExpr, typeTest.isKeyword,\ntypeTest.typeDescriptor);\ncase ANNOT_ACCESS:\nSTAnnotAccessExpressionNode annotAccess = (STAnnotAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, annotAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, annotAccess.annotChainingToken,\nannotAccess.annotTagReference);\ncase 
OPTIONAL_FIELD_ACCESS:\nSTOptionalFieldAccessExpressionNode optionalFieldAccess =\n(STOptionalFieldAccessExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, optionalFieldAccess.expression);\nreturn STNodeFactory.createFieldAccessExpressionNode(newLhsExpr,\noptionalFieldAccess.optionalChainingToken, optionalFieldAccess.fieldName);\ncase CONDITIONAL_EXPRESSION:\nSTConditionalExpressionNode conditionalExpr = (STConditionalExpressionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, conditionalExpr.lhsExpression);\nreturn STNodeFactory.createConditionalExpressionNode(newLhsExpr, conditionalExpr.questionMarkToken,\nconditionalExpr.middleExpression, conditionalExpr.colonToken, conditionalExpr.endExpression);\ncase REMOTE_METHOD_CALL_ACTION:\nSTRemoteMethodCallActionNode remoteCall = (STRemoteMethodCallActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, remoteCall.expression);\nreturn STNodeFactory.createRemoteMethodCallActionNode(newLhsExpr, remoteCall.rightArrowToken,\nremoteCall.methodName, remoteCall.openParenToken, remoteCall.arguments,\nremoteCall.closeParenToken);\ncase ASYNC_SEND_ACTION:\nSTAsyncSendActionNode asyncSend = (STAsyncSendActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, asyncSend.expression);\nreturn STNodeFactory.createAsyncSendActionNode(newLhsExpr, asyncSend.rightArrowToken,\nasyncSend.peerWorker);\ncase SYNC_SEND_ACTION:\nSTSyncSendActionNode syncSend = (STSyncSendActionNode) exprOrAction;\nnewLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, syncSend.expression);\nreturn STNodeFactory.createSyncSendActionNode(newLhsExpr, syncSend.syncSendToken, syncSend.peerWorker);\ndefault:\nreturn exprOrAction;\n}\n}\nprivate STNode mergeQualifiedNameWithTypeDesc(STNode qualifiedName, STNode typeDesc) {\nswitch (typeDesc.kind) {\ncase SIMPLE_NAME_REFERENCE:\nreturn qualifiedName;\ncase ARRAY_TYPE_DESC:\nSTArrayTypeDescriptorNode arrayTypeDesc = (STArrayTypeDescriptorNode) typeDesc;\nSTNode newMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, arrayTypeDesc.memberTypeDesc);\nreturn STNodeFactory.createArrayTypeDescriptorNode(newMemberType, arrayTypeDesc.openBracket,\narrayTypeDesc.arrayLength, arrayTypeDesc.closeBracket);\ncase UNION_TYPE_DESC:\nSTUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDesc;\nSTNode newlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, unionTypeDesc.leftTypeDesc);\nreturn STNodeFactory.createUnionTypeDescriptorNode(newlhsType, unionTypeDesc.pipeToken,\nunionTypeDesc.rightTypeDesc);\ncase INTERSECTION_TYPE_DESC:\nSTIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDesc;\nnewlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, intersectionTypeDesc.leftTypeDesc);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(newlhsType, intersectionTypeDesc.bitwiseAndToken,\nintersectionTypeDesc.rightTypeDesc);\ncase OPTIONAL_TYPE_DESC:\nSTOptionalTypeDescriptorNode optionalType = (STOptionalTypeDescriptorNode) typeDesc;\nnewMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, optionalType.typeDescriptor);\nreturn STNodeFactory.createOptionalTypeDescriptorNode(newMemberType, optionalType.questionMarkToken);\ndefault:\nreturn typeDesc;\n}\n}\nprivate List getTypeDescList(List ambiguousList) {\nList typeDescList = new ArrayList<>();\nfor (STNode item : ambiguousList) {\ntypeDescList.add(getTypeDescFromExpr(item));\n}\nreturn typeDescList;\n}\n/**\n* Create a 
type-desc out of an expression.\n*\n* @param expression Expression\n* @return Type descriptor\n*/\nprivate STNode getTypeDescFromExpr(STNode expression) {\nswitch (expression.kind) {\ncase INDEXED_EXPRESSION:\nreturn parseArrayTypeDescriptorNode((STIndexedExpressionNode) expression);\ncase BASIC_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn STNodeFactory.createSingletonTypeDescriptorNode(expression);\ncase TYPE_REFERENCE_TYPE_DESC:\nreturn ((STTypeReferenceTypeDescNode) expression).typeRef;\ncase BRACED_EXPRESSION:\nSTBracedExpressionNode bracedExpr = (STBracedExpressionNode) expression;\nSTNode typeDesc = getTypeDescFromExpr(bracedExpr.expression);\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(bracedExpr.openParen, typeDesc,\nbracedExpr.closeParen);\ncase NIL_LITERAL:\nSTNilLiteralNode nilLiteral = (STNilLiteralNode) expression;\nreturn STNodeFactory.createNilTypeDescriptorNode(nilLiteral.openParenToken, nilLiteral.closeParenToken);\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) expression;\nSTNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(innerList.members));\nreturn STNodeFactory.createTupleTypeDescriptorNode(innerList.collectionStartToken, memberTypeDescs,\ninnerList.collectionEndToken);\ncase BINARY_EXPRESSION:\nSTBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) expression;\nswitch (binaryExpr.operator.kind) {\ncase PIPE_TOKEN:\nSTNode lhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr);\nSTNode rhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr);\nreturn STNodeFactory.createUnionTypeDescriptorNode(lhsTypeDesc, binaryExpr.operator,\nrhsTypeDesc);\ncase BITWISE_AND_TOKEN:\nlhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr);\nrhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr);\nreturn STNodeFactory.createIntersectionTypeDescriptorNode(lhsTypeDesc, binaryExpr.operator,\nrhsTypeDesc);\ndefault:\nbreak;\n}\nreturn expression;\ncase UNARY_EXPRESSION:\nreturn STNodeFactory.createSingletonTypeDescriptorNode(expression);\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ndefault:\nreturn expression;\n}\n}\nprivate List getBindingPatternsList(List ambibuousList) {\nList bindingPatterns = new ArrayList();\nfor (STNode item : ambibuousList) {\nbindingPatterns.add(getBindingPattern(item));\n}\nreturn bindingPatterns;\n}\nprivate STNode getBindingPattern(STNode ambiguousNode) {\nif (isEmpty(ambiguousNode)) {\nreturn ambiguousNode;\n}\nswitch (ambiguousNode.kind) {\ncase SIMPLE_NAME_REFERENCE:\nSTNode varName = ((STSimpleNameReferenceNode) ambiguousNode).name;\nreturn createCaptureOrWildcardBP(varName);\ncase QUALIFIED_NAME_REFERENCE:\nSTQualifiedNameReferenceNode qualifiedName = (STQualifiedNameReferenceNode) ambiguousNode;\nSTNode fieldName = STNodeFactory.createSimpleNameReferenceNode(qualifiedName.modulePrefix);\nreturn STNodeFactory.createFieldBindingPatternFullNode(fieldName, qualifiedName.colon,\ngetBindingPattern(qualifiedName.identifier));\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode;\nSTNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(innerList.members));\nSTNode restBindingPattern = STNodeFactory.createEmptyNode();\nreturn 
STNodeFactory.createListBindingPatternNode(innerList.collectionStartToken, memberBindingPatterns,\nrestBindingPattern, innerList.collectionEndToken);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ninnerList = (STAmbiguousCollectionNode) ambiguousNode;\nList bindingPatterns = new ArrayList<>();\nrestBindingPattern = STNodeFactory.createEmptyNode();\nfor (int i = 0; i < innerList.members.size(); i++) {\nSTNode bp = getBindingPattern(innerList.members.get(i));\nif (bp.kind == SyntaxKind.REST_BINDING_PATTERN) {\nrestBindingPattern = bp;\nbreak;\n}\nbindingPatterns.add(bp);\n}\nmemberBindingPatterns = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createMappingBindingPatternNode(innerList.collectionStartToken,\nmemberBindingPatterns, restBindingPattern, innerList.collectionEndToken);\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode;\nfieldName = STNodeFactory.createSimpleNameReferenceNode(field.fieldName);\nif (field.valueExpr == null) {\nreturn STNodeFactory.createFieldBindingPatternVarnameNode(fieldName);\n}\nreturn STNodeFactory.createFieldBindingPatternFullNode(fieldName, field.colon,\ngetBindingPattern(field.valueExpr));\ncase FUNCTION_CALL:\nSTFunctionCallExpressionNode funcCall = (STFunctionCallExpressionNode) ambiguousNode;\nSTNode args = funcCall.arguments;\nint size = args.bucketCount();\nbindingPatterns = new ArrayList<>();\nfor (int i = 0; i < size; i++) {\nSTNode arg = args.childInBucket(i);\nbindingPatterns.add(getBindingPattern(arg));\n}\nSTNode argListBindingPatterns = STNodeFactory.createNodeList(bindingPatterns);\nreturn STNodeFactory.createFunctionalBindingPatternNode(funcCall.functionName, funcCall.openParenToken,\nargListBindingPatterns, funcCall.closeParenToken);\ncase POSITIONAL_ARG:\nSTPositionalArgumentNode positionalArg = (STPositionalArgumentNode) ambiguousNode;\nreturn getBindingPattern(positionalArg.expression);\ncase NAMED_ARG:\nSTNamedArgumentNode namedArg = (STNamedArgumentNode) ambiguousNode;\nreturn STNodeFactory.createNamedArgBindingPatternNode(namedArg.argumentName, namedArg.equalsToken,\ngetBindingPattern(namedArg.expression));\ncase REST_ARG:\nSTRestArgumentNode restArg = (STRestArgumentNode) ambiguousNode;\nreturn STNodeFactory.createRestBindingPatternNode(restArg.ellipsis, restArg.expression);\ndefault:\nreturn ambiguousNode;\n}\n}\nprivate List getExpressionList(List ambibuousList) {\nList exprList = new ArrayList();\nfor (STNode item : ambibuousList) {\nexprList.add(getExpression(item));\n}\nreturn exprList;\n}\nprivate STNode getExpression(STNode ambiguousNode) {\nif (isEmpty(ambiguousNode)) {\nreturn ambiguousNode;\n}\nswitch (ambiguousNode.kind) {\ncase BRACKETED_LIST:\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\nSTAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode;\nSTNode memberExprs = STNodeFactory.createNodeList(getExpressionList(innerList.members));\nreturn STNodeFactory.createListConstructorExpressionNode(innerList.collectionStartToken, memberExprs,\ninnerList.collectionEndToken);\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ninnerList = (STAmbiguousCollectionNode) ambiguousNode;\nmemberExprs = STNodeFactory.createNodeList(getExpressionList(innerList.members));\nreturn STNodeFactory.createMappingConstructorExpressionNode(innerList.collectionStartToken, memberExprs,\ninnerList.collectionEndToken);\ncase REST_BINDING_PATTERN:\nSTRestBindingPatternNode restBindingPattern = (STRestBindingPatternNode) ambiguousNode;\nreturn 
STNodeFactory.createSpreadFieldNode(restBindingPattern.ellipsisToken,\nrestBindingPattern.variableName);\ncase SPECIFIC_FIELD:\nSTSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode;\nreturn STNodeFactory.createSpecificFieldNode(field.readonlyKeyword, field.fieldName, field.colon,\ngetExpression(field.valueExpr));\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ndefault:\nreturn ambiguousNode;\n}\n}\nprivate STNode getMappingField(STNode identifier, STNode colon, STNode bindingPatternOrExpr) {\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nswitch (bindingPatternOrExpr.kind) {\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\nreturn STNodeFactory.createFieldBindingPatternFullNode(simpleNameRef, colon, bindingPatternOrExpr);\ncase LIST_CONSTRUCTOR:\ncase MAPPING_CONSTRUCTOR:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, simpleNameRef, colon, bindingPatternOrExpr);\ncase LIST_BP_OR_LIST_CONSTRUCTOR:\ncase MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\ndefault:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, bindingPatternOrExpr);\n}\n}\n}", "context_before": "class BallerinaParser extends AbstractParser {\nprivate static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.DEFAULT;\nprotected BallerinaParser(AbstractTokenReader tokenReader) {\nsuper(tokenReader, new BallerinaParserErrorHandler(tokenReader));\n}\n/**\n* Start parsing the given input.\n*\n* @return Parsed node\n*/\n@Override\npublic STNode parse() {\nreturn parseCompUnit();\n}\n/**\n* Start parsing the input from a given context. Supported starting points are:\n*
 1) Module part (a file)\n* 2) Top level node\n* 3) Statement\n* 4) Expression
\n*\n* @param context Context to start parsing\n* @return Parsed node\n*/\npublic STNode parse(ParserRuleContext context) {\nswitch (context) {\ncase COMP_UNIT:\nreturn parseCompUnit();\ncase TOP_LEVEL_NODE:\nstartContext(ParserRuleContext.COMP_UNIT);\nreturn parseTopLevelNode();\ncase STATEMENT:\nstartContext(ParserRuleContext.COMP_UNIT);\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nreturn parseStatement();\ncase EXPRESSION:\nstartContext(ParserRuleContext.COMP_UNIT);\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nstartContext(ParserRuleContext.STATEMENT);\nreturn parseExpression();\ndefault:\nthrow new UnsupportedOperationException(\"Cannot start parsing from: \" + context);\n}\n}\n/**\n* Resume the parsing from the given context.\n*\n* @param context Context to resume parsing\n* @param args Arguments that requires to continue parsing from the given parser context\n* @return Parsed node\n*/\n@Override\npublic STNode resumeParsing(ParserRuleContext context, Object... args) {\nswitch (context) {\ncase FUNC_BODY:\nreturn parseFunctionBody((boolean) args[0]);\ncase OPEN_BRACE:\nreturn parseOpenBrace();\ncase CLOSE_BRACE:\nreturn parseCloseBrace();\ncase FUNC_NAME:\nreturn parseFunctionName();\ncase OPEN_PARENTHESIS:\ncase ARG_LIST_START:\nreturn parseOpenParenthesis(context);\ncase SIMPLE_TYPE_DESCRIPTOR:\nreturn parseSimpleTypeDescriptor();\ncase ASSIGN_OP:\nreturn parseAssignOp();\ncase EXTERNAL_KEYWORD:\nreturn parseExternalKeyword();\ncase SEMICOLON:\nreturn parseSemicolon();\ncase CLOSE_PARENTHESIS:\nreturn parseCloseParenthesis();\ncase VARIABLE_NAME:\nreturn parseVariableName();\ncase TERMINAL_EXPRESSION:\nreturn parseTerminalExpression((STNode) args[0], (boolean) args[1], (boolean) args[2],\n(boolean) args[3]);\ncase STATEMENT:\nreturn parseStatement();\ncase STATEMENT_WITHOUT_ANNOTS:\nreturn parseStatement((STNode) args[0]);\ncase EXPRESSION_RHS:\nreturn parseExpressionRhs((OperatorPrecedence) args[0], (STNode) args[1], (boolean) args[2],\n(boolean) args[3], (boolean) args[4], (boolean) args[5]);\ncase PARAMETER_START:\nreturn parseParameter((SyntaxKind) args[0], (STNode) args[1], (int) args[2], (boolean) args[3]);\ncase PARAMETER_WITHOUT_ANNOTS:\nreturn parseParamGivenAnnots((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (int) args[3],\n(boolean) args[4]);\ncase AFTER_PARAMETER_TYPE:\nreturn parseAfterParamType((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4], (boolean) args[5]);\ncase PARAMETER_NAME_RHS:\nreturn parseParameterRhs((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4], (STNode) args[5]);\ncase TOP_LEVEL_NODE:\nreturn parseTopLevelNode();\ncase TOP_LEVEL_NODE_WITHOUT_METADATA:\nreturn parseTopLevelNode((STNode) args[0]);\ncase TOP_LEVEL_NODE_WITHOUT_MODIFIER:\nreturn parseTopLevelNode((STNode) args[0], (STNode) args[1]);\ncase TYPE_NAME_OR_VAR_NAME:\ncase RECORD_FIELD_NAME_OR_TYPE_NAME:\ncase TYPE_REFERENCE:\ncase ANNOT_REFERENCE:\ncase FIELD_ACCESS_IDENTIFIER:\nreturn parseQualifiedIdentifier(context, (boolean) args[0]);\ncase VAR_DECL_STMT_RHS:\nreturn parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (boolean) args[3]);\ncase FIELD_DESCRIPTOR_RHS:\nreturn parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]);\ncase RECORD_BODY_START:\nreturn parseRecordBodyStartDelimiter();\ncase TYPE_DESCRIPTOR:\nreturn parseTypeDescriptorInternal((ParserRuleContext) args[0], (boolean) args[1]);\ncase 
OBJECT_MEMBER_START:\nreturn parseObjectMember();\ncase OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:\nreturn parseObjectMethodOrField((STNode) args[0], (STNode) args[1]);\ncase OBJECT_FIELD_RHS:\nreturn parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4]);\ncase OBJECT_TYPE_QUALIFIER:\nreturn parseObjectTypeQualifiers();\ncase OBJECT_KEYWORD:\nreturn parseObjectKeyword();\ncase TYPE_NAME:\nreturn parseTypeName();\ncase IF_KEYWORD:\nreturn parseIfKeyword();\ncase ELSE_KEYWORD:\nreturn parseElseKeyword();\ncase ELSE_BODY:\nreturn parseElseBody();\ncase WHILE_KEYWORD:\nreturn parseWhileKeyword();\ncase PANIC_KEYWORD:\nreturn parsePanicKeyword();\ncase IMPORT_DECL_RHS:\nreturn parseImportDecl((STNode) args[0], (STNode) args[1]);\ncase IMPORT_PREFIX:\nreturn parseImportPrefix();\ncase IMPORT_MODULE_NAME:\ncase IMPORT_ORG_OR_MODULE_NAME:\ncase VARIABLE_REF:\ncase SERVICE_NAME:\ncase IMPLICIT_ANON_FUNC_PARAM:\ncase MAPPING_FIELD_NAME:\ncase RECEIVE_FIELD_NAME:\ncase MODULE_ENUM_NAME:\ncase ENUM_MEMBER_NAME:\nreturn parseIdentifier(context);\ncase IMPORT_KEYWORD:\nreturn parseImportKeyword();\ncase SLASH:\nreturn parseSlashToken();\ncase DOT:\nreturn parseDotToken();\ncase IMPORT_VERSION_DECL:\nreturn parseVersion();\ncase VERSION_KEYWORD:\nreturn parseVersionKeyword();\ncase VERSION_NUMBER:\nreturn parseVersionNumber();\ncase DECIMAL_INTEGER_LITERAL:\ncase MAJOR_VERSION:\ncase MINOR_VERSION:\ncase PATCH_VERSION:\nreturn parseDecimalIntLiteral(context);\ncase IMPORT_SUB_VERSION:\nreturn parseSubVersion(context);\ncase IMPORT_PREFIX_DECL:\nreturn parseImportPrefixDecl();\ncase AS_KEYWORD:\nreturn parseAsKeyword();\ncase CONTINUE_KEYWORD:\nreturn parseContinueKeyword();\ncase BREAK_KEYWORD:\nreturn parseBreakKeyword();\ncase RETURN_KEYWORD:\nreturn parseReturnKeyword();\ncase MAPPING_FIELD:\ncase FIRST_MAPPING_FIELD:\nreturn parseMappingField((ParserRuleContext) args[0]);\ncase SPECIFIC_FIELD_RHS:\nreturn parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]);\ncase STRING_LITERAL:\nreturn parseStringLiteral();\ncase COLON:\nreturn parseColon();\ncase OPEN_BRACKET:\nreturn parseOpenBracket();\ncase RESOURCE_DEF:\nreturn parseResource();\ncase OPTIONAL_SERVICE_NAME:\nreturn parseServiceName();\ncase SERVICE_KEYWORD:\nreturn parseServiceKeyword();\ncase ON_KEYWORD:\nreturn parseOnKeyword();\ncase RESOURCE_KEYWORD:\nreturn parseResourceKeyword();\ncase LISTENER_KEYWORD:\nreturn parseListenerKeyword();\ncase NIL_TYPE_DESCRIPTOR:\nreturn parseNilTypeDescriptor();\ncase COMPOUND_ASSIGNMENT_STMT:\nreturn parseCompoundAssignmentStmt();\ncase TYPEOF_KEYWORD:\nreturn parseTypeofKeyword();\ncase ARRAY_LENGTH:\nreturn parseArrayLength();\ncase IS_KEYWORD:\nreturn parseIsKeyword();\ncase STMT_START_WITH_EXPR_RHS:\nreturn parseStatementStartWithExprRhs((STNode) args[0]);\ncase COMMA:\nreturn parseComma();\ncase CONST_DECL_TYPE:\nreturn parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]);\ncase BINDING_PATTERN_OR_EXPR_RHS:\nreturn parseTypedBindingPatternOrExprRhs((STNode) args[0], (boolean) args[1]);\ncase LT:\nreturn parseLTToken();\ncase GT:\nreturn parseGTToken();\ncase RECORD_FIELD_OR_RECORD_END:\nreturn parseFieldOrRestDescriptor((boolean) args[0]);\ncase ANNOTATION_KEYWORD:\nreturn parseAnnotationKeyword();\ncase ANNOT_DECL_OPTIONAL_TYPE:\nreturn parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3]);\ncase ANNOT_DECL_RHS:\nreturn parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], 
(STNode) args[2], (STNode) args[3],\n(STNode) args[4]);\ncase ANNOT_OPTIONAL_ATTACH_POINTS:\nreturn parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (STNode) args[4], (STNode) args[5]);\ncase SOURCE_KEYWORD:\nreturn parseSourceKeyword();\ncase ATTACH_POINT_IDENT:\nreturn parseAttachPointIdent((STNode) args[0]);\ncase IDENT_AFTER_OBJECT_IDENT:\nreturn parseIdentAfterObjectIdent();\ncase FUNCTION_IDENT:\nreturn parseFunctionIdent();\ncase FIELD_IDENT:\nreturn parseFieldIdent();\ncase ATTACH_POINT_END:\nreturn parseAttachPointEnd();\ncase XMLNS_KEYWORD:\nreturn parseXMLNSKeyword();\ncase XML_NAMESPACE_PREFIX_DECL:\nreturn parseXMLDeclRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase NAMESPACE_PREFIX:\nreturn parseNamespacePrefix();\ncase WORKER_KEYWORD:\nreturn parseWorkerKeyword();\ncase WORKER_NAME:\nreturn parseWorkerName();\ncase FORK_KEYWORD:\nreturn parseForkKeyword();\ncase TRAP_KEYWORD:\nreturn parseTrapKeyword();\ncase IN_KEYWORD:\nreturn parseInKeyword();\ncase FOREACH_KEYWORD:\nreturn parseForEachKeyword();\ncase TABLE_KEYWORD:\nreturn parseTableKeyword();\ncase KEY_KEYWORD:\nreturn parseKeyKeyword();\ncase TABLE_KEYWORD_RHS:\nreturn parseTableConstructorOrQuery((STNode) args[0], (boolean) args[1]);\ncase ERROR_KEYWORD:\nreturn parseErrorKeyword();\ncase LET_KEYWORD:\nreturn parseLetKeyword();\ncase STREAM_KEYWORD:\nreturn parseStreamKeyword();\ncase STREAM_TYPE_FIRST_PARAM_RHS:\nreturn parseStreamTypeParamsNode((STNode) args[0], (STNode) args[1]);\ncase TEMPLATE_START:\ncase TEMPLATE_END:\nreturn parseBacktickToken(context);\ncase KEY_CONSTRAINTS_RHS:\nreturn parseKeyConstraint((STNode) args[0]);\ncase FUNCTION_KEYWORD_RHS:\nreturn parseFunctionKeywordRhs((STNode) args[0], (STNode) args[1], (boolean) args[2], (boolean) args[3],\n(STNode[]) args[4]);\ncase RETURNS_KEYWORD:\nreturn parseReturnsKeyword();\ncase NEW_KEYWORD:\nreturn parseNewKeyword();\ncase FROM_KEYWORD:\nreturn parseFromKeyword();\ncase WHERE_KEYWORD:\nreturn parseWhereKeyword();\ncase SELECT_KEYWORD:\nreturn parseSelectKeyword();\ncase TABLE_CONSTRUCTOR_OR_QUERY_START:\nreturn parseTableConstructorOrQuery((boolean) args[0]);\ncase TABLE_CONSTRUCTOR_OR_QUERY_RHS:\nreturn parseTableConstructorOrQueryRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase QUERY_PIPELINE_RHS:\nreturn parseIntermediateClause((boolean) args[0]);\ncase ANON_FUNC_BODY:\nreturn parseAnonFuncBody((boolean) args[0]);\ncase CLOSE_BRACKET:\nreturn parseCloseBracket();\ncase ARG_START_OR_ARG_LIST_END:\nreturn parseArgument();\ncase ARG_END:\nreturn parseArgEnd();\ncase MAPPING_FIELD_END:\nreturn parseMappingFieldEnd();\ncase FUNCTION_KEYWORD:\nreturn parseFunctionKeyword();\ncase FIELD_OR_REST_DESCIPTOR_RHS:\nreturn parseFieldOrRestDescriptorRhs((STNode) args[0], (STNode) args[1]);\ncase TYPE_DESC_IN_TUPLE_RHS:\nreturn parseTupleMemberRhs();\ncase LIST_BINDING_PATTERN_MEMBER_END:\nreturn parseListBindingPatternMemberRhs();\ncase MAPPING_BINDING_PATTERN_END:\nreturn parseMappingBindingPatternEnd();\ncase FIELD_BINDING_PATTERN_NAME:\nreturn parseFieldBindingPattern();\ncase CONSTANT_EXPRESSION_START:\nreturn parseSimpleConstExprInternal();\ncase LIST_CONSTRUCTOR_MEMBER_END:\nreturn parseListConstructorMemberEnd();\ncase NIL_OR_PARENTHESISED_TYPE_DESC_RHS:\nreturn parseNilOrParenthesisedTypeDescRhs((STNode) args[0]);\ncase ANON_FUNC_PARAM_RHS:\nreturn parseImplicitAnonFuncParamEnd();\ncase LIST_BINDING_PATTERN:\nreturn parseListBindingPattern();\ncase 
BINDING_PATTERN:\nreturn parseBindingPattern();\ncase PEER_WORKER_NAME:\nreturn parsePeerWorkerName();\ncase SYNC_SEND_TOKEN:\nreturn parseSyncSendToken();\ncase LEFT_ARROW_TOKEN:\nreturn parseLeftArrowToken();\ncase RECEIVE_WORKERS:\nreturn parseReceiveWorkers();\ncase WAIT_KEYWORD:\nreturn parseWaitKeyword();\ncase WAIT_FUTURE_EXPR_END:\nreturn parseWaitFutureExprEnd((int) args[0]);\ncase WAIT_FIELD_NAME:\nreturn parseWaitField();\ncase WAIT_FIELD_END:\nreturn parseWaitFieldEnd();\ncase ANNOT_CHAINING_TOKEN:\nreturn parseAnnotChainingToken();\ncase DO_KEYWORD:\nreturn parseDoKeyword();\ncase MEMBER_ACCESS_KEY_EXPR_END:\nreturn parseMemberAccessKeyExprEnd();\ncase OPTIONAL_CHAINING_TOKEN:\nreturn parseOptionalChainingToken();\ncase RETRY_KEYWORD_RHS:\nreturn parseRetryKeywordRhs((STNode) args[0]);\ncase RETRY_TYPE_PARAM_RHS:\nreturn parseRetryTypeParamRhs((STNode) args[0], (STNode) args[1]);\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionKeyword();\ncase COMMIT_KEYWORD:\nreturn parseCommitKeyword();\ncase RETRY_KEYWORD:\nreturn parseRetryKeyword();\ncase ROLLBACK_KEYWORD:\nreturn parseRollbackKeyword();\ncase RETRY_BODY:\nreturn parseRetryBody();\ncase ENUM_MEMBER_END:\nreturn parseEnumMemberEnd();\ncase BRACKETED_LIST_MEMBER_END:\nreturn parseBracketedListMemberEnd();\ncase STMT_START_BRACKETED_LIST_MEMBER:\nreturn parseStatementStartBracketedListMember();\ncase TYPED_BINDING_PATTERN_TYPE_RHS:\nreturn parseTypedBindingPatternTypeRhs((STNode) args[0], (ParserRuleContext) args[1],\n(boolean) args[2]);\ncase BRACKETED_LIST_RHS:\nreturn parseTypedBindingPatternOrMemberAccessRhs((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (boolean) args[4], (boolean) args[5], (ParserRuleContext) args[6]);\ncase UNION_OR_INTERSECTION_TOKEN:\nreturn parseUnionOrIntersectionToken();\ncase BRACKETED_LIST_MEMBER:\ncase LIST_BINDING_MEMBER_OR_ARRAY_LENGTH:\nreturn parseBracketedListMember((boolean) args[0]);\ncase BASE16_KEYWORD:\nreturn parseBase16Keyword();\ncase BASE64_KEYWORD:\nreturn parseBase64Keyword();\ncase DOT_LT_TOKEN:\nreturn parseDotLTToken();\ncase SLASH_LT_TOKEN:\nreturn parseSlashLTToken();\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\nreturn parseDoubleSlashDoubleAsteriskLTToken();\ncase XML_ATOMIC_NAME_PATTERN_START:\nreturn parseXMLAtomicNamePatternBody();\ncase BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS:\nreturn parseBracedExprOrAnonFuncParamRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase READONLY_KEYWORD:\nreturn parseReadonlyKeyword();\ncase SPECIFIC_FIELD:\nreturn parseSpecificField((STNode) args[0]);\ncase OPTIONAL_MATCH_GUARD:\nreturn parseMatchGuard();\ncase MATCH_PATTERN_START:\nreturn parseMatchPattern();\ncase MATCH_PATTERN_RHS:\nreturn parseMatchPatternEnd();\ncase ENUM_MEMBER_RHS:\nreturn parseEnumMemberRhs((STNode) args[0], (STNode) args[1]);\ncase RECEIVE_FIELD:\nreturn parseReceiveField();\ncase PUBLIC_KEYWORD:\nreturn parseQualifier();\ncase PARAM_END:\nreturn parseParameterRhs();\ncase ELLIPSIS:\nreturn parseEllipsis();\ncase BINARY_OPERATOR:\nreturn parseBinaryOperator();\ncase TYPE_KEYWORD:\nreturn parseTypeKeyword();\ncase CLOSED_RECORD_BODY_START:\nreturn parseClosedRecordBodyStart();\ncase CLOSED_RECORD_BODY_END:\nreturn parseClosedRecordBodyEnd();\ncase QUESTION_MARK:\nreturn parseQuestionMark();\ncase FINAL_KEYWORD:\nreturn parseFinalKeyword();\ncase CLIENT_KEYWORD:\nreturn parseClientKeyword();\ncase ABSTRACT_KEYWORD:\nreturn parseAbstractKeyword();\ncase REMOTE_KEYWORD:\nreturn parseRemoteKeyword();\ncase 
CHECKING_KEYWORD:\nreturn parseCheckingKeyword();\ncase COMPOUND_BINARY_OPERATOR:\nreturn parseCompoundBinaryOperator();\ncase CONST_DECL_RHS:\nreturn parseConstantOrListenerDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (boolean) args[4]);\ncase CONST_KEYWORD:\nreturn parseConstantKeyword();\ncase UNARY_OPERATOR:\nreturn parseUnaryOperator();\ncase AT:\nreturn parseAtToken();\ncase REMOTE_CALL_OR_ASYNC_SEND_RHS:\nreturn parseRemoteCallOrAsyncSendActionRhs((STNode) args[0], (boolean) args[1], (STNode) args[2]);\ncase DEFAULT_KEYWORD:\nreturn parseDefaultKeyword();\ncase RIGHT_ARROW:\nreturn parseRightArrow();\ncase PARAMETERIZED_TYPE:\nreturn parseParameterizedTypeKeyword();\ncase ANNOTATION_TAG:\nreturn parseAnnotationTag();\ncase ATTACH_POINT:\nreturn parseAnnotationAttachPoint();\ncase LOCK_KEYWORD:\nreturn parseLockKeyword();\ncase PIPE:\nreturn parsePipeToken();\ncase STRING_KEYWORD:\nreturn parseStringKeyword();\ncase XML_KEYWORD:\nreturn parseXMLKeyword();\ncase INTERPOLATION_START_TOKEN:\nreturn parseInterpolationStart();\ncase EXPR_FUNC_BODY_START:\nreturn parseDoubleRightArrow();\ncase START_KEYWORD:\nreturn parseStartKeyword();\ncase FLUSH_KEYWORD:\nreturn parseFlushKeyword();\ncase ENUM_KEYWORD:\nreturn parseEnumKeyword();\ncase MATCH_KEYWORD:\nreturn parseMatchKeyword();\ncase RECORD_KEYWORD:\nreturn parseRecordKeyword();\ncase LIST_MATCH_PATTERN_MEMBER_RHS:\nreturn parseListMatchPatternMemberRhs();\ncase LIST_BINDING_PATTERN_MEMBER:\nreturn parseListBindingPatternMember();\ncase FIELD_MATCH_PATTERN_MEMBER:\nreturn parseFieldMatchPatternMember();\ncase FIELD_MATCH_PATTERN_MEMBER_RHS:\nreturn parseFieldMatchPatternRhs();\ncase FUNC_MATCH_PATTERN_OR_CONST_PATTERN:\nreturn parseFunctionalMatchPatternOrConsPattern((STNode) args[0]);\ncase ARG_MATCH_PATTERN:\nreturn parseArgMatchPattern();\ncase ARG_MATCH_PATTERN_RHS:\nreturn parseArgMatchPatternRhs();\ndefault:\nthrow new IllegalStateException(\"cannot resume parsing the rule: \" + context);\n}\n}\n/*\n* Private methods.\n*/\n/**\n* Parse a given input and returns the AST. 
Starts parsing from the top of a compilation unit.\n*\n* @return Parsed node\n*/\nprivate STNode parseCompUnit() {\nstartContext(ParserRuleContext.COMP_UNIT);\nSTToken token = peek();\nList otherDecls = new ArrayList<>();\nList importDecls = new ArrayList<>();\nboolean processImports = true;\nwhile (token.kind != SyntaxKind.EOF_TOKEN) {\nSTNode decl = parseTopLevelNode(token.kind);\nif (decl == null) {\nbreak;\n}\nif (decl.kind == SyntaxKind.IMPORT_DECLARATION) {\nif (processImports) {\nimportDecls.add(decl);\n} else {\nupdateLastNodeInListWithInvalidNode(otherDecls, decl,\nDiagnosticErrorCode.ERROR_IMPORT_DECLARATION_AFTER_OTHER_DECLARATIONS);\n}\n} else {\nif (processImports) {\nprocessImports = false;\n}\notherDecls.add(decl);\n}\ntoken = peek();\n}\nSTToken eof = consume();\nendContext();\nreturn STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls),\nSTNodeFactory.createNodeList(otherDecls), eof);\n}\n/**\n* Parse top level node having an optional modifier preceding it.\n*\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode() {\nSTToken token = peek();\nreturn parseTopLevelNode(token.kind);\n}\nprotected STNode parseTopLevelNode(SyntaxKind tokenKind) {\nSTNode metadata;\nswitch (tokenKind) {\ncase EOF_TOKEN:\nreturn null;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(tokenKind);\nreturn parseTopLevelNode(metadata);\ncase IMPORT_KEYWORD:\ncase FINAL_KEYWORD:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase XMLNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase ENUM_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(createEmptyMetadata(), null);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nmetadata = createEmptyMetadata();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE);\nif (solution.action == Action.KEEP) {\nmetadata = STNodeFactory.createEmptyNodeList();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTopLevelNode(solution.tokenKind);\n}\nreturn parseTopLevelNode(tokenKind, metadata);\n}\n/**\n* Parse top level node having an optional modifier preceding it, given the metadata.\n*\n* @param metadata Metadata node that precedes the top level node\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode(STNode metadata) {\nSTToken nextToken = peek();\nreturn parseTopLevelNode(nextToken.kind, metadata);\n}\nprivate STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) {\nSTNode qualifier = null;\nswitch (tokenKind) {\ncase EOF_TOKEN:\nif (metadata != null) {\naddInvalidNodeToNextToken(metadata, DiagnosticErrorCode.ERROR_INVALID_METADATA);\n}\nreturn null;\ncase PUBLIC_KEYWORD:\nqualifier = parseQualifier();\ntokenKind = peek().kind;\nbreak;\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase IMPORT_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase XMLNS_KEYWORD:\ncase ENUM_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nbreak;\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(metadata, null);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, 
metadata);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nreturn parseTopLevelNode(solution.tokenKind, metadata);\n}\nreturn parseTopLevelNode(tokenKind, metadata, qualifier);\n}\n/**\n* Check whether the cursor is at the start of a module level var-decl.\n*\n* @param lookahead Offset of the token to check\n* @return true if the cursor is at the start of a module level var-decl.\n* false otherwise.\n*/\nprivate boolean isModuleVarDeclStart(int lookahead) {\nSTToken nextToken = peek(lookahead + 1);\nswitch (nextToken.kind) {\ncase EQUAL_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn true;\ncase IDENTIFIER_TOKEN:\nswitch (peek(lookahead + 2).kind) {\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\ncase COLON_TOKEN:\nif (lookahead > 1) {\nreturn false;\n}\nif (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) {\nreturn false;\n}\nreturn isModuleVarDeclStart(lookahead + 2);\ndefault:\nreturn false;\n}\n}\n/**\n* Parse import declaration.\n*
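// A hedged sketch of the bounded-lookahead test in isModuleVarDeclStart above: the parser peeks a
// few tokens past the current identifier to decide between a module-level variable declaration and
// other top-level constructs, without consuming anything. The TokenKind values and peek() model
// below are illustrative only.
final class LookaheadSketch {
    enum TokenKind { IDENTIFIER, EQUAL, SEMICOLON, OTHER }

    private final TokenKind[] tokens;
    LookaheadSketch(TokenKind... tokens) { this.tokens = tokens; }

    TokenKind peek(int offset) {
        return offset < tokens.length ? tokens[offset] : TokenKind.OTHER;
    }

    // Mirrors the shape of isModuleVarDeclStart: decide using peeked tokens only.
    boolean looksLikeVarDecl(int lookahead) {
        switch (peek(lookahead + 1)) {
            case EQUAL:
                return true;                       // e.g. `T x = ...`
            case IDENTIFIER:
                TokenKind next = peek(lookahead + 2);
                return next == TokenKind.EQUAL || next == TokenKind.SEMICOLON;
            default:
                return false;
        }
    }
}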

\n* import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;\n*\n* @return Parsed node\n*/\nprivate STNode parseImportDecl() {\nstartContext(ParserRuleContext.IMPORT_DECL);\nthis.tokenReader.startMode(ParserMode.IMPORT);\nSTNode importKeyword = parseImportKeyword();\nSTNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME);\nSTToken token = peek();\nSTNode importDecl = parseImportDecl(token.kind, importKeyword, identifier);\nthis.tokenReader.endMode();\nendContext();\nreturn importDecl;\n}\n/**\n* Parse import keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseImportKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IMPORT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse identifier.\n*\n* @return Parsed node\n*/\nprivate STNode parseIdentifier(ParserRuleContext currentCtx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else if (token.kind == SyntaxKind.MAP_KEYWORD) {\nSTToken mapKeyword = consume();\nreturn STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\n} else {\nSolution sol = recover(token, currentCtx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse RHS of the import declaration. This includes the components after the\n* starting identifier (org-name/module-name) of the import decl.\n*\n* @param importKeyword Import keyword\n* @param identifier Org-name or the module name\n* @return Parsed node\n*/\nprivate STNode parseImportDecl(STNode importKeyword, STNode identifier) {\nSTToken nextToken = peek();\nreturn parseImportDecl(nextToken.kind, importKeyword, identifier);\n}\nprivate STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) {\nSTNode orgName;\nSTNode moduleName;\nSTNode version;\nSTNode alias;\nswitch (tokenKind) {\ncase SLASH_TOKEN:\nSTNode slash = parseSlashToken();\norgName = STNodeFactory.createImportOrgNameNode(identifier, slash);\nmoduleName = parseModuleName();\nversion = parseVersion();\nalias = parseImportPrefixDecl();\nbreak;\ncase DOT_TOKEN:\ncase VERSION_KEYWORD:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = parseVersion();\nalias = parseImportPrefixDecl();\nbreak;\ncase AS_KEYWORD:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = STNodeFactory.createEmptyNode();\nalias = parseImportPrefixDecl();\nbreak;\ncase SEMICOLON_TOKEN:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = STNodeFactory.createEmptyNode();\nalias = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseImportDecl(solution.tokenKind, importKeyword, identifier);\n}\nSTNode semicolon = parseSemicolon();\nreturn STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon);\n}\n/**\n* parse slash token.\n*\n* @return Parsed node\n*/\nprivate STNode parseSlashToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SLASH_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SLASH);\nreturn 
sol.recoveredNode;\n}\n}\n/**\n* Parse dot token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDotToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DOT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.DOT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse module name of an import declaration.\n*\n* @return Parsed node\n*/\nprivate STNode parseModuleName() {\nSTNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME);\nreturn parseModuleName(peek().kind, moduleNameStart);\n}\n/**\n* Parse import module name of an import declaration, given the module name start identifier.\n*\n* @param moduleNameStart Starting identifier of the module name\n* @return Parsed node\n*/\nprivate STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) {\nList<STNode> moduleNameParts = new ArrayList<>();\nmoduleNameParts.add(moduleNameStart);\nwhile (!isEndOfImportModuleName(nextTokenKind)) {\nmoduleNameParts.add(parseDotToken());\nmoduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME));\nnextTokenKind = peek().kind;\n}\nreturn STNodeFactory.createNodeList(moduleNameParts);\n}\nprivate boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) {\nreturn nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN;\n}\nprivate boolean isEndOfImportDecl(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CONST_KEYWORD:\ncase EOF_TOKEN:\ncase SERVICE_KEYWORD:\ncase IMPORT_KEYWORD:\ncase FINAL_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse version component of an import declaration.\n*

\n* version-decl := version sem-ver\n*\n* @return Parsed node\n*/\nprivate STNode parseVersion() {\nSTToken nextToken = peek();\nreturn parseVersion(nextToken.kind);\n}\nprivate STNode parseVersion(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase VERSION_KEYWORD:\nSTNode versionKeyword = parseVersionKeyword();\nSTNode versionNumber = parseVersionNumber();\nreturn STNodeFactory.createImportVersionNode(versionKeyword, versionNumber);\ncase AS_KEYWORD:\ncase SEMICOLON_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nif (isEndOfImportDecl(nextTokenKind)) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVersion(solution.tokenKind);\n}\n}\n/**\n* Parse version keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseVersionKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.VERSION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse version number.\n*

\n* sem-ver := major-num [. minor-num [. patch-num]]\n*
\n* major-num := DecimalNumber\n*
\n* minor-num := DecimalNumber\n*
\n* patch-num := DecimalNumber\n*
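// A sketch of the optional dotted-version scheme above (major [. minor [. patch]]), using plain
// strings instead of STNodes. Names are hypothetical; only the nesting of the optional parts
// matches parseVersionNumber/parseSubVersion: minor is considered only after major, patch only
// after minor.
import java.util.ArrayList;
import java.util.List;

final class SemVerSketch {
    // "1", "1.2" and "1.2.3" are all valid version numbers under this grammar.
    static List<String> parseVersionParts(String version) {
        List<String> parts = new ArrayList<>();
        String[] nums = version.split("\\.", 3);
        parts.add(nums[0]);                 // major-num is mandatory
        if (nums.length > 1) {
            parts.add(nums[1]);             // optional minor-num
            if (nums.length > 2) {
                parts.add(nums[2]);         // optional patch-num, only after minor
            }
        }
        return parts;
    }
}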
\n*\n* @return Parsed node\n*/\nprivate STNode parseVersionNumber() {\nSTToken nextToken = peek();\nreturn parseVersionNumber(nextToken.kind);\n}\nprivate STNode parseVersionNumber(SyntaxKind nextTokenKind) {\nSTNode majorVersion;\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\nmajorVersion = parseMajorVersion();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.VERSION_NUMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVersionNumber(solution.tokenKind);\n}\nList versionParts = new ArrayList<>();\nversionParts.add(majorVersion);\nSTNode minorVersion = parseMinorVersion();\nif (minorVersion != null) {\nversionParts.add(minorVersion);\nSTNode patchVersion = parsePatchVersion();\nif (patchVersion != null) {\nversionParts.add(patchVersion);\n}\n}\nreturn STNodeFactory.createNodeList(versionParts);\n}\nprivate STNode parseMajorVersion() {\nreturn parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION);\n}\nprivate STNode parseMinorVersion() {\nreturn parseSubVersion(ParserRuleContext.MINOR_VERSION);\n}\nprivate STNode parsePatchVersion() {\nreturn parseSubVersion(ParserRuleContext.PATCH_VERSION);\n}\n/**\n* Parse decimal literal.\n*\n* @param context Context in which the decimal literal is used.\n* @return Parsed node\n*/\nprivate STNode parseDecimalIntLiteral(ParserRuleContext context) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), context);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse sub version. i.e: minor-version/patch-version.\n*\n* @param context Context indicating what kind of sub-version is being parsed.\n* @return Parsed node\n*/\nprivate STNode parseSubVersion(ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseSubVersion(nextToken.kind, context);\n}\nprivate STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) {\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\ncase SEMICOLON_TOKEN:\nreturn null;\ncase DOT_TOKEN:\nSTNode leadingDot = parseDotToken();\nSTNode versionNumber = parseDecimalIntLiteral(context);\nreturn STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseSubVersion(solution.tokenKind, context);\n}\n}\n/**\n* Parse import prefix declaration.\n*

\n* import-prefix-decl := as import-prefix\n*
\n* import-prefix := identifier | _\n*
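// The import grammar above admits, for example:
//   import foo;                        (module-name only)
//   import ballerina/http;             (org-name / module-name)
//   import ballerina/http version 1.0; (with a version clause)
//   import ballerina/http as h;        (with an import prefix)
// A hedged sketch (hypothetical names) of the RHS branching done by parseImportDecl on the token
// that follows the first identifier:
final class ImportRhsSketch {
    static String describeRhs(String nextToken) {
        switch (nextToken) {
            case "/":       return "org-name, then module-name [version] [as prefix]";
            case "version": return "no org-name; version clause follows";
            case "as":      return "no org-name, no version; prefix follows";
            case ";":       return "module-name only";
            default:        return "recover and retry";
        }
    }
}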
\n*\n* @return Parsed node\n*/\nprivate STNode parseImportPrefixDecl() {\nSTToken token = peek();\nreturn parseImportPrefixDecl(token.kind);\n}\nprivate STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\nSTNode asKeyword = parseAsKeyword();\nSTNode prefix = parseImportPrefix();\nreturn STNodeFactory.createImportPrefixNode(asKeyword, prefix);\ncase SEMICOLON_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nif (isEndOfImportDecl(nextTokenKind)) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseImportPrefixDecl(solution.tokenKind);\n}\n}\n/**\n* Parse as keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAsKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.AS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.AS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse import prefix.\n*\n* @return Parsed node\n*/\nprivate STNode parseImportPrefix() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse top level node, given the modifier that precedes it.\n*\n* @param qualifier Qualifier that precedes the top level node\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode(STNode metadata, STNode qualifier) {\nSTToken token = peek();\nreturn parseTopLevelNode(token.kind, metadata, qualifier);\n}\n/**\n* Parse top level node given the next token kind and the modifier that precedes it.\n*\n* @param tokenKind Next token kind\n* @param qualifier Qualifier that precedes the top level node\n* @return Parsed top-level node\n*/\nprivate STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) {\nswitch (tokenKind) {\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn parseFuncDefOrFuncTypeDesc(metadata, false, getQualifier(qualifier), null);\ncase TYPE_KEYWORD:\nreturn parseModuleTypeDefinition(metadata, getQualifier(qualifier));\ncase LISTENER_KEYWORD:\nreturn parseListenerDeclaration(metadata, getQualifier(qualifier));\ncase CONST_KEYWORD:\nreturn parseConstantDeclaration(metadata, getQualifier(qualifier));\ncase ANNOTATION_KEYWORD:\nSTNode constKeyword = STNodeFactory.createEmptyNode();\nreturn parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword);\ncase IMPORT_KEYWORD:\nreportInvalidQualifier(qualifier);\nreturn parseImportDecl();\ncase XMLNS_KEYWORD:\nreportInvalidQualifier(qualifier);\nreturn parseXMLNamespaceDeclaration(true);\ncase FINAL_KEYWORD:\nreportInvalidQualifier(qualifier);\nSTNode finalKeyword = parseFinalKeyword();\nreturn parseVariableDecl(metadata, finalKeyword, true);\ncase SERVICE_KEYWORD:\nif (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) {\nreportInvalidQualifier(qualifier);\nreturn parseServiceDecl(metadata);\n}\nreturn parseModuleVarDecl(metadata, qualifier);\ncase ENUM_KEYWORD:\nreturn parseEnumDeclaration(metadata, getQualifier(qualifier));\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\nSTToken token = 
peek();\nSolution solution =\nrecover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\nreturn parseTopLevelNode(solution.tokenKind, metadata, qualifier);\n}\n}\nprivate STNode parseModuleVarDecl(STNode metadata, STNode qualifier) {\nreportInvalidQualifier(qualifier);\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(metadata, finalKeyword, true);\n}\nprivate STNode getQualifier(STNode qualifier) {\nreturn qualifier == null ? STNodeFactory.createEmptyNode() : qualifier;\n}\nprivate void reportInvalidQualifier(STNode qualifier) {\nif (qualifier != null && qualifier.kind != SyntaxKind.NONE) {\naddInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_INVALID_QUALIFIER,\nqualifier.toString().trim());\n}\n}\n/**\n* Parse access modifiers.\n*\n* @return Parsed node\n*/\nprivate STNode parseQualifier() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PUBLIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseFuncDefinition(STNode metadata, boolean isObjectMethod, STNode... qualifiers) {\nparseTransactionalQUalifier(qualifiers);\nstartContext(ParserRuleContext.FUNC_DEF);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcDef = parseFunctionKeywordRhs(metadata, functionKeyword, true, isObjectMethod, qualifiers);\nreturn funcDef;\n}\n/**\n* Parse function definition for the function type descriptor.\n*

\n* \n* function-defn := function identifier function-signature function-body\n*
\n* function-type-descriptor := function function-signature\n*
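// An illustrative-only sketch of the decision made in parseFunctionKeywordRhs below: after the
// `function` keyword, an identifier commits the parser to a function definition, while `(` leaves
// both an anonymous function and a function type descriptor possible until more input is seen.
final class FuncKeywordRhsSketch {
    enum Next { IDENTIFIER, OPEN_PAREN }

    static String classify(Next next) {
        switch (next) {
            case IDENTIFIER: return "function-defn (named)";
            case OPEN_PAREN: return "function-type-descriptor or anonymous function";
            default: throw new IllegalStateException();
        }
    }
}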
\n*\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @return Parsed node\n*/\nprivate STNode parseFuncDefOrFuncTypeDesc(STNode metadata, boolean isObjectMethod, STNode... qualifiers) {\nparseTransactionalQUalifier(qualifiers);\nstartContext(ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcDefOrType = parseFunctionKeywordRhs(metadata, functionKeyword, false, isObjectMethod, qualifiers);\nreturn funcDefOrType;\n}\nprivate void parseTransactionalQUalifier(STNode... qualifiers) {\nif (peek().kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nqualifiers[qualifiers.length - 1] = consume();\n} else {\nqualifiers[qualifiers.length - 1] = STNodeFactory.createEmptyNode();\n}\n}\nprivate STNode parseFunctionKeywordRhs(STNode metadata, STNode functionKeyword, boolean isFuncDef,\nboolean isObjectMethod, STNode... qualifiers) {\nreturn parseFunctionKeywordRhs(peek().kind, metadata, functionKeyword, isFuncDef, isObjectMethod, qualifiers);\n}\nprivate STNode parseFunctionKeywordRhs(SyntaxKind nextTokenKind, STNode metadata, STNode functionKeyword,\nboolean isFuncDef, boolean isObjectMethod, STNode... qualifiers) {\nSTNode name;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nname = parseFunctionName();\nisFuncDef = true;\nbreak;\ncase OPEN_PAREN_TOKEN:\nname = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FUNCTION_KEYWORD_RHS, metadata, functionKeyword,\nisFuncDef, isObjectMethod, qualifiers);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFunctionKeywordRhs(solution.tokenKind, metadata, functionKeyword, isFuncDef, isObjectMethod,\nqualifiers);\n}\nif (isFuncDef) {\nswitchContext(ParserRuleContext.FUNC_DEF);\nSTNode funcSignature = parseFuncSignature(false);\nSTNode funcDef = createFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMethod, name, funcSignature,\nqualifiers);\nendContext();\nreturn funcDef;\n}\nSTNode funcSignature = parseFuncSignature(true);\nreturn parseReturnTypeDescRhs(metadata, functionKeyword, funcSignature, isObjectMethod, qualifiers);\n}\nprivate STNode createFuncDefOrMethodDecl(STNode metadata, STNode functionKeyword, boolean isObjectMethod,\nSTNode name, STNode funcSignature, STNode... qualifiers) {\nSTNode body = parseFunctionBody(isObjectMethod);\nif (body.kind == SyntaxKind.SEMICOLON_TOKEN) {\nreturn STNodeFactory.createMethodDeclarationNode(metadata, qualifiers[0], functionKeyword, name,\nfuncSignature, body);\n}\nif (isObjectMethod) {\nreturn STNodeFactory.createObjectMethodDefinitionNode(metadata, qualifiers[0], qualifiers[1], qualifiers[2],\nfunctionKeyword, name, funcSignature, body);\n}\nreturn STNodeFactory.createFunctionDefinitionNode(metadata, qualifiers[0], qualifiers[1], functionKeyword, name,\nfuncSignature, body);\n}\n/**\n* Parse function signature.\n*

\n* \n* function-signature := ( param-list ) return-type-descriptor\n*
\n* return-type-descriptor := [ returns [annots] type-descriptor ]\n*
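// A small sketch of the optional return-type rule above: the whole `returns [annots]
// type-descriptor` clause may be absent, and is consumed only when the `returns` keyword is
// actually next. The token model is hypothetical.
import java.util.Deque;
import java.util.Optional;

final class ReturnTypeSketch {
    static Optional<String> parseReturnType(Deque<String> tokens) {
        if (!"returns".equals(tokens.peek())) {
            return Optional.empty();                // return-type-descriptor is optional
        }
        tokens.poll();                              // consume `returns`
        return Optional.ofNullable(tokens.poll());  // the type-descriptor that must follow
    }
}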
\n*\n* @param isParamNameOptional Whether the parameter names are optional\n* @return Function signature node\n*/\nprivate STNode parseFuncSignature(boolean isParamNameOptional) {\nSTNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode parameters = parseParamList(isParamNameOptional);\nSTNode closeParenthesis = parseCloseParenthesis();\nendContext();\nSTNode returnTypeDesc = parseFuncReturnTypeDescriptor();\nreturn STNodeFactory.createFunctionSignatureNode(openParenthesis, parameters, closeParenthesis, returnTypeDesc);\n}\nprivate STNode parseReturnTypeDescRhs(STNode metadata, STNode functionKeyword, STNode funcSignature,\nboolean isObjectMethod, STNode... qualifiers) {\nswitch (peek().kind) {\ncase SEMICOLON_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACKET_TOKEN:\nendContext();\nSTNode typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);\nif (isObjectMethod) {\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, qualifiers[0], readonlyQualifier, typeDesc, fieldName);\n}\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(metadata, qualifiers[0], typedBindingPattern, true);\ncase OPEN_BRACE_TOKEN:\ncase EQUAL_TOKEN:\nbreak;\ndefault:\nbreak;\n}\nSTNode name = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_FUNCTION_NAME);\nfuncSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\nSTNode funcDef =\ncreateFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMethod, name, funcSignature, qualifiers);\nendContext();\nreturn funcDef;\n}\n/**\n* Validate the param list and return. 
If there are params without param-name,\n* then this method will create a new set of params with missing param-name\n* and return.\n*\n* @param signature Function signature\n* @return Updated function signature node\n*/\nprivate STNode validateAndGetFuncParams(STFunctionSignatureNode signature) {\nSTNode parameters = signature.parameters;\nint paramCount = parameters.bucketCount();\nint index = 0;\nfor (; index < paramCount; index++) {\nSTNode param = parameters.childInBucket(index);\nswitch (param.kind) {\ncase REQUIRED_PARAM:\nSTRequiredParameterNode requiredParam = (STRequiredParameterNode) param;\nif (isEmpty(requiredParam.paramName)) {\nbreak;\n}\ncontinue;\ncase DEFAULTABLE_PARAM:\nSTDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;\nif (isEmpty(defaultableParam.paramName)) {\nbreak;\n}\ncontinue;\ncase REST_PARAM:\nSTRestParameterNode restParam = (STRestParameterNode) param;\nif (isEmpty(restParam.paramName)) {\nbreak;\n}\ncontinue;\ndefault:\ncontinue;\n}\nbreak;\n}\nif (index == paramCount) {\nreturn signature;\n}\nSTNode updatedParams = getUpdatedParamList(parameters, index);\nreturn STNodeFactory.createFunctionSignatureNode(signature.openParenToken, updatedParams,\nsignature.closeParenToken, signature.returnTypeDesc);\n}\nprivate STNode getUpdatedParamList(STNode parameters, int index) {\nint paramCount = parameters.bucketCount();\nint newIndex = 0;\nArrayList<STNode> newParams = new ArrayList<>();\nfor (; newIndex < index; newIndex++) {\nnewParams.add(parameters.childInBucket(newIndex));\n}\nfor (; newIndex < paramCount; newIndex++) {\nSTNode param = parameters.childInBucket(newIndex);\nSTNode paramName = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nswitch (param.kind) {\ncase REQUIRED_PARAM:\nSTRequiredParameterNode requiredParam = (STRequiredParameterNode) param;\nif (isEmpty(requiredParam.paramName)) {\nparam = STNodeFactory.createRequiredParameterNode(requiredParam.leadingComma,\nrequiredParam.annotations, requiredParam.visibilityQualifier, requiredParam.typeName,\nparamName);\n}\nbreak;\ncase DEFAULTABLE_PARAM:\nSTDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;\nif (isEmpty(defaultableParam.paramName)) {\nparam = STNodeFactory.createDefaultableParameterNode(defaultableParam.leadingComma,\ndefaultableParam.annotations, defaultableParam.visibilityQualifier,\ndefaultableParam.typeName, paramName, defaultableParam.equalsToken,\ndefaultableParam.expression);\n}\nbreak;\ncase REST_PARAM:\nSTRestParameterNode restParam = (STRestParameterNode) param;\nif (isEmpty(restParam.paramName)) {\nparam = STNodeFactory.createRestParameterNode(restParam.leadingComma, restParam.annotations,\nrestParam.typeName, restParam.ellipsisToken, paramName);\n}\nbreak;\ndefault:\nbreak;\n}\nnewParams.add(param);\n}\nreturn STNodeFactory.createNodeList(newParams);\n}\nprivate boolean isEmpty(STNode node) {\nreturn !SyntaxUtils.isSTNodePresent(node);\n}\n/**\n* Parse function keyword. 
Need to validate the token before consuming,\n* since we can reach here while recovering.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse function name.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNC_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse open parenthesis.\n*\n* @param ctx Context of the parenthesis\n* @return Parsed node\n*/\nprivate STNode parseOpenParenthesis(ParserRuleContext ctx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ctx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse close parenthesis.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseParenthesis() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS);\nreturn sol.recoveredNode;\n}\n}\n/**\n*

\n* Parse parameter list.\n*

\n* \n* param-list := required-params [, defaultable-params] [, rest-param]\n*
 | defaultable-params [, rest-param]\n*
 | [rest-param]\n*

\n* required-params := required-param (, required-param)*\n*

\n* required-param := [annots] [public] type-descriptor [param-name]\n*

\n* defaultable-params := defaultable-param (, defaultable-param)*\n*

\n* defaultable-param := [annots] [public] type-descriptor [param-name] default-value\n*

\n* rest-param := [annots] type-descriptor ... [param-name]\n*

\n* param-name := identifier\n*
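// A hedged sketch of the ordering rule this grammar implies, as enforced by validateParamOrder
// below: required params must precede defaultable params, and the rest param (if any) must come
// last. Names are hypothetical.
final class ParamOrderSketch {
    enum Kind { REQUIRED, DEFAULTABLE, REST }

    // Returns null when `current` may follow `previous`, otherwise a description of the error.
    static String validateOrder(Kind previous, Kind current) {
        if (previous == Kind.REST) {
            return "parameter after the rest parameter";
        }
        if (previous == Kind.DEFAULTABLE && current == Kind.REQUIRED) {
            return "required parameter after a defaultable parameter";
        }
        return null;
    }
}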
\n*\n* @param isParamNameOptional Whether the param names in the signature is optional or not.\n* @return Parsed node\n*/\nprivate STNode parseParamList(boolean isParamNameOptional) {\nstartContext(ParserRuleContext.PARAM_LIST);\nSTToken token = peek();\nif (isEndOfParametersList(token.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nArrayList paramsList = new ArrayList<>();\nSTNode startingComma = STNodeFactory.createEmptyNode();\nstartContext(ParserRuleContext.REQUIRED_PARAM);\nSTNode firstParam = parseParameter(startingComma, SyntaxKind.REQUIRED_PARAM, isParamNameOptional);\nSyntaxKind prevParamKind = firstParam.kind;\nparamsList.add(firstParam);\nboolean paramOrderErrorPresent = false;\ntoken = peek();\nwhile (!isEndOfParametersList(token.kind)) {\nif (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM) {\nstartContext(ParserRuleContext.DEFAULTABLE_PARAM);\n} else {\nstartContext(ParserRuleContext.REQUIRED_PARAM);\n}\nSTNode paramEnd = parseParameterRhs();\nif (paramEnd == null) {\nendContext();\nbreak;\n}\nSTNode param = parseParameter(paramEnd, prevParamKind, isParamNameOptional);\nif (paramOrderErrorPresent) {\nupdateLastNodeInListWithInvalidNode(paramsList, param, null);\n} else {\nDiagnosticCode paramOrderError = validateParamOrder(param, prevParamKind);\nif (paramOrderError == null) {\nparamsList.add(param);\n} else {\nparamOrderErrorPresent = true;\nupdateLastNodeInListWithInvalidNode(paramsList, param, paramOrderError);\n}\n}\nprevParamKind = param.kind;\ntoken = peek();\n}\nreturn STNodeFactory.createNodeList(paramsList);\n}\n/**\n* Return the appropriate {@code DiagnosticCode} if there are parameter order issues.\n*\n* @param param the new parameter\n* @param prevParamKind the SyntaxKind of the previously added parameter\n*/\nprivate DiagnosticCode validateParamOrder(STNode param, SyntaxKind prevParamKind) {\nif (prevParamKind == SyntaxKind.REST_PARAM) {\nreturn DiagnosticErrorCode.ERROR_PARAMETER_AFTER_THE_REST_PARAMETER;\n} else if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM && param.kind == SyntaxKind.REQUIRED_PARAM) {\nreturn DiagnosticErrorCode.ERROR_REQUIRED_PARAMETER_AFTER_THE_DEFAULTABLE_PARAMETER;\n} else {\nreturn null;\n}\n}\nprivate boolean isNodeWithSyntaxKindInList(List nodeList, SyntaxKind kind) {\nfor (STNode node : nodeList) {\nif (node.kind == kind) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate STNode parseParameterRhs() {\nreturn parseParameterRhs(peek().kind);\n}\nprivate STNode parseParameterRhs(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAM_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameterRhs(solution.tokenKind);\n}\n}\n/**\n* Parse a single parameter. 
Parameter can be a required parameter, a defaultable\n* parameter, or a rest parameter.\n*\n* @param prevParamKind Kind of the parameter that precedes current parameter\n* @param leadingComma Comma that occurs before the param\n* @param isParamNameOptional Whether the param names in the signature is optional or not.\n* @return Parsed node\n*/\nprivate STNode parseParameter(STNode leadingComma, SyntaxKind prevParamKind, boolean isParamNameOptional) {\nSTToken token = peek();\nreturn parseParameter(token.kind, prevParamKind, leadingComma, 1, isParamNameOptional);\n}\nprivate STNode parseParameter(SyntaxKind prevParamKind, STNode leadingComma, int nextTokenOffset,\nboolean isParamNameOptional) {\nreturn parseParameter(peek().kind, prevParamKind, leadingComma, nextTokenOffset, isParamNameOptional);\n}\nprivate STNode parseParameter(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nint nextTokenOffset, boolean isParamNameOptional) {\nSTNode annots;\nswitch (nextTokenKind) {\ncase AT_TOKEN:\nannots = parseAnnotations(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ncase PUBLIC_KEYWORD:\ncase IDENTIFIER_TOKEN:\nannots = STNodeFactory.createEmptyNodeList();\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nannots = STNodeFactory.createNodeList(new ArrayList<>());\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_START, prevParamKind, leadingComma,\nnextTokenOffset, isParamNameOptional);\nif (solution.action == Action.KEEP) {\nannots = STNodeFactory.createEmptyNodeList();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameter(solution.tokenKind, prevParamKind, leadingComma, 0, isParamNameOptional);\n}\nreturn parseParamGivenAnnots(nextTokenKind, prevParamKind, leadingComma, annots, 1, isParamNameOptional);\n}\nprivate STNode parseParamGivenAnnots(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nint nextNextTokenOffset, boolean isFuncDef) {\nreturn parseParamGivenAnnots(peek().kind, prevParamKind, leadingComma, annots, nextNextTokenOffset, isFuncDef);\n}\nprivate STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nSTNode annots, int nextTokenOffset, boolean isParamNameOptional) {\nSTNode qualifier;\nswitch (nextTokenKind) {\ncase PUBLIC_KEYWORD:\nqualifier = parseQualifier();\nbreak;\ncase IDENTIFIER_TOKEN:\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\ncase AT_TOKEN:\ndefault:\nif (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, prevParamKind,\nleadingComma, annots, nextTokenOffset, isParamNameOptional);\nif (solution.action == Action.KEEP) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParamGivenAnnots(solution.tokenKind, prevParamKind, leadingComma, annots, 0,\nisParamNameOptional);\n}\nreturn parseParamGivenAnnotsAndQualifier(prevParamKind, leadingComma, annots, qualifier, isParamNameOptional);\n}\nprivate STNode parseParamGivenAnnotsAndQualifier(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nSTNode qualifier, boolean isParamNameOptional) {\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode param = 
parseAfterParamType(prevParamKind, leadingComma, annots, qualifier, type, isParamNameOptional);\nendContext();\nreturn param;\n}\nprivate STNode parseAfterParamType(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,\nSTNode type, boolean isParamNameOptional) {\nSTToken token = peek();\nreturn parseAfterParamType(token.kind, prevParamKind, leadingComma, annots, qualifier, type,\nisParamNameOptional);\n}\nprivate STNode parseAfterParamType(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nSTNode annots, STNode qualifier, STNode type, boolean isParamNameOptional) {\nSTNode paramName;\nswitch (tokenKind) {\ncase ELLIPSIS_TOKEN:\nswitchContext(ParserRuleContext.REST_PARAM);\nreportInvalidQualifier(qualifier);\nSTNode ellipsis = parseEllipsis();\nif (isParamNameOptional && peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {\nparamName = STNodeFactory.createEmptyNode();\n} else {\nparamName = parseVariableName();\n}\nreturn STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName);\ncase IDENTIFIER_TOKEN:\nparamName = parseVariableName();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\ncase EQUAL_TOKEN:\nif (!isParamNameOptional) {\nbreak;\n}\nparamName = STNodeFactory.createEmptyNode();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\ndefault:\nif (!isParamNameOptional) {\nbreak;\n}\nparamName = STNodeFactory.createEmptyNode();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, prevParamKind, leadingComma, annots,\nqualifier, type, isParamNameOptional);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAfterParamType(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,\nisParamNameOptional);\n}\n/**\n* Parse ellipsis.\n*\n* @return Parsed node\n*/\nprivate STNode parseEllipsis() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ELLIPSIS_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ELLIPSIS);\nreturn sol.recoveredNode;\n}\n}\n/**\n*

\n* Parse the right hand side of a required/defaultable parameter.\n*

\n* parameter-rhs := [= expression]\n*\n* @param leadingComma Comma that precedes this parameter\n* @param prevParamKind Kind of the parameter that precedes current parameter\n* @param annots Annotations attached to the parameter\n* @param qualifier Visibility qualifier\n* @param type Type descriptor\n* @param paramName Name of the parameter\n* @return Parsed parameter node\n*/\nprivate STNode parseParameterRhs(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,\nSTNode type, STNode paramName) {\nSTToken token = peek();\nreturn parseParameterRhs(token.kind, prevParamKind, leadingComma, annots, qualifier, type, paramName);\n}\nprivate STNode parseParameterRhs(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nSTNode qualifier, STNode type, STNode paramName) {\nif (isEndOfParameter(tokenKind)) {\nreturn STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName);\n} else if (tokenKind == SyntaxKind.EQUAL_TOKEN) {\nif (prevParamKind == SyntaxKind.REQUIRED_PARAM) {\nswitchContext(ParserRuleContext.DEFAULTABLE_PARAM);\n}\nSTNode equal = parseAssignOp();\nSTNode expr = parseExpression();\nreturn STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal,\nexpr);\n} else {\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_NAME_RHS, prevParamKind, leadingComma,\nannots, qualifier, type, paramName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameterRhs(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,\nparamName);\n}\n}\n/**\n* Parse comma.\n*\n* @return Parsed node\n*/\nprivate STNode parseComma() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COMMA_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMMA);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse return type descriptor of a function. A return type descriptor has the following structure.\n*\n* return-type-descriptor := [ returns annots type-descriptor ]\n*\n* @return Parsed node\n*/\nprivate STNode parseFuncReturnTypeDescriptor() {\nreturn parseFuncReturnTypeDescriptor(peek().kind);\n}\nprivate STNode parseFuncReturnTypeDescriptor(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\ncase EQUAL_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase RETURNS_KEYWORD:\nbreak;\ndefault:\nSTToken nextNextToken = getNextNextToken(nextTokenKind);\nif (nextNextToken.kind == SyntaxKind.RETURNS_KEYWORD) {\nbreak;\n}\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode returnsKeyword = parseReturnsKeyword();\nSTNode annot = parseAnnotations();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\nreturn STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n}\n/**\n* Parse 'returns' keyword.\n*\n* @return Return-keyword node\n*/\nprivate STNode parseReturnsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETURNS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETURNS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*

\n* Parse a type descriptor. A type descriptor has the following structure.\n*

\n* type-descriptor :=\n*  simple-type-descriptor
\n*  | structured-type-descriptor
\n*  | behavioral-type-descriptor
\n*  | singleton-type-descriptor
\n*  | union-type-descriptor
\n*  | optional-type-descriptor
\n*  | any-type-descriptor
\n*  | anydata-type-descriptor
\n*  | byte-type-descriptor
\n*  | json-type-descriptor
\n*  | type-descriptor-reference
\n*  | ( type-descriptor )\n*
\n* type-descriptor-reference := qualified-identifier
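// A sketch of the suffix loop in parseComplexTypeDescriptor below: after a base type-descriptor
// is parsed, the parser keeps folding `?` and `[]` suffixes until none applies (union `|` and
// intersection `&` are delegated to separate rules), so a type such as `int[]?` is built by
// repeated wrapping. Illustrative names only.
import java.util.List;

final class ComplexTypeSketch {
    static String fold(String base, List<String> suffixes) {
        String type = base;
        for (String s : suffixes) {
            switch (s) {
                case "?":  type = "optional<" + type + ">"; break;
                case "[]": type = "array<" + type + ">";    break;
                default:   return type;   // not a type continuation; stop folding
            }
        }
        return type;
    }
}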
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeDescriptor(ParserRuleContext context) {\nreturn parseTypeDescriptor(context, false, false);\n}\nprivate STNode parseTypeDescriptorInExpression(ParserRuleContext context, boolean isInConditionalExpr) {\nreturn parseTypeDescriptor(context, false, isInConditionalExpr);\n}\nprivate STNode parseTypeDescriptor(ParserRuleContext context, boolean isTypedBindingPattern,\nboolean isInConditionalExpr) {\nstartContext(context);\nSTNode typeDesc = parseTypeDescriptorInternal(context, isTypedBindingPattern, isInConditionalExpr);\nendContext();\nreturn typeDesc;\n}\nprivate STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isInConditionalExpr) {\nreturn parseTypeDescriptorInternal(context, false, isInConditionalExpr);\n}\nprivate STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isTypedBindingPattern,\nboolean isInConditionalExpr) {\nSTToken token = peek();\nSTNode typeDesc = parseTypeDescriptorInternal(token.kind, context, isInConditionalExpr);\nreturn parseComplexTypeDescriptor(typeDesc, context, isTypedBindingPattern);\n}\n/**\n* This will handle the parsing of optional,array,union type desc to infinite length.\n*\n* @param typeDesc\n*\n* @return Parsed type descriptor node\n*/\nprivate STNode parseComplexTypeDescriptor(STNode typeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase QUESTION_MARK_TOKEN:\nif (context == ParserRuleContext.TYPE_DESC_IN_EXPRESSION &&\n!isValidTypeContinuationToken(getNextNextToken(nextToken.kind)) &&\nisValidExprStart(getNextNextToken(nextToken.kind).kind)) {\nreturn typeDesc;\n}\nreturn parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc), context,\nisTypedBindingPattern);\ncase OPEN_BRACKET_TOKEN:\nif (isTypedBindingPattern) {\nreturn typeDesc;\n}\nreturn parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc), context, isTypedBindingPattern);\ncase PIPE_TOKEN:\nreturn parseUnionTypeDescriptor(typeDesc, context, isTypedBindingPattern);\ncase BITWISE_AND_TOKEN:\nreturn parseIntersectionTypeDescriptor(typeDesc, context, isTypedBindingPattern);\ndefault:\nreturn typeDesc;\n}\n}\nprivate boolean isValidTypeContinuationToken(STToken nextToken) {\nswitch (nextToken.kind) {\ncase QUESTION_MARK_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n*

\n* Parse a type descriptor, given the next token kind.\n*

\n* If the preceding token is ? then it is an optional type descriptor\n*\n* @param tokenKind Next token kind\n* @param context Current context\n* @param isInConditionalExpr\n* @return Parsed node\n*/\nprivate STNode parseTypeDescriptorInternal(SyntaxKind tokenKind, ParserRuleContext context,\nboolean isInConditionalExpr) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseTypeReference(isInConditionalExpr);\ncase RECORD_KEYWORD:\nreturn parseRecordTypeDescriptor();\ncase READONLY_KEYWORD:\nSTToken nextNextToken = getNextNextToken(tokenKind);\nSyntaxKind nextNextTokenKind = nextNextToken.kind;\nif (nextNextTokenKind != SyntaxKind.OBJECT_KEYWORD &&\nnextNextTokenKind != SyntaxKind.ABSTRACT_KEYWORD &&\nnextNextTokenKind != SyntaxKind.CLIENT_KEYWORD) {\nreturn parseSimpleTypeDescriptor();\n}\ncase OBJECT_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CLIENT_KEYWORD:\nreturn parseObjectTypeDescriptor();\ncase OPEN_PAREN_TOKEN:\nreturn parseNilOrParenthesisedTypeDesc();\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\nreturn parseParameterizedTypeDescriptor();\ncase TYPEDESC_KEYWORD:\nreturn parseTypedescTypeDescriptor();\ncase ERROR_KEYWORD:\nreturn parseErrorTypeDescriptor();\ncase XML_KEYWORD:\nreturn parseXmlTypeDescriptor();\ncase STREAM_KEYWORD:\nreturn parseStreamTypeDescriptor();\ncase TABLE_KEYWORD:\nreturn parseTableTypeDescriptor();\ncase FUNCTION_KEYWORD:\nreturn parseFunctionTypeDesc();\ncase OPEN_BRACKET_TOKEN:\nreturn parseTupleTypeDesc();\ncase DISTINCT_KEYWORD:\nreturn parseDistinctTypeDesc(context);\ndefault:\nif (isSingletonTypeDescStart(tokenKind, true)) {\nreturn parseSingletonTypeDesc();\n}\nif (isSimpleType(tokenKind)) {\nreturn parseSimpleTypeDescriptor();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR, context, isInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypeDescriptorInternal(solution.tokenKind, context, isInConditionalExpr);\n}\n}\n/**\n* Parse distinct type descriptor.\n*

\n* \n* distinct-type-descriptor := distinct type-descriptor\n* \n*\n* @param context Context in which the type desc is used.\n* @return Distinct type descriptor\n*/\nprivate STNode parseDistinctTypeDesc(ParserRuleContext context) {\nSTNode distinctKeyword = parseDistinctKeyword();\nSTNode typeDesc = parseTypeDescriptor(context);\nreturn STNodeFactory.createDistinctTypeDescriptorNode(distinctKeyword, typeDesc);\n}\nprivate STNode parseDistinctKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DISTINCT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DISTINCT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseNilOrParenthesisedTypeDesc() {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nreturn parseNilOrParenthesisedTypeDescRhs(openParen);\n}\nprivate STNode parseNilOrParenthesisedTypeDescRhs(STNode openParen) {\nreturn parseNilOrParenthesisedTypeDescRhs(peek().kind, openParen);\n}\nprivate STNode parseNilOrParenthesisedTypeDescRhs(SyntaxKind nextTokenKind, STNode openParen) {\nSTNode closeParen;\nswitch (nextTokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncloseParen = parseCloseParenthesis();\nreturn STNodeFactory.createNilTypeDescriptorNode(openParen, closeParen);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nSTNode typedesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_PARENTHESIS);\ncloseParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typedesc, closeParen);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.NIL_OR_PARENTHESISED_TYPE_DESC_RHS, openParen);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseNilOrParenthesisedTypeDescRhs(solution.tokenKind, openParen);\n}\n}\n/**\n* Parse simple type descriptor.\n*\n* @return Parsed node\n*/\nprivate STNode parseSimpleTypeDescriptor() {\nSTToken node = peek();\nif (isSimpleType(node.kind)) {\nSTToken token = consume();\nreturn createBuiltinSimpleNameReference(token);\n} else {\nSolution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR);\nSTNode recoveredNode = sol.recoveredNode;\nreturn createBuiltinSimpleNameReference(recoveredNode);\n}\n}\nprivate STNode createBuiltinSimpleNameReference(STNode token) {\nSyntaxKind typeKind = getTypeSyntaxKind(token.kind);\nreturn STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token);\n}\n/**\n*

\n* Parse function body. A function body has the following structure.\n*

\n* \n* function-body := function-body-block | external-function-body\n* external-function-body := = annots external ;\n* function-body-block := { [default-worker-init, named-worker-decl+] default-worker }\n* \n*\n* @param isObjectMethod Flag indicating whether this is an object-method\n* @return Parsed node\n*/\nprivate STNode parseFunctionBody(boolean isObjectMethod) {\nSTToken token = peek();\nreturn parseFunctionBody(token.kind, isObjectMethod);\n}\n/**\n* Parse function body, given the next token kind.\n*\n* @param tokenKind Next token kind\n* @param isObjectMethod Flag indicating whether this is an object-method\n* @return Parsed node\n*/\nprotected STNode parseFunctionBody(SyntaxKind tokenKind, boolean isObjectMethod) {\nswitch (tokenKind) {\ncase EQUAL_TOKEN:\nreturn parseExternalFunctionBody();\ncase OPEN_BRACE_TOKEN:\nreturn parseFunctionBodyBlock(false);\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn parseExpressionFuncBody(false, false);\ncase SEMICOLON_TOKEN:\nif (isObjectMethod) {\nreturn parseSemicolon();\n}\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FUNC_BODY, isObjectMethod);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.tokenKind == SyntaxKind.NONE) {\nreturn STNodeFactory.createMissingToken(solution.tokenKind);\n}\nreturn parseFunctionBody(solution.tokenKind, isObjectMethod);\n}\n}\n/**\n*

\n* Parse function body block. A function body block has the following structure.\n*

\n*\n* \n* function-body-block := { [default-worker-init, named-worker-decl+] default-worker }
\n* default-worker-init := sequence-stmt
\n* default-worker := sequence-stmt
\n* named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }
\n* worker-name := identifier
\n*
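// A minimal model (hypothetical types) of the three-phase split performed by
// parseFunctionBodyBlock below: statements before the first named worker form the
// default-worker-init, a run of named workers follows, and everything after belongs to the
// default worker, where further worker declarations are flagged as invalid.
import java.util.List;

final class WorkerPartitionSketch {
    static void partition(List<String> stmts, List<String> init, List<String> workers, List<String> rest) {
        int phase = 0;                                   // 0 = init, 1 = workers, 2 = default worker
        for (String stmt : stmts) {
            boolean isWorker = stmt.startsWith("worker ");
            if (phase == 0 && isWorker) phase = 1;
            if (phase == 1 && !isWorker) phase = 2;
            if (phase == 0) init.add(stmt);
            else if (phase == 1) workers.add(stmt);
            else rest.add(stmt);                         // named workers here would be flagged invalid
        }
    }
}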
\n*\n* @param isAnonFunc Flag indicating whether the func body belongs to an anonymous function\n* @return Parsed node\n*/\nprivate STNode parseFunctionBodyBlock(boolean isAnonFunc) {\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nSTNode openBrace = parseOpenBrace();\nSTToken token = peek();\nArrayList firstStmtList = new ArrayList<>();\nArrayList workers = new ArrayList<>();\nArrayList secondStmtList = new ArrayList<>();\nParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT;\nboolean hasNamedWorkers = false;\nwhile (!isEndOfFuncBodyBlock(token.kind, isAnonFunc)) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nswitch (currentCtx) {\ncase DEFAULT_WORKER_INIT:\nif (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) {\nfirstStmtList.add(stmt);\nbreak;\n}\ncurrentCtx = ParserRuleContext.NAMED_WORKERS;\nhasNamedWorkers = true;\ncase NAMED_WORKERS:\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\nworkers.add(stmt);\nbreak;\n}\ncurrentCtx = ParserRuleContext.DEFAULT_WORKER;\ncase DEFAULT_WORKER:\ndefault:\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\nupdateLastNodeInListWithInvalidNode(secondStmtList, stmt,\nDiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE);\nbreak;\n}\nsecondStmtList.add(stmt);\nbreak;\n}\ntoken = peek();\n}\nSTNode namedWorkersList;\nSTNode statements;\nif (hasNamedWorkers) {\nSTNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList);\nSTNode namedWorkers = STNodeFactory.createNodeList(workers);\nnamedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers);\nstatements = STNodeFactory.createNodeList(secondStmtList);\n} else {\nnamedWorkersList = STNodeFactory.createEmptyNode();\nstatements = STNodeFactory.createNodeList(firstStmtList);\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace);\n}\nprivate boolean isEndOfFuncBodyBlock(SyntaxKind nextTokenKind, boolean isAnonFunc) {\nif (isAnonFunc) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase PUBLIC_KEYWORD:\ncase EOF_TOKEN:\ncase EQUAL_TOKEN:\ncase BACKTICK_TOKEN:\nreturn true;\ndefault:\nbreak;\n}\n}\nreturn isEndOfStatements();\n}\nprivate boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase TYPE_KEYWORD:\ncase PUBLIC_KEYWORD:\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\nprivate boolean isEndOfObjectTypeNode() {\nreturn endOfModuleLevelNode(1, true);\n}\nprivate boolean isEndOfStatements() {\nswitch (peek().kind) {\ncase RESOURCE_KEYWORD:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\nprivate boolean endOfModuleLevelNode(int peekIndex) {\nreturn endOfModuleLevelNode(peekIndex, false);\n}\nprivate boolean endOfModuleLevelNode(int peekIndex, boolean isObject) {\nswitch (peek(peekIndex).kind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\ncase IMPORT_KEYWORD:\ncase CONST_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase LISTENER_KEYWORD:\nreturn true;\ncase SERVICE_KEYWORD:\nreturn isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1);\ncase PUBLIC_KEYWORD:\nreturn endOfModuleLevelNode(peekIndex + 1, isObject);\ncase FUNCTION_KEYWORD:\nif (isObject) {\nreturn false;\n}\nreturn peek(peekIndex + 1).kind == SyntaxKind.IDENTIFIER_TOKEN;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the given token is 
an end of a parameter.\n*\n* @param tokenKind Next token kind\n* @return true if the token represents an end of a parameter. false otherwise\n*/\nprivate boolean isEndOfParameter(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase AT_TOKEN:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\n/**\n* Check whether the given token is an end of a parameter-list.\n*\n* @param tokenKind Next token kind\n* @return true if the token represents an end of a parameter-list. false otherwise\n*/\nprivate boolean isEndOfParametersList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncase SEMICOLON_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\n/**\n* Parse type reference or variable reference.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartIdentifier() {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\n}\n/**\n* Parse variable name.\n*\n* @return Parsed node\n*/\nprivate STNode parseVariableName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse open brace.\n*\n* @return Parsed node\n*/\nprivate STNode parseOpenBrace() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPEN_BRACE);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse close brace.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseBrace() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_BRACE);\nreturn sol.recoveredNode;\n}\n}\n/**\n*

\n* Parse external function body. An external function body has the following structure.\n*

\n* \n* external-function-body := = annots external ;\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseExternalFunctionBody() {\nstartContext(ParserRuleContext.EXTERNAL_FUNC_BODY);\nSTNode assign = parseAssignOp();\nSTNode annotation = parseAnnotations();\nSTNode externalKeyword = parseExternalKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon);\n}\n/**\n* Parse semicolon.\n*\n* @return Parsed node\n*/\nprivate STNode parseSemicolon() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SEMICOLON_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SEMICOLON);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse external keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseExternalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.EXTERNAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/*\n* Operators\n*/\n/**\n* Parse assign operator.\n*\n* @return Parsed node\n*/\nprivate STNode parseAssignOp() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ASSIGN_OP);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse binary operator.\n*\n* @return Parsed node\n*/\nprivate STNode parseBinaryOperator() {\nSTToken token = peek();\nif (isBinaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BINARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a binary operator.\n*\n* @param kind STToken kind\n* @return true if the token kind refers to a binary operator. 
false otherwise\n*/\nprivate boolean isBinaryOperator(SyntaxKind kind) {\nswitch (kind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase SLASH_TOKEN:\ncase ASTERISK_TOKEN:\ncase GT_TOKEN:\ncase LT_TOKEN:\ncase DOUBLE_EQUAL_TOKEN:\ncase TRIPPLE_EQUAL_TOKEN:\ncase LT_EQUAL_TOKEN:\ncase GT_EQUAL_TOKEN:\ncase NOT_EQUAL_TOKEN:\ncase NOT_DOUBLE_EQUAL_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase BITWISE_XOR_TOKEN:\ncase PIPE_TOKEN:\ncase LOGICAL_AND_TOKEN:\ncase LOGICAL_OR_TOKEN:\ncase PERCENT_TOKEN:\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\ncase ELLIPSIS_TOKEN:\ncase DOUBLE_DOT_LT_TOKEN:\ncase ELVIS_TOKEN:\ncase EQUALS_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Get the precedence of a given operator.\n*\n* @param binaryOpKind Operator kind\n* @return Precedence of the given operator\n*/\nprivate OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) {\nswitch (binaryOpKind) {\ncase ASTERISK_TOKEN:\ncase SLASH_TOKEN:\ncase PERCENT_TOKEN:\nreturn OperatorPrecedence.MULTIPLICATIVE;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn OperatorPrecedence.ADDITIVE;\ncase GT_TOKEN:\ncase LT_TOKEN:\ncase GT_EQUAL_TOKEN:\ncase LT_EQUAL_TOKEN:\ncase IS_KEYWORD:\nreturn OperatorPrecedence.BINARY_COMPARE;\ncase DOT_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase ANNOT_CHAINING_TOKEN:\ncase OPTIONAL_CHAINING_TOKEN:\ncase DOT_LT_TOKEN:\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nreturn OperatorPrecedence.MEMBER_ACCESS;\ncase DOUBLE_EQUAL_TOKEN:\ncase TRIPPLE_EQUAL_TOKEN:\ncase NOT_EQUAL_TOKEN:\ncase NOT_DOUBLE_EQUAL_TOKEN:\ncase EQUALS_KEYWORD:\nreturn OperatorPrecedence.EQUALITY;\ncase BITWISE_AND_TOKEN:\nreturn OperatorPrecedence.BITWISE_AND;\ncase BITWISE_XOR_TOKEN:\nreturn OperatorPrecedence.BITWISE_XOR;\ncase PIPE_TOKEN:\nreturn OperatorPrecedence.BITWISE_OR;\ncase LOGICAL_AND_TOKEN:\nreturn OperatorPrecedence.LOGICAL_AND;\ncase LOGICAL_OR_TOKEN:\nreturn OperatorPrecedence.LOGICAL_OR;\ncase RIGHT_ARROW_TOKEN:\nreturn OperatorPrecedence.REMOTE_CALL_ACTION;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn OperatorPrecedence.ANON_FUNC_OR_LET;\ncase SYNC_SEND_TOKEN:\nreturn OperatorPrecedence.ACTION;\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\nreturn OperatorPrecedence.SHIFT;\ncase ELLIPSIS_TOKEN:\ncase DOUBLE_DOT_LT_TOKEN:\nreturn OperatorPrecedence.RANGE;\ncase ELVIS_TOKEN:\nreturn OperatorPrecedence.ELVIS_CONDITIONAL;\ncase QUESTION_MARK_TOKEN:\ncase COLON_TOKEN:\nreturn OperatorPrecedence.CONDITIONAL;\ndefault:\nthrow new UnsupportedOperationException(\"Unsupported binary operator '\" + binaryOpKind + \"'\");\n}\n}\n/**\n*
\n* Get the operator kind to insert during recovery, given the precedence level.\n*
\n*\n* @param opPrecedenceLevel Precedence of the given operator\n* @return Kind of the operator to insert\n*/\nprivate SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) {\nswitch (opPrecedenceLevel) {\ncase DEFAULT:\ncase UNARY:\ncase ACTION:\ncase EXPRESSION_ACTION:\ncase REMOTE_CALL_ACTION:\ncase ANON_FUNC_OR_LET:\ncase QUERY:\ncase MULTIPLICATIVE:\nreturn SyntaxKind.ASTERISK_TOKEN;\ncase ADDITIVE:\nreturn SyntaxKind.PLUS_TOKEN;\ncase SHIFT:\nreturn SyntaxKind.DOUBLE_LT_TOKEN;\ncase RANGE:\nreturn SyntaxKind.ELLIPSIS_TOKEN;\ncase BINARY_COMPARE:\nreturn SyntaxKind.LT_TOKEN;\ncase EQUALITY:\nreturn SyntaxKind.DOUBLE_EQUAL_TOKEN;\ncase BITWISE_AND:\nreturn SyntaxKind.BITWISE_AND_TOKEN;\ncase BITWISE_XOR:\nreturn SyntaxKind.BITWISE_XOR_TOKEN;\ncase BITWISE_OR:\nreturn SyntaxKind.PIPE_TOKEN;\ncase LOGICAL_AND:\nreturn SyntaxKind.LOGICAL_AND_TOKEN;\ncase LOGICAL_OR:\nreturn SyntaxKind.LOGICAL_OR_TOKEN;\ncase ELVIS_CONDITIONAL:\nreturn SyntaxKind.ELVIS_TOKEN;\ndefault:\nthrow new UnsupportedOperationException(\n\"Unsupported operator precedence level'\" + opPrecedenceLevel + \"'\");\n}\n}\n/**\n*
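getOpPrecedence and getBinaryOperatorKindToInsert above form a two-way mapping: every binary operator has a precedence level, and every level has one representative operator that recovery can synthesize when an operator is missing. A minimal sketch of that pairing, with illustrative names (`Prec`, `PRECEDENCE_OF`, `representative` are not the parser's actual API):

```java
import java.util.Map;

// A two-way mapping between operators and precedence levels, as used when
// error recovery must fabricate a plausible missing operator.
enum Prec { MULTIPLICATIVE, ADDITIVE, BINARY_COMPARE }

class PrecedenceTable {
    // Operator -> precedence (cf. getOpPrecedence above).
    static final Map<String, Prec> PRECEDENCE_OF =
            Map.of("*", Prec.MULTIPLICATIVE, "/", Prec.MULTIPLICATIVE,
                   "+", Prec.ADDITIVE, "-", Prec.ADDITIVE,
                   "<", Prec.BINARY_COMPARE, ">", Prec.BINARY_COMPARE);

    // Precedence -> one representative operator to insert during recovery
    // (cf. getBinaryOperatorKindToInsert above).
    static String representative(Prec level) {
        switch (level) {
            case MULTIPLICATIVE: return "*";
            case ADDITIVE:       return "+";
            case BINARY_COMPARE: return "<";
            default: throw new UnsupportedOperationException("level " + level);
        }
    }

    public static void main(String[] args) {
        Prec level = PRECEDENCE_OF.get("-");        // ADDITIVE
        System.out.println(representative(level));  // prints "+"
    }
}
```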
\n* Parse a module type definition.\n*
\n* module-type-defn := metadata [public] type identifier type-descriptor ;\n*\n* @param metadata Metadata\n* @param qualifier Visibility qualifier\n* @return Parsed node\n*/\nprivate STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.MODULE_TYPE_DEFINITION);\nSTNode typeKeyword = parseTypeKeyword();\nSTNode typeName = parseTypeName();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor,\nsemicolon);\n}\n/**\n* Parse type keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse type name.\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPE_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse record type descriptor. A record type descriptor body has the following structure.\n*
\n*\n* record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor\n*
inclusive-record-type-descriptor := record { field-descriptor* }\n*
exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |}\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordTypeDescriptor() {\nstartContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR);\nSTNode recordKeyword = parseRecordKeyword();\nSTNode bodyStartDelimiter = parseRecordBodyStartDelimiter();\nboolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN;\nSTNode fields = parseFieldDescriptors(isInclusive);\nSTNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind);\nendContext();\nreturn STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields,\nbodyEndDelimiter);\n}\n/**\n* Parse record body start delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordBodyStartDelimiter() {\nSTToken token = peek();\nreturn parseRecordBodyStartDelimiter(token.kind);\n}\nprivate STNode parseRecordBodyStartDelimiter(SyntaxKind kind) {\nswitch (kind) {\ncase OPEN_BRACE_PIPE_TOKEN:\nreturn parseClosedRecordBodyStart();\ncase OPEN_BRACE_TOKEN:\nreturn parseOpenBrace();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_BODY_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRecordBodyStartDelimiter(solution.tokenKind);\n}\n}\n/**\n* Parse closed-record body start delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseClosedRecordBodyStart() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse record body close delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) {\nswitch (startingDelimeter) {\ncase OPEN_BRACE_PIPE_TOKEN:\nreturn parseClosedRecordBodyEnd();\ncase OPEN_BRACE_TOKEN:\nreturn parseCloseBrace();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_BODY_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRecordBodyCloseDelimiter(solution.tokenKind);\n}\n}\n/**\n* Parse closed-record body end delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseClosedRecordBodyEnd() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse record keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RECORD_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RECORD_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
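parseRecordTypeDescriptor above keys the whole body off its opening delimiter: `{` opens an inclusive body closed by `}`, while `{|` opens an exclusive body closed by `|}`. A toy version of that pairing (hypothetical names, not the parser's API):

```java
// The close delimiter of a record body is fully determined by the open
// delimiter, so the parser only needs to remember which one it consumed.
class RecordDelimiters {
    static String closeFor(String open) {
        switch (open) {
            case "{":  return "}";   // inclusive record body
            case "{|": return "|}";  // exclusive record body
            default: throw new IllegalArgumentException("not a record body start: " + open);
        }
    }

    public static void main(String[] args) {
        System.out.println(closeFor("{|")); // |}
        System.out.println(closeFor("{"));  // }
    }
}
```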
\n* Parse field descriptors.\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptors(boolean isInclusive) {\nArrayList recordFields = new ArrayList<>();\nSTToken token = peek();\nboolean endOfFields = false;\nwhile (!isEndOfRecordTypeNode(token.kind)) {\nSTNode field = parseFieldOrRestDescriptor(isInclusive);\nif (field == null) {\nendOfFields = true;\nbreak;\n}\nrecordFields.add(field);\ntoken = peek();\nif (field.kind == SyntaxKind.RECORD_REST_TYPE) {\nbreak;\n}\n}\nwhile (!endOfFields && !isEndOfRecordTypeNode(token.kind)) {\nSTNode invalidField = parseFieldOrRestDescriptor(isInclusive);\nupdateLastNodeInListWithInvalidNode(recordFields, invalidField,\nDiagnosticErrorCode.ERROR_MORE_RECORD_FIELDS_AFTER_REST_FIELD);\ntoken = peek();\n}\nreturn STNodeFactory.createNodeList(recordFields);\n}\n/**\n*
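parseFieldDescriptors above is a two-phase loop: it collects fields until a record-rest-descriptor appears, then keeps consuming but attaches everything after the rest descriptor to the previous node as invalid. The same shape reduced to plain lists (diagnostics shortened to strings; all names hypothetical):

```java
import java.util.ArrayList;
import java.util.List;

// Two-phase collection: accept items until the terminator item ("..."),
// then keep consuming but record everything after it as invalid.
class RestFieldLoop {
    public static void main(String[] args) {
        List<String> input = List.of("a", "b", "...", "c", "d");
        List<String> fields = new ArrayList<>();
        List<String> invalid = new ArrayList<>();
        boolean seenRest = false;
        for (String item : input) {
            if (seenRest) {
                invalid.add(item);  // cf. ERROR_MORE_RECORD_FIELDS_AFTER_REST_FIELD
            } else {
                fields.add(item);
                seenRest = item.equals("...");
            }
        }
        System.out.println(fields);  // [a, b, ...]
        System.out.println(invalid); // [c, d]
    }
}
```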
\n* Parse field descriptor or rest descriptor.\n*
\n*\n*
field-descriptor := individual-field-descriptor | record-type-reference\n*
individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ;\n*
field-name := identifier\n*
default-value := = expression\n*
record-type-reference := * type-reference ;\n*
record-rest-descriptor := type-descriptor ... ;\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldOrRestDescriptor(boolean isInclusive) {\nreturn parseFieldOrRestDescriptor(peek().kind, isInclusive);\n}\nprivate STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\nstartContext(ParserRuleContext.RECORD_FIELD);\nSTNode asterisk = consume();\nSTNode type = parseTypeReference();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nstartContext(ParserRuleContext.RECORD_FIELD);\nSTNode metadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nreturn parseRecordField(nextTokenKind, isInclusive, metadata);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nstartContext(ParserRuleContext.RECORD_FIELD);\nmetadata = createEmptyMetadata();\nreturn parseRecordField(nextTokenKind, isInclusive, metadata);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldOrRestDescriptor(solution.tokenKind, isInclusive);\n}\n}\nprivate STNode parseRecordField(SyntaxKind nextTokenKind, boolean isInclusive, STNode metadata) {\nif (nextTokenKind != SyntaxKind.READONLY_KEYWORD) {\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\nSTNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, metadata, type);\nendContext();\nreturn fieldOrRestDesc;\n}\nSTNode type;\nSTNode fieldOrRestDesc;\nSTNode readOnlyQualifier;\nreadOnlyQualifier = parseReadonlyKeyword();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode fieldNameOrTypeDesc = parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME);\nif (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\ntype = fieldNameOrTypeDesc;\n} else {\nnextToken = peek();\nswitch (nextToken.kind) {\ncase SEMICOLON_TOKEN:\ncase EQUAL_TOKEN:\ntype = createBuiltinSimpleNameReference(readOnlyQualifier);\nreadOnlyQualifier = STNodeFactory.createEmptyNode();\nSTNode fieldName = ((STSimpleNameReferenceNode) fieldNameOrTypeDesc).name;\nreturn parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName);\ndefault:\ntype = fieldNameOrTypeDesc;\nbreak;\n}\n}\n} else if (isTypeStartingToken(nextToken.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\n} else {\nreadOnlyQualifier = createBuiltinSimpleNameReference(readOnlyQualifier);\ntype = parseComplexTypeDescriptor(readOnlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);\nreadOnlyQualifier = STNodeFactory.createEmptyNode();\n}\nfieldOrRestDesc = parseIndividualRecordField(metadata, readOnlyQualifier, type);\nendContext();\nreturn fieldOrRestDesc;\n}\nprivate STNode parseFieldDescriptor(boolean isInclusive, STNode metadata, STNode type) {\nif (isInclusive) {\nSTNode readOnlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseIndividualRecordField(metadata, readOnlyQualifier, type);\n} else {\nreturn parseFieldOrRestDescriptorRhs(metadata, type);\n}\n}\nprivate STNode parseIndividualRecordField(STNode metadata, STNode readOnlyQualifier, STNode type) {\nSTNode fieldName = parseVariableName();\nreturn parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName);\n}\n/**\n* 
Parse type reference.\n* type-reference := identifier | qualified-identifier\n*\n* @return Type reference node\n*/\nprivate STNode parseTypeReference() {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE, false);\n}\nprivate STNode parseTypeReference(boolean isInConditionalExpr) {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE, isInConditionalExpr);\n}\n/**\n* Parse identifier or qualified identifier.\n*\n* @return Identifier node\n*/\nprivate STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) {\nreturn parseQualifiedIdentifier(currentCtx, false);\n}\nprivate STNode parseQualifiedIdentifier(ParserRuleContext currentCtx, boolean isInConditionalExpr) {\nSTToken token = peek();\nSTNode typeRefOrPkgRef;\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\ntypeRefOrPkgRef = consume();\n} else {\nSolution sol = recover(token, currentCtx, isInConditionalExpr);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nif (sol.tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseQualifiedIdentifier(currentCtx, isInConditionalExpr);\n}\ntypeRefOrPkgRef = sol.recoveredNode;\n}\nreturn parseQualifiedIdentifier(typeRefOrPkgRef, isInConditionalExpr);\n}\n/**\n* Parse identifier or qualified identifier, given the starting identifier.\n*\n* @param identifier Starting identifier\n* @return Parse node\n*/\nprivate STNode parseQualifiedIdentifier(STNode identifier, boolean isInConditionalExpr) {\nSTToken nextToken = peek(1);\nif (nextToken.kind != SyntaxKind.COLON_TOKEN) {\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\nSTToken nextNextToken = peek(2);\nswitch (nextNextToken.kind) {\ncase IDENTIFIER_TOKEN:\nSTToken colon = consume();\nSTNode varOrFuncName = consume();\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);\ncase MAP_KEYWORD:\ncolon = consume();\nSTToken mapKeyword = consume();\nSTNode refName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, refName);\ncase COLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseQualifiedIdentifier(identifier, isInConditionalExpr);\ndefault:\nif (isInConditionalExpr) {\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\ncolon = consume();\nvarOrFuncName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);\n}\n}\n/**\n* Parse RHS of a field or rest type descriptor.\n*\n* @param metadata Metadata\n* @param type Type descriptor\n* @return Parsed node\n*/\nprivate STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) {\nSTToken token = peek();\nreturn parseFieldOrRestDescriptorRhs(token.kind, metadata, type);\n}\nprivate STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) {\nswitch (kind) {\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken);\ncase IDENTIFIER_TOKEN:\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseIndividualRecordField(metadata, readonlyQualifier, type);\ndefault:\nSTToken token = 
peek();\nSolution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type);\n}\n}\n/**\n*
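parseQualifiedIdentifier above needs only two tokens of lookahead: an identifier followed by `:` and another identifier is a qualified name; anything else stays a simple name reference. The same decision over a flat token list (a sketch, not the real token API):

```java
import java.util.List;

// Two-token lookahead: ident ":" ident  ->  qualified name,
// otherwise the identifier stands alone as a simple name.
class QualifiedName {
    static String parse(List<String> tokens, int pos) {
        String first = tokens.get(pos);
        if (pos + 2 < tokens.size()
                && tokens.get(pos + 1).equals(":")
                && isIdentifier(tokens.get(pos + 2))) {
            return first + ":" + tokens.get(pos + 2); // qualified: module:name
        }
        return first; // simple name reference
    }

    static boolean isIdentifier(String t) {
        return !t.isEmpty() && t.chars().allMatch(Character::isLetter);
    }

    public static void main(String[] args) {
        System.out.println(parse(List.of("io", ":", "println", "("), 0)); // io:println
        System.out.println(parse(List.of("x", "=", "5"), 0));             // x
    }
}
```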
\n* Parse field descriptor rhs.\n*
\n*\n* @param metadata Metadata\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptorRhs(STNode metadata, STNode readonlyQualifier, STNode type, STNode fieldName) {\nSTToken token = peek();\nreturn parseFieldDescriptorRhs(token.kind, metadata, readonlyQualifier, type, fieldName);\n}\n/**\n*
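The overload below dispatches on the token after the field name: `;` ends a required field, `?` marks an optional field, and `=` introduces a default value. The same three-way split in miniature (hypothetical names):

```java
import java.util.List;

// A record field tail is ";" (required), "? ;" (optional field) or
// "= <expr> ;" (default value) -- cf. parseFieldDescriptorRhs below.
class FieldTail {
    static String classify(List<String> tail) {
        switch (tail.get(0)) {
            case ";": return "required-field";
            case "?": return "optional-field";
            case "=": return "field-with-default(" + tail.get(1) + ")";
            default:  return "recovery-needed";
        }
    }

    public static void main(String[] args) {
        System.out.println(classify(List.of(";")));           // required-field
        System.out.println(classify(List.of("?", ";")));      // optional-field
        System.out.println(classify(List.of("=", "0", ";"))); // field-with-default(0)
    }
}
```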
\n* Parse field descriptor rhs.\n*
\n*\n* field-descriptor := [? | default-value] ;\n*
default-value := = expression\n*
\n*\n* @param kind Kind of the next token\n* @param metadata Metadata\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode readonlyQualifier, STNode type,\nSTNode fieldName) {\nswitch (kind) {\ncase SEMICOLON_TOKEN:\nSTNode questionMarkToken = STNodeFactory.createEmptyNode();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName,\nquestionMarkToken, semicolonToken);\ncase QUESTION_MARK_TOKEN:\nquestionMarkToken = parseQuestionMark();\nsemicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName,\nquestionMarkToken, semicolonToken);\ncase EQUAL_TOKEN:\nSTNode equalsToken = parseAssignOp();\nSTNode expression = parseExpression();\nsemicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, readonlyQualifier, type, fieldName,\nequalsToken, expression, semicolonToken);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, readonlyQualifier,\ntype, fieldName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldDescriptorRhs(solution.tokenKind, metadata, readonlyQualifier, type, fieldName);\n}\n}\n/**\n* Parse question mark.\n*\n* @return Parsed node\n*/\nprivate STNode parseQuestionMark() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.QUESTION_MARK);\nreturn sol.recoveredNode;\n}\n}\n/*\n* Statements\n*/\n/**\n* Parse statements, until an end of a block is reached.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatements() {\nArrayList stmts = new ArrayList<>();\nreturn parseStatements(stmts);\n}\nprivate STNode parseStatements(ArrayList stmts) {\nwhile (!isEndOfStatements()) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\naddInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE);\nbreak;\n}\nstmts.add(stmt);\n}\nreturn STNodeFactory.createNodeList(stmts);\n}\n/**\n* Parse a single statement.\n*\n* @return Parsed node\n*/\nprotected STNode parseStatement() {\nSTToken token = peek();\nreturn parseStatement(token.kind, 1);\n}\nprivate STNode parseStatement(SyntaxKind tokenKind, int nextTokenIndex) {\nSTNode annots = null;\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase SEMICOLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseStatement();\ncase AT_TOKEN:\nannots = parseAnnotations(tokenKind);\ntokenKind = peek().kind;\nbreak;\ncase FINAL_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase PANIC_KEYWORD:\ncase CONTINUE_KEYWORD:\ncase BREAK_KEYWORD:\ncase RETURN_KEYWORD:\ncase TYPE_KEYWORD:\ncase LOCK_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase FORK_KEYWORD:\ncase FOREACH_KEYWORD:\ncase XMLNS_KEYWORD:\ncase TRANSACTION_KEYWORD:\ncase RETRY_KEYWORD:\ncase ROLLBACK_KEYWORD:\ncase MATCH_KEYWORD:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase TRAP_KEYWORD:\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase COMMIT_KEYWORD:\ncase WORKER_KEYWORD:\nbreak;\ndefault:\nif (isTypeStartingToken(tokenKind)) {\nbreak;\n}\nif (isValidExpressionStart(tokenKind, nextTokenIndex)) 
{\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STATEMENT, nextTokenIndex);\nif (solution.action == Action.KEEP) {\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatement(solution.tokenKind, nextTokenIndex);\n}\nreturn parseStatement(tokenKind, annots, nextTokenIndex);\n}\nprivate STNode getAnnotations(STNode nullbaleAnnot) {\nif (nullbaleAnnot != null) {\nreturn nullbaleAnnot;\n}\nreturn STNodeFactory.createEmptyNodeList();\n}\nprivate STNode parseStatement(STNode annots) {\nreturn parseStatement(peek().kind, annots, 1);\n}\n/**\n* Parse a single statement, given the next token kind.\n*\n* @param tokenKind Next token kind\n* @return Parsed node\n*/\nprivate STNode parseStatement(SyntaxKind tokenKind, STNode annots, int nextTokenIndex) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\naddInvalidNodeToNextToken(annots, DiagnosticErrorCode.ERROR_INVALID_ANNOTATIONS);\nreturn null;\ncase SEMICOLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseStatement(annots);\ncase FINAL_KEYWORD:\nSTNode finalKeyword = parseFinalKeyword();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword, false);\ncase IF_KEYWORD:\nreturn parseIfElseBlock();\ncase WHILE_KEYWORD:\nreturn parseWhileStatement();\ncase PANIC_KEYWORD:\nreturn parsePanicStatement();\ncase CONTINUE_KEYWORD:\nreturn parseContinueStatement();\ncase BREAK_KEYWORD:\nreturn parseBreakStatement();\ncase RETURN_KEYWORD:\nreturn parseReturnStatement();\ncase TYPE_KEYWORD:\nreturn parseLocalTypeDefinitionStatement(getAnnotations(annots));\ncase LOCK_KEYWORD:\nreturn parseLockStatement();\ncase OPEN_BRACE_TOKEN:\nreturn parseStatementStartsWithOpenBrace();\ncase WORKER_KEYWORD:\nreturn parseNamedWorkerDeclaration(getAnnotations(annots));\ncase FORK_KEYWORD:\nreturn parseForkStatement();\ncase FOREACH_KEYWORD:\nreturn parseForEachStatement();\ncase START_KEYWORD:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase TRAP_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase FROM_KEYWORD:\ncase COMMIT_KEYWORD:\nreturn parseExpressionStatement(tokenKind, getAnnotations(annots));\ncase XMLNS_KEYWORD:\nreturn parseXMLNamespaceDeclaration(false);\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionStatement();\ncase RETRY_KEYWORD:\nreturn parseRetryStatement();\ncase ROLLBACK_KEYWORD:\nreturn parseRollbackStatement();\ncase OPEN_BRACKET_TOKEN:\nreturn parseStatementStartsWithOpenBracket(getAnnotations(annots), false);\ncase FUNCTION_KEYWORD:\ncase OPEN_PAREN_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_KEYWORD:\ncase XML_KEYWORD:\nreturn parseStmtStartsWithTypeOrExpr(tokenKind, getAnnotations(annots));\ncase MATCH_KEYWORD:\nreturn parseMatchStatement();\ndefault:\nif (isValidExpressionStart(tokenKind, nextTokenIndex)) {\nreturn parseStatementStartWithExpr(getAnnotations(annots));\n}\nif (isTypeStartingToken(tokenKind)) {\nfinalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword, false);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots, nextTokenIndex);\nif (solution.action == Action.KEEP) {\nfinalKeyword = STNodeFactory.createEmptyNode();\nreturn 
parseVariableDecl(getAnnotations(annots), finalKeyword, false);\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatement(solution.tokenKind, annots, nextTokenIndex - 1);\n}\n}\n/**\n*
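The statement parser above is a flat dispatch on the leading token: annotations are peeled off first, each keyword then selects a dedicated sub-parser, a stray `;` is skipped as an invalid token, and anything unrecognized goes through recovery. Its skeleton, stripped of the Ballerina-specific cases (hypothetical names):

```java
import java.util.List;

// Skeleton of leading-token statement dispatch.
class StatementDispatch {
    static String parseStatement(List<String> tokens) {
        int pos = tokens.get(0).equals("@") ? 1 : 0;   // cf. parseAnnotations above
        switch (tokens.get(pos)) {
            case "if":     return "if-else-block";
            case "while":  return "while-stmt";
            case "return": return "return-stmt";
            case ";":      return "skip-invalid-token-and-retry";
            default:       return "recover-or-var-decl-or-expr-stmt";
        }
    }

    public static void main(String[] args) {
        System.out.println(parseStatement(List.of("while", "(", "x", ")"))); // while-stmt
        System.out.println(parseStatement(List.of("foo", "=", "1", ";")));   // recover-or-var-decl-or-expr-stmt
    }
}
```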
\n* Parse variable declaration. A variable declaration can appear at local or module level.\n*
\n*\n* local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt\n*
\n* local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ;\n*
\n* local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ;\n*
\n*\n* @param annots Annotations or metadata\n* @param finalKeyword Final keyword\n* @return Parsed node\n*/\nprivate STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) {\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode typeBindingPattern = parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, isModuleVar);\n}\n/**\n* Parse final keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseFinalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FINAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FINAL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse the right hand side of a variable declaration statement.\n*
\n* \n* var-decl-rhs := ; | = action-or-expr ;\n* \n*\n* @param metadata metadata\n* @param finalKeyword Final keyword\n* @param typedBindingPattern Typed binding pattern\n* @return Parsed node\n*/\nprivate STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode typedBindingPattern,\nboolean isModuleVar) {\nSTToken token = peek();\nreturn parseVarDeclRhs(token.kind, metadata, finalKeyword, typedBindingPattern, isModuleVar);\n}\n/**\n* Parse the right hand side of a variable declaration statement, given the\n* next token kind.\n*\n* @param tokenKind Next token kind\n* @param metadata Metadata\n* @param finalKeyword Final keyword\n* @param typedBindingPattern Typed binding pattern\n* @param isModuleVar flag indicating whether the var is module level\n* @return Parsed node\n*/\nprivate STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword,\nSTNode typedBindingPattern, boolean isModuleVar) {\nSTNode assign;\nSTNode expr;\nSTNode semicolon;\nswitch (tokenKind) {\ncase EQUAL_TOKEN:\nassign = parseAssignOp();\nif (isModuleVar) {\nexpr = parseExpression();\n} else {\nexpr = parseActionOrExpression();\n}\nsemicolon = parseSemicolon();\nbreak;\ncase SEMICOLON_TOKEN:\nassign = STNodeFactory.createEmptyNode();\nexpr = STNodeFactory.createEmptyNode();\nsemicolon = parseSemicolon();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword,\ntypedBindingPattern, isModuleVar);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, typedBindingPattern, isModuleVar);\n}\nendContext();\nif (isModuleVar) {\nreturn STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern,\nassign, expr, semicolon);\n}\nreturn STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern, assign, expr,\nsemicolon);\n}\n/**\n*
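parseVarDeclRhs above accepts exactly two tails after the typed binding pattern: a bare `;`, or `= expression ;`. A reduced sketch of the same two shapes (hypothetical names; recovery replaced by an exception):

```java
import java.util.List;
import java.util.Optional;

// A variable declaration tail is either ";" or "= <expr> ;". Anything else
// would go through recovery in the real parser; here we just throw.
class VarDeclRhs {
    record Tail(Optional<String> initializer) {}

    static Tail parseTail(List<String> tokens) {
        if (tokens.equals(List.of(";"))) {
            return new Tail(Optional.empty());            // local-no-init-var-decl-stmt
        }
        if (tokens.size() == 3 && tokens.get(0).equals("=") && tokens.get(2).equals(";")) {
            return new Tail(Optional.of(tokens.get(1)));  // local-init-var-decl-stmt
        }
        throw new IllegalStateException("recovery needed: " + tokens);
    }

    public static void main(String[] args) {
        System.out.println(parseTail(List.of(";")));            // Tail[initializer=Optional.empty]
        System.out.println(parseTail(List.of("=", "42", ";"))); // Tail[initializer=Optional[42]]
    }
}
```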
\n* Parse the RHS portion of the assignment.\n*
\n* assignment-stmt-rhs := = action-or-expr ;\n*\n* @param lvExpr LHS expression\n* @return Parsed node\n*/\nprivate STNode parseAssignmentStmtRhs(STNode lvExpr) {\nSTNode assign = parseAssignOp();\nSTNode expr = parseActionOrExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nif (lvExpr.kind == SyntaxKind.FUNCTION_CALL &&\nisPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) lvExpr)) {\nlvExpr = getBindingPattern(lvExpr);\n}\nboolean lvExprValid = isValidLVExpr(lvExpr);\nif (!lvExprValid) {\nSTNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nlvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,\nDiagnosticErrorCode.ERROR_INVALID_EXPR_IN_ASSIGNMENT_LHS);\n}\nreturn STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon);\n}\n/*\n* Expressions\n*/\n/**\n* Parse expression. This will start parsing expressions from the lowest level of precedence.\n*\n* @return Parsed node\n*/\nprotected STNode parseExpression() {\nreturn parseExpression(DEFAULT_OP_PRECEDENCE, true, false);\n}\n/**\n* Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence.\n*\n* @return Parsed node\n*/\nprivate STNode parseActionOrExpression() {\nreturn parseExpression(DEFAULT_OP_PRECEDENCE, true, true);\n}\nprivate STNode parseActionOrExpressionInLhs(SyntaxKind tokenKind, STNode annots) {\nreturn parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, annots, false, true, false);\n}\n/**\n* Parse expression.\n*\n* @param isRhsExpr Flag indicating whether this is a rhs expression\n* @return Parsed node\n*/\nprivate STNode parseExpression(boolean isRhsExpr) {\nreturn parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false);\n}\nprivate boolean isValidLVExpr(STNode expression) {\nswitch (expression.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase FUNCTIONAL_BINDING_PATTERN:\nreturn true;\ncase FIELD_ACCESS:\nreturn isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression);\ncase INDEXED_EXPRESSION:\nreturn isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression);\ndefault:\nreturn (expression instanceof STMissingToken);\n}\n}\nprivate boolean isValidLVMemberExpr(STNode expression) {\nswitch (expression.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nreturn true;\ncase FIELD_ACCESS:\nreturn isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression);\ncase INDEXED_EXPRESSION:\nreturn isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression);\ncase BRACED_EXPRESSION:\nreturn isValidLVMemberExpr(((STBracedExpressionNode) expression).expression);\ndefault:\nreturn (expression instanceof STMissingToken);\n}\n}\n/**\n* Parse an expression that has an equal or higher precedence than a given level.\n*\n* @param precedenceLevel Precedence level of expression to be parsed\n* @param isRhsExpr Flag indicating whether this is a rhs expression\n* @param allowActions Flag indicating whether the current context support actions\n* @return Parsed node\n*/\nprivate STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) {\nreturn parseExpression(precedenceLevel, isRhsExpr, allowActions, false);\n}\nprivate STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, 
boolean allowActions,\nboolean isInConditionalExpr) {\nSTToken token = peek();\nreturn parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions, false, isInConditionalExpr);\n}\nprivate STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr,\nboolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) {\nSTNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions, isInConditionalExpr);\nreturn parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr);\n}\nprivate STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, STNode annots,\nboolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\nreturn parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, false, isInConditionalExpr);\n}\n/**\n* Parse terminal expressions. A terminal expression has the highest precedence level\n* out of all expressions, and will be at the leaves of an expression tree.\n*\n* @param annots Annotations\n* @param isRhsExpr Is a rhs expression\n* @param allowActions Allow actions\n* @return Parsed node\n*/\nprivate STNode parseTerminalExpression(STNode annots, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nreturn parseTerminalExpression(peek().kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\n}\nprivate STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nSTNode annots;\nif (kind == SyntaxKind.AT_TOKEN) {\nannots = parseAnnotations();\nkind = peek().kind;\n} else {\nannots = STNodeFactory.createEmptyNodeList();\n}\nSTNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\nif (!isNodeListEmpty(annots) && expr.kind != SyntaxKind.START_ACTION) {\nexpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(expr, annots,\nDiagnosticErrorCode.ERROR_ANNOTATIONS_ATTACHED_TO_EXPRESSION);\n}\nreturn expr;\n}\nprivate STNode parseTerminalExpression(SyntaxKind kind, STNode annots, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nswitch (kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn parseBasicLiteral();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF, isInConditionalExpr);\ncase OPEN_PAREN_TOKEN:\nreturn parseBracedExpression(isRhsExpr, allowActions);\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\nreturn parseCheckExpression(isRhsExpr, allowActions, isInConditionalExpr);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingConstructorExpr();\ncase TYPEOF_KEYWORD:\nreturn parseTypeofExpression(isRhsExpr, isInConditionalExpr);\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\nreturn parseUnaryExpression(isRhsExpr, isInConditionalExpr);\ncase TRAP_KEYWORD:\nreturn parseTrapExpression(isRhsExpr, allowActions, isInConditionalExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseListConstructorExpr();\ncase LT_TOKEN:\nreturn parseTypeCastExpr(isRhsExpr, allowActions, isInConditionalExpr);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\ncase FROM_KEYWORD:\nreturn parseTableConstructorOrQuery(isRhsExpr);\ncase ERROR_KEYWORD:\nreturn parseErrorConstructorExpr();\ncase 
LET_KEYWORD:\nreturn parseLetExpression(isRhsExpr);\ncase BACKTICK_TOKEN:\nreturn parseTemplateExpression();\ncase XML_KEYWORD:\nSTToken nextNextToken = getNextNextToken(kind);\nif (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseXMLTemplateExpression();\n}\nreturn parseSimpleTypeDescriptor();\ncase STRING_KEYWORD:\nnextNextToken = getNextNextToken(kind);\nif (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseStringTemplateExpression();\n}\nreturn parseSimpleTypeDescriptor();\ncase FUNCTION_KEYWORD:\nreturn parseExplicitFunctionExpression(annots, isRhsExpr);\ncase AT_TOKEN:\nbreak;\ncase NEW_KEYWORD:\nreturn parseNewExpression();\ncase START_KEYWORD:\nreturn parseStartAction(annots);\ncase FLUSH_KEYWORD:\nreturn parseFlushAction();\ncase LEFT_ARROW_TOKEN:\nreturn parseReceiveAction();\ncase WAIT_KEYWORD:\nreturn parseWaitAction();\ncase COMMIT_KEYWORD:\nreturn parseCommitAction();\ncase TRANSACTIONAL_KEYWORD:\nreturn parseTransactionalExpression();\ncase SERVICE_KEYWORD:\nreturn parseServiceConstructorExpression(annots);\ncase BASE16_KEYWORD:\ncase BASE64_KEYWORD:\nreturn parseByteArrayLiteral(kind);\ndefault:\nif (isSimpleType(kind)) {\nreturn parseSimpleTypeDescriptor();\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, annots, isRhsExpr, allowActions,\nisInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nif (kind == SyntaxKind.XML_KEYWORD) {\nreturn parseXMLTemplateExpression();\n}\nreturn parseStringTemplateExpression();\n}\nswitch (solution.tokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(solution.recoveredNode, isInConditionalExpr);\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn solution.recoveredNode;\ndefault:\nreturn parseTerminalExpression(solution.tokenKind, annots, isRhsExpr, allowActions,\nisInConditionalExpr);\n}\n}\nprivate boolean isValidExprStart(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase IDENTIFIER_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase TYPEOF_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\ncase TRAP_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase LT_TOKEN:\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\ncase FROM_KEYWORD:\ncase ERROR_KEYWORD:\ncase LET_KEYWORD:\ncase BACKTICK_TOKEN:\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase AT_TOKEN:\ncase NEW_KEYWORD:\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase SERVICE_KEYWORD:\nreturn true;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n*
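In parseTerminalExpression above, `xml` and `string` only start template expressions when the very next token is a backtick; otherwise they are read as type descriptors. The same one-token disambiguation in isolation (a sketch, illustrative names):

```java
import java.util.List;

// "xml" / "string" are ambiguous: a following backtick means a template
// expression, anything else means a type descriptor.
class TemplateOrType {
    static String classify(List<String> tokens, int pos) {
        String kw = tokens.get(pos);
        boolean backtickNext = pos + 1 < tokens.size() && tokens.get(pos + 1).equals("`");
        if (kw.equals("xml"))    return backtickNext ? "xml-template-expr" : "xml-type-desc";
        if (kw.equals("string")) return backtickNext ? "string-template-expr" : "string-type-desc";
        return "other";
    }

    public static void main(String[] args) {
        System.out.println(classify(List.of("xml", "`", "<a/>", "`"), 0)); // xml-template-expr
        System.out.println(classify(List.of("string", "name"), 0));        // string-type-desc
    }
}
```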
\n* Parse a new expression.\n*
\n*\n* new-expr := explicit-new-expr | implicit-new-expr\n*
\n* explicit-new-expr := new type-descriptor ( arg-list )\n*
\n* implicit-new-expr := new [( arg-list )]\n*
\n*\n* @return Parsed NewExpression node.\n*/\nprivate STNode parseNewExpression() {\nSTNode newKeyword = parseNewKeyword();\nreturn parseNewKeywordRhs(newKeyword);\n}\n/**\n*
\n* Parse `new` keyword.\n*
\n*\n* @return Parsed NEW_KEYWORD Token.\n*/\nprivate STNode parseNewKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.NEW_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.NEW_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseNewKeywordRhs(STNode newKeyword) {\nSTNode token = peek();\nreturn parseNewKeywordRhs(token.kind, newKeyword);\n}\n/**\n*
\n* Parse an implicit or explicit new expression.\n*
\n*\n* @param kind next token kind.\n* @param newKeyword parsed node for `new` keyword.\n* @return Parsed new-expression node.\n*/\nprivate STNode parseNewKeywordRhs(SyntaxKind kind, STNode newKeyword) {\nswitch (kind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseImplicitNewRhs(newKeyword);\ncase SEMICOLON_TOKEN:\nbreak;\ncase IDENTIFIER_TOKEN:\ncase OBJECT_KEYWORD:\ncase STREAM_KEYWORD:\nreturn parseTypeDescriptorInNewExpr(newKeyword);\ndefault:\nbreak;\n}\nreturn STNodeFactory.createImplicitNewExpressionNode(newKeyword, STNodeFactory.createEmptyNode());\n}\n/**\n*
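parseNewKeywordRhs above separates `new (...)` (implicit, arguments only) from `new T(...)` (explicit, with a type descriptor) purely by the token after `new`. Reduced to strings (hypothetical names):

```java
import java.util.List;

// After "new": "(" -> implicit-new-expr with args; ";" or nothing -> bare
// implicit-new-expr; an identifier (or object/stream keyword) -> explicit-new-expr.
class NewExprShape {
    static String classify(List<String> tokensAfterNew) {
        if (tokensAfterNew.isEmpty()) return "implicit-new (no args)";
        switch (tokensAfterNew.get(0)) {
            case "(": return "implicit-new (with args)";
            case ";": return "implicit-new (no args)";
            default:  return "explicit-new: new " + tokensAfterNew.get(0) + "(...)";
        }
    }

    public static void main(String[] args) {
        System.out.println(classify(List.of("(", ")")));           // implicit-new (with args)
        System.out.println(classify(List.of("Person", "(", ")"))); // explicit-new: new Person(...)
    }
}
```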
\n* Parse an explicit new expression.\n*
\n*\n* explicit-new-expr := new type-descriptor ( arg-list )\n*\n*\n* @param newKeyword Parsed `new` keyword.\n* @return the parsed explicit-new-expr node.\n*/\nprivate STNode parseTypeDescriptorInNewExpr(STNode newKeyword) {\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_NEW_EXPR);\nSTNode parenthesizedArgsList = parseParenthesizedArgList();\nreturn STNodeFactory.createExplicitNewExpressionNode(newKeyword, typeDescriptor, parenthesizedArgsList);\n}\n/**\n*
\n* Parse an implicit-new-expr with arguments.\n*
\n*\n* @param newKeyword Parsed `new` keyword.\n* @return Parsed implicit-new-expr.\n*/\nprivate STNode parseImplicitNewRhs(STNode newKeyword) {\nSTNode implicitNewArgList = parseParenthesizedArgList();\nreturn STNodeFactory.createImplicitNewExpressionNode(newKeyword, implicitNewArgList);\n}\n/**\n*
\n* Parse the parenthesized argument list for a new-expr.\n*
\n*\n* @return Parsed parenthesized rhs of new-expr.\n*/\nprivate STNode parseParenthesizedArgList() {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode arguments = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesizedArgList(openParen, arguments, closeParen);\n}\n/**\n*
\n* Parse the right-hand-side of an expression.\n*
\n* expr-rhs := (binary-op expression\n* | dot identifier\n* | open-bracket expression close-bracket\n* )*\n*\n* @param precedenceLevel Precedence level of the expression that is being parsed currently\n* @param lhsExpr LHS expression of the expression\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @param allowActions Flag indicating whether the current context support actions\n* @return Parsed node\n*/\nprivate STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr,\nboolean allowActions) {\nreturn parseExpressionRhs(precedenceLevel, lhsExpr, isRhsExpr, allowActions, false, false);\n}\nprivate STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr,\nboolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) {\nSTToken token = peek();\nreturn parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard,\nisInConditionalExpr);\n}\n/**\n* Parse the right hand side of an expression given the next token kind.\n*\n* @param tokenKind Next token kind\n* @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently\n* @param lhsExpr LHS expression\n* @param isRhsExpr Flag indicating whether this is a rhs expr or not\n* @param allowActions Flag indicating whether to allow actions or not\n* @param isInMatchGuard Flag indicating whether this expression is in a match-guard\n* @return Parsed node\n*/\nprivate STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr,\nboolean isRhsExpr, boolean allowActions, boolean isInMatchGuard,\nboolean isInConditionalExpr) {\nif (isEndOfExpression(tokenKind, isRhsExpr, isInMatchGuard, lhsExpr.kind)) {\nreturn lhsExpr;\n}\nif (lhsExpr.kind == SyntaxKind.ASYNC_SEND_ACTION) {\nreturn lhsExpr;\n}\nif (!isValidExprRhsStart(tokenKind, lhsExpr.kind)) {\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr,\nisRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.ctx == ParserRuleContext.BINARY_OPERATOR) {\nSyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel);\nreturn parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions,\nisInMatchGuard, isInConditionalExpr);\n} else {\nreturn parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions,\nisInMatchGuard, isInConditionalExpr);\n}\n}\nif (tokenKind == SyntaxKind.GT_TOKEN && peek(2).kind == SyntaxKind.GT_TOKEN) {\nif (peek(3).kind == SyntaxKind.GT_TOKEN) {\ntokenKind = SyntaxKind.TRIPPLE_GT_TOKEN;\n} else {\ntokenKind = SyntaxKind.DOUBLE_GT_TOKEN;\n}\n}\nOperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind);\nif (currentPrecedenceLevel.isHigherThanOrEqual(nextOperatorPrecedence, allowActions)) {\nreturn lhsExpr;\n}\nSTNode newLhsExpr;\nSTNode operator;\nswitch (tokenKind) {\ncase OPEN_PAREN_TOKEN:\nnewLhsExpr = parseFuncCall(lhsExpr);\nbreak;\ncase OPEN_BRACKET_TOKEN:\nnewLhsExpr = parseMemberAccessExpr(lhsExpr, isRhsExpr);\nbreak;\ncase DOT_TOKEN:\nnewLhsExpr = parseFieldAccessOrMethodCall(lhsExpr, isInConditionalExpr);\nbreak;\ncase IS_KEYWORD:\nnewLhsExpr = parseTypeTestExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase RIGHT_ARROW_TOKEN:\nnewLhsExpr = 
parseRemoteMethodCallOrAsyncSendAction(lhsExpr, isRhsExpr);\nif (!allowActions) {\nnewLhsExpr = SyntaxErrors.addDiagnostic(newLhsExpr,\nDiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND);\n}\nbreak;\ncase SYNC_SEND_TOKEN:\nnewLhsExpr = parseSyncSendAction(lhsExpr);\nif (!allowActions) {\nnewLhsExpr = SyntaxErrors.addDiagnostic(newLhsExpr,\nDiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND);\n}\nbreak;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nnewLhsExpr = parseImplicitAnonFunc(lhsExpr, isRhsExpr);\nbreak;\ncase ANNOT_CHAINING_TOKEN:\nnewLhsExpr = parseAnnotAccessExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase OPTIONAL_CHAINING_TOKEN:\nnewLhsExpr = parseOptionalFieldAccessExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase QUESTION_MARK_TOKEN:\nnewLhsExpr = parseConditionalExpression(lhsExpr);\nbreak;\ncase DOT_LT_TOKEN:\nnewLhsExpr = parseXMLFilterExpression(lhsExpr);\nbreak;\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nnewLhsExpr = parseXMLStepExpression(lhsExpr);\nbreak;\ndefault:\nif (tokenKind == SyntaxKind.DOUBLE_GT_TOKEN) {\noperator = parseSignedRightShiftToken();\n} else if (tokenKind == SyntaxKind.TRIPPLE_GT_TOKEN) {\noperator = parseUnsignedRightShiftToken();\n} else {\noperator = parseBinaryOperator();\n}\nSTNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false, isInConditionalExpr);\nnewLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator,\nrhsExpr);\nbreak;\n}\nreturn parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions, isInMatchGuard,\nisInConditionalExpr);\n}\nprivate boolean isValidExprRhsStart(SyntaxKind tokenKind, SyntaxKind precedingNodeKind) {\nswitch (tokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn precedingNodeKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||\nprecedingNodeKind == SyntaxKind.SIMPLE_NAME_REFERENCE;\ncase DOT_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase IS_KEYWORD:\ncase RIGHT_ARROW_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\ncase SYNC_SEND_TOKEN:\ncase ANNOT_CHAINING_TOKEN:\ncase OPTIONAL_CHAINING_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase COLON_TOKEN:\ncase DOT_LT_TOKEN:\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nreturn true;\ndefault:\nreturn isBinaryOperator(tokenKind);\n}\n}\n/**\n* Parse member access expression.\n*\n* @param lhsExpr Container expression\n* @param isRhsExpr Is this is a rhs expression\n* @return Member access expression\n*/\nprivate STNode parseMemberAccessExpr(STNode lhsExpr, boolean isRhsExpr) {\nstartContext(ParserRuleContext.MEMBER_ACCESS_KEY_EXPR);\nSTNode openBracket = parseOpenBracket();\nSTNode keyExpr = parseMemberAccessKeyExprs(isRhsExpr);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nif (isRhsExpr && ((STNodeList) keyExpr).isEmpty()) {\nkeyExpr = STNodeFactory.createNodeList(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\ncloseBracket = SyntaxErrors.addDiagnostic(closeBracket,\nDiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);\n}\nreturn STNodeFactory.createIndexedExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket);\n}\n/**\n* Parse key expression of a member access expression. A type descriptor\n* that starts with a type-ref (e.g: T[a][b]) also goes through this\n* method.\n*
\n* key-expression := single-key-expression | multi-key-expression\n*\n* @param isRhsExpr Is this is a rhs expression\n* @return Key expression\n*/\nprivate STNode parseMemberAccessKeyExprs(boolean isRhsExpr) {\nList exprList = new ArrayList<>();\nSTNode keyExpr;\nSTNode keyExprEnd;\nwhile (!isEndOfTypeList(peek().kind)) {\nkeyExpr = parseKeyExpr(isRhsExpr);\nexprList.add(keyExpr);\nkeyExprEnd = parseMemberAccessKeyExprEnd();\nif (keyExprEnd == null) {\nbreak;\n}\nexprList.add(keyExprEnd);\n}\nreturn STNodeFactory.createNodeList(exprList);\n}\nprivate STNode parseKeyExpr(boolean isRhsExpr) {\nif (!isRhsExpr && peek().kind == SyntaxKind.ASTERISK_TOKEN) {\nreturn STNodeFactory.createBasicLiteralNode(SyntaxKind.ASTERISK_TOKEN, consume());\n}\nreturn parseExpression(isRhsExpr);\n}\nprivate STNode parseMemberAccessKeyExprEnd() {\nreturn parseMemberAccessKeyExprEnd(peek().kind);\n}\nprivate STNode parseMemberAccessKeyExprEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MEMBER_ACCESS_KEY_EXPR_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMemberAccessKeyExprEnd(solution.tokenKind);\n}\n}\n/**\n* Parse close bracket.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseBracket() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_BRACKET);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field access, xml required attribute access expressions or method call expression.\n*
\n*\n* field-access-expr := expression . field-name\n*
\n* xml-required-attribute-access-expr := expression . xml-attribute-name\n*
\n* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n*
\n* method-call-expr := expression . method-name ( arg-list )\n*
\n*\n* @param lhsExpr Preceding expression of the field access or method call\n* @return One of field-access-expression or method-call-expression.\n*/\nprivate STNode parseFieldAccessOrMethodCall(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode dotToken = parseDotToken();\nSTToken token = peek();\nif (token.kind == SyntaxKind.MAP_KEYWORD || token.kind == SyntaxKind.START_KEYWORD) {\nSTNode methodName = getKeywordAsSimpleNameRef();\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, methodName, openParen, args,\ncloseParen);\n}\nSTNode fieldOrMethodName = parseFieldAccessIdentifier(isInConditionalExpr);\nif (fieldOrMethodName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nreturn STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);\n}\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args,\ncloseParen);\n}\nreturn STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);\n}\nprivate STNode getKeywordAsSimpleNameRef() {\nSTToken mapKeyword = consume();\nSTNode methodName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\nmethodName = STNodeFactory.createSimpleNameReferenceNode(methodName);\nreturn methodName;\n}\n/**\n*
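parseExpressionRhs above is precedence climbing: parse a terminal, then fold in binary operators while their precedence binds tighter than the current level, recursing for each right-hand side. A compact, self-contained version that evaluates integer arithmetic instead of building syntax trees (illustrative only; using `>` on equal precedence yields left associativity, roughly mirroring the isHigherThanOrEqual check above):

```java
import java.util.List;
import java.util.Map;

// Precedence climbing over +,-,*,/ : parseExpr(minPrec) folds in operators
// while their precedence is tighter than minPrec, recursing for each RHS.
class PrecedenceClimb {
    static final Map<String, Integer> PREC = Map.of("+", 1, "-", 1, "*", 2, "/", 2);
    private final List<String> tokens;
    private int pos;

    PrecedenceClimb(List<String> tokens) { this.tokens = tokens; }

    long parseExpr(int minPrec) {
        long lhs = Long.parseLong(tokens.get(pos++));            // terminal expression
        while (pos < tokens.size() && PREC.getOrDefault(tokens.get(pos), 0) > minPrec) {
            String op = tokens.get(pos++);
            long rhs = parseExpr(PREC.get(op));                  // climb: RHS binds tighter
            lhs = switch (op) {
                case "+" -> lhs + rhs; case "-" -> lhs - rhs;
                case "*" -> lhs * rhs; default -> lhs / rhs;     // default: "/"
            };
        }
        return lhs;
    }

    public static void main(String[] args) {
        // 2 + 3 * 4 - 5 == 9 when * binds tighter than + and -
        System.out.println(new PrecedenceClimb(List.of("2", "+", "3", "*", "4", "-", "5")).parseExpr(0));
    }
}
```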
\n* Parse braced expression.\n*
\n* braced-expr := ( expression )\n*\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @param allowActions Allow actions\n* @return Parsed node\n*/\nprivate STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nif (peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nreturn parseNilLiteralOrEmptyAnonFuncParamRhs(openParen);\n}\nstartContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\nSTNode expr;\nif (allowActions) {\nexpr = parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true);\n} else {\nexpr = parseExpression(isRhsExpr);\n}\nreturn parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, expr, isRhsExpr);\n}\nprivate STNode parseNilLiteralOrEmptyAnonFuncParamRhs(STNode openParen) {\nSTNode closeParen = parseCloseParenthesis();\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn STNodeFactory.createNilLiteralNode(openParen, closeParen);\n} else {\nSTNode params = STNodeFactory.createNodeList();\nSTNode anonFuncParam =\nSTNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nreturn anonFuncParam;\n}\n}\nprivate STNode parseBracedExprOrAnonFuncParamRhs(STNode openParen, STNode expr, boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseBracedExprOrAnonFuncParamRhs(nextToken.kind, openParen, expr, isRhsExpr);\n}\nprivate STNode parseBracedExprOrAnonFuncParamRhs(SyntaxKind nextTokenKind, STNode openParen, STNode expr,\nboolean isRhsExpr) {\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nswitch (nextTokenKind) {\ncase CLOSE_PAREN_TOKEN:\nbreak;\ncase COMMA_TOKEN:\nreturn parseImplicitAnonFunc(openParen, expr, isRhsExpr);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS, openParen,\nexpr, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nendContext();\nreturn solution.recoveredNode;\n}\nreturn parseBracedExprOrAnonFuncParamRhs(solution.tokenKind, openParen, expr, isRhsExpr);\n}\n}\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nif (isAction(expr)) {\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen);\n}\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen);\n}\n/**\n* Check whether a given node is an action node.\n*\n* @param node Node to check\n* @return true if the node is an action node. false otherwise\n*/\nprivate boolean isAction(STNode node) {\nswitch (node.kind) {\ncase REMOTE_METHOD_CALL_ACTION:\ncase BRACED_ACTION:\ncase CHECK_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the given token is an end of a expression.\n*\n* @param tokenKind Token to check\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @return true if the token represents an end of a block. 
false otherwise\n*/\nprivate boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr, boolean isInMatchGuard,\nSyntaxKind precedingNodeKind) {\nif (!isRhsExpr) {\nif (isCompoundBinaryOperator(tokenKind)) {\nreturn true;\n}\nif (isInMatchGuard && tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn true;\n}\nreturn !isValidExprRhsStart(tokenKind, precedingNodeKind);\n}\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase PUBLIC_KEYWORD:\ncase EOF_TOKEN:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase EQUAL_TOKEN:\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\ncase AS_KEYWORD:\ncase IN_KEYWORD:\ncase BACKTICK_TOKEN:\ncase FROM_KEYWORD:\ncase WHERE_KEYWORD:\ncase LET_KEYWORD:\ncase SELECT_KEYWORD:\ncase DO_KEYWORD:\ncase COLON_TOKEN:\ncase ON_KEYWORD:\ncase CONFLICT_KEYWORD:\ncase LIMIT_KEYWORD:\ncase JOIN_KEYWORD:\ncase OUTER_KEYWORD:\nreturn true;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn isInMatchGuard;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n* Parse basic literals. It is assumed that we come here after validation.\n*\n* @return Parsed node\n*/\nprivate STNode parseBasicLiteral() {\nSTToken literalToken = consume();\nreturn STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken);\n}\n/**\n* Parse function call expression.\n* function-call-expr := function-reference ( arg-list )\n* function-reference := variable-reference\n*\n* @param identifier Function name\n* @return Function call expression\n*/\nprivate STNode parseFuncCall(STNode identifier) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen);\n}\n/**\n*
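validateArgumentOrder in the argument-list parsing below enforces the usual ordering: positional args, then named args, then at most one rest arg. The rule in isolation (a sketch; error codes reduced to strings):

```java
import java.util.List;

// Enforce positional < named < rest ordering over a flat argument list.
class ArgOrder {
    enum Kind { POSITIONAL, NAMED, REST }

    static String validate(List<Kind> args) {
        Kind last = Kind.POSITIONAL;                  // "nothing seen yet" behaves like positional
        for (Kind k : args) {
            if (last == Kind.REST) return "arg-followed-by-rest-arg";
            if (last == Kind.NAMED && k == Kind.POSITIONAL) return "positional-after-named";
            last = k;
        }
        return "ok";
    }

    public static void main(String[] args) {
        System.out.println(validate(List.of(Kind.POSITIONAL, Kind.NAMED, Kind.REST))); // ok
        System.out.println(validate(List.of(Kind.NAMED, Kind.POSITIONAL)));            // positional-after-named
    }
}
```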
\n* Parse error constructor expression.\n*
\n* \n* error-constructor-expr := error ( arg-list )\n* \n*\n* @return Error constructor expression\n*/\nprivate STNode parseErrorConstructorExpr() {\nSTNode errorKeyword = parseErrorKeyword();\nreturn parseFuncCall(errorKeyword);\n}\n/**\n* Parse function call argument list.\n*\n* @return Parsed args list\n*/\nprivate STNode parseArgsList() {\nstartContext(ParserRuleContext.ARG_LIST);\nSTToken token = peek();\nif (isEndOfParametersList(token.kind)) {\nSTNode args = STNodeFactory.createEmptyNodeList();\nendContext();\nreturn args;\n}\nSTNode firstArg = parseArgument();\nif (firstArg == null) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode argsList = parseArgList(firstArg);\nendContext();\nreturn argsList;\n}\n/**\n* Parse follow up arguments.\n*\n* @param firstArg first argument in the list\n* @return the argument list\n*/\nprivate STNode parseArgList(STNode firstArg) {\nArrayList argsList = new ArrayList<>();\nargsList.add(firstArg);\nSyntaxKind lastValidArgKind = firstArg.kind;\nSTToken nextToken = peek();\nwhile (!isEndOfParametersList(nextToken.kind)) {\nSTNode argEnd = parseArgEnd(nextToken.kind);\nif (argEnd == null) {\nbreak;\n}\nnextToken = peek();\nif (isEndOfParametersList(nextToken.kind)) {\nint prevArgIndex = argsList.size() - 1;\nSTNode prevArg = argsList.remove(prevArgIndex);\nSTNode prevArgWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(prevArg, argEnd,\nDiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) argEnd).text());\nargsList.add(prevArgWithDiagnostics);\nbreak;\n}\nSTNode curArg = parseArgument(nextToken.kind);\nDiagnosticErrorCode errorCode = validateArgumentOrder(lastValidArgKind, curArg.kind);\nif (errorCode == null) {\nargsList.add(argEnd);\nargsList.add(curArg);\nlastValidArgKind = curArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argsList, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argsList, curArg, errorCode);\n}\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(argsList);\n}\nprivate DiagnosticErrorCode validateArgumentOrder(SyntaxKind prevArgKind, SyntaxKind curArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase POSITIONAL_ARG:\nbreak;\ncase NAMED_ARG:\nif (curArgKind == SyntaxKind.POSITIONAL_ARG) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_ARG:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nthrow new IllegalStateException(\"Invalid SyntaxKind in an argument\");\n}\nreturn errorCode;\n}\nprivate STNode parseArgEnd() {\nreturn parseArgEnd(peek().kind);\n}\nprivate STNode parseArgEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgEnd(solution.tokenKind);\n}\n}\n/**\n* Parse function call argument.\n*\n* @return Parsed argument node\n*/\nprivate STNode parseArgument() {\nSTToken token = peek();\nreturn parseArgument(token.kind);\n}\nprivate STNode parseArgument(SyntaxKind kind) {\nSTNode arg;\nswitch (kind) {\ncase ELLIPSIS_TOKEN:\nSTToken ellipsis = consume();\nSTNode expr = parseExpression();\narg = STNodeFactory.createRestArgumentNode(ellipsis, expr);\nbreak;\ncase IDENTIFIER_TOKEN:\narg = parseNamedOrPositionalArg(kind);\nbreak;\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nif (isValidExprStart(kind)) {\nexpr = 
parseExpression();\narg = STNodeFactory.createPositionalArgumentNode(expr);\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.ARG_START_OR_ARG_LIST_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgument(solution.tokenKind);\n}\nreturn arg;\n}\n/**\n* Parse positional or named arg. This method assumes that peek()/peek(1)\n* is always an identifier.\n*\n* @param nextTokenKind Kind of the next token\n* @return Parsed argument node\n*/\nprivate STNode parseNamedOrPositionalArg(SyntaxKind nextTokenKind) {\nSTNode argNameOrExpr = parseTerminalExpression(peek().kind, true, false, false);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nSTNode equal = parseAssignOp();\nSTNode valExpr = parseExpression();\nreturn STNodeFactory.createNamedArgumentNode(argNameOrExpr, equal, valExpr);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\nreturn STNodeFactory.createPositionalArgumentNode(argNameOrExpr);\ndefault:\nargNameOrExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, argNameOrExpr, false, false);\nreturn STNodeFactory.createPositionalArgumentNode(argNameOrExpr);\n}\n}\n/**\n* Parse object type descriptor.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectTypeDescriptor() {\nstartContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR);\nSTNode objectTypeQualifiers = parseObjectTypeQualifiers();\nSTNode objectKeyword = parseObjectKeyword();\nSTNode openBrace = parseOpenBrace();\nSTNode objectMembers = parseObjectMembers();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace,\nobjectMembers, closeBrace);\n}\n/**\n* Parse object type qualifiers.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectTypeQualifiers() {\nSTToken nextToken = peek();\nreturn parseObjectTypeQualifiers(nextToken.kind);\n}\nprivate STNode parseObjectTypeQualifiers(SyntaxKind kind) {\nSTNode firstQualifier;\nswitch (kind) {\ncase CLIENT_KEYWORD:\nfirstQualifier = parseClientKeyword();\nbreak;\ncase ABSTRACT_KEYWORD:\nfirstQualifier = parseAbstractKeyword();\nbreak;\ncase READONLY_KEYWORD:\nfirstQualifier = parseReadonlyKeyword();\nbreak;\ncase OBJECT_KEYWORD:\nreturn STNodeFactory.createEmptyNodeList();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_QUALIFIER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectTypeQualifiers(solution.tokenKind);\n}\nreturn parseObjectTypeNextQualifiers(firstQualifier);\n}\nprivate STNode parseObjectTypeNextQualifiers(STNode firstQualifier) {\nList<STNode> qualifiers = new ArrayList<>();\nqualifiers.add(firstQualifier);\nfor (int i = 0; i < 2; i++) {\nSTNode nextToken = peek();\nif (isNodeWithSyntaxKindInList(qualifiers, nextToken.kind)) {\nnextToken = consume();\nupdateLastNodeInListWithInvalidNode(qualifiers, nextToken,\nDiagnosticErrorCode.ERROR_SAME_OBJECT_TYPE_QUALIFIER);\ncontinue;\n}\nSTNode nextQualifier;\nswitch (nextToken.kind) {\ncase CLIENT_KEYWORD:\nnextQualifier = parseClientKeyword();\nbreak;\ncase ABSTRACT_KEYWORD:\nnextQualifier = parseAbstractKeyword();\nbreak;\ncase READONLY_KEYWORD:\nnextQualifier = parseReadonlyKeyword();\nbreak;\ncase OBJECT_KEYWORD:\ndefault:\nreturn STNodeFactory.createNodeList(qualifiers);\n}\nqualifiers.add(nextQualifier);\n}\nreturn STNodeFactory.createNodeList(qualifiers);\n}\n/**\n* Parse client keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseClientKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLIENT_KEYWORD) 
{\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse abstract keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAbstractKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ABSTRACT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse object keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OBJECT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse object members.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectMembers() {\nArrayList objectMembers = new ArrayList<>();\nwhile (!isEndOfObjectTypeNode()) {\nstartContext(ParserRuleContext.OBJECT_MEMBER);\nSTNode member = parseObjectMember(peek().kind);\nendContext();\nif (member == null) {\nbreak;\n}\nobjectMembers.add(member);\n}\nreturn STNodeFactory.createNodeList(objectMembers);\n}\nprivate STNode parseObjectMember() {\nSTToken nextToken = peek();\nreturn parseObjectMember(nextToken.kind);\n}\nprivate STNode parseObjectMember(SyntaxKind nextTokenKind) {\nSTNode metadata;\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\ncase REMOTE_KEYWORD:\ncase FUNCTION_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nmetadata = createEmptyMetadata();\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMember(solution.tokenKind);\n}\nreturn parseObjectMember(nextTokenKind, metadata);\n}\nprivate STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) {\nSTNode member;\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\nSTNode asterisk = consume();\nSTNode type = parseTypeReference();\nSTNode semicolonToken = parseSemicolon();\nmember = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);\nbreak;\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\nSTNode visibilityQualifier = parseObjectMemberVisibility();\nmember = parseObjectMethodOrField(metadata, visibilityQualifier);\nbreak;\ncase REMOTE_KEYWORD:\nmember = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode());\nbreak;\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nmember = parseObjectMethod(metadata, STNodeFactory.createEmptyNode(), STNodeFactory.createEmptyNode());\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nmember = parseObjectField(metadata, STNodeFactory.createEmptyNode());\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMember(solution.tokenKind);\n}\nreturn member;\n}\nprivate STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) {\nSTToken nextToken = peek(1);\nSTToken nextNextToken = peek(2);\nreturn parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers);\n}\n/**\n* Parse an 
object member, given the visibility modifier. An object member can have\n* only one visibility qualifier. This means the visibilityQualifier argument\n* holds at most one qualifier.\n*\n* @param nextTokenKind Next token kind\n* @param nextNextTokenKind Kind of the token after the next token\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier. This can be\n* a syntax node with either 'PUBLIC' or 'PRIVATE'.\n* @return Parsed object member node\n*/\nprivate STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata,\nSTNode visibilityQualifier) {\nswitch (nextTokenKind) {\ncase REMOTE_KEYWORD:\nSTNode remoteKeyword = parseRemoteKeyword();\nreturn parseObjectMethod(metadata, visibilityQualifier, remoteKeyword);\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nremoteKeyword = STNodeFactory.createEmptyNode();\nreturn parseObjectMethod(metadata, visibilityQualifier, remoteKeyword);\ncase IDENTIFIER_TOKEN:\nif (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseObjectField(metadata, visibilityQualifier);\n}\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nreturn parseObjectField(metadata, visibilityQualifier);\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata,\nvisibilityQualifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifier);\n}\n/**\n* Parse object visibility. Visibility can be public or private.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectMemberVisibility() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseRemoteKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.REMOTE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseObjectField(STNode metadata, STNode methodQualifiers) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.READONLY_KEYWORD) {\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);\n}\nSTNode type;\nSTNode readonlyQualifier = parseReadonlyKeyword();\nnextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode fieldNameOrTypeDesc = parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME);\nif (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\ntype = fieldNameOrTypeDesc;\n} else {\nnextToken = peek();\nswitch (nextToken.kind) {\ncase SEMICOLON_TOKEN:\ncase EQUAL_TOKEN:\ntype = createBuiltinSimpleNameReference(readonlyQualifier);\nreadonlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type,\nfieldNameOrTypeDesc);\ndefault:\ntype = fieldNameOrTypeDesc;\nbreak;\n}\n}\n} else if (isTypeStartingToken(nextToken.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\n} else {\nreadonlyQualifier = 
createBuiltinSimpleNameReference(readonlyQualifier);\ntype = parseComplexTypeDescriptor(readonlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);\nreadonlyQualifier = STNodeFactory.createEmptyNode();\n}\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);\n}\n/**\n* Parse object field rhs, and complete the object field parsing. Returns the parsed object field.\n*\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed object field\n*/\nprivate STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode readonlyQualifier,\nSTNode type, STNode fieldName) {\nSTToken nextToken = peek();\nreturn parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, readonlyQualifier, type, fieldName);\n}\n/**\n* Parse object field rhs, and complete the object field parsing. Returns the parsed object field.\n*\n* @param nextTokenKind Kind of the next token\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed object field\n*/\nprivate STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier,\nSTNode readonlyQualifier, STNode type, STNode fieldName) {\nSTNode equalsToken;\nSTNode expression;\nSTNode semicolonToken;\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nequalsToken = STNodeFactory.createEmptyNode();\nexpression = STNodeFactory.createEmptyNode();\nsemicolonToken = parseSemicolon();\nbreak;\ncase EQUAL_TOKEN:\nequalsToken = parseAssignOp();\nexpression = parseExpression();\nsemicolonToken = parseSemicolon();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier,\nreadonlyQualifier, type, fieldName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, readonlyQualifier, type,\nfieldName);\n}\nreturn STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, readonlyQualifier, type, fieldName,\nequalsToken, expression, semicolonToken);\n}\nprivate STNode parseObjectMethod(STNode metadata, STNode visibilityQualifier, STNode remoteKeyword) {\nreturn parseFuncDefOrFuncTypeDesc(metadata, true, visibilityQualifier, remoteKeyword, null);\n}\n/**\n* Parse if-else statement.\n* \n* if-else-stmt := if expression block-stmt [else-block]\n* \n*\n* @return If-else block\n*/\nprivate STNode parseIfElseBlock() {\nstartContext(ParserRuleContext.IF_BLOCK);\nSTNode ifKeyword = parseIfKeyword();\nSTNode condition = parseExpression();\nSTNode ifBody = parseBlockNode();\nendContext();\nSTNode elseBody = parseElseBlock();\nreturn STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody);\n}\n/**\n* Parse if-keyword.\n*\n* @return Parsed if-keyword node\n*/\nprivate STNode parseIfKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IF_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IF_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse else-keyword.\n*\n* @return Parsed else keyword node\n*/\nprivate STNode parseElseKeyword() {\nSTToken token = peek();\nif (token.kind == 
SyntaxKind.ELSE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ELSE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse block node.\n* \n* block-stmt := { sequence-stmt }\n* sequence-stmt := statement*\n* \n*\n* @return Parse block node\n*/\nprivate STNode parseBlockNode() {\nstartContext(ParserRuleContext.BLOCK_STMT);\nSTNode openBrace = parseOpenBrace();\nSTNode stmts = parseStatements();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace);\n}\n/**\n* Parse else block.\n* else-block := else (if-else-stmt | block-stmt)\n*\n* @return Else block\n*/\nprivate STNode parseElseBlock() {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ELSE_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode elseKeyword = parseElseKeyword();\nSTNode elseBody = parseElseBody();\nreturn STNodeFactory.createElseBlockNode(elseKeyword, elseBody);\n}\n/**\n* Parse else node body.\n* else-body := if-else-stmt | block-stmt\n*\n* @return Else node body\n*/\nprivate STNode parseElseBody() {\nSTToken nextToken = peek();\nreturn parseElseBody(nextToken.kind);\n}\nprivate STNode parseElseBody(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IF_KEYWORD:\nreturn parseIfElseBlock();\ncase OPEN_BRACE_TOKEN:\nreturn parseBlockNode();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ELSE_BODY);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseElseBody(solution.tokenKind);\n}\n}\n/**\n* Parse while statement.\n* while-stmt := while expression block-stmt\n*\n* @return While statement\n*/\nprivate STNode parseWhileStatement() {\nstartContext(ParserRuleContext.WHILE_BLOCK);\nSTNode whileKeyword = parseWhileKeyword();\nSTNode condition = parseExpression();\nSTNode whileBody = parseBlockNode();\nendContext();\nreturn STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody);\n}\n/**\n* Parse while-keyword.\n*\n* @return While-keyword node\n*/\nprivate STNode parseWhileKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WHILE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WHILE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse panic statement.\n* panic-stmt := panic expression ;\n*\n* @return Panic statement\n*/\nprivate STNode parsePanicStatement() {\nstartContext(ParserRuleContext.PANIC_STMT);\nSTNode panicKeyword = parsePanicKeyword();\nSTNode expression = parseExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon);\n}\n/**\n* Parse panic-keyword.\n*\n* @return Panic-keyword node\n*/\nprivate STNode parsePanicKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PANIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PANIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse check expression. This method is used to parse both check expression\n* as well as check action.\n*\n*
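The keyword parsers above (parseIfKeyword, parseElseKeyword, parseWhileKeyword, parsePanicKeyword) all share one shape: consume the token when it matches the expected kind, otherwise hand control to the error handler and continue with whatever it recovers. The following is a minimal standalone sketch of that consume-or-recover shape; the Kind enum and the simplistic recovery are illustrative stand-ins, not this parser's API.

```java
import java.util.Iterator;
import java.util.List;

// Simplified consume-or-recover loop over a token stream.
final class KeywordParser {
    enum Kind { IF_KEYWORD, WHILE_KEYWORD, IDENTIFIER, EOF }

    private final Iterator<Kind> tokens;
    private Kind lookahead;

    KeywordParser(List<Kind> input) {
        this.tokens = input.iterator();
        this.lookahead = tokens.next();
    }

    // Consume the expected keyword, or report it as missing and continue.
    Kind expect(Kind expected) {
        if (lookahead == expected) {
            Kind consumed = lookahead;
            lookahead = tokens.hasNext() ? tokens.next() : Kind.EOF;
            return consumed;
        }
        // Real recovery consults an error handler that may insert or remove
        // tokens; here we simply synthesize the missing keyword.
        System.out.println("missing " + expected + ", found " + lookahead);
        return expected;
    }

    public static void main(String[] args) {
        KeywordParser p = new KeywordParser(List.of(Kind.IDENTIFIER, Kind.EOF));
        p.expect(Kind.IF_KEYWORD); // reports the missing keyword and recovers
    }
}
```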
\n* \n* checking-expr := checking-keyword expression\n* checking-action := checking-keyword action\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Is rhs expression\n* @return Check expression node\n*/\nprivate STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode checkingKeyword = parseCheckingKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr);\n} else {\nreturn STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr);\n}\n}\n/**\n* Parse checking keyword.\n*
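parseCheckExpression above decides the node kind only after parsing the operand: if the operand turned out to be an action, the whole construct becomes a check action, otherwise a check expression. A tiny sketch of that classify-after-parse decision follows; the kinds and the isAction test are illustrative stand-ins.

```java
final class CheckKindChooser {
    enum NodeKind { CHECK_EXPRESSION, CHECK_ACTION, REMOTE_CALL, BINARY_EXPR }

    // The wrapper's kind depends on what its already-parsed operand was.
    static NodeKind chooseKind(NodeKind operand) {
        return isAction(operand) ? NodeKind.CHECK_ACTION : NodeKind.CHECK_EXPRESSION;
    }

    private static boolean isAction(NodeKind kind) {
        return kind == NodeKind.REMOTE_CALL; // stand-in for the real action check
    }

    public static void main(String[] args) {
        System.out.println(chooseKind(NodeKind.BINARY_EXPR)); // CHECK_EXPRESSION
        System.out.println(chooseKind(NodeKind.REMOTE_CALL)); // CHECK_ACTION
    }
}
```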
\n* \n* checking-keyword := check | checkpanic\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseCheckingKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*\n* Parse continue statement.\n* continue-stmt := continue ; \n*\n* @return continue statement\n*/\nprivate STNode parseContinueStatement() {\nstartContext(ParserRuleContext.CONTINUE_STATEMENT);\nSTNode continueKeyword = parseContinueKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createContinueStatementNode(continueKeyword, semicolon);\n}\n/**\n* Parse continue-keyword.\n*\n* @return continue-keyword node\n*/\nprivate STNode parseContinueKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONTINUE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse return statement.\n* return-stmt := return [ action-or-expr ] ;\n*\n* @return Return statement\n*/\nprivate STNode parseReturnStatement() {\nstartContext(ParserRuleContext.RETURN_STMT);\nSTNode returnKeyword = parseReturnKeyword();\nSTNode returnRhs = parseReturnStatementRhs(returnKeyword);\nendContext();\nreturn returnRhs;\n}\n/**\n* Parse return-keyword.\n*\n* @return Return-keyword node\n*/\nprivate STNode parseReturnKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETURN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETURN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse break statement.\n* break-stmt := break ; \n*\n* @return break statement\n*/\nprivate STNode parseBreakStatement() {\nstartContext(ParserRuleContext.BREAK_STATEMENT);\nSTNode breakKeyword = parseBreakKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createBreakStatementNode(breakKeyword, semicolon);\n}\n/**\n* Parse break-keyword.\n*\n* @return break-keyword node\n*/\nprivate STNode parseBreakKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BREAK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BREAK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse the right hand side of a return statement.\n*
\n* \n* return-stmt-rhs := ; | action-or-expr ;\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseReturnStatementRhs(STNode returnKeyword) {\nSTNode expr;\nSTToken token = peek();\nswitch (token.kind) {\ncase SEMICOLON_TOKEN:\nexpr = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nexpr = parseActionOrExpression();\nbreak;\n}\nSTNode semicolon = parseSemicolon();\nreturn STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon);\n}\n/**\n* Parse mapping constructor expression.\n*
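parseReturnStatementRhs above treats the returned expression as optional: a semicolon right after the return keyword yields an empty node, while anything else is parsed as an action-or-expression followed by a semicolon. A compact sketch of the same optional-operand decision, with placeholder token kinds:

```java
import java.util.Optional;

final class ReturnRhs {
    enum Token { SEMICOLON, EXPR_START }

    // Mirrors the shape of return-stmt-rhs := ; | action-or-expr ;
    static Optional<String> parseReturnValue(Token next) {
        if (next == Token.SEMICOLON) {
            return Optional.empty();              // plain `return;`
        }
        return Optional.of("parsed-expression");  // `return <expr>;`
    }

    public static void main(String[] args) {
        System.out.println(parseReturnValue(Token.SEMICOLON));  // Optional.empty
        System.out.println(parseReturnValue(Token.EXPR_START)); // Optional[parsed-expression]
    }
}
```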
\n* mapping-constructor-expr := { [field (, field)*] }\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingConstructorExpr() {\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode openBrace = parseOpenBrace();\nSTNode fields = parseMappingConstructorFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\n}\n/**\n* Parse mapping constructor fields.\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingConstructorFields() {\nSTToken nextToken = peek();\nif (isEndOfMappingConstructor(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList fields = new ArrayList<>();\nSTNode field = parseMappingField(ParserRuleContext.FIRST_MAPPING_FIELD);\nfields.add(field);\nreturn parseMappingConstructorFields(fields);\n}\nprivate STNode parseMappingConstructorFields(List fields) {\nSTToken nextToken;\nSTNode mappingFieldEnd;\nnextToken = peek();\nwhile (!isEndOfMappingConstructor(nextToken.kind)) {\nmappingFieldEnd = parseMappingFieldEnd(nextToken.kind);\nif (mappingFieldEnd == null) {\nbreak;\n}\nfields.add(mappingFieldEnd);\nSTNode field = parseMappingField(ParserRuleContext.MAPPING_FIELD);\nfields.add(field);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(fields);\n}\nprivate STNode parseMappingFieldEnd() {\nreturn parseMappingFieldEnd(peek().kind);\n}\nprivate STNode parseMappingFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.MAPPING_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingFieldEnd(solution.tokenKind);\n}\n}\nprivate boolean isEndOfMappingConstructor(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\ncase READONLY_KEYWORD:\nreturn false;\ncase EOF_TOKEN:\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase RETURNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase RESOURCE_KEYWORD:\nreturn true;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n* Parse mapping constructor field.\n*
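parseMappingConstructorFields above uses this section's standard delimited-list loop: parse the first element, then repeat (separator, element) until the end of the list, with the separator parser returning null at the terminator. A generic sketch of that loop, independent of the parser's classes and with illustrative names:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

final class DelimitedList {
    // Parses: element (',' element)* — nextIsEnd() decides when to stop,
    // mirroring parseMappingFieldEnd returning null on the closing brace.
    static <T> List<T> parse(Supplier<T> element, Supplier<Boolean> nextIsEnd) {
        List<T> items = new ArrayList<>();
        if (nextIsEnd.get()) {
            return items;            // empty constructor: `{}`
        }
        items.add(element.get());
        while (!nextIsEnd.get()) {
            // a real parser would also consume and record the comma here
            items.add(element.get());
        }
        return items;
    }

    public static void main(String[] args) {
        int[] remaining = {3};
        List<String> fields = parse(() -> "field", () -> remaining[0]-- == 0);
        System.out.println(fields); // [field, field, field]
    }
}
```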
\n* field := specific-field | computed-name-field | spread-field\n*\n* @param fieldContext Context of the mapping field\n* @return Parsed node\n*/\nprivate STNode parseMappingField(ParserRuleContext fieldContext) {\nSTToken nextToken = peek();\nreturn parseMappingField(nextToken.kind, fieldContext);\n}\nprivate STNode parseMappingField(SyntaxKind tokenKind, ParserRuleContext fieldContext) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseSpecificFieldWithOptionalValue(readonlyKeyword);\ncase STRING_LITERAL:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseQualifiedSpecificField(readonlyKeyword);\ncase READONLY_KEYWORD:\nreadonlyKeyword = parseReadonlyKeyword();\nreturn parseSpecificField(readonlyKeyword);\ncase OPEN_BRACKET_TOKEN:\nreturn parseComputedField();\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode expr = parseExpression();\nreturn STNodeFactory.createSpreadFieldNode(ellipsis, expr);\ncase CLOSE_BRACE_TOKEN:\nif (fieldContext == ParserRuleContext.FIRST_MAPPING_FIELD) {\nreturn null;\n}\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, fieldContext, fieldContext);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingField(solution.tokenKind, fieldContext);\n}\n}\nprivate STNode parseSpecificField(STNode readonlyKeyword) {\nSTToken nextToken = peek();\nreturn parseSpecificField(nextToken.kind, readonlyKeyword);\n}\nprivate STNode parseSpecificField(SyntaxKind nextTokenKind, STNode readonlyKeyword) {\nswitch (nextTokenKind) {\ncase STRING_LITERAL:\nreturn parseQualifiedSpecificField(readonlyKeyword);\ncase IDENTIFIER_TOKEN:\nreturn parseSpecificFieldWithOptionalValue(readonlyKeyword);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD, readonlyKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseSpecificField(solution.tokenKind, readonlyKeyword);\n}\n}\nprivate STNode parseQualifiedSpecificField(STNode readonlyKeyword) {\nSTNode key = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\n/**\n* Parse mapping constructor specific-field with an optional value.\n*\n* @return Parsed node\n*/\nprivate STNode parseSpecificFieldWithOptionalValue(STNode readonlyKeyword) {\nSTNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);\nreturn parseSpecificFieldRhs(readonlyKeyword, key);\n}\nprivate STNode parseSpecificFieldRhs(STNode readonlyKeyword, STNode key) {\nSTToken nextToken = peek();\nreturn parseSpecificFieldRhs(nextToken.kind, readonlyKeyword, key);\n}\nprivate STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode readonlyKeyword, STNode key) {\nSTNode colon;\nSTNode valueExpr;\nswitch (tokenKind) {\ncase COLON_TOKEN:\ncolon = parseColon();\nvalueExpr = parseExpression();\nbreak;\ncase COMMA_TOKEN:\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nif (isEndOfMappingConstructor(tokenKind)) {\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, readonlyKeyword, key);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn 
parseSpecificFieldRhs(solution.tokenKind, readonlyKeyword, key);\n}\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\n/**\n* Parse string literal.\n*\n* @return Parsed node\n*/\nprivate STNode parseStringLiteral() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STRING_LITERAL) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STRING_LITERAL);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse colon token.\n*\n* @return Parsed node\n*/\nprivate STNode parseColon() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COLON_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COLON);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse readonly keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseReadonlyKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.READONLY_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.READONLY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse computed-name-field of a mapping constructor expression.\n*
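parseSpecificFieldRhs above makes the value part of a specific field optional: a colon after the key introduces an explicit value, while a comma or an end-of-constructor token means both the colon and the value are simply absent, which is what allows shorthand fields. A sketch of the same three-way decision on the token after the key, with illustrative names:

```java
final class SpecificFieldRhs {
    enum Token { COLON, COMMA, CLOSE_BRACE }

    // field := key [: value-expr] — a comma or closing brace after the key
    // means the value (and the colon) were omitted.
    static String classify(Token afterKey) {
        switch (afterKey) {
            case COLON:
                return "key with explicit value";
            case COMMA:
            case CLOSE_BRACE:
                return "shorthand field, value omitted";
            default:
                return "recover"; // defer to the error handler
        }
    }

    public static void main(String[] args) {
        System.out.println(classify(Token.COLON));       // {key: expr}
        System.out.println(classify(Token.CLOSE_BRACE)); // {key}
    }
}
```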
\n* computed-name-field := [ field-name-expr ] : value-expr\n*\n* @return Parsed node\n*/\nprivate STNode parseComputedField() {\nstartContext(ParserRuleContext.COMPUTED_FIELD_NAME);\nSTNode openBracket = parseOpenBracket();\nSTNode fieldNameExpr = parseExpression();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon, valueExpr);\n}\n/**\n* Parse open bracket.\n*\n* @return Parsed node\n*/\nprivate STNode parseOpenBracket() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPEN_BRACKET);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse compound assignment statement, which takes the following format.\n*
\n* assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;\n*\n* @return Parsed node\n*/\nprivate STNode parseCompoundAssignmentStmt() {\nstartContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT);\nSTNode varName = parseVariableName();\nSTNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName);\nendContext();\nreturn compoundAssignmentStmt;\n}\n/**\n*
\n* Parse the RHS portion of the compound assignment.\n*
\n* compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;\n*\n* @param lvExpr LHS expression\n* @return Parsed node\n*/\nprivate STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) {\nSTNode binaryOperator = parseCompoundBinaryOperator();\nSTNode equalsToken = parseAssignOp();\nSTNode expr = parseActionOrExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nboolean lvExprValid = isValidLVExpr(lvExpr);\nif (!lvExprValid) {\nSTNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nlvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,\nDiagnosticErrorCode.ERROR_INVALID_EXPR_IN_COMPOUND_ASSIGNMENT_LHS);\n}\nreturn STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr,\nsemicolon);\n}\n/**\n* Parse compound binary operator.\n* BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>>\n*\n* @return Parsed node\n*/\nprivate STNode parseCompoundBinaryOperator() {\nSTToken token = peek();\nif (isCompoundBinaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse service declaration.\n*
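When the left-hand side of a compound assignment is not a valid lvalue, parseCompoundAssignmentStmtRhs above does not throw the input away: it synthesizes a missing identifier, attaches the bad expression to it as leading invalid minutiae, and records a diagnostic. The following is a rough sketch of that keep-the-bad-input idea only; the real minutiae and SyntaxErrors machinery is far richer.

```java
final class InvalidLhsRecovery {
    // Keep the user's bad input attached to a synthesized node instead of
    // discarding it, so diagnostics can still point at the original text.
    static String recoverLhs(String invalidExpr) {
        System.out.println("diagnostic: invalid expression as compound assignment lvalue: "
                + invalidExpr);
        return "<missing identifier> /* " + invalidExpr + " */";
    }

    public static void main(String[] args) {
        // `a + b += 1;` — the LHS is not assignable, but parsing continues.
        System.out.println(recoverLhs("a + b"));
    }
}
```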
\n* \n* service-decl := metadata service [variable-name] on expression-list service-body-block\n*
\n* expression-list := expression (, expression)*\n*
\n*\n* @param metadata Metadata\n* @return Parsed node\n*/\nprivate STNode parseServiceDecl(STNode metadata) {\nstartContext(ParserRuleContext.SERVICE_DECL);\nSTNode serviceKeyword = parseServiceKeyword();\nSTNode serviceDecl = parseServiceRhs(metadata, serviceKeyword);\nendContext();\nreturn serviceDecl;\n}\n/**\n* Parse rhs of the service declaration.\n*
\n* \n* service-rhs := [variable-name] on expression-list service-body-block\n* \n*\n* @param metadata Metadata\n* @param serviceKeyword Service keyword\n* @return Parsed node\n*/\nprivate STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) {\nSTNode serviceName = parseServiceName();\nSTNode onKeyword = parseOnKeyword();\nSTNode expressionList = parseListeners();\nSTNode serviceBody = parseServiceBody();\nonKeyword =\ncloneWithDiagnosticIfListEmpty(expressionList, onKeyword, DiagnosticErrorCode.ERROR_MISSING_EXPRESSION);\nreturn STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword,\nexpressionList, serviceBody);\n}\nprivate STNode parseServiceName() {\nSTToken nextToken = peek();\nreturn parseServiceName(nextToken.kind);\n}\nprivate STNode parseServiceName(SyntaxKind kind) {\nswitch (kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseIdentifier(ParserRuleContext.SERVICE_NAME);\ncase ON_KEYWORD:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseServiceName(solution.tokenKind);\n}\n}\n/**\n* Parse service keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseServiceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SERVICE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a compound binary operator.\n*
\n* compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>>\n*\n* @param tokenKind STToken kind\n* @return true if the token kind refers to a compound binary operator. false otherwise\n*/\nprivate boolean isCompoundBinaryOperator(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase SLASH_TOKEN:\ncase ASTERISK_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase BITWISE_XOR_TOKEN:\ncase PIPE_TOKEN:\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\nreturn getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse on keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseOnKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ON_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ON_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse listener references.\n*
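isCompoundBinaryOperator above needs two tokens of lookahead: the operator counts as compound only when the token after it is '='. A standalone sketch of that check for the operator set listed in the grammar; the string tokens are illustrative stand-ins for the real token kinds.

```java
import java.util.Set;

final class CompoundOpCheck {
    private static final Set<String> OPERATORS =
            Set.of("+", "-", "*", "/", "&", "|", "^", "<<", ">>", ">>>");

    // Two-token lookahead: `x += 1` is compound, `x + 1` is a plain binary op.
    static boolean isCompoundAssignOp(String token, String nextToken) {
        return OPERATORS.contains(token) && "=".equals(nextToken);
    }

    public static void main(String[] args) {
        System.out.println(isCompoundAssignOp("+", "="));  // true  (+=)
        System.out.println(isCompoundAssignOp("<<", "=")); // true  (<<=)
        System.out.println(isCompoundAssignOp("+", "1"));  // false
    }
}
```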
\n* expression-list := expression (, expression)*\n*\n* @return Parsed node\n*/\nprivate STNode parseListeners() {\nstartContext(ParserRuleContext.LISTENERS_LIST);\nList listeners = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfExpressionsList(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode leadingComma = STNodeFactory.createEmptyNode();\nSTNode exprListItem = parseExpressionListItem(leadingComma);\nlisteners.add(exprListItem);\nnextToken = peek();\nwhile (!isEndOfExpressionsList(nextToken.kind)) {\nleadingComma = parseComma();\nexprListItem = parseExpressionListItem(leadingComma);\nlisteners.add(exprListItem);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(listeners);\n}\nprivate boolean isEndOfExpressionsList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\nreturn false;\ncase EOF_TOKEN:\ncase SEMICOLON_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn !isValidExprStart(tokenKind);\n}\n}\n/**\n* Parse expression list item.\n*\n* @param leadingComma Leading comma\n* @return Parsed node\n*/\nprivate STNode parseExpressionListItem(STNode leadingComma) {\nSTNode expr = parseExpression();\nreturn STNodeFactory.createExpressionListItemNode(leadingComma, expr);\n}\n/**\n* Parse service body.\n*
\n* \n* service-body-block := { service-method-defn* }\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseServiceBody() {\nSTNode openBrace = parseOpenBrace();\nSTNode resources = parseResources();\nSTNode closeBrace = parseCloseBrace();\nreturn STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace);\n}\n/**\n* Parse service resource definitions.\n*\n* @return Parsed node\n*/\nprivate STNode parseResources() {\nList resources = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (!isEndOfServiceDecl(nextToken.kind)) {\nSTNode serviceMethod = parseResource();\nif (serviceMethod == null) {\nbreak;\n}\nresources.add(serviceMethod);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(resources);\n}\nprivate boolean isEndOfServiceDecl(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\ncase TYPE_KEYWORD:\ncase SERVICE_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse resource definition (i.e. service-method-defn).\n*
\n* \n* service-body-block := { service-method-defn* }\n*
\n* service-method-defn := metadata [resource] function identifier function-signature method-defn-body\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseResource() {\nSTToken nextToken = peek();\nreturn parseResource(nextToken.kind);\n}\nprivate STNode parseResource(SyntaxKind nextTokenKind) {\nSTNode metadata;\nswitch (nextTokenKind) {\ncase RESOURCE_KEYWORD:\ncase FUNCTION_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ndefault:\nif (isEndOfServiceDecl(nextTokenKind)) {\nreturn null;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RESOURCE_DEF);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseResource(solution.tokenKind);\n}\nreturn parseResource(nextTokenKind, metadata);\n}\nprivate STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) {\nswitch (nextTokenKind) {\ncase RESOURCE_KEYWORD:\nSTNode resourceKeyword = parseResourceKeyword();\nreturn parseFuncDefinition(metadata, false, resourceKeyword, null);\ncase FUNCTION_KEYWORD:\nreturn parseFuncDefinition(metadata, false, STNodeFactory.createEmptyNode(), null);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseResource(solution.tokenKind, metadata);\n}\n}\n/**\n* Parse resource keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseResourceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RESOURCE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the next construct is a service declaration or not. This method is\n* used to determine whether an end-of-block is reached when the next token is\n* a service-keyword, because the service-keyword can be used in statements as well\n* as in a top-level node (service-decl). If we have reached a service-decl,\n* it could be due to a missing close-brace at the end of the current block.\n*\n* @return true if the next construct is a service declaration.\n* false otherwise\n*/", "context_after": "class BallerinaParser extends AbstractParser {\nprivate static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.DEFAULT;\nprotected BallerinaParser(AbstractTokenReader tokenReader) {\nsuper(tokenReader, new BallerinaParserErrorHandler(tokenReader));\n}\n/**\n* Start parsing the given input.\n*\n* @return Parsed node\n*/\n@Override\npublic STNode parse() {\nreturn parseCompUnit();\n}\n/**\n* Start parsing the input from a given context. Supported starting points are:\n*
\n* - Module part (a file)\n* - Top level node\n* - Statement\n* - Expression\n*
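The entry points listed above correspond to the parse(ParserRuleContext) overload below. A hypothetical usage sketch follows; obtaining a parser instance is outside this excerpt (the constructor is protected and takes an AbstractTokenReader), so the instance is assumed to be supplied by the caller, and imports are elided.

```java
// Sketch only: `parser` is an already-constructed BallerinaParser.
// Each context starts parsing at a different grammar production.
static void parseAtDifferentLevels(BallerinaParser parser) {
    STNode modulePart = parser.parse(ParserRuleContext.COMP_UNIT);   // whole file
    STNode statement  = parser.parse(ParserRuleContext.STATEMENT);   // single statement
    STNode expression = parser.parse(ParserRuleContext.EXPRESSION);  // single expression
    // any other context throws UnsupportedOperationException
}
```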
\n*\n* @param context Context to start parsing\n* @return Parsed node\n*/\npublic STNode parse(ParserRuleContext context) {\nswitch (context) {\ncase COMP_UNIT:\nreturn parseCompUnit();\ncase TOP_LEVEL_NODE:\nstartContext(ParserRuleContext.COMP_UNIT);\nreturn parseTopLevelNode();\ncase STATEMENT:\nstartContext(ParserRuleContext.COMP_UNIT);\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nreturn parseStatement();\ncase EXPRESSION:\nstartContext(ParserRuleContext.COMP_UNIT);\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nstartContext(ParserRuleContext.STATEMENT);\nreturn parseExpression();\ndefault:\nthrow new UnsupportedOperationException(\"Cannot start parsing from: \" + context);\n}\n}\n/**\n* Resume the parsing from the given context.\n*\n* @param context Context to resume parsing\n* @param args Arguments that requires to continue parsing from the given parser context\n* @return Parsed node\n*/\n@Override\npublic STNode resumeParsing(ParserRuleContext context, Object... args) {\nswitch (context) {\ncase FUNC_BODY:\nreturn parseFunctionBody((boolean) args[0]);\ncase OPEN_BRACE:\nreturn parseOpenBrace();\ncase CLOSE_BRACE:\nreturn parseCloseBrace();\ncase FUNC_NAME:\nreturn parseFunctionName();\ncase OPEN_PARENTHESIS:\ncase ARG_LIST_START:\nreturn parseOpenParenthesis(context);\ncase SIMPLE_TYPE_DESCRIPTOR:\nreturn parseSimpleTypeDescriptor();\ncase ASSIGN_OP:\nreturn parseAssignOp();\ncase EXTERNAL_KEYWORD:\nreturn parseExternalKeyword();\ncase SEMICOLON:\nreturn parseSemicolon();\ncase CLOSE_PARENTHESIS:\nreturn parseCloseParenthesis();\ncase VARIABLE_NAME:\nreturn parseVariableName();\ncase TERMINAL_EXPRESSION:\nreturn parseTerminalExpression((STNode) args[0], (boolean) args[1], (boolean) args[2],\n(boolean) args[3]);\ncase STATEMENT:\nreturn parseStatement();\ncase STATEMENT_WITHOUT_ANNOTS:\nreturn parseStatement((STNode) args[0]);\ncase EXPRESSION_RHS:\nreturn parseExpressionRhs((OperatorPrecedence) args[0], (STNode) args[1], (boolean) args[2],\n(boolean) args[3], (boolean) args[4], (boolean) args[5]);\ncase PARAMETER_START:\nreturn parseParameter((SyntaxKind) args[0], (STNode) args[1], (int) args[2], (boolean) args[3]);\ncase PARAMETER_WITHOUT_ANNOTS:\nreturn parseParamGivenAnnots((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (int) args[3],\n(boolean) args[4]);\ncase AFTER_PARAMETER_TYPE:\nreturn parseAfterParamType((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4], (boolean) args[5]);\ncase PARAMETER_NAME_RHS:\nreturn parseParameterRhs((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4], (STNode) args[5]);\ncase TOP_LEVEL_NODE:\nreturn parseTopLevelNode();\ncase TOP_LEVEL_NODE_WITHOUT_METADATA:\nreturn parseTopLevelNode((STNode) args[0]);\ncase TOP_LEVEL_NODE_WITHOUT_MODIFIER:\nreturn parseTopLevelNode((STNode) args[0], (STNode) args[1]);\ncase TYPE_NAME_OR_VAR_NAME:\ncase RECORD_FIELD_NAME_OR_TYPE_NAME:\ncase TYPE_REFERENCE:\ncase ANNOT_REFERENCE:\ncase FIELD_ACCESS_IDENTIFIER:\nreturn parseQualifiedIdentifier(context, (boolean) args[0]);\ncase VAR_DECL_STMT_RHS:\nreturn parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (boolean) args[3]);\ncase FIELD_DESCRIPTOR_RHS:\nreturn parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]);\ncase RECORD_BODY_START:\nreturn parseRecordBodyStartDelimiter();\ncase TYPE_DESCRIPTOR:\nreturn parseTypeDescriptorInternal((ParserRuleContext) args[0], (boolean) args[1]);\ncase 
OBJECT_MEMBER_START:\nreturn parseObjectMember();\ncase OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:\nreturn parseObjectMethodOrField((STNode) args[0], (STNode) args[1]);\ncase OBJECT_FIELD_RHS:\nreturn parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],\n(STNode) args[4]);\ncase OBJECT_TYPE_QUALIFIER:\nreturn parseObjectTypeQualifiers();\ncase OBJECT_KEYWORD:\nreturn parseObjectKeyword();\ncase TYPE_NAME:\nreturn parseTypeName();\ncase IF_KEYWORD:\nreturn parseIfKeyword();\ncase ELSE_KEYWORD:\nreturn parseElseKeyword();\ncase ELSE_BODY:\nreturn parseElseBody();\ncase WHILE_KEYWORD:\nreturn parseWhileKeyword();\ncase PANIC_KEYWORD:\nreturn parsePanicKeyword();\ncase IMPORT_DECL_RHS:\nreturn parseImportDecl((STNode) args[0], (STNode) args[1]);\ncase IMPORT_PREFIX:\nreturn parseImportPrefix();\ncase IMPORT_MODULE_NAME:\ncase IMPORT_ORG_OR_MODULE_NAME:\ncase VARIABLE_REF:\ncase SERVICE_NAME:\ncase IMPLICIT_ANON_FUNC_PARAM:\ncase MAPPING_FIELD_NAME:\ncase RECEIVE_FIELD_NAME:\ncase MODULE_ENUM_NAME:\ncase ENUM_MEMBER_NAME:\nreturn parseIdentifier(context);\ncase IMPORT_KEYWORD:\nreturn parseImportKeyword();\ncase SLASH:\nreturn parseSlashToken();\ncase DOT:\nreturn parseDotToken();\ncase IMPORT_VERSION_DECL:\nreturn parseVersion();\ncase VERSION_KEYWORD:\nreturn parseVersionKeyword();\ncase VERSION_NUMBER:\nreturn parseVersionNumber();\ncase DECIMAL_INTEGER_LITERAL:\ncase MAJOR_VERSION:\ncase MINOR_VERSION:\ncase PATCH_VERSION:\nreturn parseDecimalIntLiteral(context);\ncase IMPORT_SUB_VERSION:\nreturn parseSubVersion(context);\ncase IMPORT_PREFIX_DECL:\nreturn parseImportPrefixDecl();\ncase AS_KEYWORD:\nreturn parseAsKeyword();\ncase CONTINUE_KEYWORD:\nreturn parseContinueKeyword();\ncase BREAK_KEYWORD:\nreturn parseBreakKeyword();\ncase RETURN_KEYWORD:\nreturn parseReturnKeyword();\ncase MAPPING_FIELD:\ncase FIRST_MAPPING_FIELD:\nreturn parseMappingField((ParserRuleContext) args[0]);\ncase SPECIFIC_FIELD_RHS:\nreturn parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]);\ncase STRING_LITERAL:\nreturn parseStringLiteral();\ncase COLON:\nreturn parseColon();\ncase OPEN_BRACKET:\nreturn parseOpenBracket();\ncase RESOURCE_DEF:\nreturn parseResource();\ncase OPTIONAL_SERVICE_NAME:\nreturn parseServiceName();\ncase SERVICE_KEYWORD:\nreturn parseServiceKeyword();\ncase ON_KEYWORD:\nreturn parseOnKeyword();\ncase RESOURCE_KEYWORD:\nreturn parseResourceKeyword();\ncase LISTENER_KEYWORD:\nreturn parseListenerKeyword();\ncase NIL_TYPE_DESCRIPTOR:\nreturn parseNilTypeDescriptor();\ncase COMPOUND_ASSIGNMENT_STMT:\nreturn parseCompoundAssignmentStmt();\ncase TYPEOF_KEYWORD:\nreturn parseTypeofKeyword();\ncase ARRAY_LENGTH:\nreturn parseArrayLength();\ncase IS_KEYWORD:\nreturn parseIsKeyword();\ncase STMT_START_WITH_EXPR_RHS:\nreturn parseStatementStartWithExprRhs((STNode) args[0]);\ncase COMMA:\nreturn parseComma();\ncase CONST_DECL_TYPE:\nreturn parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]);\ncase BINDING_PATTERN_OR_EXPR_RHS:\nreturn parseTypedBindingPatternOrExprRhs((STNode) args[0], (boolean) args[1]);\ncase LT:\nreturn parseLTToken();\ncase GT:\nreturn parseGTToken();\ncase RECORD_FIELD_OR_RECORD_END:\nreturn parseFieldOrRestDescriptor((boolean) args[0]);\ncase ANNOTATION_KEYWORD:\nreturn parseAnnotationKeyword();\ncase ANNOT_DECL_OPTIONAL_TYPE:\nreturn parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3]);\ncase ANNOT_DECL_RHS:\nreturn parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], 
(STNode) args[2], (STNode) args[3],\n(STNode) args[4]);\ncase ANNOT_OPTIONAL_ATTACH_POINTS:\nreturn parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (STNode) args[4], (STNode) args[5]);\ncase SOURCE_KEYWORD:\nreturn parseSourceKeyword();\ncase ATTACH_POINT_IDENT:\nreturn parseAttachPointIdent((STNode) args[0]);\ncase IDENT_AFTER_OBJECT_IDENT:\nreturn parseIdentAfterObjectIdent();\ncase FUNCTION_IDENT:\nreturn parseFunctionIdent();\ncase FIELD_IDENT:\nreturn parseFieldIdent();\ncase ATTACH_POINT_END:\nreturn parseAttachPointEnd();\ncase XMLNS_KEYWORD:\nreturn parseXMLNSKeyword();\ncase XML_NAMESPACE_PREFIX_DECL:\nreturn parseXMLDeclRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase NAMESPACE_PREFIX:\nreturn parseNamespacePrefix();\ncase WORKER_KEYWORD:\nreturn parseWorkerKeyword();\ncase WORKER_NAME:\nreturn parseWorkerName();\ncase FORK_KEYWORD:\nreturn parseForkKeyword();\ncase TRAP_KEYWORD:\nreturn parseTrapKeyword();\ncase IN_KEYWORD:\nreturn parseInKeyword();\ncase FOREACH_KEYWORD:\nreturn parseForEachKeyword();\ncase TABLE_KEYWORD:\nreturn parseTableKeyword();\ncase KEY_KEYWORD:\nreturn parseKeyKeyword();\ncase TABLE_KEYWORD_RHS:\nreturn parseTableConstructorOrQuery((STNode) args[0], (boolean) args[1]);\ncase ERROR_KEYWORD:\nreturn parseErrorKeyword();\ncase LET_KEYWORD:\nreturn parseLetKeyword();\ncase STREAM_KEYWORD:\nreturn parseStreamKeyword();\ncase STREAM_TYPE_FIRST_PARAM_RHS:\nreturn parseStreamTypeParamsNode((STNode) args[0], (STNode) args[1]);\ncase TEMPLATE_START:\ncase TEMPLATE_END:\nreturn parseBacktickToken(context);\ncase KEY_CONSTRAINTS_RHS:\nreturn parseKeyConstraint((STNode) args[0]);\ncase FUNCTION_KEYWORD_RHS:\nreturn parseFunctionKeywordRhs((STNode) args[0], (STNode) args[1], (boolean) args[2], (boolean) args[3],\n(STNode[]) args[4]);\ncase RETURNS_KEYWORD:\nreturn parseReturnsKeyword();\ncase NEW_KEYWORD:\nreturn parseNewKeyword();\ncase FROM_KEYWORD:\nreturn parseFromKeyword();\ncase WHERE_KEYWORD:\nreturn parseWhereKeyword();\ncase SELECT_KEYWORD:\nreturn parseSelectKeyword();\ncase TABLE_CONSTRUCTOR_OR_QUERY_START:\nreturn parseTableConstructorOrQuery((boolean) args[0]);\ncase TABLE_CONSTRUCTOR_OR_QUERY_RHS:\nreturn parseTableConstructorOrQueryRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase QUERY_PIPELINE_RHS:\nreturn parseIntermediateClause((boolean) args[0]);\ncase ANON_FUNC_BODY:\nreturn parseAnonFuncBody((boolean) args[0]);\ncase CLOSE_BRACKET:\nreturn parseCloseBracket();\ncase ARG_START_OR_ARG_LIST_END:\nreturn parseArgument();\ncase ARG_END:\nreturn parseArgEnd();\ncase MAPPING_FIELD_END:\nreturn parseMappingFieldEnd();\ncase FUNCTION_KEYWORD:\nreturn parseFunctionKeyword();\ncase FIELD_OR_REST_DESCIPTOR_RHS:\nreturn parseFieldOrRestDescriptorRhs((STNode) args[0], (STNode) args[1]);\ncase TYPE_DESC_IN_TUPLE_RHS:\nreturn parseTupleMemberRhs();\ncase LIST_BINDING_PATTERN_MEMBER_END:\nreturn parseListBindingPatternMemberRhs();\ncase MAPPING_BINDING_PATTERN_END:\nreturn parseMappingBindingPatternEnd();\ncase FIELD_BINDING_PATTERN_NAME:\nreturn parseFieldBindingPattern();\ncase CONSTANT_EXPRESSION_START:\nreturn parseSimpleConstExprInternal();\ncase LIST_CONSTRUCTOR_MEMBER_END:\nreturn parseListConstructorMemberEnd();\ncase NIL_OR_PARENTHESISED_TYPE_DESC_RHS:\nreturn parseNilOrParenthesisedTypeDescRhs((STNode) args[0]);\ncase ANON_FUNC_PARAM_RHS:\nreturn parseImplicitAnonFuncParamEnd();\ncase LIST_BINDING_PATTERN:\nreturn parseListBindingPattern();\ncase 
BINDING_PATTERN:\nreturn parseBindingPattern();\ncase PEER_WORKER_NAME:\nreturn parsePeerWorkerName();\ncase SYNC_SEND_TOKEN:\nreturn parseSyncSendToken();\ncase LEFT_ARROW_TOKEN:\nreturn parseLeftArrowToken();\ncase RECEIVE_WORKERS:\nreturn parseReceiveWorkers();\ncase WAIT_KEYWORD:\nreturn parseWaitKeyword();\ncase WAIT_FUTURE_EXPR_END:\nreturn parseWaitFutureExprEnd((int) args[0]);\ncase WAIT_FIELD_NAME:\nreturn parseWaitField();\ncase WAIT_FIELD_END:\nreturn parseWaitFieldEnd();\ncase ANNOT_CHAINING_TOKEN:\nreturn parseAnnotChainingToken();\ncase DO_KEYWORD:\nreturn parseDoKeyword();\ncase MEMBER_ACCESS_KEY_EXPR_END:\nreturn parseMemberAccessKeyExprEnd();\ncase OPTIONAL_CHAINING_TOKEN:\nreturn parseOptionalChainingToken();\ncase RETRY_KEYWORD_RHS:\nreturn parseRetryKeywordRhs((STNode) args[0]);\ncase RETRY_TYPE_PARAM_RHS:\nreturn parseRetryTypeParamRhs((STNode) args[0], (STNode) args[1]);\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionKeyword();\ncase COMMIT_KEYWORD:\nreturn parseCommitKeyword();\ncase RETRY_KEYWORD:\nreturn parseRetryKeyword();\ncase ROLLBACK_KEYWORD:\nreturn parseRollbackKeyword();\ncase RETRY_BODY:\nreturn parseRetryBody();\ncase ENUM_MEMBER_END:\nreturn parseEnumMemberEnd();\ncase BRACKETED_LIST_MEMBER_END:\nreturn parseBracketedListMemberEnd();\ncase STMT_START_BRACKETED_LIST_MEMBER:\nreturn parseStatementStartBracketedListMember();\ncase TYPED_BINDING_PATTERN_TYPE_RHS:\nreturn parseTypedBindingPatternTypeRhs((STNode) args[0], (ParserRuleContext) args[1],\n(boolean) args[2]);\ncase BRACKETED_LIST_RHS:\nreturn parseTypedBindingPatternOrMemberAccessRhs((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (boolean) args[4], (boolean) args[5], (ParserRuleContext) args[6]);\ncase UNION_OR_INTERSECTION_TOKEN:\nreturn parseUnionOrIntersectionToken();\ncase BRACKETED_LIST_MEMBER:\ncase LIST_BINDING_MEMBER_OR_ARRAY_LENGTH:\nreturn parseBracketedListMember((boolean) args[0]);\ncase BASE16_KEYWORD:\nreturn parseBase16Keyword();\ncase BASE64_KEYWORD:\nreturn parseBase64Keyword();\ncase DOT_LT_TOKEN:\nreturn parseDotLTToken();\ncase SLASH_LT_TOKEN:\nreturn parseSlashLTToken();\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\nreturn parseDoubleSlashDoubleAsteriskLTToken();\ncase XML_ATOMIC_NAME_PATTERN_START:\nreturn parseXMLAtomicNamePatternBody();\ncase BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS:\nreturn parseBracedExprOrAnonFuncParamRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);\ncase READONLY_KEYWORD:\nreturn parseReadonlyKeyword();\ncase SPECIFIC_FIELD:\nreturn parseSpecificField((STNode) args[0]);\ncase OPTIONAL_MATCH_GUARD:\nreturn parseMatchGuard();\ncase MATCH_PATTERN_START:\nreturn parseMatchPattern();\ncase MATCH_PATTERN_RHS:\nreturn parseMatchPatternEnd();\ncase ENUM_MEMBER_RHS:\nreturn parseEnumMemberRhs((STNode) args[0], (STNode) args[1]);\ncase RECEIVE_FIELD:\nreturn parseReceiveField();\ncase PUBLIC_KEYWORD:\nreturn parseQualifier();\ncase PARAM_END:\nreturn parseParameterRhs();\ncase ELLIPSIS:\nreturn parseEllipsis();\ncase BINARY_OPERATOR:\nreturn parseBinaryOperator();\ncase TYPE_KEYWORD:\nreturn parseTypeKeyword();\ncase CLOSED_RECORD_BODY_START:\nreturn parseClosedRecordBodyStart();\ncase CLOSED_RECORD_BODY_END:\nreturn parseClosedRecordBodyEnd();\ncase QUESTION_MARK:\nreturn parseQuestionMark();\ncase FINAL_KEYWORD:\nreturn parseFinalKeyword();\ncase CLIENT_KEYWORD:\nreturn parseClientKeyword();\ncase ABSTRACT_KEYWORD:\nreturn parseAbstractKeyword();\ncase REMOTE_KEYWORD:\nreturn parseRemoteKeyword();\ncase 
CHECKING_KEYWORD:\nreturn parseCheckingKeyword();\ncase COMPOUND_BINARY_OPERATOR:\nreturn parseCompoundBinaryOperator();\ncase CONST_DECL_RHS:\nreturn parseConstantOrListenerDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2],\n(STNode) args[3], (boolean) args[4]);\ncase CONST_KEYWORD:\nreturn parseConstantKeyword();\ncase UNARY_OPERATOR:\nreturn parseUnaryOperator();\ncase AT:\nreturn parseAtToken();\ncase REMOTE_CALL_OR_ASYNC_SEND_RHS:\nreturn parseRemoteCallOrAsyncSendActionRhs((STNode) args[0], (boolean) args[1], (STNode) args[2]);\ncase DEFAULT_KEYWORD:\nreturn parseDefaultKeyword();\ncase RIGHT_ARROW:\nreturn parseRightArrow();\ncase PARAMETERIZED_TYPE:\nreturn parseParameterizedTypeKeyword();\ncase ANNOTATION_TAG:\nreturn parseAnnotationTag();\ncase ATTACH_POINT:\nreturn parseAnnotationAttachPoint();\ncase LOCK_KEYWORD:\nreturn parseLockKeyword();\ncase PIPE:\nreturn parsePipeToken();\ncase STRING_KEYWORD:\nreturn parseStringKeyword();\ncase XML_KEYWORD:\nreturn parseXMLKeyword();\ncase INTERPOLATION_START_TOKEN:\nreturn parseInterpolationStart();\ncase EXPR_FUNC_BODY_START:\nreturn parseDoubleRightArrow();\ncase START_KEYWORD:\nreturn parseStartKeyword();\ncase FLUSH_KEYWORD:\nreturn parseFlushKeyword();\ncase ENUM_KEYWORD:\nreturn parseEnumKeyword();\ncase MATCH_KEYWORD:\nreturn parseMatchKeyword();\ncase RECORD_KEYWORD:\nreturn parseRecordKeyword();\ncase LIST_MATCH_PATTERN_MEMBER_RHS:\nreturn parseListMatchPatternMemberRhs();\ncase LIST_BINDING_PATTERN_MEMBER:\nreturn parseListBindingPatternMember();\ncase FIELD_MATCH_PATTERN_MEMBER:\nreturn parseFieldMatchPatternMember();\ncase FIELD_MATCH_PATTERN_MEMBER_RHS:\nreturn parseFieldMatchPatternRhs();\ncase FUNC_MATCH_PATTERN_OR_CONST_PATTERN:\nreturn parseFunctionalMatchPatternOrConsPattern((STNode) args[0]);\ncase ARG_MATCH_PATTERN:\nreturn parseArgMatchPattern();\ncase ARG_MATCH_PATTERN_RHS:\nreturn parseArgMatchPatternRhs();\ndefault:\nthrow new IllegalStateException(\"cannot resume parsing the rule: \" + context);\n}\n}\n/*\n* Private methods.\n*/\n/**\n* Parse a given input and returns the AST. 
Starts parsing from the top of a compilation unit.\n*\n* @return Parsed node\n*/\nprivate STNode parseCompUnit() {\nstartContext(ParserRuleContext.COMP_UNIT);\nSTToken token = peek();\nList otherDecls = new ArrayList<>();\nList importDecls = new ArrayList<>();\nboolean processImports = true;\nwhile (token.kind != SyntaxKind.EOF_TOKEN) {\nSTNode decl = parseTopLevelNode(token.kind);\nif (decl == null) {\nbreak;\n}\nif (decl.kind == SyntaxKind.IMPORT_DECLARATION) {\nif (processImports) {\nimportDecls.add(decl);\n} else {\nupdateLastNodeInListWithInvalidNode(otherDecls, decl,\nDiagnosticErrorCode.ERROR_IMPORT_DECLARATION_AFTER_OTHER_DECLARATIONS);\n}\n} else {\nif (processImports) {\nprocessImports = false;\n}\notherDecls.add(decl);\n}\ntoken = peek();\n}\nSTToken eof = consume();\nendContext();\nreturn STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls),\nSTNodeFactory.createNodeList(otherDecls), eof);\n}\n/**\n* Parse top level node having an optional modifier preceding it.\n*\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode() {\nSTToken token = peek();\nreturn parseTopLevelNode(token.kind);\n}\nprotected STNode parseTopLevelNode(SyntaxKind tokenKind) {\nSTNode metadata;\nswitch (tokenKind) {\ncase EOF_TOKEN:\nreturn null;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(tokenKind);\nreturn parseTopLevelNode(metadata);\ncase IMPORT_KEYWORD:\ncase FINAL_KEYWORD:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase XMLNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase ENUM_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(createEmptyMetadata(), null);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nmetadata = createEmptyMetadata();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE);\nif (solution.action == Action.KEEP) {\nmetadata = STNodeFactory.createEmptyNodeList();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTopLevelNode(solution.tokenKind);\n}\nreturn parseTopLevelNode(tokenKind, metadata);\n}\n/**\n* Parse top level node having an optional modifier preceding it, given the next token kind.\n*\n* @param metadata Next token kind\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode(STNode metadata) {\nSTToken nextToken = peek();\nreturn parseTopLevelNode(nextToken.kind, metadata);\n}\nprivate STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) {\nSTNode qualifier = null;\nswitch (tokenKind) {\ncase EOF_TOKEN:\nif (metadata != null) {\naddInvalidNodeToNextToken(metadata, DiagnosticErrorCode.ERROR_INVALID_METADATA);\n}\nreturn null;\ncase PUBLIC_KEYWORD:\nqualifier = parseQualifier();\ntokenKind = peek().kind;\nbreak;\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase IMPORT_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase XMLNS_KEYWORD:\ncase ENUM_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nbreak;\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(metadata, null);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, 
metadata);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nreturn parseTopLevelNode(solution.tokenKind, metadata);\n}\nreturn parseTopLevelNode(tokenKind, metadata, qualifier);\n}\n/**\n* Check whether the cursor is at the start of a module level var-decl.\n*\n* @param lookahead Offset of the token to to check\n* @return true if the cursor is at the start of a module level var-decl.\n* false otherwise.\n*/\nprivate boolean isModuleVarDeclStart(int lookahead) {\nSTToken nextToken = peek(lookahead + 1);\nswitch (nextToken.kind) {\ncase EQUAL_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase ERROR_KEYWORD:\nreturn true;\ncase IDENTIFIER_TOKEN:\nswitch (peek(lookahead + 2).kind) {\ncase EQUAL_TOKEN:\ncase SEMICOLON_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\ncase COLON_TOKEN:\nif (lookahead > 1) {\nreturn false;\n}\nif (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) {\nreturn false;\n}\nreturn isModuleVarDeclStart(lookahead + 2);\ndefault:\nreturn false;\n}\n}\n/**\n* Parse import declaration.\n*
\n* import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;\n*\n* @return Parsed node\n*/\nprivate STNode parseImportDecl() {\nstartContext(ParserRuleContext.IMPORT_DECL);\nthis.tokenReader.startMode(ParserMode.IMPORT);\nSTNode importKeyword = parseImportKeyword();\nSTNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME);\nSTToken token = peek();\nSTNode importDecl = parseImportDecl(token.kind, importKeyword, identifier);\nthis.tokenReader.endMode();\nendContext();\nreturn importDecl;\n}\n/**\n* Parse import keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseImportKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IMPORT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse identifier.\n*\n* @return Parsed node\n*/\nprivate STNode parseIdentifier(ParserRuleContext currentCtx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else if (token.kind == SyntaxKind.MAP_KEYWORD) {\nSTToken mapKeyword = consume();\nreturn STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\n} else {\nSolution sol = recover(token, currentCtx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse RHS of the import declaration. This includes the components after the\n* starting identifier (org-name/module-name) of the import decl.\n*\n* @param importKeyword Import keyword\n* @param identifier Org-name or the module name\n* @return Parsed node\n*/\nprivate STNode parseImportDecl(STNode importKeyword, STNode identifier) {\nSTToken nextToken = peek();\nreturn parseImportDecl(nextToken.kind, importKeyword, identifier);\n}\nprivate STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) {\nSTNode orgName;\nSTNode moduleName;\nSTNode version;\nSTNode alias;\nswitch (tokenKind) {\ncase SLASH_TOKEN:\nSTNode slash = parseSlashToken();\norgName = STNodeFactory.createImportOrgNameNode(identifier, slash);\nmoduleName = parseModuleName();\nversion = parseVersion();\nalias = parseImportPrefixDecl();\nbreak;\ncase DOT_TOKEN:\ncase VERSION_KEYWORD:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = parseVersion();\nalias = parseImportPrefixDecl();\nbreak;\ncase AS_KEYWORD:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = STNodeFactory.createEmptyNode();\nalias = parseImportPrefixDecl();\nbreak;\ncase SEMICOLON_TOKEN:\norgName = STNodeFactory.createEmptyNode();\nmoduleName = parseModuleName(tokenKind, identifier);\nversion = STNodeFactory.createEmptyNode();\nalias = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseImportDecl(solution.tokenKind, importKeyword, identifier);\n}\nSTNode semicolon = parseSemicolon();\nreturn STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon);\n}\n/**\n* parse slash token.\n*\n* @return Parsed node\n*/\nprivate STNode parseSlashToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SLASH_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SLASH);\nreturn 
sol.recoveredNode;\n}\n}\n/**\n* Parse dot token.\n*\n* @return Parsed node\n*/\nprivate STNode parseDotToken() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DOT_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.DOT);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse module name of a import declaration.\n*\n* @return Parsed node\n*/\nprivate STNode parseModuleName() {\nSTNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME);\nreturn parseModuleName(peek().kind, moduleNameStart);\n}\n/**\n* Parse import module name of a import declaration, given the module name start identifier.\n*\n* @param moduleNameStart Starting identifier of the module name\n* @return Parsed node\n*/\nprivate STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) {\nList moduleNameParts = new ArrayList<>();\nmoduleNameParts.add(moduleNameStart);\nwhile (!isEndOfImportModuleName(nextTokenKind)) {\nmoduleNameParts.add(parseDotToken());\nmoduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME));\nnextTokenKind = peek().kind;\n}\nreturn STNodeFactory.createNodeList(moduleNameParts);\n}\nprivate boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) {\nreturn nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN;\n}\nprivate boolean isEndOfImportDecl(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase TYPE_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CONST_KEYWORD:\ncase EOF_TOKEN:\ncase SERVICE_KEYWORD:\ncase IMPORT_KEYWORD:\ncase FINAL_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse version component of a import declaration.\n*
\n* version-decl := version sem-ver\n*\n* @return Parsed node\n*/\nprivate STNode parseVersion() {\nSTToken nextToken = peek();\nreturn parseVersion(nextToken.kind);\n}\nprivate STNode parseVersion(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase VERSION_KEYWORD:\nSTNode versionKeyword = parseVersionKeyword();\nSTNode versionNumber = parseVersionNumber();\nreturn STNodeFactory.createImportVersionNode(versionKeyword, versionNumber);\ncase AS_KEYWORD:\ncase SEMICOLON_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nif (isEndOfImportDecl(nextTokenKind)) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVersion(solution.tokenKind);\n}\n}\n/**\n* Parse version keywrod.\n*\n* @return Parsed node\n*/\nprivate STNode parseVersionKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.VERSION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse version number.\n*
\n* sem-ver := major-num [. minor-num [. patch-num]]\n* major-num := DecimalNumber\n* minor-num := DecimalNumber\n* patch-num := DecimalNumber\n*
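As an aside, the sem-ver production above is a chain of nested optionals: a mandatory major-num, then a minor-num only if a dot follows, then a patch-num only if another dot follows. A minimal standalone sketch of that shape (plain Java with a hypothetical Version record, not the parser's STNode types):

```java
// Hypothetical sketch of parsing "major[.minor[.patch]]"; not the real STNode-based code.
import java.util.Optional;

public class SemVerSketch {
    record Version(int major, Optional<Integer> minor, Optional<Integer> patch) {}

    static Version parse(String text) {
        String[] parts = text.split("\\.");
        int major = Integer.parseInt(parts[0]);               // major-num is mandatory
        Optional<Integer> minor = parts.length > 1
                ? Optional.of(Integer.parseInt(parts[1])) : Optional.empty();
        Optional<Integer> patch = parts.length > 2
                ? Optional.of(Integer.parseInt(parts[2])) : Optional.empty();
        return new Version(major, minor, patch);
    }

    public static void main(String[] args) {
        System.out.println(parse("1.2.3")); // Version[major=1, minor=Optional[2], patch=Optional[3]]
    }
}
```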
\n*\n* @return Parsed node\n*/\nprivate STNode parseVersionNumber() {\nSTToken nextToken = peek();\nreturn parseVersionNumber(nextToken.kind);\n}\nprivate STNode parseVersionNumber(SyntaxKind nextTokenKind) {\nSTNode majorVersion;\nswitch (nextTokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\nmajorVersion = parseMajorVersion();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.VERSION_NUMBER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVersionNumber(solution.tokenKind);\n}\nList versionParts = new ArrayList<>();\nversionParts.add(majorVersion);\nSTNode minorVersion = parseMinorVersion();\nif (minorVersion != null) {\nversionParts.add(minorVersion);\nSTNode patchVersion = parsePatchVersion();\nif (patchVersion != null) {\nversionParts.add(patchVersion);\n}\n}\nreturn STNodeFactory.createNodeList(versionParts);\n}\nprivate STNode parseMajorVersion() {\nreturn parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION);\n}\nprivate STNode parseMinorVersion() {\nreturn parseSubVersion(ParserRuleContext.MINOR_VERSION);\n}\nprivate STNode parsePatchVersion() {\nreturn parseSubVersion(ParserRuleContext.PATCH_VERSION);\n}\n/**\n* Parse decimal literal.\n*\n* @param context Context in which the decimal literal is used.\n* @return Parsed node\n*/\nprivate STNode parseDecimalIntLiteral(ParserRuleContext context) {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), context);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse sub version. i.e: minor-version/patch-version.\n*\n* @param context Context indicating what kind of sub-version is being parsed.\n* @return Parsed node\n*/\nprivate STNode parseSubVersion(ParserRuleContext context) {\nSTToken nextToken = peek();\nreturn parseSubVersion(nextToken.kind, context);\n}\nprivate STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) {\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\ncase SEMICOLON_TOKEN:\nreturn null;\ncase DOT_TOKEN:\nSTNode leadingDot = parseDotToken();\nSTNode versionNumber = parseDecimalIntLiteral(context);\nreturn STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseSubVersion(solution.tokenKind, context);\n}\n}\n/**\n* Parse import prefix declaration.\n*
\n* import-prefix-decl := as import-prefix\n* import-prefix := an identifier | _\n*
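The [as import-prefix] clause is optional, so parseImportPrefixDecl peeks one token and only commits once it sees the as keyword; at the end of the declaration it returns an empty node, and on anything else it goes through recovery. A minimal peek-then-commit sketch under those assumptions (hypothetical Tok enum, not the real SyntaxKind or recovery machinery):

```java
// Hypothetical peek-then-commit sketch for an optional trailing clause.
import java.util.ArrayDeque;
import java.util.Deque;

public class OptionalClauseSketch {
    enum Tok { AS, IDENT, SEMICOLON }

    static String parseImportPrefix(Deque<Tok> tokens) {
        if (tokens.peek() == Tok.AS) {        // clause present: consume 'as' + identifier
            tokens.poll();
            if (tokens.poll() != Tok.IDENT) {
                throw new IllegalStateException("expected identifier after 'as'");
            }
            return "prefix";
        }
        return null;                           // clause absent: an empty node in the real parser
    }

    public static void main(String[] args) {
        Deque<Tok> toks = new ArrayDeque<>(java.util.List.of(Tok.AS, Tok.IDENT, Tok.SEMICOLON));
        System.out.println(parseImportPrefix(toks)); // prefix
    }
}
```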
\n*\n* @return Parsed node\n*/\nprivate STNode parseImportPrefixDecl() {\nSTToken token = peek();\nreturn parseImportPrefixDecl(token.kind);\n}\nprivate STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase AS_KEYWORD:\nSTNode asKeyword = parseAsKeyword();\nSTNode prefix = parseImportPrefix();\nreturn STNodeFactory.createImportPrefixNode(asKeyword, prefix);\ncase SEMICOLON_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nif (isEndOfImportDecl(nextTokenKind)) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseImportPrefixDecl(solution.tokenKind);\n}\n}\n/**\n* Parse as keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAsKeyword() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.AS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.AS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse import prefix.\n*\n* @return Parsed node\n*/\nprivate STNode parseImportPrefix() {\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse top level node, given the modifier that precedes it.\n*\n* @param qualifier Qualifier that precedes the top level node\n* @return Parsed node\n*/\nprivate STNode parseTopLevelNode(STNode metadata, STNode qualifier) {\nSTToken token = peek();\nreturn parseTopLevelNode(token.kind, metadata, qualifier);\n}\n/**\n* Parse top level node given the next token kind and the modifier that precedes it.\n*\n* @param tokenKind Next token kind\n* @param qualifier Qualifier that precedes the top level node\n* @return Parsed top-level node\n*/\nprivate STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) {\nswitch (tokenKind) {\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nreturn parseFuncDefOrFuncTypeDesc(metadata, false, getQualifier(qualifier), null);\ncase TYPE_KEYWORD:\nreturn parseModuleTypeDefinition(metadata, getQualifier(qualifier));\ncase LISTENER_KEYWORD:\nreturn parseListenerDeclaration(metadata, getQualifier(qualifier));\ncase CONST_KEYWORD:\nreturn parseConstantDeclaration(metadata, getQualifier(qualifier));\ncase ANNOTATION_KEYWORD:\nSTNode constKeyword = STNodeFactory.createEmptyNode();\nreturn parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword);\ncase IMPORT_KEYWORD:\nreportInvalidQualifier(qualifier);\nreturn parseImportDecl();\ncase XMLNS_KEYWORD:\nreportInvalidQualifier(qualifier);\nreturn parseXMLNamespaceDeclaration(true);\ncase FINAL_KEYWORD:\nreportInvalidQualifier(qualifier);\nSTNode finalKeyword = parseFinalKeyword();\nreturn parseVariableDecl(metadata, finalKeyword, true);\ncase SERVICE_KEYWORD:\nif (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) {\nreportInvalidQualifier(qualifier);\nreturn parseServiceDecl(metadata);\n}\nreturn parseModuleVarDecl(metadata, qualifier);\ncase ENUM_KEYWORD:\nreturn parseEnumDeclaration(metadata, getQualifier(qualifier));\ncase IDENTIFIER_TOKEN:\nif (isModuleVarDeclStart(1)) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\ndefault:\nif (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\nSTToken token = 
peek();\nSolution solution =\nrecover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nreturn parseModuleVarDecl(metadata, qualifier);\n}\nreturn parseTopLevelNode(solution.tokenKind, metadata, qualifier);\n}\n}\nprivate STNode parseModuleVarDecl(STNode metadata, STNode qualifier) {\nreportInvalidQualifier(qualifier);\nSTNode finalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(metadata, finalKeyword, true);\n}\nprivate STNode getQualifier(STNode qualifier) {\nreturn qualifier == null ? STNodeFactory.createEmptyNode() : qualifier;\n}\nprivate void reportInvalidQualifier(STNode qualifier) {\nif (qualifier != null && qualifier.kind != SyntaxKind.NONE) {\naddInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_INVALID_QUALIFIER,\nqualifier.toString().trim());\n}\n}\n/**\n* Parse access modifiers.\n*\n* @return Parsed node\n*/\nprivate STNode parseQualifier() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PUBLIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseFuncDefinition(STNode metadata, boolean isObjectMethod, STNode... qualifiers) {\nparseTransactionalQUalifier(qualifiers);\nstartContext(ParserRuleContext.FUNC_DEF);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcDef = parseFunctionKeywordRhs(metadata, functionKeyword, true, isObjectMethod, qualifiers);\nreturn funcDef;\n}\n/**\n* Parse function definition for the function type descriptor.\n*
\n* function-defn := FUNCTION identifier function-signature function-body\n* function-type-descriptor := function function-signature\n*
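Both productions share the function keyword as a prefix, so the parser consumes that prefix first and branches on the very next token: an identifier means a function definition, an open parenthesis means a function type descriptor. A toy sketch of that branching decision (hypothetical token kinds; the real code also threads metadata, qualifiers, and recovery through parseFunctionKeywordRhs):

```java
// Toy disambiguation after consuming the shared 'function' keyword prefix.
public class FuncPrefixSketch {
    enum NextToken { IDENTIFIER, OPEN_PAREN, OTHER }

    static String classify(NextToken next) {
        switch (next) {
            case IDENTIFIER: return "function-defn";            // a function name follows
            case OPEN_PAREN: return "function-type-descriptor"; // the signature starts immediately
            default: throw new IllegalStateException("recover: unexpected token " + next);
        }
    }

    public static void main(String[] args) {
        System.out.println(classify(NextToken.IDENTIFIER)); // function-defn
        System.out.println(classify(NextToken.OPEN_PAREN)); // function-type-descriptor
    }
}
```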
\n*\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @return Parsed node\n*/\nprivate STNode parseFuncDefOrFuncTypeDesc(STNode metadata, boolean isObjectMethod, STNode... qualifiers) {\nparseTransactionalQUalifier(qualifiers);\nstartContext(ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE);\nSTNode functionKeyword = parseFunctionKeyword();\nSTNode funcDefOrType = parseFunctionKeywordRhs(metadata, functionKeyword, false, isObjectMethod, qualifiers);\nreturn funcDefOrType;\n}\nprivate void parseTransactionalQUalifier(STNode... qualifiers) {\nif (peek().kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\nqualifiers[qualifiers.length - 1] = consume();\n} else {\nqualifiers[qualifiers.length - 1] = STNodeFactory.createEmptyNode();\n}\n}\nprivate STNode parseFunctionKeywordRhs(STNode metadata, STNode functionKeyword, boolean isFuncDef,\nboolean isObjectMethod, STNode... qualifiers) {\nreturn parseFunctionKeywordRhs(peek().kind, metadata, functionKeyword, isFuncDef, isObjectMethod, qualifiers);\n}\nprivate STNode parseFunctionKeywordRhs(SyntaxKind nextTokenKind, STNode metadata, STNode functionKeyword,\nboolean isFuncDef, boolean isObjectMethod, STNode... qualifiers) {\nSTNode name;\nswitch (nextTokenKind) {\ncase IDENTIFIER_TOKEN:\nname = parseFunctionName();\nisFuncDef = true;\nbreak;\ncase OPEN_PAREN_TOKEN:\nname = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FUNCTION_KEYWORD_RHS, metadata, functionKeyword,\nisFuncDef, isObjectMethod, qualifiers);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFunctionKeywordRhs(solution.tokenKind, metadata, functionKeyword, isFuncDef, isObjectMethod,\nqualifiers);\n}\nif (isFuncDef) {\nswitchContext(ParserRuleContext.FUNC_DEF);\nSTNode funcSignature = parseFuncSignature(false);\nSTNode funcDef = createFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMethod, name, funcSignature,\nqualifiers);\nendContext();\nreturn funcDef;\n}\nSTNode funcSignature = parseFuncSignature(true);\nreturn parseReturnTypeDescRhs(metadata, functionKeyword, funcSignature, isObjectMethod, qualifiers);\n}\nprivate STNode createFuncDefOrMethodDecl(STNode metadata, STNode functionKeyword, boolean isObjectMethod,\nSTNode name, STNode funcSignature, STNode... qualifiers) {\nSTNode body = parseFunctionBody(isObjectMethod);\nif (body.kind == SyntaxKind.SEMICOLON_TOKEN) {\nreturn STNodeFactory.createMethodDeclarationNode(metadata, qualifiers[0], functionKeyword, name,\nfuncSignature, body);\n}\nif (isObjectMethod) {\nreturn STNodeFactory.createObjectMethodDefinitionNode(metadata, qualifiers[0], qualifiers[1], qualifiers[2],\nfunctionKeyword, name, funcSignature, body);\n}\nreturn STNodeFactory.createFunctionDefinitionNode(metadata, qualifiers[0], qualifiers[1], functionKeyword, name,\nfuncSignature, body);\n}\n/**\n* Parse function signature.\n*
\n* function-signature := ( param-list ) return-type-descriptor\n* return-type-descriptor := [ returns [annots] type-descriptor ]\n*
\n*\n* @param isParamNameOptional Whether the parameter names are optional\n* @return Function signature node\n*/\nprivate STNode parseFuncSignature(boolean isParamNameOptional) {\nSTNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nSTNode parameters = parseParamList(isParamNameOptional);\nSTNode closeParenthesis = parseCloseParenthesis();\nendContext();\nSTNode returnTypeDesc = parseFuncReturnTypeDescriptor();\nreturn STNodeFactory.createFunctionSignatureNode(openParenthesis, parameters, closeParenthesis, returnTypeDesc);\n}\nprivate STNode parseReturnTypeDescRhs(STNode metadata, STNode functionKeyword, STNode funcSignature,\nboolean isObjectMethod, STNode... qualifiers) {\nswitch (peek().kind) {\ncase SEMICOLON_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase OPEN_BRACKET_TOKEN:\nendContext();\nSTNode typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);\nif (isObjectMethod) {\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, qualifiers[0], readonlyQualifier, typeDesc, fieldName);\n}\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(metadata, qualifiers[0], typedBindingPattern, true);\ncase OPEN_BRACE_TOKEN:\ncase EQUAL_TOKEN:\nbreak;\ndefault:\nbreak;\n}\nSTNode name = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_FUNCTION_NAME);\nfuncSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\nSTNode funcDef =\ncreateFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMethod, name, funcSignature, qualifiers);\nendContext();\nreturn funcDef;\n}\n/**\n* Validate the param list and return. 
If there are params without param-name,\n* then this method will create a new set of params with missing param-name\n* and return.\n*\n* @param signature Function signature\n* @return\n*/\nprivate STNode validateAndGetFuncParams(STFunctionSignatureNode signature) {\nSTNode parameters = signature.parameters;\nint paramCount = parameters.bucketCount();\nint index = 0;\nfor (; index < paramCount; index++) {\nSTNode param = parameters.childInBucket(index);\nswitch (param.kind) {\ncase REQUIRED_PARAM:\nSTRequiredParameterNode requiredParam = (STRequiredParameterNode) param;\nif (isEmpty(requiredParam.paramName)) {\nbreak;\n}\ncontinue;\ncase DEFAULTABLE_PARAM:\nSTDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;\nif (isEmpty(defaultableParam.paramName)) {\nbreak;\n}\ncontinue;\ncase REST_PARAM:\nSTRestParameterNode restParam = (STRestParameterNode) param;\nif (isEmpty(restParam.paramName)) {\nbreak;\n}\ncontinue;\ndefault:\ncontinue;\n}\nbreak;\n}\nif (index == paramCount) {\nreturn signature;\n}\nSTNode updatedParams = getUpdatedParamList(parameters, index);\nreturn STNodeFactory.createFunctionSignatureNode(signature.openParenToken, updatedParams,\nsignature.closeParenToken, signature.returnTypeDesc);\n}\nprivate STNode getUpdatedParamList(STNode parameters, int index) {\nint paramCount = parameters.bucketCount();\nint newIndex = 0;\nArrayList newParams = new ArrayList<>();\nfor (; newIndex < index; newIndex++) {\nnewParams.add(parameters.childInBucket(index));\n}\nfor (; newIndex < paramCount; newIndex++) {\nSTNode param = parameters.childInBucket(newIndex);\nSTNode paramName = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nswitch (param.kind) {\ncase REQUIRED_PARAM:\nSTRequiredParameterNode requiredParam = (STRequiredParameterNode) param;\nif (isEmpty(requiredParam.paramName)) {\nparam = STNodeFactory.createRequiredParameterNode(requiredParam.leadingComma,\nrequiredParam.annotations, requiredParam.visibilityQualifier, requiredParam.typeName,\nparamName);\n}\nbreak;\ncase DEFAULTABLE_PARAM:\nSTDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;\nif (isEmpty(defaultableParam.paramName)) {\nparam = STNodeFactory.createDefaultableParameterNode(defaultableParam.leadingComma,\ndefaultableParam.annotations, defaultableParam.visibilityQualifier,\ndefaultableParam.typeName, paramName, defaultableParam.equalsToken,\ndefaultableParam.expression);\n}\nbreak;\ncase REST_PARAM:\nSTRestParameterNode restParam = (STRestParameterNode) param;\nif (isEmpty(restParam.paramName)) {\nparam = STNodeFactory.createRestParameterNode(restParam.leadingComma, restParam.annotations,\nrestParam.typeName, restParam.ellipsisToken, paramName);\n}\nbreak;\ndefault:\nbreak;\n}\nnewParams.add(param);\n}\nreturn STNodeFactory.createNodeList(newParams);\n}\nprivate boolean isEmpty(STNode node) {\nreturn !SyntaxUtils.isSTNodePresent(node);\n}\n/**\n* Parse function keyword. 
Need to validate the token before consuming,\n* since we can reach here while recovering.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse function name.\n*\n* @return Parsed node\n*/\nprivate STNode parseFunctionName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FUNC_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse open parenthesis.\n*\n* @param ctx Context of the parenthesis\n* @return Parsed node\n*/\nprivate STNode parseOpenParenthesis(ParserRuleContext ctx) {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ctx);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse close parenthesis.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseParenthesis() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse parameter list.\n* param-list := required-params [, defaultable-params] [, rest-param]\n*  | defaultable-params [, rest-param]\n*  | [rest-param]\n* required-params := required-param (, required-param)*\n* required-param := [annots] [public] type-descriptor [param-name]\n* defaultable-params := defaultable-param (, defaultable-param)*\n* defaultable-param := [annots] [public] type-descriptor [param-name] default-value\n* rest-param := [annots] type-descriptor ... [param-name]\n* param-name := identifier\n*
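The ordering baked into param-list (required params first, then defaultable ones, then at most one rest param) is enforced incrementally: each newly parsed parameter is checked against the kind of the previous one, which is the shape of validateParamOrder further below. A compilable sketch of just that check:

```java
// Standalone sketch of the required -> defaultable -> rest ordering check.
public class ParamOrderSketch {
    enum ParamKind { REQUIRED, DEFAULTABLE, REST }

    /** Returns an error message, or null if the ordering is legal. */
    static String validate(ParamKind prev, ParamKind next) {
        if (prev == ParamKind.REST) {
            return "parameter after the rest parameter";
        }
        if (prev == ParamKind.DEFAULTABLE && next == ParamKind.REQUIRED) {
            return "required parameter after a defaultable parameter";
        }
        return null;
    }

    public static void main(String[] args) {
        System.out.println(validate(ParamKind.REQUIRED, ParamKind.DEFAULTABLE)); // null
        System.out.println(validate(ParamKind.DEFAULTABLE, ParamKind.REQUIRED)); // error message
    }
}
```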
\n*\n* @param isParamNameOptional Whether the param names in the signature is optional or not.\n* @return Parsed node\n*/\nprivate STNode parseParamList(boolean isParamNameOptional) {\nstartContext(ParserRuleContext.PARAM_LIST);\nSTToken token = peek();\nif (isEndOfParametersList(token.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nArrayList paramsList = new ArrayList<>();\nSTNode startingComma = STNodeFactory.createEmptyNode();\nstartContext(ParserRuleContext.REQUIRED_PARAM);\nSTNode firstParam = parseParameter(startingComma, SyntaxKind.REQUIRED_PARAM, isParamNameOptional);\nSyntaxKind prevParamKind = firstParam.kind;\nparamsList.add(firstParam);\nboolean paramOrderErrorPresent = false;\ntoken = peek();\nwhile (!isEndOfParametersList(token.kind)) {\nif (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM) {\nstartContext(ParserRuleContext.DEFAULTABLE_PARAM);\n} else {\nstartContext(ParserRuleContext.REQUIRED_PARAM);\n}\nSTNode paramEnd = parseParameterRhs();\nif (paramEnd == null) {\nendContext();\nbreak;\n}\nSTNode param = parseParameter(paramEnd, prevParamKind, isParamNameOptional);\nif (paramOrderErrorPresent) {\nupdateLastNodeInListWithInvalidNode(paramsList, param, null);\n} else {\nDiagnosticCode paramOrderError = validateParamOrder(param, prevParamKind);\nif (paramOrderError == null) {\nparamsList.add(param);\n} else {\nparamOrderErrorPresent = true;\nupdateLastNodeInListWithInvalidNode(paramsList, param, paramOrderError);\n}\n}\nprevParamKind = param.kind;\ntoken = peek();\n}\nreturn STNodeFactory.createNodeList(paramsList);\n}\n/**\n* Return the appropriate {@code DiagnosticCode} if there are parameter order issues.\n*\n* @param param the new parameter\n* @param prevParamKind the SyntaxKind of the previously added parameter\n*/\nprivate DiagnosticCode validateParamOrder(STNode param, SyntaxKind prevParamKind) {\nif (prevParamKind == SyntaxKind.REST_PARAM) {\nreturn DiagnosticErrorCode.ERROR_PARAMETER_AFTER_THE_REST_PARAMETER;\n} else if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM && param.kind == SyntaxKind.REQUIRED_PARAM) {\nreturn DiagnosticErrorCode.ERROR_REQUIRED_PARAMETER_AFTER_THE_DEFAULTABLE_PARAMETER;\n} else {\nreturn null;\n}\n}\nprivate boolean isNodeWithSyntaxKindInList(List nodeList, SyntaxKind kind) {\nfor (STNode node : nodeList) {\nif (node.kind == kind) {\nreturn true;\n}\n}\nreturn false;\n}\nprivate STNode parseParameterRhs() {\nreturn parseParameterRhs(peek().kind);\n}\nprivate STNode parseParameterRhs(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAM_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameterRhs(solution.tokenKind);\n}\n}\n/**\n* Parse a single parameter. 
Parameter can be a required parameter, a defaultable\n* parameter, or a rest parameter.\n*\n* @param prevParamKind Kind of the parameter that precedes current parameter\n* @param leadingComma Comma that occurs before the param\n* @param isParamNameOptional Whether the param names in the signature is optional or not.\n* @return Parsed node\n*/\nprivate STNode parseParameter(STNode leadingComma, SyntaxKind prevParamKind, boolean isParamNameOptional) {\nSTToken token = peek();\nreturn parseParameter(token.kind, prevParamKind, leadingComma, 1, isParamNameOptional);\n}\nprivate STNode parseParameter(SyntaxKind prevParamKind, STNode leadingComma, int nextTokenOffset,\nboolean isParamNameOptional) {\nreturn parseParameter(peek().kind, prevParamKind, leadingComma, nextTokenOffset, isParamNameOptional);\n}\nprivate STNode parseParameter(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nint nextTokenOffset, boolean isParamNameOptional) {\nSTNode annots;\nswitch (nextTokenKind) {\ncase AT_TOKEN:\nannots = parseAnnotations(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ncase PUBLIC_KEYWORD:\ncase IDENTIFIER_TOKEN:\nannots = STNodeFactory.createEmptyNodeList();\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nannots = STNodeFactory.createNodeList(new ArrayList<>());\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_START, prevParamKind, leadingComma,\nnextTokenOffset, isParamNameOptional);\nif (solution.action == Action.KEEP) {\nannots = STNodeFactory.createEmptyNodeList();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameter(solution.tokenKind, prevParamKind, leadingComma, 0, isParamNameOptional);\n}\nreturn parseParamGivenAnnots(nextTokenKind, prevParamKind, leadingComma, annots, 1, isParamNameOptional);\n}\nprivate STNode parseParamGivenAnnots(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nint nextNextTokenOffset, boolean isFuncDef) {\nreturn parseParamGivenAnnots(peek().kind, prevParamKind, leadingComma, annots, nextNextTokenOffset, isFuncDef);\n}\nprivate STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nSTNode annots, int nextTokenOffset, boolean isParamNameOptional) {\nSTNode qualifier;\nswitch (nextTokenKind) {\ncase PUBLIC_KEYWORD:\nqualifier = parseQualifier();\nbreak;\ncase IDENTIFIER_TOKEN:\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\ncase AT_TOKEN:\ndefault:\nif (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, prevParamKind,\nleadingComma, annots, nextTokenOffset, isParamNameOptional);\nif (solution.action == Action.KEEP) {\nqualifier = STNodeFactory.createEmptyNode();\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParamGivenAnnots(solution.tokenKind, prevParamKind, leadingComma, annots, 0,\nisParamNameOptional);\n}\nreturn parseParamGivenAnnotsAndQualifier(prevParamKind, leadingComma, annots, qualifier, isParamNameOptional);\n}\nprivate STNode parseParamGivenAnnotsAndQualifier(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nSTNode qualifier, boolean isParamNameOptional) {\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode param = 
parseAfterParamType(prevParamKind, leadingComma, annots, qualifier, type, isParamNameOptional);\nendContext();\nreturn param;\n}\nprivate STNode parseAfterParamType(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,\nSTNode type, boolean isParamNameOptional) {\nSTToken token = peek();\nreturn parseAfterParamType(token.kind, prevParamKind, leadingComma, annots, qualifier, type,\nisParamNameOptional);\n}\nprivate STNode parseAfterParamType(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma,\nSTNode annots, STNode qualifier, STNode type, boolean isParamNameOptional) {\nSTNode paramName;\nswitch (tokenKind) {\ncase ELLIPSIS_TOKEN:\nswitchContext(ParserRuleContext.REST_PARAM);\nreportInvalidQualifier(qualifier);\nSTNode ellipsis = parseEllipsis();\nif (isParamNameOptional && peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {\nparamName = STNodeFactory.createEmptyNode();\n} else {\nparamName = parseVariableName();\n}\nreturn STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName);\ncase IDENTIFIER_TOKEN:\nparamName = parseVariableName();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\ncase EQUAL_TOKEN:\nif (!isParamNameOptional) {\nbreak;\n}\nparamName = STNodeFactory.createEmptyNode();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\ndefault:\nif (!isParamNameOptional) {\nbreak;\n}\nparamName = STNodeFactory.createEmptyNode();\nreturn parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, prevParamKind, leadingComma, annots,\nqualifier, type, isParamNameOptional);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseAfterParamType(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,\nisParamNameOptional);\n}\n/**\n* Parse ellipsis.\n*\n* @return Parsed node\n*/\nprivate STNode parseEllipsis() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ELLIPSIS_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ELLIPSIS);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse the right hand side of a required/defaultable parameter.\n*
\n* parameter-rhs := [= expression]\n*\n* @param leadingComma Comma that precedes this parameter\n* @param prevParamKind Kind of the parameter that precedes current parameter\n* @param annots Annotations attached to the parameter\n* @param qualifier Visibility qualifier\n* @param type Type descriptor\n* @param paramName Name of the parameter\n* @return Parsed parameter node\n*/\nprivate STNode parseParameterRhs(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,\nSTNode type, STNode paramName) {\nSTToken token = peek();\nreturn parseParameterRhs(token.kind, prevParamKind, leadingComma, annots, qualifier, type, paramName);\n}\nprivate STNode parseParameterRhs(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma, STNode annots,\nSTNode qualifier, STNode type, STNode paramName) {\nif (isEndOfParameter(tokenKind)) {\nreturn STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName);\n} else if (tokenKind == SyntaxKind.EQUAL_TOKEN) {\nif (prevParamKind == SyntaxKind.REQUIRED_PARAM) {\nswitchContext(ParserRuleContext.DEFAULTABLE_PARAM);\n}\nSTNode equal = parseAssignOp();\nSTNode expr = parseExpression();\nreturn STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal,\nexpr);\n} else {\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.PARAMETER_NAME_RHS, prevParamKind, leadingComma,\nannots, qualifier, type, paramName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseParameterRhs(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,\nparamName);\n}\n}\n/**\n* Parse comma.\n*\n* @return Parsed node\n*/\nprivate STNode parseComma() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COMMA_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMMA);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse return type descriptor of a function. A return type descriptor has the following structure.\n*\n* return-type-descriptor := [ returns annots type-descriptor ]\n*\n* @return Parsed node\n*/\nprivate STNode parseFuncReturnTypeDescriptor() {\nreturn parseFuncReturnTypeDescriptor(peek().kind);\n}\nprivate STNode parseFuncReturnTypeDescriptor(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase OPEN_BRACE_TOKEN:\ncase EQUAL_TOKEN:\nreturn STNodeFactory.createEmptyNode();\ncase RETURNS_KEYWORD:\nbreak;\ndefault:\nSTToken nextNextToken = getNextNextToken(nextTokenKind);\nif (nextNextToken.kind == SyntaxKind.RETURNS_KEYWORD) {\nbreak;\n}\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode returnsKeyword = parseReturnsKeyword();\nSTNode annot = parseAnnotations();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\nreturn STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n}\n/**\n* Parse 'returns' keyword.\n*\n* @return Return-keyword node\n*/\nprivate STNode parseReturnsKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETURNS_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETURNS_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse a type descriptor. A type descriptor has the following structure.\n* type-descriptor :=\n*  simple-type-descriptor\n*  | structured-type-descriptor\n*  | behavioral-type-descriptor\n*  | singleton-type-descriptor\n*  | union-type-descriptor\n*  | optional-type-descriptor\n*  | any-type-descriptor\n*  | anydata-type-descriptor\n*  | byte-type-descriptor\n*  | json-type-descriptor\n*  | type-descriptor-reference\n*  | ( type-descriptor )\n* type-descriptor-reference := qualified-identifier
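Several of these descriptors are suffix forms of a simpler type (T?, T[], T|U, T&U), which is why parseComplexTypeDescriptor keeps wrapping the type it has so far while the next token is one of those continuations. A string-based sketch of that wrap-while-suffix loop (hypothetical token stream; ignores recovery and the typed-binding-pattern special cases):

```java
// Hypothetical wrap-while-suffix loop for optional/array/union/intersection types.
import java.util.ArrayDeque;
import java.util.Deque;

public class TypeSuffixSketch {
    static String parseComplexType(String base, Deque<String> tokens) {
        while (!tokens.isEmpty()) {
            switch (tokens.peek()) {
                case "?"  -> { tokens.poll(); base = base + "?"; }                  // optional type
                case "[]" -> { tokens.poll(); base = base + "[]"; }                 // array type
                case "|"  -> { tokens.poll(); base = base + "|" + tokens.poll(); }  // union
                case "&"  -> { tokens.poll(); base = base + "&" + tokens.poll(); }  // intersection
                default   -> { return base; }                                       // no more suffixes
            }
        }
        return base;
    }

    public static void main(String[] args) {
        Deque<String> toks = new ArrayDeque<>(java.util.List.of("?", "[]", "|", "string"));
        System.out.println(parseComplexType("int", toks)); // int?[]|string
    }
}
```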
\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeDescriptor(ParserRuleContext context) {\nreturn parseTypeDescriptor(context, false, false);\n}\nprivate STNode parseTypeDescriptorInExpression(ParserRuleContext context, boolean isInConditionalExpr) {\nreturn parseTypeDescriptor(context, false, isInConditionalExpr);\n}\nprivate STNode parseTypeDescriptor(ParserRuleContext context, boolean isTypedBindingPattern,\nboolean isInConditionalExpr) {\nstartContext(context);\nSTNode typeDesc = parseTypeDescriptorInternal(context, isTypedBindingPattern, isInConditionalExpr);\nendContext();\nreturn typeDesc;\n}\nprivate STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isInConditionalExpr) {\nreturn parseTypeDescriptorInternal(context, false, isInConditionalExpr);\n}\nprivate STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isTypedBindingPattern,\nboolean isInConditionalExpr) {\nSTToken token = peek();\nSTNode typeDesc = parseTypeDescriptorInternal(token.kind, context, isInConditionalExpr);\nreturn parseComplexTypeDescriptor(typeDesc, context, isTypedBindingPattern);\n}\n/**\n* This will handle the parsing of optional,array,union type desc to infinite length.\n*\n* @param typeDesc\n*\n* @return Parsed type descriptor node\n*/\nprivate STNode parseComplexTypeDescriptor(STNode typeDesc, ParserRuleContext context,\nboolean isTypedBindingPattern) {\nSTToken nextToken = peek();\nswitch (nextToken.kind) {\ncase QUESTION_MARK_TOKEN:\nif (context == ParserRuleContext.TYPE_DESC_IN_EXPRESSION &&\n!isValidTypeContinuationToken(getNextNextToken(nextToken.kind)) &&\nisValidExprStart(getNextNextToken(nextToken.kind).kind)) {\nreturn typeDesc;\n}\nreturn parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc), context,\nisTypedBindingPattern);\ncase OPEN_BRACKET_TOKEN:\nif (isTypedBindingPattern) {\nreturn typeDesc;\n}\nreturn parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc), context, isTypedBindingPattern);\ncase PIPE_TOKEN:\nreturn parseUnionTypeDescriptor(typeDesc, context, isTypedBindingPattern);\ncase BITWISE_AND_TOKEN:\nreturn parseIntersectionTypeDescriptor(typeDesc, context, isTypedBindingPattern);\ndefault:\nreturn typeDesc;\n}\n}\nprivate boolean isValidTypeContinuationToken(STToken nextToken) {\nswitch (nextToken.kind) {\ncase QUESTION_MARK_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase PIPE_TOKEN:\ncase BITWISE_AND_TOKEN:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n*
\n* Parse a type descriptor, given the next token kind.\n*
\n* If the preceding token is ? then it is an optional type descriptor\n*\n* @param tokenKind Next token kind\n* @param context Current context\n* @param isInConditionalExpr\n* @return Parsed node\n*/\nprivate STNode parseTypeDescriptorInternal(SyntaxKind tokenKind, ParserRuleContext context,\nboolean isInConditionalExpr) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseTypeReference(isInConditionalExpr);\ncase RECORD_KEYWORD:\nreturn parseRecordTypeDescriptor();\ncase READONLY_KEYWORD:\nSTToken nextNextToken = getNextNextToken(tokenKind);\nSyntaxKind nextNextTokenKind = nextNextToken.kind;\nif (nextNextTokenKind != SyntaxKind.OBJECT_KEYWORD &&\nnextNextTokenKind != SyntaxKind.ABSTRACT_KEYWORD &&\nnextNextTokenKind != SyntaxKind.CLIENT_KEYWORD) {\nreturn parseSimpleTypeDescriptor();\n}\ncase OBJECT_KEYWORD:\ncase ABSTRACT_KEYWORD:\ncase CLIENT_KEYWORD:\nreturn parseObjectTypeDescriptor();\ncase OPEN_PAREN_TOKEN:\nreturn parseNilOrParenthesisedTypeDesc();\ncase MAP_KEYWORD:\ncase FUTURE_KEYWORD:\nreturn parseParameterizedTypeDescriptor();\ncase TYPEDESC_KEYWORD:\nreturn parseTypedescTypeDescriptor();\ncase ERROR_KEYWORD:\nreturn parseErrorTypeDescriptor();\ncase XML_KEYWORD:\nreturn parseXmlTypeDescriptor();\ncase STREAM_KEYWORD:\nreturn parseStreamTypeDescriptor();\ncase TABLE_KEYWORD:\nreturn parseTableTypeDescriptor();\ncase FUNCTION_KEYWORD:\nreturn parseFunctionTypeDesc();\ncase OPEN_BRACKET_TOKEN:\nreturn parseTupleTypeDesc();\ncase DISTINCT_KEYWORD:\nreturn parseDistinctTypeDesc(context);\ndefault:\nif (isSingletonTypeDescStart(tokenKind, true)) {\nreturn parseSingletonTypeDesc();\n}\nif (isSimpleType(tokenKind)) {\nreturn parseSimpleTypeDescriptor();\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR, context, isInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseTypeDescriptorInternal(solution.tokenKind, context, isInConditionalExpr);\n}\n}\n/**\n* Parse distinct type descriptor.\n*
\n* \n* distinct-type-descriptor := distinct type-descriptor\n* \n*\n* @param context Context in which the type desc is used.\n* @return Distinct type descriptor\n*/\nprivate STNode parseDistinctTypeDesc(ParserRuleContext context) {\nSTNode distinctKeyword = parseDistinctKeyword();\nSTNode typeDesc = parseTypeDescriptor(context);\nreturn STNodeFactory.createDistinctTypeDescriptorNode(distinctKeyword, typeDesc);\n}\nprivate STNode parseDistinctKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.DISTINCT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.DISTINCT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseNilOrParenthesisedTypeDesc() {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nreturn parseNilOrParenthesisedTypeDescRhs(openParen);\n}\nprivate STNode parseNilOrParenthesisedTypeDescRhs(STNode openParen) {\nreturn parseNilOrParenthesisedTypeDescRhs(peek().kind, openParen);\n}\nprivate STNode parseNilOrParenthesisedTypeDescRhs(SyntaxKind nextTokenKind, STNode openParen) {\nSTNode closeParen;\nswitch (nextTokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncloseParen = parseCloseParenthesis();\nreturn STNodeFactory.createNilTypeDescriptorNode(openParen, closeParen);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nSTNode typedesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_PARENTHESIS);\ncloseParen = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typedesc, closeParen);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.NIL_OR_PARENTHESISED_TYPE_DESC_RHS, openParen);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseNilOrParenthesisedTypeDescRhs(solution.tokenKind, openParen);\n}\n}\n/**\n* Parse simple type descriptor.\n*\n* @return Parsed node\n*/\nprivate STNode parseSimpleTypeDescriptor() {\nSTToken node = peek();\nif (isSimpleType(node.kind)) {\nSTToken token = consume();\nreturn createBuiltinSimpleNameReference(token);\n} else {\nSolution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR);\nSTNode recoveredNode = sol.recoveredNode;\nreturn createBuiltinSimpleNameReference(recoveredNode);\n}\n}\nprivate STNode createBuiltinSimpleNameReference(STNode token) {\nSyntaxKind typeKind = getTypeSyntaxKind(token.kind);\nreturn STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token);\n}\n/**\n*
\n* Parse function body. A function body has the following structure.\n*
\n* \n* function-body := function-body-block | external-function-body\n* external-function-body := = annots external ;\n* function-body-block := { [default-worker-init, named-worker-decl+] default-worker }\n* \n*\n* @param isObjectMethod Flag indicating whether this is an object-method\n* @return Parsed node\n*/\nprivate STNode parseFunctionBody(boolean isObjectMethod) {\nSTToken token = peek();\nreturn parseFunctionBody(token.kind, isObjectMethod);\n}\n/**\n* Parse function body, given the next token kind.\n*\n* @param tokenKind Next token kind\n* @param isObjectMethod Flag indicating whether this is an object-method\n* @return Parsed node\n*/\nprotected STNode parseFunctionBody(SyntaxKind tokenKind, boolean isObjectMethod) {\nswitch (tokenKind) {\ncase EQUAL_TOKEN:\nreturn parseExternalFunctionBody();\ncase OPEN_BRACE_TOKEN:\nreturn parseFunctionBodyBlock(false);\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn parseExpressionFuncBody(false, false);\ncase SEMICOLON_TOKEN:\nif (isObjectMethod) {\nreturn parseSemicolon();\n}\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FUNC_BODY, isObjectMethod);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.tokenKind == SyntaxKind.NONE) {\nreturn STNodeFactory.createMissingToken(solution.tokenKind);\n}\nreturn parseFunctionBody(solution.tokenKind, isObjectMethod);\n}\n}\n/**\n*
\n* Parse function body block. A function body block has the following structure.\n* function-body-block := { [default-worker-init, named-worker-decl+] default-worker }\n* default-worker-init := sequence-stmt\n* default-worker := sequence-stmt\n* named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }\n* worker-name := identifier\n*
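This grammar forces a three-phase body layout: worker-init statements, then named workers, then the default worker's statements, and parseFunctionBodyBlock tracks the current phase and only ever moves it forward. A small sketch of that forward-only phase machine over a pre-tokenized statement list (hypothetical Stmt record; the real code flags a late worker declaration as invalid rather than keeping it):

```java
// Forward-only phase machine: worker-init stmts, named workers, default-worker stmts.
import java.util.ArrayList;
import java.util.List;

public class BodyPhasesSketch {
    record Stmt(String text, boolean isWorkerDecl) {}

    static void partition(List<Stmt> body, List<Stmt> init, List<Stmt> workers, List<Stmt> rest) {
        int phase = 0; // 0 = worker-init, 1 = named workers, 2 = default worker
        for (Stmt s : body) {
            if (phase == 0 && s.isWorkerDecl()) phase = 1;
            else if (phase == 1 && !s.isWorkerDecl()) phase = 2;
            switch (phase) {
                case 0 -> init.add(s);
                case 1 -> workers.add(s);
                default -> rest.add(s); // a worker decl here is an error in the real parser
            }
        }
    }

    public static void main(String[] args) {
        List<Stmt> init = new ArrayList<>(), workers = new ArrayList<>(), rest = new ArrayList<>();
        partition(List.of(new Stmt("int x = 1;", false), new Stmt("worker w { }", true),
                new Stmt("return x;", false)), init, workers, rest);
        System.out.println(init.size() + " " + workers.size() + " " + rest.size()); // 1 1 1
    }
}
```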
\n*\n* @param isAnonFunc Flag indicating whether the func body belongs to an anonymous function\n* @return Parsed node\n*/\nprivate STNode parseFunctionBodyBlock(boolean isAnonFunc) {\nstartContext(ParserRuleContext.FUNC_BODY_BLOCK);\nSTNode openBrace = parseOpenBrace();\nSTToken token = peek();\nArrayList firstStmtList = new ArrayList<>();\nArrayList workers = new ArrayList<>();\nArrayList secondStmtList = new ArrayList<>();\nParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT;\nboolean hasNamedWorkers = false;\nwhile (!isEndOfFuncBodyBlock(token.kind, isAnonFunc)) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nswitch (currentCtx) {\ncase DEFAULT_WORKER_INIT:\nif (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) {\nfirstStmtList.add(stmt);\nbreak;\n}\ncurrentCtx = ParserRuleContext.NAMED_WORKERS;\nhasNamedWorkers = true;\ncase NAMED_WORKERS:\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\nworkers.add(stmt);\nbreak;\n}\ncurrentCtx = ParserRuleContext.DEFAULT_WORKER;\ncase DEFAULT_WORKER:\ndefault:\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\nupdateLastNodeInListWithInvalidNode(secondStmtList, stmt,\nDiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE);\nbreak;\n}\nsecondStmtList.add(stmt);\nbreak;\n}\ntoken = peek();\n}\nSTNode namedWorkersList;\nSTNode statements;\nif (hasNamedWorkers) {\nSTNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList);\nSTNode namedWorkers = STNodeFactory.createNodeList(workers);\nnamedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers);\nstatements = STNodeFactory.createNodeList(secondStmtList);\n} else {\nnamedWorkersList = STNodeFactory.createEmptyNode();\nstatements = STNodeFactory.createNodeList(firstStmtList);\n}\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace);\n}\nprivate boolean isEndOfFuncBodyBlock(SyntaxKind nextTokenKind, boolean isAnonFunc) {\nif (isAnonFunc) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase PUBLIC_KEYWORD:\ncase EOF_TOKEN:\ncase EQUAL_TOKEN:\ncase BACKTICK_TOKEN:\nreturn true;\ndefault:\nbreak;\n}\n}\nreturn isEndOfStatements();\n}\nprivate boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase TYPE_KEYWORD:\ncase PUBLIC_KEYWORD:\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\nprivate boolean isEndOfObjectTypeNode() {\nreturn endOfModuleLevelNode(1, true);\n}\nprivate boolean isEndOfStatements() {\nswitch (peek().kind) {\ncase RESOURCE_KEYWORD:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\nprivate boolean endOfModuleLevelNode(int peekIndex) {\nreturn endOfModuleLevelNode(peekIndex, false);\n}\nprivate boolean endOfModuleLevelNode(int peekIndex, boolean isObject) {\nswitch (peek(peekIndex).kind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\ncase IMPORT_KEYWORD:\ncase CONST_KEYWORD:\ncase ANNOTATION_KEYWORD:\ncase LISTENER_KEYWORD:\nreturn true;\ncase SERVICE_KEYWORD:\nreturn isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1);\ncase PUBLIC_KEYWORD:\nreturn endOfModuleLevelNode(peekIndex + 1, isObject);\ncase FUNCTION_KEYWORD:\nif (isObject) {\nreturn false;\n}\nreturn peek(peekIndex + 1).kind == SyntaxKind.IDENTIFIER_TOKEN;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the given token is 
an end of a parameter.\n*\n* @param tokenKind Next token kind\n* @return true if the token represents an end of a parameter. false otherwise\n*/\nprivate boolean isEndOfParameter(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase AT_TOKEN:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\n/**\n* Check whether the given token is an end of a parameter-list.\n*\n* @param tokenKind Next token kind\n* @return true if the token represents an end of a parameter-list. false otherwise\n*/\nprivate boolean isEndOfParametersList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_PAREN_TOKEN:\ncase SEMICOLON_TOKEN:\ncase RETURNS_KEYWORD:\ncase TYPE_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\nreturn true;\ndefault:\nreturn endOfModuleLevelNode(1);\n}\n}\n/**\n* Parse type reference or variable reference.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatementStartIdentifier() {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\n}\n/**\n* Parse variable name.\n*\n* @return Parsed node\n*/\nprivate STNode parseVariableName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse open brace.\n*\n* @return Parsed node\n*/\nprivate STNode parseOpenBrace() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPEN_BRACE);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse close brace.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseBrace() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_BRACE);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse external function body. An external function body has the following structure.\n*
\n* \n* external-function-body := = annots external ;\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseExternalFunctionBody() {\nstartContext(ParserRuleContext.EXTERNAL_FUNC_BODY);\nSTNode assign = parseAssignOp();\nSTNode annotation = parseAnnotations();\nSTNode externalKeyword = parseExternalKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon);\n}\n/**\n* Parse semicolon.\n*\n* @return Parsed node\n*/\nprivate STNode parseSemicolon() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SEMICOLON_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SEMICOLON);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse external keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseExternalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.EXTERNAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/*\n* Operators\n*/\n/**\n* Parse assign operator.\n*\n* @return Parsed node\n*/\nprivate STNode parseAssignOp() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.EQUAL_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ASSIGN_OP);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse binary operator.\n*\n* @return Parsed node\n*/\nprivate STNode parseBinaryOperator() {\nSTToken token = peek();\nif (isBinaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BINARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a binary operator.\n*\n* @param kind STToken kind\n* @return true if the token kind refers to a binary operator. 
false otherwise\n*/\nprivate boolean isBinaryOperator(SyntaxKind kind) {\nswitch (kind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase SLASH_TOKEN:\ncase ASTERISK_TOKEN:\ncase GT_TOKEN:\ncase LT_TOKEN:\ncase DOUBLE_EQUAL_TOKEN:\ncase TRIPPLE_EQUAL_TOKEN:\ncase LT_EQUAL_TOKEN:\ncase GT_EQUAL_TOKEN:\ncase NOT_EQUAL_TOKEN:\ncase NOT_DOUBLE_EQUAL_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase BITWISE_XOR_TOKEN:\ncase PIPE_TOKEN:\ncase LOGICAL_AND_TOKEN:\ncase LOGICAL_OR_TOKEN:\ncase PERCENT_TOKEN:\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\ncase ELLIPSIS_TOKEN:\ncase DOUBLE_DOT_LT_TOKEN:\ncase ELVIS_TOKEN:\ncase EQUALS_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Get the precedence of a given operator.\n*\n* @param binaryOpKind Operator kind\n* @return Precedence of the given operator\n*/\nprivate OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) {\nswitch (binaryOpKind) {\ncase ASTERISK_TOKEN:\ncase SLASH_TOKEN:\ncase PERCENT_TOKEN:\nreturn OperatorPrecedence.MULTIPLICATIVE;\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\nreturn OperatorPrecedence.ADDITIVE;\ncase GT_TOKEN:\ncase LT_TOKEN:\ncase GT_EQUAL_TOKEN:\ncase LT_EQUAL_TOKEN:\ncase IS_KEYWORD:\nreturn OperatorPrecedence.BINARY_COMPARE;\ncase DOT_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase ANNOT_CHAINING_TOKEN:\ncase OPTIONAL_CHAINING_TOKEN:\ncase DOT_LT_TOKEN:\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nreturn OperatorPrecedence.MEMBER_ACCESS;\ncase DOUBLE_EQUAL_TOKEN:\ncase TRIPPLE_EQUAL_TOKEN:\ncase NOT_EQUAL_TOKEN:\ncase NOT_DOUBLE_EQUAL_TOKEN:\ncase EQUALS_KEYWORD:\nreturn OperatorPrecedence.EQUALITY;\ncase BITWISE_AND_TOKEN:\nreturn OperatorPrecedence.BITWISE_AND;\ncase BITWISE_XOR_TOKEN:\nreturn OperatorPrecedence.BITWISE_XOR;\ncase PIPE_TOKEN:\nreturn OperatorPrecedence.BITWISE_OR;\ncase LOGICAL_AND_TOKEN:\nreturn OperatorPrecedence.LOGICAL_AND;\ncase LOGICAL_OR_TOKEN:\nreturn OperatorPrecedence.LOGICAL_OR;\ncase RIGHT_ARROW_TOKEN:\nreturn OperatorPrecedence.REMOTE_CALL_ACTION;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn OperatorPrecedence.ANON_FUNC_OR_LET;\ncase SYNC_SEND_TOKEN:\nreturn OperatorPrecedence.ACTION;\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\nreturn OperatorPrecedence.SHIFT;\ncase ELLIPSIS_TOKEN:\ncase DOUBLE_DOT_LT_TOKEN:\nreturn OperatorPrecedence.RANGE;\ncase ELVIS_TOKEN:\nreturn OperatorPrecedence.ELVIS_CONDITIONAL;\ncase QUESTION_MARK_TOKEN:\ncase COLON_TOKEN:\nreturn OperatorPrecedence.CONDITIONAL;\ndefault:\nthrow new UnsupportedOperationException(\"Unsupported binary operator '\" + binaryOpKind + \"'\");\n}\n}\n/**\n*
\n* Get the operator kind to insert during recovery, given the precedence level.\n*
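\n* Illustrative note (not part of the original javadoc): the inserted token mirrors the switch\n* below, e.g. ASTERISK_TOKEN for a missing MULTIPLICATIVE-level operator and PLUS_TOKEN for a\n* missing ADDITIVE-level one, so the recovered tree keeps the expected precedence.\n*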
\n*\n* @param opPrecedenceLevel Precedence of the given operator\n* @return Kind of the operator to insert\n*/\nprivate SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) {\nswitch (opPrecedenceLevel) {\ncase DEFAULT:\ncase UNARY:\ncase ACTION:\ncase EXPRESSION_ACTION:\ncase REMOTE_CALL_ACTION:\ncase ANON_FUNC_OR_LET:\ncase QUERY:\ncase MULTIPLICATIVE:\nreturn SyntaxKind.ASTERISK_TOKEN;\ncase ADDITIVE:\nreturn SyntaxKind.PLUS_TOKEN;\ncase SHIFT:\nreturn SyntaxKind.DOUBLE_LT_TOKEN;\ncase RANGE:\nreturn SyntaxKind.ELLIPSIS_TOKEN;\ncase BINARY_COMPARE:\nreturn SyntaxKind.LT_TOKEN;\ncase EQUALITY:\nreturn SyntaxKind.DOUBLE_EQUAL_TOKEN;\ncase BITWISE_AND:\nreturn SyntaxKind.BITWISE_AND_TOKEN;\ncase BITWISE_XOR:\nreturn SyntaxKind.BITWISE_XOR_TOKEN;\ncase BITWISE_OR:\nreturn SyntaxKind.PIPE_TOKEN;\ncase LOGICAL_AND:\nreturn SyntaxKind.LOGICAL_AND_TOKEN;\ncase LOGICAL_OR:\nreturn SyntaxKind.LOGICAL_OR_TOKEN;\ncase ELVIS_CONDITIONAL:\nreturn SyntaxKind.ELVIS_TOKEN;\ndefault:\nthrow new UnsupportedOperationException(\n\"Unsupported operator precedence level'\" + opPrecedenceLevel + \"'\");\n}\n}\n/**\n*
\n* Parse a module type definition.\n*
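\n* Illustrative example (assumed input, not from the source): {@code type Person record { string name; };}\n* matches the module-type-defn production below.\n*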
\n* module-type-defn := metadata [public] type identifier type-descriptor ;\n*\n* @param metadata Metadata\n* @param qualifier Visibility qualifier\n* @return Parsed node\n*/\nprivate STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) {\nstartContext(ParserRuleContext.MODULE_TYPE_DEFINITION);\nSTNode typeKeyword = parseTypeKeyword();\nSTNode typeName = parseTypeName();\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor,\nsemicolon);\n}\n/**\n* Parse type keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.TYPE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse type name.\n*\n* @return Parsed node\n*/\nprivate STNode parseTypeName() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.TYPE_NAME);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse record type descriptor. A record type descriptor body has the following structure.\n*
\n*\n* record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor\n*
inclusive-record-type-descriptor := record { field-descriptor* }\n*
exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |}\n*
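\n* Illustrative examples (assumed inputs, not from the source): {@code record { int x; }} is\n* inclusive, while {@code record {| int x; |}} is exclusive and may end with a record-rest-descriptor.\n*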
\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordTypeDescriptor() {\nstartContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR);\nSTNode recordKeyword = parseRecordKeyword();\nSTNode bodyStartDelimiter = parseRecordBodyStartDelimiter();\nboolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN;\nSTNode fields = parseFieldDescriptors(isInclusive);\nSTNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind);\nendContext();\nreturn STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields,\nbodyEndDelimiter);\n}\n/**\n* Parse record body start delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordBodyStartDelimiter() {\nSTToken token = peek();\nreturn parseRecordBodyStartDelimiter(token.kind);\n}\nprivate STNode parseRecordBodyStartDelimiter(SyntaxKind kind) {\nswitch (kind) {\ncase OPEN_BRACE_PIPE_TOKEN:\nreturn parseClosedRecordBodyStart();\ncase OPEN_BRACE_TOKEN:\nreturn parseOpenBrace();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_BODY_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRecordBodyStartDelimiter(solution.tokenKind);\n}\n}\n/**\n* Parse closed-record body start delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseClosedRecordBodyStart() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse record body close delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) {\nswitch (startingDelimeter) {\ncase OPEN_BRACE_PIPE_TOKEN:\nreturn parseClosedRecordBodyEnd();\ncase OPEN_BRACE_TOKEN:\nreturn parseCloseBrace();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_BODY_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseRecordBodyCloseDelimiter(solution.tokenKind);\n}\n}\n/**\n* Parse closed-record body end delimiter.\n*\n* @return Parsed node\n*/\nprivate STNode parseClosedRecordBodyEnd() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse record keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseRecordKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RECORD_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RECORD_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse field descriptors.\n*
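\n* Illustrative note (inferred from the loop below): once a record-rest-descriptor is parsed,\n* any further fields are attached as invalid nodes with\n* DiagnosticErrorCode.ERROR_MORE_RECORD_FIELDS_AFTER_REST_FIELD, i.e. the rest descriptor must be last.\n*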
\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptors(boolean isInclusive) {\nArrayList recordFields = new ArrayList<>();\nSTToken token = peek();\nboolean endOfFields = false;\nwhile (!isEndOfRecordTypeNode(token.kind)) {\nSTNode field = parseFieldOrRestDescriptor(isInclusive);\nif (field == null) {\nendOfFields = true;\nbreak;\n}\nrecordFields.add(field);\ntoken = peek();\nif (field.kind == SyntaxKind.RECORD_REST_TYPE) {\nbreak;\n}\n}\nwhile (!endOfFields && !isEndOfRecordTypeNode(token.kind)) {\nSTNode invalidField = parseFieldOrRestDescriptor(isInclusive);\nupdateLastNodeInListWithInvalidNode(recordFields, invalidField,\nDiagnosticErrorCode.ERROR_MORE_RECORD_FIELDS_AFTER_REST_FIELD);\ntoken = peek();\n}\nreturn STNodeFactory.createNodeList(recordFields);\n}\n/**\n*
\n* Parse field descriptor or rest descriptor.\n*
\n*\n* \n*
field-descriptor := individual-field-descriptor | record-type-reference\n*
individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ;\n*
field-name := identifier\n*
default-value := = expression\n*
record-type-reference := * type-reference ;\n*
record-rest-descriptor := type-descriptor ... ;\n*
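\n* Illustrative examples (assumed inputs, not from the source): {@code int age;} is a required\n* field, {@code int age?;} an optional field, {@code int age = 18;} a defaulted field,\n* {@code *Person;} a type reference, and {@code anydata...;} a rest descriptor.\n*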
\n*\n* @return Parsed node\n*/\nprivate STNode parseFieldOrRestDescriptor(boolean isInclusive) {\nreturn parseFieldOrRestDescriptor(peek().kind, isInclusive);\n}\nprivate STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) {\nswitch (nextTokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\nstartContext(ParserRuleContext.RECORD_FIELD);\nSTNode asterisk = consume();\nSTNode type = parseTypeReference();\nSTNode semicolonToken = parseSemicolon();\nendContext();\nreturn STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nstartContext(ParserRuleContext.RECORD_FIELD);\nSTNode metadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nreturn parseRecordField(nextTokenKind, isInclusive, metadata);\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nstartContext(ParserRuleContext.RECORD_FIELD);\nmetadata = createEmptyMetadata();\nreturn parseRecordField(nextTokenKind, isInclusive, metadata);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldOrRestDescriptor(solution.tokenKind, isInclusive);\n}\n}\nprivate STNode parseRecordField(SyntaxKind nextTokenKind, boolean isInclusive, STNode metadata) {\nif (nextTokenKind != SyntaxKind.READONLY_KEYWORD) {\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\nSTNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, metadata, type);\nendContext();\nreturn fieldOrRestDesc;\n}\nSTNode type;\nSTNode fieldOrRestDesc;\nSTNode readOnlyQualifier;\nreadOnlyQualifier = parseReadonlyKeyword();\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode fieldNameOrTypeDesc = parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME);\nif (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\ntype = fieldNameOrTypeDesc;\n} else {\nnextToken = peek();\nswitch (nextToken.kind) {\ncase SEMICOLON_TOKEN:\ncase EQUAL_TOKEN:\ntype = createBuiltinSimpleNameReference(readOnlyQualifier);\nreadOnlyQualifier = STNodeFactory.createEmptyNode();\nSTNode fieldName = ((STSimpleNameReferenceNode) fieldNameOrTypeDesc).name;\nreturn parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName);\ndefault:\ntype = fieldNameOrTypeDesc;\nbreak;\n}\n}\n} else if (isTypeStartingToken(nextToken.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\n} else {\nreadOnlyQualifier = createBuiltinSimpleNameReference(readOnlyQualifier);\ntype = parseComplexTypeDescriptor(readOnlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);\nreadOnlyQualifier = STNodeFactory.createEmptyNode();\n}\nfieldOrRestDesc = parseIndividualRecordField(metadata, readOnlyQualifier, type);\nendContext();\nreturn fieldOrRestDesc;\n}\nprivate STNode parseFieldDescriptor(boolean isInclusive, STNode metadata, STNode type) {\nif (isInclusive) {\nSTNode readOnlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseIndividualRecordField(metadata, readOnlyQualifier, type);\n} else {\nreturn parseFieldOrRestDescriptorRhs(metadata, type);\n}\n}\nprivate STNode parseIndividualRecordField(STNode metadata, STNode readOnlyQualifier, STNode type) {\nSTNode fieldName = parseVariableName();\nreturn parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName);\n}\n/**\n* 
Parse type reference.\n* type-reference := identifier | qualified-identifier\n*\n* @return Type reference node\n*/\nprivate STNode parseTypeReference() {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE, false);\n}\nprivate STNode parseTypeReference(boolean isInConditionalExpr) {\nreturn parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE, isInConditionalExpr);\n}\n/**\n* Parse identifier or qualified identifier.\n*\n* @return Identifier node\n*/\nprivate STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) {\nreturn parseQualifiedIdentifier(currentCtx, false);\n}\nprivate STNode parseQualifiedIdentifier(ParserRuleContext currentCtx, boolean isInConditionalExpr) {\nSTToken token = peek();\nSTNode typeRefOrPkgRef;\nif (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\ntypeRefOrPkgRef = consume();\n} else {\nSolution sol = recover(token, currentCtx, isInConditionalExpr);\nif (sol.action == Action.REMOVE) {\nreturn sol.recoveredNode;\n}\nif (sol.tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseQualifiedIdentifier(currentCtx, isInConditionalExpr);\n}\ntypeRefOrPkgRef = sol.recoveredNode;\n}\nreturn parseQualifiedIdentifier(typeRefOrPkgRef, isInConditionalExpr);\n}\n/**\n* Parse identifier or qualified identifier, given the starting identifier.\n*\n* @param identifier Starting identifier\n* @return Parse node\n*/\nprivate STNode parseQualifiedIdentifier(STNode identifier, boolean isInConditionalExpr) {\nSTToken nextToken = peek(1);\nif (nextToken.kind != SyntaxKind.COLON_TOKEN) {\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\nSTToken nextNextToken = peek(2);\nswitch (nextNextToken.kind) {\ncase IDENTIFIER_TOKEN:\nSTToken colon = consume();\nSTNode varOrFuncName = consume();\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);\ncase MAP_KEYWORD:\ncolon = consume();\nSTToken mapKeyword = consume();\nSTNode refName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, refName);\ncase COLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseQualifiedIdentifier(identifier, isInConditionalExpr);\ndefault:\nif (isInConditionalExpr) {\nreturn STNodeFactory.createSimpleNameReferenceNode(identifier);\n}\ncolon = consume();\nvarOrFuncName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\nDiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);\nreturn STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);\n}\n}\n/**\n* Parse RHS of a field or rest type descriptor.\n*\n* @param metadata Metadata\n* @param type Type descriptor\n* @return Parsed node\n*/\nprivate STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) {\nSTToken token = peek();\nreturn parseFieldOrRestDescriptorRhs(token.kind, metadata, type);\n}\nprivate STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) {\nswitch (kind) {\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken);\ncase IDENTIFIER_TOKEN:\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseIndividualRecordField(metadata, readonlyQualifier, type);\ndefault:\nSTToken token = 
peek();\nSolution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type);\n}\n}\n/**\n*
\n* Parse field descriptor rhs.\n*
\n*\n* @param metadata Metadata\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptorRhs(STNode metadata, STNode readonlyQualifier, STNode type, STNode fieldName) {\nSTToken token = peek();\nreturn parseFieldDescriptorRhs(token.kind, metadata, readonlyQualifier, type, fieldName);\n}\n/**\n*
\n* Parse field descriptor rhs.\n*
\n*\n* \n* field-descriptor := [? | default-value] ;\n*
default-value := = expression\n*
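\n* Illustrative note (inferred from the switch below): a bare {@code ;} keeps the field required,\n* {@code ?;} makes it optional, and {@code = expression ;} attaches a default value.\n*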
\n*\n* @param kind Kind of the next token\n* @param metadata Metadata\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed node\n*/\nprivate STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode readonlyQualifier, STNode type,\nSTNode fieldName) {\nswitch (kind) {\ncase SEMICOLON_TOKEN:\nSTNode questionMarkToken = STNodeFactory.createEmptyNode();\nSTNode semicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName,\nquestionMarkToken, semicolonToken);\ncase QUESTION_MARK_TOKEN:\nquestionMarkToken = parseQuestionMark();\nsemicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName,\nquestionMarkToken, semicolonToken);\ncase EQUAL_TOKEN:\nSTNode equalsToken = parseAssignOp();\nSTNode expression = parseExpression();\nsemicolonToken = parseSemicolon();\nreturn STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, readonlyQualifier, type, fieldName,\nequalsToken, expression, semicolonToken);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, readonlyQualifier,\ntype, fieldName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseFieldDescriptorRhs(solution.tokenKind, metadata, readonlyQualifier, type, fieldName);\n}\n}\n/**\n* Parse question mark.\n*\n* @return Parsed node\n*/\nprivate STNode parseQuestionMark() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.QUESTION_MARK);\nreturn sol.recoveredNode;\n}\n}\n/*\n* Statements\n*/\n/**\n* Parse statements, until an end of a block is reached.\n*\n* @return Parsed node\n*/\nprivate STNode parseStatements() {\nArrayList stmts = new ArrayList<>();\nreturn parseStatements(stmts);\n}\nprivate STNode parseStatements(ArrayList stmts) {\nwhile (!isEndOfStatements()) {\nSTNode stmt = parseStatement();\nif (stmt == null) {\nbreak;\n}\nif (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {\naddInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE);\nbreak;\n}\nstmts.add(stmt);\n}\nreturn STNodeFactory.createNodeList(stmts);\n}\n/**\n* Parse a single statement.\n*\n* @return Parsed node\n*/\nprotected STNode parseStatement() {\nSTToken token = peek();\nreturn parseStatement(token.kind, 1);\n}\nprivate STNode parseStatement(SyntaxKind tokenKind, int nextTokenIndex) {\nSTNode annots = null;\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase SEMICOLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseStatement();\ncase AT_TOKEN:\nannots = parseAnnotations(tokenKind);\ntokenKind = peek().kind;\nbreak;\ncase FINAL_KEYWORD:\ncase IF_KEYWORD:\ncase WHILE_KEYWORD:\ncase PANIC_KEYWORD:\ncase CONTINUE_KEYWORD:\ncase BREAK_KEYWORD:\ncase RETURN_KEYWORD:\ncase TYPE_KEYWORD:\ncase LOCK_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase FORK_KEYWORD:\ncase FOREACH_KEYWORD:\ncase XMLNS_KEYWORD:\ncase TRANSACTION_KEYWORD:\ncase RETRY_KEYWORD:\ncase ROLLBACK_KEYWORD:\ncase MATCH_KEYWORD:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase TRAP_KEYWORD:\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase COMMIT_KEYWORD:\ncase WORKER_KEYWORD:\nbreak;\ndefault:\nif (isTypeStartingToken(tokenKind)) {\nbreak;\n}\nif (isValidExpressionStart(tokenKind, nextTokenIndex)) 
{\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STATEMENT, nextTokenIndex);\nif (solution.action == Action.KEEP) {\nbreak;\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatement(solution.tokenKind, nextTokenIndex);\n}\nreturn parseStatement(tokenKind, annots, nextTokenIndex);\n}\nprivate STNode getAnnotations(STNode nullbaleAnnot) {\nif (nullbaleAnnot != null) {\nreturn nullbaleAnnot;\n}\nreturn STNodeFactory.createEmptyNodeList();\n}\nprivate STNode parseStatement(STNode annots) {\nreturn parseStatement(peek().kind, annots, 1);\n}\n/**\n* Parse a single statement, given the next token kind.\n*\n* @param tokenKind Next token kind\n* @return Parsed node\n*/\nprivate STNode parseStatement(SyntaxKind tokenKind, STNode annots, int nextTokenIndex) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\naddInvalidNodeToNextToken(annots, DiagnosticErrorCode.ERROR_INVALID_ANNOTATIONS);\nreturn null;\ncase SEMICOLON_TOKEN:\naddInvalidTokenToNextToken(errorHandler.consumeInvalidToken());\nreturn parseStatement(annots);\ncase FINAL_KEYWORD:\nSTNode finalKeyword = parseFinalKeyword();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword, false);\ncase IF_KEYWORD:\nreturn parseIfElseBlock();\ncase WHILE_KEYWORD:\nreturn parseWhileStatement();\ncase PANIC_KEYWORD:\nreturn parsePanicStatement();\ncase CONTINUE_KEYWORD:\nreturn parseContinueStatement();\ncase BREAK_KEYWORD:\nreturn parseBreakStatement();\ncase RETURN_KEYWORD:\nreturn parseReturnStatement();\ncase TYPE_KEYWORD:\nreturn parseLocalTypeDefinitionStatement(getAnnotations(annots));\ncase LOCK_KEYWORD:\nreturn parseLockStatement();\ncase OPEN_BRACE_TOKEN:\nreturn parseStatementStartsWithOpenBrace();\ncase WORKER_KEYWORD:\nreturn parseNamedWorkerDeclaration(getAnnotations(annots));\ncase FORK_KEYWORD:\nreturn parseForkStatement();\ncase FOREACH_KEYWORD:\nreturn parseForEachStatement();\ncase START_KEYWORD:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase TRAP_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase FROM_KEYWORD:\ncase COMMIT_KEYWORD:\nreturn parseExpressionStatement(tokenKind, getAnnotations(annots));\ncase XMLNS_KEYWORD:\nreturn parseXMLNamespaceDeclaration(false);\ncase TRANSACTION_KEYWORD:\nreturn parseTransactionStatement();\ncase RETRY_KEYWORD:\nreturn parseRetryStatement();\ncase ROLLBACK_KEYWORD:\nreturn parseRollbackStatement();\ncase OPEN_BRACKET_TOKEN:\nreturn parseStatementStartsWithOpenBracket(getAnnotations(annots), false);\ncase FUNCTION_KEYWORD:\ncase OPEN_PAREN_TOKEN:\ncase IDENTIFIER_TOKEN:\ncase NIL_LITERAL:\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase STRING_KEYWORD:\ncase XML_KEYWORD:\nreturn parseStmtStartsWithTypeOrExpr(tokenKind, getAnnotations(annots));\ncase MATCH_KEYWORD:\nreturn parseMatchStatement();\ndefault:\nif (isValidExpressionStart(tokenKind, nextTokenIndex)) {\nreturn parseStatementStartWithExpr(getAnnotations(annots));\n}\nif (isTypeStartingToken(tokenKind)) {\nfinalKeyword = STNodeFactory.createEmptyNode();\nreturn parseVariableDecl(getAnnotations(annots), finalKeyword, false);\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots, nextTokenIndex);\nif (solution.action == Action.KEEP) {\nfinalKeyword = STNodeFactory.createEmptyNode();\nreturn 
parseVariableDecl(getAnnotations(annots), finalKeyword, false);\n}\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseStatement(solution.tokenKind, annots, nextTokenIndex - 1);\n}\n}\n/**\n*
\n* Parse variable declaration. Variable declaration can be local or module level.\n*
\n*\n* \n* local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt\n*
\n* local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ;\n*
\n* local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ;\n*
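\n* Illustrative examples (assumed inputs, not from the source): {@code final int count = 0;} is an\n* init declaration and {@code string name;} a no-init declaration.\n*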
\n*\n* @param annots Annotations or metadata\n* @param finalKeyword Final keyword\n* @return Parsed node\n*/\nprivate STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) {\nstartContext(ParserRuleContext.VAR_DECL_STMT);\nSTNode typeBindingPattern = parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);\nreturn parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, isModuleVar);\n}\n/**\n* Parse final keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseFinalKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.FINAL_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.FINAL_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse the right hand side of a variable declaration statement.\n*
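\n* Illustrative note (inferred from the switch below): {@code =} leads to an initializer expression\n* terminated by {@code ;}, while a bare {@code ;} leaves the declaration uninitialized.\n*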
\n* \n* var-decl-rhs := ; | = action-or-expr ;\n* \n*\n* @param metadata metadata\n* @param finalKeyword Final keyword\n* @param typedBindingPattern Typed binding pattern\n* @return Parsed node\n*/\nprivate STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode typedBindingPattern,\nboolean isModuleVar) {\nSTToken token = peek();\nreturn parseVarDeclRhs(token.kind, metadata, finalKeyword, typedBindingPattern, isModuleVar);\n}\n/**\n* Parse the right hand side of a variable declaration statement, given the\n* next token kind.\n*\n* @param tokenKind Next token kind\n* @param metadata Metadata\n* @param finalKeyword Final keyword\n* @param typedBindingPattern Typed binding pattern\n* @param isModuleVar flag indicating whether the var is module level\n* @return Parsed node\n*/\nprivate STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword,\nSTNode typedBindingPattern, boolean isModuleVar) {\nSTNode assign;\nSTNode expr;\nSTNode semicolon;\nswitch (tokenKind) {\ncase EQUAL_TOKEN:\nassign = parseAssignOp();\nif (isModuleVar) {\nexpr = parseExpression();\n} else {\nexpr = parseActionOrExpression();\n}\nsemicolon = parseSemicolon();\nbreak;\ncase SEMICOLON_TOKEN:\nassign = STNodeFactory.createEmptyNode();\nexpr = STNodeFactory.createEmptyNode();\nsemicolon = parseSemicolon();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword,\ntypedBindingPattern, isModuleVar);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, typedBindingPattern, isModuleVar);\n}\nendContext();\nif (isModuleVar) {\nreturn STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern,\nassign, expr, semicolon);\n}\nreturn STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern, assign, expr,\nsemicolon);\n}\n/**\n*
\n* Parse the RHS portion of the assignment.\n*
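\n* Illustrative note (inferred from the code below): an invalid left-hand side is replaced by a\n* missing identifier reference carrying ERROR_INVALID_EXPR_IN_ASSIGNMENT_LHS, keeping the tree valid.\n*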
\n* assignment-stmt-rhs := = action-or-expr ;\n*\n* @param lvExpr LHS expression\n* @return Parsed node\n*/\nprivate STNode parseAssignmentStmtRhs(STNode lvExpr) {\nSTNode assign = parseAssignOp();\nSTNode expr = parseActionOrExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nif (lvExpr.kind == SyntaxKind.FUNCTION_CALL &&\nisPosibleFunctionalBindingPattern((STFunctionCallExpressionNode) lvExpr)) {\nlvExpr = getBindingPattern(lvExpr);\n}\nboolean lvExprValid = isValidLVExpr(lvExpr);\nif (!lvExprValid) {\nSTNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nlvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,\nDiagnosticErrorCode.ERROR_INVALID_EXPR_IN_ASSIGNMENT_LHS);\n}\nreturn STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon);\n}\n/*\n* Expressions\n*/\n/**\n* Parse expression. This will start parsing expressions from the lowest level of precedence.\n*\n* @return Parsed node\n*/\nprotected STNode parseExpression() {\nreturn parseExpression(DEFAULT_OP_PRECEDENCE, true, false);\n}\n/**\n* Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence.\n*\n* @return Parsed node\n*/\nprivate STNode parseActionOrExpression() {\nreturn parseExpression(DEFAULT_OP_PRECEDENCE, true, true);\n}\nprivate STNode parseActionOrExpressionInLhs(SyntaxKind tokenKind, STNode annots) {\nreturn parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, annots, false, true, false);\n}\n/**\n* Parse expression.\n*\n* @param isRhsExpr Flag indicating whether this is a rhs expression\n* @return Parsed node\n*/\nprivate STNode parseExpression(boolean isRhsExpr) {\nreturn parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false);\n}\nprivate boolean isValidLVExpr(STNode expression) {\nswitch (expression.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\ncase LIST_BINDING_PATTERN:\ncase MAPPING_BINDING_PATTERN:\ncase FUNCTIONAL_BINDING_PATTERN:\nreturn true;\ncase FIELD_ACCESS:\nreturn isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression);\ncase INDEXED_EXPRESSION:\nreturn isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression);\ndefault:\nreturn (expression instanceof STMissingToken);\n}\n}\nprivate boolean isValidLVMemberExpr(STNode expression) {\nswitch (expression.kind) {\ncase SIMPLE_NAME_REFERENCE:\ncase QUALIFIED_NAME_REFERENCE:\nreturn true;\ncase FIELD_ACCESS:\nreturn isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression);\ncase INDEXED_EXPRESSION:\nreturn isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression);\ncase BRACED_EXPRESSION:\nreturn isValidLVMemberExpr(((STBracedExpressionNode) expression).expression);\ndefault:\nreturn (expression instanceof STMissingToken);\n}\n}\n/**\n* Parse an expression that has an equal or higher precedence than a given level.\n*\n* @param precedenceLevel Precedence level of expression to be parsed\n* @param isRhsExpr Flag indicating whether this is a rhs expression\n* @param allowActions Flag indicating whether the current context support actions\n* @return Parsed node\n*/\nprivate STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) {\nreturn parseExpression(precedenceLevel, isRhsExpr, allowActions, false);\n}\nprivate STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, 
boolean allowActions,\nboolean isInConditionalExpr) {\nSTToken token = peek();\nreturn parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions, false, isInConditionalExpr);\n}\nprivate STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr,\nboolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) {\nSTNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions, isInConditionalExpr);\nreturn parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr);\n}\nprivate STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, STNode annots,\nboolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\nreturn parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, false, isInConditionalExpr);\n}\n/**\n* Parse terminal expressions. A terminal expression has the highest precedence level\n* out of all expressions, and will be at the leaves of an expression tree.\n*\n* @param annots Annotations\n* @param isRhsExpr Is a rhs expression\n* @param allowActions Allow actions\n* @return Parsed node\n*/\nprivate STNode parseTerminalExpression(STNode annots, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nreturn parseTerminalExpression(peek().kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\n}\nprivate STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nSTNode annots;\nif (kind == SyntaxKind.AT_TOKEN) {\nannots = parseAnnotations();\nkind = peek().kind;\n} else {\nannots = STNodeFactory.createEmptyNodeList();\n}\nSTNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions, isInConditionalExpr);\nif (!isNodeListEmpty(annots) && expr.kind != SyntaxKind.START_ACTION) {\nexpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(expr, annots,\nDiagnosticErrorCode.ERROR_ANNOTATIONS_ATTACHED_TO_EXPRESSION);\n}\nreturn expr;\n}\nprivate STNode parseTerminalExpression(SyntaxKind kind, STNode annots, boolean isRhsExpr, boolean allowActions,\nboolean isInConditionalExpr) {\nswitch (kind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn parseBasicLiteral();\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF, isInConditionalExpr);\ncase OPEN_PAREN_TOKEN:\nreturn parseBracedExpression(isRhsExpr, allowActions);\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\nreturn parseCheckExpression(isRhsExpr, allowActions, isInConditionalExpr);\ncase OPEN_BRACE_TOKEN:\nreturn parseMappingConstructorExpr();\ncase TYPEOF_KEYWORD:\nreturn parseTypeofExpression(isRhsExpr, isInConditionalExpr);\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\nreturn parseUnaryExpression(isRhsExpr, isInConditionalExpr);\ncase TRAP_KEYWORD:\nreturn parseTrapExpression(isRhsExpr, allowActions, isInConditionalExpr);\ncase OPEN_BRACKET_TOKEN:\nreturn parseListConstructorExpr();\ncase LT_TOKEN:\nreturn parseTypeCastExpr(isRhsExpr, allowActions, isInConditionalExpr);\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\ncase FROM_KEYWORD:\nreturn parseTableConstructorOrQuery(isRhsExpr);\ncase ERROR_KEYWORD:\nreturn parseErrorConstructorExpr();\ncase 
LET_KEYWORD:\nreturn parseLetExpression(isRhsExpr);\ncase BACKTICK_TOKEN:\nreturn parseTemplateExpression();\ncase XML_KEYWORD:\nSTToken nextNextToken = getNextNextToken(kind);\nif (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseXMLTemplateExpression();\n}\nreturn parseSimpleTypeDescriptor();\ncase STRING_KEYWORD:\nnextNextToken = getNextNextToken(kind);\nif (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {\nreturn parseStringTemplateExpression();\n}\nreturn parseSimpleTypeDescriptor();\ncase FUNCTION_KEYWORD:\nreturn parseExplicitFunctionExpression(annots, isRhsExpr);\ncase AT_TOKEN:\nbreak;\ncase NEW_KEYWORD:\nreturn parseNewExpression();\ncase START_KEYWORD:\nreturn parseStartAction(annots);\ncase FLUSH_KEYWORD:\nreturn parseFlushAction();\ncase LEFT_ARROW_TOKEN:\nreturn parseReceiveAction();\ncase WAIT_KEYWORD:\nreturn parseWaitAction();\ncase COMMIT_KEYWORD:\nreturn parseCommitAction();\ncase TRANSACTIONAL_KEYWORD:\nreturn parseTransactionalExpression();\ncase SERVICE_KEYWORD:\nreturn parseServiceConstructorExpression(annots);\ncase BASE16_KEYWORD:\ncase BASE64_KEYWORD:\nreturn parseByteArrayLiteral(kind);\ndefault:\nif (isSimpleType(kind)) {\nreturn parseSimpleTypeDescriptor();\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, annots, isRhsExpr, allowActions,\nisInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.action == Action.KEEP) {\nif (kind == SyntaxKind.XML_KEYWORD) {\nreturn parseXMLTemplateExpression();\n}\nreturn parseStringTemplateExpression();\n}\nswitch (solution.tokenKind) {\ncase IDENTIFIER_TOKEN:\nreturn parseQualifiedIdentifier(solution.recoveredNode, isInConditionalExpr);\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\nreturn solution.recoveredNode;\ndefault:\nreturn parseTerminalExpression(solution.tokenKind, annots, isRhsExpr, allowActions,\nisInConditionalExpr);\n}\n}\nprivate boolean isValidExprStart(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase DECIMAL_INTEGER_LITERAL:\ncase HEX_INTEGER_LITERAL:\ncase STRING_LITERAL:\ncase NULL_KEYWORD:\ncase TRUE_KEYWORD:\ncase FALSE_KEYWORD:\ncase DECIMAL_FLOATING_POINT_LITERAL:\ncase HEX_FLOATING_POINT_LITERAL:\ncase IDENTIFIER_TOKEN:\ncase OPEN_PAREN_TOKEN:\ncase CHECK_KEYWORD:\ncase CHECKPANIC_KEYWORD:\ncase OPEN_BRACE_TOKEN:\ncase TYPEOF_KEYWORD:\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase NEGATION_TOKEN:\ncase EXCLAMATION_MARK_TOKEN:\ncase TRAP_KEYWORD:\ncase OPEN_BRACKET_TOKEN:\ncase LT_TOKEN:\ncase TABLE_KEYWORD:\ncase STREAM_KEYWORD:\ncase FROM_KEYWORD:\ncase ERROR_KEYWORD:\ncase LET_KEYWORD:\ncase BACKTICK_TOKEN:\ncase XML_KEYWORD:\ncase STRING_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase AT_TOKEN:\ncase NEW_KEYWORD:\ncase START_KEYWORD:\ncase FLUSH_KEYWORD:\ncase LEFT_ARROW_TOKEN:\ncase WAIT_KEYWORD:\ncase SERVICE_KEYWORD:\nreturn true;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n*
\n* Parse a new expression.\n*
\n* \n* new-expr := explicit-new-expr | implicit-new-expr\n*
\n* explicit-new-expr := new type-descriptor ( arg-list )\n*
\n* implicit-new-expr := new [( arg-list )]\n*
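\n* Illustrative examples (assumed inputs, not from the source): {@code new Person(5)} is an\n* explicit-new-expr, while {@code new} and {@code new (5)} are implicit-new-exprs.\n*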
\n*\n* @return Parsed NewExpression node.\n*/\nprivate STNode parseNewExpression() {\nSTNode newKeyword = parseNewKeyword();\nreturn parseNewKeywordRhs(newKeyword);\n}\n/**\n*
\n* Parse `new` keyword.\n*
\n*\n* @return Parsed NEW_KEYWORD Token.\n*/\nprivate STNode parseNewKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.NEW_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.NEW_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseNewKeywordRhs(STNode newKeyword) {\nSTNode token = peek();\nreturn parseNewKeywordRhs(token.kind, newKeyword);\n}\n/**\n*
\n* Parse an implicit or explicit new expression.\n*
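\n* Illustrative note (inferred from the switch below): an open parenthesis starts an implicit\n* new-expr with arguments, an identifier or {@code object}/{@code stream} keyword starts an\n* explicit type descriptor, and anything else yields a bare implicit {@code new}.\n*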
\n*\n* @param kind next token kind.\n* @param newKeyword parsed node for `new` keyword.\n* @return Parsed new-expression node.\n*/\nprivate STNode parseNewKeywordRhs(SyntaxKind kind, STNode newKeyword) {\nswitch (kind) {\ncase OPEN_PAREN_TOKEN:\nreturn parseImplicitNewRhs(newKeyword);\ncase SEMICOLON_TOKEN:\nbreak;\ncase IDENTIFIER_TOKEN:\ncase OBJECT_KEYWORD:\ncase STREAM_KEYWORD:\nreturn parseTypeDescriptorInNewExpr(newKeyword);\ndefault:\nbreak;\n}\nreturn STNodeFactory.createImplicitNewExpressionNode(newKeyword, STNodeFactory.createEmptyNode());\n}\n/**\n*
\n* Parse an explicit new expression.\n*
\n* \n* explicit-new-expr := new type-descriptor ( arg-list )\n* \n*\n* @param newKeyword Parsed `new` keyword.\n* @return the Parsed Explicit New Expression.\n*/\nprivate STNode parseTypeDescriptorInNewExpr(STNode newKeyword) {\nSTNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_NEW_EXPR);\nSTNode parenthesizedArgsList = parseParenthesizedArgList();\nreturn STNodeFactory.createExplicitNewExpressionNode(newKeyword, typeDescriptor, parenthesizedArgsList);\n}\n/**\n*
\n* Parse an implicit-new-expr with arguments.\n*
\n*\n* @param newKeyword Parsed `new` keyword.\n* @return Parsed implicit-new-expr.\n*/\nprivate STNode parseImplicitNewRhs(STNode newKeyword) {\nSTNode implicitNewArgList = parseParenthesizedArgList();\nreturn STNodeFactory.createImplicitNewExpressionNode(newKeyword, implicitNewArgList);\n}\n/**\n*
\n* Parse the parenthesized argument list for a new-expr.\n*
\n*\n* @return Parsed parenthesized rhs of new-expr.\n*/\nprivate STNode parseParenthesizedArgList() {\nSTNode openParan = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode arguments = parseArgsList();\nSTNode closeParan = parseCloseParenthesis();\nreturn STNodeFactory.createParenthesizedArgList(openParan, arguments, closeParan);\n}\n/**\n*
\n* Parse the right-hand-side of an expression.\n*
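\n* Illustrative note (inferred from the code below): this is precedence climbing; for\n* {@code a + b * c} the ADDITIVE-level pass sees {@code *} with higher (MULTIPLICATIVE)\n* precedence, recurses for the right operand, and the result groups as {@code a + (b * c)}.\n*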
\n* expr-rhs := (binary-op expression\n* | dot identifier\n* | open-bracket expression close-bracket\n* )*\n*\n* @param precedenceLevel Precedence level of the expression that is being parsed currently\n* @param lhsExpr LHS expression of the expression\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @param allowActions Flag indicating whether the current context support actions\n* @return Parsed node\n*/\nprivate STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr,\nboolean allowActions) {\nreturn parseExpressionRhs(precedenceLevel, lhsExpr, isRhsExpr, allowActions, false, false);\n}\nprivate STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr,\nboolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) {\nSTToken token = peek();\nreturn parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard,\nisInConditionalExpr);\n}\n/**\n* Parse the right hand side of an expression given the next token kind.\n*\n* @param tokenKind Next token kind\n* @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently\n* @param lhsExpr LHS expression\n* @param isRhsExpr Flag indicating whether this is a rhs expr or not\n* @param allowActions Flag indicating whether to allow actions or not\n* @param isInMatchGuard Flag indicating whether this expression is in a match-guard\n* @return Parsed node\n*/\nprivate STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr,\nboolean isRhsExpr, boolean allowActions, boolean isInMatchGuard,\nboolean isInConditionalExpr) {\nif (isEndOfExpression(tokenKind, isRhsExpr, isInMatchGuard, lhsExpr.kind)) {\nreturn lhsExpr;\n}\nif (lhsExpr.kind == SyntaxKind.ASYNC_SEND_ACTION) {\nreturn lhsExpr;\n}\nif (!isValidExprRhsStart(tokenKind, lhsExpr.kind)) {\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr,\nisRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nif (solution.ctx == ParserRuleContext.BINARY_OPERATOR) {\nSyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel);\nreturn parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions,\nisInMatchGuard, isInConditionalExpr);\n} else {\nreturn parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions,\nisInMatchGuard, isInConditionalExpr);\n}\n}\nif (tokenKind == SyntaxKind.GT_TOKEN && peek(2).kind == SyntaxKind.GT_TOKEN) {\nif (peek(3).kind == SyntaxKind.GT_TOKEN) {\ntokenKind = SyntaxKind.TRIPPLE_GT_TOKEN;\n} else {\ntokenKind = SyntaxKind.DOUBLE_GT_TOKEN;\n}\n}\nOperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind);\nif (currentPrecedenceLevel.isHigherThanOrEqual(nextOperatorPrecedence, allowActions)) {\nreturn lhsExpr;\n}\nSTNode newLhsExpr;\nSTNode operator;\nswitch (tokenKind) {\ncase OPEN_PAREN_TOKEN:\nnewLhsExpr = parseFuncCall(lhsExpr);\nbreak;\ncase OPEN_BRACKET_TOKEN:\nnewLhsExpr = parseMemberAccessExpr(lhsExpr, isRhsExpr);\nbreak;\ncase DOT_TOKEN:\nnewLhsExpr = parseFieldAccessOrMethodCall(lhsExpr, isInConditionalExpr);\nbreak;\ncase IS_KEYWORD:\nnewLhsExpr = parseTypeTestExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase RIGHT_ARROW_TOKEN:\nnewLhsExpr = 
parseRemoteMethodCallOrAsyncSendAction(lhsExpr, isRhsExpr);\nif (!allowActions) {\nnewLhsExpr = SyntaxErrors.addDiagnostic(newLhsExpr,\nDiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND);\n}\nbreak;\ncase SYNC_SEND_TOKEN:\nnewLhsExpr = parseSyncSendAction(lhsExpr);\nif (!allowActions) {\nnewLhsExpr = SyntaxErrors.addDiagnostic(newLhsExpr,\nDiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND);\n}\nbreak;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nnewLhsExpr = parseImplicitAnonFunc(lhsExpr, isRhsExpr);\nbreak;\ncase ANNOT_CHAINING_TOKEN:\nnewLhsExpr = parseAnnotAccessExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase OPTIONAL_CHAINING_TOKEN:\nnewLhsExpr = parseOptionalFieldAccessExpression(lhsExpr, isInConditionalExpr);\nbreak;\ncase QUESTION_MARK_TOKEN:\nnewLhsExpr = parseConditionalExpression(lhsExpr);\nbreak;\ncase DOT_LT_TOKEN:\nnewLhsExpr = parseXMLFilterExpression(lhsExpr);\nbreak;\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nnewLhsExpr = parseXMLStepExpression(lhsExpr);\nbreak;\ndefault:\nif (tokenKind == SyntaxKind.DOUBLE_GT_TOKEN) {\noperator = parseSignedRightShiftToken();\n} else if (tokenKind == SyntaxKind.TRIPPLE_GT_TOKEN) {\noperator = parseUnsignedRightShiftToken();\n} else {\noperator = parseBinaryOperator();\n}\nSTNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false, isInConditionalExpr);\nnewLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator,\nrhsExpr);\nbreak;\n}\nreturn parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions, isInMatchGuard,\nisInConditionalExpr);\n}\nprivate boolean isValidExprRhsStart(SyntaxKind tokenKind, SyntaxKind precedingNodeKind) {\nswitch (tokenKind) {\ncase OPEN_PAREN_TOKEN:\nreturn precedingNodeKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||\nprecedingNodeKind == SyntaxKind.SIMPLE_NAME_REFERENCE;\ncase DOT_TOKEN:\ncase OPEN_BRACKET_TOKEN:\ncase IS_KEYWORD:\ncase RIGHT_ARROW_TOKEN:\ncase RIGHT_DOUBLE_ARROW_TOKEN:\ncase SYNC_SEND_TOKEN:\ncase ANNOT_CHAINING_TOKEN:\ncase OPTIONAL_CHAINING_TOKEN:\ncase QUESTION_MARK_TOKEN:\ncase COLON_TOKEN:\ncase DOT_LT_TOKEN:\ncase SLASH_LT_TOKEN:\ncase DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:\ncase SLASH_ASTERISK_TOKEN:\nreturn true;\ndefault:\nreturn isBinaryOperator(tokenKind);\n}\n}\n/**\n* Parse member access expression.\n*\n* @param lhsExpr Container expression\n* @param isRhsExpr Is this is a rhs expression\n* @return Member access expression\n*/\nprivate STNode parseMemberAccessExpr(STNode lhsExpr, boolean isRhsExpr) {\nstartContext(ParserRuleContext.MEMBER_ACCESS_KEY_EXPR);\nSTNode openBracket = parseOpenBracket();\nSTNode keyExpr = parseMemberAccessKeyExprs(isRhsExpr);\nSTNode closeBracket = parseCloseBracket();\nendContext();\nif (isRhsExpr && ((STNodeList) keyExpr).isEmpty()) {\nkeyExpr = STNodeFactory.createNodeList(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\ncloseBracket = SyntaxErrors.addDiagnostic(closeBracket,\nDiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);\n}\nreturn STNodeFactory.createIndexedExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket);\n}\n/**\n* Parse key expression of a member access expression. A type descriptor\n* that starts with a type-ref (e.g: T[a][b]) also goes through this\n* method.\n*
\n* key-expression := single-key-expression | multi-key-expression\n*\n* @param isRhsExpr Is this is a rhs expression\n* @return Key expression\n*/\nprivate STNode parseMemberAccessKeyExprs(boolean isRhsExpr) {\nList exprList = new ArrayList<>();\nSTNode keyExpr;\nSTNode keyExprEnd;\nwhile (!isEndOfTypeList(peek().kind)) {\nkeyExpr = parseKeyExpr(isRhsExpr);\nexprList.add(keyExpr);\nkeyExprEnd = parseMemberAccessKeyExprEnd();\nif (keyExprEnd == null) {\nbreak;\n}\nexprList.add(keyExprEnd);\n}\nreturn STNodeFactory.createNodeList(exprList);\n}\nprivate STNode parseKeyExpr(boolean isRhsExpr) {\nif (!isRhsExpr && peek().kind == SyntaxKind.ASTERISK_TOKEN) {\nreturn STNodeFactory.createBasicLiteralNode(SyntaxKind.ASTERISK_TOKEN, consume());\n}\nreturn parseExpression(isRhsExpr);\n}\nprivate STNode parseMemberAccessKeyExprEnd() {\nreturn parseMemberAccessKeyExprEnd(peek().kind);\n}\nprivate STNode parseMemberAccessKeyExprEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACKET_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.MEMBER_ACCESS_KEY_EXPR_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMemberAccessKeyExprEnd(solution.tokenKind);\n}\n}\n/**\n* Parse close bracket.\n*\n* @return Parsed node\n*/\nprivate STNode parseCloseBracket() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLOSE_BRACKET);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse field access, xml required attribute access expressions or method call expression.\n*
\n* \n* field-access-expr := expression . field-name\n*
\n* xml-required-attribute-access-expr := expression . xml-attribute-name\n*
\n* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n*
\n* method-call-expr := expression . method-name ( arg-list )\n*
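\n* Illustrative note (inferred from the code below): {@code p.name} parses as field access while\n* {@code p.name()} parses as a method call, decided by whether an open parenthesis follows the name.\n*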
\n*\n* @param lhsExpr Preceding expression of the field access or method call\n* @return One of field-access-expression or method-call-expression.\n*/\nprivate STNode parseFieldAccessOrMethodCall(STNode lhsExpr, boolean isInConditionalExpr) {\nSTNode dotToken = parseDotToken();\nSTToken token = peek();\nif (token.kind == SyntaxKind.MAP_KEYWORD || token.kind == SyntaxKind.START_KEYWORD) {\nSTNode methodName = getKeywordAsSimpleNameRef();\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, methodName, openParen, args,\ncloseParen);\n}\nSTNode fieldOrMethodName = parseFieldAccessIdentifier(isInConditionalExpr);\nif (fieldOrMethodName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\nreturn STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);\n}\nSTToken nextToken = peek();\nif (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args,\ncloseParen);\n}\nreturn STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);\n}\nprivate STNode getKeywordAsSimpleNameRef() {\nSTToken mapKeyword = consume();\nSTNode methodName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),\nmapKeyword.trailingMinutiae(), mapKeyword.diagnostics());\nmethodName = STNodeFactory.createSimpleNameReferenceNode(methodName);\nreturn methodName;\n}\n/**\n*
\n* Parse braced expression.\n*
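\n* Illustrative note (inferred from the code below): {@code ()} becomes a nil literal unless it is\n* followed by {@code =>}, in which case it is an empty anonymous-function parameter list.\n*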
\n* braced-expr := ( expression )\n*\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @param allowActions Allow actions\n* @return Parsed node\n*/\nprivate STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);\nif (peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\nreturn parseNilLiteralOrEmptyAnonFuncParamRhs(openParen);\n}\nstartContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\nSTNode expr;\nif (allowActions) {\nexpr = parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true);\n} else {\nexpr = parseExpression(isRhsExpr);\n}\nreturn parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, expr, isRhsExpr);\n}\nprivate STNode parseNilLiteralOrEmptyAnonFuncParamRhs(STNode openParen) {\nSTNode closeParen = parseCloseParenthesis();\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn STNodeFactory.createNilLiteralNode(openParen, closeParen);\n} else {\nSTNode params = STNodeFactory.createNodeList();\nSTNode anonFuncParam =\nSTNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\nreturn anonFuncParam;\n}\n}\nprivate STNode parseBracedExprOrAnonFuncParamRhs(STNode openParen, STNode expr, boolean isRhsExpr) {\nSTToken nextToken = peek();\nreturn parseBracedExprOrAnonFuncParamRhs(nextToken.kind, openParen, expr, isRhsExpr);\n}\nprivate STNode parseBracedExprOrAnonFuncParamRhs(SyntaxKind nextTokenKind, STNode openParen, STNode expr,\nboolean isRhsExpr) {\nif (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\nswitch (nextTokenKind) {\ncase CLOSE_PAREN_TOKEN:\nbreak;\ncase COMMA_TOKEN:\nreturn parseImplicitAnonFunc(openParen, expr, isRhsExpr);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS, openParen,\nexpr, isRhsExpr);\nif (solution.action == Action.REMOVE) {\nendContext();\nreturn solution.recoveredNode;\n}\nreturn parseBracedExprOrAnonFuncParamRhs(solution.tokenKind, openParen, expr, isRhsExpr);\n}\n}\nSTNode closeParen = parseCloseParenthesis();\nendContext();\nif (isAction(expr)) {\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen);\n}\nreturn STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen);\n}\n/**\n* Check whether a given node is an action node.\n*\n* @param node Node to check\n* @return true if the node is an action node. false otherwise\n*/\nprivate boolean isAction(STNode node) {\nswitch (node.kind) {\ncase REMOTE_METHOD_CALL_ACTION:\ncase BRACED_ACTION:\ncase CHECK_ACTION:\ncase START_ACTION:\ncase TRAP_ACTION:\ncase FLUSH_ACTION:\ncase ASYNC_SEND_ACTION:\ncase SYNC_SEND_ACTION:\ncase RECEIVE_ACTION:\ncase WAIT_ACTION:\ncase QUERY_ACTION:\ncase COMMIT_ACTION:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Check whether the given token is an end of a expression.\n*\n* @param tokenKind Token to check\n* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement\n* @return true if the token represents an end of a block. 
false otherwise\n*/\nprivate boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr, boolean isInMatchGuard,\nSyntaxKind precedingNodeKind) {\nif (!isRhsExpr) {\nif (isCompoundBinaryOperator(tokenKind)) {\nreturn true;\n}\nif (isInMatchGuard && tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\nreturn true;\n}\nreturn !isValidExprRhsStart(tokenKind, precedingNodeKind);\n}\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase OPEN_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase SEMICOLON_TOKEN:\ncase COMMA_TOKEN:\ncase PUBLIC_KEYWORD:\ncase EOF_TOKEN:\ncase CONST_KEYWORD:\ncase LISTENER_KEYWORD:\ncase EQUAL_TOKEN:\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\ncase AS_KEYWORD:\ncase IN_KEYWORD:\ncase BACKTICK_TOKEN:\ncase FROM_KEYWORD:\ncase WHERE_KEYWORD:\ncase LET_KEYWORD:\ncase SELECT_KEYWORD:\ncase DO_KEYWORD:\ncase COLON_TOKEN:\ncase ON_KEYWORD:\ncase CONFLICT_KEYWORD:\ncase LIMIT_KEYWORD:\ncase JOIN_KEYWORD:\ncase OUTER_KEYWORD:\nreturn true;\ncase RIGHT_DOUBLE_ARROW_TOKEN:\nreturn isInMatchGuard;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n* Parse basic literals. It is assumed that we come here after validation.\n*\n* @return Parsed node\n*/\nprivate STNode parseBasicLiteral() {\nSTToken literalToken = consume();\nreturn STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken);\n}\n/**\n* Parse function call expression.\n* function-call-expr := function-reference ( arg-list )\n* function-reference := variable-reference\n*\n* @param identifier Function name\n* @return Function call expression\n*/\nprivate STNode parseFuncCall(STNode identifier) {\nSTNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);\nSTNode args = parseArgsList();\nSTNode closeParen = parseCloseParenthesis();\nreturn STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen);\n}\n/**\n*
\n* Parse error constructor expression.\n*
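\n* Illustrative note (assumed example): {@code error(message)} parses the {@code error} keyword and\n* then reuses the function-call argument parsing, as the body below shows.\n*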
\n* \n* error-constructor-expr := error ( arg-list )\n* \n*\n* @return Error constructor expression\n*/\nprivate STNode parseErrorConstructorExpr() {\nSTNode errorKeyword = parseErrorKeyword();\nreturn parseFuncCall(errorKeyword);\n}\n/**\n* Parse function call argument list.\n*\n* @return Parsed args list\n*/\nprivate STNode parseArgsList() {\nstartContext(ParserRuleContext.ARG_LIST);\nSTToken token = peek();\nif (isEndOfParametersList(token.kind)) {\nSTNode args = STNodeFactory.createEmptyNodeList();\nendContext();\nreturn args;\n}\nSTNode firstArg = parseArgument();\nif (firstArg == null) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode argsList = parseArgList(firstArg);\nendContext();\nreturn argsList;\n}\n/**\n* Parse follow up arguments.\n*\n* @param firstArg first argument in the list\n* @return the argument list\n*/\nprivate STNode parseArgList(STNode firstArg) {\nArrayList argsList = new ArrayList<>();\nargsList.add(firstArg);\nSyntaxKind lastValidArgKind = firstArg.kind;\nSTToken nextToken = peek();\nwhile (!isEndOfParametersList(nextToken.kind)) {\nSTNode argEnd = parseArgEnd(nextToken.kind);\nif (argEnd == null) {\nbreak;\n}\nnextToken = peek();\nif (isEndOfParametersList(nextToken.kind)) {\nint prevArgIndex = argsList.size() - 1;\nSTNode prevArg = argsList.remove(prevArgIndex);\nSTNode prevArgWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(prevArg, argEnd,\nDiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) argEnd).text());\nargsList.add(prevArgWithDiagnostics);\nbreak;\n}\nSTNode curArg = parseArgument(nextToken.kind);\nDiagnosticErrorCode errorCode = validateArgumentOrder(lastValidArgKind, curArg.kind);\nif (errorCode == null) {\nargsList.add(argEnd);\nargsList.add(curArg);\nlastValidArgKind = curArg.kind;\n} else {\nupdateLastNodeInListWithInvalidNode(argsList, argEnd, null);\nupdateLastNodeInListWithInvalidNode(argsList, curArg, errorCode);\n}\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(argsList);\n}\nprivate DiagnosticErrorCode validateArgumentOrder(SyntaxKind prevArgKind, SyntaxKind curArgKind) {\nDiagnosticErrorCode errorCode = null;\nswitch (prevArgKind) {\ncase POSITIONAL_ARG:\nbreak;\ncase NAMED_ARG:\nif (curArgKind == SyntaxKind.POSITIONAL_ARG) {\nerrorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;\n}\nbreak;\ncase REST_ARG:\nerrorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;\nbreak;\ndefault:\nthrow new IllegalStateException(\"Invalid SyntaxKind in an argument\");\n}\nreturn errorCode;\n}\nprivate STNode parseArgEnd() {\nreturn parseArgEnd(peek().kind);\n}\nprivate STNode parseArgEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.ARG_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgEnd(solution.tokenKind);\n}\n}\n/**\n* Parse function call argument.\n*\n* @return Parsed argument node\n*/\nprivate STNode parseArgument() {\nSTToken token = peek();\nreturn parseArgument(token.kind);\n}\nprivate STNode parseArgument(SyntaxKind kind) {\nSTNode arg;\nswitch (kind) {\ncase ELLIPSIS_TOKEN:\nSTToken ellipsis = consume();\nSTNode expr = parseExpression();\narg = STNodeFactory.createRestArgumentNode(ellipsis, expr);\nbreak;\ncase IDENTIFIER_TOKEN:\narg = parseNamedOrPositionalArg(kind);\nbreak;\ncase CLOSE_PAREN_TOKEN:\nreturn null;\ndefault:\nif (isValidExprStart(kind)) {\nexpr = 
parseExpression();\narg = STNodeFactory.createPositionalArgumentNode(expr);\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.ARG_START_OR_ARG_LIST_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseArgument(solution.tokenKind);\n}\nreturn arg;\n}\n/**\n* Parse positional or named arg. This method assumed peek()/peek(1)\n* is always an identifier.\n*\n* @return Parsed argument node\n*/\nprivate STNode parseNamedOrPositionalArg(SyntaxKind nextTokenKind) {\nSTNode argNameOrExpr = parseTerminalExpression(peek().kind, true, false, false);\nSTToken secondToken = peek();\nswitch (secondToken.kind) {\ncase EQUAL_TOKEN:\nSTNode equal = parseAssignOp();\nSTNode valExpr = parseExpression();\nreturn STNodeFactory.createNamedArgumentNode(argNameOrExpr, equal, valExpr);\ncase COMMA_TOKEN:\ncase CLOSE_PAREN_TOKEN:\nreturn STNodeFactory.createPositionalArgumentNode(argNameOrExpr);\ndefault:\nargNameOrExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, argNameOrExpr, false, false);\nreturn STNodeFactory.createPositionalArgumentNode(argNameOrExpr);\n}\n}\n/**\n* Parse object type descriptor.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectTypeDescriptor() {\nstartContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR);\nSTNode objectTypeQualifiers = parseObjectTypeQualifiers();\nSTNode objectKeyword = parseObjectKeyword();\nSTNode openBrace = parseOpenBrace();\nSTNode objectMembers = parseObjectMembers();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace,\nobjectMembers, closeBrace);\n}\n/**\n* Parse object type qualifiers.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectTypeQualifiers() {\nSTToken nextToken = peek();\nreturn parseObjectTypeQualifiers(nextToken.kind);\n}\nprivate STNode parseObjectTypeQualifiers(SyntaxKind kind) {\nSTNode firstQualifier;\nswitch (kind) {\ncase CLIENT_KEYWORD:\nfirstQualifier = parseClientKeyword();\nbreak;\ncase ABSTRACT_KEYWORD:\nfirstQualifier = parseAbstractKeyword();\nbreak;\ncase READONLY_KEYWORD:\nfirstQualifier = parseReadonlyKeyword();\nbreak;\ncase OBJECT_KEYWORD:\nreturn STNodeFactory.createEmptyNodeList();\ndefault:\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_QUALIFIER);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectTypeQualifiers(solution.tokenKind);\n}\nreturn parseObjectTypeNextQualifiers(firstQualifier);\n}\nprivate STNode parseObjectTypeNextQualifiers(STNode firstQualifier) {\nList qualifiers = new ArrayList<>();\nqualifiers.add(firstQualifier);\nfor (int i = 0; i < 2; i++) {\nSTNode nextToken = peek();\nif (isNodeWithSyntaxKindInList(qualifiers, nextToken.kind)) {\nnextToken = consume();\nupdateLastNodeInListWithInvalidNode(qualifiers, nextToken,\nDiagnosticErrorCode.ERROR_SAME_OBJECT_TYPE_QUALIFIER);\ncontinue;\n}\nSTNode nextQualifier;\nswitch (nextToken.kind) {\ncase CLIENT_KEYWORD:\nnextQualifier = parseClientKeyword();\nbreak;\ncase ABSTRACT_KEYWORD:\nnextQualifier = parseAbstractKeyword();\nbreak;\ncase READONLY_KEYWORD:\nnextQualifier = parseReadonlyKeyword();\nbreak;\ncase OBJECT_KEYWORD:\ndefault:\nreturn STNodeFactory.createNodeList(qualifiers);\n}\nqualifiers.add(nextQualifier);\n}\nreturn STNodeFactory.createNodeList(qualifiers);\n}\n/**\n* Parse client keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseClientKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CLIENT_KEYWORD) 
{\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse abstract keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseAbstractKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ABSTRACT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse object keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OBJECT_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse object members.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectMembers() {\nArrayList objectMembers = new ArrayList<>();\nwhile (!isEndOfObjectTypeNode()) {\nstartContext(ParserRuleContext.OBJECT_MEMBER);\nSTNode member = parseObjectMember(peek().kind);\nendContext();\nif (member == null) {\nbreak;\n}\nobjectMembers.add(member);\n}\nreturn STNodeFactory.createNodeList(objectMembers);\n}\nprivate STNode parseObjectMember() {\nSTToken nextToken = peek();\nreturn parseObjectMember(nextToken.kind);\n}\nprivate STNode parseObjectMember(SyntaxKind nextTokenKind) {\nSTNode metadata;\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\ncase REMOTE_KEYWORD:\ncase FUNCTION_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nmetadata = createEmptyMetadata();\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_START);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMember(solution.tokenKind);\n}\nreturn parseObjectMember(nextTokenKind, metadata);\n}\nprivate STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) {\nSTNode member;\nswitch (nextTokenKind) {\ncase EOF_TOKEN:\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ncase ASTERISK_TOKEN:\nSTNode asterisk = consume();\nSTNode type = parseTypeReference();\nSTNode semicolonToken = parseSemicolon();\nmember = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);\nbreak;\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\nSTNode visibilityQualifier = parseObjectMemberVisibility();\nmember = parseObjectMethodOrField(metadata, visibilityQualifier);\nbreak;\ncase REMOTE_KEYWORD:\nmember = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode());\nbreak;\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nmember = parseObjectMethod(metadata, STNodeFactory.createEmptyNode(), STNodeFactory.createEmptyNode());\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nmember = parseObjectField(metadata, STNodeFactory.createEmptyNode());\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMember(solution.tokenKind);\n}\nreturn member;\n}\nprivate STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) {\nSTToken nextToken = peek(1);\nSTToken nextNextToken = peek(2);\nreturn parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers);\n}\n/**\n* Parse an 
object member, given the visibility modifier. Object member can have\n* only one visibility qualifier. This mean the methodQualifiers list can have\n* one qualifier at-most.\n*\n* @param visibilityQualifier Visibility qualifier. A modifier can be\n* a syntax node with either 'PUBLIC' or 'PRIVATE'.\n* @param nextTokenKind Next token kind\n* @param nextNextTokenKind Kind of the token after the\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifiers\n* @return Parse object member node\n*/\nprivate STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata,\nSTNode visibilityQualifier) {\nswitch (nextTokenKind) {\ncase REMOTE_KEYWORD:\nSTNode remoteKeyword = parseRemoteKeyword();\nreturn parseObjectMethod(metadata, visibilityQualifier, remoteKeyword);\ncase FUNCTION_KEYWORD:\ncase TRANSACTIONAL_KEYWORD:\nremoteKeyword = STNodeFactory.createEmptyNode();\nreturn parseObjectMethod(metadata, visibilityQualifier, remoteKeyword);\ncase IDENTIFIER_TOKEN:\nif (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) {\nreturn parseObjectField(metadata, visibilityQualifier);\n}\nbreak;\ndefault:\nif (isTypeStartingToken(nextTokenKind)) {\nreturn parseObjectField(metadata, visibilityQualifier);\n}\nbreak;\n}\nSolution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata,\nvisibilityQualifier);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifier);\n}\n/**\n* Parse object visibility. Visibility can be public or private.\n*\n* @return Parsed node\n*/\nprivate STNode parseObjectMemberVisibility() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseRemoteKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.REMOTE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\nprivate STNode parseObjectField(STNode metadata, STNode methodQualifiers) {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.READONLY_KEYWORD) {\nSTNode readonlyQualifier = STNodeFactory.createEmptyNode();\nSTNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);\n}\nSTNode type;\nSTNode readonlyQualifier = parseReadonlyKeyword();\nnextToken = peek();\nif (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\nSTNode fieldNameOrTypeDesc = parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME);\nif (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\ntype = fieldNameOrTypeDesc;\n} else {\nnextToken = peek();\nswitch (nextToken.kind) {\ncase SEMICOLON_TOKEN:\ncase EQUAL_TOKEN:\ntype = createBuiltinSimpleNameReference(readonlyQualifier);\nreadonlyQualifier = STNodeFactory.createEmptyNode();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type,\nfieldNameOrTypeDesc);\ndefault:\ntype = fieldNameOrTypeDesc;\nbreak;\n}\n}\n} else if (isTypeStartingToken(nextToken.kind)) {\ntype = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);\n} else {\nreadonlyQualifier = 
createBuiltinSimpleNameReference(readonlyQualifier);\ntype = parseComplexTypeDescriptor(readonlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);\nreadonlyQualifier = STNodeFactory.createEmptyNode();\n}\nSTNode fieldName = parseVariableName();\nreturn parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);\n}\n/**\n* Parse object field rhs, and complete the object field parsing. Returns the parsed object field.\n*\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed object field\n*/\nprivate STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode readonlyQualifier,\nSTNode type, STNode fieldName) {\nSTToken nextToken = peek();\nreturn parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, readonlyQualifier, type, fieldName);\n}\n/**\n* Parse object field rhs, and complete the object field parsing. Returns the parsed object field.\n*\n* @param nextTokenKind Kind of the next token\n* @param metadata Metadata\n* @param visibilityQualifier Visibility qualifier\n* @param readonlyQualifier Readonly qualifier\n* @param type Type descriptor\n* @param fieldName Field name\n* @return Parsed object field\n*/\nprivate STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier,\nSTNode readonlyQualifier, STNode type, STNode fieldName) {\nSTNode equalsToken;\nSTNode expression;\nSTNode semicolonToken;\nswitch (nextTokenKind) {\ncase SEMICOLON_TOKEN:\nequalsToken = STNodeFactory.createEmptyNode();\nexpression = STNodeFactory.createEmptyNode();\nsemicolonToken = parseSemicolon();\nbreak;\ncase EQUAL_TOKEN:\nequalsToken = parseAssignOp();\nexpression = parseExpression();\nsemicolonToken = parseSemicolon();\nbreak;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier,\nreadonlyQualifier, type, fieldName);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, readonlyQualifier, type,\nfieldName);\n}\nreturn STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, readonlyQualifier, type, fieldName,\nequalsToken, expression, semicolonToken);\n}\nprivate STNode parseObjectMethod(STNode metadata, STNode visibilityQualifier, STNode remoteKeyword) {\nreturn parseFuncDefOrFuncTypeDesc(metadata, true, visibilityQualifier, remoteKeyword, null);\n}\n/**\n* Parse if-else statement.\n* \n* if-else-stmt := if expression block-stmt [else-block]\n* \n*\n* @return If-else block\n*/\nprivate STNode parseIfElseBlock() {\nstartContext(ParserRuleContext.IF_BLOCK);\nSTNode ifKeyword = parseIfKeyword();\nSTNode condition = parseExpression();\nSTNode ifBody = parseBlockNode();\nendContext();\nSTNode elseBody = parseElseBlock();\nreturn STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody);\n}\n/**\n* Parse if-keyword.\n*\n* @return Parsed if-keyword node\n*/\nprivate STNode parseIfKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.IF_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.IF_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse else-keyword.\n*\n* @return Parsed else keyword node\n*/\nprivate STNode parseElseKeyword() {\nSTToken token = peek();\nif (token.kind == 
SyntaxKind.ELSE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ELSE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse block node.\n* \n* block-stmt := { sequence-stmt }\n* sequence-stmt := statement*\n* \n*\n* @return Parse block node\n*/\nprivate STNode parseBlockNode() {\nstartContext(ParserRuleContext.BLOCK_STMT);\nSTNode openBrace = parseOpenBrace();\nSTNode stmts = parseStatements();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace);\n}\n/**\n* Parse else block.\n* else-block := else (if-else-stmt | block-stmt)\n*\n* @return Else block\n*/\nprivate STNode parseElseBlock() {\nSTToken nextToken = peek();\nif (nextToken.kind != SyntaxKind.ELSE_KEYWORD) {\nreturn STNodeFactory.createEmptyNode();\n}\nSTNode elseKeyword = parseElseKeyword();\nSTNode elseBody = parseElseBody();\nreturn STNodeFactory.createElseBlockNode(elseKeyword, elseBody);\n}\n/**\n* Parse else node body.\n* else-body := if-else-stmt | block-stmt\n*\n* @return Else node body\n*/\nprivate STNode parseElseBody() {\nSTToken nextToken = peek();\nreturn parseElseBody(nextToken.kind);\n}\nprivate STNode parseElseBody(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase IF_KEYWORD:\nreturn parseIfElseBlock();\ncase OPEN_BRACE_TOKEN:\nreturn parseBlockNode();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.ELSE_BODY);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseElseBody(solution.tokenKind);\n}\n}\n/**\n* Parse while statement.\n* while-stmt := while expression block-stmt\n*\n* @return While statement\n*/\nprivate STNode parseWhileStatement() {\nstartContext(ParserRuleContext.WHILE_BLOCK);\nSTNode whileKeyword = parseWhileKeyword();\nSTNode condition = parseExpression();\nSTNode whileBody = parseBlockNode();\nendContext();\nreturn STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody);\n}\n/**\n* Parse while-keyword.\n*\n* @return While-keyword node\n*/\nprivate STNode parseWhileKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.WHILE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.WHILE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse panic statement.\n* panic-stmt := panic expression ;\n*\n* @return Panic statement\n*/\nprivate STNode parsePanicStatement() {\nstartContext(ParserRuleContext.PANIC_STMT);\nSTNode panicKeyword = parsePanicKeyword();\nSTNode expression = parseExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon);\n}\n/**\n* Parse panic-keyword.\n*\n* @return Panic-keyword node\n*/\nprivate STNode parsePanicKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.PANIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.PANIC_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse check expression. This method is used to parse both check expression\n* as well as check action.\n*\n*
\n* \n* checking-expr := checking-keyword expression\n* checking-action := checking-keyword action\n* \n*\n* @param allowActions Allow actions\n* @param isRhsExpr Is rhs expression\n* @return Check expression node\n*/\nprivate STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\nSTNode checkingKeyword = parseCheckingKeyword();\nSTNode expr =\nparseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\nif (isAction(expr)) {\nreturn STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr);\n} else {\nreturn STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr);\n}\n}\n/**\n* Parse checking keyword.\n*
\n* \n* checking-keyword := check | checkpanic\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseCheckingKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*\n* Parse continue statement.\n* continue-stmt := continue ; \n*\n* @return continue statement\n*/\nprivate STNode parseContinueStatement() {\nstartContext(ParserRuleContext.CONTINUE_STATEMENT);\nSTNode continueKeyword = parseContinueKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createContinueStatementNode(continueKeyword, semicolon);\n}\n/**\n* Parse continue-keyword.\n*\n* @return continue-keyword node\n*/\nprivate STNode parseContinueKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.CONTINUE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse return statement.\n* return-stmt := return [ action-or-expr ] ;\n*\n* @return Return statement\n*/\nprivate STNode parseReturnStatement() {\nstartContext(ParserRuleContext.RETURN_STMT);\nSTNode returnKeyword = parseReturnKeyword();\nSTNode returnRhs = parseReturnStatementRhs(returnKeyword);\nendContext();\nreturn returnRhs;\n}\n/**\n* Parse return-keyword.\n*\n* @return Return-keyword node\n*/\nprivate STNode parseReturnKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RETURN_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RETURN_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse break statement.\n* break-stmt := break ; \n*\n* @return break statement\n*/\nprivate STNode parseBreakStatement() {\nstartContext(ParserRuleContext.BREAK_STATEMENT);\nSTNode breakKeyword = parseBreakKeyword();\nSTNode semicolon = parseSemicolon();\nendContext();\nreturn STNodeFactory.createBreakStatementNode(breakKeyword, semicolon);\n}\n/**\n* Parse break-keyword.\n*\n* @return break-keyword node\n*/\nprivate STNode parseBreakKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.BREAK_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.BREAK_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse the right hand side of a return statement.\n*
\n* \n* return-stmt-rhs := ; | action-or-expr ;\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseReturnStatementRhs(STNode returnKeyword) {\nSTNode expr;\nSTToken token = peek();\nswitch (token.kind) {\ncase SEMICOLON_TOKEN:\nexpr = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nexpr = parseActionOrExpression();\nbreak;\n}\nSTNode semicolon = parseSemicolon();\nreturn STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon);\n}\n/**\n* Parse mapping constructor expression.\n*
\n* mapping-constructor-expr := { [field (, field)*] }\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingConstructorExpr() {\nstartContext(ParserRuleContext.MAPPING_CONSTRUCTOR);\nSTNode openBrace = parseOpenBrace();\nSTNode fields = parseMappingConstructorFields();\nSTNode closeBrace = parseCloseBrace();\nendContext();\nreturn STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);\n}\n/**\n* Parse mapping constructor fields.\n*\n* @return Parsed node\n*/\nprivate STNode parseMappingConstructorFields() {\nSTToken nextToken = peek();\nif (isEndOfMappingConstructor(nextToken.kind)) {\nreturn STNodeFactory.createEmptyNodeList();\n}\nList fields = new ArrayList<>();\nSTNode field = parseMappingField(ParserRuleContext.FIRST_MAPPING_FIELD);\nfields.add(field);\nreturn parseMappingConstructorFields(fields);\n}\nprivate STNode parseMappingConstructorFields(List fields) {\nSTToken nextToken;\nSTNode mappingFieldEnd;\nnextToken = peek();\nwhile (!isEndOfMappingConstructor(nextToken.kind)) {\nmappingFieldEnd = parseMappingFieldEnd(nextToken.kind);\nif (mappingFieldEnd == null) {\nbreak;\n}\nfields.add(mappingFieldEnd);\nSTNode field = parseMappingField(ParserRuleContext.MAPPING_FIELD);\nfields.add(field);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(fields);\n}\nprivate STNode parseMappingFieldEnd() {\nreturn parseMappingFieldEnd(peek().kind);\n}\nprivate STNode parseMappingFieldEnd(SyntaxKind nextTokenKind) {\nswitch (nextTokenKind) {\ncase COMMA_TOKEN:\nreturn parseComma();\ncase CLOSE_BRACE_TOKEN:\nreturn null;\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.MAPPING_FIELD_END);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingFieldEnd(solution.tokenKind);\n}\n}\nprivate boolean isEndOfMappingConstructor(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\ncase READONLY_KEYWORD:\nreturn false;\ncase EOF_TOKEN:\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase SEMICOLON_TOKEN:\ncase PUBLIC_KEYWORD:\ncase PRIVATE_KEYWORD:\ncase FUNCTION_KEYWORD:\ncase RETURNS_KEYWORD:\ncase SERVICE_KEYWORD:\ncase TYPE_KEYWORD:\ncase LISTENER_KEYWORD:\ncase CONST_KEYWORD:\ncase FINAL_KEYWORD:\ncase RESOURCE_KEYWORD:\nreturn true;\ndefault:\nreturn isSimpleType(tokenKind);\n}\n}\n/**\n* Parse mapping constructor field.\n*
\n* field := specific-field | computed-name-field | spread-field\n*\n* @param fieldContext Context of the mapping field\n* @param leadingComma Leading comma\n* @return Parsed node\n*/\nprivate STNode parseMappingField(ParserRuleContext fieldContext) {\nSTToken nextToken = peek();\nreturn parseMappingField(nextToken.kind, fieldContext);\n}\nprivate STNode parseMappingField(SyntaxKind tokenKind, ParserRuleContext fieldContext) {\nswitch (tokenKind) {\ncase IDENTIFIER_TOKEN:\nSTNode readonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseSpecificFieldWithOptionalValue(readonlyKeyword);\ncase STRING_LITERAL:\nreadonlyKeyword = STNodeFactory.createEmptyNode();\nreturn parseQualifiedSpecificField(readonlyKeyword);\ncase READONLY_KEYWORD:\nreadonlyKeyword = parseReadonlyKeyword();\nreturn parseSpecificField(readonlyKeyword);\ncase OPEN_BRACKET_TOKEN:\nreturn parseComputedField();\ncase ELLIPSIS_TOKEN:\nSTNode ellipsis = parseEllipsis();\nSTNode expr = parseExpression();\nreturn STNodeFactory.createSpreadFieldNode(ellipsis, expr);\ncase CLOSE_BRACE_TOKEN:\nif (fieldContext == ParserRuleContext.FIRST_MAPPING_FIELD) {\nreturn null;\n}\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, fieldContext, fieldContext);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseMappingField(solution.tokenKind, fieldContext);\n}\n}\nprivate STNode parseSpecificField(STNode readonlyKeyword) {\nSTToken nextToken = peek();\nreturn parseSpecificField(nextToken.kind, readonlyKeyword);\n}\nprivate STNode parseSpecificField(SyntaxKind nextTokenKind, STNode readonlyKeyword) {\nswitch (nextTokenKind) {\ncase STRING_LITERAL:\nreturn parseQualifiedSpecificField(readonlyKeyword);\ncase IDENTIFIER_TOKEN:\nreturn parseSpecificFieldWithOptionalValue(readonlyKeyword);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD, readonlyKeyword);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseSpecificField(solution.tokenKind, readonlyKeyword);\n}\n}\nprivate STNode parseQualifiedSpecificField(STNode readonlyKeyword) {\nSTNode key = parseStringLiteral();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\n/**\n* Parse mapping constructor specific-field with an optional value.\n*\n* @return Parsed node\n*/\nprivate STNode parseSpecificFieldWithOptionalValue(STNode readonlyKeyword) {\nSTNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);\nreturn parseSpecificFieldRhs(readonlyKeyword, key);\n}\nprivate STNode parseSpecificFieldRhs(STNode readonlyKeyword, STNode key) {\nSTToken nextToken = peek();\nreturn parseSpecificFieldRhs(nextToken.kind, readonlyKeyword, key);\n}\nprivate STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode readonlyKeyword, STNode key) {\nSTNode colon;\nSTNode valueExpr;\nswitch (tokenKind) {\ncase COLON_TOKEN:\ncolon = parseColon();\nvalueExpr = parseExpression();\nbreak;\ncase COMMA_TOKEN:\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nbreak;\ndefault:\nif (isEndOfMappingConstructor(tokenKind)) {\ncolon = STNodeFactory.createEmptyNode();\nvalueExpr = STNodeFactory.createEmptyNode();\nbreak;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, readonlyKeyword, key);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn 
parseSpecificFieldRhs(solution.tokenKind, readonlyKeyword, key);\n}\nreturn STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);\n}\n/**\n* Parse string literal.\n*\n* @return Parsed node\n*/\nprivate STNode parseStringLiteral() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.STRING_LITERAL) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.STRING_LITERAL);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse colon token.\n*\n* @return Parsed node\n*/\nprivate STNode parseColon() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.COLON_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COLON);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse readonly keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseReadonlyKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.READONLY_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.READONLY_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse computed-name-field of a mapping constructor expression.\n*
\n* computed-name-field := [ field-name-expr ] : value-expr\n*\n* @param leadingComma Leading comma\n* @return Parsed node\n*/\nprivate STNode parseComputedField() {\nstartContext(ParserRuleContext.COMPUTED_FIELD_NAME);\nSTNode openBracket = parseOpenBracket();\nSTNode fieldNameExpr = parseExpression();\nSTNode closeBracket = parseCloseBracket();\nendContext();\nSTNode colon = parseColon();\nSTNode valueExpr = parseExpression();\nreturn STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon, valueExpr);\n}\n/**\n* Parse open bracket.\n*\n* @return Parsed node\n*/\nprivate STNode parseOpenBracket() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.OPEN_BRACKET);\nreturn sol.recoveredNode;\n}\n}\n/**\n*
\n* Parse compound assignment statement, which takes the following format.\n*
\n* assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;\n*\n* @return Parsed node\n*/\nprivate STNode parseCompoundAssignmentStmt() {\nstartContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT);\nSTNode varName = parseVariableName();\nSTNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName);\nendContext();\nreturn compoundAssignmentStmt;\n}\n/**\n*
\n* Parse the RHS portion of the compound assignment.\n*
\n* compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;\n*\n* @param lvExpr LHS expression\n* @return Parsed node\n*/\nprivate STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) {\nSTNode binaryOperator = parseCompoundBinaryOperator();\nSTNode equalsToken = parseAssignOp();\nSTNode expr = parseActionOrExpression();\nSTNode semicolon = parseSemicolon();\nendContext();\nboolean lvExprValid = isValidLVExpr(lvExpr);\nif (!lvExprValid) {\nSTNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\nSTNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);\nlvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,\nDiagnosticErrorCode.ERROR_INVALID_EXPR_IN_COMPOUND_ASSIGNMENT_LHS);\n}\nreturn STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr,\nsemicolon);\n}\n/**\n* Parse compound binary operator.\n* BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>>\n*\n* @return Parsed node\n*/\nprivate STNode parseCompoundBinaryOperator() {\nSTToken token = peek();\nif (isCompoundBinaryOperator(token.kind)) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse service declaration.\n*
\n* \n* service-decl := metadata service [variable-name] on expression-list service-body-block\n*\n* expression-list := expression (, expression)*\n*
\n*\n* @param metadata Metadata\n* @return Parsed node\n*/\nprivate STNode parseServiceDecl(STNode metadata) {\nstartContext(ParserRuleContext.SERVICE_DECL);\nSTNode serviceKeyword = parseServiceKeyword();\nSTNode serviceDecl = parseServiceRhs(metadata, serviceKeyword);\nendContext();\nreturn serviceDecl;\n}\n/**\n* Parse rhs of the service declaration.\n*
\n* \n* service-rhs := [variable-name] on expression-list service-body-block\n* \n*\n* @param metadata Metadata\n* @param serviceKeyword Service keyword\n* @return Parsed node\n*/\nprivate STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) {\nSTNode serviceName = parseServiceName();\nSTNode onKeyword = parseOnKeyword();\nSTNode expressionList = parseListeners();\nSTNode serviceBody = parseServiceBody();\nonKeyword =\ncloneWithDiagnosticIfListEmpty(expressionList, onKeyword, DiagnosticErrorCode.ERROR_MISSING_EXPRESSION);\nreturn STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword,\nexpressionList, serviceBody);\n}\nprivate STNode parseServiceName() {\nSTToken nextToken = peek();\nreturn parseServiceName(nextToken.kind);\n}\nprivate STNode parseServiceName(SyntaxKind kind) {\nswitch (kind) {\ncase IDENTIFIER_TOKEN:\nreturn parseIdentifier(ParserRuleContext.SERVICE_NAME);\ncase ON_KEYWORD:\nreturn STNodeFactory.createEmptyNode();\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseServiceName(solution.tokenKind);\n}\n}\n/**\n* Parse service keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseServiceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.SERVICE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether the given token kind is a compound binary operator.\n*
\n* compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>>\n*\n* @param tokenKind STToken kind\n* @return true if the token kind refers to a binary operator. false otherwise\n*/\nprivate boolean isCompoundBinaryOperator(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase PLUS_TOKEN:\ncase MINUS_TOKEN:\ncase SLASH_TOKEN:\ncase ASTERISK_TOKEN:\ncase BITWISE_AND_TOKEN:\ncase BITWISE_XOR_TOKEN:\ncase PIPE_TOKEN:\ncase DOUBLE_LT_TOKEN:\ncase DOUBLE_GT_TOKEN:\ncase TRIPPLE_GT_TOKEN:\nreturn getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse on keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseOnKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.ON_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.ON_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Parse listener references.\n*
\n* expression-list := expression (, expression)*\n*\n* @return Parsed node\n*/\nprivate STNode parseListeners() {\nstartContext(ParserRuleContext.LISTENERS_LIST);\nList listeners = new ArrayList<>();\nSTToken nextToken = peek();\nif (isEndOfExpressionsList(nextToken.kind)) {\nendContext();\nreturn STNodeFactory.createEmptyNodeList();\n}\nSTNode leadingComma = STNodeFactory.createEmptyNode();\nSTNode exprListItem = parseExpressionListItem(leadingComma);\nlisteners.add(exprListItem);\nnextToken = peek();\nwhile (!isEndOfExpressionsList(nextToken.kind)) {\nleadingComma = parseComma();\nexprListItem = parseExpressionListItem(leadingComma);\nlisteners.add(exprListItem);\nnextToken = peek();\n}\nendContext();\nreturn STNodeFactory.createNodeList(listeners);\n}\nprivate boolean isEndOfExpressionsList(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase COMMA_TOKEN:\nreturn false;\ncase EOF_TOKEN:\ncase SEMICOLON_TOKEN:\ncase CLOSE_BRACKET_TOKEN:\ncase CLOSE_BRACE_TOKEN:\ncase CLOSE_PAREN_TOKEN:\ncase OPEN_BRACE_TOKEN:\nreturn true;\ndefault:\nreturn !isValidExprStart(tokenKind);\n}\n}\n/**\n* Parse expression list item.\n*\n* @param leadingComma Leading comma\n* @return Parsed node\n*/\nprivate STNode parseExpressionListItem(STNode leadingComma) {\nSTNode expr = parseExpression();\nreturn STNodeFactory.createExpressionListItemNode(leadingComma, expr);\n}\n/**\n* Parse service body.\n*
\n* \n* service-body-block := { service-method-defn* }\n* \n*\n* @return Parsed node\n*/\nprivate STNode parseServiceBody() {\nSTNode openBrace = parseOpenBrace();\nSTNode resources = parseResources();\nSTNode closeBrace = parseCloseBrace();\nreturn STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace);\n}\n/**\n* Parse service resource definitions.\n*\n* @return Parsed node\n*/\nprivate STNode parseResources() {\nList resources = new ArrayList<>();\nSTToken nextToken = peek();\nwhile (!isEndOfServiceDecl(nextToken.kind)) {\nSTNode serviceMethod = parseResource();\nif (serviceMethod == null) {\nbreak;\n}\nresources.add(serviceMethod);\nnextToken = peek();\n}\nreturn STNodeFactory.createNodeList(resources);\n}\nprivate boolean isEndOfServiceDecl(SyntaxKind tokenKind) {\nswitch (tokenKind) {\ncase CLOSE_BRACE_TOKEN:\ncase EOF_TOKEN:\ncase CLOSE_BRACE_PIPE_TOKEN:\ncase TYPE_KEYWORD:\ncase SERVICE_KEYWORD:\nreturn true;\ndefault:\nreturn false;\n}\n}\n/**\n* Parse resource definition (i.e. service-method-defn).\n*
\n* \n* service-body-block := { service-method-defn* }\n*\n* service-method-defn := metadata [resource] function identifier function-signature method-defn-body\n*
\n*\n* @return Parsed node\n*/\nprivate STNode parseResource() {\nSTToken nextToken = peek();\nreturn parseResource(nextToken.kind);\n}\nprivate STNode parseResource(SyntaxKind nextTokenKind) {\nSTNode metadata;\nswitch (nextTokenKind) {\ncase RESOURCE_KEYWORD:\ncase FUNCTION_KEYWORD:\nmetadata = createEmptyMetadata();\nbreak;\ncase DOCUMENTATION_STRING:\ncase AT_TOKEN:\nmetadata = parseMetaData(nextTokenKind);\nnextTokenKind = peek().kind;\nbreak;\ndefault:\nif (isEndOfServiceDecl(nextTokenKind)) {\nreturn null;\n}\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RESOURCE_DEF);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseResource(solution.tokenKind);\n}\nreturn parseResource(nextTokenKind, metadata);\n}\nprivate STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) {\nswitch (nextTokenKind) {\ncase RESOURCE_KEYWORD:\nSTNode resourceKeyword = parseResourceKeyword();\nreturn parseFuncDefinition(metadata, false, resourceKeyword, null);\ncase FUNCTION_KEYWORD:\nreturn parseFuncDefinition(metadata, false, STNodeFactory.createEmptyNode(), null);\ndefault:\nSTToken token = peek();\nSolution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata);\nif (solution.action == Action.REMOVE) {\nreturn solution.recoveredNode;\n}\nreturn parseResource(solution.tokenKind, metadata);\n}\n}\n/**\n* Parse resource keyword.\n*\n* @return Parsed node\n*/\nprivate STNode parseResourceKeyword() {\nSTToken token = peek();\nif (token.kind == SyntaxKind.RESOURCE_KEYWORD) {\nreturn consume();\n} else {\nSolution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD);\nreturn sol.recoveredNode;\n}\n}\n/**\n* Check whether next construct is a service declaration or not. This method is\n* used to determine whether an end-of-block is reached, if the next token is\n* a service-keyword. Because service-keyword can be used in statements as well\n* as in top-level node (service-decl). We have reached a service-decl, then\n* it could be due to missing close-brace at the end of the current block.\n*\n* @return true if the next construct is a service declaration.\n* false otherwise\n*/" + }, + { + "comment": "@lukecwik Thanks for the suggestions. Added changes as suggested and also added unit test for the same. Please review my changes.", + "method_body": "public static DirectRunner fromOptions(PipelineOptions options) {\noptions = MAPPER.convertValue(MAPPER.valueToTree(options), DirectOptions.class);\nreturn new DirectRunner(options.as(DirectOptions.class));\n}", + "target_code": "options = MAPPER.convertValue(MAPPER.valueToTree(options), DirectOptions.class);", + "method_body_after": "public static DirectRunner fromOptions(PipelineOptions options) {\nreturn new DirectRunner(options.as(DirectOptions.class));\n}", + "context_before": "class DirectRunner extends PipelineRunner {\nenum Enforcement {\nENCODABILITY {\n@Override\npublic boolean appliesTo(PCollection collection, DirectGraph graph) {\nreturn true;\n}\n},\nIMMUTABILITY {\n@Override\npublic boolean appliesTo(PCollection collection, DirectGraph graph) {\nreturn CONTAINS_UDF.contains(\nPTransformTranslation.urnForTransform(graph.getProducer(collection).getTransform()));\n}\n};\n/**\n* The set of {@link PTransform PTransforms} that execute a UDF. 
Useful for some enforcements.\n*/\nprivate static final Set CONTAINS_UDF =\nImmutableSet.of(\nPTransformTranslation.READ_TRANSFORM_URN, PTransformTranslation.PAR_DO_TRANSFORM_URN);\npublic abstract boolean appliesTo(PCollection collection, DirectGraph graph);\nstatic Set enabled(DirectOptions options) {\nEnumSet enabled = EnumSet.noneOf(Enforcement.class);\nif (options.isEnforceEncodability()) {\nenabled.add(ENCODABILITY);\n}\nif (options.isEnforceImmutability()) {\nenabled.add(IMMUTABILITY);\n}\nreturn Collections.unmodifiableSet(enabled);\n}\nstatic BundleFactory bundleFactoryFor(Set enforcements, DirectGraph graph) {\nBundleFactory bundleFactory =\nenforcements.contains(Enforcement.ENCODABILITY)\n? CloningBundleFactory.create()\n: ImmutableListBundleFactory.create();\nif (enforcements.contains(Enforcement.IMMUTABILITY)) {\nbundleFactory = ImmutabilityCheckingBundleFactory.create(bundleFactory, graph);\n}\nreturn bundleFactory;\n}\nprivate static Map> defaultModelEnforcements(\nSet enabledEnforcements) {\nImmutableMap.Builder> enforcements =\nImmutableMap.builder();\nImmutableList.Builder enabledParDoEnforcements =\nImmutableList.builder();\nif (enabledEnforcements.contains(Enforcement.IMMUTABILITY)) {\nenabledParDoEnforcements.add(ImmutabilityEnforcementFactory.create());\n}\nCollection parDoEnforcements = enabledParDoEnforcements.build();\nenforcements.put(PTransformTranslation.PAR_DO_TRANSFORM_URN, parDoEnforcements);\nreturn enforcements.build();\n}\n}\nprivate final DirectOptions options;\nprivate final Set enabledEnforcements;\nprivate Supplier clockSupplier = new NanosOffsetClockSupplier();\nprivate static final ObjectMapper MAPPER =\nnew ObjectMapper()\n.registerModules(ObjectMapper.findModules(ReflectHelpers.findClassLoader()));\n/** Construct a {@link DirectRunner} from the provided options. 
*/\nprivate DirectRunner(DirectOptions options) {\nthis.options = options;\nthis.enabledEnforcements = Enforcement.enabled(options);\n}\nSupplier getClockSupplier() {\nreturn clockSupplier;\n}\nvoid setClockSupplier(Supplier supplier) {\nthis.clockSupplier = supplier;\n}\n@Override\npublic DirectPipelineResult run(Pipeline pipeline) {\npipeline.replaceAll(defaultTransformOverrides());\nMetricsEnvironment.setMetricsSupported(true);\ntry {\nDirectGraphVisitor graphVisitor = new DirectGraphVisitor();\npipeline.traverseTopologically(graphVisitor);\n@SuppressWarnings(\"rawtypes\")\nKeyedPValueTrackingVisitor keyedPValueVisitor = KeyedPValueTrackingVisitor.create();\npipeline.traverseTopologically(keyedPValueVisitor);\nDisplayDataValidator.validatePipeline(pipeline);\nDisplayDataValidator.validateOptions(options);\nExecutorService metricsPool =\nExecutors.newCachedThreadPool(\nnew ThreadFactoryBuilder()\n.setThreadFactory(MoreExecutors.platformThreadFactory())\n.setDaemon(false)\n.setNameFormat(\"direct-metrics-counter-committer\")\n.build());\nDirectGraph graph = graphVisitor.getGraph();\nEvaluationContext context =\nEvaluationContext.create(\nclockSupplier.get(),\nEnforcement.bundleFactoryFor(enabledEnforcements, graph),\ngraph,\nkeyedPValueVisitor.getKeyedPValues(),\nmetricsPool);\nTransformEvaluatorRegistry registry =\nTransformEvaluatorRegistry.javaSdkNativeRegistry(context, options);\nPipelineExecutor executor =\nExecutorServiceParallelExecutor.create(\noptions.getTargetParallelism(),\nregistry,\nEnforcement.defaultModelEnforcements(enabledEnforcements),\ncontext,\nmetricsPool);\nexecutor.start(graph, RootProviderRegistry.javaNativeRegistry(context, options));\nDirectPipelineResult result = new DirectPipelineResult(executor, context);\nif (options.isBlockOnRun()) {\ntry {\nresult.waitUntilFinish();\n} catch (UserCodeException userException) {\nthrow new PipelineExecutionException(userException.getCause());\n} catch (Throwable t) {\nif (t instanceof RuntimeException) {\nthrow (RuntimeException) t;\n}\nthrow new RuntimeException(t);\n}\n}\nreturn result;\n} finally {\nMetricsEnvironment.setMetricsSupported(false);\n}\n}\n/**\n* The default set of transform overrides to use in the {@link DirectRunner}.\n*\n*
The order in which overrides is applied is important, as some overrides are expanded into a\n* composite. If the composite contains {@link PTransform PTransforms} which are also overridden,\n* these PTransforms must occur later in the iteration order. {@link ImmutableMap} has an\n* iteration order based on the order at which elements are added to it.\n*/\n@SuppressWarnings(\"rawtypes\")\n@VisibleForTesting\nList defaultTransformOverrides() {\nDirectTestOptions testOptions = options.as(DirectTestOptions.class);\nImmutableList.Builder builder = ImmutableList.builder();\nif (testOptions.isRunnerDeterminedSharding()) {\nbuilder.add(\nPTransformOverride.of(\nPTransformMatchers.writeWithRunnerDeterminedSharding(),\nnew WriteWithShardingFactory())); /* Uses a view internally. */\n}\nbuilder =\nbuilder\n.add(\nPTransformOverride.of(\nMultiStepCombine.matcher(), MultiStepCombine.Factory.create()))\n.add(\nPTransformOverride.of(\nPTransformMatchers.urnEqualTo(PTransformTranslation.CREATE_VIEW_TRANSFORM_URN),\nnew ViewOverrideFactory())) /* Uses pardos and GBKs */\n.add(\nPTransformOverride.of(\nPTransformMatchers.urnEqualTo(PTransformTranslation.TEST_STREAM_TRANSFORM_URN),\nnew DirectTestStreamFactory(this))) /* primitive */\n.add(\nPTransformOverride.of(\nPTransformMatchers.splittableParDo(), new ParDoMultiOverrideFactory()))\n.add(\nPTransformOverride.of(\nPTransformMatchers.stateOrTimerParDo(), new ParDoMultiOverrideFactory()))\n.add(\nPTransformOverride.of(\nPTransformMatchers.urnEqualTo(\nPTransformTranslation.SPLITTABLE_PROCESS_KEYED_URN),\nnew SplittableParDoViaKeyedWorkItems.OverrideFactory()))\n.add(\nPTransformOverride.of(\nPTransformMatchers.urnEqualTo(SplittableParDo.SPLITTABLE_GBKIKWI_URN),\nnew DirectGBKIntoKeyedWorkItemsOverrideFactory())) /* Returns a GBKO */\n.add(\nPTransformOverride.of(\nPTransformMatchers.urnEqualTo(PTransformTranslation.GROUP_BY_KEY_TRANSFORM_URN),\nnew DirectGroupByKeyOverrideFactory())); /* returns two chained primitives. */\nreturn builder.build();\n}\n/** The result of running a {@link Pipeline} with the {@link DirectRunner}. */\npublic static class DirectPipelineResult implements PipelineResult {\nprivate final PipelineExecutor executor;\nprivate final EvaluationContext evaluationContext;\nprivate State state;\nprivate DirectPipelineResult(PipelineExecutor executor, EvaluationContext evaluationContext) {\nthis.executor = executor;\nthis.evaluationContext = evaluationContext;\nthis.state = State.RUNNING;\n}\n@Override\npublic State getState() {\nreturn state;\n}\n@Override\npublic MetricResults metrics() {\nreturn evaluationContext.getMetrics();\n}\n/**\n* {@inheritDoc}.\n*\n*
If the pipeline terminates abnormally by throwing an {@link Exception}, this will rethrow\n* the original {@link Exception}. Future calls to {@link\n* org.apache.beam.sdk.PipelineResult.State\n*/\n@Override\npublic State waitUntilFinish() {\nreturn waitUntilFinish(Duration.ZERO);\n}\n@Override\npublic State cancel() {\nthis.state = executor.getPipelineState();\nif (!this.state.isTerminal()) {\nexecutor.stop();\nthis.state = executor.getPipelineState();\n}\nreturn executor.getPipelineState();\n}\n/**\n* {@inheritDoc}.\n*\n*
If the pipeline terminates abnormally by throwing an {@link Exception}, this will rethrow\n* the original {@link Exception}. Future calls to {@link\n* org.apache.beam.sdk.PipelineResult.State\n*/\n@Override\npublic State waitUntilFinish(Duration duration) {\nState startState = this.state;\nif (!startState.isTerminal()) {\ntry {\nstate = executor.waitUntilFinish(duration);\n} catch (UserCodeException uce) {\nthrow new Pipeline.PipelineExecutionException(uce.getCause());\n} catch (Exception e) {\nif (e instanceof InterruptedException) {\nThread.currentThread().interrupt();\n}\nif (e instanceof RuntimeException) {\nthrow (RuntimeException) e;\n}\nthrow new RuntimeException(e);\n}\n}\nreturn this.state;\n}\n}\n/** A {@link Supplier} that creates a {@link NanosOffsetClock}. */\nprivate static class NanosOffsetClockSupplier implements Supplier {\n@Override\npublic Clock get() {\nreturn NanosOffsetClock.create();\n}\n}\n}", + "context_after": "class DirectRunner extends PipelineRunner {\nenum Enforcement {\nENCODABILITY {\n@Override\npublic boolean appliesTo(PCollection collection, DirectGraph graph) {\nreturn true;\n}\n},\nIMMUTABILITY {\n@Override\npublic boolean appliesTo(PCollection collection, DirectGraph graph) {\nreturn CONTAINS_UDF.contains(\nPTransformTranslation.urnForTransform(graph.getProducer(collection).getTransform()));\n}\n};\n/**\n* The set of {@link PTransform PTransforms} that execute a UDF. Useful for some enforcements.\n*/\nprivate static final Set CONTAINS_UDF =\nImmutableSet.of(\nPTransformTranslation.READ_TRANSFORM_URN, PTransformTranslation.PAR_DO_TRANSFORM_URN);\npublic abstract boolean appliesTo(PCollection collection, DirectGraph graph);\nstatic Set enabled(DirectOptions options) {\nEnumSet enabled = EnumSet.noneOf(Enforcement.class);\nif (options.isEnforceEncodability()) {\nenabled.add(ENCODABILITY);\n}\nif (options.isEnforceImmutability()) {\nenabled.add(IMMUTABILITY);\n}\nreturn Collections.unmodifiableSet(enabled);\n}\nstatic BundleFactory bundleFactoryFor(Set enforcements, DirectGraph graph) {\nBundleFactory bundleFactory =\nenforcements.contains(Enforcement.ENCODABILITY)\n? CloningBundleFactory.create()\n: ImmutableListBundleFactory.create();\nif (enforcements.contains(Enforcement.IMMUTABILITY)) {\nbundleFactory = ImmutabilityCheckingBundleFactory.create(bundleFactory, graph);\n}\nreturn bundleFactory;\n}\nprivate static Map> defaultModelEnforcements(\nSet enabledEnforcements) {\nImmutableMap.Builder> enforcements =\nImmutableMap.builder();\nImmutableList.Builder enabledParDoEnforcements =\nImmutableList.builder();\nif (enabledEnforcements.contains(Enforcement.IMMUTABILITY)) {\nenabledParDoEnforcements.add(ImmutabilityEnforcementFactory.create());\n}\nCollection parDoEnforcements = enabledParDoEnforcements.build();\nenforcements.put(PTransformTranslation.PAR_DO_TRANSFORM_URN, parDoEnforcements);\nreturn enforcements.build();\n}\n}\nprivate DirectOptions options;\nprivate final Set enabledEnforcements;\nprivate Supplier clockSupplier = new NanosOffsetClockSupplier();\nprivate static final ObjectMapper MAPPER =\nnew ObjectMapper()\n.registerModules(ObjectMapper.findModules(ReflectHelpers.findClassLoader()));\n/** Construct a {@link DirectRunner} from the provided options. 
*/\nprivate DirectRunner(DirectOptions options) {\nthis.options = options;\nthis.enabledEnforcements = Enforcement.enabled(options);\n}\nSupplier getClockSupplier() {\nreturn clockSupplier;\n}\nvoid setClockSupplier(Supplier supplier) {\nthis.clockSupplier = supplier;\n}\n@Override\npublic DirectPipelineResult run(Pipeline pipeline) {\ntry {\noptions =\nMAPPER\n.readValue(MAPPER.writeValueAsBytes(options), PipelineOptions.class)\n.as(DirectOptions.class);\n} catch (IOException e) {\nthrow new IllegalArgumentException(\n\"PipelineOptions specified failed to serialize to JSON.\", e);\n}\npipeline.replaceAll(defaultTransformOverrides());\nMetricsEnvironment.setMetricsSupported(true);\ntry {\nDirectGraphVisitor graphVisitor = new DirectGraphVisitor();\npipeline.traverseTopologically(graphVisitor);\n@SuppressWarnings(\"rawtypes\")\nKeyedPValueTrackingVisitor keyedPValueVisitor = KeyedPValueTrackingVisitor.create();\npipeline.traverseTopologically(keyedPValueVisitor);\nDisplayDataValidator.validatePipeline(pipeline);\nDisplayDataValidator.validateOptions(options);\nExecutorService metricsPool =\nExecutors.newCachedThreadPool(\nnew ThreadFactoryBuilder()\n.setThreadFactory(MoreExecutors.platformThreadFactory())\n.setDaemon(false)\n.setNameFormat(\"direct-metrics-counter-committer\")\n.build());\nDirectGraph graph = graphVisitor.getGraph();\nEvaluationContext context =\nEvaluationContext.create(\nclockSupplier.get(),\nEnforcement.bundleFactoryFor(enabledEnforcements, graph),\ngraph,\nkeyedPValueVisitor.getKeyedPValues(),\nmetricsPool);\nTransformEvaluatorRegistry registry =\nTransformEvaluatorRegistry.javaSdkNativeRegistry(context, options);\nPipelineExecutor executor =\nExecutorServiceParallelExecutor.create(\noptions.getTargetParallelism(),\nregistry,\nEnforcement.defaultModelEnforcements(enabledEnforcements),\ncontext,\nmetricsPool);\nexecutor.start(graph, RootProviderRegistry.javaNativeRegistry(context, options));\nDirectPipelineResult result = new DirectPipelineResult(executor, context);\nif (options.isBlockOnRun()) {\ntry {\nresult.waitUntilFinish();\n} catch (UserCodeException userException) {\nthrow new PipelineExecutionException(userException.getCause());\n} catch (Throwable t) {\nif (t instanceof RuntimeException) {\nthrow (RuntimeException) t;\n}\nthrow new RuntimeException(t);\n}\n}\nreturn result;\n} finally {\nMetricsEnvironment.setMetricsSupported(false);\n}\n}\n/**\n* The default set of transform overrides to use in the {@link DirectRunner}.\n*\n*
The order in which overrides is applied is important, as some overrides are expanded into a\n* composite. If the composite contains {@link PTransform PTransforms} which are also overridden,\n* these PTransforms must occur later in the iteration order. {@link ImmutableMap} has an\n* iteration order based on the order at which elements are added to it.\n*/\n@SuppressWarnings(\"rawtypes\")\n@VisibleForTesting\nList defaultTransformOverrides() {\nDirectTestOptions testOptions = options.as(DirectTestOptions.class);\nImmutableList.Builder builder = ImmutableList.builder();\nif (testOptions.isRunnerDeterminedSharding()) {\nbuilder.add(\nPTransformOverride.of(\nPTransformMatchers.writeWithRunnerDeterminedSharding(),\nnew WriteWithShardingFactory())); /* Uses a view internally. */\n}\nbuilder =\nbuilder\n.add(\nPTransformOverride.of(\nMultiStepCombine.matcher(), MultiStepCombine.Factory.create()))\n.add(\nPTransformOverride.of(\nPTransformMatchers.urnEqualTo(PTransformTranslation.CREATE_VIEW_TRANSFORM_URN),\nnew ViewOverrideFactory())) /* Uses pardos and GBKs */\n.add(\nPTransformOverride.of(\nPTransformMatchers.urnEqualTo(PTransformTranslation.TEST_STREAM_TRANSFORM_URN),\nnew DirectTestStreamFactory(this))) /* primitive */\n.add(\nPTransformOverride.of(\nPTransformMatchers.splittableParDo(), new ParDoMultiOverrideFactory()))\n.add(\nPTransformOverride.of(\nPTransformMatchers.stateOrTimerParDo(), new ParDoMultiOverrideFactory()))\n.add(\nPTransformOverride.of(\nPTransformMatchers.urnEqualTo(\nPTransformTranslation.SPLITTABLE_PROCESS_KEYED_URN),\nnew SplittableParDoViaKeyedWorkItems.OverrideFactory()))\n.add(\nPTransformOverride.of(\nPTransformMatchers.urnEqualTo(SplittableParDo.SPLITTABLE_GBKIKWI_URN),\nnew DirectGBKIntoKeyedWorkItemsOverrideFactory())) /* Returns a GBKO */\n.add(\nPTransformOverride.of(\nPTransformMatchers.urnEqualTo(PTransformTranslation.GROUP_BY_KEY_TRANSFORM_URN),\nnew DirectGroupByKeyOverrideFactory())); /* returns two chained primitives. */\nreturn builder.build();\n}\n/** The result of running a {@link Pipeline} with the {@link DirectRunner}. */\npublic static class DirectPipelineResult implements PipelineResult {\nprivate final PipelineExecutor executor;\nprivate final EvaluationContext evaluationContext;\nprivate State state;\nprivate DirectPipelineResult(PipelineExecutor executor, EvaluationContext evaluationContext) {\nthis.executor = executor;\nthis.evaluationContext = evaluationContext;\nthis.state = State.RUNNING;\n}\n@Override\npublic State getState() {\nreturn state;\n}\n@Override\npublic MetricResults metrics() {\nreturn evaluationContext.getMetrics();\n}\n/**\n* {@inheritDoc}.\n*\n*
If the pipeline terminates abnormally by throwing an {@link Exception}, this will rethrow\n* the original {@link Exception}. Future calls to {@link\n* org.apache.beam.sdk.PipelineResult.State\n*/\n@Override\npublic State waitUntilFinish() {\nreturn waitUntilFinish(Duration.ZERO);\n}\n@Override\npublic State cancel() {\nthis.state = executor.getPipelineState();\nif (!this.state.isTerminal()) {\nexecutor.stop();\nthis.state = executor.getPipelineState();\n}\nreturn executor.getPipelineState();\n}\n/**\n* {@inheritDoc}.\n*\n*
If the pipeline terminates abnormally by throwing an {@link Exception}, this will rethrow\n* the original {@link Exception}. Future calls to {@link\n* org.apache.beam.sdk.PipelineResult.State\n*/\n@Override\npublic State waitUntilFinish(Duration duration) {\nState startState = this.state;\nif (!startState.isTerminal()) {\ntry {\nstate = executor.waitUntilFinish(duration);\n} catch (UserCodeException uce) {\nthrow new Pipeline.PipelineExecutionException(uce.getCause());\n} catch (Exception e) {\nif (e instanceof InterruptedException) {\nThread.currentThread().interrupt();\n}\nif (e instanceof RuntimeException) {\nthrow (RuntimeException) e;\n}\nthrow new RuntimeException(e);\n}\n}\nreturn this.state;\n}\n}\n/** A {@link Supplier} that creates a {@link NanosOffsetClock}. */\nprivate static class NanosOffsetClockSupplier implements Supplier {\n@Override\npublic Clock get() {\nreturn NanosOffsetClock.create();\n}\n}\n}" + }, + { + "comment": "Could we init `oldValue` with `Column`'s full fields constructor, just like `DataRecordMerger.mergeColumn(DataRecord, DataRecord)`?", + "method_body": "private void updateRecordOldValue(final Record record) {\nif (!(record instanceof DataRecord)) {\nreturn;\n}\nDataRecord dataRecord = (DataRecord) record;\nif (!ScalingConstant.UPDATE.equals(dataRecord.getType())) {\nreturn;\n}\nfor (Column col: dataRecord.getColumns()) {\nif (col.isPrimaryKey() && col.isUpdated()) {\ncol.setOldValue(col.getValue());\n}\n}\n}", + "target_code": "}", + "method_body_after": "private void updateRecordOldValue(final Record record) {\nif (!(record instanceof DataRecord)) {\nreturn;\n}\nDataRecord dataRecord = (DataRecord) record;\nif (!ScalingConstant.UPDATE.equals(dataRecord.getType())) {\nreturn;\n}\nfor (Column col: dataRecord.getColumns()) {\nif (col.isPrimaryKey() && col.isUpdated()) {\ncol.setOldValue(col.getValue());\n}\n}\n}", + "context_before": "class OpenGaussWalDumper extends AbstractScalingExecutor implements IncrementalDumper {\nprivate final WalPosition walPosition;\nprivate final DumperConfiguration dumperConfig;\nprivate final OpenGaussLogicalReplication logicalReplication = new OpenGaussLogicalReplication();\nprivate final WalEventConverter walEventConverter;\nprivate String slotName = OpenGaussLogicalReplication.SLOT_NAME_PREFIX;\n@Setter\nprivate Channel channel;\npublic OpenGaussWalDumper(final DumperConfiguration dumperConfig, final ScalingPosition position) {\nwalPosition = (WalPosition) position;\nif (!StandardJDBCDataSourceConfiguration.class.equals(dumperConfig.getDataSourceConfig().getClass())) {\nthrow new UnsupportedOperationException(\"PostgreSQLWalDumper only support JDBCDataSourceConfiguration\");\n}\nthis.dumperConfig = dumperConfig;\nwalEventConverter = new WalEventConverter(dumperConfig);\n}\n@Override\npublic void start() {\nsuper.start();\ndump();\n}\nprivate PgConnection getReplicationConn() throws SQLException {\nreturn logicalReplication\n.createPgConnection((StandardJDBCDataSourceConfiguration) dumperConfig.getDataSourceConfig())\n.unwrap(PgConnection.class);\n}\nprivate MppdbDecodingPlugin initReplication() {\nMppdbDecodingPlugin plugin = null;\ntry {\nDataSource dataSource = dumperConfig.getDataSourceConfig().toDataSource();\ntry (Connection conn = dataSource.getConnection()) {\nslotName = OpenGaussLogicalReplication.getUniqueSlotName(conn);\nOpenGaussLogicalReplication.createIfNotExists(conn);\nOpenGaussTimestampUtils utils = new OpenGaussTimestampUtils(conn.unwrap(PgConnection.class).getTimestampUtils());\nplugin = new 
MppdbDecodingPlugin(utils);\n}\n} catch (SQLException sqlExp) {\nlog.warn(\"create replication slot failed!\");\n}\nreturn plugin;\n}\nprivate void dump() {\nDecodingPlugin decodingPlugin = initReplication();\ntry (PgConnection pgConnection = getReplicationConn()) {\nPGReplicationStream stream = logicalReplication.createReplicationStream(pgConnection, walPosition.getLogSequenceNumber(), slotName);\nwhile (isRunning()) {\nByteBuffer message = stream.readPending();\nif (null == message) {\nThreadUtil.sleep(10L);\ncontinue;\n}\nAbstractWalEvent event = decodingPlugin.decode(message,\nnew OpenGaussLogSequenceNumber(stream.getLastReceiveLSN()));\nRecord record = walEventConverter.convert(event);\nif (!(event instanceof PlaceholderEvent) && log.isDebugEnabled()) {\nlog.debug(\"dump, event={}, record={}\", event, record);\n}\nupdateRecordOldValue(record);\npushRecord(record);\n}\n} catch (final SQLException ex) {\nif (ex.getMessage().contains(\"is already active\")) {\nreturn;\n}\nthrow new ScalingTaskExecuteException(ex);\n}\n}\nprivate void pushRecord(final Record record) {\ntry {\nchannel.pushRecord(record);\n} catch (final InterruptedException ignored) {\n}\n}\n}", + "context_after": "class OpenGaussWalDumper extends AbstractScalingExecutor implements IncrementalDumper {\nprivate final WalPosition walPosition;\nprivate final DumperConfiguration dumperConfig;\nprivate final OpenGaussLogicalReplication logicalReplication = new OpenGaussLogicalReplication();\nprivate final WalEventConverter walEventConverter;\nprivate String slotName = OpenGaussLogicalReplication.SLOT_NAME_PREFIX;\n@Setter\nprivate Channel channel;\npublic OpenGaussWalDumper(final DumperConfiguration dumperConfig, final ScalingPosition position) {\nwalPosition = (WalPosition) position;\nif (!StandardJDBCDataSourceConfiguration.class.equals(dumperConfig.getDataSourceConfig().getClass())) {\nthrow new UnsupportedOperationException(\"PostgreSQLWalDumper only support JDBCDataSourceConfiguration\");\n}\nthis.dumperConfig = dumperConfig;\nwalEventConverter = new WalEventConverter(dumperConfig);\n}\n@Override\npublic void start() {\nsuper.start();\ndump();\n}\nprivate PgConnection getReplicationConn() throws SQLException {\nreturn logicalReplication\n.createPgConnection((StandardJDBCDataSourceConfiguration) dumperConfig.getDataSourceConfig())\n.unwrap(PgConnection.class);\n}\nprivate MppdbDecodingPlugin initReplication() {\nMppdbDecodingPlugin plugin = null;\ntry {\nDataSource dataSource = dumperConfig.getDataSourceConfig().toDataSource();\ntry (Connection conn = dataSource.getConnection()) {\nslotName = OpenGaussLogicalReplication.getUniqueSlotName(conn);\nOpenGaussLogicalReplication.createIfNotExists(conn);\nOpenGaussTimestampUtils utils = new OpenGaussTimestampUtils(conn.unwrap(PgConnection.class).getTimestampUtils());\nplugin = new MppdbDecodingPlugin(utils);\n}\n} catch (SQLException sqlExp) {\nlog.warn(\"create replication slot failed!\");\n}\nreturn plugin;\n}\nprivate void dump() {\nDecodingPlugin decodingPlugin = initReplication();\ntry (PgConnection pgConnection = getReplicationConn()) {\nPGReplicationStream stream = logicalReplication.createReplicationStream(pgConnection, walPosition.getLogSequenceNumber(), slotName);\nwhile (isRunning()) {\nByteBuffer message = stream.readPending();\nif (null == message) {\nThreadUtil.sleep(10L);\ncontinue;\n}\nAbstractWalEvent event = decodingPlugin.decode(message,\nnew OpenGaussLogSequenceNumber(stream.getLastReceiveLSN()));\nRecord record = walEventConverter.convert(event);\nif (!(event 
instanceof PlaceholderEvent) && log.isDebugEnabled()) {\nlog.debug(\"dump, event={}, record={}\", event, record);\n}\nupdateRecordOldValue(record);\npushRecord(record);\n}\n} catch (final SQLException ex) {\nif (ex.getMessage().contains(\"is already active\")) {\nreturn;\n}\nthrow new ScalingTaskExecuteException(ex);\n}\n}\nprivate void pushRecord(final Record record) {\ntry {\nchannel.pushRecord(record);\n} catch (final InterruptedException ignored) {\n}\n}\n}" + }, + { + "comment": "?", + "method_body": "public boolean isGroupWellBalanced(OptionalInt groupId) {\nif (groupId.isEmpty()) return false;\nGroup group = groups().get(groupId.getAsInt());\nreturn (group != null) && group.isContentWellBalanced();\n}", + "target_code": "Group group = groups().get(groupId.getAsInt());", + "method_body_after": "public boolean isGroupWellBalanced(OptionalInt groupId) {\nif (groupId.isEmpty()) return false;\nGroup group = groups().get(groupId.getAsInt());\nreturn (group != null) && group.isContentWellBalanced();\n}", + "context_before": "class PongCallback implements PongHandler {\nprivate final ClusterMonitor clusterMonitor;\nprivate final Node node;\nPongCallback(Node node, ClusterMonitor clusterMonitor) {\nthis.node = node;\nthis.clusterMonitor = clusterMonitor;\n}\n@Override\npublic void handle(Pong pong) {\nif (pong.badResponse()) {\nclusterMonitor.failed(node, pong.error().get());\n} else {\nif (pong.activeDocuments().isPresent()) {\nnode.setActiveDocuments(pong.activeDocuments().get());\nnode.setBlockingWrites(pong.isBlockingWrites());\n}\nclusterMonitor.responded(node);\n}\n}\n}", + "context_after": "class PongCallback implements PongHandler {\nprivate final ClusterMonitor clusterMonitor;\nprivate final Node node;\nPongCallback(Node node, ClusterMonitor clusterMonitor) {\nthis.node = node;\nthis.clusterMonitor = clusterMonitor;\n}\n@Override\npublic void handle(Pong pong) {\nif (pong.badResponse()) {\nclusterMonitor.failed(node, pong.error().get());\n} else {\nif (pong.activeDocuments().isPresent()) {\nnode.setActiveDocuments(pong.activeDocuments().get());\nnode.setBlockingWrites(pong.isBlockingWrites());\n}\nclusterMonitor.responded(node);\n}\n}\n}" + }, + { + "comment": "The order doesn't matter. 
We are after a combined set of those.", + "method_body": "private static Configuration classpathConfig(Project project, LaunchMode mode) {\nif (LaunchMode.TEST.equals(mode)) {\nreturn project.getConfigurations().getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME);\n}\nif (LaunchMode.DEVELOPMENT.equals(mode)) {\nreturn project.getConfigurations().create(\"quarkusDevMode\").extendsFrom(\nproject.getConfigurations().getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME),\nproject.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));\n}\nreturn project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME);\n}", + "target_code": "return project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME);", + "method_body_after": "private static Configuration classpathConfig(Project project, LaunchMode mode) {\nif (LaunchMode.TEST.equals(mode)) {\nreturn project.getConfigurations().getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME);\n}\nif (LaunchMode.DEVELOPMENT.equals(mode)) {\nreturn project.getConfigurations().create(\"quarkusDevMode\").extendsFrom(\nproject.getConfigurations().getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME),\nproject.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));\n}\nreturn project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME);\n}", + "context_before": "class QuarkusModelBuilder implements ParameterizedToolingModelBuilder {\n@Override\npublic boolean canBuild(String modelName) {\nreturn modelName.equals(QuarkusModel.class.getName());\n}\n@Override\npublic Class getParameterType() {\nreturn ModelParameter.class;\n}\n@Override\npublic Object buildAll(String modelName, Project project) {\nfinal ModelParameterImpl modelParameter = new ModelParameterImpl();\nmodelParameter.setMode(LaunchMode.DEVELOPMENT.toString());\nreturn buildAll(modelName, modelParameter, project);\n}\n@Override\npublic Object buildAll(String modelName, ModelParameter parameter, Project project) {\nLaunchMode mode = LaunchMode.valueOf(((ModelParameter) parameter).getMode());\nfinal Set deploymentDeps = getEnforcedPlatforms(project);\nfinal Map appDependencies = new HashMap<>();\nfinal Set visitedDeps = new HashSet<>();\nfinal ResolvedConfiguration configuration = classpathConfig(project, mode).getResolvedConfiguration();\ncollectDependencies(configuration, mode, project, appDependencies);\ncollectFirstMetDeploymentDeps(configuration.getFirstLevelModuleDependencies(), appDependencies,\ndeploymentDeps, visitedDeps);\nfinal Set extensionDependencies = collectExtensionDependencies(project, deploymentDeps);\nArtifactCoords appArtifactCoords = new ArtifactCoordsImpl(project.getGroup().toString(), project.getName(),\nproject.getVersion().toString());\nreturn new QuarkusModelImpl(new WorkspaceImpl(appArtifactCoords, getWorkspace(project.getRootProject())),\nnew HashSet<>(appDependencies.values()),\nextensionDependencies);\n}\npublic Set getWorkspace(Project project) {\nSet modules = new HashSet<>();\nfor (Project subproject : project.getAllprojects()) {\nfinal Convention convention = subproject.getConvention();\nJavaPluginConvention javaConvention = convention.findPlugin(JavaPluginConvention.class);\nif (javaConvention == null) {\ncontinue;\n}\nmodules.add(getWorkspaceModule(subproject));\n}\nreturn modules;\n}\nprivate WorkspaceModule getWorkspaceModule(Project project) {\nArtifactCoords appArtifactCoords = new 
ArtifactCoordsImpl(project.getGroup().toString(), project.getName(),\nproject.getVersion().toString());\nfinal SourceSet mainSourceSet = QuarkusGradleUtils.getSourceSet(project, SourceSet.MAIN_SOURCE_SET_NAME);\nreturn new WorkspaceModuleImpl(appArtifactCoords, project.getProjectDir().getAbsoluteFile(),\nproject.getBuildDir().getAbsoluteFile(), getSourceSourceSet(mainSourceSet), convert(mainSourceSet));\n}\nprivate Set getEnforcedPlatforms(Project project) {\nfinal Set directExtension = new HashSet<>();\nfinal Configuration impl = project.getConfigurations()\n.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME);\nfor (org.gradle.api.artifacts.Dependency d : impl.getAllDependencies()) {\nif (!(d instanceof ModuleDependency)) {\ncontinue;\n}\nfinal ModuleDependency module = (ModuleDependency) d;\nfinal Category category = module.getAttributes().getAttribute(Category.CATEGORY_ATTRIBUTE);\nif (category != null && Category.ENFORCED_PLATFORM.equals(category.getName())) {\ndirectExtension.add(d);\n}\n}\nreturn directExtension;\n}\nprivate void collectFirstMetDeploymentDeps(Set dependencies,\nMap appDependencies, Set extensionDeps,\nSet visited) {\nfor (ResolvedDependency d : dependencies) {\nArtifactCoords key = new ArtifactCoordsImpl(d.getModuleGroup(), d.getModuleName(), \"\");\nif (!visited.add(key)) {\ncontinue;\n}\nDependency appDep = appDependencies.get(key);\nif (appDep == null) {\ncontinue;\n}\nfinal org.gradle.api.artifacts.Dependency deploymentArtifact = getDeploymentArtifact(appDep);\nboolean addChildExtension = true;\nif (deploymentArtifact != null && addChildExtension) {\nextensionDeps.add(deploymentArtifact);\naddChildExtension = false;\n}\nfinal Set resolvedChildren = d.getChildren();\nif (addChildExtension && !resolvedChildren.isEmpty()) {\ncollectFirstMetDeploymentDeps(resolvedChildren, appDependencies, extensionDeps, visited);\n}\n}\n}\nprivate org.gradle.api.artifacts.Dependency getDeploymentArtifact(Dependency dependency) {\nfor (File file : dependency.getPaths()) {\nif (!file.exists()) {\ncontinue;\n}\nProperties depsProperties;\nif (file.isDirectory()) {\nPath quarkusDescr = file.toPath()\n.resolve(BootstrapConstants.META_INF)\n.resolve(BootstrapConstants.DESCRIPTOR_FILE_NAME);\nif (!Files.exists(quarkusDescr)) {\ncontinue;\n}\ndepsProperties = QuarkusModelHelper.resolveDescriptor(quarkusDescr);\n} else {\ntry (FileSystem artifactFs = FileSystems.newFileSystem(file.toPath(), getClass().getClassLoader())) {\nPath quarkusDescr = artifactFs.getPath(BootstrapConstants.META_INF)\n.resolve(BootstrapConstants.DESCRIPTOR_FILE_NAME);\nif (!Files.exists(quarkusDescr)) {\ncontinue;\n}\ndepsProperties = QuarkusModelHelper.resolveDescriptor(quarkusDescr);\n} catch (IOException e) {\nthrow new GradleException(\"Failed to process \" + file, e);\n}\n}\nString value = depsProperties.getProperty(BootstrapConstants.PROP_DEPLOYMENT_ARTIFACT);\nString[] split = value.split(\":\");\nreturn new DefaultExternalModuleDependency(split[0], split[1], split[2], null);\n}\nreturn null;\n}\nprivate Set collectExtensionDependencies(Project project,\nCollection extensions) {\nfinal Set platformDependencies = new HashSet<>();\nfinal Configuration deploymentConfig = project.getConfigurations()\n.detachedConfiguration(extensions.toArray(new org.gradle.api.artifacts.Dependency[0]));\nfinal ResolvedConfiguration rc = deploymentConfig.getResolvedConfiguration();\nfor (ResolvedArtifact a : rc.getResolvedArtifacts()) {\nif (!isDependency(a)) {\ncontinue;\n}\nfinal Dependency dependency = 
toDependency(a);\nplatformDependencies.add(dependency);\n}\nreturn platformDependencies;\n}\nprivate void collectDependencies(ResolvedConfiguration configuration,\nLaunchMode mode, Project project, Map appDependencies) {\nfor (ResolvedArtifact a : configuration.getResolvedArtifacts()) {\nif (!isDependency(a)) {\ncontinue;\n}\nfinal DependencyImpl dep = initDependency(a);\nif (LaunchMode.DEVELOPMENT.equals(mode) &&\na.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier) {\nProject projectDep = project.getRootProject()\n.findProject(((ProjectComponentIdentifier) a.getId().getComponentIdentifier()).getProjectPath());\naddDevModePaths(dep, a, projectDep);\n} else {\ndep.addPath(a.getFile());\n}\nappDependencies.put((ArtifactCoords) new ArtifactCoordsImpl(dep.getGroupId(), dep.getName(), \"\"), dep);\n}\n}\nprivate void addDevModePaths(final DependencyImpl dep, ResolvedArtifact a, Project project) {\nfinal JavaPluginConvention javaConvention = project.getConvention().findPlugin(JavaPluginConvention.class);\nif (javaConvention != null) {\nSourceSet mainSourceSet = javaConvention.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME);\nfinal File classesDir = new File(QuarkusGradleUtils.getClassesDir(mainSourceSet, project.getBuildDir(), false));\nif (classesDir.exists()) {\ndep.addPath(classesDir);\n}\nfor (File resourcesDir : mainSourceSet.getResources().getSourceDirectories()) {\nif (resourcesDir.exists()) {\ndep.addPath(resourcesDir);\n}\n}\nfor (File outputDir : project.getTasks().findByName(JavaPlugin.PROCESS_RESOURCES_TASK_NAME)\n.getOutputs().getFiles()) {\nif (outputDir.exists()) {\ndep.addPath(outputDir);\n}\n}\n} else {\ndep.addPath(a.getFile());\n}\n}\nprivate SourceSetImpl convert(SourceSet sourceSet) {\nreturn new SourceSetImpl(\nsourceSet.getOutput().getClassesDirs().getFiles(),\nsourceSet.getOutput().getResourcesDir());\n}\nprivate io.quarkus.bootstrap.resolver.model.SourceSet getSourceSourceSet(SourceSet sourceSet) {\nreturn new SourceSetImpl(sourceSet.getAllJava().getSrcDirs(),\nsourceSet.getResources().getSourceDirectories().getSingleFile());\n}\nprivate static boolean isDependency(ResolvedArtifact a) {\nreturn BootstrapConstants.JAR.equalsIgnoreCase(a.getExtension()) || \"exe\".equalsIgnoreCase(a.getExtension()) ||\na.getFile().isDirectory();\n}\n/**\n* Creates an instance of Dependency and associates it with the ResolvedArtifact's path\n*/\nstatic Dependency toDependency(ResolvedArtifact a) {\nfinal DependencyImpl dependency = initDependency(a);\ndependency.addPath(a.getFile());\nreturn dependency;\n}\n/**\n* Creates an instance of DependencyImpl but does not associates it with a path\n*/\nprivate static DependencyImpl initDependency(ResolvedArtifact a) {\nfinal String[] split = a.getModuleVersion().toString().split(\":\");\nreturn new DependencyImpl(split[1], split[0], split.length > 2 ? 
split[2] : null,\n\"compile\", a.getType(), a.getClassifier());\n}\n}", + "context_after": "class QuarkusModelBuilder implements ParameterizedToolingModelBuilder {\n@Override\npublic boolean canBuild(String modelName) {\nreturn modelName.equals(QuarkusModel.class.getName());\n}\n@Override\npublic Class getParameterType() {\nreturn ModelParameter.class;\n}\n@Override\npublic Object buildAll(String modelName, Project project) {\nfinal ModelParameterImpl modelParameter = new ModelParameterImpl();\nmodelParameter.setMode(LaunchMode.DEVELOPMENT.toString());\nreturn buildAll(modelName, modelParameter, project);\n}\n@Override\npublic Object buildAll(String modelName, ModelParameter parameter, Project project) {\nLaunchMode mode = LaunchMode.valueOf(((ModelParameter) parameter).getMode());\nfinal Set deploymentDeps = getEnforcedPlatforms(project);\nfinal Map appDependencies = new HashMap<>();\nfinal Set visitedDeps = new HashSet<>();\nfinal ResolvedConfiguration configuration = classpathConfig(project, mode).getResolvedConfiguration();\ncollectDependencies(configuration, mode, project, appDependencies);\ncollectFirstMetDeploymentDeps(configuration.getFirstLevelModuleDependencies(), appDependencies,\ndeploymentDeps, visitedDeps);\nfinal Set extensionDependencies = collectExtensionDependencies(project, deploymentDeps);\nArtifactCoords appArtifactCoords = new ArtifactCoordsImpl(project.getGroup().toString(), project.getName(),\nproject.getVersion().toString());\nreturn new QuarkusModelImpl(new WorkspaceImpl(appArtifactCoords, getWorkspace(project.getRootProject(), mode)),\nnew HashSet<>(appDependencies.values()),\nextensionDependencies);\n}\npublic Set getWorkspace(Project project, LaunchMode mode) {\nSet modules = new HashSet<>();\nfor (Project subproject : project.getAllprojects()) {\nfinal Convention convention = subproject.getConvention();\nJavaPluginConvention javaConvention = convention.findPlugin(JavaPluginConvention.class);\nif (javaConvention == null) {\ncontinue;\n}\nmodules.add(getWorkspaceModule(subproject, mode));\n}\nreturn modules;\n}\nprivate WorkspaceModule getWorkspaceModule(Project project, LaunchMode mode) {\nArtifactCoords appArtifactCoords = new ArtifactCoordsImpl(project.getGroup().toString(), project.getName(),\nproject.getVersion().toString());\nfinal SourceSet mainSourceSet = QuarkusGradleUtils.getSourceSet(project, SourceSet.MAIN_SOURCE_SET_NAME);\nfinal SourceSetImpl modelSourceSet = convert(mainSourceSet);\nif (mode.equals(LaunchMode.TEST) && project.getPlugins().hasPlugin(JavaTestFixturesPlugin.class)) {\nfinal SourceSet fixtureSourceSet = QuarkusGradleUtils.getSourceSet(project,\nTestFixturesSupport.TEST_FIXTURE_SOURCESET_NAME);\nmodelSourceSet.addSourceDirectories(fixtureSourceSet.getOutput().getClassesDirs().getFiles());\n}\nreturn new WorkspaceModuleImpl(appArtifactCoords, project.getProjectDir().getAbsoluteFile(),\nproject.getBuildDir().getAbsoluteFile(), getSourceSourceSet(mainSourceSet), modelSourceSet);\n}\nprivate Set getEnforcedPlatforms(Project project) {\nfinal Set directExtension = new HashSet<>();\nfinal Configuration impl = project.getConfigurations()\n.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME);\nfor (org.gradle.api.artifacts.Dependency d : impl.getAllDependencies()) {\nif (!(d instanceof ModuleDependency)) {\ncontinue;\n}\nfinal ModuleDependency module = (ModuleDependency) d;\nfinal Category category = module.getAttributes().getAttribute(Category.CATEGORY_ATTRIBUTE);\nif (category != null && 
Category.ENFORCED_PLATFORM.equals(category.getName())) {\ndirectExtension.add(d);\n}\n}\nreturn directExtension;\n}\nprivate void collectFirstMetDeploymentDeps(Set dependencies,\nMap appDependencies, Set extensionDeps,\nSet visited) {\nfor (ResolvedDependency d : dependencies) {\nArtifactCoords key = new ArtifactCoordsImpl(d.getModuleGroup(), d.getModuleName(), \"\");\nif (!visited.add(key)) {\ncontinue;\n}\nDependency appDep = appDependencies.get(key);\nif (appDep == null) {\ncontinue;\n}\nfinal org.gradle.api.artifacts.Dependency deploymentArtifact = getDeploymentArtifact(appDep);\nboolean addChildExtension = true;\nif (deploymentArtifact != null && addChildExtension) {\nextensionDeps.add(deploymentArtifact);\naddChildExtension = false;\n}\nfinal Set resolvedChildren = d.getChildren();\nif (addChildExtension && !resolvedChildren.isEmpty()) {\ncollectFirstMetDeploymentDeps(resolvedChildren, appDependencies, extensionDeps, visited);\n}\n}\n}\nprivate org.gradle.api.artifacts.Dependency getDeploymentArtifact(Dependency dependency) {\nfor (File file : dependency.getPaths()) {\nif (!file.exists()) {\ncontinue;\n}\nProperties depsProperties;\nif (file.isDirectory()) {\nPath quarkusDescr = file.toPath()\n.resolve(BootstrapConstants.META_INF)\n.resolve(BootstrapConstants.DESCRIPTOR_FILE_NAME);\nif (!Files.exists(quarkusDescr)) {\ncontinue;\n}\ndepsProperties = QuarkusModelHelper.resolveDescriptor(quarkusDescr);\n} else {\ntry (FileSystem artifactFs = FileSystems.newFileSystem(file.toPath(), getClass().getClassLoader())) {\nPath quarkusDescr = artifactFs.getPath(BootstrapConstants.META_INF)\n.resolve(BootstrapConstants.DESCRIPTOR_FILE_NAME);\nif (!Files.exists(quarkusDescr)) {\ncontinue;\n}\ndepsProperties = QuarkusModelHelper.resolveDescriptor(quarkusDescr);\n} catch (IOException e) {\nthrow new GradleException(\"Failed to process \" + file, e);\n}\n}\nString value = depsProperties.getProperty(BootstrapConstants.PROP_DEPLOYMENT_ARTIFACT);\nString[] split = value.split(\":\");\nreturn new DefaultExternalModuleDependency(split[0], split[1], split[2], null);\n}\nreturn null;\n}\nprivate Set collectExtensionDependencies(Project project,\nCollection extensions) {\nfinal Set platformDependencies = new HashSet<>();\nfinal Configuration deploymentConfig = project.getConfigurations()\n.detachedConfiguration(extensions.toArray(new org.gradle.api.artifacts.Dependency[0]));\nfinal ResolvedConfiguration rc = deploymentConfig.getResolvedConfiguration();\nfor (ResolvedArtifact a : rc.getResolvedArtifacts()) {\nif (!isDependency(a)) {\ncontinue;\n}\nfinal Dependency dependency = toDependency(a);\nplatformDependencies.add(dependency);\n}\nreturn platformDependencies;\n}\nprivate void collectDependencies(ResolvedConfiguration configuration,\nLaunchMode mode, Project project, Map appDependencies) {\nfor (ResolvedArtifact a : configuration.getResolvedArtifacts()) {\nif (!isDependency(a)) {\ncontinue;\n}\nfinal DependencyImpl dep = initDependency(a);\nif (LaunchMode.DEVELOPMENT.equals(mode) &&\na.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier) {\nProject projectDep = project.getRootProject()\n.findProject(((ProjectComponentIdentifier) a.getId().getComponentIdentifier()).getProjectPath());\naddDevModePaths(dep, a, projectDep);\n} else {\ndep.addPath(a.getFile());\n}\nappDependencies.put((ArtifactCoords) new ArtifactCoordsImpl(dep.getGroupId(), dep.getName(), \"\"), dep);\n}\n}\nprivate void addDevModePaths(final DependencyImpl dep, ResolvedArtifact a, Project project) {\nfinal 
JavaPluginConvention javaConvention = project.getConvention().findPlugin(JavaPluginConvention.class);\nif (javaConvention != null) {\nSourceSet mainSourceSet = javaConvention.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME);\nfinal File classesDir = new File(QuarkusGradleUtils.getClassesDir(mainSourceSet, project.getBuildDir(), false));\nif (classesDir.exists()) {\ndep.addPath(classesDir);\n}\nfor (File resourcesDir : mainSourceSet.getResources().getSourceDirectories()) {\nif (resourcesDir.exists()) {\ndep.addPath(resourcesDir);\n}\n}\nfor (File outputDir : project.getTasks().findByName(JavaPlugin.PROCESS_RESOURCES_TASK_NAME)\n.getOutputs().getFiles()) {\nif (outputDir.exists()) {\ndep.addPath(outputDir);\n}\n}\n} else {\ndep.addPath(a.getFile());\n}\n}\nprivate SourceSetImpl convert(SourceSet sourceSet) {\nreturn new SourceSetImpl(\nsourceSet.getOutput().getClassesDirs().getFiles(),\nsourceSet.getOutput().getResourcesDir());\n}\nprivate io.quarkus.bootstrap.resolver.model.SourceSet getSourceSourceSet(SourceSet sourceSet) {\nreturn new SourceSetImpl(sourceSet.getAllJava().getSrcDirs(),\nsourceSet.getResources().getSourceDirectories().getSingleFile());\n}\nprivate static boolean isDependency(ResolvedArtifact a) {\nreturn BootstrapConstants.JAR.equalsIgnoreCase(a.getExtension()) || \"exe\".equalsIgnoreCase(a.getExtension()) ||\na.getFile().isDirectory();\n}\n/**\n* Creates an instance of Dependency and associates it with the ResolvedArtifact's path\n*/\nstatic Dependency toDependency(ResolvedArtifact a) {\nfinal DependencyImpl dependency = initDependency(a);\ndependency.addPath(a.getFile());\nreturn dependency;\n}\n/**\n* Creates an instance of DependencyImpl but does not associates it with a path\n*/\nprivate static DependencyImpl initDependency(ResolvedArtifact a) {\nfinal String[] split = a.getModuleVersion().toString().split(\":\");\nreturn new DependencyImpl(split[1], split[0], split.length > 2 ? split[2] : null,\n\"compile\", a.getType(), a.getClassifier());\n}\n}" + }, + { + "comment": "We should be sure to update the client code before 2.15 is released. 
The use of UnknownField types is probably not appropriate for production code.", + "method_body": "private static float getFractionConsumed(ReadRowsResponse response) {\nList fractionConsumedField =\nresponse.getStatus().getUnknownFields().getField(2).getFixed32List();\nif (fractionConsumedField.isEmpty()) {\nMetrics.counter(BigQueryStorageStreamReader.class, \"fraction-consumed-not-set\").inc();\nreturn 0f;\n}\nreturn Float.intBitsToFloat(Iterables.getOnlyElement(fractionConsumedField));\n}", + "target_code": "", + "method_body_after": "private static float getFractionConsumed(ReadRowsResponse response) {\nList fractionConsumedField =\nresponse.getStatus().getUnknownFields().getField(2).getFixed32List();\nif (fractionConsumedField.isEmpty()) {\nMetrics.counter(BigQueryStorageStreamReader.class, \"fraction-consumed-not-set\").inc();\nreturn 0f;\n}\nreturn Float.intBitsToFloat(Iterables.getOnlyElement(fractionConsumedField));\n}", + "context_before": "class BigQueryStorageStreamReader extends BoundedSource.BoundedReader {\nprivate final DatumReader datumReader;\nprivate final SerializableFunction parseFn;\nprivate final StorageClient storageClient;\nprivate final TableSchema tableSchema;\nprivate BigQueryStorageStreamSource source;\nprivate BigQueryServerStream responseStream;\nprivate Iterator responseIterator;\nprivate BinaryDecoder decoder;\nprivate GenericRecord record;\nprivate T current;\nprivate long currentOffset;\nprivate double fractionConsumed;\nprivate double fractionConsumedFromLastResponse;\nprivate BigQueryStorageStreamReader(\nBigQueryStorageStreamSource source, BigQueryOptions options) throws IOException {\nthis.source = source;\nthis.datumReader =\nnew GenericDatumReader<>(\nnew Schema.Parser().parse(source.readSession.getAvroSchema().getSchema()));\nthis.parseFn = source.parseFn;\nthis.storageClient = source.bqServices.getStorageClient(options);\nthis.tableSchema = fromJsonString(source.jsonTableSchema, TableSchema.class);\nthis.fractionConsumed = 0d;\nthis.fractionConsumedFromLastResponse = 0d;\n}\n@Override\npublic synchronized boolean start() throws IOException {\nBigQueryStorageStreamSource source = getCurrentSource();\nReadRowsRequest request =\nReadRowsRequest.newBuilder()\n.setReadPosition(\nStreamPosition.newBuilder().setStream(source.stream).setOffset(currentOffset))\n.build();\nresponseStream = storageClient.readRows(request);\nresponseIterator = responseStream.iterator();\nLOGGER.info(\"Started BigQuery Storage API read from stream {}.\", source.stream.getName());\nreturn readNextRecord();\n}\n@Override\npublic synchronized boolean advance() throws IOException {\ncurrentOffset++;\nreturn readNextRecord();\n}\nprivate synchronized boolean readNextRecord() throws IOException {\nwhile (decoder == null || decoder.isEnd()) {\nif (!responseIterator.hasNext()) {\nfractionConsumed = 1d;\nreturn false;\n}\nfractionConsumed = fractionConsumedFromLastResponse;\nReadRowsResponse nextResponse = responseIterator.next();\ndecoder =\nDecoderFactory.get()\n.binaryDecoder(\nnextResponse.getAvroRows().getSerializedBinaryRows().toByteArray(), decoder);\nfractionConsumedFromLastResponse = getFractionConsumed(nextResponse);\n}\nrecord = datumReader.read(record, decoder);\ncurrent = parseFn.apply(new SchemaAndRecord(record, tableSchema));\nreturn true;\n}\n@Override\npublic T getCurrent() throws NoSuchElementException {\nreturn current;\n}\n@Override\npublic synchronized void close() {\nstorageClient.close();\n}\n@Override\npublic synchronized BigQueryStorageStreamSource 
getCurrentSource() {\nreturn source;\n}\n@Override\npublic BoundedSource splitAtFraction(double fraction) {\nMetrics.counter(BigQueryStorageStreamReader.class, \"split-at-fraction-calls\").inc();\nLOGGER.debug(\n\"Received BigQuery Storage API split request for stream {} at fraction {}.\",\nsource.stream.getName(),\nfraction);\nSplitReadStreamRequest splitRequest =\nSplitReadStreamRequest.newBuilder()\n.setOriginalStream(source.stream)\n.setUnknownFields(\nUnknownFieldSet.newBuilder()\n.addField(\n2,\nUnknownFieldSet.Field.newBuilder()\n.addFixed32(java.lang.Float.floatToIntBits((float) fraction))\n.build())\n.build())\n.build();\nSplitReadStreamResponse splitResponse = storageClient.splitReadStream(splitRequest);\nif (!splitResponse.hasPrimaryStream() || !splitResponse.hasRemainderStream()) {\nMetrics.counter(\nBigQueryStorageStreamReader.class,\n\"split-at-fraction-calls-failed-due-to-impossible-split-point\")\n.inc();\nLOGGER.info(\n\"BigQuery Storage API stream {} cannot be split at {}.\",\nsource.stream.getName(),\nfraction);\nreturn null;\n}\nsynchronized (this) {\nBigQueryServerStream newResponseStream;\nIterator newResponseIterator;\ntry {\nnewResponseStream =\nstorageClient.readRows(\nReadRowsRequest.newBuilder()\n.setReadPosition(\nStreamPosition.newBuilder()\n.setStream(splitResponse.getPrimaryStream())\n.setOffset(currentOffset + 1))\n.build());\nnewResponseIterator = newResponseStream.iterator();\nnewResponseIterator.hasNext();\n} catch (FailedPreconditionException e) {\nMetrics.counter(\nBigQueryStorageStreamReader.class,\n\"split-at-fraction-calls-failed-due-to-bad-split-point\")\n.inc();\nLOGGER.info(\n\"BigQuery Storage API split of stream {} abandoned because the primary stream is to \"\n+ \"the left of the split fraction {}.\",\nsource.stream.getName(),\nfraction);\nreturn null;\n} catch (Exception e) {\nMetrics.counter(\nBigQueryStorageStreamReader.class,\n\"split-at-fraction-calls-failed-due-to-other-reasons\")\n.inc();\nLOGGER.error(\"BigQuery Storage API stream split failed.\", e);\nreturn null;\n}\nresponseStream.cancel();\nsource = source.fromExisting(splitResponse.getPrimaryStream());\nresponseStream = newResponseStream;\nresponseIterator = newResponseIterator;\ndecoder = null;\n}\nMetrics.counter(BigQueryStorageStreamReader.class, \"split-at-fraction-calls-successful\")\n.inc();\nLOGGER.info(\n\"Successfully split BigQuery Storage API stream. 
Split response: {}\", splitResponse);\nreturn source.fromExisting(splitResponse.getRemainderStream());\n}\n@Override\npublic synchronized Double getFractionConsumed() {\nreturn fractionConsumed;\n}\n}", + "context_after": "class BigQueryStorageStreamReader extends BoundedSource.BoundedReader {\nprivate final DatumReader datumReader;\nprivate final SerializableFunction parseFn;\nprivate final StorageClient storageClient;\nprivate final TableSchema tableSchema;\nprivate BigQueryStorageStreamSource source;\nprivate BigQueryServerStream responseStream;\nprivate Iterator responseIterator;\nprivate BinaryDecoder decoder;\nprivate GenericRecord record;\nprivate T current;\nprivate long currentOffset;\nprivate double fractionConsumed;\nprivate double fractionConsumedFromLastResponse;\nprivate BigQueryStorageStreamReader(\nBigQueryStorageStreamSource source, BigQueryOptions options) throws IOException {\nthis.source = source;\nthis.datumReader =\nnew GenericDatumReader<>(\nnew Schema.Parser().parse(source.readSession.getAvroSchema().getSchema()));\nthis.parseFn = source.parseFn;\nthis.storageClient = source.bqServices.getStorageClient(options);\nthis.tableSchema = fromJsonString(source.jsonTableSchema, TableSchema.class);\nthis.fractionConsumed = 0d;\nthis.fractionConsumedFromLastResponse = 0d;\n}\n@Override\npublic synchronized boolean start() throws IOException {\nBigQueryStorageStreamSource source = getCurrentSource();\nReadRowsRequest request =\nReadRowsRequest.newBuilder()\n.setReadPosition(\nStreamPosition.newBuilder().setStream(source.stream).setOffset(currentOffset))\n.build();\nresponseStream = storageClient.readRows(request);\nresponseIterator = responseStream.iterator();\nLOGGER.info(\"Started BigQuery Storage API read from stream {}.\", source.stream.getName());\nreturn readNextRecord();\n}\n@Override\npublic synchronized boolean advance() throws IOException {\ncurrentOffset++;\nreturn readNextRecord();\n}\nprivate synchronized boolean readNextRecord() throws IOException {\nwhile (decoder == null || decoder.isEnd()) {\nif (!responseIterator.hasNext()) {\nfractionConsumed = 1d;\nreturn false;\n}\nfractionConsumed = fractionConsumedFromLastResponse;\nReadRowsResponse nextResponse = responseIterator.next();\ndecoder =\nDecoderFactory.get()\n.binaryDecoder(\nnextResponse.getAvroRows().getSerializedBinaryRows().toByteArray(), decoder);\nfractionConsumedFromLastResponse = getFractionConsumed(nextResponse);\n}\nrecord = datumReader.read(record, decoder);\ncurrent = parseFn.apply(new SchemaAndRecord(record, tableSchema));\nreturn true;\n}\n@Override\npublic T getCurrent() throws NoSuchElementException {\nreturn current;\n}\n@Override\npublic synchronized void close() {\nstorageClient.close();\n}\n@Override\npublic synchronized BigQueryStorageStreamSource getCurrentSource() {\nreturn source;\n}\n@Override\npublic BoundedSource splitAtFraction(double fraction) {\nMetrics.counter(BigQueryStorageStreamReader.class, \"split-at-fraction-calls\").inc();\nLOGGER.debug(\n\"Received BigQuery Storage API split request for stream {} at fraction {}.\",\nsource.stream.getName(),\nfraction);\nSplitReadStreamRequest splitRequest =\nSplitReadStreamRequest.newBuilder()\n.setOriginalStream(source.stream)\n.setUnknownFields(\nUnknownFieldSet.newBuilder()\n.addField(\n2,\nUnknownFieldSet.Field.newBuilder()\n.addFixed32(java.lang.Float.floatToIntBits((float) fraction))\n.build())\n.build())\n.build();\nSplitReadStreamResponse splitResponse = storageClient.splitReadStream(splitRequest);\nif 
(!splitResponse.hasPrimaryStream() || !splitResponse.hasRemainderStream()) {\nMetrics.counter(\nBigQueryStorageStreamReader.class,\n\"split-at-fraction-calls-failed-due-to-impossible-split-point\")\n.inc();\nLOGGER.info(\n\"BigQuery Storage API stream {} cannot be split at {}.\",\nsource.stream.getName(),\nfraction);\nreturn null;\n}\nsynchronized (this) {\nBigQueryServerStream newResponseStream;\nIterator newResponseIterator;\ntry {\nnewResponseStream =\nstorageClient.readRows(\nReadRowsRequest.newBuilder()\n.setReadPosition(\nStreamPosition.newBuilder()\n.setStream(splitResponse.getPrimaryStream())\n.setOffset(currentOffset + 1))\n.build());\nnewResponseIterator = newResponseStream.iterator();\nnewResponseIterator.hasNext();\n} catch (FailedPreconditionException e) {\nMetrics.counter(\nBigQueryStorageStreamReader.class,\n\"split-at-fraction-calls-failed-due-to-bad-split-point\")\n.inc();\nLOGGER.info(\n\"BigQuery Storage API split of stream {} abandoned because the primary stream is to \"\n+ \"the left of the split fraction {}.\",\nsource.stream.getName(),\nfraction);\nreturn null;\n} catch (Exception e) {\nMetrics.counter(\nBigQueryStorageStreamReader.class,\n\"split-at-fraction-calls-failed-due-to-other-reasons\")\n.inc();\nLOGGER.error(\"BigQuery Storage API stream split failed.\", e);\nreturn null;\n}\nresponseStream.cancel();\nsource = source.fromExisting(splitResponse.getPrimaryStream());\nresponseStream = newResponseStream;\nresponseIterator = newResponseIterator;\ndecoder = null;\n}\nMetrics.counter(BigQueryStorageStreamReader.class, \"split-at-fraction-calls-successful\")\n.inc();\nLOGGER.info(\n\"Successfully split BigQuery Storage API stream. Split response: {}\", splitResponse);\nreturn source.fromExisting(splitResponse.getRemainderStream());\n}\n@Override\npublic synchronized Double getFractionConsumed() {\nreturn fractionConsumed;\n}\n}" + }, + { + "comment": "I did, but I misinterpreted it. 
No worries though, I'll update the PR tomorrow to set the timeout extremely high, thus preserving the essence of what this change attempts.", + "method_body": "public void nextEvent(RoutingContext ctx) {\nvertx.executeBlocking(new Handler<>() {\n@Override\npublic void handle(Promise event) {\nfinal AtomicBoolean closed = new AtomicBoolean(false);\nctx.response().closeHandler((v) -> closed.set(true));\nctx.response().exceptionHandler((v) -> closed.set(true));\nctx.request().connection().closeHandler((v) -> closed.set(true));\nctx.request().connection().exceptionHandler((v) -> closed.set(true));\nRoutingContext request = null;\ntry {\nfor (;;) {\nrequest = queue.poll(10, TimeUnit.MILLISECONDS);\nif (request != null) {\nif (closed.get()) {\nlog.debugf(\"Polled message %s but connection was closed, returning to queue\",\nrequest.get(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID));\nqueue.put(request);\nreturn;\n} else {\nbreak;\n}\n} else if (closed.get()) {\nreturn;\n}\n}\n} catch (InterruptedException e) {\nlog.error(\"nextEvent interrupted\");\nctx.fail(500);\n}\nString contentType = getEventContentType(request);\nif (contentType != null) {\nctx.response().putHeader(\"content-type\", contentType);\n}\nString traceId = request.get(AmazonLambdaApi.LAMBDA_TRACE_HEADER_KEY);\nif (traceId != null) {\nctx.response().putHeader(AmazonLambdaApi.LAMBDA_TRACE_HEADER_KEY, traceId);\n}\nString requestId = request.get(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID);\nlog.debugf(\"Starting processing %s, added to pending request map\", requestId);\nresponsePending.put(requestId, request);\nctx.response().putHeader(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID, requestId);\nBuffer body = processEventBody(request);\nif (body != null) {\nctx.response().setStatusCode(200).end(body);\n} else {\nctx.response().setStatusCode(200).end();\n}\n}\n}, false, null);\n}", + "target_code": "@Override", + "method_body_after": "public void nextEvent(RoutingContext ctx) {\nvertx.executeBlocking(new Handler<>() {\n@Override\npublic void handle(Promise event) {\nfinal AtomicBoolean closed = new AtomicBoolean(false);\nctx.response().closeHandler((v) -> closed.set(true));\nctx.response().exceptionHandler((v) -> closed.set(true));\nctx.request().connection().closeHandler((v) -> closed.set(true));\nctx.request().connection().exceptionHandler((v) -> closed.set(true));\nRoutingContext request = null;\ntry {\nfor (;;) {\nrequest = queue.poll(10, TimeUnit.MILLISECONDS);\nif (request != null) {\nif (closed.get()) {\nlog.debugf(\"Polled message %s but connection was closed, returning to queue\",\nrequest.get(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID));\nqueue.put(request);\nreturn;\n} else {\nbreak;\n}\n} else if (closed.get()) {\nreturn;\n}\n}\n} catch (InterruptedException e) {\nlog.error(\"nextEvent interrupted\");\nctx.fail(500);\n}\nString contentType = getEventContentType(request);\nif (contentType != null) {\nctx.response().putHeader(\"content-type\", contentType);\n}\nString traceId = request.get(AmazonLambdaApi.LAMBDA_TRACE_HEADER_KEY);\nif (traceId != null) {\nctx.response().putHeader(AmazonLambdaApi.LAMBDA_TRACE_HEADER_KEY, traceId);\n}\nString requestId = request.get(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID);\nlog.debugf(\"Starting processing %s, added to pending request map\", requestId);\nresponsePending.put(requestId, request);\nctx.response().putHeader(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID, requestId);\nBuffer body = processEventBody(request);\nif (body != null) 
{\nctx.response().setStatusCode(200).end(body);\n} else {\nctx.response().setStatusCode(200).end();\n}\n}\n}, false, null);\n}", + "context_before": "class MockEventServer implements Closeable {\nprotected static final Logger log = Logger.getLogger(MockEventServer.class);\npublic static final int DEFAULT_PORT = 8081;\nprivate Vertx vertx;\nprotected HttpServer httpServer;\nprotected Router router;\nprotected BlockingQueue queue;\nprotected ConcurrentHashMap responsePending = new ConcurrentHashMap<>();\nprotected ExecutorService blockingPool = Executors.newCachedThreadPool();\npublic static final String BASE_PATH = AmazonLambdaApi.API_BASE_PATH_TEST;\npublic static final String INVOCATION = BASE_PATH + AmazonLambdaApi.API_PATH_INVOCATION;\npublic static final String NEXT_INVOCATION = BASE_PATH + AmazonLambdaApi.API_PATH_INVOCATION_NEXT;\npublic static final String POST_EVENT = BASE_PATH;\nfinal AtomicBoolean closed = new AtomicBoolean();\npublic MockEventServer() {\nqueue = new LinkedBlockingQueue<>();\n}\npublic void start() {\nstart(DEFAULT_PORT);\n}\npublic void start(int port) {\nvertx = Vertx.vertx();\nhttpServer = vertx.createHttpServer();\nrouter = Router.router(vertx);\nsetupRoutes();\ntry {\nhttpServer.requestHandler(router).listen(port).toCompletionStage().toCompletableFuture().get();\n} catch (InterruptedException | ExecutionException e) {\nthrow new RuntimeException(e);\n}\nlog.info(\"Mock Lambda Event Server Started\");\n}\npublic void setupRoutes() {\nrouter.route().handler(BodyHandler.create());\nrouter.post(POST_EVENT).handler(this::postEvent);\nrouter.route(NEXT_INVOCATION).blockingHandler(this::nextEvent);\nrouter.route(INVOCATION + \":requestId\" + AmazonLambdaApi.API_PATH_REQUEUE).handler(this::handleRequeue);\nrouter.route(INVOCATION + \":requestId\" + AmazonLambdaApi.API_PATH_RESPONSE).handler(this::handleResponse);\nrouter.route(INVOCATION + \":requestId\" + AmazonLambdaApi.API_PATH_ERROR).handler(this::handleError);\ndefaultHanderSetup();\n}\nprotected void defaultHanderSetup() {\nrouter.post().handler(this::postEvent);\n}\npublic void postEvent(RoutingContext ctx) {\nString requestId = ctx.request().getHeader(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID);\nif (requestId == null) {\nrequestId = UUID.randomUUID().toString();\n}\nctx.put(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID, requestId);\nString traceId = ctx.request().getHeader(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID);\nif (traceId == null) {\ntraceId = UUID.randomUUID().toString();\n}\nctx.put(AmazonLambdaApi.LAMBDA_TRACE_HEADER_KEY, traceId);\ntry {\nlog.debugf(\"Putting message %s into the queue\", requestId);\nqueue.put(ctx);\n} catch (InterruptedException e) {\nlog.error(\"Publish interrupted\");\nctx.fail(500);\n}\n}\nprotected String getEventContentType(RoutingContext request) {\nreturn request.request().getHeader(\"content-type\");\n}\nprotected Buffer processEventBody(RoutingContext request) {\nreturn request.getBody();\n}\npublic void handleResponse(RoutingContext ctx) {\nString requestId = ctx.pathParam(\"requestId\");\nRoutingContext pending = responsePending.remove(requestId);\nif (pending == null) {\nlog.error(\"Unknown lambda request: \" + requestId);\nctx.fail(404);\nreturn;\n}\nlog.debugf(\"Sending response %s\", requestId);\nBuffer buffer = ctx.getBody();\nprocessResponse(ctx, pending, buffer);\nctx.response().setStatusCode(204);\nctx.end();\n}\npublic void handleRequeue(RoutingContext ctx) {\nString requestId = ctx.pathParam(\"requestId\");\nRoutingContext pending = 
responsePending.remove(requestId);\nif (pending == null) {\nlog.error(\"Unknown lambda request: \" + requestId);\nctx.fail(404);\nreturn;\n}\nlog.debugf(\"Requeue %s\", requestId);\ntry {\nqueue.put(pending);\n} catch (InterruptedException e) {\nlog.error(\"Publish interrupted\");\nctx.fail(500);\n}\nctx.response().setStatusCode(204);\nctx.end();\n}\npublic void processResponse(RoutingContext ctx, RoutingContext pending, Buffer buffer) {\nif (buffer != null) {\nif (ctx.request().getHeader(\"Content-Type\") != null) {\npending.response().putHeader(\"Content-Type\", ctx.request().getHeader(\"Content-Type\"));\n}\npending.response()\n.setStatusCode(200)\n.end(buffer);\n} else {\npending.response()\n.setStatusCode(204)\n.end();\n}\n}\npublic void handleError(RoutingContext ctx) {\nString requestId = ctx.pathParam(\"requestId\");\nRoutingContext pending = responsePending.remove(requestId);\nif (pending == null) {\nlog.error(\"Unknown lambda request: \" + requestId);\nctx.fail(404);\nreturn;\n}\nlog.debugf(\"Sending response %s\", requestId);\nBuffer buffer = ctx.getBody();\nprocessError(ctx, pending, buffer);\nctx.response().setStatusCode(204);\nctx.end();\n}\npublic void processError(RoutingContext ctx, RoutingContext pending, Buffer buffer) {\nif (buffer != null) {\nif (ctx.request().getHeader(\"Content-Type\") != null) {\npending.response().putHeader(\"Content-Type\", ctx.request().getHeader(\"Content-Type\"));\n}\npending.response()\n.setStatusCode(500)\n.end(buffer);\n} else {\npending.response()\n.setStatusCode(500)\n.end();\n}\n}\n@Override\npublic void close() throws IOException {\nif (!closed.compareAndSet(false, true)) {\nreturn;\n}\nlog.info(\"Stopping Mock Lambda Event Server\");\nfor (var i : responsePending.entrySet()) {\ni.getValue().response().setStatusCode(503).end();\n}\nfor (var i : queue) {\ni.response().setStatusCode(503).end();\n}\ntry {\nhttpServer.close().toCompletionStage().toCompletableFuture().get();\n} catch (InterruptedException | ExecutionException e) {\nthrow new RuntimeException(e);\n} finally {\ntry {\nvertx.close().toCompletionStage().toCompletableFuture().get();\n} catch (InterruptedException | ExecutionException e) {\nthrow new RuntimeException(e);\n} finally {\nblockingPool.shutdown();\n}\n}\n}\n}", + "context_after": "class MockEventServer implements Closeable {\nprotected static final Logger log = Logger.getLogger(MockEventServer.class);\npublic static final int DEFAULT_PORT = 8081;\nprivate Vertx vertx;\nprotected HttpServer httpServer;\nprotected Router router;\nprotected BlockingQueue queue;\nprotected ConcurrentHashMap responsePending = new ConcurrentHashMap<>();\nprotected ExecutorService blockingPool = Executors.newCachedThreadPool();\npublic static final String BASE_PATH = AmazonLambdaApi.API_BASE_PATH_TEST;\npublic static final String INVOCATION = BASE_PATH + AmazonLambdaApi.API_PATH_INVOCATION;\npublic static final String NEXT_INVOCATION = BASE_PATH + AmazonLambdaApi.API_PATH_INVOCATION_NEXT;\npublic static final String POST_EVENT = BASE_PATH;\nfinal AtomicBoolean closed = new AtomicBoolean();\npublic MockEventServer() {\nqueue = new LinkedBlockingQueue<>();\n}\npublic void start() {\nstart(DEFAULT_PORT);\n}\npublic void start(int port) {\nvertx = Vertx.vertx(new VertxOptions().setMaxWorkerExecuteTime(60).setMaxWorkerExecuteTimeUnit(TimeUnit.MINUTES));\nhttpServer = vertx.createHttpServer();\nrouter = Router.router(vertx);\nsetupRoutes();\ntry {\nhttpServer.requestHandler(router).listen(port).toCompletionStage().toCompletableFuture().get();\n} 
catch (InterruptedException | ExecutionException e) {\nthrow new RuntimeException(e);\n}\nlog.info(\"Mock Lambda Event Server Started\");\n}\npublic void setupRoutes() {\nrouter.route().handler(BodyHandler.create());\nrouter.post(POST_EVENT).handler(this::postEvent);\nrouter.route(NEXT_INVOCATION).blockingHandler(this::nextEvent);\nrouter.route(INVOCATION + \":requestId\" + AmazonLambdaApi.API_PATH_REQUEUE).handler(this::handleRequeue);\nrouter.route(INVOCATION + \":requestId\" + AmazonLambdaApi.API_PATH_RESPONSE).handler(this::handleResponse);\nrouter.route(INVOCATION + \":requestId\" + AmazonLambdaApi.API_PATH_ERROR).handler(this::handleError);\ndefaultHanderSetup();\n}\nprotected void defaultHanderSetup() {\nrouter.post().handler(this::postEvent);\n}\npublic void postEvent(RoutingContext ctx) {\nString requestId = ctx.request().getHeader(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID);\nif (requestId == null) {\nrequestId = UUID.randomUUID().toString();\n}\nctx.put(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID, requestId);\nString traceId = ctx.request().getHeader(AmazonLambdaApi.LAMBDA_RUNTIME_AWS_REQUEST_ID);\nif (traceId == null) {\ntraceId = UUID.randomUUID().toString();\n}\nctx.put(AmazonLambdaApi.LAMBDA_TRACE_HEADER_KEY, traceId);\ntry {\nlog.debugf(\"Putting message %s into the queue\", requestId);\nqueue.put(ctx);\n} catch (InterruptedException e) {\nlog.error(\"Publish interrupted\");\nctx.fail(500);\n}\n}\nprotected String getEventContentType(RoutingContext request) {\nreturn request.request().getHeader(\"content-type\");\n}\nprotected Buffer processEventBody(RoutingContext request) {\nreturn request.getBody();\n}\npublic void handleResponse(RoutingContext ctx) {\nString requestId = ctx.pathParam(\"requestId\");\nRoutingContext pending = responsePending.remove(requestId);\nif (pending == null) {\nlog.error(\"Unknown lambda request: \" + requestId);\nctx.fail(404);\nreturn;\n}\nlog.debugf(\"Sending response %s\", requestId);\nBuffer buffer = ctx.getBody();\nprocessResponse(ctx, pending, buffer);\nctx.response().setStatusCode(204);\nctx.end();\n}\npublic void handleRequeue(RoutingContext ctx) {\nString requestId = ctx.pathParam(\"requestId\");\nRoutingContext pending = responsePending.remove(requestId);\nif (pending == null) {\nlog.error(\"Unknown lambda request: \" + requestId);\nctx.fail(404);\nreturn;\n}\nlog.debugf(\"Requeue %s\", requestId);\ntry {\nqueue.put(pending);\n} catch (InterruptedException e) {\nlog.error(\"Publish interrupted\");\nctx.fail(500);\n}\nctx.response().setStatusCode(204);\nctx.end();\n}\npublic void processResponse(RoutingContext ctx, RoutingContext pending, Buffer buffer) {\nif (buffer != null) {\nif (ctx.request().getHeader(\"Content-Type\") != null) {\npending.response().putHeader(\"Content-Type\", ctx.request().getHeader(\"Content-Type\"));\n}\npending.response()\n.setStatusCode(200)\n.end(buffer);\n} else {\npending.response()\n.setStatusCode(204)\n.end();\n}\n}\npublic void handleError(RoutingContext ctx) {\nString requestId = ctx.pathParam(\"requestId\");\nRoutingContext pending = responsePending.remove(requestId);\nif (pending == null) {\nlog.error(\"Unknown lambda request: \" + requestId);\nctx.fail(404);\nreturn;\n}\nlog.debugf(\"Sending response %s\", requestId);\nBuffer buffer = ctx.getBody();\nprocessError(ctx, pending, buffer);\nctx.response().setStatusCode(204);\nctx.end();\n}\npublic void processError(RoutingContext ctx, RoutingContext pending, Buffer buffer) {\nif (buffer != null) {\nif (ctx.request().getHeader(\"Content-Type\") != 
null) {\npending.response().putHeader(\"Content-Type\", ctx.request().getHeader(\"Content-Type\"));\n}\npending.response()\n.setStatusCode(500)\n.end(buffer);\n} else {\npending.response()\n.setStatusCode(500)\n.end();\n}\n}\n@Override\npublic void close() throws IOException {\nif (!closed.compareAndSet(false, true)) {\nreturn;\n}\nlog.info(\"Stopping Mock Lambda Event Server\");\nfor (var i : responsePending.entrySet()) {\ni.getValue().response().setStatusCode(503).end();\n}\nfor (var i : queue) {\ni.response().setStatusCode(503).end();\n}\ntry {\nhttpServer.close().toCompletionStage().toCompletableFuture().get();\n} catch (InterruptedException | ExecutionException e) {\nthrow new RuntimeException(e);\n} finally {\ntry {\nvertx.close().toCompletionStage().toCompletableFuture().get();\n} catch (InterruptedException | ExecutionException e) {\nthrow new RuntimeException(e);\n} finally {\nblockingPool.shutdown();\n}\n}\n}\n}" + }, + { + "comment": "Need to trim the space, eg: `\"delete, sequence\" => \"delete,sequence\"`", + "method_body": "private void setOptionalFromTSLPutRequest(TStreamLoadPutRequest request) throws UserException {\nif (request.isSetColumns()) {\nsetColumnToColumnExpr(request.getColumns());\n}\nif (request.isSetWhere()) {\nwhereExpr = parseWhereExpr(request.getWhere());\n}\nif (request.isSetColumnSeparator()) {\nsetColumnSeparator(request.getColumnSeparator());\n}\nif (request.isSetLineDelimiter()) {\nsetLineDelimiter(request.getLineDelimiter());\n}\nif (request.isSetHeaderType()) {\nheaderType = request.getHeaderType();\n}\nif (request.isSetPartitions()) {\nString[] partNames = request.getPartitions().trim().split(\"\\\\s*,\\\\s*\");\nif (request.isSetIsTempPartition()) {\npartitions = new PartitionNames(request.isIsTempPartition(), Lists.newArrayList(partNames));\n} else {\npartitions = new PartitionNames(false, Lists.newArrayList(partNames));\n}\n}\nswitch (request.getFileType()) {\ncase FILE_STREAM:\ncase FILE_LOCAL:\npath = request.getPath();\nbreak;\ndefault:\nthrow new UserException(\"unsupported file type, type=\" + request.getFileType());\n}\nif (request.isSetNegative()) {\nnegative = request.isNegative();\n}\nif (request.isSetTimeout()) {\ntimeout = request.getTimeout();\n}\nif (request.isSetStrictMode()) {\nstrictMode = request.isStrictMode();\n}\nif (request.isSetTimezone()) {\ntimezone = TimeUtils.checkTimeZoneValidAndStandardize(request.getTimezone());\n}\nif (request.isSetExecMemLimit()) {\nexecMemLimit = request.getExecMemLimit();\n}\nif (request.getFormatType() == TFileFormatType.FORMAT_JSON) {\nif (request.getJsonpaths() != null) {\njsonPaths = request.getJsonpaths();\n}\nif (request.getJsonRoot() != null) {\njsonRoot = request.getJsonRoot();\n}\nstripOuterArray = request.isStripOuterArray();\nnumAsString = request.isNumAsString();\nfuzzyParse = request.isFuzzyParse();\nreadJsonByLine = request.isReadJsonByLine();\n}\nif (request.isSetMergeType()) {\ntry {\nmergeType = LoadTask.MergeType.valueOf(request.getMergeType().toString());\n} catch (IllegalArgumentException e) {\nthrow new UserException(\"unknown merge type \" + request.getMergeType().toString());\n}\n}\nif (request.isSetDeleteCondition()) {\ndeleteCondition = parseWhereExpr(request.getDeleteCondition());\n}\nif (negative && mergeType != LoadTask.MergeType.APPEND) {\nthrow new AnalysisException(\"Negative is only used when merge type is APPEND.\");\n}\nif (request.isSetSequenceCol()) {\nsequenceCol = request.getSequenceCol();\n}\nif (request.isSetSendBatchParallelism()) {\nsendBatchParallelism = 
request.getSendBatchParallelism();\n}\nif (request.isSetMaxFilterRatio()) {\nmaxFilterRatio = request.getMaxFilterRatio();\n}\nif (request.isSetLoadToSingleTablet()) {\nloadToSingleTablet = request.isLoadToSingleTablet();\n}\nif (request.isSetHiddenColumns()) {\nhiddenColumns = Arrays.asList(request.getHiddenColumns().split(\",\"));\n}\n}", + "target_code": "hiddenColumns = Arrays.asList(request.getHiddenColumns().split(\",\"));", + "method_body_after": "private void setOptionalFromTSLPutRequest(TStreamLoadPutRequest request) throws UserException {\nif (request.isSetColumns()) {\nsetColumnToColumnExpr(request.getColumns());\n}\nif (request.isSetWhere()) {\nwhereExpr = parseWhereExpr(request.getWhere());\n}\nif (request.isSetColumnSeparator()) {\nsetColumnSeparator(request.getColumnSeparator());\n}\nif (request.isSetLineDelimiter()) {\nsetLineDelimiter(request.getLineDelimiter());\n}\nif (request.isSetHeaderType()) {\nheaderType = request.getHeaderType();\n}\nif (request.isSetPartitions()) {\nString[] partNames = request.getPartitions().trim().split(\"\\\\s*,\\\\s*\");\nif (request.isSetIsTempPartition()) {\npartitions = new PartitionNames(request.isIsTempPartition(), Lists.newArrayList(partNames));\n} else {\npartitions = new PartitionNames(false, Lists.newArrayList(partNames));\n}\n}\nswitch (request.getFileType()) {\ncase FILE_STREAM:\ncase FILE_LOCAL:\npath = request.getPath();\nbreak;\ndefault:\nthrow new UserException(\"unsupported file type, type=\" + request.getFileType());\n}\nif (request.isSetNegative()) {\nnegative = request.isNegative();\n}\nif (request.isSetTimeout()) {\ntimeout = request.getTimeout();\n}\nif (request.isSetStrictMode()) {\nstrictMode = request.isStrictMode();\n}\nif (request.isSetTimezone()) {\ntimezone = TimeUtils.checkTimeZoneValidAndStandardize(request.getTimezone());\n}\nif (request.isSetExecMemLimit()) {\nexecMemLimit = request.getExecMemLimit();\n}\nif (request.getFormatType() == TFileFormatType.FORMAT_JSON) {\nif (request.getJsonpaths() != null) {\njsonPaths = request.getJsonpaths();\n}\nif (request.getJsonRoot() != null) {\njsonRoot = request.getJsonRoot();\n}\nstripOuterArray = request.isStripOuterArray();\nnumAsString = request.isNumAsString();\nfuzzyParse = request.isFuzzyParse();\nreadJsonByLine = request.isReadJsonByLine();\n}\nif (request.isSetMergeType()) {\ntry {\nmergeType = LoadTask.MergeType.valueOf(request.getMergeType().toString());\n} catch (IllegalArgumentException e) {\nthrow new UserException(\"unknown merge type \" + request.getMergeType().toString());\n}\n}\nif (request.isSetDeleteCondition()) {\ndeleteCondition = parseWhereExpr(request.getDeleteCondition());\n}\nif (negative && mergeType != LoadTask.MergeType.APPEND) {\nthrow new AnalysisException(\"Negative is only used when merge type is APPEND.\");\n}\nif (request.isSetSequenceCol()) {\nsequenceCol = request.getSequenceCol();\n}\nif (request.isSetSendBatchParallelism()) {\nsendBatchParallelism = request.getSendBatchParallelism();\n}\nif (request.isSetMaxFilterRatio()) {\nmaxFilterRatio = request.getMaxFilterRatio();\n}\nif (request.isSetLoadToSingleTablet()) {\nloadToSingleTablet = request.isLoadToSingleTablet();\n}\nif (request.isSetHiddenColumns()) {\nhiddenColumns = Arrays.asList(request.getHiddenColumns().replaceAll(\"\\\\s+\", \"\").split(\",\"));\n}\n}", + "context_before": "class StreamLoadTask implements LoadTaskInfo {\nprivate static final Logger LOG = LogManager.getLogger(StreamLoadTask.class);\nprivate TUniqueId id;\nprivate long txnId;\nprivate TFileType 
fileType;\nprivate TFileFormatType formatType;\nprivate boolean stripOuterArray;\nprivate boolean numAsString;\nprivate String jsonPaths;\nprivate String jsonRoot;\nprivate boolean fuzzyParse;\nprivate boolean readJsonByLine;\nprivate ImportColumnDescs columnExprDescs = new ImportColumnDescs();\nprivate Expr whereExpr;\nprivate Separator columnSeparator;\nprivate Separator lineDelimiter;\nprivate PartitionNames partitions;\nprivate String path;\nprivate boolean negative;\nprivate boolean strictMode = false;\nprivate String timezone = TimeUtils.DEFAULT_TIME_ZONE;\nprivate int timeout = Config.stream_load_default_timeout_second;\nprivate long execMemLimit = 2 * 1024 * 1024 * 1024L;\nprivate LoadTask.MergeType mergeType = LoadTask.MergeType.APPEND;\nprivate Expr deleteCondition;\nprivate String sequenceCol;\nprivate int sendBatchParallelism = 1;\nprivate double maxFilterRatio = 0.0;\nprivate boolean loadToSingleTablet = false;\nprivate String headerType = \"\";\nprivate List hiddenColumns;\npublic StreamLoadTask(TUniqueId id, long txnId, TFileType fileType, TFileFormatType formatType) {\nthis.id = id;\nthis.txnId = txnId;\nthis.fileType = fileType;\nthis.formatType = formatType;\nthis.jsonPaths = \"\";\nthis.jsonRoot = \"\";\nthis.stripOuterArray = false;\nthis.numAsString = false;\nthis.fuzzyParse = false;\nthis.readJsonByLine = false;\n}\npublic TUniqueId getId() {\nreturn id;\n}\npublic long getTxnId() {\nreturn txnId;\n}\npublic TFileType getFileType() {\nreturn fileType;\n}\npublic TFileFormatType getFormatType() {\nreturn formatType;\n}\npublic ImportColumnDescs getColumnExprDescs() {\nreturn columnExprDescs;\n}\npublic Expr getPrecedingFilter() {\nreturn null;\n}\npublic Expr getWhereExpr() {\nreturn whereExpr;\n}\npublic Separator getColumnSeparator() {\nreturn columnSeparator;\n}\npublic String getHeaderType() {\nreturn headerType;\n}\npublic Separator getLineDelimiter() {\nreturn lineDelimiter;\n}\n@Override\npublic int getSendBatchParallelism() {\nreturn sendBatchParallelism;\n}\n@Override\npublic boolean isLoadToSingleTablet() {\nreturn loadToSingleTablet;\n}\npublic PartitionNames getPartitions() {\nreturn partitions;\n}\npublic String getPath() {\nreturn path;\n}\npublic boolean getNegative() {\nreturn negative;\n}\npublic boolean isStrictMode() {\nreturn strictMode;\n}\npublic String getTimezone() {\nreturn timezone;\n}\npublic int getTimeout() {\nreturn timeout;\n}\npublic boolean isStripOuterArray() {\nreturn stripOuterArray;\n}\n@Override\npublic boolean isNumAsString() {\nreturn numAsString;\n}\n@Override\npublic boolean isReadJsonByLine() {\nreturn readJsonByLine;\n}\n@Override\npublic boolean isFuzzyParse() {\nreturn fuzzyParse;\n}\npublic void setFuzzyParse(boolean fuzzyParse) {\nthis.fuzzyParse = fuzzyParse;\n}\npublic void setStripOuterArray(boolean stripOuterArray) {\nthis.stripOuterArray = stripOuterArray;\n}\npublic void setNumAsString(boolean numAsString) {\nthis.numAsString = numAsString;\n}\npublic String getJsonPaths() {\nreturn jsonPaths;\n}\npublic void setJsonPath(String jsonPaths) {\nthis.jsonPaths = jsonPaths;\n}\npublic String getJsonRoot() {\nreturn jsonRoot;\n}\npublic void setJsonRoot(String jsonRoot) {\nthis.jsonRoot = jsonRoot;\n}\npublic LoadTask.MergeType getMergeType() {\nreturn mergeType;\n}\npublic Expr getDeleteCondition() {\nreturn deleteCondition;\n}\npublic boolean hasSequenceCol() {\nreturn !Strings.isNullOrEmpty(sequenceCol);\n}\n@Override\npublic String getSequenceCol() {\nreturn sequenceCol;\n}\n@Override\npublic List getHiddenColumns() 
{\nreturn hiddenColumns;\n}\npublic static StreamLoadTask fromTStreamLoadPutRequest(TStreamLoadPutRequest request) throws UserException {\nStreamLoadTask streamLoadTask = new StreamLoadTask(request.getLoadId(), request.getTxnId(),\nrequest.getFileType(), request.getFormatType());\nstreamLoadTask.setOptionalFromTSLPutRequest(request);\nreturn streamLoadTask;\n}\nprivate void setColumnToColumnExpr(String columns) throws UserException {\nString columnsSQL = new String(\"COLUMNS (\" + columns + \")\");\nSqlParser parser = new SqlParser(new SqlScanner(new StringReader(columnsSQL)));\nImportColumnsStmt columnsStmt;\ntry {\ncolumnsStmt = (ImportColumnsStmt) SqlParserUtils.getFirstStmt(parser);\n} catch (Error e) {\nLOG.warn(\"error happens when parsing columns, sql={}\", columnsSQL, e);\nthrow new AnalysisException(\"failed to parsing columns' header, maybe contain unsupported character\");\n} catch (AnalysisException e) {\nLOG.warn(\"analyze columns' statement failed, sql={}, error={}\",\ncolumnsSQL, parser.getErrorMsg(columnsSQL), e);\nString errorMessage = parser.getErrorMsg(columnsSQL);\nif (errorMessage == null) {\nthrow e;\n} else {\nthrow new AnalysisException(errorMessage, e);\n}\n} catch (Exception e) {\nLOG.warn(\"failed to parse columns header, sql={}\", columnsSQL, e);\nthrow new UserException(\"parse columns header failed\", e);\n}\nif (columnsStmt.getColumns() != null && !columnsStmt.getColumns().isEmpty()) {\ncolumnExprDescs.descs = columnsStmt.getColumns();\n}\n}\nprivate Expr parseWhereExpr(String whereString) throws UserException {\nString whereSQL = new String(\"WHERE \" + whereString);\nSqlParser parser = new SqlParser(new SqlScanner(new StringReader(whereSQL)));\nImportWhereStmt whereStmt;\ntry {\nwhereStmt = (ImportWhereStmt) SqlParserUtils.getFirstStmt(parser);\n} catch (Error e) {\nLOG.warn(\"error happens when parsing where header, sql={}\", whereSQL, e);\nthrow new AnalysisException(\"failed to parsing where header, maybe contain unsupported character\");\n} catch (AnalysisException e) {\nLOG.warn(\"analyze where statement failed, sql={}, error={}\",\nwhereSQL, parser.getErrorMsg(whereSQL), e);\nString errorMessage = parser.getErrorMsg(whereSQL);\nif (errorMessage == null) {\nthrow e;\n} else {\nthrow new AnalysisException(errorMessage, e);\n}\n} catch (Exception e) {\nLOG.warn(\"failed to parse where header, sql={}\", whereSQL, e);\nthrow new UserException(\"parse columns header failed\", e);\n}\nreturn whereStmt.getExpr();\n}\nprivate void setColumnSeparator(String oriSeparator) throws AnalysisException {\ncolumnSeparator = new Separator(oriSeparator);\ncolumnSeparator.analyze();\n}\nprivate void setLineDelimiter(String oriLineDelimiter) throws AnalysisException {\nlineDelimiter = new Separator(oriLineDelimiter);\nlineDelimiter.analyze();\n}\n@Override\npublic long getMemLimit() {\nreturn execMemLimit;\n}\n@Override\npublic double getMaxFilterRatio() {\nreturn maxFilterRatio;\n}\n}", + "context_after": "class StreamLoadTask implements LoadTaskInfo {\nprivate static final Logger LOG = LogManager.getLogger(StreamLoadTask.class);\nprivate TUniqueId id;\nprivate long txnId;\nprivate TFileType fileType;\nprivate TFileFormatType formatType;\nprivate boolean stripOuterArray;\nprivate boolean numAsString;\nprivate String jsonPaths;\nprivate String jsonRoot;\nprivate boolean fuzzyParse;\nprivate boolean readJsonByLine;\nprivate ImportColumnDescs columnExprDescs = new ImportColumnDescs();\nprivate Expr whereExpr;\nprivate Separator columnSeparator;\nprivate Separator 
lineDelimiter;\nprivate PartitionNames partitions;\nprivate String path;\nprivate boolean negative;\nprivate boolean strictMode = false;\nprivate String timezone = TimeUtils.DEFAULT_TIME_ZONE;\nprivate int timeout = Config.stream_load_default_timeout_second;\nprivate long execMemLimit = 2 * 1024 * 1024 * 1024L;\nprivate LoadTask.MergeType mergeType = LoadTask.MergeType.APPEND;\nprivate Expr deleteCondition;\nprivate String sequenceCol;\nprivate int sendBatchParallelism = 1;\nprivate double maxFilterRatio = 0.0;\nprivate boolean loadToSingleTablet = false;\nprivate String headerType = \"\";\nprivate List hiddenColumns;\npublic StreamLoadTask(TUniqueId id, long txnId, TFileType fileType, TFileFormatType formatType) {\nthis.id = id;\nthis.txnId = txnId;\nthis.fileType = fileType;\nthis.formatType = formatType;\nthis.jsonPaths = \"\";\nthis.jsonRoot = \"\";\nthis.stripOuterArray = false;\nthis.numAsString = false;\nthis.fuzzyParse = false;\nthis.readJsonByLine = false;\n}\npublic TUniqueId getId() {\nreturn id;\n}\npublic long getTxnId() {\nreturn txnId;\n}\npublic TFileType getFileType() {\nreturn fileType;\n}\npublic TFileFormatType getFormatType() {\nreturn formatType;\n}\npublic ImportColumnDescs getColumnExprDescs() {\nreturn columnExprDescs;\n}\npublic Expr getPrecedingFilter() {\nreturn null;\n}\npublic Expr getWhereExpr() {\nreturn whereExpr;\n}\npublic Separator getColumnSeparator() {\nreturn columnSeparator;\n}\npublic String getHeaderType() {\nreturn headerType;\n}\npublic Separator getLineDelimiter() {\nreturn lineDelimiter;\n}\n@Override\npublic int getSendBatchParallelism() {\nreturn sendBatchParallelism;\n}\n@Override\npublic boolean isLoadToSingleTablet() {\nreturn loadToSingleTablet;\n}\npublic PartitionNames getPartitions() {\nreturn partitions;\n}\npublic String getPath() {\nreturn path;\n}\npublic boolean getNegative() {\nreturn negative;\n}\npublic boolean isStrictMode() {\nreturn strictMode;\n}\npublic String getTimezone() {\nreturn timezone;\n}\npublic int getTimeout() {\nreturn timeout;\n}\npublic boolean isStripOuterArray() {\nreturn stripOuterArray;\n}\n@Override\npublic boolean isNumAsString() {\nreturn numAsString;\n}\n@Override\npublic boolean isReadJsonByLine() {\nreturn readJsonByLine;\n}\n@Override\npublic boolean isFuzzyParse() {\nreturn fuzzyParse;\n}\npublic void setFuzzyParse(boolean fuzzyParse) {\nthis.fuzzyParse = fuzzyParse;\n}\npublic void setStripOuterArray(boolean stripOuterArray) {\nthis.stripOuterArray = stripOuterArray;\n}\npublic void setNumAsString(boolean numAsString) {\nthis.numAsString = numAsString;\n}\npublic String getJsonPaths() {\nreturn jsonPaths;\n}\npublic void setJsonPath(String jsonPaths) {\nthis.jsonPaths = jsonPaths;\n}\npublic String getJsonRoot() {\nreturn jsonRoot;\n}\npublic void setJsonRoot(String jsonRoot) {\nthis.jsonRoot = jsonRoot;\n}\npublic LoadTask.MergeType getMergeType() {\nreturn mergeType;\n}\npublic Expr getDeleteCondition() {\nreturn deleteCondition;\n}\npublic boolean hasSequenceCol() {\nreturn !Strings.isNullOrEmpty(sequenceCol);\n}\n@Override\npublic String getSequenceCol() {\nreturn sequenceCol;\n}\n@Override\npublic List getHiddenColumns() {\nreturn hiddenColumns;\n}\npublic static StreamLoadTask fromTStreamLoadPutRequest(TStreamLoadPutRequest request) throws UserException {\nStreamLoadTask streamLoadTask = new StreamLoadTask(request.getLoadId(), request.getTxnId(),\nrequest.getFileType(), request.getFormatType());\nstreamLoadTask.setOptionalFromTSLPutRequest(request);\nreturn streamLoadTask;\n}\nprivate void 
setColumnToColumnExpr(String columns) throws UserException {\nString columnsSQL = new String(\"COLUMNS (\" + columns + \")\");\nSqlParser parser = new SqlParser(new SqlScanner(new StringReader(columnsSQL)));\nImportColumnsStmt columnsStmt;\ntry {\ncolumnsStmt = (ImportColumnsStmt) SqlParserUtils.getFirstStmt(parser);\n} catch (Error e) {\nLOG.warn(\"error happens when parsing columns, sql={}\", columnsSQL, e);\nthrow new AnalysisException(\"failed to parsing columns' header, maybe contain unsupported character\");\n} catch (AnalysisException e) {\nLOG.warn(\"analyze columns' statement failed, sql={}, error={}\",\ncolumnsSQL, parser.getErrorMsg(columnsSQL), e);\nString errorMessage = parser.getErrorMsg(columnsSQL);\nif (errorMessage == null) {\nthrow e;\n} else {\nthrow new AnalysisException(errorMessage, e);\n}\n} catch (Exception e) {\nLOG.warn(\"failed to parse columns header, sql={}\", columnsSQL, e);\nthrow new UserException(\"parse columns header failed\", e);\n}\nif (columnsStmt.getColumns() != null && !columnsStmt.getColumns().isEmpty()) {\ncolumnExprDescs.descs = columnsStmt.getColumns();\n}\n}\nprivate Expr parseWhereExpr(String whereString) throws UserException {\nString whereSQL = new String(\"WHERE \" + whereString);\nSqlParser parser = new SqlParser(new SqlScanner(new StringReader(whereSQL)));\nImportWhereStmt whereStmt;\ntry {\nwhereStmt = (ImportWhereStmt) SqlParserUtils.getFirstStmt(parser);\n} catch (Error e) {\nLOG.warn(\"error happens when parsing where header, sql={}\", whereSQL, e);\nthrow new AnalysisException(\"failed to parsing where header, maybe contain unsupported character\");\n} catch (AnalysisException e) {\nLOG.warn(\"analyze where statement failed, sql={}, error={}\",\nwhereSQL, parser.getErrorMsg(whereSQL), e);\nString errorMessage = parser.getErrorMsg(whereSQL);\nif (errorMessage == null) {\nthrow e;\n} else {\nthrow new AnalysisException(errorMessage, e);\n}\n} catch (Exception e) {\nLOG.warn(\"failed to parse where header, sql={}\", whereSQL, e);\nthrow new UserException(\"parse columns header failed\", e);\n}\nreturn whereStmt.getExpr();\n}\nprivate void setColumnSeparator(String oriSeparator) throws AnalysisException {\ncolumnSeparator = new Separator(oriSeparator);\ncolumnSeparator.analyze();\n}\nprivate void setLineDelimiter(String oriLineDelimiter) throws AnalysisException {\nlineDelimiter = new Separator(oriLineDelimiter);\nlineDelimiter.analyze();\n}\n@Override\npublic long getMemLimit() {\nreturn execMemLimit;\n}\n@Override\npublic double getMaxFilterRatio() {\nreturn maxFilterRatio;\n}\n}" + }, + { + "comment": "I think it is OK that add the submitted count after the task submitted.", + "method_body": "public void execute(final ChannelHandlerContext context, final Object message, final DatabaseProtocolFrontendEngine databaseProtocolFrontendEngine, final BackendConnection backendConnection) {\nboolean supportHint = ProxyContext.getInstance().getMetaDataContexts().getProps().getValue(ConfigurationPropertyKey.PROXY_HINT_ENABLED);\nboolean isOccupyThreadForPerConnection = databaseProtocolFrontendEngine.getFrontendContext().isOccupyThreadForPerConnection();\nExecutorService executorService = CommandExecutorSelector.getExecutorService(\nisOccupyThreadForPerConnection, supportHint, backendConnection.getTransactionStatus().getTransactionType(), context.channel().id());\nexecutorService.execute(new CommandExecutorTask(databaseProtocolFrontendEngine, backendConnection, context, message));\nbackendConnection.getSubmittedTaskCount().incrementAndGet();\n}", 
+ "target_code": "backendConnection.getSubmittedTaskCount().incrementAndGet();", + "method_body_after": "public void execute(final ChannelHandlerContext context, final Object message, final DatabaseProtocolFrontendEngine databaseProtocolFrontendEngine, final BackendConnection backendConnection) {\nboolean supportHint = ProxyContext.getInstance().getMetaDataContexts().getProps().getValue(ConfigurationPropertyKey.PROXY_HINT_ENABLED);\nboolean isOccupyThreadForPerConnection = databaseProtocolFrontendEngine.getFrontendContext().isOccupyThreadForPerConnection();\nExecutorService executorService = CommandExecutorSelector.getExecutorService(\nisOccupyThreadForPerConnection, supportHint, backendConnection.getTransactionStatus().getTransactionType(), context.channel().id());\nbackendConnection.getSubmittedTaskCount().incrementAndGet();\ntry {\nexecutorService.execute(new CommandExecutorTask(databaseProtocolFrontendEngine, backendConnection, context, message));\n} catch (final RejectedExecutionException ignored) {\nbackendConnection.getSubmittedTaskCount().decrementAndGet();\n}\n}", + "context_before": "class OKProxyState implements ProxyState {\n@Override\n}", + "context_after": "class OKProxyState implements ProxyState {\n@Override\n}" + }, + { + "comment": "This was a mistake on my part. It was supposed to be return false but i think something changed when i rebased it. I have corrected the condition", + "method_body": "private boolean getSticky(PackageContext rootPackageContext) {\nboolean sticky = rootPackageContext.project().buildOptions().sticky();\nif (sticky) {\nthis.autoUpdate = false;\nreturn true;\n}\nif (rootPackageContext.project().kind() == ProjectKind.BUILD_PROJECT) {\nPath buildFilePath = this.rootPackageContext.project().sourceRoot().resolve(TARGET_DIR_NAME)\n.resolve(BUILD_FILE);\nif (Files.exists(buildFilePath) && buildFilePath.toFile().length() > 0) {\ntry {\nBuildJson buildJson = readBuildJson(buildFilePath);\nif (buildJson != null && !buildJson.isExpiredLastUpdateTime()) {\nthis.autoUpdate = false;\nreturn true;\n} else {\nthis.autoUpdate = true;\nreturn true;\n}\n} catch (IOException | JsonSyntaxException e) {\nthis.autoUpdate = true;\nreturn false;\n}\n}\nthis.autoUpdate = true;\nreturn false;\n}\nthis.autoUpdate = true;\nreturn false;\n}", + "target_code": "return true;", + "method_body_after": "private boolean getSticky(PackageContext rootPackageContext) {\nboolean sticky = rootPackageContext.project().buildOptions().sticky();\nif (sticky) {\nthis.autoUpdate = false;\nreturn true;\n}\nif (rootPackageContext.project().kind() == ProjectKind.BUILD_PROJECT) {\nPath buildFilePath = this.rootPackageContext.project().sourceRoot().resolve(TARGET_DIR_NAME)\n.resolve(BUILD_FILE);\nif (Files.exists(buildFilePath) && buildFilePath.toFile().length() > 0) {\ntry {\nBuildJson buildJson = readBuildJson(buildFilePath);\nif (buildJson != null && !buildJson.isExpiredLastUpdateTime()) {\nthis.autoUpdate = false;\nreturn true;\n} else {\nthis.autoUpdate = true;\nreturn false;\n}\n} catch (IOException | JsonSyntaxException e) {\nthis.autoUpdate = true;\nreturn false;\n}\n}\nthis.autoUpdate = true;\nreturn false;\n}\nthis.autoUpdate = true;\nreturn false;\n}", + "context_before": "class PackageResolution {\nprivate final PackageContext rootPackageContext;\nprivate final BlendedManifest blendedManifest;\nprivate final DependencyGraph dependencyGraph;\nprivate final CompilationOptions compilationOptions;\nprivate final PackageResolver packageResolver;\nprivate final ModuleResolver 
moduleResolver;\nprivate final List diagnosticList;\nprivate DiagnosticResult diagnosticResult;\nprivate boolean autoUpdate;\nprivate List topologicallySortedModuleList;\nprivate Collection dependenciesWithTransitives;\nprivate PackageResolution(PackageContext rootPackageContext, CompilationOptions compilationOptions) {\nthis.rootPackageContext = rootPackageContext;\nthis.diagnosticList = new ArrayList<>();\nthis.compilationOptions = compilationOptions;\nResolutionOptions resolutionOptions = getResolutionOptions(rootPackageContext, compilationOptions);\nProjectEnvironment projectEnvContext = rootPackageContext.project().projectEnvironmentContext();\nthis.packageResolver = projectEnvContext.getService(PackageResolver.class);\nthis.blendedManifest = createBlendedManifest(rootPackageContext, projectEnvContext);\nthis.moduleResolver = createModuleResolver(rootPackageContext, projectEnvContext, resolutionOptions);\nthis.dependencyGraph = buildDependencyGraph(resolutionOptions);\nDependencyResolution dependencyResolution = new DependencyResolution(\nprojectEnvContext.getService(PackageCache.class), moduleResolver, dependencyGraph);\nresolveDependencies(dependencyResolution);\n}\nstatic PackageResolution from(PackageContext rootPackageContext, CompilationOptions compilationOptions) {\nreturn new PackageResolution(rootPackageContext, compilationOptions);\n}\n/**\n* Returns the package dependency graph of this package.\n*\n* @return the package dependency graph of this package\n*/\npublic DependencyGraph dependencyGraph() {\nreturn dependencyGraph;\n}\n/**\n* Returns all the dependencies of this package including it's transitive dependencies.\n*\n* @return all the dependencies of this package including it's transitive dependencies\n*/\npublic Collection allDependencies() {\nif (dependenciesWithTransitives != null) {\nreturn dependenciesWithTransitives;\n}\ndependenciesWithTransitives = dependencyGraph.toTopologicallySortedList()\n.stream()\n.filter(resolvedPkg -> resolvedPkg.packageId() != rootPackageContext.packageId())\n.collect(Collectors.toList());\nreturn dependenciesWithTransitives;\n}\nPackageContext packageContext() {\nreturn rootPackageContext;\n}\nList topologicallySortedModuleList() {\nreturn topologicallySortedModuleList;\n}\npublic DiagnosticResult diagnosticResult() {\nif (this.diagnosticResult == null) {\nthis.diagnosticResult = new DefaultDiagnosticResult(this.diagnosticList);\n}\nreturn diagnosticResult;\n}\nvoid reportDiagnostic(String message, String diagnosticErrorCode, DiagnosticSeverity severity, Location location,\nModuleDescriptor moduleDescriptor) {\nvar diagnosticInfo = new DiagnosticInfo(diagnosticErrorCode, message, severity);\nvar diagnostic = DiagnosticFactory.createDiagnostic(diagnosticInfo, location);\nvar packageDiagnostic = new PackageDiagnostic(diagnostic, moduleDescriptor, rootPackageContext.project());\nthis.diagnosticList.add(packageDiagnostic);\nthis.diagnosticResult = new DefaultDiagnosticResult(this.diagnosticList);\n}\npublic boolean autoUpdate() {\nreturn autoUpdate;\n}\n/**\n* The goal of this method is to build the complete package dependency graph of this package.\n* 1) Combine {@code ModuleLoadRequest}s of all the modules in this package.\n* 2) Filter out such requests that do not request modules of this package.\n* 3) Create {@code PackageLoadRequest}s by incorporating the versions specified in Ballerina.toml file.\n*
<p>
\n* Now you have a set of PackageLoadRequests that contains all the direct dependencies of this package.\n* Load all these packages using the PackageResolver service. With this model PackageResolver does not\n* need to be aware of the current package. Once all the direct dependencies are loaded,\n* combine their dependency graphs into a single graph that contains all the transitives.\n* Now check for cycles and version conflicts. Once the version conflicts are resolved, return the graph.\n*\n* @return package dependency graph of this package\n*/\nprivate DependencyGraph buildDependencyGraph(ResolutionOptions resolutionOptions) {\nif (rootPackageContext.project().kind() == ProjectKind.BALA_PROJECT) {\nreturn resolveBALADependencies(resolutionOptions);\n} else {\nreturn resolveSourceDependencies(resolutionOptions);\n}\n}\nprivate LinkedHashSet getModuleLoadRequestsOfDirectDependencies() {\nLinkedHashSet allModuleLoadRequests = new LinkedHashSet<>();\nfor (ModuleId moduleId : rootPackageContext.moduleIds()) {\nModuleContext moduleContext = rootPackageContext.moduleContext(moduleId);\nallModuleLoadRequests.addAll(moduleContext.populateModuleLoadRequests());\n}\nfor (ModuleId moduleId : rootPackageContext.moduleIds()) {\nModuleContext moduleContext = rootPackageContext.moduleContext(moduleId);\nallModuleLoadRequests.addAll(moduleContext.populateTestSrcModuleLoadRequests());\n}\nif (compilationOptions.observabilityIncluded()) {\n{\nString moduleName = Names.OBSERVE.getValue();\nModuleLoadRequest observeModuleLoadReq = new ModuleLoadRequest(\nPackageOrg.from(Names.BALLERINA_INTERNAL_ORG.value), moduleName,\nPackageDependencyScope.DEFAULT, DependencyResolutionType.PLATFORM_PROVIDED);\nallModuleLoadRequests.add(observeModuleLoadReq);\n}\n}\nif (\"k8s\".equals(compilationOptions.getCloud()) || \"docker\".equals(compilationOptions.getCloud()) ||\n\"choreo\".equals(compilationOptions.getCloud())) {\nString moduleName = Names.CLOUD.getValue();\nModuleLoadRequest c2cModuleLoadReq = new ModuleLoadRequest(\nPackageOrg.from(Names.BALLERINA_ORG.value), moduleName,\nPackageDependencyScope.DEFAULT, DependencyResolutionType.COMPILER_PLUGIN);\nallModuleLoadRequests.add(c2cModuleLoadReq);\n}\nreturn allModuleLoadRequests;\n}\nprivate DependencyGraph resolveBALADependencies(ResolutionOptions resolutionOptions) {\nDependencyGraph dependencyNodeGraph = createDependencyNodeGraph(\nrootPackageContext.dependencyGraph());\nreturn buildPackageGraph(dependencyNodeGraph, rootPackageContext.project().currentPackage(),\npackageResolver, resolutionOptions);\n}\nprivate DependencyGraph resolveSourceDependencies(ResolutionOptions resolutionOptions) {\nLinkedHashSet moduleLoadRequests = getModuleLoadRequestsOfDirectDependencies();\nResolutionEngine resolutionEngine = new ResolutionEngine(rootPackageContext.descriptor(),\nblendedManifest, packageResolver, moduleResolver, resolutionOptions);\nDependencyGraph dependencyNodeGraph =\nresolutionEngine.resolveDependencies(moduleLoadRequests);\nreturn buildPackageGraph(dependencyNodeGraph, rootPackageContext.project().currentPackage(),\npackageResolver, resolutionOptions);\n}\nstatic Optional findModuleInPackage(PackageContext resolvedPackage, String moduleNameStr) {\nPackageName packageName = resolvedPackage.packageName();\nModuleName moduleName;\nif (packageName.value().equals(moduleNameStr)) {\nmoduleName = ModuleName.from(packageName);\n} else {\nString moduleNamePart = moduleNameStr.substring(packageName.value().length() + 1);\nif (moduleNamePart.isEmpty()) {\nmoduleNamePart = 
null;\n}\nmoduleName = ModuleName.from(packageName, moduleNamePart);\n}\nModuleContext resolvedModule = resolvedPackage.moduleContext(moduleName);\nif (resolvedModule == null) {\nreturn Optional.empty();\n}\nreturn Optional.of(resolvedModule);\n}\nprivate DependencyGraph buildPackageGraph(DependencyGraph depGraph,\nPackage rootPackage,\nPackageResolver packageResolver,\nResolutionOptions resolutionOptions) {\nPackageContainer resolvedPkgContainer = new PackageContainer<>();\nDependencyNode rootNode = depGraph.getRoot();\nResolvedPackageDependency rootResolvedPackage = new ResolvedPackageDependency(rootPackage,\nrootNode.scope(), rootNode.resolutionType());\nresolvedPkgContainer.add(rootNode.pkgDesc().org(), rootNode.pkgDesc().name(), rootResolvedPackage);\nList resolutionRequests = depGraph.getNodes().stream()\n.filter(depNode -> !depNode.equals(rootNode))\n.map(this::createFromDepNode)\n.collect(Collectors.toList());\nCollection resolutionResponses =\npackageResolver.resolvePackages(resolutionRequests, resolutionOptions);\nfor (ResolutionResponse resolutionResp : resolutionResponses) {\nif (resolutionResp.resolutionStatus().equals(ResolutionResponse.ResolutionStatus.RESOLVED)) {\nPackageDescriptor pkgDesc = resolutionResp.responseDescriptor();\nResolutionRequest resolutionReq = resolutionResp.resolutionRequest();\nResolvedPackageDependency resolvedPkg = new ResolvedPackageDependency(\nresolutionResp.resolvedPackage(),\nresolutionReq.scope(),\nresolutionReq.resolutionType());\nresolvedPkgContainer.add(pkgDesc.org(), pkgDesc.name(), resolvedPkg);\n}\n}\nDependencyGraphBuilder depGraphBuilder =\nDependencyGraphBuilder.getBuilder(rootResolvedPackage);\nfor (DependencyNode depNode : depGraph.getNodes()) {\nOptional resolvedPkgOptional = resolvedPkgContainer.get(\ndepNode.pkgDesc().org(), depNode.pkgDesc().name());\nif (resolvedPkgOptional.isPresent()) {\nResolvedPackageDependency resolvedPkg = resolvedPkgOptional.get();\ndepGraphBuilder.add(resolvedPkg);\nList directPkgDependencies =\ndepGraph.getDirectDependencies(depNode)\n.stream()\n.map(directDepNode -> resolvedPkgContainer.get(\ndirectDepNode.pkgDesc().org(), directDepNode.pkgDesc().name()))\n.flatMap(Optional::stream)\n.collect(Collectors.toList());\ndepGraphBuilder.addDependencies(resolvedPkg, directPkgDependencies);\n}\n}\nreturn depGraphBuilder.build();\n}\nprivate ResolutionRequest createFromDepNode(DependencyNode depNode) {\nreturn ResolutionRequest.from(depNode.pkgDesc(), depNode.scope(), depNode.resolutionType());\n}\nprivate DependencyGraph createDependencyNodeGraph(\nDependencyGraph pkgDescDepGraph) {\nDependencyNode rootNode = new DependencyNode(rootPackageContext.descriptor());\nDependencyGraphBuilder graphBuilder = DependencyGraphBuilder.getBuilder(rootNode);\nfor (PackageDescriptor pkgDesc : pkgDescDepGraph.getNodes()) {\nDependencyNode dependencyNode = new DependencyNode(pkgDesc);\ngraphBuilder.add(dependencyNode);\nfor (PackageDescriptor directDepPkgDesc : pkgDescDepGraph.getDirectDependencies(pkgDesc)) {\ngraphBuilder.addDependency(dependencyNode, new DependencyNode(directDepPkgDesc));\n}\n}\nreturn graphBuilder.build();\n}\n/**\n* Resolve dependencies of each package, which in turn resolves dependencies of each module.\n*
<p>
\n* This logic should get packages from the dependency graph, not from the PackageCache.\n* Because PackageCache may contain various versions of a single package,\n* but the dependency graph contains only the resolved version.\n*/\nprivate void resolveDependencies(DependencyResolution dependencyResolution) {\nList sortedModuleList = new ArrayList<>();\nList sortedPackages = dependencyGraph.toTopologicallySortedList();\nfor (ResolvedPackageDependency pkgDependency : sortedPackages) {\nPackage resolvedPackage = pkgDependency.packageInstance();\nresolvedPackage.packageContext().resolveDependencies(dependencyResolution);\nDependencyGraph moduleDependencyGraph = resolvedPackage.moduleDependencyGraph();\nList sortedModuleIds = moduleDependencyGraph.toTopologicallySortedList();\nfor (ModuleId moduleId : sortedModuleIds) {\nModuleContext moduleContext = resolvedPackage.module(moduleId).moduleContext();\nsortedModuleList.add(moduleContext);\n}\n}\nthis.topologicallySortedModuleList = Collections.unmodifiableList(sortedModuleList);\n}\nprivate ModuleResolver createModuleResolver(PackageContext rootPackageContext,\nProjectEnvironment projectEnvContext,\nResolutionOptions resolutionOptions) {\nList moduleNames = rootPackageContext.moduleIds().stream()\n.map(rootPackageContext::moduleContext)\n.map(ModuleContext::moduleName)\n.collect(Collectors.toList());\nreturn new ModuleResolver(rootPackageContext.descriptor(), moduleNames, blendedManifest,\nprojectEnvContext.getService(PackageResolver.class), resolutionOptions);\n}\nprivate BlendedManifest createBlendedManifest(PackageContext rootPackageContext,\nProjectEnvironment projectEnvContext) {\nreturn BlendedManifest.from(rootPackageContext.dependencyManifest(),\nrootPackageContext.packageManifest(),\nprojectEnvContext.getService(LocalPackageRepository.class));\n}\nprivate ResolutionOptions getResolutionOptions(PackageContext rootPackageContext,\nCompilationOptions compilationOptions) {\nreturn ResolutionOptions.builder()\n.setOffline(compilationOptions.offlineBuild())\n.setSticky(getSticky(rootPackageContext))\n.setDumpGraph(compilationOptions.dumpGraph())\n.setDumpRawGraphs(compilationOptions.dumpRawGraphs())\n.build();\n}\n/**\n* This entity is used by packages and modules to resolve their dependencies from the dependency graph.\n*\n* @since 2.0.0\n*/\nstatic class DependencyResolution {\nprivate final PackageCache delegate;\nprivate final ModuleResolver moduleResolver;\nprivate final DependencyGraph dependencyGraph;\nprivate DependencyResolution(PackageCache delegate,\nModuleResolver moduleResolver,\nDependencyGraph dependencyGraph) {\nthis.delegate = delegate;\nthis.moduleResolver = moduleResolver;\nthis.dependencyGraph = dependencyGraph;\n}\npublic Optional getPackage(PackageOrg packageOrg, PackageName packageName) {\nList resolvedPackages = delegate.getPackages(packageOrg, packageName);\nfor (Package resolvedPackage : resolvedPackages) {\nif (containsPackage(resolvedPackage)) {\nreturn Optional.of(resolvedPackage);\n}\n}\nreturn Optional.empty();\n}\npublic Optional getModule(PackageOrg packageOrg, PackageName packageName, ModuleName moduleName) {\nOptional resolvedPkg = getPackage(packageOrg, packageName);\nif (resolvedPkg.isEmpty()) {\nreturn Optional.empty();\n}\nModule resolvedModule = resolvedPkg.get().module(moduleName);\nif (resolvedModule == null) {\nreturn Optional.empty();\n}\nreturn Optional.of(resolvedModule);\n}\npublic Optional getModule(PackageOrg packageOrg, String moduleNameStr) {\nImportModuleRequest importModuleRequest = new 
ImportModuleRequest(packageOrg, moduleNameStr);\nImportModuleResponse importModuleResponse = moduleResolver.getImportModuleResponse(importModuleRequest);\nif (importModuleResponse == null) {\nreturn Optional.empty();\n}\nPackageName packageName;\npackageName = importModuleResponse.packageDescriptor().name();\nOptional optionalPackage = getPackage(packageOrg,\npackageName);\nif (optionalPackage.isEmpty()) {\nreturn Optional.empty();\n}\nreturn PackageResolution.findModuleInPackage(optionalPackage.get().packageContext(), moduleNameStr);\n}\nprivate boolean containsPackage(Package pkg) {\nfor (ResolvedPackageDependency graphNode : dependencyGraph.getNodes()) {\nif (graphNode.packageId() == pkg.packageId()) {\nreturn true;\n}\n}\nreturn false;\n}\n}\n}", + "context_after": "class PackageResolution {\nprivate final PackageContext rootPackageContext;\nprivate final BlendedManifest blendedManifest;\nprivate final DependencyGraph dependencyGraph;\nprivate final CompilationOptions compilationOptions;\nprivate final PackageResolver packageResolver;\nprivate final ModuleResolver moduleResolver;\nprivate final List diagnosticList;\nprivate DiagnosticResult diagnosticResult;\nprivate boolean autoUpdate;\nprivate List topologicallySortedModuleList;\nprivate Collection dependenciesWithTransitives;\nprivate PackageResolution(PackageContext rootPackageContext, CompilationOptions compilationOptions) {\nthis.rootPackageContext = rootPackageContext;\nthis.diagnosticList = new ArrayList<>();\nthis.compilationOptions = compilationOptions;\nResolutionOptions resolutionOptions = getResolutionOptions(rootPackageContext, compilationOptions);\nProjectEnvironment projectEnvContext = rootPackageContext.project().projectEnvironmentContext();\nthis.packageResolver = projectEnvContext.getService(PackageResolver.class);\nthis.blendedManifest = createBlendedManifest(rootPackageContext, projectEnvContext);\nthis.moduleResolver = createModuleResolver(rootPackageContext, projectEnvContext, resolutionOptions);\nthis.dependencyGraph = buildDependencyGraph(resolutionOptions);\nDependencyResolution dependencyResolution = new DependencyResolution(\nprojectEnvContext.getService(PackageCache.class), moduleResolver, dependencyGraph);\nresolveDependencies(dependencyResolution);\n}\nstatic PackageResolution from(PackageContext rootPackageContext, CompilationOptions compilationOptions) {\nreturn new PackageResolution(rootPackageContext, compilationOptions);\n}\n/**\n* Returns the package dependency graph of this package.\n*\n* @return the package dependency graph of this package\n*/\npublic DependencyGraph dependencyGraph() {\nreturn dependencyGraph;\n}\n/**\n* Returns all the dependencies of this package including it's transitive dependencies.\n*\n* @return all the dependencies of this package including it's transitive dependencies\n*/\npublic Collection allDependencies() {\nif (dependenciesWithTransitives != null) {\nreturn dependenciesWithTransitives;\n}\ndependenciesWithTransitives = dependencyGraph.toTopologicallySortedList()\n.stream()\n.filter(resolvedPkg -> resolvedPkg.packageId() != rootPackageContext.packageId())\n.collect(Collectors.toList());\nreturn dependenciesWithTransitives;\n}\nPackageContext packageContext() {\nreturn rootPackageContext;\n}\nList topologicallySortedModuleList() {\nreturn topologicallySortedModuleList;\n}\npublic DiagnosticResult diagnosticResult() {\nif (this.diagnosticResult == null) {\nthis.diagnosticResult = new DefaultDiagnosticResult(this.diagnosticList);\n}\nreturn diagnosticResult;\n}\nvoid 
reportDiagnostic(String message, String diagnosticErrorCode, DiagnosticSeverity severity, Location location,\nModuleDescriptor moduleDescriptor) {\nvar diagnosticInfo = new DiagnosticInfo(diagnosticErrorCode, message, severity);\nvar diagnostic = DiagnosticFactory.createDiagnostic(diagnosticInfo, location);\nvar packageDiagnostic = new PackageDiagnostic(diagnostic, moduleDescriptor, rootPackageContext.project());\nthis.diagnosticList.add(packageDiagnostic);\nthis.diagnosticResult = new DefaultDiagnosticResult(this.diagnosticList);\n}\npublic boolean autoUpdate() {\nreturn autoUpdate;\n}\n/**\n* The goal of this method is to build the complete package dependency graph of this package.\n* 1) Combine {@code ModuleLoadRequest}s of all the modules in this package.\n* 2) Filter out such requests that do not request modules of this package.\n* 3) Create {@code PackageLoadRequest}s by incorporating the versions specified in Ballerina.toml file.\n*
<p>
\n* Now you have a set of PackageLoadRequests that contains all the direct dependencies of this package.\n* Load all these packages using the PackageResolver service. With this model PackageResolver does not\n* need to be aware of the current package. Once all the direct dependencies are loaded,\n* combine their dependency graphs into a single graph that contains all the transitives.\n* Now check for cycles and version conflicts. Once the version conflicts are resolved, return the graph.\n*\n* @return package dependency graph of this package\n*/\nprivate DependencyGraph buildDependencyGraph(ResolutionOptions resolutionOptions) {\nif (rootPackageContext.project().kind() == ProjectKind.BALA_PROJECT) {\nreturn resolveBALADependencies(resolutionOptions);\n} else {\nreturn resolveSourceDependencies(resolutionOptions);\n}\n}\nprivate LinkedHashSet getModuleLoadRequestsOfDirectDependencies() {\nLinkedHashSet allModuleLoadRequests = new LinkedHashSet<>();\nfor (ModuleId moduleId : rootPackageContext.moduleIds()) {\nModuleContext moduleContext = rootPackageContext.moduleContext(moduleId);\nallModuleLoadRequests.addAll(moduleContext.populateModuleLoadRequests());\n}\nfor (ModuleId moduleId : rootPackageContext.moduleIds()) {\nModuleContext moduleContext = rootPackageContext.moduleContext(moduleId);\nallModuleLoadRequests.addAll(moduleContext.populateTestSrcModuleLoadRequests());\n}\nif (compilationOptions.observabilityIncluded()) {\n{\nString moduleName = Names.OBSERVE.getValue();\nModuleLoadRequest observeModuleLoadReq = new ModuleLoadRequest(\nPackageOrg.from(Names.BALLERINA_INTERNAL_ORG.value), moduleName,\nPackageDependencyScope.DEFAULT, DependencyResolutionType.PLATFORM_PROVIDED);\nallModuleLoadRequests.add(observeModuleLoadReq);\n}\n}\nif (\"k8s\".equals(compilationOptions.getCloud()) || \"docker\".equals(compilationOptions.getCloud()) ||\n\"choreo\".equals(compilationOptions.getCloud())) {\nString moduleName = Names.CLOUD.getValue();\nModuleLoadRequest c2cModuleLoadReq = new ModuleLoadRequest(\nPackageOrg.from(Names.BALLERINA_ORG.value), moduleName,\nPackageDependencyScope.DEFAULT, DependencyResolutionType.COMPILER_PLUGIN);\nallModuleLoadRequests.add(c2cModuleLoadReq);\n}\nreturn allModuleLoadRequests;\n}\nprivate DependencyGraph resolveBALADependencies(ResolutionOptions resolutionOptions) {\nDependencyGraph dependencyNodeGraph = createDependencyNodeGraph(\nrootPackageContext.dependencyGraph());\nreturn buildPackageGraph(dependencyNodeGraph, rootPackageContext.project().currentPackage(),\npackageResolver, resolutionOptions);\n}\nprivate DependencyGraph resolveSourceDependencies(ResolutionOptions resolutionOptions) {\nLinkedHashSet moduleLoadRequests = getModuleLoadRequestsOfDirectDependencies();\nResolutionEngine resolutionEngine = new ResolutionEngine(rootPackageContext.descriptor(),\nblendedManifest, packageResolver, moduleResolver, resolutionOptions);\nDependencyGraph dependencyNodeGraph =\nresolutionEngine.resolveDependencies(moduleLoadRequests);\nreturn buildPackageGraph(dependencyNodeGraph, rootPackageContext.project().currentPackage(),\npackageResolver, resolutionOptions);\n}\nstatic Optional findModuleInPackage(PackageContext resolvedPackage, String moduleNameStr) {\nPackageName packageName = resolvedPackage.packageName();\nModuleName moduleName;\nif (packageName.value().equals(moduleNameStr)) {\nmoduleName = ModuleName.from(packageName);\n} else {\nString moduleNamePart = moduleNameStr.substring(packageName.value().length() + 1);\nif (moduleNamePart.isEmpty()) {\nmoduleNamePart = 
null;\n}\nmoduleName = ModuleName.from(packageName, moduleNamePart);\n}\nModuleContext resolvedModule = resolvedPackage.moduleContext(moduleName);\nif (resolvedModule == null) {\nreturn Optional.empty();\n}\nreturn Optional.of(resolvedModule);\n}\nprivate DependencyGraph buildPackageGraph(DependencyGraph depGraph,\nPackage rootPackage,\nPackageResolver packageResolver,\nResolutionOptions resolutionOptions) {\nPackageContainer resolvedPkgContainer = new PackageContainer<>();\nDependencyNode rootNode = depGraph.getRoot();\nResolvedPackageDependency rootResolvedPackage = new ResolvedPackageDependency(rootPackage,\nrootNode.scope(), rootNode.resolutionType());\nresolvedPkgContainer.add(rootNode.pkgDesc().org(), rootNode.pkgDesc().name(), rootResolvedPackage);\nList resolutionRequests = depGraph.getNodes().stream()\n.filter(depNode -> !depNode.equals(rootNode))\n.map(this::createFromDepNode)\n.collect(Collectors.toList());\nCollection resolutionResponses =\npackageResolver.resolvePackages(resolutionRequests, resolutionOptions);\nfor (ResolutionResponse resolutionResp : resolutionResponses) {\nif (resolutionResp.resolutionStatus().equals(ResolutionResponse.ResolutionStatus.RESOLVED)) {\nPackageDescriptor pkgDesc = resolutionResp.responseDescriptor();\nResolutionRequest resolutionReq = resolutionResp.resolutionRequest();\nResolvedPackageDependency resolvedPkg = new ResolvedPackageDependency(\nresolutionResp.resolvedPackage(),\nresolutionReq.scope(),\nresolutionReq.resolutionType());\nresolvedPkgContainer.add(pkgDesc.org(), pkgDesc.name(), resolvedPkg);\n}\n}\nDependencyGraphBuilder depGraphBuilder =\nDependencyGraphBuilder.getBuilder(rootResolvedPackage);\nfor (DependencyNode depNode : depGraph.getNodes()) {\nOptional resolvedPkgOptional = resolvedPkgContainer.get(\ndepNode.pkgDesc().org(), depNode.pkgDesc().name());\nif (resolvedPkgOptional.isPresent()) {\nResolvedPackageDependency resolvedPkg = resolvedPkgOptional.get();\ndepGraphBuilder.add(resolvedPkg);\nList directPkgDependencies =\ndepGraph.getDirectDependencies(depNode)\n.stream()\n.map(directDepNode -> resolvedPkgContainer.get(\ndirectDepNode.pkgDesc().org(), directDepNode.pkgDesc().name()))\n.flatMap(Optional::stream)\n.collect(Collectors.toList());\ndepGraphBuilder.addDependencies(resolvedPkg, directPkgDependencies);\n}\n}\nreturn depGraphBuilder.build();\n}\nprivate ResolutionRequest createFromDepNode(DependencyNode depNode) {\nreturn ResolutionRequest.from(depNode.pkgDesc(), depNode.scope(), depNode.resolutionType());\n}\nprivate DependencyGraph createDependencyNodeGraph(\nDependencyGraph pkgDescDepGraph) {\nDependencyNode rootNode = new DependencyNode(rootPackageContext.descriptor());\nDependencyGraphBuilder graphBuilder = DependencyGraphBuilder.getBuilder(rootNode);\nfor (PackageDescriptor pkgDesc : pkgDescDepGraph.getNodes()) {\nDependencyNode dependencyNode = new DependencyNode(pkgDesc);\ngraphBuilder.add(dependencyNode);\nfor (PackageDescriptor directDepPkgDesc : pkgDescDepGraph.getDirectDependencies(pkgDesc)) {\ngraphBuilder.addDependency(dependencyNode, new DependencyNode(directDepPkgDesc));\n}\n}\nreturn graphBuilder.build();\n}\n/**\n* Resolve dependencies of each package, which in turn resolves dependencies of each module.\n*
<p>
\n* This logic should get packages from the dependency graph, not from the PackageCache.\n* Because PackageCache may contain various versions of a single package,\n* but the dependency graph contains only the resolved version.\n*/\nprivate void resolveDependencies(DependencyResolution dependencyResolution) {\nList sortedModuleList = new ArrayList<>();\nList sortedPackages = dependencyGraph.toTopologicallySortedList();\nfor (ResolvedPackageDependency pkgDependency : sortedPackages) {\nPackage resolvedPackage = pkgDependency.packageInstance();\nresolvedPackage.packageContext().resolveDependencies(dependencyResolution);\nDependencyGraph moduleDependencyGraph = resolvedPackage.moduleDependencyGraph();\nList sortedModuleIds = moduleDependencyGraph.toTopologicallySortedList();\nfor (ModuleId moduleId : sortedModuleIds) {\nModuleContext moduleContext = resolvedPackage.module(moduleId).moduleContext();\nsortedModuleList.add(moduleContext);\n}\n}\nthis.topologicallySortedModuleList = Collections.unmodifiableList(sortedModuleList);\n}\nprivate ModuleResolver createModuleResolver(PackageContext rootPackageContext,\nProjectEnvironment projectEnvContext,\nResolutionOptions resolutionOptions) {\nList moduleNames = rootPackageContext.moduleIds().stream()\n.map(rootPackageContext::moduleContext)\n.map(ModuleContext::moduleName)\n.collect(Collectors.toList());\nreturn new ModuleResolver(rootPackageContext.descriptor(), moduleNames, blendedManifest,\nprojectEnvContext.getService(PackageResolver.class), resolutionOptions);\n}\nprivate BlendedManifest createBlendedManifest(PackageContext rootPackageContext,\nProjectEnvironment projectEnvContext) {\nreturn BlendedManifest.from(rootPackageContext.dependencyManifest(),\nrootPackageContext.packageManifest(),\nprojectEnvContext.getService(LocalPackageRepository.class));\n}\nprivate ResolutionOptions getResolutionOptions(PackageContext rootPackageContext,\nCompilationOptions compilationOptions) {\nreturn ResolutionOptions.builder()\n.setOffline(compilationOptions.offlineBuild())\n.setSticky(getSticky(rootPackageContext))\n.setDumpGraph(compilationOptions.dumpGraph())\n.setDumpRawGraphs(compilationOptions.dumpRawGraphs())\n.build();\n}\n/**\n* This entity is used by packages and modules to resolve their dependencies from the dependency graph.\n*\n* @since 2.0.0\n*/\nstatic class DependencyResolution {\nprivate final PackageCache delegate;\nprivate final ModuleResolver moduleResolver;\nprivate final DependencyGraph dependencyGraph;\nprivate DependencyResolution(PackageCache delegate,\nModuleResolver moduleResolver,\nDependencyGraph dependencyGraph) {\nthis.delegate = delegate;\nthis.moduleResolver = moduleResolver;\nthis.dependencyGraph = dependencyGraph;\n}\npublic Optional getPackage(PackageOrg packageOrg, PackageName packageName) {\nList resolvedPackages = delegate.getPackages(packageOrg, packageName);\nfor (Package resolvedPackage : resolvedPackages) {\nif (containsPackage(resolvedPackage)) {\nreturn Optional.of(resolvedPackage);\n}\n}\nreturn Optional.empty();\n}\npublic Optional getModule(PackageOrg packageOrg, PackageName packageName, ModuleName moduleName) {\nOptional resolvedPkg = getPackage(packageOrg, packageName);\nif (resolvedPkg.isEmpty()) {\nreturn Optional.empty();\n}\nModule resolvedModule = resolvedPkg.get().module(moduleName);\nif (resolvedModule == null) {\nreturn Optional.empty();\n}\nreturn Optional.of(resolvedModule);\n}\npublic Optional getModule(PackageOrg packageOrg, String moduleNameStr) {\nImportModuleRequest importModuleRequest = new 
ImportModuleRequest(packageOrg, moduleNameStr);\nImportModuleResponse importModuleResponse = moduleResolver.getImportModuleResponse(importModuleRequest);\nif (importModuleResponse == null) {\nreturn Optional.empty();\n}\nPackageName packageName;\npackageName = importModuleResponse.packageDescriptor().name();\nOptional optionalPackage = getPackage(packageOrg,\npackageName);\nif (optionalPackage.isEmpty()) {\nreturn Optional.empty();\n}\nreturn PackageResolution.findModuleInPackage(optionalPackage.get().packageContext(), moduleNameStr);\n}\nprivate boolean containsPackage(Package pkg) {\nfor (ResolvedPackageDependency graphNode : dependencyGraph.getNodes()) {\nif (graphNode.packageId() == pkg.packageId()) {\nreturn true;\n}\n}\nreturn false;\n}\n}\n}" + }, + { + "comment": "why not make this a constant", + "method_body": "private static Class getJDBCConnectionClass() {\ntry {\nreturn (Class) Class.forName(\"com.mysql.jdbc.Connection\");\n} catch (final ClassNotFoundException ignored) {\nreturn (Class) Class.forName(\"com.mysql.cj.jdbc.JdbcConnection\");\n}\n}", + "target_code": "return (Class) Class.forName(\"com.mysql.cj.jdbc.JdbcConnection\");", + "method_body_after": "private static Class getJDBCConnectionClass() {\ntry {\nreturn (Class) Class.forName(\"com.mysql.jdbc.Connection\");\n} catch (final ClassNotFoundException ignored) {\nreturn (Class) Class.forName(\"com.mysql.cj.jdbc.JdbcConnection\");\n}\n}", + "context_before": "class MySQLXAConnectionWrapper implements XAConnectionWrapper {\nprivate static final Class JDBC_CONNECTION_CLASS = getJDBCConnectionClass();\nprivate static final Method XA_CONNECTION_CREATOR_METHOD = getXAConnectionCreatorMethod();\n@SuppressWarnings(\"unchecked\")\n@SneakyThrows(ReflectiveOperationException.class)\n@SneakyThrows(ReflectiveOperationException.class)\nprivate static Method getXAConnectionCreatorMethod() {\nMethod result = getXADataSourceClass().getDeclaredMethod(\"wrapConnection\", Connection.class);\nresult.setAccessible(true);\nreturn result;\n}\n@SuppressWarnings(\"unchecked\")\n@SneakyThrows(ReflectiveOperationException.class)\nprivate static Class getXADataSourceClass() {\ntry {\nreturn (Class) Class.forName(\"com.mysql.jdbc.jdbc2.optional.MysqlXADataSource\");\n} catch (final ClassNotFoundException ignored) {\nreturn (Class) Class.forName(\"com.mysql.cj.jdbc.MysqlXADataSource\");\n}\n}\n@Override\npublic XAConnection wrap(final XADataSource xaDataSource, final Connection connection) throws SQLException {\nreturn createXAConnection(xaDataSource, connection.unwrap(JDBC_CONNECTION_CLASS));\n}\n@SneakyThrows(ReflectiveOperationException.class)\nprivate XAConnection createXAConnection(final XADataSource xaDataSource, final Connection connection) {\nreturn (XAConnection) XA_CONNECTION_CREATOR_METHOD.invoke(xaDataSource, connection);\n}\n}", + "context_after": "class MySQLXAConnectionWrapper implements XAConnectionWrapper {\nprivate static final Class JDBC_CONNECTION_CLASS = getJDBCConnectionClass();\nprivate static final Method XA_CONNECTION_CREATOR_METHOD = getXAConnectionCreatorMethod();\n@SuppressWarnings(\"unchecked\")\n@SneakyThrows(ReflectiveOperationException.class)\n@SneakyThrows(ReflectiveOperationException.class)\nprivate static Method getXAConnectionCreatorMethod() {\nMethod result = getXADataSourceClass().getDeclaredMethod(\"wrapConnection\", Connection.class);\nresult.setAccessible(true);\nreturn result;\n}\n@SuppressWarnings(\"unchecked\")\n@SneakyThrows(ReflectiveOperationException.class)\nprivate static Class getXADataSourceClass() 
{\ntry {\nreturn (Class) Class.forName(\"com.mysql.jdbc.jdbc2.optional.MysqlXADataSource\");\n} catch (final ClassNotFoundException ignored) {\nreturn (Class) Class.forName(\"com.mysql.cj.jdbc.MysqlXADataSource\");\n}\n}\n@Override\npublic XAConnection wrap(final XADataSource xaDataSource, final Connection connection) throws SQLException {\nreturn createXAConnection(xaDataSource, connection.unwrap(JDBC_CONNECTION_CLASS));\n}\n@SneakyThrows(ReflectiveOperationException.class)\nprivate XAConnection createXAConnection(final XADataSource xaDataSource, final Connection connection) {\nreturn (XAConnection) XA_CONNECTION_CREATOR_METHOD.invoke(xaDataSource, connection);\n}\n}" + }, + { + "comment": "One of these should be **b**`.allocation().isPresent()`", + "method_body": "private static int compareForRelocation(Node a, Node b) {\nint capacity = ResourceCapacity.of(a).compare(ResourceCapacity.of(b));\nif (capacity != 0) return capacity;\nif (!a.allocation().isPresent() && b.allocation().isPresent()) return -1;\nif (a.allocation().isPresent() && !b.allocation().isPresent()) return 1;\nif (a.allocation().isPresent() && a.allocation().isPresent()) {\nif (a.allocation().get().membership().cluster().type().equals(ClusterSpec.Type.container) &&\n!b.allocation().get().membership().cluster().type().equals(ClusterSpec.Type.container))\nreturn -1;\nif (!a.allocation().get().membership().cluster().type().equals(ClusterSpec.Type.container) &&\nb.allocation().get().membership().cluster().type().equals(ClusterSpec.Type.container))\nreturn 1;\n}\nreturn a.hostname().compareTo(b.hostname());\n}", + "target_code": "if (a.allocation().isPresent() && a.allocation().isPresent()) {", + "method_body_after": "private static int compareForRelocation(Node a, Node b) {\nint capacity = ResourceCapacity.of(a).compare(ResourceCapacity.of(b));\nif (capacity != 0) return capacity;\nif (!a.allocation().isPresent() && b.allocation().isPresent()) return -1;\nif (a.allocation().isPresent() && !b.allocation().isPresent()) return 1;\nif (a.allocation().isPresent() && a.allocation().isPresent()) {\nif (a.allocation().get().membership().cluster().type().equals(ClusterSpec.Type.container) &&\n!b.allocation().get().membership().cluster().type().equals(ClusterSpec.Type.container))\nreturn -1;\nif (!a.allocation().get().membership().cluster().type().equals(ClusterSpec.Type.container) &&\nb.allocation().get().membership().cluster().type().equals(ClusterSpec.Type.container))\nreturn 1;\n}\nreturn a.hostname().compareTo(b.hostname());\n}", + "context_before": "class NodePrioritizer {\nprivate final Map nodes = new HashMap<>();\nprivate final List allNodes;\nprivate final DockerHostCapacity capacity;\nprivate final NodeSpec requestedNodes;\nprivate final ApplicationId appId;\nprivate final ClusterSpec clusterSpec;\nprivate final boolean isDocker;\nprivate final boolean isAllocatingForReplacement;\nprivate final Set spareHosts;\nprivate final Map headroomHosts;\nNodePrioritizer(List allNodes, ApplicationId appId, ClusterSpec clusterSpec, NodeSpec nodeSpec, NodeFlavors nodeFlavors, int spares) {\nthis.allNodes = Collections.unmodifiableList(allNodes);\nthis.requestedNodes = nodeSpec;\nthis.clusterSpec = clusterSpec;\nthis.appId = appId;\nthis.spareHosts = findSpareHosts(allNodes, spares);\nthis.headroomHosts = findHeadroomHosts(allNodes, spareHosts, nodeFlavors);\nthis.capacity = new DockerHostCapacity(allNodes);\nlong nofFailedNodes = allNodes.stream()\n.filter(node -> node.state().equals(Node.State.failed))\n.filter(node -> 
node.allocation().isPresent())\n.filter(node -> node.allocation().get().owner().equals(appId))\n.filter(node -> node.allocation().get().membership().cluster().id().equals(clusterSpec.id()))\n.count();\nlong nofNodesInCluster = allNodes.stream()\n.filter(node -> node.allocation().isPresent())\n.filter(node -> node.allocation().get().owner().equals(appId))\n.filter(node -> node.allocation().get().membership().cluster().id().equals(clusterSpec.id()))\n.count();\nthis.isAllocatingForReplacement = isReplacement(nofNodesInCluster, nofFailedNodes);\nthis.isDocker = isDocker();\n}\n/**\n* From ipAddress - get hostname\n*\n* @return hostname or null if not able to do the lookup\n*/\nprivate static String lookupHostname(String ipAddress) {\ntry {\nreturn InetAddress.getByName(ipAddress).getHostName();\n} catch (UnknownHostException e) {\ne.printStackTrace();\n}\nreturn null;\n}\n/**\n* Spare hosts are the two hosts in the system with the most free capacity.\n*\n* We do not count retired or inactive nodes as used capacity (as they could have been\n* moved to create space for the spare node in the first place).\n*/\nprivate static Set findSpareHosts(List nodes, int spares) {\nDockerHostCapacity capacity = new DockerHostCapacity(new ArrayList<>(nodes));\nreturn nodes.stream()\n.filter(node -> node.type().equals(NodeType.host))\n.filter(dockerHost -> dockerHost.state().equals(Node.State.active))\n.filter(dockerHost -> capacity.freeIPs(dockerHost) > 0)\n.sorted(capacity::compareWithoutInactive)\n.limit(spares)\n.collect(Collectors.toSet());\n}\n/**\n* Headroom hosts are the hosts with the least but sufficient capacity for the requested headroom.\n*\n* If not enough headroom - the headroom violating hosts are the ones that are closest to fulfilling\n* a headroom request.\n*/\nprivate static Map findHeadroomHosts(List nodes, Set spareNodes, NodeFlavors flavors) {\nDockerHostCapacity capacity = new DockerHostCapacity(nodes);\nMap headroomHosts = new HashMap<>();\nList hostsSortedOnLeastCapacity = nodes.stream()\n.filter(n -> !spareNodes.contains(n))\n.filter(node -> node.type().equals(NodeType.host))\n.filter(dockerHost -> dockerHost.state().equals(Node.State.active))\n.filter(dockerHost -> capacity.freeIPs(dockerHost) > 0)\n.sorted((a, b) -> capacity.compareWithoutInactive(b, a))\n.collect(Collectors.toList());\nfor (Flavor flavor : flavors.getFlavors().stream().filter(f -> f.getIdealHeadroom() > 0).collect(Collectors.toList())) {\nSet tempHeadroom = new HashSet<>();\nSet notEnoughCapacity = new HashSet<>();\nResourceCapacity headroomCapacity = ResourceCapacity.of(flavor);\nfor (Node host : hostsSortedOnLeastCapacity) {\nif (headroomHosts.containsKey(host)) continue;\nif (capacity.hasCapacityWhenRetiredAndInactiveNodesAreGone(host, headroomCapacity)) {\nheadroomHosts.put(host, headroomCapacity);\ntempHeadroom.add(host);\n} else {\nnotEnoughCapacity.add(host);\n}\nif (tempHeadroom.size() == flavor.getIdealHeadroom()) {\nbreak;\n}\n}\nif (tempHeadroom.size() < flavor.getIdealHeadroom()) {\nList violations = notEnoughCapacity.stream()\n.sorted((a, b) -> capacity.compare(b, a))\n.limit(flavor.getIdealHeadroom() - tempHeadroom.size())\n.collect(Collectors.toList());\nfor (Node hostViolatingHeadrom : violations) {\nheadroomHosts.put(hostViolatingHeadrom, headroomCapacity);\n}\n}\n}\nreturn headroomHosts;\n}\n/**\n* @return The list of nodes sorted by PrioritizableNode::compare\n*/\nList prioritize() {\nList priorityList = new ArrayList<>(nodes.values());\nCollections.sort(priorityList);\nreturn 
priorityList;\n}\n/**\n* Add nodes that have been previously reserved to the same application from\n* an earlier downsizing of a cluster\n*/\nvoid addSurplusNodes(List surplusNodes) {\nfor (Node node : surplusNodes) {\nPrioritizableNode nodePri = toNodePriority(node, true, false);\nif (!nodePri.violatesSpares || isAllocatingForReplacement) {\nnodes.put(node, nodePri);\n}\n}\n}\n/**\n* Add a node on each docker host with enough capacity for the requested flavor\n*/\nvoid addNewDockerNodes() {\nif (!isDocker) return;\nDockerHostCapacity capacity = new DockerHostCapacity(allNodes);\nfor (Node node : allNodes) {\nif (node.type() == NodeType.host && node.state() == Node.State.active) {\nboolean conflictingCluster = false;\nNodeList list = new NodeList(allNodes);\nNodeList childrenWithSameApp = list.childNodes(node).owner(appId);\nfor (Node child : childrenWithSameApp.asList()) {\nif (child.allocation().get().membership().cluster().id().equals(clusterSpec.id())) {\nconflictingCluster = true;\nbreak;\n}\n}\nif (!conflictingCluster && capacity.hasCapacity(node, ResourceCapacity.of(getFlavor(requestedNodes)))) {\nSet ipAddresses = DockerHostCapacity.findFreeIps(node, allNodes);\nif (ipAddresses.isEmpty()) continue;\nString ipAddress = ipAddresses.stream().findFirst().get();\nString hostname = lookupHostname(ipAddress);\nif (hostname == null) continue;\nNode newNode = Node.createDockerNode(\"fake-\" + hostname, Collections.singleton(ipAddress),\nCollections.emptySet(), hostname, Optional.of(node.hostname()), getFlavor(requestedNodes), NodeType.tenant);\nPrioritizableNode nodePri = toNodePriority(newNode, false, true);\nif (!nodePri.violatesSpares || isAllocatingForReplacement) {\nnodes.put(newNode, nodePri);\n}\n}\n}\n}\n}\n/**\n* Add existing nodes allocated to the application\n*/\nvoid addApplicationNodes() {\nList legalStates = Arrays.asList(Node.State.active, Node.State.inactive, Node.State.reserved);\nallNodes.stream()\n.filter(node -> node.type().equals(requestedNodes.type()))\n.filter(node -> legalStates.contains(node.state()))\n.filter(node -> node.allocation().isPresent())\n.filter(node -> node.allocation().get().owner().equals(appId))\n.map(node -> toNodePriority(node, false, false))\n.forEach(prioritizableNode -> nodes.put(prioritizableNode.node, prioritizableNode));\n}\n/**\n* Add nodes already provisioned, but not allocated to any application\n*/\nvoid addReadyNodes() {\nallNodes.stream()\n.filter(node -> node.type().equals(requestedNodes.type()))\n.filter(node -> node.state().equals(Node.State.ready))\n.map(node -> toNodePriority(node, false, false))\n.filter(n -> !n.violatesSpares || isAllocatingForReplacement)\n.forEach(prioritizableNode -> nodes.put(prioritizableNode.node, prioritizableNode));\n}\n/**\n* Convert a list of nodes to a list of node priorities. 
This includes finding and calculating\n* parameters for the priority sorting procedure.\n*/\nprivate PrioritizableNode toNodePriority(Node node, boolean isSurplusNode, boolean isNewNode) {\nPrioritizableNode pri = new PrioritizableNode();\npri.node = node;\npri.isSurplusNode = isSurplusNode;\npri.isNewNode = isNewNode;\npri.preferredOnFlavor = requestedNodes.specifiesNonStockFlavor() && node.flavor().equals(getFlavor(requestedNodes));\npri.parent = findParentNode(node);\nif (pri.parent.isPresent()) {\nNode parent = pri.parent.get();\npri.freeParentCapacity = capacity.freeCapacityOf(parent, false);\nif (spareHosts.contains(parent)) {\npri.violatesSpares = true;\n}\nif (headroomHosts.containsKey(parent) && isPreferredNodeToBeReloacted(allNodes, node, parent)) {\nResourceCapacity neededCapacity = headroomHosts.get(parent);\nif (isNewNode) {\nneededCapacity = ResourceCapacity.composite(neededCapacity, new ResourceCapacity(node));\n}\npri.violatesHeadroom = !capacity.hasCapacity(parent, neededCapacity);\n}\n}\nreturn pri;\n}\nstatic boolean isPreferredNodeToBeReloacted(List nodes, Node node, Node parent) {\nNodeList list = new NodeList(nodes);\nreturn list.childNodes(parent).asList().stream()\n.sorted(NodePrioritizer::compareForRelocation)\n.findFirst()\n.filter(n -> n.equals(node))\n.isPresent();\n}\nprivate boolean isReplacement(long nofNodesInCluster, long nodeFailedNodes) {\nif (nodeFailedNodes == 0) return false;\nint wantedCount = 0;\nif (requestedNodes instanceof NodeSpec.CountNodeSpec) {\nNodeSpec.CountNodeSpec countSpec = (NodeSpec.CountNodeSpec) requestedNodes;\nwantedCount = countSpec.getCount();\n}\nreturn (wantedCount > nofNodesInCluster - nodeFailedNodes);\n}\nprivate static Flavor getFlavor(NodeSpec requestedNodes) {\nif (requestedNodes instanceof NodeSpec.CountNodeSpec) {\nNodeSpec.CountNodeSpec countSpec = (NodeSpec.CountNodeSpec) requestedNodes;\nreturn countSpec.getFlavor();\n}\nreturn null;\n}\nprivate boolean isDocker() {\nFlavor flavor = getFlavor(requestedNodes);\nreturn (flavor != null) && flavor.getType().equals(Flavor.Type.DOCKER_CONTAINER);\n}\nprivate Optional findParentNode(Node node) {\nif (!node.parentHostname().isPresent()) return Optional.empty();\nreturn allNodes.stream()\n.filter(n -> n.hostname().equals(node.parentHostname().orElse(\" NOT A NODE\")))\n.findAny();\n}\n}", "context_after": "class NodePrioritizer {\nprivate final Map nodes = new HashMap<>();\nprivate final List allNodes;\nprivate final DockerHostCapacity capacity;\nprivate final NodeSpec requestedNodes;\nprivate final ApplicationId appId;\nprivate final ClusterSpec clusterSpec;\nprivate final boolean isDocker;\nprivate final boolean isAllocatingForReplacement;\nprivate final Set spareHosts;\nprivate final Map headroomHosts;\nNodePrioritizer(List allNodes, ApplicationId appId, ClusterSpec clusterSpec, NodeSpec nodeSpec, NodeFlavors nodeFlavors, int spares) {\nthis.allNodes = Collections.unmodifiableList(allNodes);\nthis.requestedNodes = nodeSpec;\nthis.clusterSpec = clusterSpec;\nthis.appId = appId;\nthis.spareHosts = findSpareHosts(allNodes, spares);\nthis.headroomHosts = findHeadroomHosts(allNodes, spareHosts, nodeFlavors);\nthis.capacity = new DockerHostCapacity(allNodes);\nlong nofFailedNodes = allNodes.stream()\n.filter(node -> node.state().equals(Node.State.failed))\n.filter(node -> node.allocation().isPresent())\n.filter(node -> node.allocation().get().owner().equals(appId))\n.filter(node -> node.allocation().get().membership().cluster().id().equals(clusterSpec.id()))\n.count();\nlong 
nofNodesInCluster = allNodes.stream()\n.filter(node -> node.allocation().isPresent())\n.filter(node -> node.allocation().get().owner().equals(appId))\n.filter(node -> node.allocation().get().membership().cluster().id().equals(clusterSpec.id()))\n.count();\nthis.isAllocatingForReplacement = isReplacement(nofNodesInCluster, nofFailedNodes);\nthis.isDocker = isDocker();\n}\n/**\n* From an IP address, get the hostname\n*\n* @return hostname or null if not able to do the lookup\n*/\nprivate static String lookupHostname(String ipAddress) {\ntry {\nreturn InetAddress.getByName(ipAddress).getHostName();\n} catch (UnknownHostException e) {\ne.printStackTrace();\n}\nreturn null;\n}\n/**\n* Spare hosts are the two hosts in the system with the most free capacity.\n*\n* We do not count retired or inactive nodes as used capacity (as they could have been\n* moved to create space for the spare node in the first place).\n*/\nprivate static Set findSpareHosts(List nodes, int spares) {\nDockerHostCapacity capacity = new DockerHostCapacity(new ArrayList<>(nodes));\nreturn nodes.stream()\n.filter(node -> node.type().equals(NodeType.host))\n.filter(dockerHost -> dockerHost.state().equals(Node.State.active))\n.filter(dockerHost -> capacity.freeIPs(dockerHost) > 0)\n.sorted(capacity::compareWithoutInactive)\n.limit(spares)\n.collect(Collectors.toSet());\n}\n/**\n* Headroom hosts are the hosts with the least but sufficient capacity for the requested headroom.\n*\n* If there is not enough headroom, the headroom-violating hosts are the ones that are closest to fulfilling\n* a headroom request.\n*/\nprivate static Map findHeadroomHosts(List nodes, Set spareNodes, NodeFlavors flavors) {\nDockerHostCapacity capacity = new DockerHostCapacity(nodes);\nMap headroomHosts = new HashMap<>();\nList hostsSortedOnLeastCapacity = nodes.stream()\n.filter(n -> !spareNodes.contains(n))\n.filter(node -> node.type().equals(NodeType.host))\n.filter(dockerHost -> dockerHost.state().equals(Node.State.active))\n.filter(dockerHost -> capacity.freeIPs(dockerHost) > 0)\n.sorted((a, b) -> capacity.compareWithoutInactive(b, a))\n.collect(Collectors.toList());\nfor (Flavor flavor : flavors.getFlavors().stream().filter(f -> f.getIdealHeadroom() > 0).collect(Collectors.toList())) {\nSet tempHeadroom = new HashSet<>();\nSet notEnoughCapacity = new HashSet<>();\nResourceCapacity headroomCapacity = ResourceCapacity.of(flavor);\nfor (Node host : hostsSortedOnLeastCapacity) {\nif (headroomHosts.containsKey(host)) continue;\nif (capacity.hasCapacityWhenRetiredAndInactiveNodesAreGone(host, headroomCapacity)) {\nheadroomHosts.put(host, headroomCapacity);\ntempHeadroom.add(host);\n} else {\nnotEnoughCapacity.add(host);\n}\nif (tempHeadroom.size() == flavor.getIdealHeadroom()) {\nbreak;\n}\n}\nif (tempHeadroom.size() < flavor.getIdealHeadroom()) {\nList violations = notEnoughCapacity.stream()\n.sorted((a, b) -> capacity.compare(b, a))\n.limit(flavor.getIdealHeadroom() - tempHeadroom.size())\n.collect(Collectors.toList());\nfor (Node hostViolatingHeadrom : violations) {\nheadroomHosts.put(hostViolatingHeadrom, headroomCapacity);\n}\n}\n}\nreturn headroomHosts;\n}\n/**\n* @return The list of nodes sorted by PrioritizableNode::compare\n*/\nList prioritize() {\nList priorityList = new ArrayList<>(nodes.values());\nCollections.sort(priorityList);\nreturn priorityList;\n}\n/**\n* Add nodes that have been previously reserved to the same application from\n* an earlier downsizing of a cluster\n*/\nvoid addSurplusNodes(List surplusNodes) {\nfor (Node node : surplusNodes) 
{\nPrioritizableNode nodePri = toNodePriority(node, true, false);\nif (!nodePri.violatesSpares || isAllocatingForReplacement) {\nnodes.put(node, nodePri);\n}\n}\n}\n/**\n* Add a node on each docker host with enough capacity for the requested flavor\n*/\nvoid addNewDockerNodes() {\nif (!isDocker) return;\nDockerHostCapacity capacity = new DockerHostCapacity(allNodes);\nfor (Node node : allNodes) {\nif (node.type() == NodeType.host && node.state() == Node.State.active) {\nboolean conflictingCluster = false;\nNodeList list = new NodeList(allNodes);\nNodeList childrenWithSameApp = list.childNodes(node).owner(appId);\nfor (Node child : childrenWithSameApp.asList()) {\nif (child.allocation().get().membership().cluster().id().equals(clusterSpec.id())) {\nconflictingCluster = true;\nbreak;\n}\n}\nif (!conflictingCluster && capacity.hasCapacity(node, ResourceCapacity.of(getFlavor(requestedNodes)))) {\nSet ipAddresses = DockerHostCapacity.findFreeIps(node, allNodes);\nif (ipAddresses.isEmpty()) continue;\nString ipAddress = ipAddresses.stream().findFirst().get();\nString hostname = lookupHostname(ipAddress);\nif (hostname == null) continue;\nNode newNode = Node.createDockerNode(\"fake-\" + hostname, Collections.singleton(ipAddress),\nCollections.emptySet(), hostname, Optional.of(node.hostname()), getFlavor(requestedNodes), NodeType.tenant);\nPrioritizableNode nodePri = toNodePriority(newNode, false, true);\nif (!nodePri.violatesSpares || isAllocatingForReplacement) {\nnodes.put(newNode, nodePri);\n}\n}\n}\n}\n}\n/**\n* Add existing nodes allocated to the application\n*/\nvoid addApplicationNodes() {\nList legalStates = Arrays.asList(Node.State.active, Node.State.inactive, Node.State.reserved);\nallNodes.stream()\n.filter(node -> node.type().equals(requestedNodes.type()))\n.filter(node -> legalStates.contains(node.state()))\n.filter(node -> node.allocation().isPresent())\n.filter(node -> node.allocation().get().owner().equals(appId))\n.map(node -> toNodePriority(node, false, false))\n.forEach(prioritizableNode -> nodes.put(prioritizableNode.node, prioritizableNode));\n}\n/**\n* Add nodes already provisioned, but not allocated to any application\n*/\nvoid addReadyNodes() {\nallNodes.stream()\n.filter(node -> node.type().equals(requestedNodes.type()))\n.filter(node -> node.state().equals(Node.State.ready))\n.map(node -> toNodePriority(node, false, false))\n.filter(n -> !n.violatesSpares || isAllocatingForReplacement)\n.forEach(prioritizableNode -> nodes.put(prioritizableNode.node, prioritizableNode));\n}\n/**\n* Convert a list of nodes to a list of node priorities. 
This includes finding and calculating\n* parameters for the priority sorting procedure.\n*/\nprivate PrioritizableNode toNodePriority(Node node, boolean isSurplusNode, boolean isNewNode) {\nPrioritizableNode pri = new PrioritizableNode();\npri.node = node;\npri.isSurplusNode = isSurplusNode;\npri.isNewNode = isNewNode;\npri.preferredOnFlavor = requestedNodes.specifiesNonStockFlavor() && node.flavor().equals(getFlavor(requestedNodes));\npri.parent = findParentNode(node);\nif (pri.parent.isPresent()) {\nNode parent = pri.parent.get();\npri.freeParentCapacity = capacity.freeCapacityOf(parent, false);\nif (spareHosts.contains(parent)) {\npri.violatesSpares = true;\n}\nif (headroomHosts.containsKey(parent) && isPreferredNodeToBeReloacted(allNodes, node, parent)) {\nResourceCapacity neededCapacity = headroomHosts.get(parent);\nif (isNewNode) {\nneededCapacity = ResourceCapacity.composite(neededCapacity, new ResourceCapacity(node));\n}\npri.violatesHeadroom = !capacity.hasCapacity(parent, neededCapacity);\n}\n}\nreturn pri;\n}\nstatic boolean isPreferredNodeToBeReloacted(List nodes, Node node, Node parent) {\nNodeList list = new NodeList(nodes);\nreturn list.childNodes(parent).asList().stream()\n.sorted(NodePrioritizer::compareForRelocation)\n.findFirst()\n.filter(n -> n.equals(node))\n.isPresent();\n}\nprivate boolean isReplacement(long nofNodesInCluster, long nodeFailedNodes) {\nif (nodeFailedNodes == 0) return false;\nint wantedCount = 0;\nif (requestedNodes instanceof NodeSpec.CountNodeSpec) {\nNodeSpec.CountNodeSpec countSpec = (NodeSpec.CountNodeSpec) requestedNodes;\nwantedCount = countSpec.getCount();\n}\nreturn (wantedCount > nofNodesInCluster - nodeFailedNodes);\n}\nprivate static Flavor getFlavor(NodeSpec requestedNodes) {\nif (requestedNodes instanceof NodeSpec.CountNodeSpec) {\nNodeSpec.CountNodeSpec countSpec = (NodeSpec.CountNodeSpec) requestedNodes;\nreturn countSpec.getFlavor();\n}\nreturn null;\n}\nprivate boolean isDocker() {\nFlavor flavor = getFlavor(requestedNodes);\nreturn (flavor != null) && flavor.getType().equals(Flavor.Type.DOCKER_CONTAINER);\n}\nprivate Optional findParentNode(Node node) {\nif (!node.parentHostname().isPresent()) return Optional.empty();\nreturn allNodes.stream()\n.filter(n -> n.hostname().equals(node.parentHostname().orElse(\" NOT A NODE\")))\n.findAny();\n}\n}" }, { "comment": "@cheese8 Can it be replaced by the `isContainsJoinQuery` method?", "method_body": "private boolean isGenerateSQLTokenForEncryptOnJoinSegments(final SQLStatementContext sqlStatementContext) {\nreturn sqlStatementContext instanceof SelectStatementContext && !WhereExtractUtil.getJoinWhereSegments((SelectStatement) sqlStatementContext.getSqlStatement()).isEmpty();\n}", "target_code": "return sqlStatementContext instanceof SelectStatementContext && !WhereExtractUtil.getJoinWhereSegments((SelectStatement) sqlStatementContext.getSqlStatement()).isEmpty();", "method_body_after": "private boolean isGenerateSQLTokenForEncryptOnJoinSegments(final SQLStatementContext sqlStatementContext) {\nreturn sqlStatementContext instanceof SelectStatementContext && ((SelectStatementContext) sqlStatementContext).isContainsJoinQuery();\n}", "context_before": "class EncryptPredicateColumnTokenGenerator extends BaseEncryptSQLTokenGenerator implements CollectionSQLTokenGenerator, SchemaMetaDataAware, QueryWithCipherColumnAware {\nprivate ShardingSphereSchema schema;\nprivate boolean queryWithCipherColumn;\n@Override\nprotected boolean isGenerateSQLTokenForEncrypt(final SQLStatementContext 
sqlStatementContext) {\nreturn isGenerateSQLTokenForEncryptOnWhereAvailable(sqlStatementContext) || isGenerateSQLTokenForEncryptOnJoinSegments(sqlStatementContext);\n}\nprivate boolean isGenerateSQLTokenForEncryptOnWhereAvailable(final SQLStatementContext sqlStatementContext) {\nreturn sqlStatementContext instanceof WhereAvailable && ((WhereAvailable) sqlStatementContext).getWhere().isPresent();\n}\n@Override\npublic Collection generateSQLTokens(final SQLStatementContext sqlStatementContext) {\nCollection result = new LinkedHashSet<>();\nCollection andPredicates = new LinkedHashSet<>();\nif (isGenerateSQLTokenForEncryptOnWhereAvailable(sqlStatementContext)) {\nExpressionSegment expression = ((WhereAvailable) sqlStatementContext).getWhere().get().getExpr();\nandPredicates.addAll(ExpressionExtractUtil.getAndPredicates(expression));\n}\nOptional> whereSegments = Optional.empty();\nif (sqlStatementContext instanceof SelectStatementContext) {\nwhereSegments = Optional.of(WhereExtractUtil.getJoinWhereSegments((SelectStatement) sqlStatementContext.getSqlStatement()));\nwhereSegments.get().forEach(each -> andPredicates.addAll(ExpressionExtractUtil.getAndPredicates(each.getExpr())));\n}\nMap columnTableNames = getColumnTableNames(sqlStatementContext, andPredicates, whereSegments);\nandPredicates.forEach(each -> result.addAll(generateSQLTokens(each.getPredicates(), columnTableNames)));\nreturn result;\n}\nprivate Collection generateSQLTokens(final Collection predicates, final Map columnTableNames) {\nCollection result = new LinkedList<>();\npredicates.forEach(each -> result.addAll(generateSQLTokensOnColumnSegments(ColumnExtractor.extractAll(each), columnTableNames)));\nreturn result;\n}\nprivate Collection generateSQLTokensOnColumnSegments(final Collection columnSegments, final Map columnTableNames) {\nCollection result = new LinkedList<>();\nfor (ColumnSegment each : columnSegments) {\nOptional encryptTable = findEncryptTable(columnTableNames, each);\nif (!encryptTable.isPresent() || !encryptTable.get().findEncryptorName(each.getIdentifier().getValue()).isPresent()) {\ncontinue;\n}\nint startIndex = each.getOwner().isPresent() ? 
each.getOwner().get().getStopIndex() + 2 : each.getStartIndex();\nint stopIndex = each.getStopIndex();\nif (!queryWithCipherColumn) {\nOptional plainColumn = encryptTable.get().findPlainColumn(each.getIdentifier().getValue());\nif (plainColumn.isPresent()) {\nresult.add(new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(plainColumn.get())));\ncontinue;\n}\n}\nOptional assistedQueryColumn = encryptTable.get().findAssistedQueryColumn(each.getIdentifier().getValue());\nSubstitutableColumnNameToken encryptColumnNameToken = assistedQueryColumn.map(columnName\n-> new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(columnName))).orElseGet(()\n-> new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(encryptTable.get().getCipherColumn(each.getIdentifier().getValue()))));\nresult.add(encryptColumnNameToken);\n}\nreturn result;\n}\nprivate Map getColumnTableNames(final SQLStatementContext sqlStatementContext, final Collection andPredicates,\nfinal Optional> whereSegments) {\nCollection columns = new ArrayList();\nandPredicates.forEach(each -> columns.addAll(generateColumnSegments(each.getPredicates())));\nif (whereSegments.isPresent()) {\nwhereSegments.get().forEach(each -> columns.addAll(ColumnExtractor.extractAll(each.getExpr())));\n}\nreturn sqlStatementContext.getTablesContext().findTableName(columns, schema);\n}\nprivate Collection generateColumnSegments(final Collection expressionSegments) {\nCollection result = new ArrayList();\nexpressionSegments.forEach(each -> result.addAll(ColumnExtractor.extractAll(each)));\nreturn result;\n}\nprivate Optional findEncryptTable(final Map columnTableNames, final ColumnSegment column) {\nreturn Optional.ofNullable(columnTableNames.get(column.getQualifiedName())).flatMap(tableName -> getEncryptRule().findEncryptTable(tableName));\n}\nprivate Collection getColumnProjections(final String columnName) {\nreturn Collections.singletonList(new ColumnProjection(null, columnName, null));\n}\n}", + "context_after": "class EncryptPredicateColumnTokenGenerator extends BaseEncryptSQLTokenGenerator implements CollectionSQLTokenGenerator, SchemaMetaDataAware, QueryWithCipherColumnAware {\nprivate ShardingSphereSchema schema;\nprivate boolean queryWithCipherColumn;\n@Override\nprotected boolean isGenerateSQLTokenForEncrypt(final SQLStatementContext sqlStatementContext) {\nreturn isGenerateSQLTokenForEncryptOnWhereAvailable(sqlStatementContext) || isGenerateSQLTokenForEncryptOnJoinSegments(sqlStatementContext);\n}\nprivate boolean isGenerateSQLTokenForEncryptOnWhereAvailable(final SQLStatementContext sqlStatementContext) {\nreturn sqlStatementContext instanceof WhereAvailable && ((WhereAvailable) sqlStatementContext).getWhere().isPresent();\n}\n@Override\npublic Collection generateSQLTokens(final SQLStatementContext sqlStatementContext) {\nCollection result = new LinkedHashSet<>();\nCollection andPredicates = new LinkedHashSet<>();\nif (isGenerateSQLTokenForEncryptOnWhereAvailable(sqlStatementContext)) {\nExpressionSegment expression = ((WhereAvailable) sqlStatementContext).getWhere().get().getExpr();\nandPredicates.addAll(ExpressionExtractUtil.getAndPredicates(expression));\n}\nCollection whereSegments = Collections.emptyList();\nif (sqlStatementContext instanceof SelectStatementContext) {\nwhereSegments = WhereExtractUtil.getJoinWhereSegments((SelectStatement) sqlStatementContext.getSqlStatement());\nandPredicates.addAll(whereSegments.stream().map(each -> 
ExpressionExtractUtil.getAndPredicates(each.getExpr())).flatMap(Collection::stream).collect(Collectors.toList()));\n}\nMap columnTableNames = getColumnTableNames(sqlStatementContext, andPredicates, whereSegments);\nresult.addAll(andPredicates.stream().map(each -> generateSQLTokens(each.getPredicates(), columnTableNames)).flatMap(Collection::stream).collect(Collectors.toList()));\nreturn result;\n}\nprivate Collection generateSQLTokens(final Collection predicates, final Map columnTableNames) {\nCollection result = new LinkedList<>();\nfor (ExpressionSegment each : predicates) {\nfor (ColumnSegment column : ColumnExtractor.extract(each)) {\nOptional encryptTable = findEncryptTable(columnTableNames, column);\nif (!encryptTable.isPresent() || !encryptTable.get().findEncryptorName(column.getIdentifier().getValue()).isPresent()) {\ncontinue;\n}\nint startIndex = column.getOwner().isPresent() ? column.getOwner().get().getStopIndex() + 2 : column.getStartIndex();\nint stopIndex = column.getStopIndex();\nif (!queryWithCipherColumn) {\nOptional plainColumn = encryptTable.get().findPlainColumn(column.getIdentifier().getValue());\nif (plainColumn.isPresent()) {\nresult.add(new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(plainColumn.get())));\ncontinue;\n}\n}\nOptional assistedQueryColumn = encryptTable.get().findAssistedQueryColumn(column.getIdentifier().getValue());\nSubstitutableColumnNameToken encryptColumnNameToken = assistedQueryColumn.map(columnName\n-> new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(columnName))).orElseGet(()\n-> new SubstitutableColumnNameToken(startIndex, stopIndex, getColumnProjections(encryptTable.get().getCipherColumn(column.getIdentifier().getValue()))));\nresult.add(encryptColumnNameToken);\n}\n}\nreturn result;\n}\nprivate Map getColumnTableNames(final SQLStatementContext sqlStatementContext, final Collection andPredicates,\nfinal Collection whereSegments) {\nCollection columns = andPredicates.stream().flatMap(each -> each.getPredicates().stream())\n.flatMap(each -> ColumnExtractor.extract(each).stream()).filter(Objects::nonNull).collect(Collectors.toList());\ncolumns.addAll(whereSegments.stream().map(each -> ColumnExtractor.extract(each.getExpr())).flatMap(Collection::stream).collect(Collectors.toList()));\nreturn sqlStatementContext.getTablesContext().findTableName(columns, schema);\n}\nprivate Optional findEncryptTable(final Map columnTableNames, final ColumnSegment column) {\nreturn Optional.ofNullable(columnTableNames.get(column.getQualifiedName())).flatMap(tableName -> getEncryptRule().findEncryptTable(tableName));\n}\nprivate Collection getColumnProjections(final String columnName) {\nreturn Collections.singletonList(new ColumnProjection(null, columnName, null));\n}\n}" }, { "comment": "Ok, I will extract it.", "method_body": "public MapDataForMapUnion(MapData map1, MapData map2) throws Throwable {\nList keysList = new ArrayList<>();\nList valuesList = new ArrayList<>();\nboolean isKeyNullExist = false;\nfor (int i = 0; i < map2.size(); i++) {\nObject key = keyElementGetter.getElementOrNull(map2.keyArray(), i);\nif (key == null) {\nisKeyNullExist = true;\n}\nkeysList.add(key);\nvaluesList.add(valueElementGetter.getElementOrNull(map2.valueArray(), i));\n}\nfor (int i = 0; i < map1.size(); i++) {\nfinal Object key1 = keyElementGetter.getElementOrNull(map1.keyArray(), i);\nfinal Object value1 = valueElementGetter.getElementOrNull(map1.valueArray(), i);\nboolean keyExists = false;\nif (key1 != null) 
{\nfor (int j = 0; j < keysList.size(); j++) {\nfinal Object key2 = keysList.get(j);\nif (key2 != null && (boolean) keyEqualityHandle.invoke(key1, key2)) {\nkeyExists = true;\nbreak;\n}\n}\n}\nif (isKeyNullExist && key1 == null) {\ncontinue;\n}\nif (!keyExists) {\nkeysList.add(key1);\nvaluesList.add(value1);\n}\n}\nthis.keysArray = new GenericArrayData(keysList.toArray());\nthis.valuesArray = new GenericArrayData(valuesList.toArray());\n}", + "target_code": "Object key = keyElementGetter.getElementOrNull(map2.keyArray(), i);", + "method_body_after": "public MapDataForMapUnion(MapData map1, MapData map2) throws Throwable {\nList keysList = new ArrayList<>();\nList valuesList = new ArrayList<>();\nboolean isKeyNullExist = false;\nArrayData keyArray2 = map2.keyArray();\nArrayData valueArray2 = map2.valueArray();\nfor (int i = 0; i < map2.size(); i++) {\nObject key = keyElementGetter.getElementOrNull(keyArray2, i);\nif (key == null) {\nisKeyNullExist = true;\n}\nkeysList.add(key);\nvaluesList.add(valueElementGetter.getElementOrNull(valueArray2, i));\n}\nArrayData keyArray1 = map1.keyArray();\nArrayData valueArray1 = map1.valueArray();\nfor (int i = 0; i < map1.size(); i++) {\nfinal Object key1 = keyElementGetter.getElementOrNull(keyArray1, i);\nboolean keyExists = false;\nif (key1 != null) {\nfor (int j = 0; j < keysList.size(); j++) {\nfinal Object key2 = keysList.get(j);\nif (key2 != null && (boolean) keyEqualityHandle.invoke(key1, key2)) {\nkeyExists = true;\nbreak;\n}\n}\n}\nif (isKeyNullExist && key1 == null) {\ncontinue;\n}\nif (!keyExists) {\nfinal Object value1 = valueElementGetter.getElementOrNull(valueArray1, i);\nkeysList.add(key1);\nvaluesList.add(value1);\n}\n}\nthis.keysArray = new GenericArrayData(keysList.toArray());\nthis.valuesArray = new GenericArrayData(valuesList.toArray());\n}", + "context_before": "class MapDataForMapUnion implements MapData {\nprivate final GenericArrayData keysArray;\nprivate final GenericArrayData valuesArray;\n@Override\npublic int size() {\nreturn keysArray.size();\n}\n@Override\npublic ArrayData keyArray() {\nreturn keysArray;\n}\n@Override\npublic ArrayData valueArray() {\nreturn valuesArray;\n}\n}", + "context_after": "class MapDataForMapUnion implements MapData {\nprivate final GenericArrayData keysArray;\nprivate final GenericArrayData valuesArray;\n@Override\npublic int size() {\nreturn keysArray.size();\n}\n@Override\npublic ArrayData keyArray() {\nreturn keysArray;\n}\n@Override\npublic ArrayData valueArray() {\nreturn valuesArray;\n}\n}" + }, + { + "comment": "Agree; I was using `joda.time.Instant.getMillis() / 1000` but had failures. I prefer using joda as in all the beam codebase. 
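For reference, a minimal equivalence sketch (illustrative only; it assumes Joda-Time is on the classpath and is not the final patch):\n\n```java\nimport org.joda.time.Instant;\n\npublic class EpochSecondsCheck {\npublic static void main(String[] args) {\n// Joda-Time: derive epoch seconds from milliseconds.\nlong jodaSeconds = Instant.now().getMillis() / 1000;\n// java.time: epoch seconds directly.\nlong javaTimeSeconds = java.time.Instant.now().getEpochSecond();\n// The two agree unless the calls straddle a second boundary.\nSystem.out.println(jodaSeconds + \" vs \" + javaTimeSeconds);\n}\n}\n```\n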
I'll dig into it.", + "method_body": "void runAll(OptionT options, NexmarkLauncher nexmarkLauncher) throws IOException {\nInstant start = Instant.now();\nMap baseline = loadBaseline(options.getBaselineFilename());\nMap actual = new LinkedHashMap<>();\nIterable configurations = options.getSuite().getConfigurations(options);\nboolean successful = true;\ntry {\nfor (NexmarkConfiguration configuration : configurations) {\nNexmarkPerf perf = nexmarkLauncher.run(configuration);\nif (perf != null) {\nif (perf.errors == null || perf.errors.size() > 0) {\nsuccessful = false;\n}\nappendPerf(options.getPerfFilename(), configuration, perf);\nactual.put(configuration, perf);\nsaveSummary(null, configurations, actual, baseline, start, options);\n}\n}\nif (options.getExportSummaryToBigQuery()) {\nsavePerfsToBigQuery(options, actual, null, java.time.Instant.now());\n}\n} finally {\nif (options.getMonitorJobs()) {\nsaveSummary(options.getSummaryFilename(), configurations, actual, baseline, start, options);\nsaveJavascript(options.getJavascriptFilename(), configurations, actual, baseline, start);\n}\n}\nif (!successful) {\nthrow new RuntimeException(\"Execution was not successful\");\n}\n}", + "target_code": "savePerfsToBigQuery(options, actual, null, java.time.Instant.now());", + "method_body_after": "void runAll(OptionT options, NexmarkLauncher nexmarkLauncher) throws IOException {\nInstant start = Instant.now();\nMap baseline = loadBaseline(options.getBaselineFilename());\nMap actual = new LinkedHashMap<>();\nIterable configurations = options.getSuite().getConfigurations(options);\nboolean successful = true;\ntry {\nfor (NexmarkConfiguration configuration : configurations) {\nNexmarkPerf perf = nexmarkLauncher.run(configuration);\nif (perf != null) {\nif (perf.errors == null || perf.errors.size() > 0) {\nsuccessful = false;\n}\nappendPerf(options.getPerfFilename(), configuration, perf);\nactual.put(configuration, perf);\nsaveSummary(null, configurations, actual, baseline, start, options);\n}\n}\nif (options.getExportSummaryToBigQuery()) {\nsavePerfsToBigQuery(options, actual, null, start);\n}\n} finally {\nif (options.getMonitorJobs()) {\nsaveSummary(options.getSummaryFilename(), configurations, actual, baseline, start, options);\nsaveJavascript(options.getJavascriptFilename(), configurations, actual, baseline, start);\n}\n}\nif (!successful) {\nthrow new RuntimeException(\"Execution was not successful\");\n}\n}", + "context_before": "class Main {\n/** Entry point. 
*/\n@VisibleForTesting\nstatic void savePerfsToBigQuery(\nNexmarkOptions options,\nMap perfs,\n@Nullable BigQueryServices testBigQueryServices,\njava.time.Instant start) {\nPipeline pipeline = Pipeline.create(options);\nPCollection> perfsPCollection =\npipeline.apply(\nCreate.of(perfs)\n.withCoder(\nKvCoder.of(\nSerializableCoder.of(NexmarkConfiguration.class),\nnew CustomCoder() {\n@Override\npublic void encode(NexmarkPerf value, OutputStream outStream)\nthrows CoderException, IOException {\nStringUtf8Coder.of().encode(value.toString(), outStream);\n}\n@Override\npublic NexmarkPerf decode(InputStream inStream)\nthrows CoderException, IOException {\nString perf = StringUtf8Coder.of().decode(inStream);\nreturn NexmarkPerf.fromString(perf);\n}\n})));\nTableSchema tableSchema =\nnew TableSchema()\n.setFields(\nImmutableList.of(\nnew TableFieldSchema().setName(\"timestamp\").setType(\"TIMESTAMP\"),\nnew TableFieldSchema().setName(\"runtimeSec\").setType(\"FLOAT\"),\nnew TableFieldSchema().setName(\"eventsPerSec\").setType(\"FLOAT\"),\nnew TableFieldSchema().setName(\"numResults\").setType(\"INTEGER\")));\nString tableSpec = NexmarkUtils.tableSpec(options, \"{query}\", 0L, null);\nSerializableFunction<\nValueInSingleWindow>, TableDestination>\ntableFunction =\ninput ->\nnew TableDestination(\ntableSpec.replace(\"{query}\", String.valueOf(input.getValue().getKey().query)),\n\"perfkit queries\");\nSerializableFunction, TableRow> rowFunction =\ninput -> {\nNexmarkPerf nexmarkPerf = input.getValue();\nTableRow row =\nnew TableRow()\n.set(\"timestamp\", start.getEpochSecond())\n.set(\"runtimeSec\", nexmarkPerf.runtimeSec)\n.set(\"eventsPerSec\", nexmarkPerf.eventsPerSec)\n.set(\"numResults\", nexmarkPerf.numResults);\nreturn row;\n};\nBigQueryIO.Write io =\nBigQueryIO.>write()\n.to(tableFunction)\n.withSchema(tableSchema)\n.withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)\n.withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)\n.withFormatFunction(rowFunction);\nif (testBigQueryServices != null) {\nio = io.withTestServices(testBigQueryServices);\n}\nperfsPCollection.apply(\"savePerfsToBigQuery\", io);\npipeline.run();\n}\n/** Append the pair of {@code configuration} and {@code perf} to perf file. */\nprivate void appendPerf(\n@Nullable String perfFilename, NexmarkConfiguration configuration, NexmarkPerf perf) {\nif (perfFilename == null) {\nreturn;\n}\nList lines = new ArrayList<>();\nlines.add(\"\");\nlines.add(String.format(\"\nlines.add(String.format(\"\nlines.add(configuration.toString());\nlines.add(perf.toString());\ntry {\nFiles.write(\nPaths.get(perfFilename),\nlines,\nStandardCharsets.UTF_8,\nStandardOpenOption.CREATE,\nStandardOpenOption.APPEND);\n} catch (IOException e) {\nthrow new RuntimeException(\"Unable to write perf file: \", e);\n}\nNexmarkUtils.console(\"appended results to perf file %s.\", perfFilename);\n}\n/** Load the baseline perf. 
*/\n@Nullable\nprivate static Map loadBaseline(\n@Nullable String baselineFilename) {\nif (baselineFilename == null) {\nreturn null;\n}\nMap baseline = new LinkedHashMap<>();\nList lines;\ntry {\nlines = Files.readAllLines(Paths.get(baselineFilename), StandardCharsets.UTF_8);\n} catch (IOException e) {\nthrow new RuntimeException(\"Unable to read baseline perf file: \", e);\n}\nfor (int i = 0; i < lines.size(); i++) {\nif (lines.get(i).startsWith(\"\ncontinue;\n}\nNexmarkConfiguration configuration = NexmarkConfiguration.fromString(lines.get(i++));\nNexmarkPerf perf = NexmarkPerf.fromString(lines.get(i));\nbaseline.put(configuration, perf);\n}\nNexmarkUtils.console(\n\"loaded %d entries from baseline file %s.\", baseline.size(), baselineFilename);\nreturn baseline;\n}\nprivate static final String LINE =\n\"==========================================================================================\";\n/** Print summary of {@code actual} vs (if non-null) {@code baseline}. */\nprivate static void saveSummary(\n@Nullable String summaryFilename,\nIterable configurations,\nMap actual,\n@Nullable Map baseline,\nInstant start,\nNexmarkOptions options) {\nList lines = new ArrayList<>();\nlines.add(\"\");\nlines.add(LINE);\nlines.add(\nString.format(\"Run started %s and ran for %s\", start, new Duration(start, Instant.now())));\nlines.add(\"\");\nlines.add(\"Default configuration:\");\nlines.add(NexmarkConfiguration.DEFAULT.toString());\nlines.add(\"\");\nlines.add(\"Configurations:\");\nlines.add(\" Conf Description\");\nint conf = 0;\nfor (NexmarkConfiguration configuration : configurations) {\nlines.add(String.format(\" %04d %s\", conf++, configuration.toShortString()));\nNexmarkPerf actualPerf = actual.get(configuration);\nif (actualPerf != null && actualPerf.jobId != null) {\nlines.add(String.format(\" %4s [Ran as job %s]\", \"\", actualPerf.jobId));\n}\n}\nlines.add(\"\");\nlines.add(\"Performance:\");\nlines.add(\nString.format(\n\" %4s %12s %12s %12s %12s %12s %12s\",\n\"Conf\",\n\"Runtime(sec)\",\n\"(Baseline)\",\n\"Events(/sec)\",\n\"(Baseline)\",\n\"Results\",\n\"(Baseline)\"));\nconf = 0;\nfor (NexmarkConfiguration configuration : configurations) {\nString line = String.format(\" %04d \", conf++);\nNexmarkPerf actualPerf = actual.get(configuration);\nif (actualPerf == null) {\nline += \"*** not run ***\";\n} else {\nNexmarkPerf baselinePerf = baseline == null ? 
null : baseline.get(configuration);\ndouble runtimeSec = actualPerf.runtimeSec;\nline += String.format(\"%12.1f \", runtimeSec);\nif (baselinePerf == null) {\nline += String.format(\"%12s \", \"\");\n} else {\ndouble baselineRuntimeSec = baselinePerf.runtimeSec;\ndouble diff = ((runtimeSec - baselineRuntimeSec) / baselineRuntimeSec) * 100.0;\nline += String.format(\"%+11.2f%% \", diff);\n}\ndouble eventsPerSec = actualPerf.eventsPerSec;\nline += String.format(\"%12.1f \", eventsPerSec);\nif (baselinePerf == null) {\nline += String.format(\"%12s \", \"\");\n} else {\ndouble baselineEventsPerSec = baselinePerf.eventsPerSec;\ndouble diff = ((eventsPerSec - baselineEventsPerSec) / baselineEventsPerSec) * 100.0;\nline += String.format(\"%+11.2f%% \", diff);\n}\nlong numResults = actualPerf.numResults;\nline += String.format(\"%12d \", numResults);\nif (baselinePerf == null) {\nline += String.format(\"%12s\", \"\");\n} else {\nlong baselineNumResults = baselinePerf.numResults;\nlong diff = numResults - baselineNumResults;\nline += String.format(\"%+12d\", diff);\n}\n}\nlines.add(line);\nif (actualPerf != null) {\nList errors = actualPerf.errors;\nif (errors == null) {\nerrors = new ArrayList<>();\nerrors.add(\"NexmarkGoogleRunner returned null errors list\");\n}\nfor (String error : errors) {\nlines.add(String.format(\" %4s *** %s ***\", \"\", error));\n}\n}\n}\nlines.add(LINE);\nlines.add(\"\");\nfor (String line : lines) {\nSystem.out.println(line);\n}\nif (summaryFilename != null) {\ntry {\nFiles.write(\nPaths.get(summaryFilename),\nlines,\nStandardCharsets.UTF_8,\nStandardOpenOption.CREATE,\nStandardOpenOption.APPEND);\n} catch (IOException e) {\nthrow new RuntimeException(\"Unable to save summary file: \", e);\n}\nNexmarkUtils.console(\"appended summary to summary file %s.\", summaryFilename);\n}\n}\n/**\n* Write all perf data and any baselines to a javascript file which can be used by graphing page\n* etc.\n*/\nprivate static void saveJavascript(\n@Nullable String javascriptFilename,\nIterable configurations,\nMap actual,\n@Nullable Map baseline,\nInstant start) {\nif (javascriptFilename == null) {\nreturn;\n}\nList lines = new ArrayList<>();\nlines.add(\nString.format(\n\"\nlines.add(\"var all = [\");\nfor (NexmarkConfiguration configuration : configurations) {\nlines.add(\" {\");\nlines.add(String.format(\" config: %s\", configuration));\nNexmarkPerf actualPerf = actual.get(configuration);\nif (actualPerf != null) {\nlines.add(String.format(\" ,perf: %s\", actualPerf));\n}\nNexmarkPerf baselinePerf = baseline == null ? null : baseline.get(configuration);\nif (baselinePerf != null) {\nlines.add(String.format(\" ,baseline: %s\", baselinePerf));\n}\nlines.add(\" },\");\n}\nlines.add(\"];\");\ntry {\nFiles.write(\nPaths.get(javascriptFilename),\nlines,\nStandardCharsets.UTF_8,\nStandardOpenOption.CREATE,\nStandardOpenOption.TRUNCATE_EXISTING);\n} catch (IOException e) {\nthrow new RuntimeException(\"Unable to save javascript file: \", e);\n}\nNexmarkUtils.console(\"saved javascript to file %s.\", javascriptFilename);\n}\npublic static void main(String[] args) throws IOException {\nNexmarkOptions options =\nPipelineOptionsFactory.fromArgs(args).withValidation().as(NexmarkOptions.class);\nNexmarkLauncher nexmarkLauncher = new NexmarkLauncher<>(options);\nnew Main<>().runAll(options, nexmarkLauncher);\n}\n}", + "context_after": "class Main {\n/** Entry point. 
*/\n@VisibleForTesting\nstatic void savePerfsToBigQuery(\nNexmarkOptions options,\nMap perfs,\n@Nullable BigQueryServices testBigQueryServices,\nInstant start) {\nPipeline pipeline = Pipeline.create(options);\nPCollection> perfsPCollection =\npipeline.apply(\nCreate.of(perfs)\n.withCoder(\nKvCoder.of(\nSerializableCoder.of(NexmarkConfiguration.class),\nnew CustomCoder() {\n@Override\npublic void encode(NexmarkPerf value, OutputStream outStream)\nthrows CoderException, IOException {\nStringUtf8Coder.of().encode(value.toString(), outStream);\n}\n@Override\npublic NexmarkPerf decode(InputStream inStream)\nthrows CoderException, IOException {\nString perf = StringUtf8Coder.of().decode(inStream);\nreturn NexmarkPerf.fromString(perf);\n}\n})));\nTableSchema tableSchema =\nnew TableSchema()\n.setFields(\nImmutableList.of(\nnew TableFieldSchema().setName(\"timestamp\").setType(\"TIMESTAMP\"),\nnew TableFieldSchema().setName(\"runtimeSec\").setType(\"FLOAT\"),\nnew TableFieldSchema().setName(\"eventsPerSec\").setType(\"FLOAT\"),\nnew TableFieldSchema().setName(\"numResults\").setType(\"INTEGER\")));\nString tableSpec = NexmarkUtils.tableSpec(options, \"{query}\", 0L, null);\nSerializableFunction<\nValueInSingleWindow>, TableDestination>\ntableFunction =\ninput ->\nnew TableDestination(\ntableSpec.replace(\"{query}\", String.valueOf(input.getValue().getKey().query)),\n\"perfkit queries\");\nSerializableFunction, TableRow> rowFunction =\ninput -> {\nNexmarkPerf nexmarkPerf = input.getValue();\nTableRow row =\nnew TableRow()\n.set(\"timestamp\", start.getMillis() / 1000)\n.set(\"runtimeSec\", nexmarkPerf.runtimeSec)\n.set(\"eventsPerSec\", nexmarkPerf.eventsPerSec)\n.set(\"numResults\", nexmarkPerf.numResults);\nreturn row;\n};\nBigQueryIO.Write io =\nBigQueryIO.>write()\n.to(tableFunction)\n.withSchema(tableSchema)\n.withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)\n.withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)\n.withFormatFunction(rowFunction);\nif (testBigQueryServices != null) {\nio = io.withTestServices(testBigQueryServices);\n}\nperfsPCollection.apply(\"savePerfsToBigQuery\", io);\npipeline.run();\n}\n/** Append the pair of {@code configuration} and {@code perf} to perf file. */\nprivate void appendPerf(\n@Nullable String perfFilename, NexmarkConfiguration configuration, NexmarkPerf perf) {\nif (perfFilename == null) {\nreturn;\n}\nList lines = new ArrayList<>();\nlines.add(\"\");\nlines.add(String.format(\"\nlines.add(String.format(\"\nlines.add(configuration.toString());\nlines.add(perf.toString());\ntry {\nFiles.write(\nPaths.get(perfFilename),\nlines,\nStandardCharsets.UTF_8,\nStandardOpenOption.CREATE,\nStandardOpenOption.APPEND);\n} catch (IOException e) {\nthrow new RuntimeException(\"Unable to write perf file: \", e);\n}\nNexmarkUtils.console(\"appended results to perf file %s.\", perfFilename);\n}\n/** Load the baseline perf. 
*/\n@Nullable\nprivate static Map loadBaseline(\n@Nullable String baselineFilename) {\nif (baselineFilename == null) {\nreturn null;\n}\nMap baseline = new LinkedHashMap<>();\nList lines;\ntry {\nlines = Files.readAllLines(Paths.get(baselineFilename), StandardCharsets.UTF_8);\n} catch (IOException e) {\nthrow new RuntimeException(\"Unable to read baseline perf file: \", e);\n}\nfor (int i = 0; i < lines.size(); i++) {\nif (lines.get(i).startsWith(\"\ncontinue;\n}\nNexmarkConfiguration configuration = NexmarkConfiguration.fromString(lines.get(i++));\nNexmarkPerf perf = NexmarkPerf.fromString(lines.get(i));\nbaseline.put(configuration, perf);\n}\nNexmarkUtils.console(\n\"loaded %d entries from baseline file %s.\", baseline.size(), baselineFilename);\nreturn baseline;\n}\nprivate static final String LINE =\n\"==========================================================================================\";\n/** Print summary of {@code actual} vs (if non-null) {@code baseline}. */\nprivate static void saveSummary(\n@Nullable String summaryFilename,\nIterable configurations,\nMap actual,\n@Nullable Map baseline,\nInstant start,\nNexmarkOptions options) {\nList lines = new ArrayList<>();\nlines.add(\"\");\nlines.add(LINE);\nlines.add(\nString.format(\"Run started %s and ran for %s\", start, new Duration(start, Instant.now())));\nlines.add(\"\");\nlines.add(\"Default configuration:\");\nlines.add(NexmarkConfiguration.DEFAULT.toString());\nlines.add(\"\");\nlines.add(\"Configurations:\");\nlines.add(\" Conf Description\");\nint conf = 0;\nfor (NexmarkConfiguration configuration : configurations) {\nlines.add(String.format(\" %04d %s\", conf++, configuration.toShortString()));\nNexmarkPerf actualPerf = actual.get(configuration);\nif (actualPerf != null && actualPerf.jobId != null) {\nlines.add(String.format(\" %4s [Ran as job %s]\", \"\", actualPerf.jobId));\n}\n}\nlines.add(\"\");\nlines.add(\"Performance:\");\nlines.add(\nString.format(\n\" %4s %12s %12s %12s %12s %12s %12s\",\n\"Conf\",\n\"Runtime(sec)\",\n\"(Baseline)\",\n\"Events(/sec)\",\n\"(Baseline)\",\n\"Results\",\n\"(Baseline)\"));\nconf = 0;\nfor (NexmarkConfiguration configuration : configurations) {\nString line = String.format(\" %04d \", conf++);\nNexmarkPerf actualPerf = actual.get(configuration);\nif (actualPerf == null) {\nline += \"*** not run ***\";\n} else {\nNexmarkPerf baselinePerf = baseline == null ? 
null : baseline.get(configuration);\ndouble runtimeSec = actualPerf.runtimeSec;\nline += String.format(\"%12.1f \", runtimeSec);\nif (baselinePerf == null) {\nline += String.format(\"%12s \", \"\");\n} else {\ndouble baselineRuntimeSec = baselinePerf.runtimeSec;\ndouble diff = ((runtimeSec - baselineRuntimeSec) / baselineRuntimeSec) * 100.0;\nline += String.format(\"%+11.2f%% \", diff);\n}\ndouble eventsPerSec = actualPerf.eventsPerSec;\nline += String.format(\"%12.1f \", eventsPerSec);\nif (baselinePerf == null) {\nline += String.format(\"%12s \", \"\");\n} else {\ndouble baselineEventsPerSec = baselinePerf.eventsPerSec;\ndouble diff = ((eventsPerSec - baselineEventsPerSec) / baselineEventsPerSec) * 100.0;\nline += String.format(\"%+11.2f%% \", diff);\n}\nlong numResults = actualPerf.numResults;\nline += String.format(\"%12d \", numResults);\nif (baselinePerf == null) {\nline += String.format(\"%12s\", \"\");\n} else {\nlong baselineNumResults = baselinePerf.numResults;\nlong diff = numResults - baselineNumResults;\nline += String.format(\"%+12d\", diff);\n}\n}\nlines.add(line);\nif (actualPerf != null) {\nList errors = actualPerf.errors;\nif (errors == null) {\nerrors = new ArrayList<>();\nerrors.add(\"NexmarkGoogleRunner returned null errors list\");\n}\nfor (String error : errors) {\nlines.add(String.format(\" %4s *** %s ***\", \"\", error));\n}\n}\n}\nlines.add(LINE);\nlines.add(\"\");\nfor (String line : lines) {\nSystem.out.println(line);\n}\nif (summaryFilename != null) {\ntry {\nFiles.write(\nPaths.get(summaryFilename),\nlines,\nStandardCharsets.UTF_8,\nStandardOpenOption.CREATE,\nStandardOpenOption.APPEND);\n} catch (IOException e) {\nthrow new RuntimeException(\"Unable to save summary file: \", e);\n}\nNexmarkUtils.console(\"appended summary to summary file %s.\", summaryFilename);\n}\n}\n/**\n* Write all perf data and any baselines to a javascript file which can be used by graphing page\n* etc.\n*/\nprivate static void saveJavascript(\n@Nullable String javascriptFilename,\nIterable configurations,\nMap actual,\n@Nullable Map baseline,\nInstant start) {\nif (javascriptFilename == null) {\nreturn;\n}\nList lines = new ArrayList<>();\nlines.add(\nString.format(\n\"\nlines.add(\"var all = [\");\nfor (NexmarkConfiguration configuration : configurations) {\nlines.add(\" {\");\nlines.add(String.format(\" config: %s\", configuration));\nNexmarkPerf actualPerf = actual.get(configuration);\nif (actualPerf != null) {\nlines.add(String.format(\" ,perf: %s\", actualPerf));\n}\nNexmarkPerf baselinePerf = baseline == null ? 
null : baseline.get(configuration);\nif (baselinePerf != null) {\nlines.add(String.format(\" ,baseline: %s\", baselinePerf));\n}\nlines.add(\" },\");\n}\nlines.add(\"];\");\ntry {\nFiles.write(\nPaths.get(javascriptFilename),\nlines,\nStandardCharsets.UTF_8,\nStandardOpenOption.CREATE,\nStandardOpenOption.TRUNCATE_EXISTING);\n} catch (IOException e) {\nthrow new RuntimeException(\"Unable to save javascript file: \", e);\n}\nNexmarkUtils.console(\"saved javascript to file %s.\", javascriptFilename);\n}\npublic static void main(String[] args) throws IOException {\nNexmarkOptions options =\nPipelineOptionsFactory.fromArgs(args).withValidation().as(NexmarkOptions.class);\nNexmarkLauncher nexmarkLauncher = new NexmarkLauncher<>(options);\nnew Main<>().runAll(options, nexmarkLauncher);\n}\n}" }, { "comment": "Sure sounds good", "method_body": "private List createNextAvailableBatch(RequestInfo requestInfo) {\nList batch = new ArrayList<>(requestInfo.batchSize);\nint batchSizeBytes = 0;\nfor (int i = 0; i < requestInfo.batchSize; i++) {\nlong requestEntrySize = bufferedRequestEntries.peek().getSize();\nif (batchSizeBytes + requestEntrySize > maxBatchSizeInBytes) {\nbreak;\n}\nRequestEntryWrapper elem = bufferedRequestEntries.remove();\nbatch.add(elem.getRequestEntry());\nbufferedRequestEntriesTotalSizeInBytes -= requestEntrySize;\nbatchSizeBytes += requestEntrySize;\n}\nnumRecordsSendCounter.inc(batch.size());\nnumBytesSendCounter.inc(batchSizeBytes);\nreturn batch;\n}", "target_code": "int batchSizeBytes = 0;", "method_body_after": "private List createNextAvailableBatch(RequestInfo requestInfo) {\nList batch = new ArrayList<>(requestInfo.getBatchSize());\nlong batchSizeBytes = 0;\nfor (int i = 0; i < requestInfo.getBatchSize(); i++) {\nlong requestEntrySize = bufferedRequestEntries.peek().getSize();\nif (batchSizeBytes + requestEntrySize > maxBatchSizeInBytes) {\nbreak;\n}\nRequestEntryWrapper elem = bufferedRequestEntries.remove();\nbatch.add(elem.getRequestEntry());\nbufferedRequestEntriesTotalSizeInBytes -= requestEntrySize;\nbatchSizeBytes += requestEntrySize;\n}\nnumRecordsSendCounter.inc(batch.size());\nnumBytesSendCounter.inc(batchSizeBytes);\nreturn batch;\n}", "context_before": "class AsyncSinkWriter\nimplements StatefulSink.StatefulSinkWriter> {\nprivate final MailboxExecutor mailboxExecutor;\nprivate final ProcessingTimeService timeService;\n/* The timestamp at which the previous batch of records was sent from this sink. */\nprivate long lastSendTimestamp = 0;\n/* The timestamp of the response to the previous request from this sink. */\nprivate long ackTime = Long.MAX_VALUE;\n/* The sink writer metric group. */\nprivate final SinkWriterMetricGroup metrics;\n/* Counter for number of bytes this sink has attempted to send to the destination. */\nprivate final Counter numBytesSendCounter;\n/* Counter for number of records this sink has attempted to send to the destination. */\nprivate final Counter numRecordsSendCounter;\nprivate final RateLimitingStrategy rateLimitingStrategy;\nprivate final int maxBatchSize;\nprivate final int maxBufferedRequests;\nprivate final long maxBatchSizeInBytes;\nprivate final long maxTimeInBufferMS;\nprivate final long maxRecordSizeInBytes;\n/**\n* The ElementConverter provides a mapping from the elements of a stream to the request\n* entries that can be sent to the destination.\n*\n*
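<p>An editorial sketch, not part of the original javadoc: since {@code ElementConverter} exposes a single abstract method, a converter for a hypothetical sink that ships UTF-8 bytes can be written as a lambda (the String/byte[] types here are made up for illustration):\n* <pre>{@code\n* ElementConverter<String, byte[]> toBytes =\n* (element, context) -> element.getBytes(java.nio.charset.StandardCharsets.UTF_8);\n* }</pre>\n*\n*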
<p>
The resulting request entry is buffered by the AsyncSinkWriter and sent to the destination\n* when the {@code submitRequestEntries} method is invoked.\n*/\nprivate final ElementConverter elementConverter;\n/**\n* Buffer to hold request entries that should be persisted into the destination, along with its\n* size in bytes.\n*\n*
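<p>As a rough editorial sketch of that bookkeeping (it mirrors {@code addEntryToBuffer} further down and adds no new behavior), each entry is wrapped together with its size before being queued:\n* <pre>{@code\n* RequestEntryWrapper<RequestEntryT> wrapped = new RequestEntryWrapper<>(entry, getSizeInBytes(entry));\n* bufferedRequestEntries.add(wrapped);\n* }</pre>\n*\n*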
<p>A request entry contains all relevant details to make a call to the destination. E.g., for\n* Kinesis Data Streams a request entry contains the payload and partition key.\n*\n*
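<p>For instance (an illustrative shape only, borrowed from the AWS SDK v2 Kinesis model rather than anything defined in this class):\n* <pre>{@code\n* PutRecordsRequestEntry entry = PutRecordsRequestEntry.builder()\n* .data(SdkBytes.fromUtf8String(\"payload\"))\n* .partitionKey(\"key-1\")\n* .build();\n* }</pre>\n*\n*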
<p>It seems more natural to buffer InputT, i.e., the events that should be persisted, rather\n* than RequestEntryT. However, in practice, the response of a failed request call can make it\n* very hard, if not impossible, to reconstruct the original event. It is much easier to just\n* construct a new (retry) request entry from the response and add that back to the queue for\n* later retry.\n*/\nprivate final Deque> bufferedRequestEntries =\nnew ArrayDeque<>();\n/**\n* Tracks all pending async calls that have been executed since the last checkpoint. Calls that\n* complete (successfully or unsuccessfully) automatically decrement the counter. Any\n* request entry that was not successfully persisted needs to be handled and retried by the\n* logic in {@code submitRequestsToApi}.\n*\n*
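<p>The counter discipline, summarized editorially from {@code flush()} and {@code completeRequest()} below (no new behavior): increment just before a batch is handed to {@code submitRequestEntries}, decrement when its completion callback runs on the mailbox thread:\n* <pre>{@code\n* inFlightRequestsCount++;\n* submitRequestEntries(batch, requestResultCallback);\n* // later, in completeRequest(...):\n* inFlightRequestsCount--;\n* }</pre>\n*\n*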
<p>
To complete a checkpoint, we need to make sure that no requests are in flight, as they may\n* fail, which could then lead to data loss.\n*/\nprivate int inFlightRequestsCount;\n/**\n* Tracks the cumulative size of all elements in {@code bufferedRequestEntries} to facilitate\n* the criterion for flushing after {@code maxBatchSizeInBytes} is reached.\n*/\nprivate double bufferedRequestEntriesTotalSizeInBytes;\nprivate boolean existsActiveTimerCallback = false;\n/**\n* The {@code accept} method should be called on this Consumer if the processing of the {@code\n* requestEntries} raises an exception that should not be retried. Specifically, any action that\n* we are sure will result in the same exception no matter how many times we retry should raise\n* a {@code RuntimeException} here. For example, wrong user credentials. However, it is possible\n* intermittent failures will recover, e.g. flaky network connections, in which case, some other\n* mechanism may be more appropriate.\n*/\nprivate final Consumer fatalExceptionCons;\n/**\n* This method specifies how to persist buffered request entries into the destination. It is\n* implemented when support for a new destination is added.\n*\n*
<p>
The method is invoked with a set of request entries according to the buffering hints (and\n* the valid limits of the destination). The logic then needs to create and execute the request\n* asynchronously against the destination (ideally by batching together multiple request entries\n* to increase efficiency). The logic also needs to identify individual request entries that\n* were not persisted successfully and resubmit them using the {@code requestResult} callback.\n*\n*
<p>
From a threading perspective, the mailbox thread will call this method and initiate the\n* asynchronous request to persist the {@code requestEntries}. NOTE: The client must support\n* asynchronous requests and the method called to persist the records must asynchronously\n* execute and return a future with the results of that request. A thread from the destination\n* client thread pool should complete the request and submit the failed entries that should be\n* retried. The {@code requestResult} will then trigger the mailbox thread to requeue the\n* unsuccessful elements.\n*\n*
<p>
An example implementation of this method is included:\n*\n*
<pre>
{@code\n* @Override\n* protected void submitRequestEntries\n*   (List records, Consumer> requestResult) {\n*     Future response = destinationClient.putRecords(records);\n*     response.whenComplete(\n*         (response, error) -> {\n*             if(error){\n*                 List retryableFailedRecords = getRetryableFailed(response);\n*                 requestResult.accept(retryableFailedRecords);\n*             }else{\n*                 requestResult.accept(Collections.emptyList());\n*             }\n*         }\n*     );\n* }\n*\n* }
</pre>\n*\n* <p>
During checkpointing, the sink needs to ensure that there are no outstanding in-flight\n* requests.\n*\n* @param requestEntries a set of request entries that should be sent to the destination\n* @param requestResult the {@code accept} method should be called on this Consumer once the\n* processing of the {@code requestEntries} are complete. Any entries that encountered\n* difficulties in persisting should be re-queued through {@code requestResult} by including\n* that element in the collection of {@code RequestEntryT}s passed to the {@code accept}\n* method. All other elements are assumed to have been successfully persisted.\n*/\nprotected abstract void submitRequestEntries(\nList requestEntries, Consumer> requestResult);\n/**\n* This method allows the getting of the size of a {@code RequestEntryT} in bytes. The size in\n* this case is measured as the total bytes that is written to the destination as a result of\n* persisting this particular {@code RequestEntryT} rather than the serialized length (which may\n* be the same).\n*\n* @param requestEntry the requestEntry for which we want to know the size\n* @return the size of the requestEntry, as defined previously\n*/\nprotected abstract long getSizeInBytes(RequestEntryT requestEntry);\n@Deprecated\npublic AsyncSinkWriter(\nElementConverter elementConverter,\nSink.InitContext context,\nint maxBatchSize,\nint maxInFlightRequests,\nint maxBufferedRequests,\nlong maxBatchSizeInBytes,\nlong maxTimeInBufferMS,\nlong maxRecordSizeInBytes) {\nthis(\nelementConverter,\ncontext,\nmaxBatchSize,\nmaxInFlightRequests,\nmaxBufferedRequests,\nmaxBatchSizeInBytes,\nmaxTimeInBufferMS,\nmaxRecordSizeInBytes,\nCollections.emptyList());\n}\n@Deprecated\npublic AsyncSinkWriter(\nElementConverter elementConverter,\nSink.InitContext context,\nint maxBatchSize,\nint maxInFlightRequests,\nint maxBufferedRequests,\nlong maxBatchSizeInBytes,\nlong maxTimeInBufferMS,\nlong maxRecordSizeInBytes,\nCollection> states) {\nthis(\nelementConverter,\ncontext,\nmaxBatchSize,\nmaxBufferedRequests,\nmaxBatchSizeInBytes,\nmaxTimeInBufferMS,\nmaxRecordSizeInBytes,\nstates,\nCongestionControlRateLimitingStrategy.builder()\n.setMaxInFlightRequests(maxInFlightRequests)\n.setInitialMaxInFlightMessages(maxBatchSize)\n.setAimdScalingStrategy(\nAIMDScalingStrategy.builder()\n.setRateThreshold(maxBatchSize * maxInFlightRequests)\n.build())\n.build());\n}\npublic AsyncSinkWriter(\nElementConverter elementConverter,\nSink.InitContext context,\nint maxBatchSize,\nint maxBufferedRequests,\nlong maxBatchSizeInBytes,\nlong maxTimeInBufferMS,\nlong maxRecordSizeInBytes,\nCollection> states,\nRateLimitingStrategy rateLimitingStrategy) {\nthis.elementConverter = elementConverter;\nthis.mailboxExecutor = context.getMailboxExecutor();\nthis.timeService = context.getProcessingTimeService();\nPreconditions.checkNotNull(elementConverter);\nPreconditions.checkArgument(maxBatchSize > 0);\nPreconditions.checkArgument(maxBufferedRequests > 0);\nPreconditions.checkArgument(maxBatchSizeInBytes > 0);\nPreconditions.checkArgument(maxTimeInBufferMS > 0);\nPreconditions.checkArgument(maxRecordSizeInBytes > 0);\nPreconditions.checkArgument(\nmaxBufferedRequests > maxBatchSize,\n\"The maximum number of requests that may be buffered should be strictly\"\n+ \" greater than the maximum number of requests per batch.\");\nPreconditions.checkArgument(\nmaxBatchSizeInBytes >= maxRecordSizeInBytes,\n\"The maximum allowed size in bytes per flush must be greater than or equal to the\"\n+ \" maximum allowed size in bytes of 
a single record.\");\nPreconditions.checkNotNull(rateLimitingStrategy);\nthis.maxBatchSize = maxBatchSize;\nthis.maxBufferedRequests = maxBufferedRequests;\nthis.maxBatchSizeInBytes = maxBatchSizeInBytes;\nthis.maxTimeInBufferMS = maxTimeInBufferMS;\nthis.maxRecordSizeInBytes = maxRecordSizeInBytes;\nthis.rateLimitingStrategy = rateLimitingStrategy;\nthis.inFlightRequestsCount = 0;\nthis.bufferedRequestEntriesTotalSizeInBytes = 0;\nthis.metrics = context.metricGroup();\nthis.metrics.setCurrentSendTimeGauge(() -> this.ackTime - this.lastSendTimestamp);\nthis.numBytesSendCounter = this.metrics.getNumBytesSendCounter();\nthis.numRecordsSendCounter = this.metrics.getNumRecordsSendCounter();\nthis.fatalExceptionCons =\nexception ->\nmailboxExecutor.execute(\n() -> {\nthrow exception;\n},\n\"A fatal exception occurred in the sink that cannot be recovered from or should not be retried.\");\ninitializeState(states);\n}\nprivate void registerCallback() {\nProcessingTimeService.ProcessingTimeCallback ptc =\ninstant -> {\nexistsActiveTimerCallback = false;\nwhile (!bufferedRequestEntries.isEmpty()) {\nflush();\n}\n};\ntimeService.registerTimer(timeService.getCurrentProcessingTime() + maxTimeInBufferMS, ptc);\nexistsActiveTimerCallback = true;\n}\n@Override\npublic void write(InputT element, Context context) throws IOException, InterruptedException {\nwhile (bufferedRequestEntries.size() >= maxBufferedRequests) {\nflush();\n}\naddEntryToBuffer(elementConverter.apply(element, context), false);\nnonBlockingFlush();\n}\n/**\n* Determines if a call to flush will be non-blocking (i.e. {@code inFlightRequestsCount} is\n* strictly smaller than {@code maxInFlightRequests}). Also requires one of the following\n* requirements to be met:\n*\n*
<ul>\n*   <li>The number of elements buffered is greater than or equal to the {@code maxBatchSize}\n*   <li>The sum of the size in bytes of all records in the buffer is greater than or equal to\n* {@code maxBatchSizeInBytes}\n* </ul>
\n*/\nprivate void nonBlockingFlush() throws InterruptedException {\nwhile (!rateLimitingStrategy.shouldBlock(createRequestInfo())\n&& (bufferedRequestEntries.size() >= getNextBatchSizeLimit()\n|| bufferedRequestEntriesTotalSizeInBytes >= maxBatchSizeInBytes)) {\nflush();\n}\n}\nprivate RequestInfo createRequestInfo() {\nint batchSize = getNextBatchSize();\nlong requestStartTime = System.currentTimeMillis();\nreturn RequestInfo.builder()\n.setBatchSize(batchSize)\n.setRequestStartTime(requestStartTime)\n.build();\n}\n/**\n* Persists buffered RequestsEntries into the destination by invoking {@code\n* submitRequestEntries} with batches according to the user specified buffering hints.\n*\n*
<p>
The method checks with the {@code rateLimitingStrategy} to see if it should block the\n* request.\n*/\nprivate void flush() throws InterruptedException {\nRequestInfo requestInfo = createRequestInfo();\nwhile (rateLimitingStrategy.shouldBlock(requestInfo)) {\nmailboxExecutor.yield();\nrequestInfo = createRequestInfo();\n}\nList batch = createNextAvailableBatch(requestInfo);\nint batchSize = requestInfo.batchSize;\nif (batchSize == 0) {\nreturn;\n}\nrequestInfo.setBatchSize(batchSize);\nlong timestampOfRequest = requestInfo.requestStartTime;\nConsumer> requestResultCallback =\nfailedRequestEntries ->\nmailboxExecutor.execute(\n() ->\ncompleteRequest(\nfailedRequestEntries,\nbatchSize,\ntimestampOfRequest),\n\"Mark in-flight request as completed and requeue %d request entries\",\nfailedRequestEntries.size());\nrateLimitingStrategy.registerInFlightRequest(requestInfo);\ninFlightRequestsCount++;\nsubmitRequestEntries(batch, requestResultCallback);\n}\nprivate int getNextBatchSize() {\nreturn Math.min(rateLimitingStrategy.getMaxBatchSize(), bufferedRequestEntries.size());\n}\n/**\n* Creates the next batch of request entries while respecting the {@code maxBatchSize} and\n* {@code maxBatchSizeInBytes}. Also adds these to the metrics counters.\n*/\n/**\n* Marks an in-flight request as completed and prepends failed requestEntries back to the\n* internal requestEntry buffer for later retry.\n*\n* @param failedRequestEntries requestEntries that need to be retried\n*/\nprivate void completeRequest(\nList failedRequestEntries, int batchSize, long requestStartTime)\nthrows InterruptedException {\nlastSendTimestamp = requestStartTime;\nackTime = System.currentTimeMillis();\ninFlightRequestsCount--;\nrateLimitingStrategy.registerCompletedRequest(\nRequestInfo.builder()\n.setFailedMessages(failedRequestEntries.size())\n.setRequestStartTime(requestStartTime)\n.setBatchSize(batchSize)\n.build());\nListIterator iterator =\nfailedRequestEntries.listIterator(failedRequestEntries.size());\nwhile (iterator.hasPrevious()) {\naddEntryToBuffer(iterator.previous(), true);\n}\nnonBlockingFlush();\n}\nprivate void addEntryToBuffer(RequestEntryT entry, boolean insertAtHead) {\nif (bufferedRequestEntries.isEmpty() && !existsActiveTimerCallback) {\nregisterCallback();\n}\nRequestEntryWrapper wrappedEntry =\nnew RequestEntryWrapper<>(entry, getSizeInBytes(entry));\nif (wrappedEntry.getSize() > maxRecordSizeInBytes) {\nthrow new IllegalArgumentException(\nString.format(\n\"The request entry sent to the buffer was of size [%s], when the maxRecordSizeInBytes was set to [%s].\",\nwrappedEntry.getSize(), maxRecordSizeInBytes));\n}\nif (insertAtHead) {\nbufferedRequestEntries.addFirst(wrappedEntry);\n} else {\nbufferedRequestEntries.add(wrappedEntry);\n}\nbufferedRequestEntriesTotalSizeInBytes += wrappedEntry.getSize();\n}\n/**\n* In flight requests will be retried if the sink is still healthy. But if in-flight requests\n* fail after a checkpoint has been triggered and Flink needs to recover from the checkpoint,\n* the (failed) in-flight requests are gone and cannot be retried. Hence, there cannot be any\n* outstanding in-flight requests when a commit is initialized.\n*\n*
<p>
To this end, all in-flight requests need to completed before proceeding with the commit.\n*/\n@Override\npublic void flush(boolean flush) throws InterruptedException {\nwhile (inFlightRequestsCount > 0 || (bufferedRequestEntries.size() > 0 && flush)) {\nyieldIfThereExistsInFlightRequests();\nif (flush) {\nflush();\n}\n}\n}\nprivate void yieldIfThereExistsInFlightRequests() throws InterruptedException {\nif (inFlightRequestsCount > 0) {\nmailboxExecutor.yield();\n}\n}\n/**\n* All in-flight requests that are relevant for the snapshot have been completed, but there may\n* still be request entries in the internal buffers that are yet to be sent to the endpoint.\n* These request entries are stored in the snapshot state so that they don't get lost in case of\n* a failure/restart of the application.\n*/\n@Override\npublic List> snapshotState(long checkpointId) {\nreturn Collections.singletonList(new BufferedRequestState<>((bufferedRequestEntries)));\n}\nprivate void initializeState(Collection> states) {\nfor (BufferedRequestState state : states) {\ninitializeState(state);\n}\n}\nprivate void initializeState(BufferedRequestState state) {\nthis.bufferedRequestEntries.addAll(state.getBufferedRequestEntries());\nfor (RequestEntryWrapper wrapper : bufferedRequestEntries) {\nif (wrapper.getSize() > maxRecordSizeInBytes) {\nthrow new IllegalStateException(\nString.format(\n\"State contains record of size %d which exceeds sink maximum record size %d.\",\nwrapper.getSize(), maxRecordSizeInBytes));\n}\n}\nthis.bufferedRequestEntriesTotalSizeInBytes += state.getStateSize();\n}\n@Override\npublic void close() {}\nprivate int getNextBatchSizeLimit() {\nreturn Math.min(maxBatchSize, rateLimitingStrategy.getMaxBatchSize());\n}\nprotected Consumer getFatalExceptionCons() {\nreturn fatalExceptionCons;\n}\n}", + "context_after": "class AsyncSinkWriter\nimplements StatefulSink.StatefulSinkWriter> {\nprivate final MailboxExecutor mailboxExecutor;\nprivate final ProcessingTimeService timeService;\n/* The timestamp of the previous batch of records was sent from this sink. */\nprivate long lastSendTimestamp = 0;\n/* The timestamp of the response to the previous request from this sink. */\nprivate long ackTime = Long.MAX_VALUE;\n/* The sink writer metric group. */\nprivate final SinkWriterMetricGroup metrics;\n/* Counter for number of bytes this sink has attempted to send to the destination. */\nprivate final Counter numBytesSendCounter;\n/* Counter for number of records this sink has attempted to send to the destination. */\nprivate final Counter numRecordsSendCounter;\nprivate final RateLimitingStrategy rateLimitingStrategy;\nprivate final int maxBatchSize;\nprivate final int maxBufferedRequests;\nprivate final long maxBatchSizeInBytes;\nprivate final long maxTimeInBufferMS;\nprivate final long maxRecordSizeInBytes;\n/**\n* The ElementConverter provides a mapping between for the elements of a stream to request\n* entries that can be sent to the destination.\n*\n*
<p>The resulting request entry is buffered by the AsyncSinkWriter and sent to the destination\n* when the {@code submitRequestEntries} method is invoked.\n*/\nprivate final ElementConverter elementConverter;\n/**\n* Buffer to hold request entries that should be persisted into the destination, along with its\n* size in bytes.\n*\n* <p>A request entry contains all relevant details to make a call to the destination. E.g., for\n* Kinesis Data Streams a request entry contains the payload and partition key.\n*\n* <p>
It seems more natural to buffer InputT, i.e., the events that should be persisted, rather\n* than RequestEntryT. However, in practice, the response of a failed request call can make it\n* very hard, if not impossible, to reconstruct the original event. It is much easier to just\n* construct a new (retry) request entry from the response and add that back to the queue for\n* later retry.\n*/\nprivate final Deque> bufferedRequestEntries =\nnew ArrayDeque<>();\n/**\n* Tracks all pending async calls that have been executed since the last checkpoint. Calls that\n* completed (successfully or unsuccessfully) automatically decrement the counter. Any\n* request entry that was not successfully persisted needs to be handled and retried by the\n* logic in {@code submitRequestsToApi}.\n*\n* <p>
To complete a checkpoint, we need to make sure that no requests are in flight, as they may\n* fail, which could then lead to data loss.\n*/\nprivate int inFlightRequestsCount;\n/**\n* Tracks the cumulative size of all elements in {@code bufferedRequestEntries} to facilitate\n* the criterion for flushing after {@code maxBatchSizeInBytes} is reached.\n*/\nprivate double bufferedRequestEntriesTotalSizeInBytes;\nprivate boolean existsActiveTimerCallback = false;\n/**\n* The {@code accept} method should be called on this Consumer if the processing of the {@code\n* requestEntries} raises an exception that should not be retried. Specifically, any action that\n* we are sure will result in the same exception no matter how many times we retry should raise\n* a {@code RuntimeException} here. For example, wrong user credentials. However, it is possible\n* intermittent failures will recover, e.g. flaky network connections, in which case, some other\n* mechanism may be more appropriate.\n*/\nprivate final Consumer fatalExceptionCons;\n/**\n* This method specifies how to persist buffered request entries into the destination. It is\n* implemented when support for a new destination is added.\n*\n*
<p>The method is invoked with a set of request entries according to the buffering hints (and\n* the valid limits of the destination). The logic then needs to create and execute the request\n* asynchronously against the destination (ideally by batching together multiple request entries\n* to increase efficiency). The logic also needs to identify individual request entries that\n* were not persisted successfully and resubmit them using the {@code requestToRetry} callback.\n*\n* <p>From a threading perspective, the mailbox thread will call this method and initiate the\n* asynchronous request to persist the {@code requestEntries}. NOTE: The client must support\n* asynchronous requests and the method called to persist the records must asynchronously\n* execute and return a future with the results of that request. A thread from the destination\n* client thread pool should complete the request and submit the failed entries that should be\n* retried. The {@code requestToRetry} will then trigger the mailbox thread to requeue the\n* unsuccessful elements.\n*\n* <p>An example implementation of this method is included:\n*\n* <pre>{@code\n* @Override\n* protected void submitRequestEntries\n*   (List records, Consumer> requestToRetry) {\n*     Future response = destinationClient.putRecords(records);\n*     response.whenComplete(\n*         (response, error) -> {\n*             if(error){\n*                 List retryableFailedRecords = getRetryableFailed(response);\n*                 requestToRetry.accept(retryableFailedRecords);\n*             }else{\n*                 requestToRetry.accept(Collections.emptyList());\n*             }\n*         }\n*     );\n* }\n*\n* }</pre>\n*\n* <p>
During checkpointing, the sink needs to ensure that there are no outstanding in-flight\n* requests.\n*\n* @param requestEntries a set of request entries that should be sent to the destination\n* @param requestToRetry the {@code accept} method should be called on this Consumer once the\n* processing of the {@code requestEntries} are complete. Any entries that encountered\n* difficulties in persisting should be re-queued through {@code requestToRetry} by\n* including that element in the collection of {@code RequestEntryT}s passed to the {@code\n* accept} method. All other elements are assumed to have been successfully persisted.\n*/\nprotected abstract void submitRequestEntries(\nList requestEntries, Consumer> requestToRetry);\n/**\n* This method allows the getting of the size of a {@code RequestEntryT} in bytes. The size in\n* this case is measured as the total bytes that is written to the destination as a result of\n* persisting this particular {@code RequestEntryT} rather than the serialized length (which may\n* be the same).\n*\n* @param requestEntry the requestEntry for which we want to know the size\n* @return the size of the requestEntry, as defined previously\n*/\nprotected abstract long getSizeInBytes(RequestEntryT requestEntry);\n/**\n* This method is deprecated, please use the constructor that specifies the {@link\n* AsyncSinkWriterConfiguration}.\n*/\n@Deprecated\npublic AsyncSinkWriter(\nElementConverter elementConverter,\nSink.InitContext context,\nint maxBatchSize,\nint maxInFlightRequests,\nint maxBufferedRequests,\nlong maxBatchSizeInBytes,\nlong maxTimeInBufferMS,\nlong maxRecordSizeInBytes) {\nthis(\nelementConverter,\ncontext,\nmaxBatchSize,\nmaxInFlightRequests,\nmaxBufferedRequests,\nmaxBatchSizeInBytes,\nmaxTimeInBufferMS,\nmaxRecordSizeInBytes,\nCollections.emptyList());\n}\n/**\n* This method is deprecated, please use the constructor that specifies the {@link\n* AsyncSinkWriterConfiguration}.\n*/\n@Deprecated\npublic AsyncSinkWriter(\nElementConverter elementConverter,\nSink.InitContext context,\nint maxBatchSize,\nint maxInFlightRequests,\nint maxBufferedRequests,\nlong maxBatchSizeInBytes,\nlong maxTimeInBufferMS,\nlong maxRecordSizeInBytes,\nCollection> states) {\nthis(\nelementConverter,\ncontext,\nAsyncSinkWriterConfiguration.builder()\n.setMaxBatchSize(maxBatchSize)\n.setMaxBatchSizeInBytes(maxBatchSizeInBytes)\n.setMaxInFlightRequests(maxInFlightRequests)\n.setMaxBufferedRequests(maxBufferedRequests)\n.setMaxTimeInBufferMS(maxTimeInBufferMS)\n.setMaxRecordSizeInBytes(maxRecordSizeInBytes)\n.build(),\nstates);\n}\npublic AsyncSinkWriter(\nElementConverter elementConverter,\nSink.InitContext context,\nAsyncSinkWriterConfiguration configuration,\nCollection> states) {\nthis.elementConverter = elementConverter;\nthis.mailboxExecutor = context.getMailboxExecutor();\nthis.timeService = context.getProcessingTimeService();\nPreconditions.checkNotNull(elementConverter);\nPreconditions.checkArgument(configuration.getMaxBatchSize() > 0);\nPreconditions.checkArgument(configuration.getMaxBufferedRequests() > 0);\nPreconditions.checkArgument(configuration.getMaxBatchSizeInBytes() > 0);\nPreconditions.checkArgument(configuration.getMaxTimeInBufferMS() > 0);\nPreconditions.checkArgument(configuration.getMaxRecordSizeInBytes() > 0);\nPreconditions.checkArgument(\nconfiguration.getMaxBufferedRequests() > configuration.getMaxBatchSize(),\n\"The maximum number of requests that may be buffered should be strictly\"\n+ \" greater than the maximum number of requests per 
batch.\");\nPreconditions.checkArgument(\nconfiguration.getMaxBatchSizeInBytes() >= configuration.getMaxRecordSizeInBytes(),\n\"The maximum allowed size in bytes per flush must be greater than or equal to the\"\n+ \" maximum allowed size in bytes of a single record.\");\nPreconditions.checkNotNull(configuration.getRateLimitingStrategy());\nthis.maxBatchSize = configuration.getMaxBatchSize();\nthis.maxBufferedRequests = configuration.getMaxBufferedRequests();\nthis.maxBatchSizeInBytes = configuration.getMaxBatchSizeInBytes();\nthis.maxTimeInBufferMS = configuration.getMaxTimeInBufferMS();\nthis.maxRecordSizeInBytes = configuration.getMaxRecordSizeInBytes();\nthis.rateLimitingStrategy = configuration.getRateLimitingStrategy();\nthis.inFlightRequestsCount = 0;\nthis.bufferedRequestEntriesTotalSizeInBytes = 0;\nthis.metrics = context.metricGroup();\nthis.metrics.setCurrentSendTimeGauge(() -> this.ackTime - this.lastSendTimestamp);\nthis.numBytesSendCounter = this.metrics.getNumBytesSendCounter();\nthis.numRecordsSendCounter = this.metrics.getNumRecordsSendCounter();\nthis.fatalExceptionCons =\nexception ->\nmailboxExecutor.execute(\n() -> {\nthrow exception;\n},\n\"A fatal exception occurred in the sink that cannot be recovered from or should not be retried.\");\ninitializeState(states);\n}\nprivate void registerCallback() {\nProcessingTimeService.ProcessingTimeCallback ptc =\ninstant -> {\nexistsActiveTimerCallback = false;\nwhile (!bufferedRequestEntries.isEmpty()) {\nflush();\n}\n};\ntimeService.registerTimer(timeService.getCurrentProcessingTime() + maxTimeInBufferMS, ptc);\nexistsActiveTimerCallback = true;\n}\n@Override\npublic void write(InputT element, Context context) throws IOException, InterruptedException {\nwhile (bufferedRequestEntries.size() >= maxBufferedRequests) {\nflush();\n}\naddEntryToBuffer(elementConverter.apply(element, context), false);\nnonBlockingFlush();\n}\n/**\n* Determines if a call to flush will be non-blocking (i.e. {@code inFlightRequestsCount} is\n* strictly smaller than {@code maxInFlightRequests}). Also requires one of the following\n* requirements to be met:\n*\n*
<ul>\n*   <li>The number of elements buffered is greater than or equal to the {@code maxBatchSize}\n*   <li>The sum of the size in bytes of all records in the buffer is greater than or equal to\n* {@code maxBatchSizeInBytes}\n* </ul>
\n*/\nprivate void nonBlockingFlush() throws InterruptedException {\nwhile (!rateLimitingStrategy.shouldBlock(createRequestInfo())\n&& (bufferedRequestEntries.size() >= getNextBatchSizeLimit()\n|| bufferedRequestEntriesTotalSizeInBytes >= maxBatchSizeInBytes)) {\nflush();\n}\n}\nprivate BasicRequestInfo createRequestInfo() {\nint batchSize = getNextBatchSize();\nreturn new BasicRequestInfo(batchSize);\n}\n/**\n* Persists buffered RequestsEntries into the destination by invoking {@code\n* submitRequestEntries} with batches according to the user specified buffering hints.\n*\n*
<p>
The method checks with the {@code rateLimitingStrategy} to see if it should block the\n* request.\n*/\nprivate void flush() throws InterruptedException {\nRequestInfo requestInfo = createRequestInfo();\nwhile (rateLimitingStrategy.shouldBlock(requestInfo)) {\nmailboxExecutor.yield();\nrequestInfo = createRequestInfo();\n}\nList batch = createNextAvailableBatch(requestInfo);\nif (batch.size() == 0) {\nreturn;\n}\nint batchSize = requestInfo.getBatchSize();\nlong requestTimestamp = System.currentTimeMillis();\nConsumer> requestToRetry =\nfailedRequestEntries ->\nmailboxExecutor.execute(\n() ->\ncompleteRequest(\nfailedRequestEntries, batchSize, requestTimestamp),\n\"Mark in-flight request as completed and requeue %d request entries\",\nfailedRequestEntries.size());\nrateLimitingStrategy.registerInFlightRequest(requestInfo);\ninFlightRequestsCount++;\nsubmitRequestEntries(batch, requestToRetry);\n}\nprivate int getNextBatchSize() {\nreturn Math.min(getNextBatchSizeLimit(), bufferedRequestEntries.size());\n}\n/**\n* Creates the next batch of request entries while respecting the {@code maxBatchSize} and\n* {@code maxBatchSizeInBytes}. Also adds these to the metrics counters.\n*/\n/**\n* Marks an in-flight request as completed and prepends failed requestEntries back to the\n* internal requestEntry buffer for later retry.\n*\n* @param failedRequestEntries requestEntries that need to be retried\n*/\nprivate void completeRequest(\nList failedRequestEntries, int batchSize, long requestStartTime)\nthrows InterruptedException {\nlastSendTimestamp = requestStartTime;\nackTime = System.currentTimeMillis();\ninFlightRequestsCount--;\nrateLimitingStrategy.registerCompletedRequest(\nnew BasicResultInfo(failedRequestEntries.size(), batchSize));\nListIterator iterator =\nfailedRequestEntries.listIterator(failedRequestEntries.size());\nwhile (iterator.hasPrevious()) {\naddEntryToBuffer(iterator.previous(), true);\n}\nnonBlockingFlush();\n}\nprivate void addEntryToBuffer(RequestEntryT entry, boolean insertAtHead) {\nif (bufferedRequestEntries.isEmpty() && !existsActiveTimerCallback) {\nregisterCallback();\n}\nRequestEntryWrapper wrappedEntry =\nnew RequestEntryWrapper<>(entry, getSizeInBytes(entry));\nif (wrappedEntry.getSize() > maxRecordSizeInBytes) {\nthrow new IllegalArgumentException(\nString.format(\n\"The request entry sent to the buffer was of size [%s], when the maxRecordSizeInBytes was set to [%s].\",\nwrappedEntry.getSize(), maxRecordSizeInBytes));\n}\nif (insertAtHead) {\nbufferedRequestEntries.addFirst(wrappedEntry);\n} else {\nbufferedRequestEntries.add(wrappedEntry);\n}\nbufferedRequestEntriesTotalSizeInBytes += wrappedEntry.getSize();\n}\n/**\n* In flight requests will be retried if the sink is still healthy. But if in-flight requests\n* fail after a checkpoint has been triggered and Flink needs to recover from the checkpoint,\n* the (failed) in-flight requests are gone and cannot be retried. Hence, there cannot be any\n* outstanding in-flight requests when a commit is initialized.\n*\n*
<p>
To this end, all in-flight requests need to completed before proceeding with the commit.\n*/\n@Override\npublic void flush(boolean flush) throws InterruptedException {\nwhile (inFlightRequestsCount > 0 || (bufferedRequestEntries.size() > 0 && flush)) {\nyieldIfThereExistsInFlightRequests();\nif (flush) {\nflush();\n}\n}\n}\nprivate void yieldIfThereExistsInFlightRequests() throws InterruptedException {\nif (inFlightRequestsCount > 0) {\nmailboxExecutor.yield();\n}\n}\n/**\n* All in-flight requests that are relevant for the snapshot have been completed, but there may\n* still be request entries in the internal buffers that are yet to be sent to the endpoint.\n* These request entries are stored in the snapshot state so that they don't get lost in case of\n* a failure/restart of the application.\n*/\n@Override\npublic List> snapshotState(long checkpointId) {\nreturn Collections.singletonList(new BufferedRequestState<>((bufferedRequestEntries)));\n}\nprivate void initializeState(Collection> states) {\nfor (BufferedRequestState state : states) {\ninitializeState(state);\n}\n}\nprivate void initializeState(BufferedRequestState state) {\nthis.bufferedRequestEntries.addAll(state.getBufferedRequestEntries());\nfor (RequestEntryWrapper wrapper : bufferedRequestEntries) {\nif (wrapper.getSize() > maxRecordSizeInBytes) {\nthrow new IllegalStateException(\nString.format(\n\"State contains record of size %d which exceeds sink maximum record size %d.\",\nwrapper.getSize(), maxRecordSizeInBytes));\n}\n}\nthis.bufferedRequestEntriesTotalSizeInBytes += state.getStateSize();\n}\n@Override\npublic void close() {}\nprivate int getNextBatchSizeLimit() {\nreturn Math.min(maxBatchSize, rateLimitingStrategy.getMaxBatchSize());\n}\nprotected Consumer getFatalExceptionCons() {\nreturn fatalExceptionCons;\n}\n}" + }, + { + "comment": "PS: It's just for a message, so it's not a big deal.", + "method_body": "private static TestLauncher determineTestLauncher() {\nStackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();\nint i = stackTrace.length - 1;\nTestLauncher testLauncher = TestLauncher.UNKNOWN;\nwhile (true) {\nStackTraceElement element = stackTrace[i--];\nString className = element.getClassName();\nif (className.startsWith(\"org.apache.maven\")) {\ntestLauncher = TestLauncher.MAVEN;\nbreak;\n}\nif (className.startsWith(\"org.gradle\")) {\ntestLauncher = TestLauncher.GRADLE;\n}\nif (i == 0) {\nbreak;\n}\n}\nreturn testLauncher;\n}", + "target_code": "StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();", + "method_body_after": "private static TestLauncher determineTestLauncher() {\nStackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();\nint i = stackTrace.length - 1;\nTestLauncher testLauncher = TestLauncher.UNKNOWN;\nwhile (true) {\nStackTraceElement element = stackTrace[i--];\nString className = element.getClassName();\nif (className.startsWith(\"org.apache.maven\")) {\ntestLauncher = TestLauncher.MAVEN;\nbreak;\n}\nif (className.startsWith(\"org.gradle\")) {\ntestLauncher = TestLauncher.GRADLE;\n}\nif (i == 0) {\nbreak;\n}\n}\nreturn testLauncher;\n}", + "context_before": "class IntegrationTestUtil {\nprivate IntegrationTestUtil() {\n}\nstatic void ensureNoInjectAnnotationIsUsed(Class testClass) {\nClass current = testClass;\nwhile (current.getSuperclass() != null) {\nfor (Field field : current.getDeclaredFields()) {\nInject injectAnnotation = field.getAnnotation(Inject.class);\nif (injectAnnotation != null) {\nthrow new JUnitException(\n\"@Inject is not 
supported in @NativeImageTest and @QuarkusIntegrationTest tests. Offending field is \"\n+ field.getDeclaringClass().getTypeName() + \".\"\n+ field.getName());\n}\n}\ncurrent = current.getSuperclass();\n}\n}\nstatic Class findProfile(Class testClass) {\nwhile (testClass != Object.class) {\nTestProfile annotation = testClass.getAnnotation(TestProfile.class);\nif (annotation != null) {\nreturn annotation.value();\n}\ntestClass = testClass.getSuperclass();\n}\nreturn null;\n}\nstatic void doProcessTestInstance(Object testInstance, ExtensionContext context) {\nTestHTTPResourceManager.inject(testInstance);\nExtensionContext root = context.getRoot();\nExtensionContext.Store store = root.getStore(ExtensionContext.Namespace.GLOBAL);\nIntegrationTestExtensionState state = store.get(IntegrationTestExtensionState.class.getName(),\nIntegrationTestExtensionState.class);\nstate.getTestResourceManager().inject(testInstance);\n}\nstatic Map getSysPropsToRestore() {\nMap sysPropRestore = new HashMap<>();\nsysPropRestore.put(ProfileManager.QUARKUS_TEST_PROFILE_PROP,\nSystem.getProperty(ProfileManager.QUARKUS_TEST_PROFILE_PROP));\nreturn sysPropRestore;\n}\nstatic TestProfileAndProperties determineTestProfileAndProperties(Class profile,\nMap sysPropRestore) throws InstantiationException, IllegalAccessException {\nfinal Map properties = new HashMap<>();\nQuarkusTestProfile testProfile = null;\nif (profile != null) {\ntestProfile = profile.newInstance();\nproperties.putAll(testProfile.getConfigOverrides());\nfinal Set> enabledAlternatives = testProfile.getEnabledAlternatives();\nif (!enabledAlternatives.isEmpty()) {\nproperties.put(\"quarkus.arc.selected-alternatives\", enabledAlternatives.stream()\n.peek((c) -> {\nif (!c.isAnnotationPresent(Alternative.class)) {\nthrow new RuntimeException(\n\"Enabled alternative \" + c + \" is not annotated with @Alternative\");\n}\n})\n.map(Class::getName).collect(Collectors.joining(\",\")));\n}\nfinal String configProfile = testProfile.getConfigProfile();\nif (configProfile != null) {\nproperties.put(ProfileManager.QUARKUS_PROFILE_PROP, configProfile);\n}\nproperties.put(\"quarkus.configuration.build-time-mismatch-at-runtime\", \"fail\");\nfor (Map.Entry i : properties.entrySet()) {\nsysPropRestore.put(i.getKey(), System.getProperty(i.getKey()));\n}\nfor (Map.Entry i : properties.entrySet()) {\nSystem.setProperty(i.getKey(), i.getValue());\n}\n}\nreturn new TestProfileAndProperties(testProfile, properties);\n}\nstatic void startLauncher(ArtifactLauncher launcher, Map additionalProperties, Runnable sslSetter)\nthrows IOException {\nlauncher.addSystemProperties(additionalProperties);\ntry {\nlauncher.start();\n} catch (IOException e) {\ntry {\nlauncher.close();\n} catch (Throwable ignored) {\n}\nthrow e;\n}\nif (launcher.listensOnSsl()) {\nif (sslSetter != null) {\nsslSetter.run();\n}\n}\n}\nstatic Map handleDevDb(ExtensionContext context) throws Exception {\nClass requiredTestClass = context.getRequiredTestClass();\nPath testClassLocation = getTestClassesLocation(requiredTestClass);\nfinal Path appClassLocation = getAppClassLocationForTestLocation(testClassLocation.toString());\nPathsCollection.Builder rootBuilder = PathsCollection.builder();\nif (!appClassLocation.equals(testClassLocation)) {\nrootBuilder.add(testClassLocation);\nfinal Path testResourcesLocation = PathTestHelper.getResourcesForClassesDirOrNull(testClassLocation, \"test\");\nif (testResourcesLocation != null) {\nrootBuilder.add(testResourcesLocation);\n}\n}\nfinal QuarkusBootstrap.Builder runnerBuilder = 
QuarkusBootstrap.builder()\n.setIsolateDeployment(true)\n.setMode(QuarkusBootstrap.Mode.TEST);\nQuarkusTestProfile profileInstance = null;\nfinal Path projectRoot = Paths.get(\"\").normalize().toAbsolutePath();\nrunnerBuilder.setProjectRoot(projectRoot);\nPath outputDir;\ntry {\noutputDir = projectRoot.resolve(projectRoot.relativize(testClassLocation).getName(0));\n} catch (Exception e) {\noutputDir = projectRoot;\n}\nrunnerBuilder.setTargetDirectory(outputDir);\nrootBuilder.add(appClassLocation);\nfinal Path appResourcesLocation = PathTestHelper.getResourcesForClassesDirOrNull(appClassLocation, \"main\");\nif (appResourcesLocation != null) {\nrootBuilder.add(appResourcesLocation);\n}\nif (System.getProperty(BootstrapConstants.SERIALIZED_TEST_APP_MODEL) == null) {\nQuarkusModel model = BuildToolHelper.enableGradleAppModelForTest(projectRoot);\nif (model != null) {\nfinal PathsCollection classDirectories = PathsUtils\n.toPathsCollection(model.getWorkspace().getMainModule().getSourceSet()\n.getSourceDirectories());\nfor (Path classes : classDirectories) {\nif (Files.exists(classes) && !rootBuilder.contains(classes)) {\nrootBuilder.add(classes);\n}\n}\n}\n} else if (System.getProperty(BootstrapConstants.OUTPUT_SOURCES_DIR) != null) {\nfinal String[] sourceDirectories = System.getProperty(BootstrapConstants.OUTPUT_SOURCES_DIR).split(\",\");\nfor (String sourceDirectory : sourceDirectories) {\nfinal Path directory = Paths.get(sourceDirectory);\nif (Files.exists(directory) && !rootBuilder.contains(directory)) {\nrootBuilder.add(directory);\n}\n}\n}\nrunnerBuilder.setApplicationRoot(rootBuilder.build());\nCuratedApplication curatedApplication = runnerBuilder\n.setTest(true)\n.build()\n.bootstrap();\nIndex testClassesIndex = TestClassIndexer.indexTestClasses(requiredTestClass);\nTestClassIndexer.writeIndex(testClassesIndex, requiredTestClass);\nMap propertyMap = new HashMap<>();\ncuratedApplication\n.createAugmentor()\n.performCustomBuild(NativeDevServicesHandler.class.getName(), new BiConsumer() {\n@Override\npublic void accept(String s, String s2) {\npropertyMap.put(s, s2);\n}\n}, DevServicesNativeConfigResultBuildItem.class.getName());\nreturn propertyMap;\n}\nstatic Properties readQuarkusArtifactProperties(ExtensionContext context) {\nPath buildOutputDirectory = determineBuildOutputDirectory(context);\nPath artifactProperties = buildOutputDirectory.resolve(\"quarkus-artifact.properties\");\nif (!Files.exists(artifactProperties)) {\nTestLauncher testLauncher = determineTestLauncher();\nString errorMessage = \"Unable to locate the artifact metadata file created that must be created by Quarkus in order to run integration tests. \";\nif (testLauncher == TestLauncher.MAVEN) {\nerrorMessage += \"Make sure this test is run after 'mvn package'. 
\";\nif (context.getTestClass().isPresent()) {\nString testClassName = context.getTestClass().get().getName();\nif (testClassName.endsWith(\"Test\")) {\nerrorMessage += \"The easiest way to ensure this is by having the 'maven-failsafe-plugin' run the test instead of the 'maven-surefire-plugin'.\";\n}\n}\n} else if (testLauncher == TestLauncher.GRADLE) {\nerrorMessage += \"Make sure this test is run after the 'quarkusBuild' Gradle task.\";\n} else {\nerrorMessage += \"Make sure this test is run after the Quarkus artifact is built from your build tool.\";\n}\nthrow new IllegalStateException(errorMessage);\n}\ntry {\nProperties properties = new Properties();\nproperties.load(new FileInputStream(artifactProperties.toFile()));\nreturn properties;\n} catch (IOException e) {\nthrow new UncheckedIOException(\n\"Unable to read artifact metadata file created that must be created by Quarkus in order to run integration tests.\",\ne);\n}\n}\nprivate enum TestLauncher {\nMAVEN,\nGRADLE,\nUNKNOWN\n}\nstatic Path determineBuildOutputDirectory(ExtensionContext context) {\nString buildOutputDirStr = System.getProperty(\"build.output.directory\");\nPath result = null;\nif (buildOutputDirStr != null) {\nresult = Paths.get(buildOutputDirStr);\n} else {\nClass testClass = context.getRequiredTestClass();\nfinal CodeSource codeSource = testClass.getProtectionDomain().getCodeSource();\nif (codeSource != null) {\nURL codeSourceLocation = codeSource.getLocation();\nFile artifactPropertiesDirectory = determineBuildOutputDirectory(codeSourceLocation);\nif (artifactPropertiesDirectory == null) {\nthrow new IllegalStateException(\n\"Unable to determine the output of the Quarkus build. Consider setting the 'build.output.directory' system property.\");\n}\nresult = artifactPropertiesDirectory.toPath();\n}\n}\nif (result == null) {\nthrow new IllegalStateException(\n\"Unable to locate the artifact metadata file created that must be created by Quarkus in order to run tests annotated with '@QuarkusIntegrationTest'.\");\n}\nif (!Files.isDirectory(result)) {\nthrow new IllegalStateException(\n\"The determined Quarkus build output '\" + result.toAbsolutePath().toString() + \"' is not a directory\");\n}\nreturn result;\n}\nprivate static File determineBuildOutputDirectory(final URL url) {\nif (url == null) {\nreturn null;\n}\nif (url.getProtocol().equals(\"file\") && url.getPath().endsWith(\"test-classes/\")) {\nreturn toPath(url).getParent().toFile();\n} else if (url.getProtocol().equals(\"file\") && url.getPath().endsWith(\"test/\")) {\nreturn toPath(url).getParent().getParent().getParent().toFile();\n} else if (url.getProtocol().equals(\"file\") && url.getPath().contains(\"/target/surefire/\")) {\nString path = url.getPath();\nint index = path.lastIndexOf(\"/target/\");\ntry {\nreturn Paths.get(new URI(\"file:\" + (path.substring(0, index) + \"/target/\"))).toFile();\n} catch (URISyntaxException e) {\nthrow new RuntimeException(e);\n}\n}\nreturn null;\n}\nprivate static Path toPath(URL url) {\ntry {\nreturn Paths.get(url.toURI());\n} catch (URISyntaxException e) {\nthrow new RuntimeException(e);\n}\n}\n}", + "context_after": "class IntegrationTestUtil {\nprivate IntegrationTestUtil() {\n}\nstatic void ensureNoInjectAnnotationIsUsed(Class testClass) {\nClass current = testClass;\nwhile (current.getSuperclass() != null) {\nfor (Field field : current.getDeclaredFields()) {\nInject injectAnnotation = field.getAnnotation(Inject.class);\nif (injectAnnotation != null) {\nthrow new JUnitException(\n\"@Inject is not supported in 
@NativeImageTest and @QuarkusIntegrationTest tests. Offending field is \"\n+ field.getDeclaringClass().getTypeName() + \".\"\n+ field.getName());\n}\n}\ncurrent = current.getSuperclass();\n}\n}\nstatic Class findProfile(Class testClass) {\nwhile (testClass != Object.class) {\nTestProfile annotation = testClass.getAnnotation(TestProfile.class);\nif (annotation != null) {\nreturn annotation.value();\n}\ntestClass = testClass.getSuperclass();\n}\nreturn null;\n}\nstatic void doProcessTestInstance(Object testInstance, ExtensionContext context) {\nTestHTTPResourceManager.inject(testInstance);\nExtensionContext root = context.getRoot();\nExtensionContext.Store store = root.getStore(ExtensionContext.Namespace.GLOBAL);\nIntegrationTestExtensionState state = store.get(IntegrationTestExtensionState.class.getName(),\nIntegrationTestExtensionState.class);\nstate.getTestResourceManager().inject(testInstance);\n}\nstatic Map getSysPropsToRestore() {\nMap sysPropRestore = new HashMap<>();\nsysPropRestore.put(ProfileManager.QUARKUS_TEST_PROFILE_PROP,\nSystem.getProperty(ProfileManager.QUARKUS_TEST_PROFILE_PROP));\nreturn sysPropRestore;\n}\nstatic TestProfileAndProperties determineTestProfileAndProperties(Class profile,\nMap sysPropRestore) throws InstantiationException, IllegalAccessException {\nfinal Map properties = new HashMap<>();\nQuarkusTestProfile testProfile = null;\nif (profile != null) {\ntestProfile = profile.newInstance();\nproperties.putAll(testProfile.getConfigOverrides());\nfinal Set> enabledAlternatives = testProfile.getEnabledAlternatives();\nif (!enabledAlternatives.isEmpty()) {\nproperties.put(\"quarkus.arc.selected-alternatives\", enabledAlternatives.stream()\n.peek((c) -> {\nif (!c.isAnnotationPresent(Alternative.class)) {\nthrow new RuntimeException(\n\"Enabled alternative \" + c + \" is not annotated with @Alternative\");\n}\n})\n.map(Class::getName).collect(Collectors.joining(\",\")));\n}\nfinal String configProfile = testProfile.getConfigProfile();\nif (configProfile != null) {\nproperties.put(ProfileManager.QUARKUS_PROFILE_PROP, configProfile);\n}\nproperties.put(\"quarkus.configuration.build-time-mismatch-at-runtime\", \"fail\");\nfor (Map.Entry i : properties.entrySet()) {\nsysPropRestore.put(i.getKey(), System.getProperty(i.getKey()));\n}\nfor (Map.Entry i : properties.entrySet()) {\nSystem.setProperty(i.getKey(), i.getValue());\n}\n}\nreturn new TestProfileAndProperties(testProfile, properties);\n}\nstatic void startLauncher(ArtifactLauncher launcher, Map additionalProperties, Runnable sslSetter)\nthrows IOException {\nlauncher.addSystemProperties(additionalProperties);\ntry {\nlauncher.start();\n} catch (IOException e) {\ntry {\nlauncher.close();\n} catch (Throwable ignored) {\n}\nthrow e;\n}\nif (launcher.listensOnSsl()) {\nif (sslSetter != null) {\nsslSetter.run();\n}\n}\n}\nstatic Map handleDevDb(ExtensionContext context) throws Exception {\nClass requiredTestClass = context.getRequiredTestClass();\nPath testClassLocation = getTestClassesLocation(requiredTestClass);\nfinal Path appClassLocation = getAppClassLocationForTestLocation(testClassLocation.toString());\nPathsCollection.Builder rootBuilder = PathsCollection.builder();\nif (!appClassLocation.equals(testClassLocation)) {\nrootBuilder.add(testClassLocation);\nfinal Path testResourcesLocation = PathTestHelper.getResourcesForClassesDirOrNull(testClassLocation, \"test\");\nif (testResourcesLocation != null) {\nrootBuilder.add(testResourcesLocation);\n}\n}\nfinal QuarkusBootstrap.Builder runnerBuilder = 
QuarkusBootstrap.builder()\n.setIsolateDeployment(true)\n.setMode(QuarkusBootstrap.Mode.TEST);\nQuarkusTestProfile profileInstance = null;\nfinal Path projectRoot = Paths.get(\"\").normalize().toAbsolutePath();\nrunnerBuilder.setProjectRoot(projectRoot);\nPath outputDir;\ntry {\noutputDir = projectRoot.resolve(projectRoot.relativize(testClassLocation).getName(0));\n} catch (Exception e) {\noutputDir = projectRoot;\n}\nrunnerBuilder.setTargetDirectory(outputDir);\nrootBuilder.add(appClassLocation);\nfinal Path appResourcesLocation = PathTestHelper.getResourcesForClassesDirOrNull(appClassLocation, \"main\");\nif (appResourcesLocation != null) {\nrootBuilder.add(appResourcesLocation);\n}\nif (System.getProperty(BootstrapConstants.SERIALIZED_TEST_APP_MODEL) == null) {\nQuarkusModel model = BuildToolHelper.enableGradleAppModelForTest(projectRoot);\nif (model != null) {\nfinal PathsCollection classDirectories = PathsUtils\n.toPathsCollection(model.getWorkspace().getMainModule().getSourceSet()\n.getSourceDirectories());\nfor (Path classes : classDirectories) {\nif (Files.exists(classes) && !rootBuilder.contains(classes)) {\nrootBuilder.add(classes);\n}\n}\n}\n} else if (System.getProperty(BootstrapConstants.OUTPUT_SOURCES_DIR) != null) {\nfinal String[] sourceDirectories = System.getProperty(BootstrapConstants.OUTPUT_SOURCES_DIR).split(\",\");\nfor (String sourceDirectory : sourceDirectories) {\nfinal Path directory = Paths.get(sourceDirectory);\nif (Files.exists(directory) && !rootBuilder.contains(directory)) {\nrootBuilder.add(directory);\n}\n}\n}\nrunnerBuilder.setApplicationRoot(rootBuilder.build());\nCuratedApplication curatedApplication = runnerBuilder\n.setTest(true)\n.build()\n.bootstrap();\nIndex testClassesIndex = TestClassIndexer.indexTestClasses(requiredTestClass);\nTestClassIndexer.writeIndex(testClassesIndex, requiredTestClass);\nMap propertyMap = new HashMap<>();\ncuratedApplication\n.createAugmentor()\n.performCustomBuild(NativeDevServicesHandler.class.getName(), new BiConsumer() {\n@Override\npublic void accept(String s, String s2) {\npropertyMap.put(s, s2);\n}\n}, DevServicesNativeConfigResultBuildItem.class.getName());\nreturn propertyMap;\n}\nstatic Properties readQuarkusArtifactProperties(ExtensionContext context) {\nPath buildOutputDirectory = determineBuildOutputDirectory(context);\nPath artifactProperties = buildOutputDirectory.resolve(\"quarkus-artifact.properties\");\nif (!Files.exists(artifactProperties)) {\nTestLauncher testLauncher = determineTestLauncher();\nString errorMessage = \"Unable to locate the artifact metadata file created that must be created by Quarkus in order to run integration tests. \";\nif (testLauncher == TestLauncher.MAVEN) {\nerrorMessage += \"Make sure this test is run after 'mvn package'. 
\";\nif (context.getTestClass().isPresent()) {\nString testClassName = context.getTestClass().get().getName();\nif (testClassName.endsWith(\"Test\")) {\nerrorMessage += \"The easiest way to ensure this is by having the 'maven-failsafe-plugin' run the test instead of the 'maven-surefire-plugin'.\";\n}\n}\n} else if (testLauncher == TestLauncher.GRADLE) {\nerrorMessage += \"Make sure this test is run after the 'quarkusBuild' Gradle task.\";\n} else {\nerrorMessage += \"Make sure this test is run after the Quarkus artifact is built from your build tool.\";\n}\nthrow new IllegalStateException(errorMessage);\n}\ntry {\nProperties properties = new Properties();\nproperties.load(new FileInputStream(artifactProperties.toFile()));\nreturn properties;\n} catch (IOException e) {\nthrow new UncheckedIOException(\n\"Unable to read artifact metadata file created that must be created by Quarkus in order to run integration tests.\",\ne);\n}\n}\nprivate enum TestLauncher {\nMAVEN,\nGRADLE,\nUNKNOWN\n}\nstatic Path determineBuildOutputDirectory(ExtensionContext context) {\nString buildOutputDirStr = System.getProperty(\"build.output.directory\");\nPath result = null;\nif (buildOutputDirStr != null) {\nresult = Paths.get(buildOutputDirStr);\n} else {\nClass testClass = context.getRequiredTestClass();\nfinal CodeSource codeSource = testClass.getProtectionDomain().getCodeSource();\nif (codeSource != null) {\nURL codeSourceLocation = codeSource.getLocation();\nFile artifactPropertiesDirectory = determineBuildOutputDirectory(codeSourceLocation);\nif (artifactPropertiesDirectory == null) {\nthrow new IllegalStateException(\n\"Unable to determine the output of the Quarkus build. Consider setting the 'build.output.directory' system property.\");\n}\nresult = artifactPropertiesDirectory.toPath();\n}\n}\nif (result == null) {\nthrow new IllegalStateException(\n\"Unable to locate the artifact metadata file created that must be created by Quarkus in order to run tests annotated with '@QuarkusIntegrationTest'.\");\n}\nif (!Files.isDirectory(result)) {\nthrow new IllegalStateException(\n\"The determined Quarkus build output '\" + result.toAbsolutePath().toString() + \"' is not a directory\");\n}\nreturn result;\n}\nprivate static File determineBuildOutputDirectory(final URL url) {\nif (url == null) {\nreturn null;\n}\nif (url.getProtocol().equals(\"file\") && url.getPath().endsWith(\"test-classes/\")) {\nreturn toPath(url).getParent().toFile();\n} else if (url.getProtocol().equals(\"file\") && url.getPath().endsWith(\"test/\")) {\nreturn toPath(url).getParent().getParent().getParent().toFile();\n} else if (url.getProtocol().equals(\"file\") && url.getPath().contains(\"/target/surefire/\")) {\nString path = url.getPath();\nint index = path.lastIndexOf(\"/target/\");\ntry {\nreturn Paths.get(new URI(\"file:\" + (path.substring(0, index) + \"/target/\"))).toFile();\n} catch (URISyntaxException e) {\nthrow new RuntimeException(e);\n}\n}\nreturn null;\n}\nprivate static Path toPath(URL url) {\ntry {\nreturn Paths.get(url.toURI());\n} catch (URISyntaxException e) {\nthrow new RuntimeException(e);\n}\n}\n}" + }, + { + "comment": "The 10MB is a soft-limit as that seems large enough to get efficiency wins of batching over rpcs while keeping things more granular for paging in later. But for the dataflow streaming backend there is a hard-limit of 100MB. Incoming elements can be up to that limit, so if those elements are directly written to a bag I want to ensure we don't batch beyond that. 
I will add some comments", + "method_body": "public void asyncClose() throws Exception {\ncheckState(\n!isClosed,\n\"Bag user state is no longer usable because it is closed for %s\",\nrequest.getStateKey());\nisClosed = true;\nif (!isCleared && newValues.isEmpty()) {\nreturn;\n}\nif (isCleared) {\nbeamFnStateClient.handle(\nrequest.toBuilder().setClear(StateClearRequest.getDefaultInstance()));\n}\nif (!newValues.isEmpty()) {\nByteStringOutputStream out = new ByteStringOutputStream();\nfor (T newValue : newValues) {\nint previousSize = out.size();\nvalueCoder.encode(newValue, out);\nif (out.size() > BAG_APPEND_BATCHING_LIMIT && previousSize > 0) {\nbeamFnStateClient.handle(\nrequest\n.toBuilder()\n.setAppend(\nStateAppendRequest.newBuilder()\n.setData(out.consumePrefixToByteString(previousSize))));\n}\nif (out.size() > BAG_APPEND_BATCHING_LIMIT) {\nbeamFnStateClient.handle(\nrequest\n.toBuilder()\n.setAppend(StateAppendRequest.newBuilder().setData(out.toByteStringAndReset())));\n}\n}\nif (out.size() > 0) {\nbeamFnStateClient.handle(\nrequest\n.toBuilder()\n.setAppend(StateAppendRequest.newBuilder().setData(out.toByteStringAndReset())));\n}\n}\nif (isCleared) {\noldValues.clearAndAppend(newValues);\n} else {\noldValues.append(newValues);\n}\n}", + "target_code": "if (out.size() > BAG_APPEND_BATCHING_LIMIT) {", + "method_body_after": "public void asyncClose() throws Exception {\ncheckState(\n!isClosed,\n\"Bag user state is no longer usable because it is closed for %s\",\nrequest.getStateKey());\nisClosed = true;\nif (!isCleared && newValues.isEmpty()) {\nreturn;\n}\nif (isCleared) {\nbeamFnStateClient.handle(\nrequest.toBuilder().setClear(StateClearRequest.getDefaultInstance()));\n}\nif (!newValues.isEmpty()) {\nByteStringOutputStream out = new ByteStringOutputStream();\nfor (T newValue : newValues) {\nint previousSize = out.size();\nvalueCoder.encode(newValue, out);\nif (out.size() > BAG_APPEND_BATCHING_LIMIT && previousSize > 0) {\nbeamFnStateClient.handle(\nrequest\n.toBuilder()\n.setAppend(\nStateAppendRequest.newBuilder()\n.setData(out.consumePrefixToByteString(previousSize))));\n}\nif (out.size() > BAG_APPEND_BATCHING_LIMIT) {\nbeamFnStateClient.handle(\nrequest\n.toBuilder()\n.setAppend(StateAppendRequest.newBuilder().setData(out.toByteStringAndReset())));\n}\n}\nif (out.size() > 0) {\nbeamFnStateClient.handle(\nrequest\n.toBuilder()\n.setAppend(StateAppendRequest.newBuilder().setData(out.toByteStringAndReset())));\n}\n}\nif (isCleared) {\noldValues.clearAndAppend(newValues);\n} else {\noldValues.append(newValues);\n}\n}", + "context_before": "class BagUserState {\nprivate final Cache cache;\nprivate final BeamFnStateClient beamFnStateClient;\nprivate final StateRequest request;\nprivate final Coder valueCoder;\nprivate final CachingStateIterable oldValues;\nprivate List newValues;\nprivate boolean isCleared;\nprivate boolean isClosed;\nstatic final int BAG_APPEND_BATCHING_LIMIT = 10 * 1024 * 1024;\n/** The cache must be namespaced for this state object accordingly. 
*/\npublic BagUserState(\nCache cache,\nBeamFnStateClient beamFnStateClient,\nString instructionId,\nStateKey stateKey,\nCoder valueCoder) {\ncheckArgument(\nstateKey.hasBagUserState(), \"Expected BagUserState StateKey but received %s.\", stateKey);\nthis.cache = cache;\nthis.beamFnStateClient = beamFnStateClient;\nthis.valueCoder = valueCoder;\nthis.request =\nStateRequest.newBuilder().setInstructionId(instructionId).setStateKey(stateKey).build();\nthis.oldValues =\nStateFetchingIterators.readAllAndDecodeStartingFrom(\nthis.cache, beamFnStateClient, request, valueCoder);\nthis.newValues = new ArrayList<>();\n}\npublic PrefetchableIterable get() {\ncheckState(\n!isClosed,\n\"Bag user state is no longer usable because it is closed for %s\",\nrequest.getStateKey());\nif (isCleared) {\nreturn PrefetchableIterables.limit(Collections.unmodifiableList(newValues), newValues.size());\n} else if (newValues.isEmpty()) {\nreturn oldValues;\n}\nreturn PrefetchableIterables.concat(\noldValues, Iterables.limit(Collections.unmodifiableList(newValues), newValues.size()));\n}\npublic void append(T t) {\ncheckState(\n!isClosed,\n\"Bag user state is no longer usable because it is closed for %s\",\nrequest.getStateKey());\nnewValues.add(t);\n}\npublic void clear() {\ncheckState(\n!isClosed,\n\"Bag user state is no longer usable because it is closed for %s\",\nrequest.getStateKey());\nisCleared = true;\nnewValues = new ArrayList<>();\n}\n@SuppressWarnings(\"FutureReturnValueIgnored\")\n}", + "context_after": "class BagUserState {\nprivate final Cache cache;\nprivate final BeamFnStateClient beamFnStateClient;\nprivate final StateRequest request;\nprivate final Coder valueCoder;\nprivate final CachingStateIterable oldValues;\nprivate List newValues;\nprivate boolean isCleared;\nprivate boolean isClosed;\nstatic final int BAG_APPEND_BATCHING_LIMIT = 10 * 1024 * 1024;\n/** The cache must be namespaced for this state object accordingly. 
*/\npublic BagUserState(\nCache cache,\nBeamFnStateClient beamFnStateClient,\nString instructionId,\nStateKey stateKey,\nCoder valueCoder) {\ncheckArgument(\nstateKey.hasBagUserState(), \"Expected BagUserState StateKey but received %s.\", stateKey);\nthis.cache = cache;\nthis.beamFnStateClient = beamFnStateClient;\nthis.valueCoder = valueCoder;\nthis.request =\nStateRequest.newBuilder().setInstructionId(instructionId).setStateKey(stateKey).build();\nthis.oldValues =\nStateFetchingIterators.readAllAndDecodeStartingFrom(\nthis.cache, beamFnStateClient, request, valueCoder);\nthis.newValues = new ArrayList<>();\n}\npublic PrefetchableIterable get() {\ncheckState(\n!isClosed,\n\"Bag user state is no longer usable because it is closed for %s\",\nrequest.getStateKey());\nif (isCleared) {\nreturn PrefetchableIterables.limit(Collections.unmodifiableList(newValues), newValues.size());\n} else if (newValues.isEmpty()) {\nreturn oldValues;\n}\nreturn PrefetchableIterables.concat(\noldValues, Iterables.limit(Collections.unmodifiableList(newValues), newValues.size()));\n}\npublic void append(T t) {\ncheckState(\n!isClosed,\n\"Bag user state is no longer usable because it is closed for %s\",\nrequest.getStateKey());\nnewValues.add(t);\n}\npublic void clear() {\ncheckState(\n!isClosed,\n\"Bag user state is no longer usable because it is closed for %s\",\nrequest.getStateKey());\nisCleared = true;\nnewValues = new ArrayList<>();\n}\n@SuppressWarnings(\"FutureReturnValueIgnored\")\n}" + }, + { + "comment": "Removed `println`.", + "method_body": "public void testNettyClientConnectRetryMultipleThread() throws Exception {\nNettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient();\nUnstableNettyClient unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 2);\nPartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2);\nConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(),\nserverAndClient.server().getConfig().getServerPort()), 0);\nExecutorService threadPoolExecutor = Executors.newFixedThreadPool(10);\nList> futures = new ArrayList<>();\nfor (int i = 0; i < 10; i++) {\nFuture future = threadPoolExecutor.submit(new Callable() {\n@Override\npublic NettyPartitionRequestClient call() {\nNettyPartitionRequestClient client = null;\ntry {\nclient = factory.createPartitionRequestClient(serverAddress);\n} catch (Exception e) {\nSystem.out.println(e.getMessage());\nfail();\n}\nreturn client;\n}\n});\nfutures.add(future);\n}\nfutures.forEach(runnableFuture -> {\nNettyPartitionRequestClient client = null;\ntry {\nclient = runnableFuture.get();\nSystem.out.println(\"Result = \" + client == null ? 
\"null\" : client.toString());\nassertNotNull(client);\n} catch (Exception e) {\nSystem.out.println(e.getMessage());\nfail();\n}\n});\nthreadPoolExecutor.shutdown();\nserverAndClient.client().shutdown();\nserverAndClient.server().shutdown();\n}", + "target_code": "try {", + "method_body_after": "public void testNettyClientConnectRetryMultipleThread() throws Exception {\nNettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient();\nUnstableNettyClient unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 2);\nPartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2);\nConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(),\nserverAndClient.server().getConfig().getServerPort()), 0);\nExecutorService threadPoolExecutor = Executors.newFixedThreadPool(10);\nList> futures = new ArrayList<>();\nfor (int i = 0; i < 10; i++) {\nFuture future = threadPoolExecutor.submit(new Callable() {\n@Override\npublic NettyPartitionRequestClient call() {\nNettyPartitionRequestClient client = null;\ntry {\nclient = factory.createPartitionRequestClient(serverAddress);\n} catch (Exception e) {\nfail(e.getMessage());\n}\nreturn client;\n}\n});\nfutures.add(future);\n}\nfutures.forEach(runnableFuture -> {\nNettyPartitionRequestClient client = null;\ntry {\nclient = runnableFuture.get();\nassertNotNull(client);\n} catch (Exception e) {\nSystem.out.println(e.getMessage());\nfail();\n}\n});\nthreadPoolExecutor.shutdown();\nserverAndClient.client().shutdown();\nserverAndClient.server().shutdown();\n}", + "context_before": "class PartitionRequestClientFactoryTest {\nprivate static final int SERVER_PORT = NetUtils.getAvailablePort();\n@Test\npublic void testNettyClientConnectRetry() throws Exception {\nNettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient();\nUnstableNettyClient unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 2);\nPartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2);\nConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(),\nserverAndClient.server().getConfig().getServerPort()), 0);\nfactory.createPartitionRequestClient(serverAddress);\nserverAndClient.client().shutdown();\nserverAndClient.server().shutdown();\n}\n@Test(expected = CompletionException.class)\npublic void testNettyClientConnectRetryFailure() throws Exception {\nNettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient();\nUnstableNettyClient unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 3);\ntry {\nPartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2);\nConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(),\nserverAndClient.server().getConfig().getServerPort()), 0);\nfactory.createPartitionRequestClient(serverAddress);\n} catch (Exception e) {\nthrow e;\n} finally {\nserverAndClient.client().shutdown();\nserverAndClient.server().shutdown();\n}\n}\n@Test\nprivate NettyTestUtil.NettyServerAndClient createNettyServerAndClient() throws Exception {\nNettyTestUtil.NettyServerAndClient serverAndClient = NettyTestUtil.initServerAndClient(\nnew NettyProtocol(null, null) {\n@Override\npublic ChannelHandler[] getServerChannelHandlers () {\nreturn new ChannelHandler[10];\n}\n@Override\npublic ChannelHandler[] getClientChannelHandlers () {\nreturn new 
ChannelHandler[]{mock(NetworkClientHandler.class)};\n}\n});\nreturn serverAndClient;\n}\nprivate static class UnstableNettyClient extends NettyClient {\nprivate NettyClient nettyClient;\nprivate int retry;\npublic UnstableNettyClient(NettyClient nettyClient, int retry) {\nsuper(null);\nthis.nettyClient = nettyClient;\nthis.retry = retry;\n}\n@Override\nChannelFuture connect(final InetSocketAddress serverSocketAddress) {\nif (retry > 0) {\nretry--;\nthrow new ChannelException(\"Simulate connect failure\");\n}\nreturn nettyClient.connect(serverSocketAddress);\n}\n}\nprivate static class CountDownLatchOnConnectHandler extends ChannelOutboundHandlerAdapter {\nprivate final CountDownLatch syncOnConnect;\npublic CountDownLatchOnConnectHandler(CountDownLatch syncOnConnect) {\nthis.syncOnConnect = syncOnConnect;\n}\n@Override\npublic void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception {\nsyncOnConnect.countDown();\n}\n}\nprivate static class UncaughtTestExceptionHandler implements UncaughtExceptionHandler {\nprivate final List errors = new ArrayList(1);\n@Override\npublic void uncaughtException(Thread t, Throwable e) {\nerrors.add(e);\n}\nprivate List getErrors() {\nreturn errors;\n}\n}\nprivate static Tuple2 createNettyServerAndClient(NettyProtocol protocol) throws IOException {\nfinal NettyConfig config = new NettyConfig(InetAddress.getLocalHost(), SERVER_PORT, 32 * 1024, 1, new Configuration());\nfinal NettyServer server = new NettyServer(config);\nfinal NettyClient client = new NettyClient(config);\nboolean success = false;\ntry {\nNettyBufferPool bufferPool = new NettyBufferPool(1);\nserver.init(protocol, bufferPool);\nclient.init(protocol, bufferPool);\nsuccess = true;\n}\nfinally {\nif (!success) {\nserver.shutdown();\nclient.shutdown();\n}\n}\nreturn new Tuple2(server, client);\n}\nprivate static ConnectionID createServerConnectionID(int connectionIndex) throws UnknownHostException {\nreturn new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), SERVER_PORT), connectionIndex);\n}\n}", + "context_after": "class PartitionRequestClientFactoryTest {\nprivate static final int SERVER_PORT = NetUtils.getAvailablePort();\n@Test\npublic void testNettyClientConnectRetry() throws Exception {\nNettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient();\nUnstableNettyClient unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 2);\nPartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2);\nConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(),\nserverAndClient.server().getConfig().getServerPort()), 0);\nfactory.createPartitionRequestClient(serverAddress);\nserverAndClient.client().shutdown();\nserverAndClient.server().shutdown();\n}\n@Test(expected = CompletionException.class)\npublic void testNettyClientConnectRetryFailure() throws Exception {\nNettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient();\nUnstableNettyClient unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 3);\ntry {\nPartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2);\nConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(),\nserverAndClient.server().getConfig().getServerPort()), 0);\nfactory.createPartitionRequestClient(serverAddress);\n} catch (Exception e) {\nthrow e;\n} finally 
{\nserverAndClient.client().shutdown();\nserverAndClient.server().shutdown();\n}\n}\n@Test\nprivate NettyTestUtil.NettyServerAndClient createNettyServerAndClient() throws Exception {\nNettyTestUtil.NettyServerAndClient serverAndClient = NettyTestUtil.initServerAndClient(\nnew NettyProtocol(null, null) {\n@Override\npublic ChannelHandler[] getServerChannelHandlers () {\nreturn new ChannelHandler[10];\n}\n@Override\npublic ChannelHandler[] getClientChannelHandlers () {\nreturn new ChannelHandler[]{mock(NetworkClientHandler.class)};\n}\n});\nreturn serverAndClient;\n}\nprivate static class UnstableNettyClient extends NettyClient {\nprivate NettyClient nettyClient;\nprivate int retry;\npublic UnstableNettyClient(NettyClient nettyClient, int retry) {\nsuper(null);\nthis.nettyClient = nettyClient;\nthis.retry = retry;\n}\n@Override\nChannelFuture connect(final InetSocketAddress serverSocketAddress) {\nif (retry > 0) {\nretry--;\nthrow new ChannelException(\"Simulate connect failure\");\n}\nreturn nettyClient.connect(serverSocketAddress);\n}\n}\nprivate static class CountDownLatchOnConnectHandler extends ChannelOutboundHandlerAdapter {\nprivate final CountDownLatch syncOnConnect;\npublic CountDownLatchOnConnectHandler(CountDownLatch syncOnConnect) {\nthis.syncOnConnect = syncOnConnect;\n}\n@Override\npublic void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception {\nsyncOnConnect.countDown();\n}\n}\nprivate static class UncaughtTestExceptionHandler implements UncaughtExceptionHandler {\nprivate final List errors = new ArrayList(1);\n@Override\npublic void uncaughtException(Thread t, Throwable e) {\nerrors.add(e);\n}\nprivate List getErrors() {\nreturn errors;\n}\n}\nprivate static Tuple2 createNettyServerAndClient(NettyProtocol protocol) throws IOException {\nfinal NettyConfig config = new NettyConfig(InetAddress.getLocalHost(), SERVER_PORT, 32 * 1024, 1, new Configuration());\nfinal NettyServer server = new NettyServer(config);\nfinal NettyClient client = new NettyClient(config);\nboolean success = false;\ntry {\nNettyBufferPool bufferPool = new NettyBufferPool(1);\nserver.init(protocol, bufferPool);\nclient.init(protocol, bufferPool);\nsuccess = true;\n}\nfinally {\nif (!success) {\nserver.shutdown();\nclient.shutdown();\n}\n}\nreturn new Tuple2(server, client);\n}\nprivate static ConnectionID createServerConnectionID(int connectionIndex) throws UnknownHostException {\nreturn new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), SERVER_PORT), connectionIndex);\n}\n}" + }, + { + "comment": "I updated the PR to check for the classes' presence in the classpath instead. 
", + "method_body": "private void registerCompulsoryClasses(BuildProducer reflectiveClasses) {\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, StreamsPartitionAssignor.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, DefaultProductionExceptionHandler.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, FailOnInvalidTimestamp.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, true, true,\norg.apache.kafka.streams.processor.internals.assignment.HighAvailabilityTaskAssignor.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, true, true,\norg.apache.kafka.streams.processor.internals.assignment.StickyTaskAssignor.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, true, true,\norg.apache.kafka.streams.processor.internals.assignment.FallbackPriorTaskAssignor.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, true, true,\n\"org.apache.kafka.streams.processor.internals.StateDirectory$StateDirectoryProcessFile\"));\n}", + "target_code": "reflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, DefaultProductionExceptionHandler.class));", + "method_body_after": "private void registerCompulsoryClasses(BuildProducer reflectiveClasses) {\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, StreamsPartitionAssignor.class));\nif (QuarkusClassLoader.isClassPresentAtRuntime(DEFAULT_PARTITION_GROUPER)) {\nreflectiveClasses.produce(\nnew ReflectiveClassBuildItem(true, false, false, DEFAULT_PARTITION_GROUPER));\n}\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, DefaultProductionExceptionHandler.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, FailOnInvalidTimestamp.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, true, true,\norg.apache.kafka.streams.processor.internals.assignment.HighAvailabilityTaskAssignor.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, true, true,\norg.apache.kafka.streams.processor.internals.assignment.StickyTaskAssignor.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, true, true,\norg.apache.kafka.streams.processor.internals.assignment.FallbackPriorTaskAssignor.class));\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, true, true,\n\"org.apache.kafka.streams.processor.internals.StateDirectory$StateDirectoryProcessFile\"));\n}", + "context_before": "class KafkaStreamsProcessor {\n@BuildStep\nvoid build(BuildProducer feature,\nBuildProducer reflectiveClasses,\nBuildProducer jniRuntimeAccessibleClasses,\nBuildProducer reinitialized,\nBuildProducer nativeLibs,\nLaunchModeBuildItem launchMode,\nNativeConfig config) throws IOException {\nfeature.produce(new FeatureBuildItem(Feature.KAFKA_STREAMS));\nregisterClassesThatAreLoadedThroughReflection(reflectiveClasses, launchMode);\nregisterClassesThatAreAccessedViaJni(jniRuntimeAccessibleClasses);\naddSupportForRocksDbLib(nativeLibs, config);\nenableLoadOfNativeLibs(reinitialized);\n}\nprivate void registerClassesThatAreLoadedThroughReflection(BuildProducer reflectiveClasses,\nLaunchModeBuildItem launchMode) {\nregisterCompulsoryClasses(reflectiveClasses);\nregisterClassesThatClientMaySpecify(reflectiveClasses, launchMode);\n}\nprivate void registerClassesThatClientMaySpecify(BuildProducer reflectiveClasses,\nLaunchModeBuildItem launchMode) {\nProperties properties = 
buildKafkaStreamsProperties(launchMode.getLaunchMode());\nregisterExceptionHandler(reflectiveClasses, properties);\nregisterDefaultSerdes(reflectiveClasses, properties);\n}\nprivate void registerExceptionHandler(BuildProducer reflectiveClasses,\nProperties kafkaStreamsProperties) {\nString exceptionHandlerClassName = kafkaStreamsProperties\n.getProperty(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG);\nif (exceptionHandlerClassName == null) {\nregisterDefaultExceptionHandler(reflectiveClasses);\n} else {\nregisterClassName(reflectiveClasses, exceptionHandlerClassName);\n}\n}\nprivate void registerDefaultExceptionHandler(BuildProducer reflectiveClasses) {\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, LogAndFailExceptionHandler.class));\n}\nprivate void registerDefaultSerdes(BuildProducer reflectiveClasses,\nProperties kafkaStreamsProperties) {\nString defaultKeySerdeClass = kafkaStreamsProperties.getProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG);\nString defaultValueSerdeClass = kafkaStreamsProperties.getProperty(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG);\nif (defaultKeySerdeClass != null) {\nregisterClassName(reflectiveClasses, defaultKeySerdeClass);\n}\nif (defaultValueSerdeClass != null) {\nregisterClassName(reflectiveClasses, defaultValueSerdeClass);\n}\nif (!allDefaultSerdesAreDefinedInProperties(defaultKeySerdeClass, defaultValueSerdeClass)) {\nregisterDefaultSerde(reflectiveClasses);\n}\n}\nprivate void registerClassesThatAreAccessedViaJni(BuildProducer jniRuntimeAccessibleClasses) {\njniRuntimeAccessibleClasses\n.produce(new JniRuntimeAccessBuildItem(true, false, false, RocksDBException.class, Status.class));\n}\nprivate void addSupportForRocksDbLib(BuildProducer nativeLibs, NativeConfig nativeConfig) {\nif (nativeConfig.isContainerBuild()) {\nnativeLibs.produce(new NativeImageResourceBuildItem(\"librocksdbjni-linux64.so\"));\n}\nelse {\nnativeLibs.produce(new NativeImageResourceBuildItem(Environment.getJniLibraryFileName(\"rocksdb\")));\n}\n}\nprivate void enableLoadOfNativeLibs(BuildProducer reinitialized) {\nreinitialized.produce(new RuntimeReinitializedClassBuildItem(\"org.rocksdb.RocksDB\"));\n}\nprivate void registerClassName(BuildProducer reflectiveClasses, String defaultKeySerdeClass) {\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, defaultKeySerdeClass));\n}\nprivate boolean allDefaultSerdesAreDefinedInProperties(String defaultKeySerdeClass, String defaultValueSerdeClass) {\nreturn defaultKeySerdeClass != null && defaultValueSerdeClass != null;\n}\nprivate void registerDefaultSerde(BuildProducer reflectiveClasses) {\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, ByteArraySerde.class));\n}\n@BuildStep\n@Record(ExecutionTime.STATIC_INIT)\nvoid processBuildTimeConfig(KafkaStreamsRecorder recorder, LaunchModeBuildItem launchMode,\nBuildProducer syntheticBeanBuildItemBuildProducer,\nBuildProducer additionalBeans) {\nProperties kafkaStreamsProperties = buildKafkaStreamsProperties(launchMode.getLaunchMode());\nsyntheticBeanBuildItemBuildProducer.produce(SyntheticBeanBuildItem.configure(KafkaStreamsSupport.class)\n.scope(Singleton.class)\n.supplier(recorder.kafkaStreamsSupportSupplier(kafkaStreamsProperties))\n.done());\nadditionalBeans\n.produce(AdditionalBeanBuildItem.builder().addBeanClasses(KafkaStreamsProducer.class).setUnremovable().build());\n}\n@BuildStep\n@Record(ExecutionTime.RUNTIME_INIT)\nvoid loadRocksDb(KafkaStreamsRecorder recorder, 
KafkaStreamsRuntimeConfig runtimeConfig) {\nrecorder.loadRocksDb();\n}\n@BuildStep\nvoid addHealthChecks(KafkaStreamsBuildTimeConfig buildTimeConfig, BuildProducer healthChecks) {\nhealthChecks.produce(\nnew HealthBuildItem(\n\"io.quarkus.kafka.streams.runtime.health.KafkaStreamsTopicsHealthCheck\",\nbuildTimeConfig.healthEnabled));\nhealthChecks.produce(\nnew HealthBuildItem(\n\"io.quarkus.kafka.streams.runtime.health.KafkaStreamsStateHealthCheck\",\nbuildTimeConfig.healthEnabled));\n}\n}", + "context_after": "class KafkaStreamsProcessor {\npublic static final String DEFAULT_PARTITION_GROUPER = \"org.apache.kafka.streams.processor.DefaultPartitionGrouper\";\n@BuildStep\nvoid build(BuildProducer feature,\nBuildProducer reflectiveClasses,\nBuildProducer jniRuntimeAccessibleClasses,\nBuildProducer reinitialized,\nBuildProducer nativeLibs,\nLaunchModeBuildItem launchMode,\nNativeConfig config) throws IOException {\nfeature.produce(new FeatureBuildItem(Feature.KAFKA_STREAMS));\nregisterClassesThatAreLoadedThroughReflection(reflectiveClasses, launchMode);\nregisterClassesThatAreAccessedViaJni(jniRuntimeAccessibleClasses);\naddSupportForRocksDbLib(nativeLibs, config);\nenableLoadOfNativeLibs(reinitialized);\n}\nprivate void registerClassesThatAreLoadedThroughReflection(BuildProducer reflectiveClasses,\nLaunchModeBuildItem launchMode) {\nregisterCompulsoryClasses(reflectiveClasses);\nregisterClassesThatClientMaySpecify(reflectiveClasses, launchMode);\n}\nprivate void registerClassesThatClientMaySpecify(BuildProducer reflectiveClasses,\nLaunchModeBuildItem launchMode) {\nProperties properties = buildKafkaStreamsProperties(launchMode.getLaunchMode());\nregisterExceptionHandler(reflectiveClasses, properties);\nregisterDefaultSerdes(reflectiveClasses, properties);\n}\nprivate void registerExceptionHandler(BuildProducer reflectiveClasses,\nProperties kafkaStreamsProperties) {\nString exceptionHandlerClassName = kafkaStreamsProperties\n.getProperty(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG);\nif (exceptionHandlerClassName == null) {\nregisterDefaultExceptionHandler(reflectiveClasses);\n} else {\nregisterClassName(reflectiveClasses, exceptionHandlerClassName);\n}\n}\nprivate void registerDefaultExceptionHandler(BuildProducer reflectiveClasses) {\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, LogAndFailExceptionHandler.class));\n}\nprivate void registerDefaultSerdes(BuildProducer reflectiveClasses,\nProperties kafkaStreamsProperties) {\nString defaultKeySerdeClass = kafkaStreamsProperties.getProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG);\nString defaultValueSerdeClass = kafkaStreamsProperties.getProperty(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG);\nif (defaultKeySerdeClass != null) {\nregisterClassName(reflectiveClasses, defaultKeySerdeClass);\n}\nif (defaultValueSerdeClass != null) {\nregisterClassName(reflectiveClasses, defaultValueSerdeClass);\n}\nif (!allDefaultSerdesAreDefinedInProperties(defaultKeySerdeClass, defaultValueSerdeClass)) {\nregisterDefaultSerde(reflectiveClasses);\n}\n}\nprivate void registerClassesThatAreAccessedViaJni(BuildProducer jniRuntimeAccessibleClasses) {\njniRuntimeAccessibleClasses\n.produce(new JniRuntimeAccessBuildItem(true, false, false, RocksDBException.class, Status.class));\n}\nprivate void addSupportForRocksDbLib(BuildProducer nativeLibs, NativeConfig nativeConfig) {\nif (nativeConfig.isContainerBuild()) {\nnativeLibs.produce(new NativeImageResourceBuildItem(\"librocksdbjni-linux64.so\"));\n}\nelse 
{\nnativeLibs.produce(new NativeImageResourceBuildItem(Environment.getJniLibraryFileName(\"rocksdb\")));\n}\n}\nprivate void enableLoadOfNativeLibs(BuildProducer reinitialized) {\nreinitialized.produce(new RuntimeReinitializedClassBuildItem(\"org.rocksdb.RocksDB\"));\n}\nprivate void registerClassName(BuildProducer reflectiveClasses, String defaultKeySerdeClass) {\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, defaultKeySerdeClass));\n}\nprivate boolean allDefaultSerdesAreDefinedInProperties(String defaultKeySerdeClass, String defaultValueSerdeClass) {\nreturn defaultKeySerdeClass != null && defaultValueSerdeClass != null;\n}\nprivate void registerDefaultSerde(BuildProducer reflectiveClasses) {\nreflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, false, ByteArraySerde.class));\n}\n@BuildStep\n@Record(ExecutionTime.STATIC_INIT)\nvoid processBuildTimeConfig(KafkaStreamsRecorder recorder, LaunchModeBuildItem launchMode,\nBuildProducer syntheticBeanBuildItemBuildProducer,\nBuildProducer additionalBeans) {\nProperties kafkaStreamsProperties = buildKafkaStreamsProperties(launchMode.getLaunchMode());\nsyntheticBeanBuildItemBuildProducer.produce(SyntheticBeanBuildItem.configure(KafkaStreamsSupport.class)\n.scope(Singleton.class)\n.supplier(recorder.kafkaStreamsSupportSupplier(kafkaStreamsProperties))\n.done());\nadditionalBeans\n.produce(AdditionalBeanBuildItem.builder().addBeanClasses(KafkaStreamsProducer.class).setUnremovable().build());\n}\n@BuildStep\n@Record(ExecutionTime.RUNTIME_INIT)\nvoid loadRocksDb(KafkaStreamsRecorder recorder, KafkaStreamsRuntimeConfig runtimeConfig) {\nrecorder.loadRocksDb();\n}\n@BuildStep\nvoid addHealthChecks(KafkaStreamsBuildTimeConfig buildTimeConfig, BuildProducer healthChecks) {\nhealthChecks.produce(\nnew HealthBuildItem(\n\"io.quarkus.kafka.streams.runtime.health.KafkaStreamsTopicsHealthCheck\",\nbuildTimeConfig.healthEnabled));\nhealthChecks.produce(\nnew HealthBuildItem(\n\"io.quarkus.kafka.streams.runtime.health.KafkaStreamsStateHealthCheck\",\nbuildTimeConfig.healthEnabled));\n}\n}" + }, + { + "comment": "Shall we assert this in `join-clause.bal` itself. 
", + "method_body": "public void testJoinClauseWithLargeList() {\nObject values = BRunUtil.invoke(result, \"testJoinClauseWithLargeList\");\nAssert.assertTrue((Boolean) values);\n}", + "target_code": "Assert.assertTrue((Boolean) values);", + "method_body_after": "public void testJoinClauseWithLargeList() {\nBRunUtil.invoke(result, \"testJoinClauseWithLargeList\");\n}", + "context_before": "class JoinClauseTest {\nprivate CompileResult result;\nprivate CompileResult negativeResult;\n@BeforeClass\npublic void setup() {\nresult = BCompileUtil.compile(\"test-src/query/join-clause.bal\");\nnegativeResult = BCompileUtil.compile(\"test-src/query/join-clause-negative.bal\");\n}\n@Test(description = \"Test join clause with record variable definition\")\npublic void testSimpleJoinClauseWithRecordVariable() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with field name : variable name\")\npublic void testSimpleJoinClauseWithRecordVariable2() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable2\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with variable name\")\npublic void testSimpleJoinClauseWithRecordVariable3() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable3\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause with simple variable definition and stream\")\npublic void testJoinClauseWithStream() {\nObject values = BRunUtil.invoke(result, \"testJoinClauseWithStream\", new Object[]{});\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test query expr with join and limit clause\")\npublic void testJoinClauseWithLimit() {\nObject values = BRunUtil.invoke(result, \"testJoinClauseWithLimit\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test outer join clause with record variable definition\")\npublic void testOuterJoinClauseWithRecordVariable() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with field name : variable name\")\npublic void testOuterJoinClauseWithRecordVariable2() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable2\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with variable name\")\npublic void testOuterJoinClauseWithRecordVariable3() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable3\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause with simple variable definition and stream\")\npublic void testOuterJoinClauseWithStream() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithStream\", new Object[]{});\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test query expr with join and limit clause\")\npublic void testOuterJoinClauseWithLimit() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithLimit\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test equals clause with a variable defined from a let clause\")\npublic void testSimpleJoinClauseWithLetAndEquals() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithLetAndEquals\");\nAssert.assertTrue((Boolean) 
values);\n}\n@Test(description = \"Test equals clause with a function invocation\")\npublic void testSimpleJoinClauseWithFunctionInAnEquals() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithFunctionInAnEquals\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test outer join with null results\")\npublic void testOuterJoinWithNullResults() {\nBRunUtil.invoke(result, \"testOuterJoin\");\n}\n@Test(description = \"Test join clause with a large list\")\n@Test(description = \"Test negative scenarios for query expr with join clause\")\npublic void testNegativeScenarios() {\nAssert.assertEquals(negativeResult.getErrorCount(), 40);\nint i = 0;\nvalidateError(negativeResult, i++, \"incompatible types: expected 'Department', found 'Person'\", 46, 13);\nvalidateError(negativeResult, i++, \"undeclared field 'name' in record 'Person'\", 51, 19);\nvalidateError(negativeResult, i++, \"unknown type 'XYZ'\", 69, 13);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'int', found 'other'\", 70, 28);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptId'\", 93, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 93, 25);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 116, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 116, 21);\nvalidateError(negativeResult, i++, \"undefined symbol 'name'\", 140, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 140, 23);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptId'\", 163, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 163, 25);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 186, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 186, 21);\nvalidateError(negativeResult, i++, \"undefined symbol 'name'\", 210, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 210, 23);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 234, 23);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'string', found 'other'\", 234, 34);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 234, 34);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'boolean', found 'other'\", 266, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 266, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 266, 1);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'boolean', found 'other'\", 289, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 289, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 289, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 309, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 309, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 309, 1);\nvalidateError(negativeResult, i++, \"missing on keyword\", 309, 1);\nvalidateError(negativeResult, i++, \"undefined symbol 'dept'\", 329, 24);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 330, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 330, 1);\nvalidateError(negativeResult, i++, \"outer join must be declared with 'var'\", 353, 19);\nvalidateError(negativeResult, i++, \"undefined symbol 'dept'\", 357, 19);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' 
does not support field access\", 374, 16);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'int', found 'other'\", 389, 59);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 389, 59);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 395, 22);\nvalidateError(negativeResult, i++, \"order by not supported for complex type fields, order key should belong\" +\n\" to a basic type\", 395, 22);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 397, 36);\n}\n@AfterClass\npublic void tearDown() {\nresult = null;\nnegativeResult = null;\n}\n}", + "context_after": "class JoinClauseTest {\nprivate CompileResult result;\nprivate CompileResult negativeResult;\n@BeforeClass\npublic void setup() {\nresult = BCompileUtil.compile(\"test-src/query/join-clause.bal\");\nnegativeResult = BCompileUtil.compile(\"test-src/query/join-clause-negative.bal\");\n}\n@Test(description = \"Test join clause with record variable definition\")\npublic void testSimpleJoinClauseWithRecordVariable() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with field name : variable name\")\npublic void testSimpleJoinClauseWithRecordVariable2() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable2\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with variable name\")\npublic void testSimpleJoinClauseWithRecordVariable3() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithRecordVariable3\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause with simple variable definition and stream\")\npublic void testJoinClauseWithStream() {\nObject values = BRunUtil.invoke(result, \"testJoinClauseWithStream\", new Object[]{});\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test query expr with join and limit clause\")\npublic void testJoinClauseWithLimit() {\nObject values = BRunUtil.invoke(result, \"testJoinClauseWithLimit\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test outer join clause with record variable definition\")\npublic void testOuterJoinClauseWithRecordVariable() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with field name : variable name\")\npublic void testOuterJoinClauseWithRecordVariable2() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable2\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause having mapping binding with variable name\")\npublic void testOuterJoinClauseWithRecordVariable3() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithRecordVariable3\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test join clause with simple variable definition and stream\")\npublic void testOuterJoinClauseWithStream() {\nObject values = BRunUtil.invoke(result, \"testOuterJoinClauseWithStream\", new Object[]{});\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test query expr with join and limit clause\")\npublic void testOuterJoinClauseWithLimit() {\nObject values = 
BRunUtil.invoke(result, \"testOuterJoinClauseWithLimit\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test equals clause with a variable defined from a let clause\")\npublic void testSimpleJoinClauseWithLetAndEquals() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithLetAndEquals\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test equals clause with a function invocation\")\npublic void testSimpleJoinClauseWithFunctionInAnEquals() {\nObject values = BRunUtil.invoke(result, \"testSimpleJoinClauseWithFunctionInAnEquals\");\nAssert.assertTrue((Boolean) values);\n}\n@Test(description = \"Test outer join with null results\")\npublic void testOuterJoinWithNullResults() {\nBRunUtil.invoke(result, \"testOuterJoin\");\n}\n@Test(description = \"Test join clause with a large list\")\n@Test(description = \"Test negative scenarios for query expr with join clause\")\npublic void testNegativeScenarios() {\nAssert.assertEquals(negativeResult.getErrorCount(), 40);\nint i = 0;\nvalidateError(negativeResult, i++, \"incompatible types: expected 'Department', found 'Person'\", 46, 13);\nvalidateError(negativeResult, i++, \"undeclared field 'name' in record 'Person'\", 51, 19);\nvalidateError(negativeResult, i++, \"unknown type 'XYZ'\", 69, 13);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'int', found 'other'\", 70, 28);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptId'\", 93, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 93, 25);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 116, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 116, 21);\nvalidateError(negativeResult, i++, \"undefined symbol 'name'\", 140, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 140, 23);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptId'\", 163, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 163, 25);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 186, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'person'\", 186, 21);\nvalidateError(negativeResult, i++, \"undefined symbol 'name'\", 210, 11);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 210, 23);\nvalidateError(negativeResult, i++, \"undefined symbol 'id'\", 234, 23);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'string', found 'other'\", 234, 34);\nvalidateError(negativeResult, i++, \"undefined symbol 'deptName'\", 234, 34);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'boolean', found 'other'\", 266, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 266, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 266, 1);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'boolean', found 'other'\", 289, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 289, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 289, 1);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 309, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 309, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 309, 1);\nvalidateError(negativeResult, i++, \"missing on keyword\", 309, 1);\nvalidateError(negativeResult, i++, \"undefined symbol 'dept'\", 329, 24);\nvalidateError(negativeResult, i++, \"missing equals keyword\", 330, 1);\nvalidateError(negativeResult, i++, \"missing identifier\", 
330, 1);\nvalidateError(negativeResult, i++, \"outer join must be declared with 'var'\", 353, 19);\nvalidateError(negativeResult, i++, \"undefined symbol 'dept'\", 357, 19);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 374, 16);\nvalidateError(negativeResult, i++, \"incompatible types: expected 'int', found 'other'\", 389, 59);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 389, 59);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 395, 22);\nvalidateError(negativeResult, i++, \"order by not supported for complex type fields, order key should belong\" +\n\" to a basic type\", 395, 22);\nvalidateError(negativeResult, i++, \"invalid operation: type 'Person?' does not support field access\", 397, 36);\n}\n@AfterClass\npublic void tearDown() {\nresult = null;\nnegativeResult = null;\n}\n}" + }, + { + "comment": "Please add unit tests to fully cover this logic.", + "method_body": "public void start(Receiver sparkReceiver) {\nthis.sparkReceiver = sparkReceiver;\nfinal SerializableFunction storeFn =\n(input) -> {\nif (input == null) {\nreturn null;\n}\nif (input[0] instanceof ByteBuffer) {\nfinal ByteBuffer byteBuffer = ((ByteBuffer) input[0]).asReadOnlyBuffer();\nfinal byte[] bytes = new byte[byteBuffer.limit()];\nbyteBuffer.get(bytes);\nfinal V record = SerializationUtils.deserialize(bytes);\nrecordsQueue.offer(record);\n} else if (input[0] instanceof Iterator) {\nfinal Iterator iterator = (Iterator) input[0];\nwhile (iterator.hasNext()) {\nV record = (V) iterator.next();\nrecordsQueue.offer(record);\n}\n} else if (input[0] instanceof ArrayBuffer) {\nfinal ArrayBuffer arrayBuffer = (ArrayBuffer) input[0];\nfinal Iterator iterator = arrayBuffer.iterator();\nwhile (iterator.hasNext()) {\nV record = (V) iterator.next();\nrecordsQueue.offer(record);\n}\n} else {\nV record = (V) input[0];\nrecordsQueue.offer(record);\n}\nreturn null;\n};\ntry {\nnew WrappedSupervisor(sparkReceiver, new SparkConf(), storeFn);\n} catch (Exception e) {\nLOG.error(\"Can not init Spark Receiver!\", e);\nthrow new IllegalStateException(\"Spark Receiver was not initialized\");\n}\n((HasOffset) sparkReceiver).setStartOffset(startOffset);\nsparkReceiver.supervisor().startReceiver();\ntry {\nTimeUnit.MILLISECONDS.sleep(START_POLL_TIMEOUT_MS);\n} catch (InterruptedException e) {\nLOG.error(\"SparkReceiver was interrupted before polling started\", e);\nthrow new IllegalStateException(\"Spark Receiver was interrupted before polling started\");\n}\n}", + "target_code": "while (iterator.hasNext()) {", + "method_body_after": "public void start(Receiver sparkReceiver) {\nthis.sparkReceiver = sparkReceiver;\nfinal SerializableFunction storeFn =\n(input) -> {\nif (input == null) {\nreturn null;\n}\n/*\nUse only [0] element - data.\nThe other elements are not needed because they are related to Spark environment options.\n*/\nObject data = input[0];\nif (data instanceof ByteBuffer) {\nfinal ByteBuffer byteBuffer = ((ByteBuffer) data).asReadOnlyBuffer();\nfinal byte[] bytes = new byte[byteBuffer.limit()];\nbyteBuffer.get(bytes);\nfinal V record = SerializationUtils.deserialize(bytes);\nrecordsQueue.offer(record);\n} else if (data instanceof Iterator) {\nfinal Iterator iterator = (Iterator) data;\nwhile (iterator.hasNext()) {\nV record = iterator.next();\nrecordsQueue.offer(record);\n}\n} else if (data instanceof ArrayBuffer) {\nfinal ArrayBuffer arrayBuffer = 
(ArrayBuffer) data;\nfinal Iterator iterator = arrayBuffer.iterator();\nwhile (iterator.hasNext()) {\nV record = iterator.next();\nrecordsQueue.offer(record);\n}\n} else {\nV record = (V) data;\nrecordsQueue.offer(record);\n}\nreturn null;\n};\ntry {\nnew WrappedSupervisor(sparkReceiver, new SparkConf(), storeFn);\n} catch (Exception e) {\nLOG.error(\"Can not init Spark Receiver!\", e);\nthrow new IllegalStateException(\"Spark Receiver was not initialized\");\n}\n((HasOffset) sparkReceiver).setStartOffset(startOffset);\nsparkReceiver.supervisor().startReceiver();\ntry {\nTimeUnit.MILLISECONDS.sleep(START_POLL_TIMEOUT_MS);\n} catch (InterruptedException e) {\nLOG.error(\"SparkReceiver was interrupted before polling started\", e);\nthrow new IllegalStateException(\"Spark Receiver was interrupted before polling started\");\n}\n}", + "context_before": "class SparkConsumerWithOffset implements SparkConsumer {\nprivate final Queue recordsQueue;\nprivate @Nullable Receiver sparkReceiver;\nprivate final Long startOffset;\nSparkConsumerWithOffset(Long startOffset) {\nthis.startOffset = startOffset;\nthis.recordsQueue = new ConcurrentLinkedQueue<>();\n}\n@Override\npublic boolean hasRecords() {\nreturn !recordsQueue.isEmpty();\n}\n@Override\npublic @Nullable V poll() {\nreturn recordsQueue.poll();\n}\n@Override\n@Override\npublic void stop() {\nif (sparkReceiver != null) {\nsparkReceiver.stop(\"SparkReceiver is stopped.\");\n}\nrecordsQueue.clear();\n}\n}", + "context_after": "class SparkConsumerWithOffset implements SparkConsumer {\nprivate final Queue recordsQueue;\nprivate @Nullable Receiver sparkReceiver;\nprivate final Long startOffset;\nSparkConsumerWithOffset(Long startOffset) {\nthis.startOffset = startOffset;\nthis.recordsQueue = new ConcurrentLinkedQueue<>();\n}\n@Override\npublic boolean hasRecords() {\nreturn !recordsQueue.isEmpty();\n}\n@Override\npublic @Nullable V poll() {\nreturn recordsQueue.poll();\n}\n@Override\n@Override\npublic void stop() {\nif (sparkReceiver != null) {\nsparkReceiver.stop(\"SparkReceiver is stopped.\");\n}\nrecordsQueue.clear();\n}\n}" + }, + { + "comment": "We should use the methods in TestUtils to load the project.", + "method_body": "private String readTomlContent(Path projectPath, boolean isSingleFileProject) {\nPath configTomlPath;\nif (isSingleFileProject) {\nconfigTomlPath = projectPath.getParent();\n} else {\nconfigTomlPath = projectPath;\n}\nreturn readFileContent(configTomlPath.resolve(CONFIGURATION_TOML));\n}", + "target_code": "Path configTomlPath;", + "method_body_after": "private String readTomlContent(Path projectPath, boolean isSingleFileProject) {\nPath configTomlPath;\nif (isSingleFileProject) {\nconfigTomlPath = projectPath.getParent();\n} else {\nconfigTomlPath = projectPath;\n}\nreturn readFileContent(configTomlPath.resolve(CONFIGURATION_TOML));\n}", + "context_before": "class ConfigSchemaGenTaskTest {\nprivate static final Path RESOURCES_DIR = Paths.get(\"src/test/resources/\").toAbsolutePath();\nprivate static final String BALLERINA_HOME_KEY = \"ballerina.home\";\n@Test(dataProvider = \"project-data-provider\")\npublic void testGeneratedSchema(String projectType, String projectName, boolean isSingleFileProject) {\nPath projectPath = RESOURCES_DIR.resolve(projectType).resolve(projectName);\nProject projectInstance = loadBuildProject(projectPath, isSingleFileProject);\nprojectInstance.currentPackage().getCompilation();\nTomlValidator tomlValidator = new TomlValidator(Schema.from(\nreadConfigJSONSchema(projectPath, 
isSingleFileProject)));\nTomlDocument configToml = TomlDocument.from(CONFIGURATION_TOML,\nreadTomlContent(projectPath, isSingleFileProject));\ntomlValidator.validate(configToml.toml());\nTomlTableNode tomlAstNode = configToml.toml().rootNode();\nif (!tomlAstNode.diagnostics().isEmpty()) {\nString errorMsg = \"\";\nfor (Diagnostic diagnostic : tomlAstNode.diagnostics()) {\nerrorMsg = errorMsg.concat(diagnostic.message() + \"\\n\");\n}\nAssert.fail(errorMsg);\n}\n}\n@DataProvider(name = \"project-data-provider\")\npublic Object[][] dpMethod() {\nreturn new Object[][]{{\"DefaultModuleProjects\", \"SimpleTypeConfigs\", false},\n{\"DefaultModuleProjects\", \"ComplexTypeConfigs\", false},\n{\"MultiModuleProjects\", \"SimpleTypeConfigs\", false},\n{\"SingleFileProject1\", \"testconfig.bal\", true}};\n}\nstatic Project loadBuildProject(Path projectPath, boolean isSingleFileProject) {\nSystem.setProperty(BALLERINA_HOME_KEY, System.getenv(BALLERINA_HOME_KEY));\nBuildOptions buildOptions = BuildOptions.builder().setOffline(true).build();\nif (isSingleFileProject) {\nreturn SingleFileProject.load(projectPath, buildOptions);\n} else {\nreturn BuildProject.load(projectPath, buildOptions);\n}\n}\n/**\n* Read config JSON schema content.\n*\n* @param path Path to read schema from\n* @param isSingleFileProject flag to indicate if single file project\n* @return JSON schema content as String\n*/\nprivate String readConfigJSONSchema(Path path, boolean isSingleFileProject) {\nPath targetPath;\nif (isSingleFileProject) {\ntargetPath = path.getParent();\n} else {\ntargetPath = path.resolve(ProjectConstants.TARGET_DIR_NAME);\n}\nreturn readFileContent(targetPath.resolve(ProjectConstants.BIN_DIR_NAME).\nresolve(CONFIG_SCHEMA));\n}\n/**\n* Read config toml content.\n*\n* @param projectPath Path to read toml from\n* @param isSingleFileProject flag to indicate if single file project\n* @return Config.toml content as String\n*/\n/**\n* Read content of given file.\n*\n* @param file Path to read content from\n* @return content as String\n*/\nprivate String readFileContent(Path file) {\nString content;\ntry {\ncontent = Files.readString(file);\n} catch (IOException ioException) {\nthrow new CompilerPluginException(\"Error occurred while reading the file \" + file.toString());\n}\nreturn content;\n}\n}", + "context_after": "class ConfigSchemaGenTaskTest {\nprivate static final Path RESOURCES_DIR = Paths.get(\"src/test/resources/\").toAbsolutePath();\nprivate static final String BALLERINA_HOME_KEY = \"ballerina.home\";\n@Test(dataProvider = \"project-data-provider\")\npublic void testAgainstToml(String projectType, String projectName, boolean isSingleFileProject) {\nPath projectPath = RESOURCES_DIR.resolve(projectType).resolve(projectName);\nProject projectInstance = loadBuildProject(projectPath, isSingleFileProject);\nprojectInstance.currentPackage().getCompilation();\nTomlValidator tomlValidator = new TomlValidator(Schema.from(\nreadConfigJSONSchema(projectPath, isSingleFileProject)));\nTomlDocument configToml = TomlDocument.from(CONFIGURATION_TOML,\nreadTomlContent(projectPath, isSingleFileProject));\ntomlValidator.validate(configToml.toml());\nTomlTableNode tomlAstNode = configToml.toml().rootNode();\nif (!tomlAstNode.diagnostics().isEmpty()) {\nString errorMsg = \"Test failed for project \" + projectPath + \"\\n\";\nfor (Diagnostic diagnostic : tomlAstNode.diagnostics()) {\nerrorMsg = errorMsg.concat(diagnostic.message() + \"\\n\");\n}\nAssert.fail(errorMsg);\n}\n}\n@Test(dataProvider = 
\"project-data-provider-for-schema-validation\")\npublic void testAgainstExpectedSchema(String projectType, String projectName, boolean isSingleFileProject) {\nPath projectPath = RESOURCES_DIR.resolve(projectType).resolve(projectName);\nProject projectInstance = loadBuildProject(projectPath, isSingleFileProject);\nprojectInstance.currentPackage().getCompilation();\nPath expectedSchemaPath = projectPath.resolve(\"expected-schema.json\");\nString errorMsg = \"Test failed for project \" + projectPath + \"\\nThe generated config-schema.json \" +\n\"does not match the expected.\";\nAssert.assertEquals(\nJsonParser.parseString((readConfigJSONSchema(projectPath, isSingleFileProject))),\nJsonParser.parseString(readFileContent(expectedSchemaPath)),\nerrorMsg);\n}\n@DataProvider(name = \"project-data-provider\")\npublic Object[][] dpMethod() {\nreturn new Object[][]{{\"DefaultModuleProjects\", \"SimpleTypeConfigs\", false},\n{\"DefaultModuleProjects\", \"ComplexTypeConfigs\", false},\n{\"MultiModuleProjects\", \"SimpleTypeConfigs\", false},\n{\"SingleFileProject\", \"testconfig.bal\", true}};\n}\n@DataProvider(name = \"project-data-provider-for-schema-validation\")\npublic Object[][] dpMethod2() {\nreturn new Object[][]{{\"DefaultModuleProjects\", \"ComplexTypeConfigs2\", false}};\n}\nstatic Project loadBuildProject(Path projectPath, boolean isSingleFileProject) {\nSystem.setProperty(BALLERINA_HOME_KEY, System.getenv(BALLERINA_HOME_KEY));\nBuildOptions buildOptions = BuildOptions.builder().setOffline(true).build();\nif (isSingleFileProject) {\nreturn SingleFileProject.load(projectPath, buildOptions);\n} else {\nreturn BuildProject.load(projectPath, buildOptions);\n}\n}\n/**\n* Read config JSON schema content.\n*\n* @param path Path to read schema from\n* @param isSingleFileProject flag to indicate if single file project\n* @return JSON schema content as String\n*/\nprivate String readConfigJSONSchema(Path path, boolean isSingleFileProject) {\nPath targetPath;\nif (isSingleFileProject) {\ntargetPath = Paths.get(System.getProperty(\"user.dir\"));\n} else {\ntargetPath = path.resolve(ProjectConstants.TARGET_DIR_NAME).resolve(ProjectConstants.BIN_DIR_NAME);\n}\nreturn readFileContent(targetPath.resolve(CONFIG_SCHEMA));\n}\n/**\n* Read config toml content.\n*\n* @param projectPath Path to read toml from\n* @param isSingleFileProject flag to indicate if single file project\n* @return Config.toml content as String\n*/\n/**\n* Read content of given file.\n*\n* @param file Path to read content from\n* @return content as String\n*/\nprivate String readFileContent(Path file) {\nString content;\ntry {\ncontent = Files.readString(file);\n} catch (IOException ioException) {\nthrow new CompilerPluginException(\"Error occurred while reading the file \" + file.toString());\n}\nreturn content;\n}\n}" + }, + { + "comment": "If we use `error` level, it will be prompted to the idea fatal error log and will be printed multiple times since we are retrying to connect.", + "method_body": "void createConnection() {\ntry {\nmyConnectionState = ConnectionState.CONNECTING;\nstreamConnectionProvider = createConnectionProvider(project);\nif (streamConnectionProvider == null) {\nLOG.warn(\"Unable to establish the socket connection provider.\");\nreturn;\n}\nstreamConnectionProvider.start();\nPair streams = new ImmutablePair<>(streamConnectionProvider.getInputStream(),\nstreamConnectionProvider.getOutputStream());\nInputStream inputStream = streams.getKey();\nOutputStream outputStream = streams.getValue();\nif (inputStream == null 
|| outputStream == null) {\nLOG.warn(\"Unable to establish connection with the debug server.\");\n}\ndebugClient = new DAPClient();\nLauncher clientLauncher = DSPLauncher.createClientLauncher(debugClient,\ninputStream, outputStream);\ndebugServer = clientLauncher.getRemoteProxy();\nlauncherFuture = clientLauncher.startListening();\nInitializeRequestArguments initParams = new InitializeRequestArguments();\ninitParams.setAdapterID(\"BallerinaDebugClient\");\ninitializeFuture = debugServer.initialize(initParams).thenApply(res -> {\ninitializeResult = res;\nLOG.info(\"Debug server initialize result received.\");\ndebugClient.initialized();\nrequestManager = new DAPRequestManager(this, debugClient, debugServer, initializeResult);\ndebugClient.connect(requestManager);\nmyConnectionState = ConnectionState.CONNECTED;\nreturn res;\n});\n} catch (IOException e) {\nmyConnectionState = ConnectionState.NOT_CONNECTED;\nLOG.warn(\"Connecting to the DAP server failed.\", e);\n}\n}", + "target_code": "LOG.warn(\"Connecting to the DAP server failed.\", e);", + "method_body_after": "void createConnection() {\ntry {\nmyConnectionState = ConnectionState.CONNECTING;\nstreamConnectionProvider = createConnectionProvider(project);\nif (streamConnectionProvider == null) {\nLOG.warn(\"Unable to establish the socket connection provider.\");\nreturn;\n}\nstreamConnectionProvider.start();\nPair streams = new ImmutablePair<>(streamConnectionProvider.getInputStream(),\nstreamConnectionProvider.getOutputStream());\nInputStream inputStream = streams.getKey();\nOutputStream outputStream = streams.getValue();\nif (inputStream == null || outputStream == null) {\nLOG.warn(\"Unable to establish connection with the debug server.\");\nreturn;\n}\ndebugClient = new DAPClient();\nLauncher clientLauncher = DSPLauncher.createClientLauncher(debugClient,\ninputStream, outputStream);\ndebugServer = clientLauncher.getRemoteProxy();\nlauncherFuture = clientLauncher.startListening();\nInitializeRequestArguments initParams = new InitializeRequestArguments();\ninitParams.setAdapterID(\"BallerinaDebugClient\");\ninitializeFuture = debugServer.initialize(initParams).thenApply(res -> {\ninitializeResult = res;\nLOG.info(\"initialize response received from the debug server.\");\ndebugClient.initialized();\nrequestManager = new DAPRequestManager(this, debugClient, debugServer, initializeResult);\ndebugClient.connect(requestManager);\nmyConnectionState = ConnectionState.CONNECTED;\nreturn res;\n});\n} catch (IOException e) {\nmyConnectionState = ConnectionState.NOT_CONNECTED;\nLOG.warn(\"Error occurred when trying to initialize connection with the debug server.\", e);\n}\n}", + "context_before": "class BallerinaDAPClientConnector {\nprivate static final Logger LOG = Logger.getInstance(BallerinaDAPClientConnector.class);\nprivate BallerinaDebugProcess context;\nprivate Project project;\nprivate String host;\nprivate int port;\nprivate DAPClient debugClient;\nprivate IDebugProtocolServer debugServer;\nprivate DAPRequestManager requestManager;\nprivate BallerinaStreamConnectionProvider streamConnectionProvider;\nprivate Future launcherFuture;\nprivate CompletableFuture initializeFuture;\nprivate Capabilities initializeResult;\nprivate ConnectionState myConnectionState;\nprivate static final int DEBUG_ADAPTOR_PORT = 4711;\nprivate static final String CONFIG_SOURCEROOT = \"sourceRoot\";\nprivate static final String CONFIG_DEBUGEE_PORT = \"debuggeePort\";\npublic BallerinaDAPClientConnector(@NotNull Project project, @NotNull String host, int port) 
{\nthis.project = project;\nthis.host = host;\nthis.port = port;\nmyConnectionState = ConnectionState.NOT_CONNECTED;\n}\npublic DAPRequestManager getRequestManager() {\nreturn requestManager;\n}\npublic Project getProject() {\nreturn project;\n}\npublic int getPort() {\nreturn port;\n}\npublic BallerinaDebugProcess getContext() {\nreturn context;\n}\npublic void setContext(BallerinaDebugProcess context) {\nthis.context = context;\n}\n@NotNull\npublic String getAddress() {\nreturn String.format(\"host:%s and port: %d\", host, port);\n}\nvoid attachToServer() {\nMap requestArgs = new HashMap<>();\nrequestArgs.put(CONFIG_SOURCEROOT, project.getBasePath());\nrequestArgs.put(CONFIG_DEBUGEE_PORT, Integer.toString(port));\ntry {\nrequestManager.attach(requestArgs);\n} catch (Exception e) {\nLOG.warn(\"Attaching to the debug adapter failed\", e);\n}\n}\nvoid disconnectFromServer() throws Exception {\ntry {\nDisconnectArguments disconnectArgs = new DisconnectArguments();\ndisconnectArgs.setTerminateDebuggee(false);\nrequestManager.disconnect(disconnectArgs);\nstop();\n} catch (Exception e) {\nLOG.warn(\"Disconnecting from the debug adapter failed\", e);\nthrow e;\n}\n}\npublic boolean isConnected() {\nreturn debugClient != null && launcherFuture != null && !launcherFuture.isDone()\n&& !launcherFuture.isCancelled() && myConnectionState == ConnectionState.CONNECTED;\n}\nvoid stop() {\nstreamConnectionProvider.stop();\nmyConnectionState = ConnectionState.NOT_CONNECTED;\n}\nString getState() {\nif (myConnectionState == ConnectionState.NOT_CONNECTED) {\nreturn \"Not connected. Waiting for a connection.\";\n} else if (myConnectionState == ConnectionState.CONNECTED) {\nreturn \"Connected to \" + getAddress() + \".\";\n} else if (myConnectionState == ConnectionState.DISCONNECTED) {\nreturn \"Disconnected.\";\n} else if (myConnectionState == ConnectionState.CONNECTING) {\nreturn \"Connecting to \" + getAddress() + \".\";\n}\nreturn \"Unknown\";\n}\nprivate enum ConnectionState {\nNOT_CONNECTED, CONNECTING, CONNECTED, DISCONNECTED\n}\nprivate BallerinaStreamConnectionProvider createConnectionProvider(Project project) {\nString debugLauncherPath = \"\";\nString os = OSUtils.getOperatingSystem();\nif (os != null) {\nString balSdkPath = BallerinaSdkUtils.getBallerinaSdkFor(project).getSdkPath();\nif (balSdkPath == null) {\nLOG.warn(String.format(\"Couldn't find ballerina SDK for the project%sto start debug server.\",\nproject.getName()));\nreturn null;\n}\nif (os.equals(OSUtils.UNIX) || os.equals(OSUtils.MAC)) {\ndebugLauncherPath = Paths.get(balSdkPath, BALLERINA_DEBUG_LAUNCHER_PATH,\nBALLERINA_DEBUG_LAUNCHER_NAME + \".sh\").toString();\n} else if (os.equals(OSUtils.WINDOWS)) {\ndebugLauncherPath = Paths.get(balSdkPath, BALLERINA_DEBUG_LAUNCHER_PATH,\nBALLERINA_DEBUG_LAUNCHER_NAME + \".bat\").toString();\n}\n}\nreturn !debugLauncherPath.isEmpty() ? 
new BallerinaSocketStreamConnectionProvider(\nnew ArrayList<>(Collections.singleton(debugLauncherPath)), project.getBasePath(), host,\nDEBUG_ADAPTOR_PORT) : null;\n}\n}", + "context_after": "class BallerinaDAPClientConnector {\nprivate static final Logger LOG = Logger.getInstance(BallerinaDAPClientConnector.class);\nprivate BallerinaDebugProcess context;\nprivate Project project;\nprivate String host;\nprivate int port;\nprivate DAPClient debugClient;\nprivate IDebugProtocolServer debugServer;\nprivate DAPRequestManager requestManager;\nprivate BallerinaStreamConnectionProvider streamConnectionProvider;\nprivate Future launcherFuture;\nprivate CompletableFuture initializeFuture;\nprivate Capabilities initializeResult;\nprivate ConnectionState myConnectionState;\nprivate static final int DEBUG_ADAPTOR_PORT = 4711;\nprivate static final String CONFIG_SOURCEROOT = \"sourceRoot\";\nprivate static final String CONFIG_DEBUGEE_PORT = \"debuggeePort\";\npublic BallerinaDAPClientConnector(@NotNull Project project, @NotNull String host, int port) {\nthis.project = project;\nthis.host = host;\nthis.port = port;\nmyConnectionState = ConnectionState.NOT_CONNECTED;\n}\npublic DAPRequestManager getRequestManager() {\nreturn requestManager;\n}\npublic Project getProject() {\nreturn project;\n}\npublic int getPort() {\nreturn port;\n}\npublic BallerinaDebugProcess getContext() {\nreturn context;\n}\npublic void setContext(BallerinaDebugProcess context) {\nthis.context = context;\n}\n@NotNull\npublic String getAddress() {\nreturn String.format(\"host:%s and port: %d\", host, port);\n}\nvoid attachToServer() {\nMap requestArgs = new HashMap<>();\nrequestArgs.put(CONFIG_SOURCEROOT, project.getBasePath());\nrequestArgs.put(CONFIG_DEBUGEE_PORT, Integer.toString(port));\ntry {\nrequestManager.attach(requestArgs);\n} catch (Exception e) {\nLOG.warn(\"Attaching to the debug adapter failed\", e);\n}\n}\nvoid disconnectFromServer() throws Exception {\ntry {\nDisconnectArguments disconnectArgs = new DisconnectArguments();\ndisconnectArgs.setTerminateDebuggee(false);\nrequestManager.disconnect(disconnectArgs);\nstop();\n} catch (Exception e) {\nLOG.warn(\"Disconnecting from the debug adapter failed\", e);\nthrow e;\n}\n}\npublic boolean isConnected() {\nreturn debugClient != null && launcherFuture != null && !launcherFuture.isDone()\n&& !launcherFuture.isCancelled() && myConnectionState == ConnectionState.CONNECTED;\n}\nvoid stop() {\nstreamConnectionProvider.stop();\nmyConnectionState = ConnectionState.NOT_CONNECTED;\n}\nString getState() {\nif (myConnectionState == ConnectionState.NOT_CONNECTED) {\nreturn \"Not connected. 
Waiting for a connection.\";\n} else if (myConnectionState == ConnectionState.CONNECTED) {\nreturn \"Connected to \" + getAddress() + \".\";\n} else if (myConnectionState == ConnectionState.DISCONNECTED) {\nreturn \"Disconnected.\";\n} else if (myConnectionState == ConnectionState.CONNECTING) {\nreturn \"Connecting to \" + getAddress() + \".\";\n}\nreturn \"Unknown\";\n}\nprivate enum ConnectionState {\nNOT_CONNECTED, CONNECTING, CONNECTED, DISCONNECTED\n}\nprivate BallerinaStreamConnectionProvider createConnectionProvider(Project project) {\nString debugLauncherPath = \"\";\nString os = OSUtils.getOperatingSystem();\nif (os != null) {\nString balSdkPath = BallerinaSdkUtils.getBallerinaSdkFor(project).getSdkPath();\nif (balSdkPath == null) {\nLOG.warn(String.format(\"Couldn't find ballerina SDK for the project%sto start debug server.\",\nproject.getName()));\nreturn null;\n}\nif (os.equals(OSUtils.UNIX) || os.equals(OSUtils.MAC)) {\ndebugLauncherPath = Paths.get(balSdkPath, BALLERINA_DEBUG_LAUNCHER_PATH,\nBALLERINA_DEBUG_LAUNCHER_NAME + \".sh\").toString();\n} else if (os.equals(OSUtils.WINDOWS)) {\ndebugLauncherPath = Paths.get(balSdkPath, BALLERINA_DEBUG_LAUNCHER_PATH,\nBALLERINA_DEBUG_LAUNCHER_NAME + \".bat\").toString();\n}\n}\nreturn !debugLauncherPath.isEmpty() ? new BallerinaSocketStreamConnectionProvider(\nnew ArrayList<>(Collections.singleton(debugLauncherPath)), project.getBasePath(), host,\nDEBUG_ADAPTOR_PORT) : null;\n}\n}" + }, + { + "comment": "Maybe it can be simplified as ```java assertThat(res).hasSize(1).containsEntry(\"LD_LIBRARY_PATH\", \"/usr/lib/native\") ```", + "method_body": "void testGetEnvironmentVariables() {\nConfiguration testConf = new Configuration();\ntestConf.setString(\"containerized.master.env.LD_LIBRARY_PATH\", \"/usr/lib/native\");\nMap res =\nConfigurationUtils.getPrefixedKeyValuePairs(\"containerized.master.env.\", testConf);\nassertThat(res).hasSize(1);\nMap.Entry entry = res.entrySet().iterator().next();\nassertThat(entry.getKey()).isEqualTo(\"LD_LIBRARY_PATH\");\nassertThat(entry.getValue()).isEqualTo(\"/usr/lib/native\");\n}", + "target_code": "assertThat(entry.getValue()).isEqualTo(\"/usr/lib/native\");", + "method_body_after": "void testGetEnvironmentVariables() {\nConfiguration testConf = new Configuration();\ntestConf.setString(\"containerized.master.env.LD_LIBRARY_PATH\", \"/usr/lib/native\");\nMap res =\nConfigurationUtils.getPrefixedKeyValuePairs(\"containerized.master.env.\", testConf);\nassertThat(res).hasSize(1).containsEntry(\"LD_LIBRARY_PATH\", \"/usr/lib/native\");\n}", + "context_before": "class BootstrapToolsTest {\n@TempDir private static java.nio.file.Path temporaryFolder;\n@Test\nvoid testSubstituteConfigKey() {\nString deprecatedKey1 = \"deprecated-key\";\nString deprecatedKey2 = \"another-out_of-date_key\";\nString deprecatedKey3 = \"yet-one-more\";\nString designatedKey1 = \"newkey1\";\nString designatedKey2 = \"newKey2\";\nString designatedKey3 = \"newKey3\";\nString value1 = \"value1\";\nString value2Designated = \"designated-value2\";\nString value2Deprecated = \"deprecated-value2\";\nConfiguration cfg = new Configuration();\ncfg.setString(deprecatedKey1, value1);\ncfg.setString(deprecatedKey2, value2Deprecated);\ncfg.setString(designatedKey2, value2Designated);\nBootstrapTools.substituteDeprecatedConfigKey(cfg, deprecatedKey1, designatedKey1);\nBootstrapTools.substituteDeprecatedConfigKey(cfg, deprecatedKey2, designatedKey2);\nBootstrapTools.substituteDeprecatedConfigKey(cfg, deprecatedKey3, 
designatedKey3);\nassertThat(cfg.getString(designatedKey1, null)).isEqualTo(value1);\nassertThat(cfg.getString(designatedKey2, null)).isEqualTo(value2Designated);\nassertThat(cfg.getString(designatedKey3, null)).isNull();\nassertThat(cfg.getString(deprecatedKey3, null)).isNull();\n}\n@Test\nvoid testSubstituteConfigKeyPrefix() {\nString deprecatedPrefix1 = \"deprecated-prefix\";\nString deprecatedPrefix2 = \"-prefix-2\";\nString deprecatedPrefix3 = \"prefix-3\";\nString designatedPrefix1 = \"p1\";\nString designatedPrefix2 = \"ppp\";\nString designatedPrefix3 = \"zzz\";\nString depr1 = deprecatedPrefix1 + \"var\";\nString depr2 = deprecatedPrefix2 + \"env\";\nString depr3 = deprecatedPrefix2 + \"x\";\nString desig1 = designatedPrefix1 + \"var\";\nString desig2 = designatedPrefix2 + \"env\";\nString desig3 = designatedPrefix2 + \"x\";\nString val1 = \"1\";\nString val2 = \"2\";\nString val3Depr = \"3-\";\nString val3Desig = \"3+\";\nConfiguration cfg = new Configuration();\ncfg.setString(depr1, val1);\ncfg.setString(depr2, val2);\ncfg.setString(depr3, val3Depr);\ncfg.setString(desig3, val3Desig);\nBootstrapTools.substituteDeprecatedConfigPrefix(cfg, deprecatedPrefix1, designatedPrefix1);\nBootstrapTools.substituteDeprecatedConfigPrefix(cfg, deprecatedPrefix2, designatedPrefix2);\nBootstrapTools.substituteDeprecatedConfigPrefix(cfg, deprecatedPrefix3, designatedPrefix3);\nassertThat(cfg.getString(desig1, null)).isEqualTo(val1);\nassertThat(cfg.getString(desig2, null)).isEqualTo(val2);\nassertThat(cfg.getString(desig3, null)).isEqualTo(val3Desig);\nfor (String key : cfg.keySet()) {\nassertThat(key.startsWith(designatedPrefix3)).isFalse();\nassertThat(key.startsWith(deprecatedPrefix3)).isFalse();\n}\n}\n@Test\nvoid testGetTaskManagerShellCommand() {\nfinal Configuration cfg = new Configuration();\nfinal TaskExecutorProcessSpec taskExecutorProcessSpec =\nnew TaskExecutorProcessSpec(\nnew CPUResource(1.0),\nnew MemorySize(0),\nnew MemorySize(0),\nnew MemorySize(111),\nnew MemorySize(0),\nnew MemorySize(222),\nnew MemorySize(0),\nnew MemorySize(333),\nnew MemorySize(0),\nCollections.emptyList());\nfinal ContaineredTaskManagerParameters containeredParams =\nnew ContaineredTaskManagerParameters(taskExecutorProcessSpec, new HashMap<>());\nfinal String java = \"$JAVA_HOME/bin/java\";\nfinal String jvmmem =\n\"-Xmx111 -Xms111 -XX:MaxDirectMemorySize=222 -XX:MaxMetaspaceSize=333\";\nfinal String jvmOpts = \"-Djvm\";\nfinal String tmJvmOpts = \"-DtmJvm\";\nfinal String logfile = \"-Dlog.file=./logs/taskmanager.log\";\nfinal String logback = \"-Dlogback.configurationFile=file:./conf/logback.xml\";\nfinal String log4j =\n\"-Dlog4j.configuration=file:./conf/log4j.properties\"\n+ \" -Dlog4j.configurationFile=file:./conf/log4j.properties\";\nfinal String mainClass = \"org.apache.flink.runtime.clusterframework.BootstrapToolsTest\";\nfinal String dynamicConfigs =\nTaskExecutorProcessUtils.generateDynamicConfigsStr(taskExecutorProcessSpec).trim();\nfinal String basicArgs = \"--configDir ./conf\";\nfinal String mainArgs = \"-Djobmanager.rpc.address=host1 -Dkey.a=v1\";\nfinal String args = dynamicConfigs + \" \" + basicArgs + \" \" + mainArgs;\nfinal String redirects = \"1> ./logs/taskmanager.out 2> ./logs/taskmanager.err\";\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\nfalse,\nfalse,\nfalse,\nthis.getClass(),\n\"\"))\n.isEqualTo(\nString.join(\n\" 
\",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nmainClass,\ndynamicConfigs,\nbasicArgs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\nfalse,\nfalse,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nmainClass,\nargs,\nredirects));\nfinal String krb5 = \"-Djava.security.krb5.conf=krb5.conf\";\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\nfalse,\nfalse,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\nfalse,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nlogfile,\nlogback,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\nfalse,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nlogfile,\nlogback,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\nfalse,\ntrue,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nlogfile,\nlog4j,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\nfalse,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nlogfile,\nlog4j,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\ncfg.setString(CoreOptions.FLINK_JVM_OPTIONS, jvmOpts);\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\njvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\njvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\ncfg.setString(CoreOptions.FLINK_TM_JVM_OPTIONS, 
tmJvmOpts);\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\njvmOpts,\ntmJvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\njvmOpts,\ntmJvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\ncfg.setString(\nConfigConstants.YARN_CONTAINER_START_COMMAND_TEMPLATE,\n\"%java% 1 %jvmmem% 2 %jvmopts% 3 %logging% 4 %class% 5 %args% 6 %redirects%\");\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\n\"1\",\njvmmem,\n\"2\",\njvmOpts,\ntmJvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\n\"3\",\nlogfile,\nlogback,\nlog4j,\n\"4\",\nmainClass,\n\"5\",\nargs,\n\"6\",\nredirects));\ncfg.setString(\nConfigConstants.YARN_CONTAINER_START_COMMAND_TEMPLATE,\n\"%java% %logging% %jvmopts% %jvmmem% %class% %args% %redirects%\");\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\nlogfile,\nlogback,\nlog4j,\njvmOpts,\ntmJvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\njvmmem,\nmainClass,\nargs,\nredirects));\n}\n@Test\nvoid testUpdateTmpDirectoriesInConfiguration() {\nConfiguration config = new Configuration();\nBootstrapTools.updateTmpDirectoriesInConfiguration(config, \"default/directory/path\");\nassertThat(config.getString(CoreOptions.TMP_DIRS)).isEqualTo(\"default/directory/path\");\nBootstrapTools.updateTmpDirectoriesInConfiguration(config, \"not/default/directory/path\");\nassertThat(config.getString(CoreOptions.TMP_DIRS)).isEqualTo(\"default/directory/path\");\nconfig.setString(CoreOptions.TMP_DIRS, \"\");\nBootstrapTools.updateTmpDirectoriesInConfiguration(config, \"some/new/path\");\nassertThat(config.getString(CoreOptions.TMP_DIRS)).isEmpty();\n}\n@Test\nvoid testShouldNotUpdateTmpDirectoriesInConfigurationIfNoValueConfigured() {\nConfiguration config = new Configuration();\nBootstrapTools.updateTmpDirectoriesInConfiguration(config, null);\nassertThat(CoreOptions.TMP_DIRS.defaultValue())\n.isEqualTo(config.getString(CoreOptions.TMP_DIRS));\n}\n@Test\nvoid testGetDynamicPropertiesAsString() {\nfinal Configuration baseConfig = new Configuration();\nbaseConfig.setString(\"key.a\", \"a\");\nbaseConfig.setString(\"key.b\", \"b1\");\nfinal Configuration targetConfig = new Configuration();\ntargetConfig.setString(\"key.b\", \"b2\");\ntargetConfig.setString(\"key.c\", \"c\");\nfinal String dynamicProperties =\nBootstrapTools.getDynamicPropertiesAsString(baseConfig, targetConfig);\nif (OperatingSystem.isWindows()) {\nassertThat(dynamicProperties).isEqualTo(\"-Dkey.b=\\\"b2\\\" -Dkey.c=\\\"c\\\"\");\n} else {\nassertThat(dynamicProperties).isEqualTo(\"-Dkey.b='b2' -Dkey.c='c'\");\n}\n}\n@Test\nvoid testEscapeDynamicPropertyValueWithSingleQuote() {\nfinal String value1 = \"\nassertThat(BootstrapTools.escapeWithSingleQuote(value1)).isEqualTo(\"'\" + value1 + \"'\");\nfinal String 
value2 = \"'foobar\";\nassertThat(BootstrapTools.escapeWithSingleQuote(value2)).isEqualTo(\"''\\\\''foobar'\");\nfinal String value3 = \"foo''bar\";\nassertThat(BootstrapTools.escapeWithSingleQuote(value3)).isEqualTo(\"'foo'\\\\'''\\\\''bar'\");\nfinal String value4 = \"'foo' 'bar'\";\nassertThat(BootstrapTools.escapeWithSingleQuote(value4))\n.isEqualTo(\"''\\\\''foo'\\\\'' '\\\\''bar'\\\\'''\");\n}\n@Test\nvoid testEscapeDynamicPropertyValueWithDoubleQuote() {\nfinal String value1 = \"\nassertThat(BootstrapTools.escapeWithDoubleQuote(value1))\n.isEqualTo(\"\\\"\nfinal String value2 = \"foo\\\"bar'\";\nassertThat(BootstrapTools.escapeWithDoubleQuote(value2)).isEqualTo(\"\\\"foo\\\\\\\"bar'\\\"\");\nfinal String value3 = \"\\\"foo\\\" \\\"bar\\\"\";\nassertThat(BootstrapTools.escapeWithDoubleQuote(value3))\n.isEqualTo(\"\\\"\\\\\\\"foo\\\\\\\" \\\\\\\"bar\\\\\\\"\\\"\");\n}\n@Test\n@Test\nvoid testGetEnvironmentVariablesErroneous() {\nConfiguration testConf = new Configuration();\ntestConf.setString(\"containerized.master.env.\", \"/usr/lib/native\");\nMap res =\nConfigurationUtils.getPrefixedKeyValuePairs(\"containerized.master.env.\", testConf);\nassertThat(res).isEmpty();\n}\n@Test\nvoid testWriteConfigurationAndReload() throws IOException {\nfinal File flinkConfDir = TempDirUtils.newFolder(temporaryFolder).getAbsoluteFile();\nfinal Configuration flinkConfig = new Configuration();\nfinal ConfigOption> listStringConfigOption =\nConfigOptions.key(\"test-list-string-key\").stringType().asList().noDefaultValue();\nfinal List list =\nArrays.asList(\"A,B,C,D\", \"A'B'C'D\", \"A;BCD\", \"AB\\\"C\\\"D\", \"AB'\\\"D:B\");\nflinkConfig.set(listStringConfigOption, list);\nassertThat(flinkConfig.get(listStringConfigOption))\n.containsAnyOf(list.toArray(new String[0]));\nfinal ConfigOption> listDurationConfigOption =\nConfigOptions.key(\"test-list-duration-key\")\n.durationType()\n.asList()\n.noDefaultValue();\nfinal List durationList =\nArrays.asList(Duration.ofSeconds(3), Duration.ofMinutes(1));\nflinkConfig.set(listDurationConfigOption, durationList);\nassertThat(flinkConfig.get(listDurationConfigOption))\n.containsAnyOf(durationList.toArray(new Duration[0]));\nfinal ConfigOption> mapConfigOption =\nConfigOptions.key(\"test-map-key\").mapType().noDefaultValue();\nfinal Map map = new HashMap<>();\nmap.put(\"key1\", \"A,B,C,D\");\nmap.put(\"key2\", \"A;BCD\");\nmap.put(\"key3\", \"A'B'C'D\");\nmap.put(\"key4\", \"AB\\\"C\\\"D\");\nmap.put(\"key5\", \"AB'\\\"D:B\");\nflinkConfig.set(mapConfigOption, map);\nassertThat(flinkConfig.get(mapConfigOption)).containsAllEntriesOf(map);\nfinal ConfigOption durationConfigOption =\nConfigOptions.key(\"test-duration-key\").durationType().noDefaultValue();\nfinal Duration duration = Duration.ofMillis(3000);\nflinkConfig.set(durationConfigOption, duration);\nassertThat(flinkConfig.get(durationConfigOption)).isEqualTo(duration);\nBootstrapTools.writeConfiguration(flinkConfig, new File(flinkConfDir, FLINK_CONF_FILENAME));\nfinal Configuration loadedFlinkConfig =\nGlobalConfiguration.loadConfiguration(flinkConfDir.getAbsolutePath());\nassertThat(loadedFlinkConfig.get(listStringConfigOption))\n.contains(list.toArray((new String[0])));\nassertThat(loadedFlinkConfig.get(listDurationConfigOption))\n.contains(durationList.toArray((new Duration[0])));\nassertThat(loadedFlinkConfig.get(mapConfigOption)).containsAllEntriesOf(map);\nassertThat(loadedFlinkConfig.get(durationConfigOption)).isEqualTo(duration);\n}\n}", + "context_after": "class BootstrapToolsTest {\n@TempDir 
private static java.nio.file.Path temporaryFolder;\n@Test\nvoid testSubstituteConfigKey() {\nString deprecatedKey1 = \"deprecated-key\";\nString deprecatedKey2 = \"another-out_of-date_key\";\nString deprecatedKey3 = \"yet-one-more\";\nString designatedKey1 = \"newkey1\";\nString designatedKey2 = \"newKey2\";\nString designatedKey3 = \"newKey3\";\nString value1 = \"value1\";\nString value2Designated = \"designated-value2\";\nString value2Deprecated = \"deprecated-value2\";\nConfiguration cfg = new Configuration();\ncfg.setString(deprecatedKey1, value1);\ncfg.setString(deprecatedKey2, value2Deprecated);\ncfg.setString(designatedKey2, value2Designated);\nBootstrapTools.substituteDeprecatedConfigKey(cfg, deprecatedKey1, designatedKey1);\nBootstrapTools.substituteDeprecatedConfigKey(cfg, deprecatedKey2, designatedKey2);\nBootstrapTools.substituteDeprecatedConfigKey(cfg, deprecatedKey3, designatedKey3);\nassertThat(cfg.getString(designatedKey1, null)).isEqualTo(value1);\nassertThat(cfg.getString(designatedKey2, null)).isEqualTo(value2Designated);\nassertThat(cfg.getString(designatedKey3, null)).isNull();\nassertThat(cfg.getString(deprecatedKey3, null)).isNull();\n}\n@Test\nvoid testSubstituteConfigKeyPrefix() {\nString deprecatedPrefix1 = \"deprecated-prefix\";\nString deprecatedPrefix2 = \"-prefix-2\";\nString deprecatedPrefix3 = \"prefix-3\";\nString designatedPrefix1 = \"p1\";\nString designatedPrefix2 = \"ppp\";\nString designatedPrefix3 = \"zzz\";\nString depr1 = deprecatedPrefix1 + \"var\";\nString depr2 = deprecatedPrefix2 + \"env\";\nString depr3 = deprecatedPrefix2 + \"x\";\nString desig1 = designatedPrefix1 + \"var\";\nString desig2 = designatedPrefix2 + \"env\";\nString desig3 = designatedPrefix2 + \"x\";\nString val1 = \"1\";\nString val2 = \"2\";\nString val3Depr = \"3-\";\nString val3Desig = \"3+\";\nConfiguration cfg = new Configuration();\ncfg.setString(depr1, val1);\ncfg.setString(depr2, val2);\ncfg.setString(depr3, val3Depr);\ncfg.setString(desig3, val3Desig);\nBootstrapTools.substituteDeprecatedConfigPrefix(cfg, deprecatedPrefix1, designatedPrefix1);\nBootstrapTools.substituteDeprecatedConfigPrefix(cfg, deprecatedPrefix2, designatedPrefix2);\nBootstrapTools.substituteDeprecatedConfigPrefix(cfg, deprecatedPrefix3, designatedPrefix3);\nassertThat(cfg.getString(desig1, null)).isEqualTo(val1);\nassertThat(cfg.getString(desig2, null)).isEqualTo(val2);\nassertThat(cfg.getString(desig3, null)).isEqualTo(val3Desig);\nfor (String key : cfg.keySet()) {\nassertThat(key.startsWith(designatedPrefix3)).isFalse();\nassertThat(key.startsWith(deprecatedPrefix3)).isFalse();\n}\n}\n@Test\nvoid testGetTaskManagerShellCommand() {\nfinal Configuration cfg = new Configuration();\nfinal TaskExecutorProcessSpec taskExecutorProcessSpec =\nnew TaskExecutorProcessSpec(\nnew CPUResource(1.0),\nnew MemorySize(0),\nnew MemorySize(0),\nnew MemorySize(111),\nnew MemorySize(0),\nnew MemorySize(222),\nnew MemorySize(0),\nnew MemorySize(333),\nnew MemorySize(0),\nCollections.emptyList());\nfinal ContaineredTaskManagerParameters containeredParams =\nnew ContaineredTaskManagerParameters(taskExecutorProcessSpec, new HashMap<>());\nfinal String java = \"$JAVA_HOME/bin/java\";\nfinal String jvmmem =\n\"-Xmx111 -Xms111 -XX:MaxDirectMemorySize=222 -XX:MaxMetaspaceSize=333\";\nfinal String jvmOpts = \"-Djvm\";\nfinal String tmJvmOpts = \"-DtmJvm\";\nfinal String logfile = \"-Dlog.file=./logs/taskmanager.log\";\nfinal String logback = \"-Dlogback.configurationFile=file:./conf/logback.xml\";\nfinal String log4j 
=\n\"-Dlog4j.configuration=file:./conf/log4j.properties\"\n+ \" -Dlog4j.configurationFile=file:./conf/log4j.properties\";\nfinal String mainClass = \"org.apache.flink.runtime.clusterframework.BootstrapToolsTest\";\nfinal String dynamicConfigs =\nTaskExecutorProcessUtils.generateDynamicConfigsStr(taskExecutorProcessSpec).trim();\nfinal String basicArgs = \"--configDir ./conf\";\nfinal String mainArgs = \"-Djobmanager.rpc.address=host1 -Dkey.a=v1\";\nfinal String args = dynamicConfigs + \" \" + basicArgs + \" \" + mainArgs;\nfinal String redirects = \"1> ./logs/taskmanager.out 2> ./logs/taskmanager.err\";\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\nfalse,\nfalse,\nfalse,\nthis.getClass(),\n\"\"))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nmainClass,\ndynamicConfigs,\nbasicArgs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\nfalse,\nfalse,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nmainClass,\nargs,\nredirects));\nfinal String krb5 = \"-Djava.security.krb5.conf=krb5.conf\";\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\nfalse,\nfalse,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\nfalse,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nlogfile,\nlogback,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\nfalse,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nlogfile,\nlogback,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\nfalse,\ntrue,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nlogfile,\nlog4j,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\nfalse,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nlogfile,\nlog4j,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\ncfg.setString(CoreOptions.FLINK_JVM_OPTIONS, 
jvmOpts);\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\njvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\njvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\ncfg.setString(CoreOptions.FLINK_TM_JVM_OPTIONS, tmJvmOpts);\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\nfalse,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\njvmOpts,\ntmJvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\njvmmem,\njvmOpts,\ntmJvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\nlogfile,\nlogback,\nlog4j,\nmainClass,\nargs,\nredirects));\ncfg.setString(\nConfigConstants.YARN_CONTAINER_START_COMMAND_TEMPLATE,\n\"%java% 1 %jvmmem% 2 %jvmopts% 3 %logging% 4 %class% 5 %args% 6 %redirects%\");\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\n\"1\",\njvmmem,\n\"2\",\njvmOpts,\ntmJvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\n\"3\",\nlogfile,\nlogback,\nlog4j,\n\"4\",\nmainClass,\n\"5\",\nargs,\n\"6\",\nredirects));\ncfg.setString(\nConfigConstants.YARN_CONTAINER_START_COMMAND_TEMPLATE,\n\"%java% %logging% %jvmopts% %jvmmem% %class% %args% %redirects%\");\nassertThat(\nBootstrapTools.getTaskManagerShellCommand(\ncfg,\ncontaineredParams,\n\"./conf\",\n\"./logs\",\ntrue,\ntrue,\ntrue,\nthis.getClass(),\nmainArgs))\n.isEqualTo(\nString.join(\n\" \",\njava,\nlogfile,\nlogback,\nlog4j,\njvmOpts,\ntmJvmOpts,\nBootstrapTools.IGNORE_UNRECOGNIZED_VM_OPTIONS,\nkrb5,\njvmmem,\nmainClass,\nargs,\nredirects));\n}\n@Test\nvoid testUpdateTmpDirectoriesInConfiguration() {\nConfiguration config = new Configuration();\nBootstrapTools.updateTmpDirectoriesInConfiguration(config, \"default/directory/path\");\nassertThat(config.getString(CoreOptions.TMP_DIRS)).isEqualTo(\"default/directory/path\");\nBootstrapTools.updateTmpDirectoriesInConfiguration(config, \"not/default/directory/path\");\nassertThat(config.getString(CoreOptions.TMP_DIRS)).isEqualTo(\"default/directory/path\");\nconfig.setString(CoreOptions.TMP_DIRS, \"\");\nBootstrapTools.updateTmpDirectoriesInConfiguration(config, \"some/new/path\");\nassertThat(config.getString(CoreOptions.TMP_DIRS)).isEmpty();\n}\n@Test\nvoid testShouldNotUpdateTmpDirectoriesInConfigurationIfNoValueConfigured() {\nConfiguration config = new Configuration();\nBootstrapTools.updateTmpDirectoriesInConfiguration(config, null);\nassertThat(CoreOptions.TMP_DIRS.defaultValue())\n.isEqualTo(config.getString(CoreOptions.TMP_DIRS));\n}\n@Test\nvoid testGetDynamicPropertiesAsString() {\nfinal Configuration baseConfig = new 
Configuration();\nbaseConfig.setString(\"key.a\", \"a\");\nbaseConfig.setString(\"key.b\", \"b1\");\nfinal Configuration targetConfig = new Configuration();\ntargetConfig.setString(\"key.b\", \"b2\");\ntargetConfig.setString(\"key.c\", \"c\");\nfinal String dynamicProperties =\nBootstrapTools.getDynamicPropertiesAsString(baseConfig, targetConfig);\nif (OperatingSystem.isWindows()) {\nassertThat(dynamicProperties).isEqualTo(\"-Dkey.b=\\\"b2\\\" -Dkey.c=\\\"c\\\"\");\n} else {\nassertThat(dynamicProperties).isEqualTo(\"-Dkey.b='b2' -Dkey.c='c'\");\n}\n}\n@Test\nvoid testEscapeDynamicPropertyValueWithSingleQuote() {\nfinal String value1 = \"\nassertThat(BootstrapTools.escapeWithSingleQuote(value1)).isEqualTo(\"'\" + value1 + \"'\");\nfinal String value2 = \"'foobar\";\nassertThat(BootstrapTools.escapeWithSingleQuote(value2)).isEqualTo(\"''\\\\''foobar'\");\nfinal String value3 = \"foo''bar\";\nassertThat(BootstrapTools.escapeWithSingleQuote(value3)).isEqualTo(\"'foo'\\\\'''\\\\''bar'\");\nfinal String value4 = \"'foo' 'bar'\";\nassertThat(BootstrapTools.escapeWithSingleQuote(value4))\n.isEqualTo(\"''\\\\''foo'\\\\'' '\\\\''bar'\\\\'''\");\n}\n@Test\nvoid testEscapeDynamicPropertyValueWithDoubleQuote() {\nfinal String value1 = \"\nassertThat(BootstrapTools.escapeWithDoubleQuote(value1))\n.isEqualTo(\"\\\"\nfinal String value2 = \"foo\\\"bar'\";\nassertThat(BootstrapTools.escapeWithDoubleQuote(value2)).isEqualTo(\"\\\"foo\\\\\\\"bar'\\\"\");\nfinal String value3 = \"\\\"foo\\\" \\\"bar\\\"\";\nassertThat(BootstrapTools.escapeWithDoubleQuote(value3))\n.isEqualTo(\"\\\"\\\\\\\"foo\\\\\\\" \\\\\\\"bar\\\\\\\"\\\"\");\n}\n@Test\n@Test\nvoid testGetEnvironmentVariablesErroneous() {\nConfiguration testConf = new Configuration();\ntestConf.setString(\"containerized.master.env.\", \"/usr/lib/native\");\nMap res =\nConfigurationUtils.getPrefixedKeyValuePairs(\"containerized.master.env.\", testConf);\nassertThat(res).isEmpty();\n}\n@Test\nvoid testWriteConfigurationAndReload() throws IOException {\nfinal File flinkConfDir = TempDirUtils.newFolder(temporaryFolder).getAbsoluteFile();\nfinal Configuration flinkConfig = new Configuration();\nfinal ConfigOption> listStringConfigOption =\nConfigOptions.key(\"test-list-string-key\").stringType().asList().noDefaultValue();\nfinal List list =\nArrays.asList(\"A,B,C,D\", \"A'B'C'D\", \"A;BCD\", \"AB\\\"C\\\"D\", \"AB'\\\"D:B\");\nflinkConfig.set(listStringConfigOption, list);\nassertThat(flinkConfig.get(listStringConfigOption))\n.containsExactlyInAnyOrderElementsOf(list);\nfinal ConfigOption> listDurationConfigOption =\nConfigOptions.key(\"test-list-duration-key\")\n.durationType()\n.asList()\n.noDefaultValue();\nfinal List durationList =\nArrays.asList(Duration.ofSeconds(3), Duration.ofMinutes(1));\nflinkConfig.set(listDurationConfigOption, durationList);\nassertThat(flinkConfig.get(listDurationConfigOption))\n.containsExactlyInAnyOrderElementsOf(durationList);\nfinal ConfigOption> mapConfigOption =\nConfigOptions.key(\"test-map-key\").mapType().noDefaultValue();\nfinal Map map = new HashMap<>();\nmap.put(\"key1\", \"A,B,C,D\");\nmap.put(\"key2\", \"A;BCD\");\nmap.put(\"key3\", \"A'B'C'D\");\nmap.put(\"key4\", \"AB\\\"C\\\"D\");\nmap.put(\"key5\", \"AB'\\\"D:B\");\nflinkConfig.set(mapConfigOption, map);\nassertThat(flinkConfig.get(mapConfigOption)).containsAllEntriesOf(map);\nfinal ConfigOption durationConfigOption =\nConfigOptions.key(\"test-duration-key\").durationType().noDefaultValue();\nfinal Duration duration = 
Duration.ofMillis(3000);\nflinkConfig.set(durationConfigOption, duration);\nassertThat(flinkConfig.get(durationConfigOption)).isEqualTo(duration);\nBootstrapTools.writeConfiguration(flinkConfig, new File(flinkConfDir, FLINK_CONF_FILENAME));\nfinal Configuration loadedFlinkConfig =\nGlobalConfiguration.loadConfiguration(flinkConfDir.getAbsolutePath());\nassertThat(loadedFlinkConfig.get(listStringConfigOption))\n.containsExactlyInAnyOrderElementsOf(list);\nassertThat(loadedFlinkConfig.get(listDurationConfigOption))\n.containsExactlyInAnyOrderElementsOf(durationList);\nassertThat(loadedFlinkConfig.get(mapConfigOption)).containsAllEntriesOf(map);\nassertThat(loadedFlinkConfig.get(durationConfigOption)).isEqualTo(duration);\n}\n}" + }, + { + "comment": "isHost() can now be removed?", + "method_body": "private void checkAndRedeploy(InfrastructureApplication application) {\nlog.log(INFO, () -> \"Checking if \" + application.name() + \" should be redeployed\");\nif ( ! readiedTypes.remove(application)) return;\nlog.log(INFO, () -> \"Trying to redeploy \" + application.id() + \" after completing provisioning of \" + application.name());\ntry (Mutex lock = locks.apply(application.id())) {\nif (application.nodeType().isHost() && nodes.get().state(State.ready).nodeType(application.nodeType()).isEmpty()) return;\nlog.log(INFO, () -> \"Redeploying \" + application.id() + \" after completing provisioning of \" + application.name());\ntry {\ndeployer.getDeployment(application.id()).ifPresent(Deployment::activate);\nreadied(childOf(application));\n}\ncatch (RuntimeException e) {\nlog.log(WARNING, \"Failed redeploying \" + application.id() + \", will be retried by maintainer\", e);\n}\n}\ncatch (UncheckedTimeoutException collision) {\nreadied(application);\n}\n}", + "target_code": "log.log(INFO, () -> \"Trying to redeploy \" + application.id() + \" after completing provisioning of \" + application.name());", + "method_body_after": "private void checkAndRedeploy(InfrastructureApplication application) {\nif ( ! 
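The BootstrapToolsTest record above captures a reviewer suggestion to collapse a manual entry-by-entry map check into one fluent AssertJ chain. A minimal, self-contained sketch of that simplification follows; the class name and `main` harness are hypothetical — only `hasSize`/`containsEntry` and the test data come from the record.

```java
// Illustrative only: recreates the reviewer's suggestion with AssertJ's MapAssert.
import static org.assertj.core.api.Assertions.assertThat;

import java.util.HashMap;
import java.util.Map;

public class PrefixedEnvAssertionSketch {
    public static void main(String[] args) {
        Map<String, String> res = new HashMap<>();
        res.put("LD_LIBRARY_PATH", "/usr/lib/native");

        // Before: extract the single entry and assert key and value separately.
        Map.Entry<String, String> entry = res.entrySet().iterator().next();
        assertThat(entry.getKey()).isEqualTo("LD_LIBRARY_PATH");
        assertThat(entry.getValue()).isEqualTo("/usr/lib/native");

        // After: one fluent chain states the size and the expected entry together.
        assertThat(res).hasSize(1).containsEntry("LD_LIBRARY_PATH", "/usr/lib/native");
    }
}
```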
readiedTypes.remove(application)) return;\ntry (Mutex lock = locks.apply(application.id())) {\nif (application.nodeType().isHost() && nodes.get().state(State.ready).nodeType(application.nodeType()).isEmpty()) return;\nlog.log(FINE, () -> \"Redeploying \" + application.id() + \" after completing provisioning for \" + application.name());\ntry {\ndeployer.getDeployment(application.id()).ifPresent(Deployment::activate);\nchildOf(application).ifPresent(this::readied);\n}\ncatch (RuntimeException e) {\nlog.log(INFO, \"Failed redeploying \" + application.id() + \", will be retried by maintainer\", e);\n}\n}\ncatch (UncheckedTimeoutException collision) {\nreadied(application);\n}\n}", + "context_before": "class InfraApplicationRedeployer extends AbstractComponent {\nprivate static final Logger log = Logger.getLogger(InfraApplicationRedeployer.class.getName());\nprivate final ExecutorService executor = Executors.newSingleThreadExecutor(new DaemonThreadFactory(\"infra-application-redeployer-\"));\nprivate final Set readiedTypes = new ConcurrentSkipListSet<>();\nprivate final InfraDeployer deployer;\nprivate final Function locks;\nprivate final Supplier nodes;\n@Inject\npublic InfraApplicationRedeployer(InfraDeployer deployer, NodeRepository nodes) {\nthis(deployer, nodes.applications()::lockMaintenance, nodes.nodes()::list);\n}\nInfraApplicationRedeployer(InfraDeployer deployer, Function locks, Supplier nodes) {\nthis.deployer = deployer;\nthis.locks = locks;\nthis.nodes = nodes;\n}\npublic void readied(NodeType type) {\nreadied(applicationOf(type));\n}\nprivate void readied(InfrastructureApplication application) {\nif (application == null) return;\nif (readiedTypes.add(application)) executor.execute(() -> checkAndRedeploy(application));\n}\nprivate static InfrastructureApplication applicationOf(NodeType type) {\nreturn switch (type) {\ncase host -> InfrastructureApplication.TENANT_HOST;\ncase confighost -> InfrastructureApplication.CONFIG_SERVER_HOST;\ncase controllerhost -> InfrastructureApplication.CONTROLLER_HOST;\ncase proxyhost -> InfrastructureApplication.PROXY_HOST;\ndefault -> null;\n};\n}\nprivate static InfrastructureApplication childOf(InfrastructureApplication application) {\nreturn switch (application) {\ncase CONFIG_SERVER_HOST -> InfrastructureApplication.CONFIG_SERVER;\ncase CONTROLLER_HOST -> InfrastructureApplication.CONTROLLER;\ndefault -> null;\n};\n}\n@Override\npublic void deconstruct() {\nexecutor.shutdown();\ntry {\nif (executor.awaitTermination(10, TimeUnit.SECONDS)) return;\nlog.log(WARNING, \"Redeployer did not shut down within 10 seconds\");\n}\ncatch (InterruptedException e) {\nThread.currentThread().interrupt();\n}\nexecutor.shutdownNow();\n}\n}", + "context_after": "class InfraApplicationRedeployer implements AutoCloseable {\nprivate static final Logger log = Logger.getLogger(InfraApplicationRedeployer.class.getName());\nprivate final ExecutorService executor = Executors.newSingleThreadExecutor(new DaemonThreadFactory(\"infra-application-redeployer-\"));\nprivate final Set readiedTypes = new ConcurrentSkipListSet<>();\nprivate final InfraDeployer deployer;\nprivate final Function locks;\nprivate final Supplier nodes;\n@Inject\npublic InfraApplicationRedeployer(InfraDeployer deployer, NodeRepository nodes) {\nthis(deployer, nodes.applications()::lockMaintenance, nodes.nodes()::list);\n}\nInfraApplicationRedeployer(InfraDeployer deployer, Function locks, Supplier nodes) {\nthis.deployer = deployer;\nthis.locks = locks;\nthis.nodes = nodes;\n}\npublic void 
readied(NodeType type) {\napplicationOf(type).ifPresent(this::readied);\n}\nprivate void readied(InfrastructureApplication application) {\nif (application == null) return;\nif (readiedTypes.add(application)) executor.execute(() -> checkAndRedeploy(application));\n}\nprivate static Optional applicationOf(NodeType type) {\nreturn switch (type) {\ncase host -> Optional.of(InfrastructureApplication.TENANT_HOST);\ncase confighost -> Optional.of(InfrastructureApplication.CONFIG_SERVER_HOST);\ncase config -> Optional.of(InfrastructureApplication.CONFIG_SERVER);\ncase controllerhost -> Optional.of(InfrastructureApplication.CONTROLLER_HOST);\ncase controller -> Optional.of(InfrastructureApplication.CONTROLLER);\ncase proxyhost -> Optional.of(InfrastructureApplication.PROXY_HOST);\ndefault -> Optional.empty();\n};\n}\nprivate static Optional childOf(InfrastructureApplication application) {\nreturn switch (application) {\ncase CONFIG_SERVER_HOST -> Optional.of(InfrastructureApplication.CONFIG_SERVER);\ncase CONTROLLER_HOST -> Optional.of(InfrastructureApplication.CONTROLLER);\ndefault -> Optional.empty();\n};\n}\n@Override\npublic void close() {\nexecutor.shutdown();\ntry {\nif (executor.awaitTermination(10, TimeUnit.SECONDS)) return;\nlog.log(WARNING, \"Redeployer did not shut down within 10 seconds\");\n}\ncatch (InterruptedException e) {\nThread.currentThread().interrupt();\n}\nexecutor.shutdownNow();\n}\n}" + }, + { + "comment": "I'm afraid the default parallelism will be quite large on CI (the number of CPU cores) and occupy too many resources. Setting parallelism to 2 is enough to reproduce the issue. This case tests the scenario where only some partitions are empty, so that readers assigned empty partitions can be closed without exceptions and the other readers can process messages as expected. 
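The InfraApplicationRedeployer record closing above replaces null-returning lookup helpers with `Optional`, so callers compose with `ifPresent` instead of null checks. Below is a minimal sketch of that pattern under assumed, simplified types; the enums and the `readied` stub are hypothetical stand-ins for the record's `NodeType` and `InfrastructureApplication`.

```java
// Hypothetical, simplified recreation of the Optional-based lookup pattern.
import java.util.Optional;

public class OptionalMappingSketch {
    enum NodeType { host, confighost, tenant }
    enum App { TENANT_HOST, CONFIG_SERVER_HOST }

    static Optional<App> applicationOf(NodeType type) {
        return switch (type) {
            case host -> Optional.of(App.TENANT_HOST);
            case confighost -> Optional.of(App.CONFIG_SERVER_HOST);
            default -> Optional.empty(); // no null sentinel for callers to forget
        };
    }

    static void readied(App application) {
        System.out.println("redeploying " + application);
    }

    public static void main(String[] args) {
        applicationOf(NodeType.host).ifPresent(OptionalMappingSketch::readied);   // runs
        applicationOf(NodeType.tenant).ifPresent(OptionalMappingSketch::readied); // no-op
    }
}
```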
", + "method_body": "public void testConsumingTopicWithEmptyPartitions() throws Throwable {\nString topicWithEmptyPartitions = \"topicWithEmptyPartitions-\" + UUID.randomUUID();\nKafkaSourceTestEnv.createTestTopic(\ntopicWithEmptyPartitions, KafkaSourceTestEnv.NUM_PARTITIONS, 1);\nList> records =\nKafkaSourceTestEnv.getRecordsForTopicWithoutTimestamp(topicWithEmptyPartitions);\nint partitionWithRecords = 5;\nrecords.removeIf(record -> record.partition() != partitionWithRecords);\nKafkaSourceTestEnv.produceToKafka(records);\nKafkaSourceTestEnv.setupEarliestOffsets(\nCollections.singletonList(\nnew TopicPartition(topicWithEmptyPartitions, partitionWithRecords)));\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setTopics(topicWithEmptyPartitions)\n.setGroupId(\"topic-with-empty-partition-test\")\n.setDeserializer(new TestingKafkaRecordDeserializationSchema(false))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(2);\nexecuteAndVerify(\nenv,\nenv.fromSource(\nsource,\nWatermarkStrategy.noWatermarks(),\n\"testConsumingTopicWithEmptyPartitions\"));\n}", + "target_code": "executeAndVerify(", + "method_body_after": "public void testConsumingTopicWithEmptyPartitions() throws Throwable {\nString topicWithEmptyPartitions = \"topicWithEmptyPartitions-\" + UUID.randomUUID();\nKafkaSourceTestEnv.createTestTopic(\ntopicWithEmptyPartitions, KafkaSourceTestEnv.NUM_PARTITIONS, 1);\nList> records =\nKafkaSourceTestEnv.getRecordsForTopicWithoutTimestamp(topicWithEmptyPartitions);\nint partitionWithRecords = 5;\nrecords.removeIf(record -> record.partition() != partitionWithRecords);\nKafkaSourceTestEnv.produceToKafka(records);\nKafkaSourceTestEnv.setupEarliestOffsets(\nCollections.singletonList(\nnew TopicPartition(topicWithEmptyPartitions, partitionWithRecords)));\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setTopics(topicWithEmptyPartitions)\n.setGroupId(\"topic-with-empty-partition-test\")\n.setDeserializer(new TestingKafkaRecordDeserializationSchema(false))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(2);\nexecuteAndVerify(\nenv,\nenv.fromSource(\nsource,\nWatermarkStrategy.noWatermarks(),\n\"testConsumingTopicWithEmptyPartitions\"));\n}", + "context_before": "class KafkaSpecificTests {\n@BeforeAll\npublic void setup() throws Throwable {\nKafkaSourceTestEnv.setup();\nKafkaSourceTestEnv.setupTopic(\nTOPIC1, true, true, KafkaSourceTestEnv::getRecordsForTopicWithoutTimestamp);\nKafkaSourceTestEnv.setupTopic(\nTOPIC2, true, true, KafkaSourceTestEnv::getRecordsForTopicWithoutTimestamp);\n}\n@AfterAll\npublic void tearDown() throws Exception {\nKafkaSourceTestEnv.tearDown();\n}\n@ParameterizedTest(name = \"Object reuse in deserializer = {arguments}\")\n@ValueSource(booleans = {false, true})\npublic void testTimestamp(boolean enableObjectReuse) throws Throwable {\nfinal String topic =\n\"testTimestamp-\" + ThreadLocalRandom.current().nextLong(0, Long.MAX_VALUE);\nfinal long currentTimestamp = System.currentTimeMillis();\nKafkaSourceTestEnv.createTestTopic(topic, 1, 1);\nKafkaSourceTestEnv.produceToKafka(\nArrays.asList(\nnew ProducerRecord<>(topic, 0, 
currentTimestamp + 1L, \"key0\", 0),\nnew ProducerRecord<>(topic, 0, currentTimestamp + 2L, \"key1\", 1),\nnew ProducerRecord<>(topic, 0, currentTimestamp + 3L, \"key2\", 2)));\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setGroupId(\"testTimestampAndWatermark\")\n.setTopics(topic)\n.setDeserializer(\nnew TestingKafkaRecordDeserializationSchema(enableObjectReuse))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\nDataStream stream =\nenv.fromSource(source, WatermarkStrategy.noWatermarks(), \"testTimestamp\");\nstream.transform(\n\"timestampVerifier\",\nTypeInformation.of(PartitionAndValue.class),\nnew WatermarkVerifyingOperator(v -> v));\nstream.addSink(new DiscardingSink<>());\nJobExecutionResult result = env.execute();\nassertEquals(\nArrays.asList(\ncurrentTimestamp + 1L, currentTimestamp + 2L, currentTimestamp + 3L),\nresult.getAccumulatorResult(\"timestamp\"));\n}\n@ParameterizedTest(name = \"Object reuse in deserializer = {arguments}\")\n@ValueSource(booleans = {false, true})\npublic void testBasicRead(boolean enableObjectReuse) throws Exception {\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setGroupId(\"testBasicRead\")\n.setTopics(Arrays.asList(TOPIC1, TOPIC2))\n.setDeserializer(\nnew TestingKafkaRecordDeserializationSchema(enableObjectReuse))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\nDataStream stream =\nenv.fromSource(source, WatermarkStrategy.noWatermarks(), \"testBasicRead\");\nexecuteAndVerify(env, stream);\n}\n@Test\npublic void testValueOnlyDeserializer() throws Exception {\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setGroupId(\"testValueOnlyDeserializer\")\n.setTopics(Arrays.asList(TOPIC1, TOPIC2))\n.setDeserializer(\nKafkaRecordDeserializationSchema.valueOnly(\nIntegerDeserializer.class))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\nfinal CloseableIterator resultIterator =\nenv.fromSource(\nsource,\nWatermarkStrategy.noWatermarks(),\n\"testValueOnlyDeserializer\")\n.executeAndCollect();\nAtomicInteger actualSum = new AtomicInteger();\nresultIterator.forEachRemaining(actualSum::addAndGet);\nint expectedSum = 0;\nfor (int partition = 0; partition < KafkaSourceTestEnv.NUM_PARTITIONS; partition++) {\nfor (int value = partition;\nvalue < KafkaSourceTestEnv.NUM_RECORDS_PER_PARTITION;\nvalue++) {\nexpectedSum += value;\n}\n}\nexpectedSum *= 2;\nassertEquals(expectedSum, actualSum.get());\n}\n@ParameterizedTest(name = \"Object reuse in deserializer = {arguments}\")\n@ValueSource(booleans = {false, true})\npublic void testRedundantParallelism(boolean enableObjectReuse) throws Exception {\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setGroupId(\"testRedundantParallelism\")\n.setTopics(Collections.singletonList(TOPIC1))\n.setDeserializer(\nnew 
TestingKafkaRecordDeserializationSchema(enableObjectReuse))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(KafkaSourceTestEnv.NUM_PARTITIONS + 1);\nDataStream stream =\nenv.fromSource(\nsource, WatermarkStrategy.noWatermarks(), \"testRedundantParallelism\");\nexecuteAndVerify(env, stream);\n}\n@ParameterizedTest(name = \"Object reuse in deserializer = {arguments}\")\n@ValueSource(booleans = {false, true})\npublic void testBasicReadWithoutGroupId(boolean enableObjectReuse) throws Exception {\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setTopics(Arrays.asList(TOPIC1, TOPIC2))\n.setDeserializer(\nnew TestingKafkaRecordDeserializationSchema(enableObjectReuse))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\nDataStream stream =\nenv.fromSource(\nsource,\nWatermarkStrategy.noWatermarks(),\n\"testBasicReadWithoutGroupId\");\nexecuteAndVerify(env, stream);\n}\n@Test\npublic void testPerPartitionWatermark() throws Throwable {\nString watermarkTopic = \"watermarkTestTopic-\" + UUID.randomUUID();\nKafkaSourceTestEnv.createTestTopic(watermarkTopic, 2, 1);\nList> records =\nArrays.asList(\nnew ProducerRecord<>(watermarkTopic, 0, 100L, null, 100),\nnew ProducerRecord<>(watermarkTopic, 0, 200L, null, 200),\nnew ProducerRecord<>(watermarkTopic, 0, 300L, null, 300),\nnew ProducerRecord<>(watermarkTopic, 1, 150L, null, 150),\nnew ProducerRecord<>(watermarkTopic, 1, 250L, null, 250),\nnew ProducerRecord<>(watermarkTopic, 1, 350L, null, 350));\nKafkaSourceTestEnv.produceToKafka(records);\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setTopics(watermarkTopic)\n.setGroupId(\"watermark-test\")\n.setDeserializer(new TestingKafkaRecordDeserializationSchema(false))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\nenv.fromSource(\nsource,\nWatermarkStrategy.forGenerator(\n(context) -> new OnEventWatermarkGenerator()),\n\"testPerPartitionWatermark\")\n.process(\nnew ProcessFunction() {\n@Override\npublic void processElement(\nPartitionAndValue value,\nProcessFunction.Context ctx,\nCollector out) {\nassertThat(ctx.timestamp())\n.as(\n\"Event time should never behind watermark \"\n+ \"because of per-split watermark multiplexing logic\")\n.isGreaterThanOrEqualTo(\nctx.timerService().currentWatermark());\n}\n});\nenv.execute();\n}\n@Test\npublic void testConsumingEmptyTopic() throws Throwable {\nString emptyTopic = \"emptyTopic-\" + UUID.randomUUID();\nKafkaSourceTestEnv.createTestTopic(emptyTopic, 3, 1);\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setTopics(emptyTopic)\n.setGroupId(\"empty-topic-test\")\n.setDeserializer(new TestingKafkaRecordDeserializationSchema(false))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\ntry (CloseableIterator iterator 
=\nenv.fromSource(\nsource,\nWatermarkStrategy.noWatermarks(),\n\"testConsumingEmptyTopic\")\n.executeAndCollect()) {\nassertThat(iterator.hasNext()).isFalse();\n}\n}\n@Test\n}", + "context_after": "class KafkaSpecificTests {\n@BeforeAll\npublic void setup() throws Throwable {\nKafkaSourceTestEnv.setup();\nKafkaSourceTestEnv.setupTopic(\nTOPIC1, true, true, KafkaSourceTestEnv::getRecordsForTopicWithoutTimestamp);\nKafkaSourceTestEnv.setupTopic(\nTOPIC2, true, true, KafkaSourceTestEnv::getRecordsForTopicWithoutTimestamp);\n}\n@AfterAll\npublic void tearDown() throws Exception {\nKafkaSourceTestEnv.tearDown();\n}\n@ParameterizedTest(name = \"Object reuse in deserializer = {arguments}\")\n@ValueSource(booleans = {false, true})\npublic void testTimestamp(boolean enableObjectReuse) throws Throwable {\nfinal String topic =\n\"testTimestamp-\" + ThreadLocalRandom.current().nextLong(0, Long.MAX_VALUE);\nfinal long currentTimestamp = System.currentTimeMillis();\nKafkaSourceTestEnv.createTestTopic(topic, 1, 1);\nKafkaSourceTestEnv.produceToKafka(\nArrays.asList(\nnew ProducerRecord<>(topic, 0, currentTimestamp + 1L, \"key0\", 0),\nnew ProducerRecord<>(topic, 0, currentTimestamp + 2L, \"key1\", 1),\nnew ProducerRecord<>(topic, 0, currentTimestamp + 3L, \"key2\", 2)));\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setGroupId(\"testTimestampAndWatermark\")\n.setTopics(topic)\n.setDeserializer(\nnew TestingKafkaRecordDeserializationSchema(enableObjectReuse))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\nDataStream stream =\nenv.fromSource(source, WatermarkStrategy.noWatermarks(), \"testTimestamp\");\nstream.transform(\n\"timestampVerifier\",\nTypeInformation.of(PartitionAndValue.class),\nnew WatermarkVerifyingOperator(v -> v));\nstream.addSink(new DiscardingSink<>());\nJobExecutionResult result = env.execute();\nassertThat(result.>getAccumulatorResult(\"timestamp\"))\n.containsExactly(\ncurrentTimestamp + 1L, currentTimestamp + 2L, currentTimestamp + 3L);\n}\n@ParameterizedTest(name = \"Object reuse in deserializer = {arguments}\")\n@ValueSource(booleans = {false, true})\npublic void testBasicRead(boolean enableObjectReuse) throws Exception {\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setGroupId(\"testBasicRead\")\n.setTopics(Arrays.asList(TOPIC1, TOPIC2))\n.setDeserializer(\nnew TestingKafkaRecordDeserializationSchema(enableObjectReuse))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\nDataStream stream =\nenv.fromSource(source, WatermarkStrategy.noWatermarks(), \"testBasicRead\");\nexecuteAndVerify(env, stream);\n}\n@Test\npublic void testValueOnlyDeserializer() throws Exception {\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setGroupId(\"testValueOnlyDeserializer\")\n.setTopics(Arrays.asList(TOPIC1, TOPIC2))\n.setDeserializer(\nKafkaRecordDeserializationSchema.valueOnly(\nIntegerDeserializer.class))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = 
StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\ntry (CloseableIterator resultIterator =\nenv.fromSource(\nsource,\nWatermarkStrategy.noWatermarks(),\n\"testValueOnlyDeserializer\")\n.executeAndCollect()) {\nAtomicInteger actualSum = new AtomicInteger();\nresultIterator.forEachRemaining(actualSum::addAndGet);\nint expectedSum = 0;\nfor (int partition = 0;\npartition < KafkaSourceTestEnv.NUM_PARTITIONS;\npartition++) {\nfor (int value = partition;\nvalue < KafkaSourceTestEnv.NUM_RECORDS_PER_PARTITION;\nvalue++) {\nexpectedSum += value;\n}\n}\nexpectedSum *= 2;\nassertThat(actualSum.get()).isEqualTo(expectedSum);\n}\n}\n@ParameterizedTest(name = \"Object reuse in deserializer = {arguments}\")\n@ValueSource(booleans = {false, true})\npublic void testRedundantParallelism(boolean enableObjectReuse) throws Exception {\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setGroupId(\"testRedundantParallelism\")\n.setTopics(Collections.singletonList(TOPIC1))\n.setDeserializer(\nnew TestingKafkaRecordDeserializationSchema(enableObjectReuse))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(KafkaSourceTestEnv.NUM_PARTITIONS + 1);\nDataStream stream =\nenv.fromSource(\nsource, WatermarkStrategy.noWatermarks(), \"testRedundantParallelism\");\nexecuteAndVerify(env, stream);\n}\n@ParameterizedTest(name = \"Object reuse in deserializer = {arguments}\")\n@ValueSource(booleans = {false, true})\npublic void testBasicReadWithoutGroupId(boolean enableObjectReuse) throws Exception {\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setTopics(Arrays.asList(TOPIC1, TOPIC2))\n.setDeserializer(\nnew TestingKafkaRecordDeserializationSchema(enableObjectReuse))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\nDataStream stream =\nenv.fromSource(\nsource,\nWatermarkStrategy.noWatermarks(),\n\"testBasicReadWithoutGroupId\");\nexecuteAndVerify(env, stream);\n}\n@Test\npublic void testPerPartitionWatermark() throws Throwable {\nString watermarkTopic = \"watermarkTestTopic-\" + UUID.randomUUID();\nKafkaSourceTestEnv.createTestTopic(watermarkTopic, 2, 1);\nList> records =\nArrays.asList(\nnew ProducerRecord<>(watermarkTopic, 0, 100L, null, 100),\nnew ProducerRecord<>(watermarkTopic, 0, 200L, null, 200),\nnew ProducerRecord<>(watermarkTopic, 0, 300L, null, 300),\nnew ProducerRecord<>(watermarkTopic, 1, 150L, null, 150),\nnew ProducerRecord<>(watermarkTopic, 1, 250L, null, 250),\nnew ProducerRecord<>(watermarkTopic, 1, 350L, null, 350));\nKafkaSourceTestEnv.produceToKafka(records);\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setTopics(watermarkTopic)\n.setGroupId(\"watermark-test\")\n.setDeserializer(new TestingKafkaRecordDeserializationSchema(false))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\nenv.fromSource(\nsource,\nWatermarkStrategy.forGenerator(\n(context) -> new 
OnEventWatermarkGenerator()),\n\"testPerPartitionWatermark\")\n.process(\nnew ProcessFunction() {\n@Override\npublic void processElement(\nPartitionAndValue value,\nProcessFunction.Context ctx,\nCollector out) {\nassertThat(ctx.timestamp())\n.as(\n\"Event time should never behind watermark \"\n+ \"because of per-split watermark multiplexing logic\")\n.isGreaterThanOrEqualTo(\nctx.timerService().currentWatermark());\n}\n});\nenv.execute();\n}\n@Test\npublic void testConsumingEmptyTopic() throws Throwable {\nString emptyTopic = \"emptyTopic-\" + UUID.randomUUID();\nKafkaSourceTestEnv.createTestTopic(emptyTopic, 3, 1);\nKafkaSource source =\nKafkaSource.builder()\n.setBootstrapServers(KafkaSourceTestEnv.brokerConnectionStrings)\n.setTopics(emptyTopic)\n.setGroupId(\"empty-topic-test\")\n.setDeserializer(new TestingKafkaRecordDeserializationSchema(false))\n.setStartingOffsets(OffsetsInitializer.earliest())\n.setBounded(OffsetsInitializer.latest())\n.build();\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(1);\ntry (CloseableIterator iterator =\nenv.fromSource(\nsource,\nWatermarkStrategy.noWatermarks(),\n\"testConsumingEmptyTopic\")\n.executeAndCollect()) {\nassertThat(iterator.hasNext()).isFalse();\n}\n}\n@Test\n}" + }, + { + "comment": "nice! thanks for adding these tests :)", + "method_body": "public void testSendsInvalidToDLQ() {\nSchema payloadSchema = Schema.builder().addInt32Field(\"id\").addStringField(\"name\").build();\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addMapField(\"attributes\", VARCHAR, VARCHAR)\n.addRowField(\"payload\", payloadSchema)\n.build();\nPCollectionTuple outputs =\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr1\", \"val1\"), \"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\"),\nmessage(2, map(\"attr2\", \"val2\"), \"{ \\\"invalid2\"),\nmessage(3, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(4, map(\"bttr\", \"vbl\"), \"{ \\\"name\\\" : \\\"baz\\\", \\\"id\\\" : 5 }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(true)\n.useFlatSchema(false)\n.build());\nPCollection rows = outputs.get(MAIN_TAG);\nPCollection dlqMessages = outputs.get(DLQ_TAG);\nPAssert.that(dlqMessages)\n.satisfies(\nmessages -> {\nassertEquals(2, size(messages));\nassertEquals(\nImmutableSet.of(map(\"attr1\", \"val1\"), map(\"attr2\", \"val2\")),\nconvertToSet(messages, m -> m.getAttributeMap()));\nassertEquals(\nImmutableSet.of(\"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\", \"{ \\\"invalid2\"),\nconvertToSet(messages, m -> new String(m.getPayload(), UTF_8)));\nreturn null;\n});\nPAssert.that(rows)\n.containsInAnyOrder(\nRow.withSchema(messageSchema)\n.addValues(ts(3), map(\"attr\", \"val\"), row(payloadSchema, 3, \"foo\"))\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), map(\"bttr\", \"vbl\"), row(payloadSchema, 5, \"baz\"))\n.build());\npipeline.run();\n}\n@Test\npublic void testConvertsMessagesToFlatRow() {\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addNullableField(\"id\", FieldType.INT32)\n.addNullableField(\"name\", FieldType.STRING)\n.build();\nPCollectionTuple rows =\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(2, map(\"bttr\", \"vbl\"), \"{ \\\"name\\\" : \\\"baz\\\", \\\"id\\\" : 5 }\"),\nmessage(3, map(\"cttr\", \"vcl\"), \"{ 
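The reviewer comment in the Kafka record above argues for pinning test parallelism because Flink's local default equals the machine's CPU-core count. A small hedged sketch of that pinning follows; the numeric source and job name are illustrative stand-ins, not the record's Kafka pipeline.

```java
// Illustrative sketch only. When run locally, getExecutionEnvironment() defaults
// parallelism to Runtime.getRuntime().availableProcessors(), which can be large
// on CI machines; pinning it to 2 keeps the job small while still giving one
// subtask with data and one without.
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ParallelismPinningSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2); // enough to reproduce "some readers get no splits"
        env.fromSequence(0, 9).print();
        env.execute("parallelism-pinning-sketch");
    }
}
```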
\\\"id\\\" : 7, \\\"name\\\" : \\\"bar\\\" }\"),\nmessage(4, map(\"dttr\", \"vdl\"), \"{ \\\"name\\\" : \\\"qaz\\\", \\\"id\\\" : 8 }\"),\nmessage(4, map(\"dttr\", \"vdl\"), \"{ \\\"name\\\" : null, \\\"id\\\" : null }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(false)\n.useFlatSchema(true)\n.build());\nPAssert.that(rows.get(MAIN_TAG))\n.containsInAnyOrder(\nRow.withSchema(messageSchema)\n.addValues(ts(1), /* map(\"attr\", \"val\"), */ 3, \"foo\")\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(2), /* map(\"bttr\", \"vbl\"), */ 5, \"baz\")\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(3), /* map(\"cttr\", \"vcl\"), */ 7, \"bar\")\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), /* map(\"dttr\", \"vdl\"), */ 8, \"qaz\")\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), /* map(\"dttr\", \"vdl\"), */ null, null)\n.build());\npipeline.run();\n}\n@Test\npublic void testSendsFlatRowInvalidToDLQ() {\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addInt32Field(\"id\")\n.addStringField(\"name\")\n.build();\nPCollectionTuple outputs =\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr1\", \"val1\"), \"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\"),\nmessage(2, map(\"attr2\", \"val2\"), \"{ \\\"invalid2\"),\nmessage(3, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(4, map(\"bttr\", \"vbl\"), \"{ \\\"name\\\" : \\\"baz\\\", \\\"id\\\" : 5 }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(true)\n.useFlatSchema(true)\n.build());\nPCollection rows = outputs.get(MAIN_TAG);\nPCollection dlqMessages = outputs.get(DLQ_TAG);\nPAssert.that(dlqMessages)\n.satisfies(\nmessages -> {\nassertEquals(2, size(messages));\nassertEquals(\nImmutableSet.of(map(\"attr1\", \"val1\"), map(\"attr2\", \"val2\")),\nconvertToSet(messages, m -> m.getAttributeMap()));\nassertEquals(\nImmutableSet.of(\"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\", \"{ \\\"invalid2\"),\nconvertToSet(messages, m -> new String(m.getPayload(), UTF_8)));\nreturn null;\n});\nPAssert.that(rows)\n.containsInAnyOrder(\nRow.withSchema(messageSchema)\n.addValues(ts(3), /* map(\"attr\", \"val\"), */ 3, \"foo\")\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), /* map(\"bttr\", \"vbl\"), */ 5, \"baz\")\n.build());\npipeline.run();\n}\n@Test\npublic void testFlatSchemaMessageInvalidElement() {\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addInt32Field(\"id\")\n.addStringField(\"name\")\n.build();\nPCollectionTuple rows =\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(2, map(\"attr1\", \"val1\"), \"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(false)\n.useFlatSchema(true)\n.build());\nException exception = Assert.assertThrows(RuntimeException.class, () -> pipeline.run());\nAssert.assertTrue(exception.getMessage().contains(\"Error parsing message\"));\n}\n@Test\npublic void testNestedSchemaMessageInvalidElement() {\nSchema payloadSchema =\nSchema.builder()\n.addNullableField(\"id\", FieldType.INT32)\n.addNullableField(\"name\", FieldType.STRING)\n.build();\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addMapField(\"attributes\", VARCHAR, 
VARCHAR)\n.addRowField(\"payload\", payloadSchema)\n.build();\nPCollectionTuple rows =\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(2, map(\"attr1\", \"val1\"), \"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(false)\n.useFlatSchema(false)\n.build());\nException exception = Assert.assertThrows(RuntimeException.class, () -> pipeline.run());\nAssert.assertTrue(exception.getMessage().contains(\"Error parsing message\"));\n}\nprivate Row row(Schema schema, Object... objects) {\nreturn Row.withSchema(schema).addValues(objects).build();\n}\nprivate Map map(String attr, String val) {\nreturn ImmutableMap.of(attr, val);\n}\nprivate TimestampedValue message(\nint timestamp, Map attributes, String payload) {\nreturn TimestampedValue.of(\nnew PubsubMessage(payload.getBytes(UTF_8), attributes), ts(timestamp));\n}\nprivate Instant ts(long epochMills) {\nreturn new DateTime(epochMills).toInstant();\n}\nprivate static Set convertToSet(\nIterable messages, Function mapper) {\nreturn StreamSupport.stream(messages.spliterator(), false).map(mapper).collect(toSet());\n}\n}", + "target_code": ".build());", + "method_body_after": "public void testSendsInvalidToDLQ() {\nSchema payloadSchema = Schema.builder().addInt32Field(\"id\").addStringField(\"name\").build();\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addMapField(\"attributes\", VARCHAR, VARCHAR)\n.addRowField(\"payload\", payloadSchema)\n.build();\nPCollectionTuple outputs =\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr1\", \"val1\"), \"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\"),\nmessage(2, map(\"attr2\", \"val2\"), \"{ \\\"invalid2\"),\nmessage(3, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(4, map(\"bttr\", \"vbl\"), \"{ \\\"name\\\" : \\\"baz\\\", \\\"id\\\" : 5 }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(true)\n.useFlatSchema(false)\n.build());\nPCollection rows = outputs.get(MAIN_TAG);\nPCollection dlqMessages = outputs.get(DLQ_TAG);\nPAssert.that(dlqMessages)\n.satisfies(\nmessages -> {\nassertEquals(2, size(messages));\nassertEquals(\nImmutableSet.of(map(\"attr1\", \"val1\"), map(\"attr2\", \"val2\")),\nconvertToSet(messages, m -> m.getAttributeMap()));\nassertEquals(\nImmutableSet.of(\"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\", \"{ \\\"invalid2\"),\nconvertToSet(messages, m -> new String(m.getPayload(), UTF_8)));\nreturn null;\n});\nPAssert.that(rows)\n.containsInAnyOrder(\nRow.withSchema(messageSchema)\n.addValues(ts(3), map(\"attr\", \"val\"), row(payloadSchema, 3, \"foo\"))\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), map(\"bttr\", \"vbl\"), row(payloadSchema, 5, \"baz\"))\n.build());\npipeline.run();\n}\n@Test\npublic void testConvertsMessagesToFlatRow() {\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addNullableField(\"id\", FieldType.INT32)\n.addNullableField(\"name\", FieldType.STRING)\n.build();\nPCollectionTuple rows =\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(2, map(\"bttr\", \"vbl\"), \"{ \\\"name\\\" : \\\"baz\\\", \\\"id\\\" : 5 }\"),\nmessage(3, map(\"cttr\", \"vcl\"), \"{ \\\"id\\\" : 7, \\\"name\\\" : \\\"bar\\\" 
}\"),\nmessage(4, map(\"dttr\", \"vdl\"), \"{ \\\"name\\\" : \\\"qaz\\\", \\\"id\\\" : 8 }\"),\nmessage(4, map(\"dttr\", \"vdl\"), \"{ \\\"name\\\" : null, \\\"id\\\" : null }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(false)\n.useFlatSchema(true)\n.build());\nPAssert.that(rows.get(MAIN_TAG))\n.containsInAnyOrder(\nRow.withSchema(messageSchema)\n.addValues(ts(1), /* map(\"attr\", \"val\"), */ 3, \"foo\")\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(2), /* map(\"bttr\", \"vbl\"), */ 5, \"baz\")\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(3), /* map(\"cttr\", \"vcl\"), */ 7, \"bar\")\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), /* map(\"dttr\", \"vdl\"), */ 8, \"qaz\")\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), /* map(\"dttr\", \"vdl\"), */ null, null)\n.build());\npipeline.run();\n}\n@Test\npublic void testSendsFlatRowInvalidToDLQ() {\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addInt32Field(\"id\")\n.addStringField(\"name\")\n.build();\nPCollectionTuple outputs =\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr1\", \"val1\"), \"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\"),\nmessage(2, map(\"attr2\", \"val2\"), \"{ \\\"invalid2\"),\nmessage(3, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(4, map(\"bttr\", \"vbl\"), \"{ \\\"name\\\" : \\\"baz\\\", \\\"id\\\" : 5 }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(true)\n.useFlatSchema(true)\n.build());\nPCollection rows = outputs.get(MAIN_TAG);\nPCollection dlqMessages = outputs.get(DLQ_TAG);\nPAssert.that(dlqMessages)\n.satisfies(\nmessages -> {\nassertEquals(2, size(messages));\nassertEquals(\nImmutableSet.of(map(\"attr1\", \"val1\"), map(\"attr2\", \"val2\")),\nconvertToSet(messages, m -> m.getAttributeMap()));\nassertEquals(\nImmutableSet.of(\"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\", \"{ \\\"invalid2\"),\nconvertToSet(messages, m -> new String(m.getPayload(), UTF_8)));\nreturn null;\n});\nPAssert.that(rows)\n.containsInAnyOrder(\nRow.withSchema(messageSchema)\n.addValues(ts(3), /* map(\"attr\", \"val\"), */ 3, \"foo\")\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), /* map(\"bttr\", \"vbl\"), */ 5, \"baz\")\n.build());\npipeline.run();\n}\n@Test\npublic void testFlatSchemaMessageInvalidElement() {\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addInt32Field(\"id\")\n.addStringField(\"name\")\n.build();\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(2, map(\"attr1\", \"val1\"), \"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(false)\n.useFlatSchema(true)\n.build());\nException exception = Assert.assertThrows(RuntimeException.class, () -> pipeline.run());\nAssert.assertTrue(exception.getMessage().contains(\"Error parsing message\"));\n}\n@Test\npublic void testNestedSchemaMessageInvalidElement() {\nSchema payloadSchema =\nSchema.builder()\n.addNullableField(\"id\", FieldType.INT32)\n.addNullableField(\"name\", FieldType.STRING)\n.build();\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addMapField(\"attributes\", VARCHAR, VARCHAR)\n.addRowField(\"payload\", 
payloadSchema)\n.build();\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(2, map(\"attr1\", \"val1\"), \"{ \\\"invalid1\\\" : \\\"sdfsd\\\" }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(false)\n.useFlatSchema(false)\n.build());\nException exception = Assert.assertThrows(RuntimeException.class, () -> pipeline.run());\nAssert.assertTrue(exception.getMessage().contains(\"Error parsing message\"));\n}\nprivate Row row(Schema schema, Object... objects) {\nreturn Row.withSchema(schema).addValues(objects).build();\n}\nprivate Map map(String attr, String val) {\nreturn ImmutableMap.of(attr, val);\n}\nprivate TimestampedValue message(\nint timestamp, Map attributes, String payload) {\nreturn TimestampedValue.of(\nnew PubsubMessage(payload.getBytes(UTF_8), attributes), ts(timestamp));\n}\nprivate Instant ts(long epochMills) {\nreturn new DateTime(epochMills).toInstant();\n}\nprivate static Set convertToSet(\nIterable messages, Function mapper) {\nreturn StreamSupport.stream(messages.spliterator(), false).map(mapper).collect(toSet());\n}\n}", + "context_before": "class PubsubMessageToRowTest implements Serializable {\n@Rule public transient TestPipeline pipeline = TestPipeline.create();\nprivate static final String DEAD_FILE_QUEUE = \"projects/a12345z/topics/test\";\n@Test\npublic void testConvertsMessages() {\nSchema payloadSchema =\nSchema.builder()\n.addNullableField(\"id\", FieldType.INT32)\n.addNullableField(\"name\", FieldType.STRING)\n.build();\nSchema messageSchema =\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addMapField(\"attributes\", VARCHAR, VARCHAR)\n.addRowField(\"payload\", payloadSchema)\n.build();\nPCollectionTuple rows =\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(2, map(\"bttr\", \"vbl\"), \"{ \\\"name\\\" : \\\"baz\\\", \\\"id\\\" : 5 }\"),\nmessage(3, map(\"cttr\", \"vcl\"), \"{ \\\"id\\\" : 7, \\\"name\\\" : \\\"bar\\\" }\"),\nmessage(4, map(\"dttr\", \"vdl\"), \"{ \\\"name\\\" : \\\"qaz\\\", \\\"id\\\" : 8 }\"),\nmessage(4, map(\"dttr\", \"vdl\"), \"{ \\\"name\\\" : null, \\\"id\\\" : null }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(false)\n.useFlatSchema(false)\n.build());\nPAssert.that(rows.get(MAIN_TAG))\n.containsInAnyOrder(\nRow.withSchema(messageSchema)\n.addValues(ts(1), map(\"attr\", \"val\"), row(payloadSchema, 3, \"foo\"))\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(2), map(\"bttr\", \"vbl\"), row(payloadSchema, 5, \"baz\"))\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(3), map(\"cttr\", \"vcl\"), row(payloadSchema, 7, \"bar\"))\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), map(\"dttr\", \"vdl\"), row(payloadSchema, 8, \"qaz\"))\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), map(\"dttr\", \"vdl\"), row(payloadSchema, null, null))\n.build());\npipeline.run();\n}\n@Test", + "context_after": "class PubsubMessageToRowTest implements Serializable {\n@Rule public transient TestPipeline pipeline = TestPipeline.create();\n@Test\npublic void testConvertsMessages() {\nSchema payloadSchema =\nSchema.builder()\n.addNullableField(\"id\", FieldType.INT32)\n.addNullableField(\"name\", FieldType.STRING)\n.build();\nSchema messageSchema 
=\nSchema.builder()\n.addDateTimeField(\"event_timestamp\")\n.addMapField(\"attributes\", VARCHAR, VARCHAR)\n.addRowField(\"payload\", payloadSchema)\n.build();\nPCollectionTuple rows =\npipeline\n.apply(\n\"create\",\nCreate.timestamped(\nmessage(1, map(\"attr\", \"val\"), \"{ \\\"id\\\" : 3, \\\"name\\\" : \\\"foo\\\" }\"),\nmessage(2, map(\"bttr\", \"vbl\"), \"{ \\\"name\\\" : \\\"baz\\\", \\\"id\\\" : 5 }\"),\nmessage(3, map(\"cttr\", \"vcl\"), \"{ \\\"id\\\" : 7, \\\"name\\\" : \\\"bar\\\" }\"),\nmessage(4, map(\"dttr\", \"vdl\"), \"{ \\\"name\\\" : \\\"qaz\\\", \\\"id\\\" : 8 }\"),\nmessage(4, map(\"dttr\", \"vdl\"), \"{ \\\"name\\\" : null, \\\"id\\\" : null }\")))\n.apply(\n\"convert\",\nPubsubMessageToRow.builder()\n.messageSchema(messageSchema)\n.useDlq(false)\n.useFlatSchema(false)\n.build());\nPAssert.that(rows.get(MAIN_TAG))\n.containsInAnyOrder(\nRow.withSchema(messageSchema)\n.addValues(ts(1), map(\"attr\", \"val\"), row(payloadSchema, 3, \"foo\"))\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(2), map(\"bttr\", \"vbl\"), row(payloadSchema, 5, \"baz\"))\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(3), map(\"cttr\", \"vcl\"), row(payloadSchema, 7, \"bar\"))\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), map(\"dttr\", \"vdl\"), row(payloadSchema, 8, \"qaz\"))\n.build(),\nRow.withSchema(messageSchema)\n.addValues(ts(4), map(\"dttr\", \"vdl\"), row(payloadSchema, null, null))\n.build());\npipeline.run();\n}\n@Test" + }, + { + "comment": "Aren't we creating a `Path` that's converted to a `File` for it to be converted to a `Path` again? :thinking: ...this also applies to the other occurrences below", + "method_body": "static void setup() throws IOException {\nschema =\nnew Schema.Parser()\n.parse(\n\"{\\\"type\\\": \\\"record\\\", \"\n+ \"\\\"name\\\": \\\"User\\\", \"\n+ \"\\\"namespace\\\": \\\"org.apache.flink.formats.parquet.avro.AvroParquetRecordFormatTest\\\", \"\n+ \"\\\"fields\\\": [\\n\"\n+ \" {\\\"name\\\": \\\"name\\\", \\\"type\\\": \\\"string\\\" },\\n\"\n+ \" {\\\"name\\\": \\\"favoriteNumber\\\", \\\"type\\\": [\\\"int\\\", \\\"null\\\"] },\\n\"\n+ \" {\\\"name\\\": \\\"favoriteColor\\\", \\\"type\\\": [\\\"string\\\", \\\"null\\\"] }\\n\"\n+ \" ]\\n\"\n+ \" }\");\nuserRecords.add(createUser(\"Peter\", 1, \"red\"));\nuserRecords.add(createUser(\"Tom\", 2, \"yellow\"));\nuserRecords.add(createUser(\"Jack\", 3, \"green\"));\ncreateParquetFile(\nAvroParquetWriters.forGenericRecord(schema),\nPath.fromLocalFile(temporaryFolder.resolve(USER_PARQUET_FILE_1).toFile()),\nuserRecords.toArray(new GenericRecord[0]));\nGenericRecord user = createUser(\"Max\", 4, \"blue\");\nuserRecords.add(user);\ncreateParquetFile(\nAvroParquetWriters.forGenericRecord(schema),\nPath.fromLocalFile(temporaryFolder.resolve(USER_PARQUET_FILE_2).toFile()),\nuser);\nuser = createUser(\"Alex\", 5, \"White\");\nGenericRecord user1 = createUser(\"Anna\", 6, \"Pink\");\nuserRecords.add(user);\nuserRecords.add(user1);\ncreateParquetFile(\nAvroParquetWriters.forGenericRecord(schema),\nPath.fromLocalFile(temporaryFolder.resolve(USER_PARQUET_FILE_3).toFile()),\nuser,\nuser1);\n}", + "target_code": "Path.fromLocalFile(temporaryFolder.resolve(USER_PARQUET_FILE_1).toFile()),", + "method_body_after": "static void setup() throws IOException {\nschema =\nnew Schema.Parser()\n.parse(\n\"{\\\"type\\\": \\\"record\\\", \"\n+ \"\\\"name\\\": \\\"User\\\", \"\n+ \"\\\"namespace\\\": \\\"org.apache.flink.formats.parquet.avro.AvroParquetRecordFormatTest\\\", \"\n+ \"\\\"fields\\\": 
[\\n\"\n+ \" {\\\"name\\\": \\\"name\\\", \\\"type\\\": \\\"string\\\" },\\n\"\n+ \" {\\\"name\\\": \\\"favoriteNumber\\\", \\\"type\\\": [\\\"int\\\", \\\"null\\\"] },\\n\"\n+ \" {\\\"name\\\": \\\"favoriteColor\\\", \\\"type\\\": [\\\"string\\\", \\\"null\\\"] }\\n\"\n+ \" ]\\n\"\n+ \" }\");\nuserRecords.add(createUser(\"Peter\", 1, \"red\"));\nuserRecords.add(createUser(\"Tom\", 2, \"yellow\"));\nuserRecords.add(createUser(\"Jack\", 3, \"green\"));\ncreateParquetFile(\nAvroParquetWriters.forGenericRecord(schema),\nPath.fromLocalFile(temporaryFolder.resolve(USER_PARQUET_FILE_1).toFile()),\nuserRecords.toArray(new GenericRecord[0]));\nGenericRecord user = createUser(\"Max\", 4, \"blue\");\nuserRecords.add(user);\ncreateParquetFile(\nAvroParquetWriters.forGenericRecord(schema),\nPath.fromLocalFile(temporaryFolder.resolve(USER_PARQUET_FILE_2).toFile()),\nuser);\nuser = createUser(\"Alex\", 5, \"White\");\nGenericRecord user1 = createUser(\"Anna\", 6, \"Pink\");\nuserRecords.add(user);\nuserRecords.add(user1);\ncreateParquetFile(\nAvroParquetWriters.forGenericRecord(schema),\nPath.fromLocalFile(temporaryFolder.resolve(USER_PARQUET_FILE_3).toFile()),\nuser,\nuser1);\n}", + "context_before": "class AvroParquetFileReadITCase {\nprivate static final int PARALLELISM = 4;\nprivate static final String USER_PARQUET_FILE_1 = \"user1.parquet\";\nprivate static final String USER_PARQUET_FILE_2 = \"user2.parquet\";\nprivate static final String USER_PARQUET_FILE_3 = \"user3.parquet\";\n@TempDir static java.nio.file.Path temporaryFolder;\nprivate static Schema schema;\nprivate static final List userRecords = new ArrayList<>(3);\n@RegisterExtension\npublic static final MiniClusterExtension MINI_CLUSTER_RESOURCE =\nnew MiniClusterExtension(\nnew MiniClusterResourceConfiguration.Builder()\n.setNumberTaskManagers(1)\n.setNumberSlotsPerTaskManager(PARALLELISM)\n.build());\n@BeforeAll\n@Test\nvoid testReadAvroRecord() throws Exception {\nfinal FileSource source =\nFileSource.forRecordStreamFormat(\nAvroParquetReaders.forGenericRecord(schema),\nPath.fromLocalFile(temporaryFolder.toFile()))\n.monitorContinuously(Duration.ofMillis(5))\n.build();\nfinal StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(PARALLELISM);\nenv.enableCheckpointing(10L);\nDataStream stream =\nenv.fromSource(source, WatermarkStrategy.noWatermarks(), \"file-source\");\ntry (CloseableIterator iterator =\nstream.executeAndCollect(\"Reading Avro GenericRecords\")) {\nList list = collectRecords(iterator, 6);\nassertThat(list).hasSize(6);\nfor (int i = 0; i < 6; i++) {\nassertThat(list).contains(userRecords.get(i));\n}\n}\n}\n@Test\nvoid testReadAvroReflectRecord() throws Exception {\nfinal FileSource source =\nFileSource.forRecordStreamFormat(\nAvroParquetReaders.forReflectRecord(\nAvroParquetRecordFormatTest.User.class),\nPath.fromLocalFile(temporaryFolder.toFile()))\n.monitorContinuously(Duration.ofMillis(5))\n.build();\nfinal StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(PARALLELISM);\nenv.enableCheckpointing(10L);\nDataStream stream =\nenv.fromSource(source, WatermarkStrategy.noWatermarks(), \"file-source\");\ntry (CloseableIterator iterator =\nstream.executeAndCollect(\"Reading Avro Reflect Records\")) {\nList list = collectRecords(iterator, 6);\nCollections.sort(\nlist,\nComparator.comparing(AvroParquetRecordFormatTest.User::getFavoriteNumber));\nassertThat(list).hasSize(6);\nfor (int i = 0; i < 6; i++) 
{\nassertUserEquals(list.get(i), userRecords.get(i));\n}\n}\n}\nprivate static List collectRecords(\nfinal CloseableIterator iterator, final int numElements) {\ncheckNotNull(iterator, \"iterator\");\ncheckArgument(numElements > 0, \"numElement must be > 0\");\nfinal ArrayList result = new ArrayList<>(numElements);\nwhile (iterator.hasNext()) {\nresult.add(iterator.next());\nif (result.size() == numElements) {\nbreak;\n}\n}\nreturn result;\n}\n@SafeVarargs\nprivate static void createParquetFile(\nParquetWriterFactory writerFactory, Path parquetFilePath, T... records)\nthrows IOException {\nBulkWriter writer =\nwriterFactory.create(\nparquetFilePath\n.getFileSystem()\n.create(parquetFilePath, FileSystem.WriteMode.OVERWRITE));\nfor (T record : records) {\nwriter.addElement(record);\n}\nwriter.flush();\nwriter.finish();\n}\nprivate void assertUserEquals(AvroParquetRecordFormatTest.User user, GenericRecord expected) {\nassertThat(user).isNotNull();\nassertThat(String.valueOf(user.getName())).isNotNull().isEqualTo(expected.get(\"name\"));\nassertThat(user.getFavoriteNumber()).isEqualTo(expected.get(\"favoriteNumber\"));\nassertThat(String.valueOf(user.getFavoriteColor()))\n.isEqualTo(String.valueOf(expected.get(\"favoriteColor\")));\n}\nprivate static GenericRecord createUser(String name, int favoriteNumber, String favoriteColor) {\nGenericRecord record = new GenericData.Record(schema);\nrecord.put(\"name\", name);\nrecord.put(\"favoriteNumber\", favoriteNumber);\nrecord.put(\"favoriteColor\", favoriteColor);\nreturn record;\n}\n}", + "context_after": "class AvroParquetFileReadITCase {\nprivate static final int PARALLELISM = 4;\nprivate static final String USER_PARQUET_FILE_1 = \"user1.parquet\";\nprivate static final String USER_PARQUET_FILE_2 = \"user2.parquet\";\nprivate static final String USER_PARQUET_FILE_3 = \"user3.parquet\";\n@TempDir static java.nio.file.Path temporaryFolder;\nprivate static Schema schema;\nprivate static final List userRecords = new ArrayList<>(3);\n@RegisterExtension\npublic static final MiniClusterExtension MINI_CLUSTER_RESOURCE =\nnew MiniClusterExtension(\nnew MiniClusterResourceConfiguration.Builder()\n.setNumberTaskManagers(1)\n.setNumberSlotsPerTaskManager(PARALLELISM)\n.build());\n@BeforeAll\n@Test\nvoid testReadAvroRecord() throws Exception {\nfinal FileSource source =\nFileSource.forRecordStreamFormat(\nAvroParquetReaders.forGenericRecord(schema),\nPath.fromLocalFile(temporaryFolder.toFile()))\n.monitorContinuously(Duration.ofMillis(5))\n.build();\nfinal StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(PARALLELISM);\nenv.enableCheckpointing(10L);\nDataStream stream =\nenv.fromSource(source, WatermarkStrategy.noWatermarks(), \"file-source\");\ntry (CloseableIterator iterator =\nstream.executeAndCollect(\"Reading Avro GenericRecords\")) {\nList list = collectRecords(iterator, 6);\nassertThat(list).hasSize(6);\nfor (int i = 0; i < 6; i++) {\nassertThat(list).contains(userRecords.get(i));\n}\n}\n}\n@Test\nvoid testReadAvroReflectRecord() throws Exception {\nfinal FileSource source =\nFileSource.forRecordStreamFormat(\nAvroParquetReaders.forReflectRecord(\nAvroParquetRecordFormatTest.User.class),\nPath.fromLocalFile(temporaryFolder.toFile()))\n.monitorContinuously(Duration.ofMillis(5))\n.build();\nfinal StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\nenv.setParallelism(PARALLELISM);\nenv.enableCheckpointing(10L);\nDataStream stream =\nenv.fromSource(source, 
WatermarkStrategy.noWatermarks(), \"file-source\");\ntry (CloseableIterator iterator =\nstream.executeAndCollect(\"Reading Avro Reflect Records\")) {\nList list = collectRecords(iterator, 6);\nCollections.sort(\nlist,\nComparator.comparing(AvroParquetRecordFormatTest.User::getFavoriteNumber));\nassertThat(list).hasSize(6);\nfor (int i = 0; i < 6; i++) {\nassertUserEquals(list.get(i), userRecords.get(i));\n}\n}\n}\nprivate static List collectRecords(\nfinal CloseableIterator iterator, final int numElements) {\ncheckNotNull(iterator, \"iterator\");\ncheckArgument(numElements > 0, \"numElement must be > 0\");\nfinal ArrayList result = new ArrayList<>(numElements);\nwhile (iterator.hasNext()) {\nresult.add(iterator.next());\nif (result.size() == numElements) {\nbreak;\n}\n}\nreturn result;\n}\n@SafeVarargs\nprivate static void createParquetFile(\nParquetWriterFactory writerFactory, Path parquetFilePath, T... records)\nthrows IOException {\nBulkWriter writer =\nwriterFactory.create(\nparquetFilePath\n.getFileSystem()\n.create(parquetFilePath, FileSystem.WriteMode.OVERWRITE));\nfor (T record : records) {\nwriter.addElement(record);\n}\nwriter.flush();\nwriter.finish();\n}\nprivate void assertUserEquals(AvroParquetRecordFormatTest.User user, GenericRecord expected) {\nassertThat(user).isNotNull();\nassertThat(String.valueOf(user.getName())).isNotNull().isEqualTo(expected.get(\"name\"));\nassertThat(user.getFavoriteNumber()).isEqualTo(expected.get(\"favoriteNumber\"));\nassertThat(String.valueOf(user.getFavoriteColor()))\n.isEqualTo(String.valueOf(expected.get(\"favoriteColor\")));\n}\nprivate static GenericRecord createUser(String name, int favoriteNumber, String favoriteColor) {\nGenericRecord record = new GenericData.Record(schema);\nrecord.put(\"name\", name);\nrecord.put(\"favoriteNumber\", favoriteNumber);\nrecord.put(\"favoriteColor\", favoriteColor);\nreturn record;\n}\n}" + }, + { + "comment": "We've generally used either 4KB or 8KB as the buffer sizes as it hits a nice point between memory and CPU usage, unless this certificate is always known to be < 1KB then I'll revert this.", + "method_body": "private byte[] getCertificateBytes() throws IOException {\nif (certificatePath != null) {\nreturn Files.readAllBytes(Paths.get(certificatePath));\n} else if (certificate != null) {\nByteArrayOutputStream outputStream = new ByteArrayOutputStream();\nbyte[] buffer = new byte[4096];\nint read = certificate.read(buffer, 0, buffer.length);\nwhile (read != -1) {\noutputStream.write(buffer, 0, read);\nread = certificate.read(buffer, 0, buffer.length);\n}\nreturn outputStream.toByteArray();\n} else {\nreturn new byte[0];\n}\n}", + "target_code": "byte[] buffer = new byte[4096];", + "method_body_after": "private byte[] getCertificateBytes() throws IOException {\nif (certificatePath != null) {\nreturn Files.readAllBytes(Paths.get(certificatePath));\n} else if (certificate != null) {\nByteArrayOutputStream outputStream = new ByteArrayOutputStream();\nbyte[] buffer = new byte[4096];\nint read = certificate.read(buffer, 0, buffer.length);\nwhile (read != -1) {\noutputStream.write(buffer, 0, read);\nread = certificate.read(buffer, 0, buffer.length);\n}\nreturn outputStream.toByteArray();\n} else {\nreturn new byte[0];\n}\n}", + "context_before": "class IdentityClientBase {\nstatic final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter();\nstatic final String WINDOWS_STARTER = \"cmd.exe\";\nstatic final String LINUX_MAC_STARTER = \"/bin/sh\";\nstatic final String 
WINDOWS_SWITCHER = \"/c\";\nstatic final String LINUX_MAC_SWITCHER = \"-c\";\nstatic final String WINDOWS_PROCESS_ERROR_MESSAGE = \"'az' is not recognized\";\nstatic final Pattern LINUX_MAC_PROCESS_ERROR_MESSAGE = Pattern.compile(\"(.*)az:(.*)not found\");\nstatic final String DEFAULT_WINDOWS_SYSTEM_ROOT = System.getenv(\"SystemRoot\");\nstatic final String DEFAULT_WINDOWS_PS_EXECUTABLE = \"pwsh.exe\";\nstatic final String LEGACY_WINDOWS_PS_EXECUTABLE = \"powershell.exe\";\nstatic final String DEFAULT_LINUX_PS_EXECUTABLE = \"pwsh\";\nstatic final String DEFAULT_MAC_LINUX_PATH = \"/bin/\";\nstatic final Duration REFRESH_OFFSET = Duration.ofMinutes(5);\nstatic final String IDENTITY_ENDPOINT_VERSION = \"2019-08-01\";\nstatic final String MSI_ENDPOINT_VERSION = \"2017-09-01\";\nstatic final String ARC_MANAGED_IDENTITY_ENDPOINT_API_VERSION = \"2019-11-01\";\nstatic final String ADFS_TENANT = \"adfs\";\nstatic final String HTTP_LOCALHOST = \"http:\nstatic final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = \"2019-07-01-preview\";\nstatic final ClientLogger LOGGER = new ClientLogger(IdentityClient.class);\nstatic final Pattern ACCESS_TOKEN_PATTERN = Pattern.compile(\"\\\"accessToken\\\": \\\"(.*?)(\\\"|$)\");\nstatic final Pattern TRAILING_FORWARD_SLASHES = Pattern.compile(\"/+$\");\nprivate static final String AZURE_IDENTITY_PROPERTIES = \"azure-identity.properties\";\nprivate static final String SDK_NAME = \"name\";\nprivate static final String SDK_VERSION = \"version\";\nprivate final Map properties;\nfinal IdentityClientOptions options;\nfinal String tenantId;\nfinal String clientId;\nfinal String resourceId;\nfinal String clientSecret;\nfinal String clientAssertionFilePath;\nfinal InputStream certificate;\nfinal String certificatePath;\nfinal Supplier clientAssertionSupplier;\nfinal String certificatePassword;\nHttpPipelineAdapter httpPipelineAdapter;\nString userAgent = UserAgentUtil.DEFAULT_USER_AGENT_HEADER;\n/**\n* Creates an IdentityClient with the given options.\n*\n* @param tenantId the tenant ID of the application.\n* @param clientId the client ID of the application.\n* @param clientSecret the client secret of the application.\n* @param resourceId the resource ID of the application\n* @param certificatePath the path to the PKCS12 or PEM certificate of the application.\n* @param certificate the PKCS12 or PEM certificate of the application.\n* @param certificatePassword the password protecting the PFX certificate.\n* @param isSharedTokenCacheCredential Indicate whether the credential is\n* {@link com.azure.identity.SharedTokenCacheCredential} or not.\n* @param clientAssertionTimeout the timeout to use for the client assertion.\n* @param options the options configuring the client.\n*/\nIdentityClientBase(String tenantId, String clientId, String clientSecret, String certificatePath,\nString clientAssertionFilePath, String resourceId, Supplier clientAssertionSupplier,\nInputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential,\nDuration clientAssertionTimeout, IdentityClientOptions options) {\nif (tenantId == null) {\ntenantId = IdentityUtil.DEFAULT_TENANT;\noptions.setAdditionallyAllowedTenants(Collections.singletonList(IdentityUtil.ALL_TENANTS));\n}\nif (options == null) {\noptions = new IdentityClientOptions();\n}\nthis.tenantId = tenantId;\nthis.clientId = clientId;\nthis.resourceId = resourceId;\nthis.clientSecret = clientSecret;\nthis.clientAssertionFilePath = clientAssertionFilePath;\nthis.certificatePath = certificatePath;\nthis.certificate = 
certificate;\nthis.certificatePassword = certificatePassword;\nthis.clientAssertionSupplier = clientAssertionSupplier;\nthis.options = options;\nproperties = CoreUtils.getProperties(AZURE_IDENTITY_PROPERTIES);\n}\nConfidentialClientApplication getConfidentialClient() {\nif (clientId == null) {\nthrow LOGGER.logExceptionAsError(new IllegalArgumentException(\n\"A non-null value for client ID must be provided for user authentication.\"));\n}\nString authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll(\"\") + \"/\"\n+ tenantId;\nIClientCredential credential;\nif (clientSecret != null) {\ncredential = ClientCredentialFactory.createFromSecret(clientSecret);\n} else if (certificate != null || certificatePath != null) {\ntry {\nif (certificatePassword == null) {\nbyte[] pemCertificateBytes = getCertificateBytes();\nList x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes);\nPrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes);\nif (x509CertificateList.size() == 1) {\ncredential = ClientCredentialFactory.createFromCertificate(\nprivateKey, x509CertificateList.get(0));\n} else {\ncredential = ClientCredentialFactory.createFromCertificateChain(\nprivateKey, x509CertificateList);\n}\n} else {\ntry (InputStream pfxCertificateStream = getCertificateInputStream()) {\ncredential = ClientCredentialFactory.createFromCertificate(pfxCertificateStream,\ncertificatePassword);\n}\n}\n} catch (IOException | GeneralSecurityException e) {\nthrow LOGGER.logExceptionAsError(new RuntimeException(\n\"Failed to parse the certificate for the credential: \" + e.getMessage(), e));\n}\n} else if (clientAssertionSupplier != null) {\ncredential = ClientCredentialFactory.createFromClientAssertion(clientAssertionSupplier.get());\n} else {\nthrow LOGGER.logExceptionAsError(\nnew IllegalArgumentException(\"Must provide client secret or client certificate path.\"\n+ \" To mitigate this issue, please refer to the troubleshooting guidelines here at \"\n+ \"https:\n}\nConfidentialClientApplication.Builder applicationBuilder =\nConfidentialClientApplication.builder(clientId, credential);\ntry {\napplicationBuilder = applicationBuilder.authority(authorityUrl);\n} catch (MalformedURLException e) {\nthrow LOGGER.logExceptionAsWarning(new IllegalStateException(e));\n}\napplicationBuilder.sendX5c(options.isIncludeX5c());\ninitializeHttpPipelineAdapter();\nif (httpPipelineAdapter != null) {\napplicationBuilder.httpClient(httpPipelineAdapter);\n} else {\napplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));\n}\nif (options.getExecutorService() != null) {\napplicationBuilder.executorService(options.getExecutorService());\n}\nTokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();\nPersistentTokenCacheImpl tokenCache = null;\nif (tokenCachePersistenceOptions != null) {\ntry {\ntokenCache = new PersistentTokenCacheImpl()\n.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())\n.setName(tokenCachePersistenceOptions.getName());\napplicationBuilder.setTokenCacheAccessAspect(tokenCache);\n} catch (Throwable t) {\nthrow LOGGER.logExceptionAsError(new ClientAuthenticationException(\n\"Shared token cache is unavailable in this environment.\", null, t));\n}\n}\nif (options.getRegionalAuthority() != null) {\nif (options.getRegionalAuthority() == RegionalAuthority.AUTO_DISCOVER_REGION) {\napplicationBuilder.autoDetectRegion(true);\n} else 
{\napplicationBuilder.azureRegion(options.getRegionalAuthority().toString());\n}\n}\nConfidentialClientApplication confidentialClientApplication = applicationBuilder.build();\nif (tokenCache != null) {\ntokenCache.registerCache();\n}\nreturn confidentialClientApplication;\n}\nPublicClientApplication getPublicClient(boolean sharedTokenCacheCredential) {\nif (clientId == null) {\nthrow LOGGER.logExceptionAsError(new IllegalArgumentException(\n\"A non-null value for client ID must be provided for user authentication.\"));\n}\nString authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll(\"\") + \"/\"\n+ tenantId;\nPublicClientApplication.Builder builder = PublicClientApplication.builder(clientId);\ntry {\nbuilder = builder.authority(authorityUrl);\n} catch (MalformedURLException e) {\nthrow LOGGER.logExceptionAsWarning(new IllegalStateException(e));\n}\ninitializeHttpPipelineAdapter();\nif (httpPipelineAdapter != null) {\nbuilder.httpClient(httpPipelineAdapter);\n} else {\nbuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));\n}\nif (options.getExecutorService() != null) {\nbuilder.executorService(options.getExecutorService());\n}\nif (!options.isCp1Disabled()) {\nSet set = new HashSet<>(1);\nset.add(\"CP1\");\nbuilder.clientCapabilities(set);\n}\nTokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();\nPersistentTokenCacheImpl tokenCache = null;\nif (tokenCachePersistenceOptions != null) {\ntry {\ntokenCache = new PersistentTokenCacheImpl()\n.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())\n.setName(tokenCachePersistenceOptions.getName());\nbuilder.setTokenCacheAccessAspect(tokenCache);\n} catch (Throwable t) {\nthrow LOGGER.logExceptionAsError(new ClientAuthenticationException(\n\"Shared token cache is unavailable in this environment.\", null, t));\n}\n}\nPublicClientApplication publicClientApplication = builder.build();\nif (tokenCache != null) {\ntokenCache.registerCache();\n}\nreturn publicClientApplication;\n}\nConfidentialClientApplication getManagedIdentityConfidentialClient() {\nString authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll(\"\")\n+ \"/\" + tenantId;\nIClientCredential credential = ClientCredentialFactory\n.createFromSecret(clientSecret != null ? clientSecret : \"dummy-secret\");\nConfidentialClientApplication.Builder applicationBuilder =\nConfidentialClientApplication.builder(clientId == null ? 
\"SYSTEM-ASSIGNED-MANAGED-IDENTITY\"\n: clientId, credential);\napplicationBuilder.validateAuthority(false);\ntry {\napplicationBuilder = applicationBuilder.authority(authorityUrl);\n} catch (MalformedURLException e) {\nthrow LOGGER.logExceptionAsWarning(new IllegalStateException(e));\n}\nif (options.getManagedIdentityType() == null) {\nthrow LOGGER.logExceptionAsError(\nnew CredentialUnavailableException(\"Managed Identity type not configured, authentication not available.\"));\n}\napplicationBuilder.appTokenProvider(appTokenProviderParameters -> {\nTokenRequestContext trc = new TokenRequestContext()\n.setScopes(new ArrayList<>(appTokenProviderParameters.scopes))\n.setClaims(appTokenProviderParameters.claims)\n.setTenantId(appTokenProviderParameters.tenantId);\nMono accessTokenAsync = getTokenFromTargetManagedIdentity(trc);\nreturn accessTokenAsync.map(accessToken -> {\nTokenProviderResult result = new TokenProviderResult();\nresult.setAccessToken(accessToken.getToken());\nresult.setTenantId(trc.getTenantId());\nresult.setExpiresInSeconds(accessToken.getExpiresAt().toEpochSecond());\nreturn result;\n}).toFuture();\n});\ninitializeHttpPipelineAdapter();\nif (httpPipelineAdapter != null) {\napplicationBuilder.httpClient(httpPipelineAdapter);\n} else {\napplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));\n}\nif (options.getExecutorService() != null) {\napplicationBuilder.executorService(options.getExecutorService());\n}\nreturn applicationBuilder.build();\n}\nDeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder buildDeviceCodeFlowParameters(TokenRequestContext request, Consumer deviceCodeConsumer) {\nDeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder =\nDeviceCodeFlowParameters.builder(\nnew HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(\nnew DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(),\nOffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message())))\n.tenant(IdentityUtil\n.resolveTenantId(tenantId, request, options));\nif (request.getClaims() != null) {\nClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());\nparametersBuilder.claims(customClaimRequest);\n}\nreturn parametersBuilder;\n}\nOnBehalfOfParameters buildOBOFlowParameters(TokenRequestContext request) {\nreturn OnBehalfOfParameters\n.builder(new HashSet<>(request.getScopes()), options.getUserAssertion())\n.tenant(IdentityUtil.resolveTenantId(tenantId, request, options))\n.build();\n}\nInteractiveRequestParameters.InteractiveRequestParametersBuilder buildInteractiveRequestParameters(TokenRequestContext request, String loginHint, URI redirectUri) {\nInteractiveRequestParameters.InteractiveRequestParametersBuilder builder =\nInteractiveRequestParameters.builder(redirectUri)\n.scopes(new HashSet<>(request.getScopes()))\n.prompt(Prompt.SELECT_ACCOUNT)\n.tenant(IdentityUtil\n.resolveTenantId(tenantId, request, options));\nif (request.getClaims() != null) {\nClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());\nbuilder.claims(customClaimRequest);\n}\nif (loginHint != null) {\nbuilder.loginHint(loginHint);\n}\nreturn builder;\n}\nUserNamePasswordParameters.UserNamePasswordParametersBuilder buildUsernamePasswordFlowParameters(TokenRequestContext request, String username, String password) {\nUserNamePasswordParameters.UserNamePasswordParametersBuilder userNamePasswordParametersBuilder =\nUserNamePasswordParameters.builder(new 
HashSet<>(request.getScopes()),\nusername, password.toCharArray());\nif (request.getClaims() != null) {\nClaimsRequest customClaimRequest = CustomClaimRequest\n.formatAsClaimsRequest(request.getClaims());\nuserNamePasswordParametersBuilder.claims(customClaimRequest);\n}\nuserNamePasswordParametersBuilder.tenant(\nIdentityUtil.resolveTenantId(tenantId, request, options));\nreturn userNamePasswordParametersBuilder;\n}\nAccessToken getTokenFromAzureCLIAuthentication(StringBuilder azCommand) {\nAccessToken token;\ntry {\nString starter;\nString switcher;\nif (isWindowsPlatform()) {\nstarter = WINDOWS_STARTER;\nswitcher = WINDOWS_SWITCHER;\n} else {\nstarter = LINUX_MAC_STARTER;\nswitcher = LINUX_MAC_SWITCHER;\n}\nProcessBuilder builder = new ProcessBuilder(starter, switcher, azCommand.toString());\nString workingDirectory = getSafeWorkingDirectory();\nif (workingDirectory != null) {\nbuilder.directory(new File(workingDirectory));\n} else {\nthrow LOGGER.logExceptionAsError(new IllegalStateException(\"A Safe Working directory could not be\"\n+ \" found to execute CLI command from. To mitigate this issue, please refer to the troubleshooting \"\n+ \" guidelines here at https:\n}\nbuilder.redirectErrorStream(true);\nProcess process = builder.start();\nStringBuilder output = new StringBuilder();\ntry (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream(),\nStandardCharsets.UTF_8))) {\nString line;\nwhile (true) {\nline = reader.readLine();\nif (line == null) {\nbreak;\n}\nif (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE)\n|| LINUX_MAC_PROCESS_ERROR_MESSAGE.matcher(line).matches()) {\nthrow LoggingUtil.logCredentialUnavailableException(LOGGER, options,\nnew CredentialUnavailableException(\n\"AzureCliCredential authentication unavailable. Azure CLI not installed.\"\n+ \"To mitigate this issue, please refer to the troubleshooting guidelines here at \"\n+ \"https:\n}\noutput.append(line);\n}\n}\nString processOutput = output.toString();\nprocess.waitFor(10, TimeUnit.SECONDS);\nif (process.exitValue() != 0) {\nif (processOutput.length() > 0) {\nString redactedOutput = redactInfo(processOutput);\nif (redactedOutput.contains(\"az login\") || redactedOutput.contains(\"az account set\")) {\nthrow LoggingUtil.logCredentialUnavailableException(LOGGER, options,\nnew CredentialUnavailableException(\n\"AzureCliCredential authentication unavailable.\"\n+ \" Please run 'az login' to set up account. 
To further mitigate this\"\n+ \" issue, please refer to the troubleshooting guidelines here at \"\n+ \"https:\n}\nthrow LOGGER.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null));\n} else {\nthrow LOGGER.logExceptionAsError(\nnew ClientAuthenticationException(\"Failed to invoke Azure CLI \", null));\n}\n}\nLOGGER.verbose(\"Azure CLI Authentication => A token response was received from Azure CLI, deserializing the\"\n+ \" response into an Access Token.\");\nMap objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class,\nSerializerEncoding.JSON);\nString accessToken = objectMap.get(\"accessToken\");\nString time = objectMap.get(\"expiresOn\");\nString timeToSecond = time.substring(0, time.indexOf(\".\"));\nString timeJoinedWithT = String.join(\"T\", timeToSecond.split(\" \"));\nOffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME)\n.atZone(ZoneId.systemDefault())\n.toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC);\ntoken = new AccessToken(accessToken, expiresOn);\n} catch (IOException | InterruptedException e) {\nthrow LOGGER.logExceptionAsError(new IllegalStateException(e));\n}\nreturn token;\n}\nString getSafeWorkingDirectory() {\nif (isWindowsPlatform()) {\nif (CoreUtils.isNullOrEmpty(DEFAULT_WINDOWS_SYSTEM_ROOT)) {\nreturn null;\n}\nreturn DEFAULT_WINDOWS_SYSTEM_ROOT + \"\\\\system32\";\n} else {\nreturn DEFAULT_MAC_LINUX_PATH;\n}\n}\nboolean isWindowsPlatform() {\nreturn System.getProperty(\"os.name\").contains(\"Windows\");\n}\nString redactInfo(String input) {\nreturn ACCESS_TOKEN_PATTERN.matcher(input).replaceAll(\"****\");\n}\nabstract Mono getTokenFromTargetManagedIdentity(TokenRequestContext tokenRequestContext);\nHttpPipeline setupPipeline(HttpClient httpClient) {\nList policies = new ArrayList<>();\nHttpLogOptions httpLogOptions = new HttpLogOptions();\nString clientName = properties.getOrDefault(SDK_NAME, \"UnknownName\");\nString clientVersion = properties.getOrDefault(SDK_VERSION, \"UnknownVersion\");\nConfiguration buildConfiguration = Configuration.getGlobalConfiguration().clone();\nuserAgent = UserAgentUtil.toUserAgentString(null, clientName, clientVersion, buildConfiguration);\npolicies.add(new UserAgentPolicy(userAgent));\nHttpPolicyProviders.addBeforeRetryPolicies(policies);\npolicies.add(new RetryPolicy());\nHttpPolicyProviders.addAfterRetryPolicies(policies);\npolicies.add(new HttpLoggingPolicy(httpLogOptions));\nreturn new HttpPipelineBuilder().httpClient(httpClient)\n.policies(policies.toArray(new HttpPipelinePolicy[0])).build();\n}\nvoid initializeHttpPipelineAdapter() {\nHttpPipeline httpPipeline = options.getHttpPipeline();\nif (httpPipeline != null) {\nhttpPipelineAdapter = new HttpPipelineAdapter(httpPipeline, options);\n} else {\nHttpClient httpClient = options.getHttpClient();\nif (httpClient != null) {\nhttpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient), options);\n} else if (options.getProxyOptions() == null) {\nhttpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()), options);\n}\n}\n}\nprivate InputStream getCertificateInputStream() throws IOException {\nif (certificatePath != null) {\nreturn new BufferedInputStream(new FileInputStream(certificatePath));\n} else {\nreturn certificate;\n}\n}\nprivate static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) {\nswitch (options.getType()) {\ncase SOCKS4:\ncase SOCKS5:\nreturn new Proxy(Proxy.Type.SOCKS, options.getAddress());\ncase HTTP:\ndefault:\nreturn new 
Proxy(Proxy.Type.HTTP, options.getAddress());\n}\n}\n}", + "context_after": "class IdentityClientBase {\nstatic final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter();\nstatic final String WINDOWS_STARTER = \"cmd.exe\";\nstatic final String LINUX_MAC_STARTER = \"/bin/sh\";\nstatic final String WINDOWS_SWITCHER = \"/c\";\nstatic final String LINUX_MAC_SWITCHER = \"-c\";\nstatic final String WINDOWS_PROCESS_ERROR_MESSAGE = \"'az' is not recognized\";\nstatic final Pattern LINUX_MAC_PROCESS_ERROR_MESSAGE = Pattern.compile(\"(.*)az:(.*)not found\");\nstatic final String DEFAULT_WINDOWS_PS_EXECUTABLE = \"pwsh.exe\";\nstatic final String LEGACY_WINDOWS_PS_EXECUTABLE = \"powershell.exe\";\nstatic final String DEFAULT_LINUX_PS_EXECUTABLE = \"pwsh\";\nstatic final String DEFAULT_MAC_LINUX_PATH = \"/bin/\";\nstatic final Duration REFRESH_OFFSET = Duration.ofMinutes(5);\nstatic final String IDENTITY_ENDPOINT_VERSION = \"2019-08-01\";\nstatic final String MSI_ENDPOINT_VERSION = \"2017-09-01\";\nstatic final String ARC_MANAGED_IDENTITY_ENDPOINT_API_VERSION = \"2019-11-01\";\nstatic final String ADFS_TENANT = \"adfs\";\nstatic final String HTTP_LOCALHOST = \"http:\nstatic final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = \"2019-07-01-preview\";\nstatic final ClientLogger LOGGER = new ClientLogger(IdentityClient.class);\nstatic final Pattern ACCESS_TOKEN_PATTERN = Pattern.compile(\"\\\"accessToken\\\": \\\"(.*?)(\\\"|$)\");\nstatic final Pattern TRAILING_FORWARD_SLASHES = Pattern.compile(\"/+$\");\nprivate static final String AZURE_IDENTITY_PROPERTIES = \"azure-identity.properties\";\nprivate static final String SDK_NAME = \"name\";\nprivate static final String SDK_VERSION = \"version\";\nprivate final Map properties;\nfinal IdentityClientOptions options;\nfinal String tenantId;\nfinal String clientId;\nfinal String resourceId;\nfinal String clientSecret;\nfinal String clientAssertionFilePath;\nfinal InputStream certificate;\nfinal String certificatePath;\nfinal Supplier clientAssertionSupplier;\nfinal String certificatePassword;\nHttpPipelineAdapter httpPipelineAdapter;\nString userAgent = UserAgentUtil.DEFAULT_USER_AGENT_HEADER;\n/**\n* Creates an IdentityClient with the given options.\n*\n* @param tenantId the tenant ID of the application.\n* @param clientId the client ID of the application.\n* @param clientSecret the client secret of the application.\n* @param resourceId the resource ID of the application\n* @param certificatePath the path to the PKCS12 or PEM certificate of the application.\n* @param certificate the PKCS12 or PEM certificate of the application.\n* @param certificatePassword the password protecting the PFX certificate.\n* @param isSharedTokenCacheCredential Indicate whether the credential is\n* {@link com.azure.identity.SharedTokenCacheCredential} or not.\n* @param clientAssertionTimeout the timeout to use for the client assertion.\n* @param options the options configuring the client.\n*/\nIdentityClientBase(String tenantId, String clientId, String clientSecret, String certificatePath,\nString clientAssertionFilePath, String resourceId, Supplier clientAssertionSupplier,\nInputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential,\nDuration clientAssertionTimeout, IdentityClientOptions options) {\nif (tenantId == null) {\ntenantId = IdentityUtil.DEFAULT_TENANT;\noptions.setAdditionallyAllowedTenants(Collections.singletonList(IdentityUtil.ALL_TENANTS));\n}\nif (options == null) {\noptions = new 
IdentityClientOptions();\n}\nthis.tenantId = tenantId;\nthis.clientId = clientId;\nthis.resourceId = resourceId;\nthis.clientSecret = clientSecret;\nthis.clientAssertionFilePath = clientAssertionFilePath;\nthis.certificatePath = certificatePath;\nthis.certificate = certificate;\nthis.certificatePassword = certificatePassword;\nthis.clientAssertionSupplier = clientAssertionSupplier;\nthis.options = options;\nproperties = CoreUtils.getProperties(AZURE_IDENTITY_PROPERTIES);\n}\nConfidentialClientApplication getConfidentialClient() {\nif (clientId == null) {\nthrow LOGGER.logExceptionAsError(new IllegalArgumentException(\n\"A non-null value for client ID must be provided for user authentication.\"));\n}\nString authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll(\"\") + \"/\"\n+ tenantId;\nIClientCredential credential;\nif (clientSecret != null) {\ncredential = ClientCredentialFactory.createFromSecret(clientSecret);\n} else if (certificate != null || certificatePath != null) {\ntry {\nif (certificatePassword == null) {\nbyte[] pemCertificateBytes = getCertificateBytes();\nList x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes);\nPrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes);\nif (x509CertificateList.size() == 1) {\ncredential = ClientCredentialFactory.createFromCertificate(\nprivateKey, x509CertificateList.get(0));\n} else {\ncredential = ClientCredentialFactory.createFromCertificateChain(\nprivateKey, x509CertificateList);\n}\n} else {\ntry (InputStream pfxCertificateStream = getCertificateInputStream()) {\ncredential = ClientCredentialFactory.createFromCertificate(pfxCertificateStream,\ncertificatePassword);\n}\n}\n} catch (IOException | GeneralSecurityException e) {\nthrow LOGGER.logExceptionAsError(new RuntimeException(\n\"Failed to parse the certificate for the credential: \" + e.getMessage(), e));\n}\n} else if (clientAssertionSupplier != null) {\ncredential = ClientCredentialFactory.createFromClientAssertion(clientAssertionSupplier.get());\n} else {\nthrow LOGGER.logExceptionAsError(\nnew IllegalArgumentException(\"Must provide client secret or client certificate path.\"\n+ \" To mitigate this issue, please refer to the troubleshooting guidelines here at \"\n+ \"https:\n}\nConfidentialClientApplication.Builder applicationBuilder =\nConfidentialClientApplication.builder(clientId, credential);\ntry {\napplicationBuilder = applicationBuilder.authority(authorityUrl);\n} catch (MalformedURLException e) {\nthrow LOGGER.logExceptionAsWarning(new IllegalStateException(e));\n}\napplicationBuilder.sendX5c(options.isIncludeX5c());\ninitializeHttpPipelineAdapter();\nif (httpPipelineAdapter != null) {\napplicationBuilder.httpClient(httpPipelineAdapter);\n} else {\napplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));\n}\nif (options.getExecutorService() != null) {\napplicationBuilder.executorService(options.getExecutorService());\n}\nTokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();\nPersistentTokenCacheImpl tokenCache = null;\nif (tokenCachePersistenceOptions != null) {\ntry {\ntokenCache = new PersistentTokenCacheImpl()\n.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())\n.setName(tokenCachePersistenceOptions.getName());\napplicationBuilder.setTokenCacheAccessAspect(tokenCache);\n} catch (Throwable t) {\nthrow LOGGER.logExceptionAsError(new ClientAuthenticationException(\n\"Shared token cache is 
unavailable in this environment.\", null, t));\n}\n}\nif (options.getRegionalAuthority() != null) {\nif (options.getRegionalAuthority() == RegionalAuthority.AUTO_DISCOVER_REGION) {\napplicationBuilder.autoDetectRegion(true);\n} else {\napplicationBuilder.azureRegion(options.getRegionalAuthority().toString());\n}\n}\nConfidentialClientApplication confidentialClientApplication = applicationBuilder.build();\nif (tokenCache != null) {\ntokenCache.registerCache();\n}\nreturn confidentialClientApplication;\n}\nPublicClientApplication getPublicClient(boolean sharedTokenCacheCredential) {\nif (clientId == null) {\nthrow LOGGER.logExceptionAsError(new IllegalArgumentException(\n\"A non-null value for client ID must be provided for user authentication.\"));\n}\nString authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll(\"\") + \"/\"\n+ tenantId;\nPublicClientApplication.Builder builder = PublicClientApplication.builder(clientId);\ntry {\nbuilder = builder.authority(authorityUrl);\n} catch (MalformedURLException e) {\nthrow LOGGER.logExceptionAsWarning(new IllegalStateException(e));\n}\ninitializeHttpPipelineAdapter();\nif (httpPipelineAdapter != null) {\nbuilder.httpClient(httpPipelineAdapter);\n} else {\nbuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));\n}\nif (options.getExecutorService() != null) {\nbuilder.executorService(options.getExecutorService());\n}\nif (!options.isCp1Disabled()) {\nSet set = new HashSet<>(1);\nset.add(\"CP1\");\nbuilder.clientCapabilities(set);\n}\nTokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();\nPersistentTokenCacheImpl tokenCache = null;\nif (tokenCachePersistenceOptions != null) {\ntry {\ntokenCache = new PersistentTokenCacheImpl()\n.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())\n.setName(tokenCachePersistenceOptions.getName());\nbuilder.setTokenCacheAccessAspect(tokenCache);\n} catch (Throwable t) {\nthrow LOGGER.logExceptionAsError(new ClientAuthenticationException(\n\"Shared token cache is unavailable in this environment.\", null, t));\n}\n}\nPublicClientApplication publicClientApplication = builder.build();\nif (tokenCache != null) {\ntokenCache.registerCache();\n}\nreturn publicClientApplication;\n}\nConfidentialClientApplication getManagedIdentityConfidentialClient() {\nString authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll(\"\")\n+ \"/\" + tenantId;\nIClientCredential credential = ClientCredentialFactory\n.createFromSecret(clientSecret != null ? clientSecret : \"dummy-secret\");\nConfidentialClientApplication.Builder applicationBuilder =\nConfidentialClientApplication.builder(clientId == null ? 
\"SYSTEM-ASSIGNED-MANAGED-IDENTITY\"\n: clientId, credential);\napplicationBuilder.validateAuthority(false);\ntry {\napplicationBuilder = applicationBuilder.authority(authorityUrl);\n} catch (MalformedURLException e) {\nthrow LOGGER.logExceptionAsWarning(new IllegalStateException(e));\n}\nif (options.getManagedIdentityType() == null) {\nthrow LOGGER.logExceptionAsError(\nnew CredentialUnavailableException(\"Managed Identity type not configured, authentication not available.\"));\n}\napplicationBuilder.appTokenProvider(appTokenProviderParameters -> {\nTokenRequestContext trc = new TokenRequestContext()\n.setScopes(new ArrayList<>(appTokenProviderParameters.scopes))\n.setClaims(appTokenProviderParameters.claims)\n.setTenantId(appTokenProviderParameters.tenantId);\nMono accessTokenAsync = getTokenFromTargetManagedIdentity(trc);\nreturn accessTokenAsync.map(accessToken -> {\nTokenProviderResult result = new TokenProviderResult();\nresult.setAccessToken(accessToken.getToken());\nresult.setTenantId(trc.getTenantId());\nresult.setExpiresInSeconds(accessToken.getExpiresAt().toEpochSecond());\nreturn result;\n}).toFuture();\n});\ninitializeHttpPipelineAdapter();\nif (httpPipelineAdapter != null) {\napplicationBuilder.httpClient(httpPipelineAdapter);\n} else {\napplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));\n}\nif (options.getExecutorService() != null) {\napplicationBuilder.executorService(options.getExecutorService());\n}\nreturn applicationBuilder.build();\n}\nDeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder buildDeviceCodeFlowParameters(TokenRequestContext request, Consumer deviceCodeConsumer) {\nDeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder =\nDeviceCodeFlowParameters.builder(\nnew HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(\nnew DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(),\nOffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message())))\n.tenant(IdentityUtil\n.resolveTenantId(tenantId, request, options));\nif (request.getClaims() != null) {\nClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());\nparametersBuilder.claims(customClaimRequest);\n}\nreturn parametersBuilder;\n}\nOnBehalfOfParameters buildOBOFlowParameters(TokenRequestContext request) {\nreturn OnBehalfOfParameters\n.builder(new HashSet<>(request.getScopes()), options.getUserAssertion())\n.tenant(IdentityUtil.resolveTenantId(tenantId, request, options))\n.build();\n}\nInteractiveRequestParameters.InteractiveRequestParametersBuilder buildInteractiveRequestParameters(TokenRequestContext request, String loginHint, URI redirectUri) {\nInteractiveRequestParameters.InteractiveRequestParametersBuilder builder =\nInteractiveRequestParameters.builder(redirectUri)\n.scopes(new HashSet<>(request.getScopes()))\n.prompt(Prompt.SELECT_ACCOUNT)\n.tenant(IdentityUtil\n.resolveTenantId(tenantId, request, options));\nif (request.getClaims() != null) {\nClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());\nbuilder.claims(customClaimRequest);\n}\nif (loginHint != null) {\nbuilder.loginHint(loginHint);\n}\nreturn builder;\n}\nUserNamePasswordParameters.UserNamePasswordParametersBuilder buildUsernamePasswordFlowParameters(TokenRequestContext request, String username, String password) {\nUserNamePasswordParameters.UserNamePasswordParametersBuilder userNamePasswordParametersBuilder =\nUserNamePasswordParameters.builder(new 
HashSet<>(request.getScopes()),\nusername, password.toCharArray());\nif (request.getClaims() != null) {\nClaimsRequest customClaimRequest = CustomClaimRequest\n.formatAsClaimsRequest(request.getClaims());\nuserNamePasswordParametersBuilder.claims(customClaimRequest);\n}\nuserNamePasswordParametersBuilder.tenant(\nIdentityUtil.resolveTenantId(tenantId, request, options));\nreturn userNamePasswordParametersBuilder;\n}\nAccessToken getTokenFromAzureCLIAuthentication(StringBuilder azCommand) {\nAccessToken token;\ntry {\nString starter;\nString switcher;\nif (isWindowsPlatform()) {\nstarter = WINDOWS_STARTER;\nswitcher = WINDOWS_SWITCHER;\n} else {\nstarter = LINUX_MAC_STARTER;\nswitcher = LINUX_MAC_SWITCHER;\n}\nProcessBuilder builder = new ProcessBuilder(starter, switcher, azCommand.toString());\nString workingDirectory = getSafeWorkingDirectory();\nif (workingDirectory != null) {\nbuilder.directory(new File(workingDirectory));\n} else {\nthrow LOGGER.logExceptionAsError(new IllegalStateException(\"A Safe Working directory could not be\"\n+ \" found to execute CLI command from. To mitigate this issue, please refer to the troubleshooting \"\n+ \" guidelines here at https:\n}\nbuilder.redirectErrorStream(true);\nProcess process = builder.start();\nStringBuilder output = new StringBuilder();\ntry (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream(),\nStandardCharsets.UTF_8))) {\nString line;\nwhile (true) {\nline = reader.readLine();\nif (line == null) {\nbreak;\n}\nif (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE)\n|| LINUX_MAC_PROCESS_ERROR_MESSAGE.matcher(line).matches()) {\nthrow LoggingUtil.logCredentialUnavailableException(LOGGER, options,\nnew CredentialUnavailableException(\n\"AzureCliCredential authentication unavailable. Azure CLI not installed.\"\n+ \"To mitigate this issue, please refer to the troubleshooting guidelines here at \"\n+ \"https:\n}\noutput.append(line);\n}\n}\nString processOutput = output.toString();\nprocess.waitFor(10, TimeUnit.SECONDS);\nif (process.exitValue() != 0) {\nif (processOutput.length() > 0) {\nString redactedOutput = redactInfo(processOutput);\nif (redactedOutput.contains(\"az login\") || redactedOutput.contains(\"az account set\")) {\nthrow LoggingUtil.logCredentialUnavailableException(LOGGER, options,\nnew CredentialUnavailableException(\n\"AzureCliCredential authentication unavailable.\"\n+ \" Please run 'az login' to set up account. 
To further mitigate this\"\n+ \" issue, please refer to the troubleshooting guidelines here at \"\n+ \"https:\n}\nthrow LOGGER.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null));\n} else {\nthrow LOGGER.logExceptionAsError(\nnew ClientAuthenticationException(\"Failed to invoke Azure CLI \", null));\n}\n}\nLOGGER.verbose(\"Azure CLI Authentication => A token response was received from Azure CLI, deserializing the\"\n+ \" response into an Access Token.\");\nMap objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class,\nSerializerEncoding.JSON);\nString accessToken = objectMap.get(\"accessToken\");\nString time = objectMap.get(\"expiresOn\");\nString timeToSecond = time.substring(0, time.indexOf(\".\"));\nString timeJoinedWithT = String.join(\"T\", timeToSecond.split(\" \"));\nOffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME)\n.atZone(ZoneId.systemDefault())\n.toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC);\ntoken = new AccessToken(accessToken, expiresOn);\n} catch (IOException | InterruptedException e) {\nthrow LOGGER.logExceptionAsError(new IllegalStateException(e));\n}\nreturn token;\n}\nString getSafeWorkingDirectory() {\nif (isWindowsPlatform()) {\nString windowsSystemRoot = System.getenv(\"SystemRoot\");\nif (CoreUtils.isNullOrEmpty(windowsSystemRoot)) {\nreturn null;\n}\nreturn windowsSystemRoot + \"\\\\system32\";\n} else {\nreturn DEFAULT_MAC_LINUX_PATH;\n}\n}\nboolean isWindowsPlatform() {\nreturn System.getProperty(\"os.name\").contains(\"Windows\");\n}\nString redactInfo(String input) {\nreturn ACCESS_TOKEN_PATTERN.matcher(input).replaceAll(\"****\");\n}\nabstract Mono getTokenFromTargetManagedIdentity(TokenRequestContext tokenRequestContext);\nHttpPipeline setupPipeline(HttpClient httpClient) {\nList policies = new ArrayList<>();\nHttpLogOptions httpLogOptions = new HttpLogOptions();\nString clientName = properties.getOrDefault(SDK_NAME, \"UnknownName\");\nString clientVersion = properties.getOrDefault(SDK_VERSION, \"UnknownVersion\");\nConfiguration buildConfiguration = Configuration.getGlobalConfiguration().clone();\nuserAgent = UserAgentUtil.toUserAgentString(null, clientName, clientVersion, buildConfiguration);\npolicies.add(new UserAgentPolicy(userAgent));\nHttpPolicyProviders.addBeforeRetryPolicies(policies);\npolicies.add(new RetryPolicy());\nHttpPolicyProviders.addAfterRetryPolicies(policies);\npolicies.add(new HttpLoggingPolicy(httpLogOptions));\nreturn new HttpPipelineBuilder().httpClient(httpClient)\n.policies(policies.toArray(new HttpPipelinePolicy[0])).build();\n}\nvoid initializeHttpPipelineAdapter() {\nHttpPipeline httpPipeline = options.getHttpPipeline();\nif (httpPipeline != null) {\nhttpPipelineAdapter = new HttpPipelineAdapter(httpPipeline, options);\n} else {\nHttpClient httpClient = options.getHttpClient();\nif (httpClient != null) {\nhttpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient), options);\n} else if (options.getProxyOptions() == null) {\nhttpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()), options);\n}\n}\n}\nprivate InputStream getCertificateInputStream() throws IOException {\nif (certificatePath != null) {\nreturn new BufferedInputStream(new FileInputStream(certificatePath));\n} else {\nreturn certificate;\n}\n}\nprivate static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) {\nswitch (options.getType()) {\ncase SOCKS4:\ncase SOCKS5:\nreturn new Proxy(Proxy.Type.SOCKS, 
options.getAddress());\ncase HTTP:\ndefault:\nreturn new Proxy(Proxy.Type.HTTP, options.getAddress());\n}\n}\n}" + }, + { + "comment": "Could we force constraints on the order of closing `context` and `enumerator` as follows? Because `context` was used by `enumerator`, this is a bit like the close order in the I/O mechanism. ``` if (started) { closeQuietly(enumerator); } closeQuietly(context); ``` Although not doing so will not result in semantic errors.", + "method_body": "public void close() throws Exception {\nLOG.info(\"Closing SourceCoordinator for source {}.\", operatorName);\ncloseQuietly(context);\nif (started) {\ncloseQuietly(enumerator);\n}\nLOG.info(\"Source coordinator for source {} closed.\", operatorName);\n}", + "target_code": "closeQuietly(enumerator);", + "method_body_after": "public void close() throws Exception {\nLOG.info(\"Closing SourceCoordinator for source {}.\", operatorName);\nif (started) {\ncloseQuietly(enumerator);\n}\ncloseQuietly(context);\nLOG.info(\"Source coordinator for source {} closed.\", operatorName);\n}", + "context_before": "class SourceCoordinator\nimplements OperatorCoordinator {\nprivate static final Logger LOG = LoggerFactory.getLogger(SourceCoordinator.class);\nprivate final WatermarkAggregator combinedWatermark = new WatermarkAggregator<>();\nprivate final WatermarkAlignmentParams watermarkAlignmentParams;\n/** The name of the operator this SourceCoordinator is associated with. */\nprivate final String operatorName;\n/** The Source that is associated with this SourceCoordinator. */\nprivate final Source source;\n/** The serializer that handles the serde of the SplitEnumerator checkpoints. */\nprivate final SimpleVersionedSerializer enumCheckpointSerializer;\n/** The context containing the states of the coordinator. */\nprivate final SourceCoordinatorContext context;\nprivate final CoordinatorStore coordinatorStore;\n/**\n* The split enumerator created from the associated Source. This one is created either during\n* resetting the coordinator to a checkpoint, or when the coordinator is started.\n*/\nprivate SplitEnumerator enumerator;\n/** A flag marking whether the coordinator has started. */\nprivate boolean started;\n/**\n* An ID that the coordinator will register self in the coordinator store with. Other\n* coordinators may send events to this coordinator by the ID.\n*/\n@Nullable private final String coordinatorListeningID;\npublic SourceCoordinator(\nString operatorName,\nSource source,\nSourceCoordinatorContext context,\nCoordinatorStore coordinatorStore) {\nthis(\noperatorName,\nsource,\ncontext,\ncoordinatorStore,\nWatermarkAlignmentParams.WATERMARK_ALIGNMENT_DISABLED,\nnull);\n}\npublic SourceCoordinator(\nString operatorName,\nSource source,\nSourceCoordinatorContext context,\nCoordinatorStore coordinatorStore,\nWatermarkAlignmentParams watermarkAlignmentParams,\n@Nullable String coordinatorListeningID) {\nthis.operatorName = operatorName;\nthis.source = source;\nthis.enumCheckpointSerializer = source.getEnumeratorCheckpointSerializer();\nthis.context = context;\nthis.coordinatorStore = coordinatorStore;\nthis.watermarkAlignmentParams = watermarkAlignmentParams;\nthis.coordinatorListeningID = coordinatorListeningID;\nif (watermarkAlignmentParams.isEnabled()) {\nif (context.isConcurrentExecutionAttemptsSupported()) {\nthrow new IllegalArgumentException(\n\"Watermark alignment is not supported in concurrent execution attempts \"\n+ \"scenario (e.g. 
if speculative execution is enabled)\");\n}\ncoordinatorStore.putIfAbsent(\nwatermarkAlignmentParams.getWatermarkGroup(), new WatermarkAggregator<>());\ncontext.getCoordinatorExecutor()\n.scheduleAtFixedRate(\nthis::announceCombinedWatermark,\nwatermarkAlignmentParams.getUpdateInterval(),\nwatermarkAlignmentParams.getUpdateInterval(),\nTimeUnit.MILLISECONDS);\n}\n}\n@VisibleForTesting\nvoid announceCombinedWatermark() {\ncheckState(\nwatermarkAlignmentParams != WatermarkAlignmentParams.WATERMARK_ALIGNMENT_DISABLED);\nWatermark globalCombinedWatermark =\ncoordinatorStore.apply(\nwatermarkAlignmentParams.getWatermarkGroup(),\n(value) -> {\nWatermarkAggregator aggregator = (WatermarkAggregator) value;\nreturn new Watermark(\naggregator.getAggregatedWatermark().getTimestamp());\n});\nlong maxAllowedWatermark;\ntry {\nmaxAllowedWatermark =\nMath.addExact(\nglobalCombinedWatermark.getTimestamp(),\nwatermarkAlignmentParams.getMaxAllowedWatermarkDrift());\n} catch (ArithmeticException e) {\nmaxAllowedWatermark = Watermark.MAX_WATERMARK.getTimestamp();\n}\nSet subTaskIds = combinedWatermark.keySet();\nLOG.info(\n\"Distributing maxAllowedWatermark={} to subTaskIds={}\",\nmaxAllowedWatermark,\nsubTaskIds);\nfor (Integer subtaskId : subTaskIds) {\ncontext.sendEventToSourceOperator(\nsubtaskId, new WatermarkAlignmentEvent(maxAllowedWatermark));\n}\n}\n@Override\npublic void start() throws Exception {\nLOG.info(\"Starting split enumerator for source {}.\", operatorName);\nstarted = true;\nif (enumerator == null) {\nfinal ClassLoader userCodeClassLoader =\ncontext.getCoordinatorContext().getUserCodeClassloader();\ntry (TemporaryClassLoaderContext ignored =\nTemporaryClassLoaderContext.of(userCodeClassLoader)) {\nenumerator = source.createEnumerator(context);\n} catch (Throwable t) {\nExceptionUtils.rethrowIfFatalErrorOrOOM(t);\nLOG.error(\"Failed to create Source Enumerator for source {}\", operatorName, t);\ncontext.failJob(t);\nreturn;\n}\n}\nrunInEventLoop(() -> enumerator.start(), \"starting the SplitEnumerator.\");\nif (coordinatorListeningID != null) {\ncoordinatorStore.compute(\ncoordinatorListeningID,\n(key, oldValue) -> {\nif (oldValue == null || oldValue instanceof OperatorCoordinator) {\nreturn this;\n} else {\ncheckState(\noldValue instanceof OperatorEvent,\n\"The existing value for \"\n+ coordinatorStore\n+ \"is expected to be an operator event, but it is in fact \"\n+ oldValue);\nLOG.info(\n\"Handling event {} received before the source coordinator with ID {} is registered\",\noldValue,\ncoordinatorListeningID);\nhandleEventFromOperator(0, 0, (OperatorEvent) oldValue);\nreturn null;\n}\n});\n}\n}\n@Override\n@Override\npublic void handleEventFromOperator(int subtask, int attemptNumber, OperatorEvent event) {\nrunInEventLoop(\n() -> {\nif (event instanceof RequestSplitEvent) {\nhandleRequestSplitEvent(subtask, attemptNumber, (RequestSplitEvent) event);\n} else if (event instanceof SourceEventWrapper) {\nhandleSourceEvent(\nsubtask,\nattemptNumber,\n((SourceEventWrapper) event).getSourceEvent());\n} else if (event instanceof ReaderRegistrationEvent) {\nhandleReaderRegistrationEvent(\nsubtask, attemptNumber, (ReaderRegistrationEvent) event);\n} else if (event instanceof ReportedWatermarkEvent) {\nhandleReportedWatermark(\nsubtask,\nnew Watermark(((ReportedWatermarkEvent) event).getWatermark()));\n} else {\nthrow new FlinkException(\"Unrecognized Operator Event: \" + event);\n}\n},\n\"handling operator event %s from subtask %d (\nevent,\nsubtask,\nattemptNumber);\n}\n@Override\npublic void 
executionAttemptFailed(\nint subtaskId, int attemptNumber, @Nullable Throwable reason) {\nrunInEventLoop(\n() -> {\nLOG.info(\n\"Removing registered reader after failure for subtask {} (\nsubtaskId,\nattemptNumber,\noperatorName);\ncontext.unregisterSourceReader(subtaskId, attemptNumber);\ncontext.attemptFailed(subtaskId, attemptNumber);\n},\n\"handling subtask %d (\nsubtaskId,\nattemptNumber);\n}\n@Override\npublic void subtaskReset(int subtaskId, long checkpointId) {\nrunInEventLoop(\n() -> {\nLOG.info(\n\"Recovering subtask {} to checkpoint {} for source {} to checkpoint.\",\nsubtaskId,\ncheckpointId,\noperatorName);\ncontext.subtaskReset(subtaskId);\nfinal List splitsToAddBack =\ncontext.getAndRemoveUncheckpointedAssignment(subtaskId, checkpointId);\nLOG.debug(\n\"Adding splits back to the split enumerator of source {}: {}\",\noperatorName,\nsplitsToAddBack);\nenumerator.addSplitsBack(splitsToAddBack, subtaskId);\n},\n\"handling subtask %d recovery to checkpoint %d\",\nsubtaskId,\ncheckpointId);\n}\n@Override\npublic void executionAttemptReady(int subtask, int attemptNumber, SubtaskGateway gateway) {\ncheckArgument(subtask == gateway.getSubtask());\ncheckArgument(attemptNumber == gateway.getExecution().getAttemptNumber());\nrunInEventLoop(\n() -> context.attemptReady(gateway),\n\"making event gateway to subtask %d (\nsubtask,\nattemptNumber);\n}\n@Override\npublic void checkpointCoordinator(long checkpointId, CompletableFuture result) {\nrunInEventLoop(\n() -> {\nLOG.debug(\n\"Taking a state snapshot on operator {} for checkpoint {}\",\noperatorName,\ncheckpointId);\ntry {\ncontext.onCheckpoint(checkpointId);\nresult.complete(toBytes(checkpointId));\n} catch (Throwable e) {\nExceptionUtils.rethrowIfFatalErrorOrOOM(e);\nresult.completeExceptionally(\nnew CompletionException(\nString.format(\n\"Failed to checkpoint SplitEnumerator for source %s\",\noperatorName),\ne));\n}\n},\n\"taking checkpoint %d\",\ncheckpointId);\n}\n@Override\npublic void notifyCheckpointComplete(long checkpointId) {\nrunInEventLoop(\n() -> {\nLOG.info(\n\"Marking checkpoint {} as completed for source {}.\",\ncheckpointId,\noperatorName);\ncontext.onCheckpointComplete(checkpointId);\nenumerator.notifyCheckpointComplete(checkpointId);\n},\n\"notifying the enumerator of completion of checkpoint %d\",\ncheckpointId);\n}\n@Override\npublic void notifyCheckpointAborted(long checkpointId) {\nrunInEventLoop(\n() -> {\nLOG.info(\n\"Marking checkpoint {} as aborted for source {}.\",\ncheckpointId,\noperatorName);\nenumerator.notifyCheckpointAborted(checkpointId);\n},\n\"calling notifyCheckpointAborted()\");\n}\n@Override\npublic void resetToCheckpoint(final long checkpointId, @Nullable final byte[] checkpointData)\nthrows Exception {\ncheckState(!started, \"The coordinator can only be reset if it was not yet started\");\nassert enumerator == null;\nif (checkpointData == null) {\nreturn;\n}\nLOG.info(\"Restoring SplitEnumerator of source {} from checkpoint.\", operatorName);\nfinal ClassLoader userCodeClassLoader =\ncontext.getCoordinatorContext().getUserCodeClassloader();\ntry (TemporaryClassLoaderContext ignored =\nTemporaryClassLoaderContext.of(userCodeClassLoader)) {\nfinal EnumChkT enumeratorCheckpoint = deserializeCheckpoint(checkpointData);\nenumerator = source.restoreEnumerator(context, enumeratorCheckpoint);\n}\n}\nprivate void runInEventLoop(\nfinal ThrowingRunnable action,\nfinal String actionName,\nfinal Object... 
actionNameFormatParameters) {\nensureStarted();\nif (enumerator == null) {\nreturn;\n}\ncontext.runInCoordinatorThread(\n() -> {\ntry {\naction.run();\n} catch (Throwable t) {\nExceptionUtils.rethrowIfFatalErrorOrOOM(t);\nfinal String actionString =\nString.format(actionName, actionNameFormatParameters);\nLOG.error(\n\"Uncaught exception in the SplitEnumerator for Source {} while {}. Triggering job failover.\",\noperatorName,\nactionString,\nt);\ncontext.failJob(t);\n}\n});\n}\n@VisibleForTesting\nSplitEnumerator getEnumerator() {\nreturn enumerator;\n}\n@VisibleForTesting\nSourceCoordinatorContext getContext() {\nreturn context;\n}\n/**\n* Serialize the coordinator state. The current implementation may not be super efficient, but\n* it should not matter that much because most of the state should be rather small. Large states\n* themselves may already be a problem regardless of how the serialization is implemented.\n*\n* @return A byte array containing the serialized state of the source coordinator.\n* @throws Exception When something goes wrong in serialization.\n*/\nprivate byte[] toBytes(long checkpointId) throws Exception {\nreturn writeCheckpointBytes(\nenumerator.snapshotState(checkpointId), enumCheckpointSerializer);\n}\nstatic byte[] writeCheckpointBytes(\nfinal EnumChkT enumeratorCheckpoint,\nfinal SimpleVersionedSerializer enumeratorCheckpointSerializer)\nthrows Exception {\ntry (ByteArrayOutputStream baos = new ByteArrayOutputStream();\nDataOutputStream out = new DataOutputViewStreamWrapper(baos)) {\nwriteCoordinatorSerdeVersion(out);\nout.writeInt(enumeratorCheckpointSerializer.getVersion());\nbyte[] serialziedEnumChkpt =\nenumeratorCheckpointSerializer.serialize(enumeratorCheckpoint);\nout.writeInt(serialziedEnumChkpt.length);\nout.write(serialziedEnumChkpt);\nout.flush();\nreturn baos.toByteArray();\n}\n}\n/**\n* Restore the state of this source coordinator from the state bytes.\n*\n* @param bytes The checkpoint bytes that was returned from {@link\n* @throws Exception When the deserialization failed.\n*/\nprivate EnumChkT deserializeCheckpoint(byte[] bytes) throws Exception {\ntry (ByteArrayInputStream bais = new ByteArrayInputStream(bytes);\nDataInputStream in = new DataInputViewStreamWrapper(bais)) {\nfinal int coordinatorSerdeVersion = readAndVerifyCoordinatorSerdeVersion(in);\nint enumSerializerVersion = in.readInt();\nint serializedEnumChkptSize = in.readInt();\nbyte[] serializedEnumChkpt = readBytes(in, serializedEnumChkptSize);\nif (coordinatorSerdeVersion != SourceCoordinatorSerdeUtils.VERSION_0\n&& bais.available() > 0) {\nthrow new IOException(\"Unexpected trailing bytes in enumerator checkpoint data\");\n}\nreturn enumCheckpointSerializer.deserialize(enumSerializerVersion, serializedEnumChkpt);\n}\n}\nprivate void handleRequestSplitEvent(int subtask, int attemptNumber, RequestSplitEvent event) {\nLOG.info(\n\"Source {} received split request from parallel task {} (\noperatorName,\nsubtask,\nattemptNumber);\nif (!context.hasNoMoreSplits(subtask)) {\nenumerator.handleSplitRequest(subtask, event.hostName());\n}\n}\nprivate void handleSourceEvent(int subtask, int attemptNumber, SourceEvent event) {\nLOG.debug(\n\"Source {} received custom event from parallel task {} (\noperatorName,\nsubtask,\nattemptNumber,\nevent);\nif (context.isConcurrentExecutionAttemptsSupported()) {\ncheckState(\nenumerator instanceof SupportsHandleExecutionAttemptSourceEvent,\n\"The split enumerator %s must implement SupportsHandleExecutionAttemptSourceEvent \"\n+ \"to be used in concurrent 
execution attempts scenario (e.g. if \"\n+ \"speculative execution is enabled).\",\nenumerator.getClass().getCanonicalName());\n((SupportsHandleExecutionAttemptSourceEvent) enumerator)\n.handleSourceEvent(subtask, attemptNumber, event);\n} else {\nenumerator.handleSourceEvent(subtask, event);\n}\n}\nprivate void handleReaderRegistrationEvent(\nint subtask, int attemptNumber, ReaderRegistrationEvent event) {\ncheckArgument(subtask == event.subtaskId());\nLOG.info(\n\"Source {} registering reader for parallel task {} (\noperatorName,\nsubtask,\nattemptNumber,\nevent.location());\nfinal boolean subtaskReaderExisted =\ncontext.registeredReadersOfAttempts().containsKey(subtask);\ncontext.registerSourceReader(subtask, attemptNumber, event.location());\nif (!subtaskReaderExisted) {\nenumerator.addReader(event.subtaskId());\n}\n}\nprivate void handleReportedWatermark(int subtask, Watermark watermark) throws FlinkException {\nif (context.isConcurrentExecutionAttemptsSupported()) {\nthrow new FlinkException(\n\"ReportedWatermarkEvent is not supported in concurrent execution attempts \"\n+ \"scenario (e.g. if speculative execution is enabled)\");\n}\nLOG.debug(\"New reported watermark={} from subTaskId={}\", watermark, subtask);\ncheckState(watermarkAlignmentParams.isEnabled());\ncombinedWatermark\n.aggregate(subtask, watermark)\n.ifPresent(\nnewCombinedWatermark ->\ncoordinatorStore.computeIfPresent(\nwatermarkAlignmentParams.getWatermarkGroup(),\n(key, oldValue) -> {\nWatermarkAggregator watermarkAggregator =\n(WatermarkAggregator) oldValue;\nwatermarkAggregator.aggregate(\noperatorName, newCombinedWatermark);\nreturn watermarkAggregator;\n}));\n}\nprivate void ensureStarted() {\nif (!started) {\nthrow new IllegalStateException(\"The coordinator has not started yet.\");\n}\n}\nprivate static class WatermarkAggregator {\nprivate final Map watermarks = new HashMap<>();\nprivate Watermark aggregatedWatermark = new Watermark(Long.MIN_VALUE);\n/**\n* Update the {@link Watermark} for the given {@code key)}.\n*\n* @return the new updated combined {@link Watermark} if the value has changed. {@code\n* Optional.empty()} otherwise.\n*/\npublic Optional aggregate(T key, Watermark watermark) {\nwatermarks.put(key, watermark);\nWatermark newMinimum =\nwatermarks.values().stream()\n.min(Comparator.comparingLong(Watermark::getTimestamp))\n.orElseThrow(IllegalStateException::new);\nif (newMinimum.equals(aggregatedWatermark)) {\nreturn Optional.empty();\n} else {\naggregatedWatermark = newMinimum;\nreturn Optional.of(aggregatedWatermark);\n}\n}\npublic Set keySet() {\nreturn watermarks.keySet();\n}\npublic Watermark getAggregatedWatermark() {\nreturn aggregatedWatermark;\n}\n}\n}", + "context_after": "class SourceCoordinator\nimplements OperatorCoordinator {\nprivate static final Logger LOG = LoggerFactory.getLogger(SourceCoordinator.class);\nprivate final WatermarkAggregator combinedWatermark = new WatermarkAggregator<>();\nprivate final WatermarkAlignmentParams watermarkAlignmentParams;\n/** The name of the operator this SourceCoordinator is associated with. */\nprivate final String operatorName;\n/** The Source that is associated with this SourceCoordinator. */\nprivate final Source source;\n/** The serializer that handles the serde of the SplitEnumerator checkpoints. */\nprivate final SimpleVersionedSerializer enumCheckpointSerializer;\n/** The context containing the states of the coordinator. 
*/\nprivate final SourceCoordinatorContext context;\nprivate final CoordinatorStore coordinatorStore;\n/**\n* The split enumerator created from the associated Source. This one is created either during\n* resetting the coordinator to a checkpoint, or when the coordinator is started.\n*/\nprivate SplitEnumerator enumerator;\n/** A flag marking whether the coordinator has started. */\nprivate boolean started;\n/**\n* An ID that the coordinator will register self in the coordinator store with. Other\n* coordinators may send events to this coordinator by the ID.\n*/\n@Nullable private final String coordinatorListeningID;\npublic SourceCoordinator(\nString operatorName,\nSource source,\nSourceCoordinatorContext context,\nCoordinatorStore coordinatorStore) {\nthis(\noperatorName,\nsource,\ncontext,\ncoordinatorStore,\nWatermarkAlignmentParams.WATERMARK_ALIGNMENT_DISABLED,\nnull);\n}\npublic SourceCoordinator(\nString operatorName,\nSource source,\nSourceCoordinatorContext context,\nCoordinatorStore coordinatorStore,\nWatermarkAlignmentParams watermarkAlignmentParams,\n@Nullable String coordinatorListeningID) {\nthis.operatorName = operatorName;\nthis.source = source;\nthis.enumCheckpointSerializer = source.getEnumeratorCheckpointSerializer();\nthis.context = context;\nthis.coordinatorStore = coordinatorStore;\nthis.watermarkAlignmentParams = watermarkAlignmentParams;\nthis.coordinatorListeningID = coordinatorListeningID;\nif (watermarkAlignmentParams.isEnabled()) {\nif (context.isConcurrentExecutionAttemptsSupported()) {\nthrow new IllegalArgumentException(\n\"Watermark alignment is not supported in concurrent execution attempts \"\n+ \"scenario (e.g. if speculative execution is enabled)\");\n}\ncoordinatorStore.putIfAbsent(\nwatermarkAlignmentParams.getWatermarkGroup(), new WatermarkAggregator<>());\ncontext.getCoordinatorExecutor()\n.scheduleAtFixedRate(\nthis::announceCombinedWatermark,\nwatermarkAlignmentParams.getUpdateInterval(),\nwatermarkAlignmentParams.getUpdateInterval(),\nTimeUnit.MILLISECONDS);\n}\n}\n@VisibleForTesting\nvoid announceCombinedWatermark() {\ncheckState(\nwatermarkAlignmentParams != WatermarkAlignmentParams.WATERMARK_ALIGNMENT_DISABLED);\nWatermark globalCombinedWatermark =\ncoordinatorStore.apply(\nwatermarkAlignmentParams.getWatermarkGroup(),\n(value) -> {\nWatermarkAggregator aggregator = (WatermarkAggregator) value;\nreturn new Watermark(\naggregator.getAggregatedWatermark().getTimestamp());\n});\nlong maxAllowedWatermark;\ntry {\nmaxAllowedWatermark =\nMath.addExact(\nglobalCombinedWatermark.getTimestamp(),\nwatermarkAlignmentParams.getMaxAllowedWatermarkDrift());\n} catch (ArithmeticException e) {\nmaxAllowedWatermark = Watermark.MAX_WATERMARK.getTimestamp();\n}\nSet subTaskIds = combinedWatermark.keySet();\nLOG.info(\n\"Distributing maxAllowedWatermark={} to subTaskIds={}\",\nmaxAllowedWatermark,\nsubTaskIds);\nfor (Integer subtaskId : subTaskIds) {\ncontext.sendEventToSourceOperator(\nsubtaskId, new WatermarkAlignmentEvent(maxAllowedWatermark));\n}\n}\n@Override\npublic void start() throws Exception {\nLOG.info(\"Starting split enumerator for source {}.\", operatorName);\nstarted = true;\nif (enumerator == null) {\nfinal ClassLoader userCodeClassLoader =\ncontext.getCoordinatorContext().getUserCodeClassloader();\ntry (TemporaryClassLoaderContext ignored =\nTemporaryClassLoaderContext.of(userCodeClassLoader)) {\nenumerator = source.createEnumerator(context);\n} catch (Throwable t) {\nExceptionUtils.rethrowIfFatalErrorOrOOM(t);\nLOG.error(\"Failed to create Source 
Enumerator for source {}\", operatorName, t);\ncontext.failJob(t);\nreturn;\n}\n}\nrunInEventLoop(() -> enumerator.start(), \"starting the SplitEnumerator.\");\nif (coordinatorListeningID != null) {\ncoordinatorStore.compute(\ncoordinatorListeningID,\n(key, oldValue) -> {\nif (oldValue == null || oldValue instanceof OperatorCoordinator) {\nreturn this;\n} else {\ncheckState(\noldValue instanceof OperatorEvent,\n\"The existing value for \"\n+ coordinatorStore\n+ \"is expected to be an operator event, but it is in fact \"\n+ oldValue);\nLOG.info(\n\"Handling event {} received before the source coordinator with ID {} is registered\",\noldValue,\ncoordinatorListeningID);\nhandleEventFromOperator(0, 0, (OperatorEvent) oldValue);\nreturn null;\n}\n});\n}\n}\n@Override\n@Override\npublic void handleEventFromOperator(int subtask, int attemptNumber, OperatorEvent event) {\nrunInEventLoop(\n() -> {\nif (event instanceof RequestSplitEvent) {\nhandleRequestSplitEvent(subtask, attemptNumber, (RequestSplitEvent) event);\n} else if (event instanceof SourceEventWrapper) {\nhandleSourceEvent(\nsubtask,\nattemptNumber,\n((SourceEventWrapper) event).getSourceEvent());\n} else if (event instanceof ReaderRegistrationEvent) {\nhandleReaderRegistrationEvent(\nsubtask, attemptNumber, (ReaderRegistrationEvent) event);\n} else if (event instanceof ReportedWatermarkEvent) {\nhandleReportedWatermark(\nsubtask,\nnew Watermark(((ReportedWatermarkEvent) event).getWatermark()));\n} else {\nthrow new FlinkException(\"Unrecognized Operator Event: \" + event);\n}\n},\n\"handling operator event %s from subtask %d (\nevent,\nsubtask,\nattemptNumber);\n}\n@Override\npublic void executionAttemptFailed(\nint subtaskId, int attemptNumber, @Nullable Throwable reason) {\nrunInEventLoop(\n() -> {\nLOG.info(\n\"Removing registered reader after failure for subtask {} (\nsubtaskId,\nattemptNumber,\noperatorName);\ncontext.unregisterSourceReader(subtaskId, attemptNumber);\ncontext.attemptFailed(subtaskId, attemptNumber);\n},\n\"handling subtask %d (\nsubtaskId,\nattemptNumber);\n}\n@Override\npublic void subtaskReset(int subtaskId, long checkpointId) {\nrunInEventLoop(\n() -> {\nLOG.info(\n\"Recovering subtask {} to checkpoint {} for source {} to checkpoint.\",\nsubtaskId,\ncheckpointId,\noperatorName);\ncontext.subtaskReset(subtaskId);\nfinal List splitsToAddBack =\ncontext.getAndRemoveUncheckpointedAssignment(subtaskId, checkpointId);\nLOG.debug(\n\"Adding splits back to the split enumerator of source {}: {}\",\noperatorName,\nsplitsToAddBack);\nenumerator.addSplitsBack(splitsToAddBack, subtaskId);\n},\n\"handling subtask %d recovery to checkpoint %d\",\nsubtaskId,\ncheckpointId);\n}\n@Override\npublic void executionAttemptReady(int subtask, int attemptNumber, SubtaskGateway gateway) {\ncheckArgument(subtask == gateway.getSubtask());\ncheckArgument(attemptNumber == gateway.getExecution().getAttemptNumber());\nrunInEventLoop(\n() -> context.attemptReady(gateway),\n\"making event gateway to subtask %d (\nsubtask,\nattemptNumber);\n}\n@Override\npublic void checkpointCoordinator(long checkpointId, CompletableFuture result) {\nrunInEventLoop(\n() -> {\nLOG.debug(\n\"Taking a state snapshot on operator {} for checkpoint {}\",\noperatorName,\ncheckpointId);\ntry {\ncontext.onCheckpoint(checkpointId);\nresult.complete(toBytes(checkpointId));\n} catch (Throwable e) {\nExceptionUtils.rethrowIfFatalErrorOrOOM(e);\nresult.completeExceptionally(\nnew CompletionException(\nString.format(\n\"Failed to checkpoint SplitEnumerator for source 
%s\",\noperatorName),\ne));\n}\n},\n\"taking checkpoint %d\",\ncheckpointId);\n}\n@Override\npublic void notifyCheckpointComplete(long checkpointId) {\nrunInEventLoop(\n() -> {\nLOG.info(\n\"Marking checkpoint {} as completed for source {}.\",\ncheckpointId,\noperatorName);\ncontext.onCheckpointComplete(checkpointId);\nenumerator.notifyCheckpointComplete(checkpointId);\n},\n\"notifying the enumerator of completion of checkpoint %d\",\ncheckpointId);\n}\n@Override\npublic void notifyCheckpointAborted(long checkpointId) {\nrunInEventLoop(\n() -> {\nLOG.info(\n\"Marking checkpoint {} as aborted for source {}.\",\ncheckpointId,\noperatorName);\nenumerator.notifyCheckpointAborted(checkpointId);\n},\n\"calling notifyCheckpointAborted()\");\n}\n@Override\npublic void resetToCheckpoint(final long checkpointId, @Nullable final byte[] checkpointData)\nthrows Exception {\ncheckState(!started, \"The coordinator can only be reset if it was not yet started\");\nassert enumerator == null;\nif (checkpointData == null) {\nreturn;\n}\nLOG.info(\"Restoring SplitEnumerator of source {} from checkpoint.\", operatorName);\nfinal ClassLoader userCodeClassLoader =\ncontext.getCoordinatorContext().getUserCodeClassloader();\ntry (TemporaryClassLoaderContext ignored =\nTemporaryClassLoaderContext.of(userCodeClassLoader)) {\nfinal EnumChkT enumeratorCheckpoint = deserializeCheckpoint(checkpointData);\nenumerator = source.restoreEnumerator(context, enumeratorCheckpoint);\n}\n}\nprivate void runInEventLoop(\nfinal ThrowingRunnable action,\nfinal String actionName,\nfinal Object... actionNameFormatParameters) {\nensureStarted();\nif (enumerator == null) {\nreturn;\n}\ncontext.runInCoordinatorThread(\n() -> {\ntry {\naction.run();\n} catch (Throwable t) {\nExceptionUtils.rethrowIfFatalErrorOrOOM(t);\nfinal String actionString =\nString.format(actionName, actionNameFormatParameters);\nLOG.error(\n\"Uncaught exception in the SplitEnumerator for Source {} while {}. Triggering job failover.\",\noperatorName,\nactionString,\nt);\ncontext.failJob(t);\n}\n});\n}\n@VisibleForTesting\nSplitEnumerator getEnumerator() {\nreturn enumerator;\n}\n@VisibleForTesting\nSourceCoordinatorContext getContext() {\nreturn context;\n}\n/**\n* Serialize the coordinator state. The current implementation may not be super efficient, but\n* it should not matter that much because most of the state should be rather small. 
Large states\n* themselves may already be a problem regardless of how the serialization is implemented.\n*\n* @return A byte array containing the serialized state of the source coordinator.\n* @throws Exception When something goes wrong in serialization.\n*/\nprivate byte[] toBytes(long checkpointId) throws Exception {\nreturn writeCheckpointBytes(\nenumerator.snapshotState(checkpointId), enumCheckpointSerializer);\n}\nstatic byte[] writeCheckpointBytes(\nfinal EnumChkT enumeratorCheckpoint,\nfinal SimpleVersionedSerializer enumeratorCheckpointSerializer)\nthrows Exception {\ntry (ByteArrayOutputStream baos = new ByteArrayOutputStream();\nDataOutputStream out = new DataOutputViewStreamWrapper(baos)) {\nwriteCoordinatorSerdeVersion(out);\nout.writeInt(enumeratorCheckpointSerializer.getVersion());\nbyte[] serialziedEnumChkpt =\nenumeratorCheckpointSerializer.serialize(enumeratorCheckpoint);\nout.writeInt(serialziedEnumChkpt.length);\nout.write(serialziedEnumChkpt);\nout.flush();\nreturn baos.toByteArray();\n}\n}\n/**\n* Restore the state of this source coordinator from the state bytes.\n*\n* @param bytes The checkpoint bytes that was returned from {@link\n* @throws Exception When the deserialization failed.\n*/\nprivate EnumChkT deserializeCheckpoint(byte[] bytes) throws Exception {\ntry (ByteArrayInputStream bais = new ByteArrayInputStream(bytes);\nDataInputStream in = new DataInputViewStreamWrapper(bais)) {\nfinal int coordinatorSerdeVersion = readAndVerifyCoordinatorSerdeVersion(in);\nint enumSerializerVersion = in.readInt();\nint serializedEnumChkptSize = in.readInt();\nbyte[] serializedEnumChkpt = readBytes(in, serializedEnumChkptSize);\nif (coordinatorSerdeVersion != SourceCoordinatorSerdeUtils.VERSION_0\n&& bais.available() > 0) {\nthrow new IOException(\"Unexpected trailing bytes in enumerator checkpoint data\");\n}\nreturn enumCheckpointSerializer.deserialize(enumSerializerVersion, serializedEnumChkpt);\n}\n}\nprivate void handleRequestSplitEvent(int subtask, int attemptNumber, RequestSplitEvent event) {\nLOG.info(\n\"Source {} received split request from parallel task {} (\noperatorName,\nsubtask,\nattemptNumber);\nif (!context.hasNoMoreSplits(subtask)) {\nenumerator.handleSplitRequest(subtask, event.hostName());\n}\n}\nprivate void handleSourceEvent(int subtask, int attemptNumber, SourceEvent event) {\nLOG.debug(\n\"Source {} received custom event from parallel task {} (\noperatorName,\nsubtask,\nattemptNumber,\nevent);\nif (context.isConcurrentExecutionAttemptsSupported()) {\ncheckState(\nenumerator instanceof SupportsHandleExecutionAttemptSourceEvent,\n\"The split enumerator %s must implement SupportsHandleExecutionAttemptSourceEvent \"\n+ \"to be used in concurrent execution attempts scenario (e.g. 
if \"\n+ \"speculative execution is enabled).\",\nenumerator.getClass().getCanonicalName());\n((SupportsHandleExecutionAttemptSourceEvent) enumerator)\n.handleSourceEvent(subtask, attemptNumber, event);\n} else {\nenumerator.handleSourceEvent(subtask, event);\n}\n}\nprivate void handleReaderRegistrationEvent(\nint subtask, int attemptNumber, ReaderRegistrationEvent event) {\ncheckArgument(subtask == event.subtaskId());\nLOG.info(\n\"Source {} registering reader for parallel task {} (\noperatorName,\nsubtask,\nattemptNumber,\nevent.location());\nfinal boolean subtaskReaderExisted =\ncontext.registeredReadersOfAttempts().containsKey(subtask);\ncontext.registerSourceReader(subtask, attemptNumber, event.location());\nif (!subtaskReaderExisted) {\nenumerator.addReader(event.subtaskId());\n}\n}\nprivate void handleReportedWatermark(int subtask, Watermark watermark) throws FlinkException {\nif (context.isConcurrentExecutionAttemptsSupported()) {\nthrow new FlinkException(\n\"ReportedWatermarkEvent is not supported in concurrent execution attempts \"\n+ \"scenario (e.g. if speculative execution is enabled)\");\n}\nLOG.debug(\"New reported watermark={} from subTaskId={}\", watermark, subtask);\ncheckState(watermarkAlignmentParams.isEnabled());\ncombinedWatermark\n.aggregate(subtask, watermark)\n.ifPresent(\nnewCombinedWatermark ->\ncoordinatorStore.computeIfPresent(\nwatermarkAlignmentParams.getWatermarkGroup(),\n(key, oldValue) -> {\nWatermarkAggregator watermarkAggregator =\n(WatermarkAggregator) oldValue;\nwatermarkAggregator.aggregate(\noperatorName, newCombinedWatermark);\nreturn watermarkAggregator;\n}));\n}\nprivate void ensureStarted() {\nif (!started) {\nthrow new IllegalStateException(\"The coordinator has not started yet.\");\n}\n}\nprivate static class WatermarkAggregator {\nprivate final Map watermarks = new HashMap<>();\nprivate Watermark aggregatedWatermark = new Watermark(Long.MIN_VALUE);\n/**\n* Update the {@link Watermark} for the given {@code key)}.\n*\n* @return the new updated combined {@link Watermark} if the value has changed. 
{@code\n* Optional.empty()} otherwise.\n*/\npublic Optional aggregate(T key, Watermark watermark) {\nwatermarks.put(key, watermark);\nWatermark newMinimum =\nwatermarks.values().stream()\n.min(Comparator.comparingLong(Watermark::getTimestamp))\n.orElseThrow(IllegalStateException::new);\nif (newMinimum.equals(aggregatedWatermark)) {\nreturn Optional.empty();\n} else {\naggregatedWatermark = newMinimum;\nreturn Optional.of(aggregatedWatermark);\n}\n}\npublic Set keySet() {\nreturn watermarks.keySet();\n}\npublic Watermark getAggregatedWatermark() {\nreturn aggregatedWatermark;\n}\n}\n}" + }, + { + "comment": "org.opengauss.core.v3.QueryExecutorImpl#startCopy ``` Utils.encodeUTF8(statementName); ``` It seems the client always uses UTF-8, and local testing succeeded when the data contains Chinese characters.", + "method_body": "public AbstractWALEvent decode(final ByteBuffer data, final BaseLogSequenceNumber logSequenceNumber) {\nAbstractWALEvent result;\nString dataText = StandardCharsets.UTF_8.decode(data).toString();\nif (decodeWithXid) {\nresult = decodeDataWithXid(dataText);\n} else {\nresult = decodeDataIgnoreXid(dataText);\n}\nresult.setLogSequenceNumber(logSequenceNumber);\nreturn result;\n}", + "target_code": "String dataText = StandardCharsets.UTF_8.decode(data).toString();", + "method_body_after": "public AbstractWALEvent decode(final ByteBuffer data, final BaseLogSequenceNumber logSequenceNumber) {\nAbstractWALEvent result;\nbyte[] bytes = new byte[data.remaining()];\ndata.get(bytes);\nString dataText = new String(bytes, StandardCharsets.UTF_8);\nif (decodeWithTX) {\nresult = decodeDataWithTX(dataText);\n} else {\nresult = decodeDataIgnoreTX(dataText);\n}\nresult.setLogSequenceNumber(logSequenceNumber);\nreturn result;\n}", + "context_before": "class MppdbDecodingPlugin implements DecodingPlugin {\nprivate static final Pattern PATTERN_BEGIN_XID = Pattern.compile(\"BEGIN\\\\s+(\\\\d+)\", Pattern.CASE_INSENSITIVE);\nprivate static final Pattern PATTERN_COMMIT_XID = Pattern.compile(\"COMMIT\\\\s+(\\\\d+).*CSN\\\\s+(\\\\d+)\", Pattern.CASE_INSENSITIVE);\nprivate static final ObjectMapper OBJECT_MAPPER;\nstatic {\nOBJECT_MAPPER = new ObjectMapper();\nOBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);\n}\nprivate final BaseTimestampUtils timestampUtils;\nprivate final boolean decodeWithXid;\npublic MppdbDecodingPlugin(final BaseTimestampUtils timestampUtils) {\nthis.timestampUtils = timestampUtils;\ndecodeWithXid = false;\n}\n@Override\nprivate AbstractWALEvent decodeDataWithXid(final String dataText) {\nAbstractWALEvent result;\nif (dataText.startsWith(\"{\")) {\nresult = readTableEvent(dataText);\nreturn result;\n}\nMatcher beginXidMatcher = PATTERN_BEGIN_XID.matcher(dataText);\nMatcher commitXidMatcher = PATTERN_COMMIT_XID.matcher(dataText);\nif (beginXidMatcher.matches()) {\nresult = new BeginXidEvent(Long.parseLong(beginXidMatcher.group(1)));\n} else if (commitXidMatcher.matches()) {\nresult = new CommitXidEvent(Long.parseLong(commitXidMatcher.group(1)), Long.parseLong(commitXidMatcher.group(2)));\n} else {\nresult = new PlaceholderEvent();\n}\nreturn result;\n}\nprivate AbstractWALEvent decodeDataIgnoreXid(final String dataText) {\nreturn dataText.startsWith(\"{\") ? 
readTableEvent(dataText) : new PlaceholderEvent();\n}\nprivate AbstractRowEvent readTableEvent(final String mppData) {\nMppTableData mppTableData;\ntry {\nmppTableData = OBJECT_MAPPER.readValue(mppData, MppTableData.class);\n} catch (final JsonProcessingException ex) {\nthrow new RuntimeException(ex);\n}\nAbstractRowEvent result;\nString rowEventType = mppTableData.getOpType();\nswitch (rowEventType) {\ncase IngestDataChangeType.INSERT:\nresult = readWriteRowEvent(mppTableData);\nbreak;\ncase IngestDataChangeType.UPDATE:\nresult = readUpdateRowEvent(mppTableData);\nbreak;\ncase IngestDataChangeType.DELETE:\nresult = readDeleteRowEvent(mppTableData);\nbreak;\ndefault:\nthrow new IngestException(\"Unknown rowEventType: \" + rowEventType);\n}\nString[] tableMetaData = mppTableData.getTableName().split(\"\\\\.\");\nresult.setDatabaseName(tableMetaData[0]);\nresult.setTableName(tableMetaData[1]);\nreturn result;\n}\nprivate AbstractRowEvent readWriteRowEvent(final MppTableData data) {\nWriteRowEvent result = new WriteRowEvent();\nresult.setAfterRow(getColumnDataFromMppDataEvent(data));\nreturn result;\n}\nprivate AbstractRowEvent readUpdateRowEvent(final MppTableData data) {\nUpdateRowEvent result = new UpdateRowEvent();\nresult.setAfterRow(getColumnDataFromMppDataEvent(data));\nreturn result;\n}\nprivate AbstractRowEvent readDeleteRowEvent(final MppTableData data) {\nDeleteRowEvent result = new DeleteRowEvent();\nresult.setPrimaryKeys(getDeleteColumnDataFromMppDataEvent(data));\nreturn result;\n}\nprivate List getColumnDataFromMppDataEvent(final MppTableData data) {\nList result = new ArrayList<>(data.getColumnsType().length);\nfor (int i = 0; i < data.getColumnsType().length; i++) {\nresult.add(readColumnData(data.getColumnsVal()[i], data.getColumnsType()[i]));\n}\nreturn result;\n}\nprivate List getDeleteColumnDataFromMppDataEvent(final MppTableData data) {\nList result = new ArrayList<>(data.getOldKeysType().length);\nfor (int i = 0; i < data.getOldKeysType().length; i++) {\nresult.add(readColumnData(data.getOldKeysVal()[i], data.getOldKeysType()[i]));\n}\nreturn result;\n}\nprivate Object readColumnData(final String data, final String columnType) {\nif (\"null\".equals(data)) {\nreturn null;\n}\nif (columnType.startsWith(\"numeric\")) {\nreturn new BigDecimal(data);\n}\nif (columnType.startsWith(\"bit\")) {\nreturn decodeString(data.substring(1));\n}\nswitch (columnType) {\ncase \"smallint\":\nreturn Short.parseShort(data);\ncase \"integer\":\nreturn Integer.parseInt(data);\ncase \"bigint\":\nreturn Long.parseLong(data);\ncase \"real\":\nreturn Float.parseFloat(data);\ncase \"double precision\":\nreturn Double.parseDouble(data);\ncase \"boolean\":\nreturn Boolean.parseBoolean(data);\ncase \"time without time zone\":\ncase \"time with time zone\":\ntry {\nreturn timestampUtils.toTime(null, decodeString(data));\n} catch (final SQLException ex) {\nthrow new DecodingException(ex);\n}\ncase \"date\":\nreturn Date.valueOf(decodeString(data));\ncase \"timestamp without time zone\":\ncase \"timestamp with time zone\":\ncase \"smalldatetime\":\ntry {\nreturn timestampUtils.toTimestamp(null, decodeString(data));\n} catch (final SQLException ex) {\nthrow new DecodingException(ex);\n}\ncase \"bytea\":\nreturn decodeBytea(data);\ncase \"raw\":\ncase \"reltime\":\nreturn decodePgObject(data, columnType);\ncase \"money\":\nreturn decodeMoney(data);\ncase \"interval\":\nreturn decodeInterval(data);\ncase \"character varying\":\ncase \"text\":\ncase \"character\":\ncase \"nvarchar2\":\ndefault:\nreturn 
decodeString(data);\n}\n}\nprivate static PGobject decodeInterval(final String data) {\ntry {\nreturn new PGInterval(decodeString(data));\n} catch (final SQLException ignored) {\nreturn null;\n}\n}\nprivate static PGobject decodePgObject(final String data, final String type) {\ntry {\nPGobject result = new PGobject();\nresult.setType(type);\nresult.setValue(decodeString(data));\nreturn result;\n} catch (final SQLException ignored) {\nreturn null;\n}\n}\nprivate static PGobject decodeBytea(final String data) {\ntry {\nPGobject result = new PGobject();\nresult.setType(\"bytea\");\nbyte[] decodeByte = decodeHex(decodeString(data).substring(2));\nresult.setValue(new String(decodeByte));\nreturn result;\n} catch (final SQLException ignored) {\nreturn null;\n}\n}\nprivate static String decodeMoney(final String data) {\nString result = decodeString(data);\nreturn '$' == result.charAt(0) ? result.substring(1) : result;\n}\nprivate static String decodeString(final String data) {\nif (data.length() > 1) {\nint begin = '\\'' == data.charAt(0) ? 1 : 0;\nint end = data.length() + (data.charAt(data.length() - 1) == '\\'' ? -1 : 0);\nreturn data.substring(begin, end);\n}\nreturn data;\n}\nprivate static byte[] decodeHex(final String hexString) {\nint dataLength = hexString.length();\nPreconditions.checkArgument(0 == (dataLength & 1), \"Illegal hex data `%s`\", hexString);\nif (0 == dataLength) {\nreturn new byte[0];\n}\nbyte[] result = new byte[dataLength >>> 1];\nfor (int i = 0; i < dataLength; i += 2) {\nresult[i >>> 1] = decodeHexByte(hexString, i);\n}\nreturn result;\n}\nprivate static byte decodeHexByte(final String hexString, final int index) {\nint firstHexChar = Character.digit(hexString.charAt(index), 16);\nint secondHexChar = Character.digit(hexString.charAt(index + 1), 16);\nPreconditions.checkArgument(-1 != firstHexChar && -1 != secondHexChar, \"Illegal hex byte `%s` in index `%d`\", hexString, index);\nreturn (byte) ((firstHexChar << 4) + secondHexChar);\n}\n}", + "context_after": "class MppdbDecodingPlugin implements DecodingPlugin {\nprivate static final ObjectMapper OBJECT_MAPPER;\nstatic {\nOBJECT_MAPPER = new ObjectMapper();\nOBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);\n}\nprivate final BaseTimestampUtils timestampUtils;\nprivate final boolean decodeWithTX;\npublic MppdbDecodingPlugin(final BaseTimestampUtils timestampUtils) {\nthis.timestampUtils = timestampUtils;\ndecodeWithTX = false;\n}\n@Override\nprivate AbstractWALEvent decodeDataWithTX(final String dataText) {\nAbstractWALEvent result = new PlaceholderEvent();\nif (dataText.startsWith(\"BEGIN\")) {\nint beginIndex = dataText.indexOf(\"BEGIN\") + \"BEGIN\".length() + 1;\nresult = new BeginTXEvent(Long.parseLong(dataText.substring(beginIndex)));\n} else if (dataText.startsWith(\"COMMIT\")) {\nint commitBeginIndex = dataText.indexOf(\"COMMIT\") + \"COMMIT\".length() + 1;\nint csnBeginIndex = dataText.indexOf(\"CSN\") + \"CSN\".length() + 1;\nresult = new CommitTXEvent(Long.parseLong(dataText.substring(commitBeginIndex, dataText.indexOf(\" \", commitBeginIndex))), Long.parseLong(dataText.substring(csnBeginIndex)));\n} else if (dataText.startsWith(\"{\")) {\nresult = readTableEvent(dataText);\n}\nreturn result;\n}\nprivate AbstractWALEvent decodeDataIgnoreTX(final String dataText) {\nreturn dataText.startsWith(\"{\") ? 
readTableEvent(dataText) : new PlaceholderEvent();\n}\nprivate AbstractRowEvent readTableEvent(final String mppData) {\nMppTableData mppTableData;\ntry {\nmppTableData = OBJECT_MAPPER.readValue(mppData, MppTableData.class);\n} catch (final JsonProcessingException ex) {\nthrow new RuntimeException(ex);\n}\nAbstractRowEvent result;\nString rowEventType = mppTableData.getOpType();\nswitch (rowEventType) {\ncase IngestDataChangeType.INSERT:\nresult = readWriteRowEvent(mppTableData);\nbreak;\ncase IngestDataChangeType.UPDATE:\nresult = readUpdateRowEvent(mppTableData);\nbreak;\ncase IngestDataChangeType.DELETE:\nresult = readDeleteRowEvent(mppTableData);\nbreak;\ndefault:\nthrow new IngestException(\"Unknown rowEventType: \" + rowEventType);\n}\nString[] tableMetaData = mppTableData.getTableName().split(\"\\\\.\");\nresult.setDatabaseName(tableMetaData[0]);\nresult.setTableName(tableMetaData[1]);\nreturn result;\n}\nprivate AbstractRowEvent readWriteRowEvent(final MppTableData data) {\nWriteRowEvent result = new WriteRowEvent();\nresult.setAfterRow(getColumnDataFromMppDataEvent(data));\nreturn result;\n}\nprivate AbstractRowEvent readUpdateRowEvent(final MppTableData data) {\nUpdateRowEvent result = new UpdateRowEvent();\nresult.setAfterRow(getColumnDataFromMppDataEvent(data));\nreturn result;\n}\nprivate AbstractRowEvent readDeleteRowEvent(final MppTableData data) {\nDeleteRowEvent result = new DeleteRowEvent();\nresult.setPrimaryKeys(getDeleteColumnDataFromMppDataEvent(data));\nreturn result;\n}\nprivate List getColumnDataFromMppDataEvent(final MppTableData data) {\nList result = new ArrayList<>(data.getColumnsType().length);\nfor (int i = 0; i < data.getColumnsType().length; i++) {\nresult.add(readColumnData(data.getColumnsVal()[i], data.getColumnsType()[i]));\n}\nreturn result;\n}\nprivate List getDeleteColumnDataFromMppDataEvent(final MppTableData data) {\nList result = new ArrayList<>(data.getOldKeysType().length);\nfor (int i = 0; i < data.getOldKeysType().length; i++) {\nresult.add(readColumnData(data.getOldKeysVal()[i], data.getOldKeysType()[i]));\n}\nreturn result;\n}\nprivate Object readColumnData(final String data, final String columnType) {\nif (\"null\".equals(data)) {\nreturn null;\n}\nif (columnType.startsWith(\"numeric\")) {\nreturn new BigDecimal(data);\n}\nif (columnType.startsWith(\"bit\")) {\nreturn decodeString(data.substring(1));\n}\nswitch (columnType) {\ncase \"smallint\":\nreturn Short.parseShort(data);\ncase \"integer\":\nreturn Integer.parseInt(data);\ncase \"bigint\":\nreturn Long.parseLong(data);\ncase \"real\":\nreturn Float.parseFloat(data);\ncase \"double precision\":\nreturn Double.parseDouble(data);\ncase \"boolean\":\nreturn Boolean.parseBoolean(data);\ncase \"time without time zone\":\ncase \"time with time zone\":\ntry {\nreturn timestampUtils.toTime(null, decodeString(data));\n} catch (final SQLException ex) {\nthrow new DecodingException(ex);\n}\ncase \"date\":\nreturn Date.valueOf(decodeString(data));\ncase \"timestamp without time zone\":\ncase \"timestamp with time zone\":\ncase \"smalldatetime\":\ntry {\nreturn timestampUtils.toTimestamp(null, decodeString(data));\n} catch (final SQLException ex) {\nthrow new DecodingException(ex);\n}\ncase \"bytea\":\nreturn decodeBytea(data);\ncase \"raw\":\ncase \"reltime\":\nreturn decodePgObject(data, columnType);\ncase \"money\":\nreturn decodeMoney(data);\ncase \"interval\":\nreturn decodeInterval(data);\ncase \"character varying\":\ncase \"text\":\ncase \"character\":\ncase \"nvarchar2\":\ndefault:\nreturn 
decodeString(data);\n}\n}\nprivate static PGobject decodeInterval(final String data) {\ntry {\nreturn new PGInterval(decodeString(data));\n} catch (final SQLException ignored) {\nreturn null;\n}\n}\nprivate static PGobject decodePgObject(final String data, final String type) {\ntry {\nPGobject result = new PGobject();\nresult.setType(type);\nresult.setValue(decodeString(data));\nreturn result;\n} catch (final SQLException ignored) {\nreturn null;\n}\n}\nprivate static PGobject decodeBytea(final String data) {\ntry {\nPGobject result = new PGobject();\nresult.setType(\"bytea\");\nbyte[] decodeByte = decodeHex(decodeString(data).substring(2));\nresult.setValue(new String(decodeByte));\nreturn result;\n} catch (final SQLException ignored) {\nreturn null;\n}\n}\nprivate static String decodeMoney(final String data) {\nString result = decodeString(data);\nreturn '$' == result.charAt(0) ? result.substring(1) : result;\n}\nprivate static String decodeString(final String data) {\nif (data.length() > 1) {\nint begin = '\\'' == data.charAt(0) ? 1 : 0;\nint end = data.length() + (data.charAt(data.length() - 1) == '\\'' ? -1 : 0);\nreturn data.substring(begin, end);\n}\nreturn data;\n}\nprivate static byte[] decodeHex(final String hexString) {\nint dataLength = hexString.length();\nPreconditions.checkArgument(0 == (dataLength & 1), \"Illegal hex data `%s`\", hexString);\nif (0 == dataLength) {\nreturn new byte[0];\n}\nbyte[] result = new byte[dataLength >>> 1];\nfor (int i = 0; i < dataLength; i += 2) {\nresult[i >>> 1] = decodeHexByte(hexString, i);\n}\nreturn result;\n}\nprivate static byte decodeHexByte(final String hexString, final int index) {\nint firstHexChar = Character.digit(hexString.charAt(index), 16);\nint secondHexChar = Character.digit(hexString.charAt(index + 1), 16);\nPreconditions.checkArgument(-1 != firstHexChar && -1 != secondHexChar, \"Illegal hex byte `%s` in index `%d`\", hexString, index);\nreturn (byte) ((firstHexChar << 4) + secondHexChar);\n}\n}" + }, + { + "comment": "Makes sense.", + "method_body": "private static Schema deserializeSchema(Map map, String schemaKey) {\nfinal Builder builder = Schema.newBuilder();\ndeserializeColumns(map, schemaKey, builder);\ndeserializeWatermark(map, schemaKey, builder);\ndeserializePrimaryKey(map, schemaKey, builder);\nreturn builder.build();\n}", + "target_code": "deserializePrimaryKey(map, schemaKey, builder);", + "method_body_after": "private static Schema deserializeSchema(Map map, String schemaKey) {\nfinal Builder builder = Schema.newBuilder();\ndeserializeColumns(map, schemaKey, builder);\ndeserializeWatermark(map, schemaKey, builder);\ndeserializePrimaryKey(map, schemaKey, builder);\nreturn builder.build();\n}", + "context_before": "class CatalogPropertiesUtil {\n/**\n* Flag to distinguish if a meta-object is a generic Flink object or not.\n*\n*

It is used to distinguish between Flink's generic connector discovery logic or specialized\n* catalog connectors.\n*/\npublic static final String IS_GENERIC = \"is_generic\";\n/**\n* Globally reserved prefix for catalog properties. User-defined properties should not use this\n* prefix. E.g. it is used to distinguish properties created by Hive and Flink, as Hive\n* metastore has its own properties created upon table creation and migration between different\n* versions of metastore.\n*/\npublic static final String FLINK_PROPERTY_PREFIX = \"flink.\";\n/** Serializes the given {@link ResolvedCatalogTable} into a map of string properties. */\npublic static Map serializeCatalogTable(ResolvedCatalogTable resolvedTable) {\ntry {\nfinal Map properties = new HashMap<>();\nserializeResolvedSchema(properties, resolvedTable.getResolvedSchema());\nfinal String comment = resolvedTable.getComment();\nif (comment != null && comment.length() > 0) {\nproperties.put(COMMENT, comment);\n}\nserializePartitionKeys(properties, resolvedTable.getPartitionKeys());\nproperties.putAll(resolvedTable.getOptions());\nproperties.remove(IS_GENERIC);\nreturn properties;\n} catch (Exception e) {\nthrow new CatalogException(\"Error in serializing catalog table.\", e);\n}\n}\n/** Serializes the given {@link ResolvedCatalogView} into a map of string properties. */\npublic static Map serializeCatalogView(ResolvedCatalogView resolvedTable) {\ntry {\nfinal Map properties = new HashMap<>();\nserializeResolvedSchema(properties, resolvedTable.getResolvedSchema());\nfinal String comment = resolvedTable.getComment();\nif (comment != null && comment.length() > 0) {\nproperties.put(COMMENT, comment);\n}\nproperties.putAll(resolvedTable.getOptions());\nproperties.remove(IS_GENERIC);\nreturn properties;\n} catch (Exception e) {\nthrow new CatalogException(\"Error in serializing catalog view.\", e);\n}\n}\n/** Deserializes the given map of string properties into an unresolved {@link CatalogTable}. */\npublic static CatalogTable deserializeCatalogTable(Map properties) {\nreturn deserializeCatalogTable(properties, null);\n}\n/** Deserializes the given map of string properties into an unresolved {@link CatalogTable}. 
*/\npublic static CatalogTable deserializeCatalogTable(\nMap properties, @Nullable String fallbackKey) {\ntry {\nint count = getCount(properties, SCHEMA, NAME);\nString schemaKey = SCHEMA;\nif (count == 0 && fallbackKey != null) {\nschemaKey = fallbackKey;\n}\nfinal Schema schema = deserializeSchema(properties, schemaKey);\nfinal @Nullable String comment = properties.get(COMMENT);\nfinal List partitionKeys = deserializePartitionKeys(properties);\nfinal Map options = deserializeOptions(properties, schemaKey);\nreturn CatalogTable.of(schema, comment, partitionKeys, options);\n} catch (Exception e) {\nthrow new CatalogException(\"Error in deserializing catalog table.\", e);\n}\n}\nprivate static final String SEPARATOR = \".\";\nprivate static final String SCHEMA = \"schema\";\nprivate static final String NAME = \"name\";\nprivate static final String DATA_TYPE = \"data-type\";\nprivate static final String EXPR = \"expr\";\nprivate static final String METADATA = \"metadata\";\nprivate static final String VIRTUAL = \"virtual\";\nprivate static final String PRIMARY_KEY = \"primary-key\";\nprivate static final String COLUMNS = \"columns\";\nprivate static final String PARTITION = \"partition\";\nprivate static final String KEYS = \"keys\";\nprivate static final String PARTITION_KEYS = compoundKey(PARTITION, KEYS);\nprivate static final String WATERMARK = \"watermark\";\nprivate static final String WATERMARK_ROWTIME = \"rowtime\";\nprivate static final String WATERMARK_STRATEGY = \"strategy\";\nprivate static final String WATERMARK_STRATEGY_EXPR = compoundKey(WATERMARK_STRATEGY, EXPR);\nprivate static final String WATERMARK_STRATEGY_DATA_TYPE =\ncompoundKey(WATERMARK_STRATEGY, DATA_TYPE);\nprivate static final String PRIMARY_KEY_NAME = compoundKey(PRIMARY_KEY, NAME);\nprivate static final String PRIMARY_KEY_COLUMNS = compoundKey(PRIMARY_KEY, COLUMNS);\nprivate static final String COMMENT = \"comment\";\nprivate static Map deserializeOptions(\nMap map, String schemaKey) {\nreturn map.entrySet().stream()\n.filter(\ne -> {\nfinal String key = e.getKey();\nreturn !key.startsWith(schemaKey + SEPARATOR)\n&& !key.startsWith(PARTITION_KEYS + SEPARATOR)\n&& !key.equals(COMMENT);\n})\n.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));\n}\nprivate static List deserializePartitionKeys(Map map) {\nfinal int partitionCount = getCount(map, PARTITION_KEYS, NAME);\nfinal List partitionKeys = new ArrayList<>();\nfor (int i = 0; i < partitionCount; i++) {\nfinal String partitionNameKey = compoundKey(PARTITION_KEYS, i, NAME);\nfinal String partitionName = getValue(map, partitionNameKey);\npartitionKeys.add(partitionName);\n}\nreturn partitionKeys;\n}\nprivate static void deserializePrimaryKey(\nMap map, String schemaKey, Builder builder) {\nfinal String constraintNameKey = compoundKey(schemaKey, PRIMARY_KEY_NAME);\nfinal String columnsKey = compoundKey(schemaKey, PRIMARY_KEY_COLUMNS);\nif (map.containsKey(constraintNameKey)) {\nfinal String constraintName = getValue(map, constraintNameKey);\nfinal String[] columns = getValue(map, columnsKey, s -> s.split(\",\"));\nbuilder.primaryKeyNamed(constraintName, columns);\n}\n}\nprivate static void deserializeWatermark(\nMap map, String schemaKey, Builder builder) {\nfinal String watermarkKey = compoundKey(schemaKey, WATERMARK);\nfinal int watermarkCount = getCount(map, watermarkKey, WATERMARK_ROWTIME);\nfor (int i = 0; i < watermarkCount; i++) {\nfinal String rowtimeKey = compoundKey(watermarkKey, i, WATERMARK_ROWTIME);\nfinal String exprKey = 
compoundKey(watermarkKey, i, WATERMARK_STRATEGY_EXPR);\nfinal String rowtime = getValue(map, rowtimeKey);\nfinal String expr = getValue(map, exprKey);\nbuilder.watermark(rowtime, expr);\n}\n}\nprivate static void deserializeColumns(\nMap map, String schemaKey, Builder builder) {\nfinal int fieldCount = getCount(map, schemaKey, NAME);\nfor (int i = 0; i < fieldCount; i++) {\nfinal String nameKey = compoundKey(schemaKey, i, NAME);\nfinal String dataTypeKey = compoundKey(schemaKey, i, DATA_TYPE);\nfinal String exprKey = compoundKey(schemaKey, i, EXPR);\nfinal String metadataKey = compoundKey(schemaKey, i, METADATA);\nfinal String virtualKey = compoundKey(schemaKey, i, VIRTUAL);\nfinal String commentKey = compoundKey(schemaKey, i, COMMENT);\nfinal String name = getValue(map, nameKey);\nif (map.containsKey(exprKey)) {\nfinal String expr = getValue(map, exprKey);\nbuilder.columnByExpression(name, expr);\n}\nelse if (map.containsKey(metadataKey)) {\nfinal String metadata = getValue(map, metadataKey);\nfinal String dataType = getValue(map, dataTypeKey);\nfinal boolean isVirtual = getValue(map, virtualKey, Boolean::parseBoolean);\nif (metadata.equals(name)) {\nbuilder.columnByMetadata(name, dataType, null, isVirtual);\n} else {\nbuilder.columnByMetadata(name, dataType, metadata, isVirtual);\n}\n}\nelse {\nfinal String dataType = getValue(map, dataTypeKey);\nbuilder.column(name, dataType);\n}\nif (map.containsKey(commentKey)) {\nfinal String comment = getValue(map, commentKey);\nbuilder.withComment(comment);\n}\n}\n}\nprivate static void serializePartitionKeys(Map map, List keys) {\ncheckNotNull(keys);\nputIndexedProperties(\nmap,\nPARTITION_KEYS,\nCollections.singletonList(NAME),\nkeys.stream().map(Collections::singletonList).collect(Collectors.toList()));\n}\nprivate static void serializeResolvedSchema(Map map, ResolvedSchema schema) {\ncheckNotNull(schema);\nserializeColumns(map, schema.getColumns());\nserializeWatermarkSpecs(map, schema.getWatermarkSpecs());\nschema.getPrimaryKey().ifPresent(pk -> serializePrimaryKey(map, pk));\n}\nprivate static void serializePrimaryKey(Map map, UniqueConstraint constraint) {\nmap.put(compoundKey(SCHEMA, PRIMARY_KEY_NAME), constraint.getName());\nmap.put(\ncompoundKey(SCHEMA, PRIMARY_KEY_COLUMNS),\nString.join(\",\", constraint.getColumns()));\n}\nprivate static void serializeWatermarkSpecs(\nMap map, List specs) {\nif (!specs.isEmpty()) {\nfinal List> watermarkValues = new ArrayList<>();\nfor (WatermarkSpec spec : specs) {\nwatermarkValues.add(\nArrays.asList(\nspec.getRowtimeAttribute(),\nserializeResolvedExpression(spec.getWatermarkExpression()),\nserializeDataType(\nspec.getWatermarkExpression().getOutputDataType())));\n}\nputIndexedProperties(\nmap,\ncompoundKey(SCHEMA, WATERMARK),\nArrays.asList(\nWATERMARK_ROWTIME,\nWATERMARK_STRATEGY_EXPR,\nWATERMARK_STRATEGY_DATA_TYPE),\nwatermarkValues);\n}\n}\nprivate static void serializeColumns(Map map, List columns) {\nfinal String[] names = serializeColumnNames(columns);\nfinal String[] dataTypes = serializeColumnDataTypes(columns);\nfinal String[] expressions = serializeColumnComputations(columns);\nfinal String[] metadata = serializeColumnMetadataKeys(columns);\nfinal String[] virtual = serializeColumnVirtuality(columns);\nfinal String[] comments = serializeColumnComments(columns);\nfinal List> values = new ArrayList<>();\nfor (int i = 0; i < columns.size(); i++) 
{\nvalues.add(\nArrays.asList(\nnames[i],\ndataTypes[i],\nexpressions[i],\nmetadata[i],\nvirtual[i],\ncomments[i]));\n}\nputIndexedProperties(\nmap,\nSCHEMA,\nArrays.asList(NAME, DATA_TYPE, EXPR, METADATA, VIRTUAL, COMMENT),\nvalues);\n}\nprivate static String serializeResolvedExpression(ResolvedExpression resolvedExpression) {\ntry {\nreturn resolvedExpression.asSerializableString();\n} catch (TableException e) {\nthrow new TableException(\nString.format(\n\"Expression '%s' cannot be stored in a durable catalog. \"\n+ \"Currently, only SQL expressions have a well-defined string \"\n+ \"representation that is used to serialize a catalog object \"\n+ \"into a map of string-based properties.\",\nresolvedExpression.asSummaryString()),\ne);\n}\n}\nprivate static String serializeDataType(DataType dataType) {\nfinal LogicalType type = dataType.getLogicalType();\ntry {\nreturn type.asSerializableString();\n} catch (TableException e) {\nthrow new TableException(\nString.format(\n\"Data type '%s' cannot be stored in a durable catalog. Only data types \"\n+ \"that have a well-defined string representation can be used \"\n+ \"when serializing a catalog object into a map of string-based \"\n+ \"properties. This excludes anonymously defined, unregistered \"\n+ \"types such as structured types in particular.\",\ntype.asSummaryString()),\ne);\n}\n}\nprivate static String[] serializeColumnNames(List columns) {\nreturn columns.stream().map(Column::getName).toArray(String[]::new);\n}\nprivate static String[] serializeColumnDataTypes(List columns) {\nreturn columns.stream()\n.map(Column::getDataType)\n.map(CatalogPropertiesUtil::serializeDataType)\n.toArray(String[]::new);\n}\nprivate static String[] serializeColumnComputations(List columns) {\nreturn columns.stream()\n.map(\ncolumn -> {\nif (column instanceof ComputedColumn) {\nfinal ComputedColumn c = (ComputedColumn) column;\nreturn serializeResolvedExpression(c.getExpression());\n}\nreturn null;\n})\n.toArray(String[]::new);\n}\nprivate static String[] serializeColumnMetadataKeys(List columns) {\nreturn columns.stream()\n.map(\ncolumn -> {\nif (column instanceof MetadataColumn) {\nfinal MetadataColumn c = (MetadataColumn) column;\nreturn c.getMetadataKey().orElse(c.getName());\n}\nreturn null;\n})\n.toArray(String[]::new);\n}\nprivate static String[] serializeColumnVirtuality(List columns) {\nreturn columns.stream()\n.map(\ncolumn -> {\nif (column instanceof MetadataColumn) {\nfinal MetadataColumn c = (MetadataColumn) column;\nreturn Boolean.toString(c.isVirtual());\n}\nreturn null;\n})\n.toArray(String[]::new);\n}\nprivate static String[] serializeColumnComments(List columns) {\nreturn columns.stream().map(c -> c.getComment().orElse(null)).toArray(String[]::new);\n}\n/**\n* Adds an indexed sequence of properties (with sub-properties) under a common key. It supports\n* the property's value to be null, in which case it would be ignored. The sub-properties should\n* at least have one non-null value.\n*\n*
For example:\n*\n*\n*     schema.fields.0.type = INT, schema.fields.0.name = test\n*     schema.fields.1.type = LONG, schema.fields.1.name = test2\n*     schema.fields.2.type = LONG, schema.fields.2.name = test3, schema.fields.2.expr = test2 + 1\n* \n*\n*The arity of each subKeyValues must match the arity of propertyKeys.\n*/\nprivate static void putIndexedProperties(\nMap map,\nString key,\nList subKeys,\nList> subKeyValues) {\ncheckNotNull(key);\ncheckNotNull(subKeys);\ncheckNotNull(subKeyValues);\nfor (int idx = 0; idx < subKeyValues.size(); idx++) {\nfinal List values = subKeyValues.get(idx);\nif (values == null || values.size() != subKeys.size()) {\nthrow new IllegalArgumentException("Values must have same arity as keys.");\n}\nif (values.stream().allMatch(Objects::isNull)) {\nthrow new IllegalArgumentException("Values must have at least one non-null value.");\n}\nfor (int keyIdx = 0; keyIdx < values.size(); keyIdx++) {\nString value = values.get(keyIdx);\nif (value != null) {\nmap.put(compoundKey(key, idx, subKeys.get(keyIdx)), values.get(keyIdx));\n}\n}\n}\n}\n/**\n* Extracts the property count under the given key and suffix.\n*\n*For example:\n*\n*\n*     schema.0.name, schema.1.name -> 2\n* 
\n*/\nprivate static int getCount(Map map, String key, String suffix) {\nfinal String escapedKey = Pattern.quote(key);\nfinal String escapedSuffix = Pattern.quote(suffix);\nfinal String escapedSeparator = Pattern.quote(SEPARATOR);\nfinal Pattern pattern =\nPattern.compile(\n\"^\"\n+ escapedKey\n+ escapedSeparator\n+ \"(\\\\d+)\"\n+ escapedSeparator\n+ escapedSuffix);\nfinal IntStream indexes =\nmap.keySet().stream()\n.flatMapToInt(\nk -> {\nfinal Matcher matcher = pattern.matcher(k);\nif (matcher.find()) {\nreturn IntStream.of(Integer.parseInt(matcher.group(1)));\n}\nreturn IntStream.empty();\n});\nreturn indexes.max().orElse(-1) + 1;\n}\nprivate static String getValue(Map map, String key) {\nreturn getValue(map, key, Function.identity());\n}\nprivate static T getValue(Map map, String key, Function parser) {\nfinal String value = map.get(key);\nif (value == null) {\nthrow new IllegalArgumentException(\nString.format(\"Could not find property key '%s'.\", key));\n}\ntry {\nreturn parser.apply(value);\n} catch (Exception e) {\nthrow new IllegalArgumentException(\nString.format(\"Could not parse value for property key '%s': %s\", key, value));\n}\n}\nprivate static String compoundKey(Object... components) {\nreturn Stream.of(components).map(Object::toString).collect(Collectors.joining(SEPARATOR));\n}\nprivate CatalogPropertiesUtil() {\n}\n}", + "context_after": "class CatalogPropertiesUtil {\n/**\n* Flag to distinguish if a meta-object is a generic Flink object or not.\n*\n*
It is used to distinguish between Flink's generic connector discovery logic or specialized\n* catalog connectors.\n*/\npublic static final String IS_GENERIC = \"is_generic\";\n/**\n* Globally reserved prefix for catalog properties. User-defined properties should not use this\n* prefix. E.g. it is used to distinguish properties created by Hive and Flink, as Hive\n* metastore has its own properties created upon table creation and migration between different\n* versions of metastore.\n*/\npublic static final String FLINK_PROPERTY_PREFIX = \"flink.\";\n/** Serializes the given {@link ResolvedCatalogTable} into a map of string properties. */\npublic static Map serializeCatalogTable(ResolvedCatalogTable resolvedTable) {\ntry {\nfinal Map properties = new HashMap<>();\nserializeResolvedSchema(properties, resolvedTable.getResolvedSchema());\nfinal String comment = resolvedTable.getComment();\nif (comment != null && comment.length() > 0) {\nproperties.put(COMMENT, comment);\n}\nserializePartitionKeys(properties, resolvedTable.getPartitionKeys());\nproperties.putAll(resolvedTable.getOptions());\nproperties.remove(IS_GENERIC);\nreturn properties;\n} catch (Exception e) {\nthrow new CatalogException(\"Error in serializing catalog table.\", e);\n}\n}\n/** Serializes the given {@link ResolvedCatalogView} into a map of string properties. */\npublic static Map serializeCatalogView(ResolvedCatalogView resolvedView) {\ntry {\nfinal Map properties = new HashMap<>();\nserializeResolvedSchema(properties, resolvedView.getResolvedSchema());\nfinal String comment = resolvedView.getComment();\nif (comment != null && comment.length() > 0) {\nproperties.put(COMMENT, comment);\n}\nproperties.putAll(resolvedView.getOptions());\nproperties.remove(IS_GENERIC);\nreturn properties;\n} catch (Exception e) {\nthrow new CatalogException(\"Error in serializing catalog view.\", e);\n}\n}\n/** Deserializes the given map of string properties into an unresolved {@link CatalogTable}. */\npublic static CatalogTable deserializeCatalogTable(Map properties) {\nreturn deserializeCatalogTable(properties, null);\n}\n/**\n* Deserializes the given map of string properties into an unresolved {@link CatalogTable}.\n*\n* @param properties The properties to deserialize from\n* @param fallbackKey The fallback key to get the schema properties. 
This is meant to support\n* the old table (1.10) deserialization\n* @return\n*/\npublic static CatalogTable deserializeCatalogTable(\nMap properties, @Nullable String fallbackKey) {\ntry {\nint count = getCount(properties, SCHEMA, NAME);\nString schemaKey = SCHEMA;\nif (count == 0 && fallbackKey != null) {\nschemaKey = fallbackKey;\n}\nfinal Schema schema = deserializeSchema(properties, schemaKey);\nfinal @Nullable String comment = properties.get(COMMENT);\nfinal List partitionKeys = deserializePartitionKeys(properties);\nfinal Map options = deserializeOptions(properties, schemaKey);\nreturn CatalogTable.of(schema, comment, partitionKeys, options);\n} catch (Exception e) {\nthrow new CatalogException(\"Error in deserializing catalog table.\", e);\n}\n}\nprivate static final String SEPARATOR = \".\";\nprivate static final String SCHEMA = \"schema\";\nprivate static final String NAME = \"name\";\nprivate static final String DATA_TYPE = \"data-type\";\nprivate static final String EXPR = \"expr\";\nprivate static final String METADATA = \"metadata\";\nprivate static final String VIRTUAL = \"virtual\";\nprivate static final String PRIMARY_KEY = \"primary-key\";\nprivate static final String COLUMNS = \"columns\";\nprivate static final String PARTITION = \"partition\";\nprivate static final String KEYS = \"keys\";\nprivate static final String PARTITION_KEYS = compoundKey(PARTITION, KEYS);\nprivate static final String WATERMARK = \"watermark\";\nprivate static final String WATERMARK_ROWTIME = \"rowtime\";\nprivate static final String WATERMARK_STRATEGY = \"strategy\";\nprivate static final String WATERMARK_STRATEGY_EXPR = compoundKey(WATERMARK_STRATEGY, EXPR);\nprivate static final String WATERMARK_STRATEGY_DATA_TYPE =\ncompoundKey(WATERMARK_STRATEGY, DATA_TYPE);\nprivate static final String PRIMARY_KEY_NAME = compoundKey(PRIMARY_KEY, NAME);\nprivate static final String PRIMARY_KEY_COLUMNS = compoundKey(PRIMARY_KEY, COLUMNS);\nprivate static final String COMMENT = \"comment\";\nprivate static Map deserializeOptions(\nMap map, String schemaKey) {\nreturn map.entrySet().stream()\n.filter(\ne -> {\nfinal String key = e.getKey();\nreturn !key.startsWith(schemaKey + SEPARATOR)\n&& !key.startsWith(PARTITION_KEYS + SEPARATOR)\n&& !key.equals(COMMENT);\n})\n.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));\n}\nprivate static List deserializePartitionKeys(Map map) {\nfinal int partitionCount = getCount(map, PARTITION_KEYS, NAME);\nfinal List partitionKeys = new ArrayList<>();\nfor (int i = 0; i < partitionCount; i++) {\nfinal String partitionNameKey = compoundKey(PARTITION_KEYS, i, NAME);\nfinal String partitionName = getValue(map, partitionNameKey);\npartitionKeys.add(partitionName);\n}\nreturn partitionKeys;\n}\nprivate static void deserializePrimaryKey(\nMap map, String schemaKey, Builder builder) {\nfinal String constraintNameKey = compoundKey(schemaKey, PRIMARY_KEY_NAME);\nfinal String columnsKey = compoundKey(schemaKey, PRIMARY_KEY_COLUMNS);\nif (map.containsKey(constraintNameKey)) {\nfinal String constraintName = getValue(map, constraintNameKey);\nfinal String[] columns = getValue(map, columnsKey, s -> s.split(\",\"));\nbuilder.primaryKeyNamed(constraintName, columns);\n}\n}\nprivate static void deserializeWatermark(\nMap map, String schemaKey, Builder builder) {\nfinal String watermarkKey = compoundKey(schemaKey, WATERMARK);\nfinal int watermarkCount = getCount(map, watermarkKey, WATERMARK_ROWTIME);\nfor (int i = 0; i < watermarkCount; i++) {\nfinal String rowtimeKey = 
compoundKey(watermarkKey, i, WATERMARK_ROWTIME);\nfinal String exprKey = compoundKey(watermarkKey, i, WATERMARK_STRATEGY_EXPR);\nfinal String rowtime = getValue(map, rowtimeKey);\nfinal String expr = getValue(map, exprKey);\nbuilder.watermark(rowtime, expr);\n}\n}\nprivate static void deserializeColumns(\nMap map, String schemaKey, Builder builder) {\nfinal int fieldCount = getCount(map, schemaKey, NAME);\nfor (int i = 0; i < fieldCount; i++) {\nfinal String nameKey = compoundKey(schemaKey, i, NAME);\nfinal String dataTypeKey = compoundKey(schemaKey, i, DATA_TYPE);\nfinal String exprKey = compoundKey(schemaKey, i, EXPR);\nfinal String metadataKey = compoundKey(schemaKey, i, METADATA);\nfinal String virtualKey = compoundKey(schemaKey, i, VIRTUAL);\nfinal String commentKey = compoundKey(schemaKey, i, COMMENT);\nfinal String name = getValue(map, nameKey);\nif (map.containsKey(exprKey)) {\nfinal String expr = getValue(map, exprKey);\nbuilder.columnByExpression(name, expr);\n}\nelse if (map.containsKey(metadataKey)) {\nfinal String metadata = getValue(map, metadataKey);\nfinal String dataType = getValue(map, dataTypeKey);\nfinal boolean isVirtual = getValue(map, virtualKey, Boolean::parseBoolean);\nif (metadata.equals(name)) {\nbuilder.columnByMetadata(name, dataType, null, isVirtual);\n} else {\nbuilder.columnByMetadata(name, dataType, metadata, isVirtual);\n}\n}\nelse {\nfinal String dataType = getValue(map, dataTypeKey);\nbuilder.column(name, dataType);\n}\nif (map.containsKey(commentKey)) {\nfinal String comment = getValue(map, commentKey);\nbuilder.withComment(comment);\n}\n}\n}\nprivate static void serializePartitionKeys(Map map, List keys) {\ncheckNotNull(keys);\nputIndexedProperties(\nmap,\nPARTITION_KEYS,\nCollections.singletonList(NAME),\nkeys.stream().map(Collections::singletonList).collect(Collectors.toList()));\n}\nprivate static void serializeResolvedSchema(Map map, ResolvedSchema schema) {\ncheckNotNull(schema);\nserializeColumns(map, schema.getColumns());\nserializeWatermarkSpecs(map, schema.getWatermarkSpecs());\nschema.getPrimaryKey().ifPresent(pk -> serializePrimaryKey(map, pk));\n}\nprivate static void serializePrimaryKey(Map map, UniqueConstraint constraint) {\nmap.put(compoundKey(SCHEMA, PRIMARY_KEY_NAME), constraint.getName());\nmap.put(\ncompoundKey(SCHEMA, PRIMARY_KEY_COLUMNS),\nString.join(\",\", constraint.getColumns()));\n}\nprivate static void serializeWatermarkSpecs(\nMap map, List specs) {\nif (!specs.isEmpty()) {\nfinal List> watermarkValues = new ArrayList<>();\nfor (WatermarkSpec spec : specs) {\nwatermarkValues.add(\nArrays.asList(\nspec.getRowtimeAttribute(),\nserializeResolvedExpression(spec.getWatermarkExpression()),\nserializeDataType(\nspec.getWatermarkExpression().getOutputDataType())));\n}\nputIndexedProperties(\nmap,\ncompoundKey(SCHEMA, WATERMARK),\nArrays.asList(\nWATERMARK_ROWTIME,\nWATERMARK_STRATEGY_EXPR,\nWATERMARK_STRATEGY_DATA_TYPE),\nwatermarkValues);\n}\n}\nprivate static void serializeColumns(Map map, List columns) {\nfinal String[] names = serializeColumnNames(columns);\nfinal String[] dataTypes = serializeColumnDataTypes(columns);\nfinal String[] expressions = serializeColumnComputations(columns);\nfinal String[] metadata = serializeColumnMetadataKeys(columns);\nfinal String[] virtual = serializeColumnVirtuality(columns);\nfinal String[] comments = serializeColumnComments(columns);\nfinal List> values = new ArrayList<>();\nfor (int i = 0; i < columns.size(); i++) 
{\nvalues.add(\nArrays.asList(\nnames[i],\ndataTypes[i],\nexpressions[i],\nmetadata[i],\nvirtual[i],\ncomments[i]));\n}\nputIndexedProperties(\nmap,\nSCHEMA,\nArrays.asList(NAME, DATA_TYPE, EXPR, METADATA, VIRTUAL, COMMENT),\nvalues);\n}\nprivate static String serializeResolvedExpression(ResolvedExpression resolvedExpression) {\ntry {\nreturn resolvedExpression.asSerializableString();\n} catch (TableException e) {\nthrow new TableException(\nString.format(\n\"Expression '%s' cannot be stored in a durable catalog. \"\n+ \"Currently, only SQL expressions have a well-defined string \"\n+ \"representation that is used to serialize a catalog object \"\n+ \"into a map of string-based properties.\",\nresolvedExpression.asSummaryString()),\ne);\n}\n}\nprivate static String serializeDataType(DataType dataType) {\nfinal LogicalType type = dataType.getLogicalType();\ntry {\nreturn type.asSerializableString();\n} catch (TableException e) {\nthrow new TableException(\nString.format(\n\"Data type '%s' cannot be stored in a durable catalog. Only data types \"\n+ \"that have a well-defined string representation can be used \"\n+ \"when serializing a catalog object into a map of string-based \"\n+ \"properties. This excludes anonymously defined, unregistered \"\n+ \"types such as structured types in particular.\",\ntype.asSummaryString()),\ne);\n}\n}\nprivate static String[] serializeColumnNames(List columns) {\nreturn columns.stream().map(Column::getName).toArray(String[]::new);\n}\nprivate static String[] serializeColumnDataTypes(List columns) {\nreturn columns.stream()\n.map(Column::getDataType)\n.map(CatalogPropertiesUtil::serializeDataType)\n.toArray(String[]::new);\n}\nprivate static String[] serializeColumnComputations(List columns) {\nreturn columns.stream()\n.map(\ncolumn -> {\nif (column instanceof ComputedColumn) {\nfinal ComputedColumn c = (ComputedColumn) column;\nreturn serializeResolvedExpression(c.getExpression());\n}\nreturn null;\n})\n.toArray(String[]::new);\n}\nprivate static String[] serializeColumnMetadataKeys(List columns) {\nreturn columns.stream()\n.map(\ncolumn -> {\nif (column instanceof MetadataColumn) {\nfinal MetadataColumn c = (MetadataColumn) column;\nreturn c.getMetadataKey().orElse(c.getName());\n}\nreturn null;\n})\n.toArray(String[]::new);\n}\nprivate static String[] serializeColumnVirtuality(List columns) {\nreturn columns.stream()\n.map(\ncolumn -> {\nif (column instanceof MetadataColumn) {\nfinal MetadataColumn c = (MetadataColumn) column;\nreturn Boolean.toString(c.isVirtual());\n}\nreturn null;\n})\n.toArray(String[]::new);\n}\nprivate static String[] serializeColumnComments(List columns) {\nreturn columns.stream().map(c -> c.getComment().orElse(null)).toArray(String[]::new);\n}\n/**\n* Adds an indexed sequence of properties (with sub-properties) under a common key. It supports\n* the property's value to be null, in which case it would be ignored. The sub-properties should\n* at least have one non-null value.\n*\n*
For example:\n*\n*\n*     schema.fields.0.type = INT, schema.fields.0.name = test\n*     schema.fields.1.type = LONG, schema.fields.1.name = test2\n*     schema.fields.2.type = LONG, schema.fields.2.name = test3, schema.fields.2.expr = test2 + 1\n* \n*\n*The arity of each subKeyValues must match the arity of propertyKeys.\n*/\nprivate static void putIndexedProperties(\nMap map,\nString key,\nList subKeys,\nList> subKeyValues) {\ncheckNotNull(key);\ncheckNotNull(subKeys);\ncheckNotNull(subKeyValues);\nfor (int idx = 0; idx < subKeyValues.size(); idx++) {\nfinal List values = subKeyValues.get(idx);\nif (values == null || values.size() != subKeys.size()) {\nthrow new IllegalArgumentException("Values must have same arity as keys.");\n}\nif (values.stream().allMatch(Objects::isNull)) {\nthrow new IllegalArgumentException("Values must have at least one non-null value.");\n}\nfor (int keyIdx = 0; keyIdx < values.size(); keyIdx++) {\nString value = values.get(keyIdx);\nif (value != null) {\nmap.put(compoundKey(key, idx, subKeys.get(keyIdx)), values.get(keyIdx));\n}\n}\n}\n}\n/**\n* Extracts the property count under the given key and suffix.\n*\n*For example:\n*\n*\n*     schema.0.name, schema.1.name -> 2\n* 
\n*/\nprivate static int getCount(Map map, String key, String suffix) {\nfinal String escapedKey = Pattern.quote(key);\nfinal String escapedSuffix = Pattern.quote(suffix);\nfinal String escapedSeparator = Pattern.quote(SEPARATOR);\nfinal Pattern pattern =\nPattern.compile(\n\"^\"\n+ escapedKey\n+ escapedSeparator\n+ \"(\\\\d+)\"\n+ escapedSeparator\n+ escapedSuffix);\nfinal IntStream indexes =\nmap.keySet().stream()\n.flatMapToInt(\nk -> {\nfinal Matcher matcher = pattern.matcher(k);\nif (matcher.find()) {\nreturn IntStream.of(Integer.parseInt(matcher.group(1)));\n}\nreturn IntStream.empty();\n});\nreturn indexes.max().orElse(-1) + 1;\n}\nprivate static String getValue(Map map, String key) {\nreturn getValue(map, key, Function.identity());\n}\nprivate static T getValue(Map map, String key, Function parser) {\nfinal String value = map.get(key);\nif (value == null) {\nthrow new IllegalArgumentException(\nString.format(\"Could not find property key '%s'.\", key));\n}\ntry {\nreturn parser.apply(value);\n} catch (Exception e) {\nthrow new IllegalArgumentException(\nString.format(\"Could not parse value for property key '%s': %s\", key, value));\n}\n}\nprivate static String compoundKey(Object... components) {\nreturn Stream.of(components).map(Object::toString).collect(Collectors.joining(SEPARATOR));\n}\nprivate CatalogPropertiesUtil() {\n}\n}" + }, + { + "comment": "Would it be possible to assert somehow that the `q_session` value has not been created by the default state manager ? May be the session cookie value can be posted to the test resource and used there to return a JSON rep of the stored record and here in the test we can check the id token/access token/rt values are not null or may be something simpler ?", + "method_body": "public void testCodeFlow() throws IOException {\ntry (final WebClient webClient = createWebClient()) {\nTextPage textPage = webClient.getPage(url.toString() + \"unprotected\");\nassertEquals(\"unprotected\", textPage.getContent());\nHtmlPage page;\npage = webClient.getPage(url.toString() + \"protected\");\nassertEquals(\"Sign in to quarkus\", page.getTitleText());\nHtmlForm loginForm = page.getForms().get(0);\nloginForm.getInputByName(\"username\").setValueAttribute(\"alice\");\nloginForm.getInputByName(\"password\").setValueAttribute(\"alice\");\ntextPage = loginForm.getInputByName(\"login\").click();\nassertEquals(\"alice\", textPage.getContent());\nassertTokenStateCount(1);\nwebClient.getOptions().setRedirectEnabled(false);\nWebResponse webResponse = webClient\n.loadWebResponse(new WebRequest(URI.create(url.toString() + \"protected/logout\").toURL()));\nassertEquals(302, webResponse.getStatusCode());\nassertNull(webClient.getCookieManager().getCookie(\"q_session\"));\nwebClient.getCookieManager().clearCookies();\nassertTokenStateCount(0);\n}\n}", + "target_code": "assertTokenStateCount(1);", + "method_body_after": "public void testCodeFlow() throws IOException {\ntry (final WebClient webClient = createWebClient()) {\nTextPage textPage = webClient.getPage(url.toString() + \"unprotected\");\nassertEquals(\"unprotected\", textPage.getContent());\nHtmlPage page;\npage = webClient.getPage(url.toString() + \"protected\");\nassertEquals(\"Sign in to quarkus\", page.getTitleText());\nHtmlForm loginForm = page.getForms().get(0);\nloginForm.getInputByName(\"username\").setValueAttribute(\"alice\");\nloginForm.getInputByName(\"password\").setValueAttribute(\"alice\");\ntextPage = loginForm.getInputByName(\"login\").click();\nassertEquals(\"alice\", 
textPage.getContent());\nassertTokenStateCount(1);\nwebClient.getOptions().setRedirectEnabled(false);\nWebResponse webResponse = webClient\n.loadWebResponse(new WebRequest(URI.create(url.toString() + \"protected/logout\").toURL()));\nassertEquals(302, webResponse.getStatusCode());\nassertNull(webClient.getCookieManager().getCookie(\"q_session\"));\nwebClient.getCookieManager().clearCookies();\nassertTokenStateCount(0);\n}\n}", + "context_before": "class AbstractDbTokenStateManagerTest {\nprotected static QuarkusUnitTest createQuarkusUnitTest(String reactiveSqlClientExtension) {\nreturn createQuarkusUnitTest(reactiveSqlClientExtension, null);\n}\nprotected static QuarkusUnitTest createQuarkusUnitTest(String reactiveSqlClientExtension,\nConsumer customizer) {\nreturn new QuarkusUnitTest()\n.withApplicationRoot((jar) -> {\njar\n.addClasses(ProtectedResource.class, UnprotectedResource.class, PublicResource.class)\n.addAsResource(\"application.properties\");\nif (customizer != null) {\ncustomizer.accept(jar);\n}\n})\n.setForcedDependencies(\nList.of(Dependency.of(\"io.quarkus\", reactiveSqlClientExtension, Version.getVersion())));\n}\n@TestHTTPResource\nURL url;\n@Test\nprotected static void assertTokenStateCount(Integer tokenStateCount) {\nRestAssured\n.given()\n.get(\"public/db-state-manager-table-content\")\n.then()\n.statusCode(200)\n.body(Matchers.is(tokenStateCount.toString()));\n}\nprotected static WebClient createWebClient() {\nWebClient webClient = new WebClient();\nwebClient.setCssErrorHandler(new SilentCssErrorHandler());\nreturn webClient;\n}\n}", + "context_after": "class AbstractDbTokenStateManagerTest {\nprotected static QuarkusUnitTest createQuarkusUnitTest(String reactiveSqlClientExtension) {\nreturn createQuarkusUnitTest(reactiveSqlClientExtension, null);\n}\nprotected static QuarkusUnitTest createQuarkusUnitTest(String reactiveSqlClientExtension,\nConsumer customizer) {\nreturn new QuarkusUnitTest()\n.withApplicationRoot((jar) -> {\njar\n.addClasses(ProtectedResource.class, UnprotectedResource.class, PublicResource.class)\n.addAsResource(\"application.properties\");\nif (customizer != null) {\ncustomizer.accept(jar);\n}\n})\n.setForcedDependencies(\nList.of(Dependency.of(\"io.quarkus\", reactiveSqlClientExtension, Version.getVersion())));\n}\n@TestHTTPResource\nURL url;\n@Test\nprotected static void assertTokenStateCount(Integer tokenStateCount) {\nRestAssured\n.given()\n.get(\"public/db-state-manager-table-content\")\n.then()\n.statusCode(200)\n.body(Matchers.is(tokenStateCount.toString()));\n}\nprotected static WebClient createWebClient() {\nWebClient webClient = new WebClient();\nwebClient.setCssErrorHandler(new SilentCssErrorHandler());\nreturn webClient;\n}\n}" + }, + { + "comment": "It can accept only one argument, the output of array_generate(3) is [1,2,3]", + "method_body": "public Void visitFunctionCall(FunctionCallExpr node, Scope scope) {\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\nif (node.isNondeterministicBuiltinFnName()) {\nExprId exprId = analyzeState.getNextNondeterministicId();\nnode.setNondeterministicId(exprId);\n}\nFunction fn;\nString fnName = node.getFnName().getFunction();\nif (fnName.equals(FunctionSet.COUNT) && node.getParams().isDistinct()) {\nfn = Expr.getBuiltinFunction(FunctionSet.COUNT, new Type[] {argumentTypes[0]},\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n} else if (fnName.equals(FunctionSet.EXCHANGE_BYTES) || fnName.equals(FunctionSet.EXCHANGE_SPEED)) {\nfn = 
Expr.getBuiltinFunction(fnName, argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nfn.setArgsType(argumentTypes);\nfn.setIsNullable(false);\n} else if (fnName.equals(FunctionSet.TIME_SLICE) || fnName.equals(FunctionSet.DATE_SLICE)) {\nif (!(node.getChild(1) instanceof IntLiteral)) {\nthrow new SemanticException(\nfnName + \" requires second parameter must be a constant interval\");\n}\nif (((IntLiteral) node.getChild(1)).getValue() <= 0) {\nthrow new SemanticException(\nfnName + \" requires second parameter must be greater than 0\");\n}\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n} else if (FunctionSet.decimalRoundFunctions.contains(fnName) ||\nArrays.stream(argumentTypes).anyMatch(Type::isDecimalV3)) {\nif (FunctionSet.varianceFunctions.contains(fnName)) {\nType[] doubleArgTypes = Stream.of(argumentTypes).map(t -> Type.DOUBLE).toArray(Type[]::new);\nfn = Expr.getBuiltinFunction(fnName, doubleArgTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n} else {\nfn = getDecimalV3Function(node, argumentTypes);\n}\n} else if (Arrays.stream(argumentTypes).anyMatch(arg -> arg.matchesType(Type.TIME))) {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn instanceof AggregateFunction) {\nthrow new SemanticException(\"Time Type can not used in %s function\",\nfnName);\n}\n} else if (FunctionSet.STR_TO_DATE.equals(fnName)) {\nfn = getStrToDateFunction(node, argumentTypes);\n} else if (fnName.equals(FunctionSet.ARRAY_FILTER)) {\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 array inputs or lambda functions.\");\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be an array or a lambda function.\");\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be an array or a lambda function.\");\n}\nif (!Type.canCastTo(node.getChild(1).getType(), Type.ARRAY_BOOLEAN)) {\nthrow new SemanticException(\"The second input of array_filter \" +\nnode.getChild(1).getType().toString() + \" can't cast to ARRAY\");\n}\nnode.setChild(1, new CastExpr(Type.ARRAY_BOOLEAN, node.getChild(1)));\nargumentTypes[1] = Type.ARRAY_BOOLEAN;\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n} else if (fnName.equals(FunctionSet.ARRAY_SORTBY)) {\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 array inputs or lambda functions.\");\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be an array or a lambda function.\");\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be an array or a lambda function.\");\n}\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n} else if (fnName.equals(FunctionSet.ARRAY_SLICE)) {\nfor (int i = 1; i < argumentTypes.length; i++) {\nargumentTypes[i] = Type.BIGINT;\n}\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_SUPERTYPE_OF);\n} else if (fnName.equals(FunctionSet.ARRAY_CONCAT)) {\nif 
(node.getChildren().size() < 2) {\nthrow new SemanticException(fnName + \" should have at least two inputs\");\n}\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n} else if (fnName.equals(\"array_generate\")) {\nif (node.getChildren().size() < 1 || node.getChildren().size() > 3) {\nthrow new SemanticException(fnName + \" has wrong input numbers\");\n}\nfor (Expr expr : node.getChildren()) {\nif ((expr instanceof SlotRef) && node.getChildren().size() != 3) {\nthrow new SemanticException(fnName + \" with IntColumn doesn't support default parameters\");\n}\nif (!(expr instanceof IntLiteral) && !(expr instanceof LargeIntLiteral) &&\n!(expr instanceof SlotRef) && !(expr instanceof NullLiteral)) {\nthrow new SemanticException(fnName + \"'s parameter only support Integer\");\n}\n}\nif (node.getChildren().size() == 1) {\nLiteralExpr secondParam = (LiteralExpr) node.getChild(0);\nnode.clearChildren();\ntry {\nnode.addChild(new IntLiteral(\"1\", Type.TINYINT));\nnode.addChild(secondParam);\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n}\nif (node.getChildren().size() == 2) {\nint idx = 0;\nBigInteger[] childValues = new BigInteger[2];\nfor (Expr expr : node.getChildren()) {\nif (expr instanceof NullLiteral) {\nthrow new SemanticException(fnName + \"'s parameter only support Integer\");\n} else if (expr instanceof IntLiteral) {\nchildValues[idx++] = BigInteger.valueOf(((IntLiteral) expr).getValue());\n} else {\nchildValues[idx++] = ((LargeIntLiteral) expr).getValue();\n}\n}\nif (childValues[0].compareTo(childValues[1]) < 0) {\nnode.addChild(new IntLiteral(1));\n} else {\nnode.addChild(new IntLiteral(-1));\n}\n}\nargumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_SUPERTYPE_OF);\n} else {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n}\nif (fn == null) {\nfn = AnalyzerUtils.getUdfFunction(session, node.getFnName(), argumentTypes);\n}\nif (fn == null) {\nthrow new SemanticException(\"No matching function with signature: %s(%s).\",\nfnName,\nnode.getParams().isStar() ? \"*\" : Joiner.on(\", \")\n.join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList())));\n}\nif (fn instanceof TableFunction) {\nthrow unsupportedException(\"Table function cannot be used in expression\");\n}\nfor (int i = 0; i < fn.getNumArgs(); i++) {\nif (!argumentTypes[i].matchesType(fn.getArgs()[i]) &&\n!Type.canCastToAsFunctionParameter(argumentTypes[i], fn.getArgs()[i])) {\nthrow new SemanticException(\"No matching function with signature: %s(%s).\", fnName,\nnode.getParams().isStar() ? 
\"*\" : Joiner.on(\", \")\n.join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList())));\n}\n}\nnode.setFn(fn);\nnode.setType(fn.getReturnType());\nFunctionAnalyzer.analyze(node);\nreturn null;\n}", + "target_code": "if (node.getChildren().size() < 1 || node.getChildren().size() > 3) {", + "method_body_after": "public Void visitFunctionCall(FunctionCallExpr node, Scope scope) {\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\nif (node.isNondeterministicBuiltinFnName()) {\nExprId exprId = analyzeState.getNextNondeterministicId();\nnode.setNondeterministicId(exprId);\n}\nFunction fn;\nString fnName = node.getFnName().getFunction();\ncheckFunction(fnName, node);\nif (fnName.equals(FunctionSet.COUNT) && node.getParams().isDistinct()) {\nfn = Expr.getBuiltinFunction(FunctionSet.COUNT, new Type[] {argumentTypes[0]},\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n} else if (fnName.equals(FunctionSet.EXCHANGE_BYTES) || fnName.equals(FunctionSet.EXCHANGE_SPEED)) {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nfn.setArgsType(argumentTypes);\nfn.setIsNullable(false);\n} else if (DecimalV3FunctionAnalyzer.argumentTypeContainDecimalV3(fnName, argumentTypes)) {\nfn = DecimalV3FunctionAnalyzer.getDecimalV3Function(session, node, argumentTypes);\n} else if (Arrays.stream(argumentTypes).anyMatch(arg -> arg.matchesType(Type.TIME))) {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn instanceof AggregateFunction) {\nthrow new SemanticException(\"Time Type can not used in\" + fnName + \" function\", node.getPos());\n}\n} else if (FunctionSet.STR_TO_DATE.equals(fnName)) {\nfn = getStrToDateFunction(node, argumentTypes);\n} else if (FunctionSet.ARRAY_GENERATE.equals(fnName)) {\nfn = getArrayGenerateFunction(node);\nargumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\n} else {\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n}\nif (fn == null) {\nfn = AnalyzerUtils.getUdfFunction(session, node.getFnName(), argumentTypes);\n}\nif (fn == null) {\nString msg = String.format(\"No matching function with signature: %s(%s)\",\nfnName,\nnode.getParams().isStar() ? \"*\" : Joiner.on(\", \")\n.join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList())));\nthrow new SemanticException(msg, node.getPos());\n}\nif (fn instanceof TableFunction) {\nthrow new SemanticException(\"Table function cannot be used in expression\", node.getPos());\n}\nfor (int i = 0; i < fn.getNumArgs(); i++) {\nif (!argumentTypes[i].matchesType(fn.getArgs()[i]) &&\n!Type.canCastToAsFunctionParameter(argumentTypes[i], fn.getArgs()[i])) {\nString msg = String.format(\"No matching function with signature: %s(%s)\", fnName,\nnode.getParams().isStar() ? 
\"*\" :\nArrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.joining(\", \")));\nthrow new SemanticException(msg, node.getPos());\n}\n}\nif (fn.hasVarArgs()) {\nType varType = fn.getArgs()[fn.getNumArgs() - 1];\nfor (int i = fn.getNumArgs(); i < argumentTypes.length; i++) {\nif (!argumentTypes[i].matchesType(varType) &&\n!Type.canCastToAsFunctionParameter(argumentTypes[i], varType)) {\nString msg = String.format(\"Variadic function %s(%s) can't support type: %s\", fnName,\nArrays.stream(fn.getArgs()).map(Type::toSql).collect(Collectors.joining(\", \")),\nargumentTypes[i]);\nthrow new SemanticException(msg, node.getPos());\n}\n}\n}\nnode.setFn(fn);\nnode.setType(fn.getReturnType());\nFunctionAnalyzer.analyze(node);\nreturn null;\n}", + "context_before": "class Visitor extends AstVisitor {\nprivate static final List ADD_DATE_FUNCTIONS = Lists.newArrayList(FunctionSet.DATE_ADD,\nFunctionSet.ADDDATE, FunctionSet.DAYS_ADD, FunctionSet.TIMESTAMPADD);\nprivate static final List SUB_DATE_FUNCTIONS =\nLists.newArrayList(FunctionSet.DATE_SUB, FunctionSet.SUBDATE,\nFunctionSet.DAYS_SUB);\nprivate final AnalyzeState analyzeState;\nprivate final ConnectContext session;\npublic Visitor(AnalyzeState analyzeState, ConnectContext session) {\nthis.analyzeState = analyzeState;\nthis.session = session;\n}\n@Override\npublic Void visitExpression(Expr node, Scope scope) {\nthrow unsupportedException(\"not yet implemented: expression analyzer for \" + node.getClass().getName());\n}\nprivate void handleResolvedField(SlotRef slot, ResolvedField resolvedField) {\nanalyzeState.addColumnReference(slot, FieldId.from(resolvedField));\n}\n@Override\npublic Void visitSubfieldExpr(SubfieldExpr node, Scope scope) {\nExpr child = node.getChild(0);\nPreconditions.checkArgument(child.getType().isStructType(),\nString.format(\"%s must be a struct type, check if you are using `'`\", child.toSql()));\nList fieldNames = node.getFieldNames();\nType tmpType = child.getType();\nfor (String fieldName : fieldNames) {\nStructType structType = (StructType) tmpType;\nStructField structField = structType.getField(fieldName);\nif (structField == null) {\nthrow new SemanticException(\"Struct subfield '%s' cannot be resolved\", fieldName);\n}\ntmpType = structField.getType();\n}\nnode.setType(tmpType);\nreturn null;\n}\n@Override\npublic Void visitSlot(SlotRef node, Scope scope) {\nResolvedField resolvedField = scope.resolveField(node);\nnode.setType(resolvedField.getField().getType());\nnode.setTblName(resolvedField.getField().getRelationAlias());\nif (node.getType().isStructType()) {\nnode.setCol(resolvedField.getField().getName());\nnode.setLabel(resolvedField.getField().getName());\nif (resolvedField.getField().getTmpUsedStructFieldPos().size() > 0) {\nnode.setUsedStructFieldPos(resolvedField.getField().getTmpUsedStructFieldPos());\nnode.resetStructInfo();\n}\n}\nhandleResolvedField(node, resolvedField);\nreturn null;\n}\n@Override\npublic Void visitFieldReference(FieldReference node, Scope scope) {\nField field = scope.getRelationFields().getFieldByIndex(node.getFieldIndex());\nnode.setType(field.getType());\nreturn null;\n}\n@Override\npublic Void visitArrayExpr(ArrayExpr node, Scope scope) {\nif (!node.getChildren().isEmpty()) {\ntry {\nType targetItemType;\nif (node.getType() != null) {\ntargetItemType = ((ArrayType) node.getType()).getItemType();\n} else {\ntargetItemType = TypeManager.getCommonSuperType(\nnode.getChildren().stream().map(Expr::getType).collect(Collectors.toList()));\n}\nfor (int i = 0; i < 
node.getChildren().size(); i++) {\nif (!node.getChildren().get(i).getType().matchesType(targetItemType)) {\nnode.castChild(targetItemType, i);\n}\n}\nnode.setType(new ArrayType(targetItemType));\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n} else {\nnode.setType(Type.ARRAY_NULL);\n}\nreturn null;\n}\n@Override\npublic Void visitCollectionElementExpr(CollectionElementExpr node, Scope scope) {\nExpr expr = node.getChild(0);\nExpr subscript = node.getChild(1);\nif (!expr.getType().isArrayType() && !expr.getType().isMapType()) {\nthrow new SemanticException(\"cannot subscript type \" + expr.getType()\n+ \" because it is not an array or a map\");\n}\nif (expr.getType().isArrayType()) {\nif (!subscript.getType().isNumericType()) {\nthrow new SemanticException(\"array subscript must have type integer\");\n}\ntry {\nif (subscript.getType().getPrimitiveType() != PrimitiveType.INT) {\nnode.castChild(Type.INT, 1);\n}\nnode.setType(((ArrayType) expr.getType()).getItemType());\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n} else {\ntry {\nif (subscript.getType().getPrimitiveType() !=\n((MapType) expr.getType()).getKeyType().getPrimitiveType()) {\nnode.castChild(((MapType) expr.getType()).getKeyType(), 1);\n}\nnode.setType(((MapType) expr.getType()).getValueType());\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n}\nreturn null;\n}\n@Override\npublic Void visitArraySliceExpr(ArraySliceExpr node, Scope scope) {\nif (!node.getChild(0).getType().isArrayType()) {\nthrow new SemanticException(\"cannot subscript type\" +\nnode.getChild(0).getType() + \" because it is not an array\");\n}\nnode.setType(node.getChild(0).getType());\nreturn null;\n}\n@Override\npublic Void visitArrowExpr(ArrowExpr node, Scope scope) {\nExpr item = node.getChild(0);\nExpr key = node.getChild(1);\nif (!key.isLiteral() || !key.getType().isStringType()) {\nthrow new SemanticException(\"right operand of -> should be string literal, but got \" + key);\n}\nif (!item.getType().isJsonType()) {\nthrow new SemanticException(\n\"-> operator could only be used for json column, but got \" + item.getType());\n}\nnode.setType(Type.JSON);\nreturn null;\n}\n@Override\npublic Void visitLambdaFunctionExpr(LambdaFunctionExpr node, Scope scope) {\nif (scope.getLambdaInputs().size() == 0) {\nthrow new SemanticException(\"Lambda Functions can only be used in high-order functions with arrays.\");\n}\nif (scope.getLambdaInputs().size() != node.getChildren().size() - 1) {\nthrow new SemanticException(\"Lambda arguments should equal to lambda input arrays.\");\n}\nSet set = new HashSet<>();\nList args = Lists.newArrayList();\nfor (int i = 1; i < node.getChildren().size(); ++i) {\nargs.add((LambdaArgument) node.getChild(i));\nString name = ((LambdaArgument) node.getChild(i)).getName();\nif (set.contains(name)) {\nthrow new SemanticException(\"Lambda argument: \" + name + \" is duplicated.\");\n}\nset.add(name);\n((LambdaArgument) node.getChild(i)).setNullable(scope.getLambdaInputs().get(i - 1).isNullable());\nnode.getChild(i).setType(scope.getLambdaInputs().get(i - 1).getType());\n}\nScope lambdaScope = new Scope(args, scope);\nExpressionAnalyzer.analyzeExpression(node.getChild(0), this.analyzeState, lambdaScope, this.session);\nnode.setType(Type.FUNCTION);\nscope.clearLambdaInputs();\nreturn null;\n}\n@Override\npublic Void visitCompoundPredicate(CompoundPredicate node, Scope scope) {\nfor (int i = 0; i < node.getChildren().size(); i++) {\nType type 
= node.getChild(i).getType();\nif (!type.isBoolean() && !type.isNull()) {\nthrow new SemanticException(\"Operand '%s' part of predicate \" +\n\"'%s' should return type 'BOOLEAN' but returns type '%s'.\",\nAstToStringBuilder.toString(node), AstToStringBuilder.toString(node.getChild(i)),\ntype.toSql());\n}\n}\nnode.setType(Type.BOOLEAN);\nreturn null;\n}\n@Override\npublic Void visitBetweenPredicate(BetweenPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList());\nType compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list);\nfor (Type type : list) {\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\n\"between predicate type \" + type.toSql() + \" with type \" + compatibleType.toSql()\n+ \" is invalid.\");\n}\n}\nreturn null;\n}\n@Override\npublic Void visitBinaryPredicate(BinaryPredicate node, Scope scope) {\nType type1 = node.getChild(0).getType();\nType type2 = node.getChild(1).getType();\nType compatibleType =\nTypeManager.getCompatibleTypeForBinary(node.getOp().isNotRangeComparison(), type1, type2);\nfinal String ERROR_MSG = \"Column type %s does not support binary predicate operation.\";\nif (!Type.canCastTo(type1, compatibleType)) {\nthrow new SemanticException(String.format(ERROR_MSG, type1.toSql()));\n}\nif (!Type.canCastTo(type2, compatibleType)) {\nthrow new SemanticException(String.format(ERROR_MSG, type1.toSql()));\n}\nnode.setType(Type.BOOLEAN);\nreturn null;\n}\n@Override\npublic Void visitArithmeticExpr(ArithmeticExpr node, Scope scope) {\nif (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.BINARY_INFIX) {\nArithmeticExpr.Operator op = node.getOp();\nType t1 = node.getChild(0).getType().getNumResultType();\nType t2 = node.getChild(1).getType().getNumResultType();\nif (t1.isDecimalV3() || t2.isDecimalV3()) {\ntry {\nnode.rewriteDecimalOperation();\n} catch (AnalysisException ex) {\nthrow new SemanticException(ex.getMessage());\n}\nType lhsType = node.getChild(0).getType();\nType rhsType = node.getChild(1).getType();\nType resultType = node.getType();\nType[] args = {lhsType, rhsType};\nFunction fn = Expr.getBuiltinFunction(op.getName(), args, Function.CompareMode.IS_IDENTICAL);\nFunction newFn = new ScalarFunction(fn.getFunctionName(), args, resultType, fn.hasVarArgs());\nnode.setType(resultType);\nnode.setFn(newFn);\nreturn null;\n}\nType lhsType;\nType rhsType;\nswitch (op) {\ncase MULTIPLY:\ncase ADD:\ncase SUBTRACT:\nlhsType = ArithmeticExpr.getBiggerType(ArithmeticExpr.getCommonType(t1, t2));\nrhsType = lhsType;\nbreak;\ncase MOD:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nrhsType = lhsType;\nbreak;\ncase DIVIDE:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nif (lhsType.isFixedPointType()) {\nlhsType = Type.DOUBLE;\n}\nrhsType = lhsType;\nbreak;\ncase INT_DIVIDE:\ncase BITAND:\ncase BITOR:\ncase BITXOR:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nif (!lhsType.isFixedPointType()) {\nlhsType = Type.BIGINT;\n}\nrhsType = lhsType;\nbreak;\ncase BIT_SHIFT_LEFT:\ncase BIT_SHIFT_RIGHT:\ncase BIT_SHIFT_RIGHT_LOGICAL:\nlhsType = t1;\nrhsType = Type.BIGINT;\nbreak;\ndefault:\nthrow unsupportedException(\"Unknown arithmetic operation \" + op + \" in: \" + node);\n}\nif (node.getChild(0).getType().equals(Type.NULL) && node.getChild(1).getType().equals(Type.NULL)) {\nlhsType = Type.NULL;\nrhsType = Type.NULL;\n}\nif (!Type.NULL.equals(node.getChild(0).getType()) && !Type.canCastTo(t1, lhsType)) {\nthrow new 
SemanticException(\n\"cast type \" + node.getChild(0).getType().toSql() + \" with type \" + lhsType.toSql()\n+ \" is invalid.\");\n}\nif (!Type.NULL.equals(node.getChild(1).getType()) && !Type.canCastTo(t2, rhsType)) {\nthrow new SemanticException(\n\"cast type \" + node.getChild(1).getType().toSql() + \" with type \" + rhsType.toSql()\n+ \" is invalid.\");\n}\nFunction fn = Expr.getBuiltinFunction(op.getName(), new Type[] {lhsType, rhsType},\nFunction.CompareMode.IS_SUPERTYPE_OF);\n/*\n* commonType is the common type of the parameters of the function,\n* and fn.getReturnType() is the return type of the function after execution\n* So we use fn.getReturnType() as node type\n*/\nnode.setType(fn.getReturnType());\nnode.setFn(fn);\n} else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_PREFIX) {\nFunction fn = Expr.getBuiltinFunction(\nnode.getOp().getName(), new Type[] {Type.BIGINT}, Function.CompareMode.IS_SUPERTYPE_OF);\nnode.setType(Type.BIGINT);\nnode.setFn(fn);\n} else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_POSTFIX) {\nthrow unsupportedException(\"not yet implemented: expression analyzer for \" + node.getClass().getName());\n} else {\nthrow unsupportedException(\"not yet implemented: expression analyzer for \" + node.getClass().getName());\n}\nreturn null;\n}\nList addDateFunctions = Lists.newArrayList(FunctionSet.DATE_ADD,\nFunctionSet.ADDDATE, FunctionSet.DAYS_ADD, FunctionSet.TIMESTAMPADD);\nList subDateFunctions = Lists.newArrayList(FunctionSet.DATE_SUB, FunctionSet.SUBDATE,\nFunctionSet.DAYS_SUB);\n@Override\npublic Void visitTimestampArithmeticExpr(TimestampArithmeticExpr node, Scope scope) {\nnode.setChild(0, TypeManager.addCastExpr(node.getChild(0), Type.DATETIME));\nString funcOpName;\nif (node.getFuncName() != null) {\nif (ADD_DATE_FUNCTIONS.contains(node.getFuncName())) {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"add\");\n} else if (SUB_DATE_FUNCTIONS.contains(node.getFuncName())) {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"sub\");\n} else {\nnode.setChild(1, TypeManager.addCastExpr(node.getChild(1), Type.DATETIME));\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"diff\");\n}\n} else {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(),\n(node.getOp() == ArithmeticExpr.Operator.ADD) ? 
\"add\" : \"sub\");\n}\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType)\n.toArray(Type[]::new);\nFunction fn = Expr.getBuiltinFunction(funcOpName.toLowerCase(), argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn == null) {\nthrow new SemanticException(\"No matching function with signature: %s(%s).\", funcOpName, Joiner.on(\", \")\n.join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList())));\n}\nnode.setType(fn.getReturnType());\nnode.setFn(fn);\nreturn null;\n}\n@Override\npublic Void visitExistsPredicate(ExistsPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nreturn null;\n}\n@Override\npublic Void visitInPredicate(InPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList queryExpressions = Lists.newArrayList();\nnode.collect(arg -> arg instanceof Subquery, queryExpressions);\nif (queryExpressions.size() > 0 && node.getChildren().size() > 2) {\nthrow new SemanticException(\"In Predicate only support literal expression list\");\n}\nList list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList());\nType compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list);\nfor (Type type : list) {\nif (type.isJsonType()) {\nthrow new SemanticException(\"InPredicate of JSON is not supported\");\n}\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\n\"in predicate type \" + type.toSql() + \" with type \" + compatibleType.toSql()\n+ \" is invalid.\");\n}\n}\nreturn null;\n}\n@Override\npublic Void visitMultiInPredicate(MultiInPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList leftTypes =\nnode.getChildren().stream().limit(node.getNumberOfColumns()).map(Expr::getType)\n.collect(Collectors.toList());\nSubquery inSubquery = (Subquery) node.getChild(node.getNumberOfColumns());\nList rightTypes =\ninSubquery.getQueryStatement().getQueryRelation().getOutputExpression().stream().map(Expr::getType).\ncollect(Collectors.toList());\nif (leftTypes.size() != rightTypes.size()) {\nthrow new SemanticException(\n\"subquery must return the same number of columns as provided by the IN predicate\");\n}\nfor (int i = 0; i < rightTypes.size(); ++i) {\nif (leftTypes.get(i).isJsonType() || rightTypes.get(i).isJsonType() || leftTypes.get(i).isMapType() ||\nrightTypes.get(i).isMapType() || leftTypes.get(i).isStructType() ||\nrightTypes.get(i).isStructType()) {\nthrow new SemanticException(\"InPredicate of JSON, Map, Struct types is not supported\");\n}\nif (!Type.canCastTo(leftTypes.get(i), rightTypes.get(i))) {\nthrow new SemanticException(\n\"in predicate type \" + leftTypes.get(i).toSql() + \" with type \" + rightTypes.get(i).toSql()\n+ \" is invalid.\");\n}\n}\nreturn null;\n}\n@Override\npublic Void visitLiteral(LiteralExpr node, Scope scope) {\nif (node instanceof LargeIntLiteral) {\nBigInteger value = ((LargeIntLiteral) node).getValue();\nif (value.compareTo(LargeIntLiteral.LARGE_INT_MIN) < 0 ||\nvalue.compareTo(LargeIntLiteral.LARGE_INT_MAX) > 0) {\nthrow new SemanticException(\"Number Overflow. 
literal: \" + value);\n}\n}\nreturn null;\n}\n@Override\npublic Void visitIsNullPredicate(IsNullPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nreturn null;\n}\n@Override\npublic Void visitLikePredicate(LikePredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nType type1 = node.getChild(0).getType();\nType type2 = node.getChild(1).getType();\nif (!type1.isStringType() && !type1.isNull()) {\nthrow new SemanticException(\n\"left operand of \" + node.getOp().toString() + \" must be of type STRING: \" +\nAstToStringBuilder.toString(node));\n}\nif (!type2.isStringType() && !type2.isNull()) {\nthrow new SemanticException(\n\"right operand of \" + node.getOp().toString() + \" must be of type STRING: \" +\nAstToStringBuilder.toString(node));\n}\nif (LikePredicate.Operator.REGEXP.equals(node.getOp()) && !type2.isNull() && node.getChild(1).isLiteral()) {\ntry {\nPattern.compile(((StringLiteral) node.getChild(1)).getValue());\n} catch (PatternSyntaxException e) {\nthrow new SemanticException(\n\"Invalid regular expression in '\" + AstToStringBuilder.toString(node) + \"'\");\n}\n}\nreturn null;\n}\nprivate void predicateBaseAndCheck(Predicate node) {\nnode.setType(Type.BOOLEAN);\nfor (Expr expr : node.getChildren()) {\nif (expr.getType().isOnlyMetricType() ||\n(expr.getType().isComplexType() && !(node instanceof IsNullPredicate))) {\nthrow new SemanticException(\n\"HLL, BITMAP, PERCENTILE and ARRAY, MAP, STRUCT type couldn't as Predicate\");\n}\n}\n}\n@Override\npublic Void visitCastExpr(CastExpr cast, Scope context) {\nType castType;\nif (cast.isImplicit()) {\ncastType = cast.getType();\n} else {\ncastType = cast.getTargetTypeDef().getType();\n}\nif (!Type.canCastTo(cast.getChild(0).getType(), castType)) {\nthrow new SemanticException(\"Invalid type cast from \" + cast.getChild(0).getType().toSql() + \" to \"\n+ castType.toSql() + \" in sql `\" +\nAstToStringBuilder.toString(cast.getChild(0)).replace(\"%\", \"%%\") + \"`\");\n}\ncast.setType(castType);\nreturn null;\n}\n@Override\nprivate Function getStrToDateFunction(FunctionCallExpr node, Type[] argumentTypes) {\n/*\n* @TODO: Determine the return type of this function\n* If is format is constant and don't contains time part, return date type, to compatible with mysql.\n* In fact we don't want to support str_to_date return date like mysql, reason:\n* 1. The return type of FE/BE str_to_date function signature is datetime, return date\n* let type different, it's will throw unpredictable error\n* 2. Support return date and datetime at same time in one function is complicated.\n* 3. The meaning of the function is confusing. 
In mysql, will return date if format is a constant\n* string and it's not contains \"%H/%M/%S\" pattern, but it's a trick logic, if format is a variable\n* expression, like: str_to_date(col1, col2), and the col2 is '%Y%m%d', the result always be\n* datetime.\n*/\nFunction fn = Expr.getBuiltinFunction(node.getFnName().getFunction(),\nargumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn == null) {\nreturn null;\n}\nif (!node.getChild(1).isConstant()) {\nreturn fn;\n}\nExpressionMapping expressionMapping =\nnew ExpressionMapping(new Scope(RelationId.anonymous(), new RelationFields()),\ncom.google.common.collect.Lists.newArrayList());\nScalarOperator format = SqlToScalarOperatorTranslator.translate(node.getChild(1), expressionMapping,\nnew ColumnRefFactory());\nif (format.isConstantRef() && !HAS_TIME_PART.matcher(format.toString()).matches()) {\nreturn Expr.getBuiltinFunction(\"str2date\", argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n}\nreturn fn;\n}\nFunction getDecimalV3Function(FunctionCallExpr node, Type[] argumentTypes) {\nFunction fn;\nString fnName = node.getFnName().getFunction();\nType commonType = DecimalV3FunctionAnalyzer.normalizeDecimalArgTypes(argumentTypes, fnName);\nfn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn == null) {\nfn = AnalyzerUtils.getUdfFunction(session, node.getFnName(), argumentTypes);\n}\nif (fn == null) {\nthrow new SemanticException(\"No matching function with signature: %s(%s).\", fnName,\nnode.getParams().isStar() ? \"*\" : Joiner.on(\", \")\n.join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList())));\n}\nif (DecimalV3FunctionAnalyzer.DECIMAL_AGG_FUNCTION.contains(fnName)) {\nType argType = node.getChild(0).getType();\nif (DecimalV3FunctionAnalyzer.DECIMAL_AGG_VARIANCE_STDDEV_TYPE\n.contains(fnName) && argType.isDecimalV3()) {\nargType = ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 38, 9);\nnode.setChild(0, TypeManager.addCastExpr(node.getChild(0), argType));\n}\nfn = DecimalV3FunctionAnalyzer\n.rectifyAggregationFunction((AggregateFunction) fn, argType, commonType);\n} else if (DecimalV3FunctionAnalyzer.DECIMAL_UNARY_FUNCTION_SET.contains(fnName) ||\nDecimalV3FunctionAnalyzer.DECIMAL_IDENTICAL_TYPE_FUNCTION_SET.contains(fnName) ||\nFunctionSet.IF.equals(fnName) || FunctionSet.MAX_BY.equals(fnName)) {\nList argTypes;\nif (FunctionSet.MONEY_FORMAT.equals(fnName)) {\nargTypes = Arrays.asList(argumentTypes);\n} else {\nargTypes = Arrays.stream(fn.getArgs()).map(t -> t.isDecimalV3() ? 
commonType : t)\n.collect(Collectors.toList());\n}\nType returnType = fn.getReturnType();\nif (returnType.isDecimalV3() && commonType.isValid()) {\nreturnType = commonType;\n}\nif (FunctionSet.MAX_BY.equals(fnName)) {\nAggregateFunction newFn = new AggregateFunction(fn.getFunctionName(),\nArrays.asList(argumentTypes), returnType,\nType.VARCHAR, fn.hasVarArgs());\nnewFn.setFunctionId(fn.getFunctionId());\nnewFn.setChecksum(fn.getChecksum());\nnewFn.setBinaryType(fn.getBinaryType());\nnewFn.setHasVarArgs(fn.hasVarArgs());\nnewFn.setId(fn.getId());\nnewFn.setUserVisible(fn.isUserVisible());\nnewFn.setisAnalyticFn(true);\nfn = newFn;\nreturn fn;\n}\nScalarFunction newFn = new ScalarFunction(fn.getFunctionName(), argTypes, returnType,\nfn.getLocation(), ((ScalarFunction) fn).getSymbolName(),\n((ScalarFunction) fn).getPrepareFnSymbol(),\n((ScalarFunction) fn).getCloseFnSymbol());\nnewFn.setFunctionId(fn.getFunctionId());\nnewFn.setChecksum(fn.getChecksum());\nnewFn.setBinaryType(fn.getBinaryType());\nnewFn.setHasVarArgs(fn.hasVarArgs());\nnewFn.setId(fn.getId());\nnewFn.setUserVisible(fn.isUserVisible());\nfn = newFn;\n} else if (FunctionSet.decimalRoundFunctions.contains(fnName)) {\nList argTypes = Arrays.stream(fn.getArgs()).map(t -> t.isDecimalV3() ? commonType : t)\n.collect(Collectors.toList());\nfn = DecimalV3FunctionAnalyzer.getFunctionOfRound(node, fn, argTypes);\n}\nreturn fn;\n}\n@Override\npublic Void visitGroupingFunctionCall(GroupingFunctionCallExpr node, Scope scope) {\nif (node.getChildren().size() < 1) {\nthrow new SemanticException(\"GROUPING functions required at least one parameters\");\n}\nif (node.getChildren().stream().anyMatch(e -> !(e instanceof SlotRef))) {\nthrow new SemanticException(\"grouping functions only support column.\");\n}\nType[] childTypes = new Type[1];\nchildTypes[0] = Type.BIGINT;\nFunction fn = Expr.getBuiltinFunction(node.getFnName().getFunction(),\nchildTypes, Function.CompareMode.IS_IDENTICAL);\nnode.setFn(fn);\nnode.setType(fn.getReturnType());\nreturn null;\n}\n@Override\npublic Void visitCaseWhenExpr(CaseExpr node, Scope context) {\nint start = 0;\nint end = node.getChildren().size();\nExpr caseExpr = null;\nExpr elseExpr = null;\nif (node.hasCaseExpr()) {\ncaseExpr = node.getChild(0);\nstart++;\n}\nif (node.hasElseExpr()) {\nelseExpr = node.getChild(end - 1);\nend--;\n}\nif (node.getChildren().stream().anyMatch(d -> !d.getType().isScalarType())) {\nthrow new SemanticException(\"case-when only support scalar type\");\n}\nList whenTypes = Lists.newArrayList();\nif (null != caseExpr) {\nwhenTypes.add(caseExpr.getType());\n}\nfor (int i = start; i < end; i = i + 2) {\nwhenTypes.add(node.getChild(i).getType());\n}\nType compatibleType = Type.NULL;\nif (null != caseExpr) {\ncompatibleType = TypeManager.getCompatibleTypeForCaseWhen(whenTypes);\n}\nfor (Type type : whenTypes) {\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\"Invalid when type cast \" + type.toSql()\n+ \" to \" + compatibleType.toSql());\n}\n}\nList thenTypes = Lists.newArrayList();\nfor (int i = start + 1; i < end; i = i + 2) {\nthenTypes.add(node.getChild(i).getType());\n}\nif (null != elseExpr) {\nthenTypes.add(elseExpr.getType());\n}\nType returnType = thenTypes.stream().allMatch(Type.NULL::equals) ? 
Type.BOOLEAN :\nTypeManager.getCompatibleTypeForCaseWhen(thenTypes);\nfor (Type type : thenTypes) {\nif (!Type.canCastTo(type, returnType)) {\nthrow new SemanticException(\"Invalid then type cast \" + type.toSql()\n+ \" to \" + returnType.toSql());\n}\n}\nnode.setType(returnType);\nreturn null;\n}\n@Override\npublic Void visitSubquery(Subquery node, Scope context) {\nQueryAnalyzer queryAnalyzer = new QueryAnalyzer(session);\nqueryAnalyzer.analyze(node.getQueryStatement(), context);\nnode.setType(node.getQueryStatement().getQueryRelation().getRelationFields().getFieldByIndex(0).getType());\nreturn null;\n}\n@Override\npublic Void visitAnalyticExpr(AnalyticExpr node, Scope context) {\nvisit(node.getFnCall(), context);\nnode.setType(node.getFnCall().getType());\nif (node.getWindow() != null) {\nif (node.getWindow().getLeftBoundary() != null &&\nnode.getWindow().getLeftBoundary().getExpr() != null) {\nvisit(node.getWindow().getLeftBoundary().getExpr(), context);\n}\nif (node.getWindow().getRightBoundary() != null &&\nnode.getWindow().getRightBoundary().getExpr() != null) {\nvisit(node.getWindow().getRightBoundary().getExpr(), context);\n}\n}\nnode.getPartitionExprs().forEach(e -> visit(e, context));\nnode.getOrderByElements().stream().map(OrderByElement::getExpr).forEach(e -> visit(e, context));\nverifyAnalyticExpression(node);\nreturn null;\n}\n@Override\npublic Void visitInformationFunction(InformationFunction node, Scope context) {\nString funcType = node.getFuncType();\nif (funcType.equalsIgnoreCase(\"DATABASE\") || funcType.equalsIgnoreCase(\"SCHEMA\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(ClusterNamespace.getNameFromFullName(session.getDatabase()));\n} else if (funcType.equalsIgnoreCase(\"USER\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(session.getUserIdentity().toString());\n} else if (funcType.equalsIgnoreCase(\"CURRENT_USER\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(session.getCurrentUserIdentity().toString());\n} else if (funcType.equalsIgnoreCase(\"CURRENT_ROLE\")) {\nnode.setType(Type.VARCHAR);\nPrivilegeManager manager = session.getGlobalStateMgr().getPrivilegeManager();\nList roleName = new ArrayList<>();\ntry {\nfor (Long roleId : session.getCurrentRoleIds()) {\nRolePrivilegeCollection rolePrivilegeCollection =\nmanager.getRolePrivilegeCollectionUnlocked(roleId, true);\nroleName.add(rolePrivilegeCollection.getName());\n}\n} catch (PrivilegeException e) {\nthrow new SemanticException(e.getMessage());\n}\nif (roleName.isEmpty()) {\nnode.setStrValue(\"NONE\");\n} else {\nnode.setStrValue(Joiner.on(\", \").join(roleName));\n}\n} else if (funcType.equalsIgnoreCase(\"CONNECTION_ID\")) {\nnode.setType(Type.BIGINT);\nnode.setIntValue(session.getConnectionId());\nnode.setStrValue(\"\");\n}\nreturn null;\n}\n@Override\npublic Void visitVariableExpr(VariableExpr node, Scope context) {\ntry {\nif (node.getSetType().equals(SetType.USER)) {\nUserVariable userVariable = session.getUserVariables(node.getName());\nif (userVariable == null) {\nnode.setType(Type.STRING);\nnode.setIsNull();\nreturn null;\n}\nType variableType = userVariable.getEvaluatedExpression().getType();\nnode.setType(variableType);\nif (userVariable.getEvaluatedExpression() instanceof NullLiteral) {\nnode.setIsNull();\n} else {\nnode.setValue(userVariable.getEvaluatedExpression().getRealObjectValue());\n}\n} else {\nVariableMgr.fillValue(session.getSessionVariable(), node);\nif (!Strings.isNullOrEmpty(node.getName()) &&\nnode.getName().equalsIgnoreCase(SessionVariable.SQL_MODE)) 
{\nnode.setType(Type.VARCHAR);\nnode.setValue(SqlModeHelper.decode((long) node.getValue()));\n}\n}\n} catch (AnalysisException | DdlException e) {\nthrow new SemanticException(e.getMessage());\n}\nreturn null;\n}\n@Override\npublic Void visitDefaultValueExpr(DefaultValueExpr node, Scope context) {\nnode.setType(Type.VARCHAR);\nreturn null;\n}\n@Override\npublic Void visitCloneExpr(CloneExpr node, Scope context) {\nreturn null;\n}\n}", + "context_after": "class Visitor extends AstVisitor {\nprivate static final List ADD_DATE_FUNCTIONS = Lists.newArrayList(FunctionSet.DATE_ADD,\nFunctionSet.ADDDATE, FunctionSet.DAYS_ADD, FunctionSet.TIMESTAMPADD);\nprivate static final List SUB_DATE_FUNCTIONS =\nLists.newArrayList(FunctionSet.DATE_SUB, FunctionSet.SUBDATE,\nFunctionSet.DAYS_SUB);\nprivate final AnalyzeState analyzeState;\nprivate final ConnectContext session;\npublic Visitor(AnalyzeState analyzeState, ConnectContext session) {\nthis.analyzeState = analyzeState;\nthis.session = session;\n}\n@Override\npublic Void visitExpression(Expr node, Scope scope) {\nthrow new SemanticException(\"not yet implemented: expression analyzer for \" + node.getClass().getName(),\nnode.getPos());\n}\nprivate void handleResolvedField(SlotRef slot, ResolvedField resolvedField) {\nanalyzeState.addColumnReference(slot, FieldId.from(resolvedField));\n}\n@Override\npublic Void visitSubfieldExpr(SubfieldExpr node, Scope scope) {\nExpr child = node.getChild(0);\nif (!child.getType().isStructType()) {\nthrow new SemanticException(child.toSql() + \" must be a struct type, check if you are using `'`\",\nchild.getPos());\n}\nList fieldNames = node.getFieldNames();\nType tmpType = child.getType();\nfor (String fieldName : fieldNames) {\nStructType structType = (StructType) tmpType;\nStructField structField = structType.getField(fieldName);\nif (structField == null) {\nthrow new SemanticException(String.format(\"Struct subfield '%s' cannot be resolved\", fieldName),\nnode.getPos());\n}\ntmpType = structField.getType();\n}\nnode.setType(tmpType);\nreturn null;\n}\n@Override\npublic Void visitSlot(SlotRef node, Scope scope) {\nResolvedField resolvedField = scope.resolveField(node);\nnode.setType(resolvedField.getField().getType());\nnode.setTblName(resolvedField.getField().getRelationAlias());\nif (node.getType().isStructType()) {\nnode.setCol(resolvedField.getField().getName());\nnode.setLabel(resolvedField.getField().getName());\nif (resolvedField.getField().getTmpUsedStructFieldPos().size() > 0) {\nnode.setUsedStructFieldPos(resolvedField.getField().getTmpUsedStructFieldPos());\nnode.resetStructInfo();\n}\n}\nhandleResolvedField(node, resolvedField);\nreturn null;\n}\n@Override\npublic Void visitFieldReference(FieldReference node, Scope scope) {\nField field = scope.getRelationFields().getFieldByIndex(node.getFieldIndex());\nnode.setType(field.getType());\nreturn null;\n}\n@Override\npublic Void visitArrayExpr(ArrayExpr node, Scope scope) {\nif (!node.getChildren().isEmpty()) {\ntry {\nType targetItemType;\nif (node.getType() != null) {\ntargetItemType = ((ArrayType) node.getType()).getItemType();\n} else {\ntargetItemType = TypeManager.getCommonSuperType(\nnode.getChildren().stream().map(Expr::getType).collect(Collectors.toList()));\n}\nfor (int i = 0; i < node.getChildren().size(); i++) {\nif (!node.getChildren().get(i).getType().matchesType(targetItemType)) {\nnode.castChild(targetItemType, i);\n}\n}\nnode.setType(new ArrayType(targetItemType));\n} catch (AnalysisException e) {\nthrow new 
SemanticException(e.getMessage());\n}\n} else {\nnode.setType(Type.ARRAY_NULL);\n}\nreturn null;\n}\n@Override\npublic Void visitMapExpr(MapExpr node, Scope scope) {\nif (!node.getChildren().isEmpty()) {\nType keyType = Type.NULL;\nType valueType = Type.NULL;\nif (node.getKeyExpr() != null) {\nkeyType = node.getKeyExpr().getType();\n}\nif (node.getValueExpr() != null) {\nvalueType = node.getValueExpr().getType();\n}\nnode.setType(new MapType(keyType, valueType));\n} else {\nnode.setType(new MapType(Type.NULL, Type.NULL));\n}\nreturn null;\n}\n@Override\npublic Void visitCollectionElementExpr(CollectionElementExpr node, Scope scope) {\nExpr expr = node.getChild(0);\nExpr subscript = node.getChild(1);\nif (!expr.getType().isArrayType() && !expr.getType().isMapType()) {\nthrow new SemanticException(\"cannot subscript type \" + expr.getType()\n+ \" because it is not an array or a map\", expr.getPos());\n}\nif (expr.getType().isArrayType()) {\nif (!subscript.getType().isNumericType()) {\nthrow new SemanticException(\"array subscript must have type integer\", subscript.getPos());\n}\ntry {\nif (subscript.getType().getPrimitiveType() != PrimitiveType.INT) {\nnode.castChild(Type.INT, 1);\n}\nnode.setType(((ArrayType) expr.getType()).getItemType());\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n} else {\ntry {\nif (subscript.getType().getPrimitiveType() !=\n((MapType) expr.getType()).getKeyType().getPrimitiveType()) {\nnode.castChild(((MapType) expr.getType()).getKeyType(), 1);\n}\nnode.setType(((MapType) expr.getType()).getValueType());\n} catch (AnalysisException e) {\nthrow new SemanticException(e.getMessage());\n}\n}\nreturn null;\n}\n@Override\npublic Void visitArraySliceExpr(ArraySliceExpr node, Scope scope) {\nif (!node.getChild(0).getType().isArrayType()) {\nthrow new SemanticException(\"cannot subscript type\" +\nnode.getChild(0).getType() + \" because it is not an array\", node.getChild(0).getPos());\n}\nnode.setType(node.getChild(0).getType());\nreturn null;\n}\n@Override\npublic Void visitArrowExpr(ArrowExpr node, Scope scope) {\nExpr item = node.getChild(0);\nExpr key = node.getChild(1);\nif (!key.isLiteral() || !key.getType().isStringType()) {\nthrow new SemanticException(\"right operand of -> should be string literal, but got \" + key,\nkey.getPos());\n}\nif (!item.getType().isJsonType()) {\nthrow new SemanticException(\n\"-> operator could only be used for json column, but got \" + item.getType(), item.getPos());\n}\nnode.setType(Type.JSON);\nreturn null;\n}\n@Override\npublic Void visitLambdaFunctionExpr(LambdaFunctionExpr node, Scope scope) {\nif (scope.getLambdaInputs().size() == 0) {\nthrow new SemanticException(\n\"Lambda Functions can only be used in high-order functions with arrays/maps\",\nnode.getPos());\n}\nif (scope.getLambdaInputs().size() != node.getChildren().size() - 1) {\nthrow new SemanticException(\"Lambda arguments should equal to lambda input arrays\", node.getPos());\n}\nSet set = new HashSet<>();\nList args = Lists.newArrayList();\nfor (int i = 1; i < node.getChildren().size(); ++i) {\nargs.add((LambdaArgument) node.getChild(i));\nString name = ((LambdaArgument) node.getChild(i)).getName();\nif (set.contains(name)) {\nthrow new SemanticException(\"Lambda argument: \" + name + \" is duplicated\",\nnode.getChild(i).getPos());\n}\nset.add(name);\n((LambdaArgument) node.getChild(i)).setNullable(scope.getLambdaInputs().get(i - 1).isNullable());\nnode.getChild(i).setType(scope.getLambdaInputs().get(i - 1).getType());\n}\nScope 
lambdaScope = new Scope(args, scope);\nExpressionAnalyzer.analyzeExpression(node.getChild(0), this.analyzeState, lambdaScope, this.session);\nnode.setType(Type.FUNCTION);\nscope.clearLambdaInputs();\nreturn null;\n}\n@Override\npublic Void visitCompoundPredicate(CompoundPredicate node, Scope scope) {\nfor (int i = 0; i < node.getChildren().size(); i++) {\nType type = node.getChild(i).getType();\nif (!type.isBoolean() && !type.isNull()) {\nString msg = String.format(\"Operand '%s' part of predicate \" +\n\"'%s' should return type 'BOOLEAN' but returns type '%s'\",\nAstToStringBuilder.toString(node), AstToStringBuilder.toString(node.getChild(i)),\ntype.toSql());\nthrow new SemanticException(msg, node.getChild(i).getPos());\n}\n}\nnode.setType(Type.BOOLEAN);\nreturn null;\n}\n@Override\npublic Void visitBetweenPredicate(BetweenPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList());\nType compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list);\nfor (Type type : list) {\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\n\"between predicate type \" + type.toSql() + \" with type \" + compatibleType.toSql()\n+ \" is invalid\", node.getPos());\n}\n}\nreturn null;\n}\n@Override\npublic Void visitBinaryPredicate(BinaryPredicate node, Scope scope) {\nType type1 = node.getChild(0).getType();\nType type2 = node.getChild(1).getType();\nType compatibleType =\nTypeManager.getCompatibleTypeForBinary(node.getOp().isNotRangeComparison(), type1, type2);\nfinal String ERROR_MSG = \"Column type %s does not support binary predicate operation\";\nif (!Type.canCastTo(type1, compatibleType)) {\nthrow new SemanticException(String.format(ERROR_MSG, type1.toSql()), node.getPos());\n}\nif (!Type.canCastTo(type2, compatibleType)) {\nthrow new SemanticException(String.format(ERROR_MSG, type1.toSql()), node.getPos());\n}\nnode.setType(Type.BOOLEAN);\nreturn null;\n}\n@Override\npublic Void visitArithmeticExpr(ArithmeticExpr node, Scope scope) {\nif (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.BINARY_INFIX) {\nArithmeticExpr.Operator op = node.getOp();\nType t1 = node.getChild(0).getType().getNumResultType();\nType t2 = node.getChild(1).getType().getNumResultType();\nif (t1.isDecimalV3() || t2.isDecimalV3()) {\ntry {\nnode.rewriteDecimalOperation();\n} catch (AnalysisException ex) {\nthrow new SemanticException(ex.getMessage());\n}\nType lhsType = node.getChild(0).getType();\nType rhsType = node.getChild(1).getType();\nType resultType = node.getType();\nType[] args = {lhsType, rhsType};\nFunction fn = Expr.getBuiltinFunction(op.getName(), args, Function.CompareMode.IS_IDENTICAL);\nFunction newFn = new ScalarFunction(fn.getFunctionName(), args, resultType, fn.hasVarArgs());\nnode.setType(resultType);\nnode.setFn(newFn);\nreturn null;\n}\nType lhsType;\nType rhsType;\nswitch (op) {\ncase MULTIPLY:\ncase ADD:\ncase SUBTRACT:\nlhsType = ArithmeticExpr.getBiggerType(ArithmeticExpr.getCommonType(t1, t2));\nrhsType = lhsType;\nbreak;\ncase MOD:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nrhsType = lhsType;\nbreak;\ncase DIVIDE:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nif (lhsType.isFixedPointType()) {\nlhsType = Type.DOUBLE;\n}\nrhsType = lhsType;\nbreak;\ncase INT_DIVIDE:\ncase BITAND:\ncase BITOR:\ncase BITXOR:\nlhsType = ArithmeticExpr.getCommonType(t1, t2);\nif (!lhsType.isFixedPointType()) {\nlhsType = Type.BIGINT;\n}\nrhsType = lhsType;\nbreak;\ncase 
BIT_SHIFT_LEFT:\ncase BIT_SHIFT_RIGHT:\ncase BIT_SHIFT_RIGHT_LOGICAL:\nlhsType = t1;\nrhsType = Type.BIGINT;\nbreak;\ndefault:\nthrow new SemanticException(\"Unknown arithmetic operation \" + op + \" in: \" + node,\nnode.getPos());\n}\nif (node.getChild(0).getType().equals(Type.NULL) && node.getChild(1).getType().equals(Type.NULL)) {\nlhsType = Type.NULL;\nrhsType = Type.NULL;\n}\nif (lhsType.isInvalid() || rhsType.isInvalid()) {\nthrow new SemanticException(\"Any function type can not cast to \" + Type.INVALID.toSql());\n}\nif (!Type.NULL.equals(node.getChild(0).getType()) && !Type.canCastTo(t1, lhsType)) {\nthrow new SemanticException(\n\"cast type \" + node.getChild(0).getType().toSql() + \" with type \" + lhsType.toSql()\n+ \" is invalid\", node.getPos());\n}\nif (!Type.NULL.equals(node.getChild(1).getType()) && !Type.canCastTo(t2, rhsType)) {\nthrow new SemanticException(\n\"cast type \" + node.getChild(1).getType().toSql() + \" with type \" + rhsType.toSql()\n+ \" is invalid\", node.getPos());\n}\nFunction fn = Expr.getBuiltinFunction(op.getName(), new Type[] {lhsType, rhsType},\nFunction.CompareMode.IS_SUPERTYPE_OF);\nif (fn == null) {\nthrow new SemanticException(String.format(\n\"No matching function '%s' with operand types %s and %s\", node.getOp().getName(), t1, t2));\n}\n/*\n* commonType is the common type of the parameters of the function,\n* and fn.getReturnType() is the return type of the function after execution\n* So we use fn.getReturnType() as node type\n*/\nnode.setType(fn.getReturnType());\nnode.setFn(fn);\n} else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_PREFIX) {\nFunction fn = Expr.getBuiltinFunction(\nnode.getOp().getName(), new Type[] {Type.BIGINT}, Function.CompareMode.IS_SUPERTYPE_OF);\nnode.setType(Type.BIGINT);\nnode.setFn(fn);\n} else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_POSTFIX) {\nthrow new SemanticException(\"not yet implemented: expression analyzer for \" + node.getClass().getName(),\nnode.getPos());\n} else {\nthrow new SemanticException(\"not yet implemented: expression analyzer for \" + node.getClass().getName(),\nnode.getPos());\n}\nreturn null;\n}\n@Override\npublic Void visitTimestampArithmeticExpr(TimestampArithmeticExpr node, Scope scope) {\nnode.setChild(0, TypeManager.addCastExpr(node.getChild(0), Type.DATETIME));\nString funcOpName;\nif (node.getFuncName() != null) {\nif (ADD_DATE_FUNCTIONS.contains(node.getFuncName())) {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"add\");\n} else if (SUB_DATE_FUNCTIONS.contains(node.getFuncName())) {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"sub\");\n} else {\nnode.setChild(1, TypeManager.addCastExpr(node.getChild(1), Type.DATETIME));\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(), \"diff\");\n}\n} else {\nfuncOpName = String.format(\"%sS_%s\", node.getTimeUnitIdent(),\n(node.getOp() == ArithmeticExpr.Operator.ADD) ? 
\"add\" : \"sub\");\n}\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType)\n.toArray(Type[]::new);\nFunction fn = Expr.getBuiltinFunction(funcOpName.toLowerCase(), argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn == null) {\nString msg = String.format(\"No matching function with signature: %s(%s)\", funcOpName, Joiner.on(\", \")\n.join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList())));\nthrow new SemanticException(msg, node.getPos());\n}\nnode.setType(fn.getReturnType());\nnode.setFn(fn);\nreturn null;\n}\n@Override\npublic Void visitExistsPredicate(ExistsPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nreturn null;\n}\n@Override\npublic Void visitInPredicate(InPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList queryExpressions = Lists.newArrayList();\nnode.collect(arg -> arg instanceof Subquery, queryExpressions);\nif (queryExpressions.size() > 0 && node.getChildren().size() > 2) {\nthrow new SemanticException(\"In Predicate only support literal expression list\", node.getPos());\n}\nList list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList());\nType compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list);\nfor (Expr child : node.getChildren()) {\nType type = child.getType();\nif (type.isJsonType()) {\nthrow new SemanticException(\"InPredicate of JSON is not supported\", child.getPos());\n}\nif (!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\n\"in predicate type \" + type.toSql() + \" with type \" + compatibleType.toSql()\n+ \" is invalid\", child.getPos());\n}\n}\nreturn null;\n}\n@Override\npublic Void visitMultiInPredicate(MultiInPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nList leftTypes =\nnode.getChildren().stream().limit(node.getNumberOfColumns()).map(Expr::getType)\n.collect(Collectors.toList());\nSubquery inSubquery = (Subquery) node.getChild(node.getNumberOfColumns());\nList rightTypes =\ninSubquery.getQueryStatement().getQueryRelation().getOutputExpression().stream().map(Expr::getType).\ncollect(Collectors.toList());\nif (leftTypes.size() != rightTypes.size()) {\nthrow new SemanticException(\n\"subquery must return the same number of columns as provided by the IN predicate\",\nnode.getPos());\n}\nfor (int i = 0; i < rightTypes.size(); ++i) {\nif (leftTypes.get(i).isJsonType() || rightTypes.get(i).isJsonType() || leftTypes.get(i).isMapType() ||\nrightTypes.get(i).isMapType() || leftTypes.get(i).isStructType() ||\nrightTypes.get(i).isStructType()) {\nthrow new SemanticException(\"InPredicate of JSON, Map, Struct types is not supported\");\n}\nif (!Type.canCastTo(leftTypes.get(i), rightTypes.get(i))) {\nthrow new SemanticException(\n\"in predicate type \" + leftTypes.get(i).toSql() + \" with type \" + rightTypes.get(i).toSql()\n+ \" is invalid\");\n}\n}\nreturn null;\n}\n@Override\npublic Void visitLiteral(LiteralExpr node, Scope scope) {\nif (node instanceof LargeIntLiteral) {\nBigInteger value = ((LargeIntLiteral) node).getValue();\nif (value.compareTo(LargeIntLiteral.LARGE_INT_MIN) < 0 ||\nvalue.compareTo(LargeIntLiteral.LARGE_INT_MAX) > 0) {\nthrow new SemanticException(PARSER_ERROR_MSG.numOverflow(value.toString()), node.getPos());\n}\n}\nreturn null;\n}\n@Override\npublic Void visitIsNullPredicate(IsNullPredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nreturn null;\n}\n@Override\npublic Void visitLikePredicate(LikePredicate node, Scope scope) {\npredicateBaseAndCheck(node);\nType type1 = 
node.getChild(0).getType();\nType type2 = node.getChild(1).getType();\nif (!type1.isStringType() && !type1.isNull()) {\nthrow new SemanticException(\n\"left operand of \" + node.getOp().toString() + \" must be of type STRING: \" +\nAstToStringBuilder.toString(node), node.getPos());\n}\nif (!type2.isStringType() && !type2.isNull()) {\nthrow new SemanticException(\n\"right operand of \" + node.getOp().toString() + \" must be of type STRING: \" +\nAstToStringBuilder.toString(node), node.getPos());\n}\nif (LikePredicate.Operator.REGEXP.equals(node.getOp()) && !type2.isNull() && node.getChild(1).isLiteral()) {\ntry {\nPattern.compile(((StringLiteral) node.getChild(1)).getValue());\n} catch (PatternSyntaxException e) {\nthrow new SemanticException(\n\"Invalid regular expression in '\" + AstToStringBuilder.toString(node) + \"'\", node.getPos());\n}\n}\nreturn null;\n}\nprivate void predicateBaseAndCheck(Predicate node) {\nnode.setType(Type.BOOLEAN);\nfor (Expr expr : node.getChildren()) {\nif (expr.getType().isOnlyMetricType() ||\n(expr.getType().isComplexType() && !(node instanceof IsNullPredicate))) {\nthrow new SemanticException(\n\"HLL, BITMAP, PERCENTILE and ARRAY, MAP, STRUCT type couldn't as Predicate\", node.getPos());\n}\n}\n}\n@Override\npublic Void visitCastExpr(CastExpr cast, Scope context) {\nType castType;\nif (cast.isImplicit()) {\ncastType = cast.getType();\n} else {\ncastType = cast.getTargetTypeDef().getType();\n}\nif (!Type.canCastTo(cast.getChild(0).getType(), castType)) {\nthrow new SemanticException(\"Invalid type cast from \" + cast.getChild(0).getType().toSql() + \" to \"\n+ castType.toSql() + \" in sql `\" +\nAstToStringBuilder.toString(cast.getChild(0)).replace(\"%\", \"%%\") + \"`\",\ncast.getPos());\n}\ncast.setType(castType);\nreturn null;\n}\n@Override\nprivate void checkFunction(String fnName, FunctionCallExpr node) {\nswitch (fnName) {\ncase FunctionSet.TIME_SLICE:\ncase FunctionSet.DATE_SLICE:\nif (!(node.getChild(1) instanceof IntLiteral)) {\nthrow new SemanticException(\nfnName + \" requires second parameter must be a constant interval\", node.getPos());\n}\nif (((IntLiteral) node.getChild(1)).getValue() <= 0) {\nthrow new SemanticException(\nfnName + \" requires second parameter must be greater than 0\", node.getPos());\n}\nbreak;\ncase FunctionSet.ARRAY_FILTER:\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 array inputs or lambda functions\",\nnode.getPos());\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nif (!Type.canCastTo(node.getChild(1).getType(), Type.ARRAY_BOOLEAN)) {\nthrow new SemanticException(\"The second input of array_filter \" +\nnode.getChild(1).getType().toString() + \" can't cast to ARRAY\", node.getPos());\n}\nbreak;\ncase FunctionSet.ARRAY_SORTBY:\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 array inputs or lambda functions\",\nnode.getPos());\n}\nif (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be an array or a lambda function\", 
node.getPos());\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be an array or a lambda function\", node.getPos());\n}\nbreak;\ncase FunctionSet.ARRAY_CONCAT:\nif (node.getChildren().size() < 2) {\nthrow new SemanticException(fnName + \" should have at least two inputs\", node.getPos());\n}\nbreak;\ncase FunctionSet.ARRAY_GENERATE:\nif (node.getChildren().size() < 1 || node.getChildren().size() > 3) {\nthrow new SemanticException(fnName + \" has wrong input numbers\");\n}\nfor (Expr expr : node.getChildren()) {\nif ((expr instanceof SlotRef) && node.getChildren().size() != 3) {\nthrow new SemanticException(fnName + \" with IntColumn doesn't support default parameters\");\n}\nif (!(expr instanceof IntLiteral) && !(expr instanceof LargeIntLiteral) &&\n!(expr instanceof SlotRef) && !(expr instanceof NullLiteral)) {\nthrow new SemanticException(fnName + \"'s parameter only support Integer\");\n}\n}\nbreak;\ncase FunctionSet.MAP_FILTER:\nif (node.getChildren().size() != 2) {\nthrow new SemanticException(fnName + \" should have 2 inputs, \" +\n\"but there are just \" + node.getChildren().size() + \" inputs.\");\n}\nif (!node.getChild(0).getType().isMapType() && !node.getChild(0).getType().isNull()) {\nthrow new SemanticException(\"The first input of \" + fnName +\n\" should be a map or a lambda function.\");\n}\nif (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) {\nthrow new SemanticException(\"The second input of \" + fnName +\n\" should be a array or a lambda function.\");\n}\nif (!Type.canCastTo(node.getChild(1).getType(), Type.ARRAY_BOOLEAN)) {\nthrow new SemanticException(\"The second input of map_filter \" +\nnode.getChild(1).getType().toString() + \" can't cast to ARRAY\");\n}\nbreak;\n}\n}\nprivate Function getStrToDateFunction(FunctionCallExpr node, Type[] argumentTypes) {\n/*\n* @TODO: Determine the return type of this function\n* If is format is constant and don't contains time part, return date type, to compatible with mysql.\n* In fact we don't want to support str_to_date return date like mysql, reason:\n* 1. The return type of FE/BE str_to_date function signature is datetime, return date\n* let type different, it's will throw unpredictable error\n* 2. Support return date and datetime at same time in one function is complicated.\n* 3. The meaning of the function is confusing. 
In mysql, will return date if format is a constant\n* string and it's not contains \"%H/%M/%S\" pattern, but it's a trick logic, if format is a variable\n* expression, like: str_to_date(col1, col2), and the col2 is '%Y%m%d', the result always be\n* datetime.\n*/\nFunction fn = Expr.getBuiltinFunction(node.getFnName().getFunction(),\nargumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\nif (fn == null) {\nreturn null;\n}\nif (!node.getChild(1).isConstant()) {\nreturn fn;\n}\nExpressionMapping expressionMapping =\nnew ExpressionMapping(new Scope(RelationId.anonymous(), new RelationFields()),\ncom.google.common.collect.Lists.newArrayList());\nScalarOperator format = SqlToScalarOperatorTranslator.translate(node.getChild(1), expressionMapping,\nnew ColumnRefFactory());\nif (format.isConstantRef() && !HAS_TIME_PART.matcher(format.toString()).matches()) {\nreturn Expr.getBuiltinFunction(\"str2date\", argumentTypes,\nFunction.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n}\nreturn fn;\n}\nprivate Function getArrayGenerateFunction(FunctionCallExpr node) {\nif (node.getChildren().size() == 1) {\nLiteralExpr secondParam = (LiteralExpr) node.getChild(0);\nnode.clearChildren();\nnode.addChild(new IntLiteral(1));\nnode.addChild(secondParam);\n}\nif (node.getChildren().size() == 2) {\nint idx = 0;\nBigInteger[] childValues = new BigInteger[2];\nBoolean hasNUll = false;\nfor (Expr expr : node.getChildren()) {\nif (expr instanceof NullLiteral) {\nhasNUll = true;\n} else if (expr instanceof IntLiteral) {\nchildValues[idx++] = BigInteger.valueOf(((IntLiteral) expr).getValue());\n} else {\nchildValues[idx++] = ((LargeIntLiteral) expr).getValue();\n}\n}\nif (hasNUll || childValues[0].compareTo(childValues[1]) < 0) {\nnode.addChild(new IntLiteral(1));\n} else {\nnode.addChild(new IntLiteral(-1));\n}\n}\nType[] argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new);\nreturn Expr.getBuiltinFunction(FunctionSet.ARRAY_GENERATE, argumentTypes, Function.CompareMode.IS_SUPERTYPE_OF);\n}\n@Override\npublic Void visitGroupingFunctionCall(GroupingFunctionCallExpr node, Scope scope) {\nif (node.getChildren().size() < 1) {\nthrow new SemanticException(\"GROUPING functions required at least one parameters\", node.getPos());\n}\nif (node.getChildren().stream().anyMatch(e -> !(e instanceof SlotRef))) {\nthrow new SemanticException(\"grouping functions only support column\", node.getPos());\n}\nType[] childTypes = new Type[1];\nchildTypes[0] = Type.BIGINT;\nFunction fn = Expr.getBuiltinFunction(node.getFnName().getFunction(),\nchildTypes, Function.CompareMode.IS_IDENTICAL);\nnode.setFn(fn);\nnode.setType(fn.getReturnType());\nreturn null;\n}\n@Override\npublic Void visitCaseWhenExpr(CaseExpr node, Scope context) {\nint start = 0;\nint end = node.getChildren().size();\nExpr caseExpr = null;\nExpr elseExpr = null;\nif (node.hasCaseExpr()) {\ncaseExpr = node.getChild(0);\nstart++;\n}\nif (node.hasElseExpr()) {\nelseExpr = node.getChild(end - 1);\nend--;\n}\nif (node.getChildren().stream().anyMatch(d -> !d.getType().isScalarType())) {\nthrow new SemanticException(\"case-when only support scalar type\", node.getPos());\n}\nList whenTypes = Lists.newArrayList();\nif (null != caseExpr) {\nwhenTypes.add(caseExpr.getType());\n}\nfor (int i = start; i < end; i = i + 2) {\nwhenTypes.add(node.getChild(i).getType());\n}\nType compatibleType = Type.NULL;\nif (null != caseExpr) {\ncompatibleType = TypeManager.getCompatibleTypeForCaseWhen(whenTypes);\n}\nfor (Type type : whenTypes) {\nif 
(!Type.canCastTo(type, compatibleType)) {\nthrow new SemanticException(\"Invalid when type cast \" + type.toSql()\n+ \" to \" + compatibleType.toSql(), node.getPos());\n}\n}\nList thenTypes = Lists.newArrayList();\nfor (int i = start + 1; i < end; i = i + 2) {\nthenTypes.add(node.getChild(i).getType());\n}\nif (null != elseExpr) {\nthenTypes.add(elseExpr.getType());\n}\nType returnType = thenTypes.stream().allMatch(Type.NULL::equals) ? Type.BOOLEAN :\nTypeManager.getCompatibleTypeForCaseWhen(thenTypes);\nfor (Type type : thenTypes) {\nif (!Type.canCastTo(type, returnType)) {\nthrow new SemanticException(\"Invalid then type cast \" + type.toSql()\n+ \" to \" + returnType.toSql(), node.getPos());\n}\n}\nnode.setType(returnType);\nreturn null;\n}\n@Override\npublic Void visitSubquery(Subquery node, Scope context) {\nQueryAnalyzer queryAnalyzer = new QueryAnalyzer(session);\nqueryAnalyzer.analyze(node.getQueryStatement(), context);\nnode.setType(node.getQueryStatement().getQueryRelation().getRelationFields().getFieldByIndex(0).getType());\nreturn null;\n}\n@Override\npublic Void visitAnalyticExpr(AnalyticExpr node, Scope context) {\nvisit(node.getFnCall(), context);\nnode.setType(node.getFnCall().getType());\nif (node.getWindow() != null) {\nif (node.getWindow().getLeftBoundary() != null &&\nnode.getWindow().getLeftBoundary().getExpr() != null) {\nvisit(node.getWindow().getLeftBoundary().getExpr(), context);\n}\nif (node.getWindow().getRightBoundary() != null &&\nnode.getWindow().getRightBoundary().getExpr() != null) {\nvisit(node.getWindow().getRightBoundary().getExpr(), context);\n}\n}\nnode.getPartitionExprs().forEach(e -> visit(e, context));\nnode.getOrderByElements().stream().map(OrderByElement::getExpr).forEach(e -> visit(e, context));\nverifyAnalyticExpression(node);\nreturn null;\n}\n@Override\npublic Void visitInformationFunction(InformationFunction node, Scope context) {\nString funcType = node.getFuncType();\nif (funcType.equalsIgnoreCase(\"DATABASE\") || funcType.equalsIgnoreCase(\"SCHEMA\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(ClusterNamespace.getNameFromFullName(session.getDatabase()));\n} else if (funcType.equalsIgnoreCase(\"USER\")) {\nnode.setType(Type.VARCHAR);\nString user = session.getQualifiedUser();\nString remoteIP = session.getRemoteIP();\nnode.setStrValue(new UserIdentity(user, remoteIP).toString());\n} else if (funcType.equalsIgnoreCase(\"CURRENT_USER\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(session.getCurrentUserIdentity().toString());\n} else if (funcType.equalsIgnoreCase(\"CURRENT_ROLE\")) {\nnode.setType(Type.VARCHAR);\nAuthorizationManager manager = session.getGlobalStateMgr().getAuthorizationManager();\nList roleName = new ArrayList<>();\ntry {\nfor (Long roleId : session.getCurrentRoleIds()) {\nRolePrivilegeCollection rolePrivilegeCollection =\nmanager.getRolePrivilegeCollectionUnlocked(roleId, false);\nif (rolePrivilegeCollection != null) {\nroleName.add(rolePrivilegeCollection.getName());\n}\n}\n} catch (PrivilegeException e) {\nthrow new SemanticException(e.getMessage());\n}\nif (roleName.isEmpty()) {\nnode.setStrValue(\"NONE\");\n} else {\nnode.setStrValue(Joiner.on(\", \").join(roleName));\n}\n} else if (funcType.equalsIgnoreCase(\"CONNECTION_ID\")) {\nnode.setType(Type.BIGINT);\nnode.setIntValue(session.getConnectionId());\nnode.setStrValue(\"\");\n} else if (funcType.equalsIgnoreCase(\"CURRENT_CATALOG\")) {\nnode.setType(Type.VARCHAR);\nnode.setStrValue(session.getCurrentCatalog().toString());\n}\nreturn 
null;\n}\n@Override\npublic Void visitVariableExpr(VariableExpr node, Scope context) {\ntry {\nif (node.getSetType().equals(SetType.USER)) {\nUserVariable userVariable = session.getUserVariables(node.getName());\nif (userVariable == null) {\nnode.setType(Type.STRING);\nnode.setIsNull();\nreturn null;\n}\nType variableType = userVariable.getEvaluatedExpression().getType();\nnode.setType(variableType);\nif (userVariable.getEvaluatedExpression() instanceof NullLiteral) {\nnode.setIsNull();\n} else {\nnode.setValue(userVariable.getEvaluatedExpression().getRealObjectValue());\n}\n} else {\nVariableMgr.fillValue(session.getSessionVariable(), node);\nif (!Strings.isNullOrEmpty(node.getName()) &&\nnode.getName().equalsIgnoreCase(SessionVariable.SQL_MODE)) {\nnode.setType(Type.VARCHAR);\nnode.setValue(SqlModeHelper.decode((long) node.getValue()));\n}\n}\n} catch (AnalysisException | DdlException e) {\nthrow new SemanticException(e.getMessage());\n}\nreturn null;\n}\n@Override\npublic Void visitDefaultValueExpr(DefaultValueExpr node, Scope context) {\nnode.setType(Type.VARCHAR);\nreturn null;\n}\n@Override\npublic Void visitCloneExpr(CloneExpr node, Scope context) {\nreturn null;\n}\n}" + }, + { + "comment": "yes, if we wouldn't have the exists check, `deleteObject` would be called on the empty directory which would internally call `delete(path, false)` that calls the `directory` causing a `FileNotFoundException` in [PrestoS3FileSystem:483](https://github.com/prestodb/presto/blob/master/presto-hive%2Fsrc%2Fmain%2Fjava%2Fcom%2Ffacebook%2Fpresto%2Fhive%2Fs3%2FPrestoS3FileSystem.java#L483). The return value will be `false` which makes us do the `exists` check anyway at the end of `deleteObject`. The rational was to have a clearer code instead handling the empty directory explicitly.", + "method_body": "private void deleteObject(Path path) throws IOException {\nif (!exists(path)) {\nreturn;\n}\nboolean success = true;\nIOException actualException = null;\ntry {\nsuccess = super.delete(path, false);\n} catch (IOException e) {\nactualException = e;\n}\nif (!success || actualException != null) {\nif (exists(path)) {\nthrow Optional.ofNullable(actualException)\n.orElse(\nnew IOException(\npath.getPath()\n+ \" could not be deleted for unknown reasons.\"));\n}\n}\n}", + "target_code": "if (!exists(path)) {", + "method_body_after": "private void deleteObject(Path path) throws IOException {\nboolean success = true;\nIOException actualException = null;\ntry {\nsuccess = super.delete(path, false);\n} catch (IOException e) {\nactualException = e;\n}\nif (!success || actualException != null) {\nif (exists(path)) {\nthrow Optional.ofNullable(actualException)\n.orElse(\nnew IOException(\npath.getPath()\n+ \" could not be deleted for unknown reasons.\"));\n}\n}\n}", + "context_before": "class FlinkS3PrestoFileSystem extends FlinkS3FileSystem {\npublic FlinkS3PrestoFileSystem(\nFileSystem hadoopS3FileSystem,\nString localTmpDirectory,\n@Nullable String entropyInjectionKey,\nint entropyLength,\n@Nullable S3AccessHelper s3UploadHelper,\nlong s3uploadPartSize,\nint maxConcurrentUploadsPerStream) {\nsuper(\nhadoopS3FileSystem,\nlocalTmpDirectory,\nentropyInjectionKey,\nentropyLength,\ns3UploadHelper,\ns3uploadPartSize,\nmaxConcurrentUploadsPerStream);\n}\n@Override\npublic boolean delete(Path path, boolean recursive) throws IOException {\nif (recursive) {\ndeleteRecursively(path);\n} else {\ndeleteObject(path);\n}\nreturn true;\n}\nprivate void deleteRecursively(Path path) throws IOException {\nfinal FileStatus[] 
containingFiles =\nPreconditions.checkNotNull(\nlistStatus(path),\n\"Hadoop FileSystem.listStatus should never return null based on its contract.\");\nif (containingFiles.length == 0) {\ndeleteObject(path);\nreturn;\n}\nIOException exception = null;\nfor (FileStatus fileStatus : containingFiles) {\nfinal Path childPath = fileStatus.getPath();\ntry {\nif (fileStatus.isDir()) {\ndeleteRecursively(childPath);\n} else {\ndeleteObject(childPath);\n}\n} catch (IOException e) {\nexception = ExceptionUtils.firstOrSuppressed(e, exception);\n}\n}\nif (exception != null) {\nthrow exception;\n}\n}\n/**\n* Deletes the object referenced by the passed {@code path}.\n*\n* @param path The path referring to the object that shall be deleted.\n* @throws IOException if an error occurred while deleting the file other than the {@code path}\n* referring to a non-empty directory.\n*/\n}", + "context_after": "class FlinkS3PrestoFileSystem extends FlinkS3FileSystem {\npublic FlinkS3PrestoFileSystem(\nFileSystem hadoopS3FileSystem,\nString localTmpDirectory,\n@Nullable String entropyInjectionKey,\nint entropyLength,\n@Nullable S3AccessHelper s3UploadHelper,\nlong s3uploadPartSize,\nint maxConcurrentUploadsPerStream) {\nsuper(\nhadoopS3FileSystem,\nlocalTmpDirectory,\nentropyInjectionKey,\nentropyLength,\ns3UploadHelper,\ns3uploadPartSize,\nmaxConcurrentUploadsPerStream);\n}\n@Override\npublic boolean delete(Path path, boolean recursive) throws IOException {\nif (recursive) {\ndeleteRecursively(path);\n} else {\ndeleteObject(path);\n}\nreturn true;\n}\nprivate void deleteRecursively(Path path) throws IOException {\nfinal FileStatus[] containingFiles =\nPreconditions.checkNotNull(\nlistStatus(path),\n\"Hadoop FileSystem.listStatus should never return null based on its contract.\");\nif (containingFiles.length == 0) {\ndeleteObject(path);\nreturn;\n}\nIOException exception = null;\nfor (FileStatus fileStatus : containingFiles) {\nfinal Path childPath = fileStatus.getPath();\ntry {\nif (fileStatus.isDir()) {\ndeleteRecursively(childPath);\n} else {\ndeleteObject(childPath);\n}\n} catch (IOException e) {\nexception = ExceptionUtils.firstOrSuppressed(e, exception);\n}\n}\nif (exception != null) {\nthrow exception;\n}\n}\n/**\n* Deletes the object referenced by the passed {@code path}. This method is used to work around\n* the fact that Presto doesn't allow us to differentiate between deleting a non-existing object\n* and some other errors. 
Therefore, a final check for existence is necessary in case of an\n* error or false return value.\n*\n* @param path The path referring to the object that shall be deleted.\n* @throws IOException if an error occurred while deleting the file other than the {@code path}\n* referring to a non-empty directory.\n*/\n}" + }, + { + "comment": "Yes", + "method_body": "public Optional transform(AnnotationDeclarationNode annotationDeclarationNode) {\nStringBuilder name = new StringBuilder(SyntaxKind.ANNOTATION_KEYWORD.stringValue());\nOptional typeDesc = annotationDeclarationNode.typeDescriptor();\nif (typeDesc.isPresent()) {\nname.append(\" \").append(typeDesc.get().toSourceCode());\n}\nname.append(\" \").append(annotationDeclarationNode.annotationTag().text());\nSymbolKind symbolKind = SymbolKind.Property;\nRange range = DocumentSymbolUtil.generateNodeRange(annotationDeclarationNode);\nOptional metadata = annotationDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.ofNullable(createDocumentSymbol(name.toString(), symbolKind, null,\nrange, range, isDeprecated, Collections.emptyList(), this.context));\n}", + "target_code": "name.append(\" \").append(annotationDeclarationNode.annotationTag().text());", + "method_body_after": "public Optional transform(AnnotationDeclarationNode annotationDeclarationNode) {\nString name = annotationDeclarationNode.annotationTag().text();\nSymbolKind symbolKind = SymbolKind.Property;\nRange range = DocumentSymbolUtil.generateNodeRange(annotationDeclarationNode);\nOptional metadata = annotationDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated,\nCollections.emptyList()));\n}", + "context_before": "class DocumentSymbolResolver extends NodeTransformer> {\nprivate List documentSymbolStore;\nprivate DocumentSymbolContext context;\nDocumentSymbolResolver(DocumentSymbolContext context) {\nthis.context = context;\ndocumentSymbolStore = new ArrayList<>();\n}\npublic List getDocumentSymbolStore() {\nreturn this.documentSymbolStore;\n}\n@Override\npublic Optional transform(Token token) {\nreturn Optional.empty();\n}\n@Override\nprotected Optional transformSyntaxNode(Node node) {\nreturn Optional.empty();\n}\n@Override\npublic Optional transform(ModulePartNode modulePartNode) {\nList memberSymbols = new ArrayList<>();\nfor (ModuleMemberDeclarationNode member : modulePartNode.members()) {\nmember.apply(this).ifPresent(memberSymbols::add);\n}\nif (context.getHierarchicalDocumentSymbolSupport()) {\nthis.documentSymbolStore.addAll(memberSymbols);\n}\n/* since module node is a collection of multiple documents. 
We don't create the\ndocument symbol node corresponding to the module node here.\n*/\nreturn Optional.empty();\n}\n@Override\npublic Optional transform(FunctionDefinitionNode functionDefinitionNode) {\nString name = \"\";\nRange range = DocumentSymbolUtil.generateNodeRange(functionDefinitionNode);\nSymbolKind symbolKind;\nOptional metadata = functionDefinitionNode.metadata();\nboolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());\nswitch (functionDefinitionNode.kind()) {\ncase FUNCTION_DEFINITION:\nname = functionDefinitionNode.functionName().text();\nsymbolKind = SymbolKind.Function;\nbreak;\ncase OBJECT_METHOD_DEFINITION:\nname = functionDefinitionNode.functionName().text();\nif (\"init\".equals(name)) {\nsymbolKind = SymbolKind.Constructor;\n} else {\nsymbolKind = SymbolKind.Method;\n}\nbreak;\ncase RESOURCE_ACCESSOR_DEFINITION:\nString accessor = functionDefinitionNode.functionName().text();\nList pathParams = new ArrayList<>();\nString resourcePath = \"\";\nfor (Node child : functionDefinitionNode.children()) {\nif (child.kind() == SyntaxKind.IDENTIFIER_TOKEN &&\n!((IdentifierToken) child).text().equals(accessor)) {\nresourcePath = ((IdentifierToken) child).text();\n} else if (child.kind() == SyntaxKind.RESOURCE_PATH_SEGMENT_PARAM) {\nString[] param = child.toSourceCode()\n.replaceAll(\"\\\\[|\\\\]\", \"\").split(\"\\\\s+\");\npathParams.add(param[param.length - 1]);\n} else if (child.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) {\npathParams.add(\"*\");\n}\n}\nif (!accessor.isEmpty()) {\nname = accessor + \":\" + resourcePath;\nif (!pathParams.isEmpty()) {\nString params = pathParams.stream().map(param -> \"{\" + param + \"}\")\n.collect(Collectors.joining(\"/\"));\nname = name + (resourcePath.isEmpty() ? 
params : \"/\" + params);\n} else if (resourcePath.isEmpty()) {\nname = name + \"/\";\n}\n}\nsymbolKind = SymbolKind.Function;\nbreak;\ndefault:\nreturn Optional.empty();\n}\nreturn Optional.ofNullable(createDocumentSymbol(name, symbolKind,\nnull, range, range, isDeprecated, Collections.emptyList(), this.context));\n}\n@Override\npublic Optional transform(MethodDeclarationNode methodDeclarationNode) {\nString name = methodDeclarationNode.methodName().text();\nSymbolKind symbolKind = SymbolKind.Method;\nRange range = DocumentSymbolUtil.generateNodeRange(methodDeclarationNode);\nOptional metadata = methodDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.ofNullable(createDocumentSymbol(name, symbolKind,\nnull, range, range, isDeprecated, Collections.emptyList(), this.context));\n}\n@Override\npublic Optional transform(ClassDefinitionNode classDefinitionNode) {\nString name = classDefinitionNode.className().text();\nSymbolKind symbolKind = SymbolKind.Class;\nRange range = DocumentSymbolUtil.generateNodeRange(classDefinitionNode);\nOptional metadata = classDefinitionNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nList children = transformMembers(classDefinitionNode.members());\nreturn Optional.ofNullable(createDocumentSymbol(name, symbolKind,\nnull, range, range, isDeprecated, children, this.context));\n}\n@Override\npublic Optional transform(ServiceDeclarationNode serviceDeclarationNode) {\nOptional typeDesc = serviceDeclarationNode.typeDescriptor();\nStringBuilder name = new StringBuilder(\"service\");\nif (typeDesc.isPresent()) {\nname.append(\" \").append(typeDesc.get().toSourceCode());\n}\nname.append(\" \").append(serviceDeclarationNode.absoluteResourcePath().stream()\n.map(Node::toSourceCode).collect(Collectors.joining(\"\")))\n.append(\" on \").append(serviceDeclarationNode.expressions().stream()\n.map(Node::toSourceCode).collect(Collectors.joining(\",\")));\nSymbolKind symbolKind = SymbolKind.Object;\nRange range = DocumentSymbolUtil.generateNodeRange(serviceDeclarationNode);\nOptional metadata = serviceDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nList children = transformMembers(serviceDeclarationNode.members());\nreturn Optional.ofNullable(createDocumentSymbol(name.toString(), symbolKind, null,\nrange, range, isDeprecated, children, this.context));\n}\n@Override\npublic Optional transform(TypeDefinitionNode typeDefinitionNode) {\nString name = typeDefinitionNode.typeName().text();\nNode typeDescriptor = typeDefinitionNode.typeDescriptor();\nSymbolKind symbolKind;\nList children = new ArrayList<>();\nswitch (typeDescriptor.kind()) {\ncase RECORD_TYPE_DESC:\nsymbolKind = SymbolKind.Struct;\nbreak;\ncase OBJECT_TYPE_DESC:\nsymbolKind = SymbolKind.Interface;\nchildren.addAll(transformMembers(((ObjectTypeDescriptorNode) typeDescriptor).members()));\nbreak;\ndefault:\nsymbolKind = SymbolKind.TypeParameter;\n}\nRange range = DocumentSymbolUtil.generateNodeRange(typeDefinitionNode);\nOptional metadata = typeDefinitionNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.ofNullable(createDocumentSymbol(name, symbolKind, null,\nrange, range, isDeprecated, children, this.context));\n}\n@Override\npublic Optional transform(ModuleVariableDeclarationNode 
moduleVariableDeclarationNode) {\nString name = moduleVariableDeclarationNode.typedBindingPattern().bindingPattern().toSourceCode();\nSymbolKind symbolKind = SymbolKind.Variable;\nRange range = DocumentSymbolUtil.generateNodeRange(moduleVariableDeclarationNode);\nOptional metadata = moduleVariableDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.ofNullable(createDocumentSymbol(name, symbolKind, null,\nrange, range, isDeprecated, Collections.emptyList(), this.context));\n}\n@Override\npublic Optional transform(ConstantDeclarationNode constantDeclarationNode) {\nString name = constantDeclarationNode.variableName().text();\nSymbolKind symbolKind = SymbolKind.Constant;\nRange range = DocumentSymbolUtil.generateNodeRange(constantDeclarationNode);\nOptional metadata = constantDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.ofNullable(createDocumentSymbol(name, symbolKind, null,\nrange, range, isDeprecated, Collections.emptyList(), this.context));\n}\n@Override\npublic Optional transform(EnumDeclarationNode enumDeclarationNode) {\nString name = enumDeclarationNode.identifier().text();\nSymbolKind symbolKind = SymbolKind.Enum;\nRange range = DocumentSymbolUtil.generateNodeRange(enumDeclarationNode);\nOptional metadata = enumDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.ofNullable(createDocumentSymbol(name, symbolKind, null,\nrange, range, isDeprecated, Collections.emptyList(), this.context));\n}\n@Override\npublic Optional transform(ModuleXMLNamespaceDeclarationNode moduleXMLNamespaceDeclarationNode) {\nString name = SyntaxKind.XMLNS_KEYWORD.stringValue() + \" \"\n+ moduleXMLNamespaceDeclarationNode.namespaceuri().toSourceCode();\nOptional prefix = moduleXMLNamespaceDeclarationNode.namespacePrefix();\nif (prefix.isPresent()) {\nname = name + \" as \" + prefix.get();\n}\nSymbolKind symbolKind = SymbolKind.Namespace;\nRange range = DocumentSymbolUtil.generateNodeRange(moduleXMLNamespaceDeclarationNode);\nreturn Optional.ofNullable(createDocumentSymbol(name, symbolKind, null,\nrange, range, false, Collections.emptyList(), this.context));\n}\n@Override\npublic Optional transform(ListenerDeclarationNode listenerDeclarationNode) {\nStringBuilder name = new StringBuilder(SyntaxKind.LISTENER_KEYWORD.stringValue());\nOptional typeDescriptorNode = listenerDeclarationNode.typeDescriptor();\nif (typeDescriptorNode.isPresent()) {\nname.append(\" \" + typeDescriptorNode.get().toSourceCode());\n}\nname.append(\" \" + listenerDeclarationNode.variableName().text());\nSymbolKind symbolKind = SymbolKind.Object;\nRange range = DocumentSymbolUtil.generateNodeRange(listenerDeclarationNode);\nOptional metadata = listenerDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.ofNullable(createDocumentSymbol(name.toString(), symbolKind, null,\nrange, range, isDeprecated, Collections.emptyList(), this.context));\n}\n@Override\n@Override\npublic Optional transform(ObjectFieldNode objectFieldNode) {\nString name = objectFieldNode.fieldName().text();\nSymbolKind symbolKind = SymbolKind.Field;\nRange range = DocumentSymbolUtil.generateNodeRange(objectFieldNode);\nOptional metadata = objectFieldNode.metadata();\nboolean isDeprecated = metadata.isPresent() 
&&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.ofNullable(createDocumentSymbol(name, symbolKind, null,\nrange, range, isDeprecated, Collections.emptyList(), this.context));\n}\n/**\n* Provided a ChildNodes list generate the corresponding document symbols.\n*\n* @param nodes {@link NodeList} Member nodes list.\n* @return {@link List} Generated list of document symbols.\n*/\nprivate List transformMembers(NodeList nodes) {\nList childSymbols = new ArrayList<>();\nnodes.forEach(node -> {\nnode.apply(this).ifPresent(childSymbols::add);\n});\nreturn childSymbols;\n}\n/**\n* Document symbol builder.\n*\n* @param name symbol name.\n* @param kind symbol kind.\n* @param detail symbol detail.\n* @param range Range of the symbol.\n* @param selectionRange selection range of the symbol.\n* @param isDeprecated Whether the symbol is deprecated.\n* @param children Child document symbols.\n* @param context Document symbol context.\n* @return\n*/\npublic DocumentSymbol createDocumentSymbol(String name, SymbolKind kind,\nString detail, Range range,\nRange selectionRange, boolean isDeprecated,\nList children, DocumentSymbolContext context) {\nif (name == null || name.isEmpty()) {\nreturn null;\n}\nDocumentSymbol documentSymbol = new DocumentSymbol();\ndocumentSymbol.setName(name);\ndocumentSymbol.setKind(kind);\ndocumentSymbol.setDetail(detail);\ndocumentSymbol.setRange(range);\ndocumentSymbol.setSelectionRange(selectionRange);\nif (isDeprecated && context.deprecatedSupport()) {\ndocumentSymbol.setTags(List.of(SymbolTag.Deprecated));\n}\nif (context.getHierarchicalDocumentSymbolSupport()) {\ndocumentSymbol.setChildren(children);\n} else {\nthis.documentSymbolStore.add(documentSymbol);\n}\nreturn documentSymbol;\n}\n}", + "context_after": "class DocumentSymbolResolver extends NodeTransformer> {\nprivate List documentSymbolStore;\nprivate DocumentSymbolContext context;\nDocumentSymbolResolver(DocumentSymbolContext context) {\nthis.context = context;\ndocumentSymbolStore = new ArrayList<>();\n}\npublic List getDocumentSymbolStore() {\nreturn this.documentSymbolStore;\n}\n@Override\npublic Optional transform(Token token) {\nreturn Optional.empty();\n}\n@Override\nprotected Optional transformSyntaxNode(Node node) {\nreturn Optional.empty();\n}\n@Override\npublic Optional transform(ModulePartNode modulePartNode) {\nList memberSymbols = new ArrayList<>();\nfor (ModuleMemberDeclarationNode member : modulePartNode.members()) {\nmember.apply(this).ifPresent(memberSymbols::add);\n}\nif (context.getHierarchicalDocumentSymbolSupport()) {\nthis.documentSymbolStore.addAll(memberSymbols);\n}\n/* since module node is a collection of multiple documents. 
We don't create the\ndocument symbol node corresponding to the module node here.\n*/\nreturn Optional.empty();\n}\n@Override\npublic Optional transform(FunctionDefinitionNode functionDefinitionNode) {\nString name = \"\";\nRange range = DocumentSymbolUtil.generateNodeRange(functionDefinitionNode);\nSymbolKind symbolKind;\nOptional metadata = functionDefinitionNode.metadata();\nboolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());\nswitch (functionDefinitionNode.kind()) {\ncase FUNCTION_DEFINITION:\nname = functionDefinitionNode.functionName().text();\nsymbolKind = SymbolKind.Function;\nbreak;\ncase OBJECT_METHOD_DEFINITION:\nname = functionDefinitionNode.functionName().text();\nif (\"init\".equals(name)) {\nsymbolKind = SymbolKind.Constructor;\n} else {\nsymbolKind = SymbolKind.Method;\n}\nbreak;\ncase RESOURCE_ACCESSOR_DEFINITION:\nString accessor = functionDefinitionNode.functionName().text();\nList pathParams = new ArrayList<>();\nString resourcePath = \"\";\nfor (Node child : functionDefinitionNode.children()) {\nif (child.kind() == SyntaxKind.IDENTIFIER_TOKEN &&\n!((IdentifierToken) child).text().equals(accessor)) {\nresourcePath = ((IdentifierToken) child).text();\n} else if (child.kind() == SyntaxKind.RESOURCE_PATH_SEGMENT_PARAM) {\nString[] param = child.toSourceCode()\n.replaceAll(\"\\\\[|\\\\]\", \"\").split(\"\\\\s+\");\npathParams.add(param[param.length - 1]);\n} else if (child.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) {\npathParams.add(\"*\");\n}\n}\nif (!accessor.isEmpty()) {\nname = accessor + \":\" + resourcePath;\nif (!pathParams.isEmpty()) {\nString params = pathParams.stream().map(param -> \"{\" + param + \"}\")\n.collect(Collectors.joining(\"/\"));\nname = name + (resourcePath.isEmpty() ? 
params : \"/\" + params);\n} else if (resourcePath.isEmpty()) {\nname = name + \"/\";\n}\n}\nsymbolKind = SymbolKind.Function;\nbreak;\ndefault:\nreturn Optional.empty();\n}\nif (name == null || name.isEmpty()) {\nreturn Optional.empty();\n}\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated,\nCollections.emptyList()));\n}\n@Override\npublic Optional transform(MethodDeclarationNode methodDeclarationNode) {\nString name = methodDeclarationNode.methodName().text();\nSymbolKind symbolKind = SymbolKind.Method;\nRange range = DocumentSymbolUtil.generateNodeRange(methodDeclarationNode);\nOptional metadata = methodDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated,\nCollections.emptyList()));\n}\n@Override\npublic Optional transform(ClassDefinitionNode classDefinitionNode) {\nString name = classDefinitionNode.className().text();\nSymbolKind symbolKind = SymbolKind.Class;\nRange range = DocumentSymbolUtil.generateNodeRange(classDefinitionNode);\nOptional metadata = classDefinitionNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nList children = transformMembers(classDefinitionNode.members());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated, children));\n}\n@Override\npublic Optional transform(ServiceDeclarationNode serviceDeclarationNode) {\nStringBuilder name = new StringBuilder(\"service\");\nname.append(\" \").append(serviceDeclarationNode.absoluteResourcePath().stream()\n.map(Node::toSourceCode).collect(Collectors.joining(\"\")));\nSymbolKind symbolKind = SymbolKind.Object;\nRange range = DocumentSymbolUtil.generateNodeRange(serviceDeclarationNode);\nOptional metadata = serviceDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nList children = transformMembers(serviceDeclarationNode.members());\nreturn Optional.of(createDocumentSymbol(name.toString(), symbolKind, range, range, isDeprecated, children));\n}\n@Override\npublic Optional transform(TypeDefinitionNode typeDefinitionNode) {\nString name = typeDefinitionNode.typeName().text();\nNode typeDescriptor = typeDefinitionNode.typeDescriptor();\nSymbolKind symbolKind;\nList children = new ArrayList<>();\nswitch (typeDescriptor.kind()) {\ncase RECORD_TYPE_DESC:\nsymbolKind = SymbolKind.Struct;\nRecordTypeDescriptorNode recordTypeDescriptorNode = (RecordTypeDescriptorNode) typeDescriptor;\nchildren.addAll(transformMembers(recordTypeDescriptorNode.fields()));\nOptional restTypeDec = recordTypeDescriptorNode.recordRestDescriptor();\nif (restTypeDec.isPresent()) {\nOptional restDocSymbol = restTypeDec.get().apply(this);\nrestDocSymbol.ifPresent(children::add);\n}\nbreak;\ncase OBJECT_TYPE_DESC:\nsymbolKind = SymbolKind.Interface;\nchildren.addAll(transformMembers(((ObjectTypeDescriptorNode) typeDescriptor).members()));\nbreak;\ndefault:\nsymbolKind = SymbolKind.TypeParameter;\n}\nRange range = DocumentSymbolUtil.generateNodeRange(typeDefinitionNode);\nOptional metadata = typeDefinitionNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated, children));\n}\n@Override\npublic Optional transform(ModuleVariableDeclarationNode 
moduleVariableDeclarationNode) {\nBindingPatternNode bindingPatternNode = moduleVariableDeclarationNode.typedBindingPattern().bindingPattern();\nif (bindingPatternNode.kind() != SyntaxKind.CAPTURE_BINDING_PATTERN) {\nreturn Optional.empty();\n}\nString name = bindingPatternNode.toSourceCode();\nSymbolKind symbolKind = SymbolKind.Variable;\nRange range = DocumentSymbolUtil.generateNodeRange(moduleVariableDeclarationNode);\nOptional metadata = moduleVariableDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated,\nCollections.emptyList()));\n}\n@Override\npublic Optional transform(ConstantDeclarationNode constantDeclarationNode) {\nString name = constantDeclarationNode.variableName().text();\nSymbolKind symbolKind = SymbolKind.Constant;\nRange range = DocumentSymbolUtil.generateNodeRange(constantDeclarationNode);\nOptional metadata = constantDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated,\nCollections.emptyList()));\n}\n@Override\npublic Optional transform(EnumDeclarationNode enumDeclarationNode) {\nString name = enumDeclarationNode.identifier().text();\nSymbolKind symbolKind = SymbolKind.Enum;\nRange range = DocumentSymbolUtil.generateNodeRange(enumDeclarationNode);\nOptional metadata = enumDeclarationNode.metadata();\nList children = transformMembers(enumDeclarationNode.enumMemberList());\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated, children));\n}\n@Override\npublic Optional transform(ModuleXMLNamespaceDeclarationNode moduleXMLNamespaceDeclarationNode) {\nOptional prefix = moduleXMLNamespaceDeclarationNode.namespacePrefix();\nString name = prefix.isPresent() ? 
prefix.get().text() : SyntaxKind.XMLNS_KEYWORD.stringValue() + \" \"\n+ moduleXMLNamespaceDeclarationNode.namespaceuri().toSourceCode();\nSymbolKind symbolKind = SymbolKind.Namespace;\nRange range = DocumentSymbolUtil.generateNodeRange(moduleXMLNamespaceDeclarationNode);\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, Collections.emptyList()));\n}\n@Override\npublic Optional transform(ListenerDeclarationNode listenerDeclarationNode) {\nString name = listenerDeclarationNode.variableName().text();\nSymbolKind symbolKind = SymbolKind.Object;\nRange range = DocumentSymbolUtil.generateNodeRange(listenerDeclarationNode);\nOptional metadata = listenerDeclarationNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated,\nCollections.emptyList()));\n}\n@Override\npublic Optional transform(ObjectFieldNode objectFieldNode) {\nString name = objectFieldNode.fieldName().text();\nSymbolKind symbolKind = SymbolKind.Field;\nRange range = DocumentSymbolUtil.generateNodeRange(objectFieldNode);\nOptional metadata = objectFieldNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated,\nCollections.emptyList()));\n}\n@Override\npublic Optional transform(RecordFieldNode recordFieldNode) {\nString name = recordFieldNode.fieldName().text();\nSymbolKind symbolKind = SymbolKind.Field;\nRange range = DocumentSymbolUtil.generateNodeRange(recordFieldNode);\nOptional metadata = recordFieldNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated,\nCollections.emptyList()));\n}\n@Override\npublic Optional transform(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) {\nString name = recordFieldWithDefaultValueNode.fieldName().text();\nSymbolKind symbolKind = SymbolKind.Field;\nRange range = DocumentSymbolUtil.generateNodeRange(recordFieldWithDefaultValueNode);\nOptional metadata = recordFieldWithDefaultValueNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated,\nCollections.emptyList()));\n}\n@Override\npublic Optional transform(RecordRestDescriptorNode recordRestDescriptorNode) {\nString name = recordRestDescriptorNode.ellipsisToken().text() +\nrecordRestDescriptorNode.typeName().toSourceCode().trim();\nSymbolKind symbolKind = SymbolKind.Field;\nRange range = DocumentSymbolUtil.generateNodeRange(recordRestDescriptorNode);\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, Collections.emptyList()));\n}\n@Override\npublic Optional transform(EnumMemberNode enumMemberNode) {\nString name = enumMemberNode.identifier().text();\nSymbolKind symbolKind = SymbolKind.EnumMember;\nRange range = DocumentSymbolUtil.generateNodeRange(enumMemberNode);\nOptional metadata = enumMemberNode.metadata();\nboolean isDeprecated = metadata.isPresent() &&\nDocumentSymbolUtil.isDeprecated(metadata.get());\nreturn Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated,\nCollections.emptyList()));\n}\n/**\n* Provided a ChildNodes list generate the corresponding document symbols.\n*\n* @param 
nodes {@link NodeList} Member nodes list.\n* @return {@link List} Generated list of document symbols.\n*/\nprivate List transformMembers(NodeList nodes) {\nList childSymbols = new ArrayList<>();\nnodes.forEach(node -> {\nnode.apply(this).ifPresent(childSymbols::add);\n});\nreturn childSymbols;\n}\nprivate DocumentSymbol createDocumentSymbol(String name, SymbolKind kind, Range range,\nRange selectionRange, List children) {\nreturn createDocumentSymbol(name, kind, null, range, selectionRange, false, children);\n}\nprivate DocumentSymbol createDocumentSymbol(String name, SymbolKind kind, Range range,\nRange selectionRange, boolean isDeprecated,\nList children) {\nreturn createDocumentSymbol(name, kind, null, range, selectionRange, isDeprecated, children);\n}\n/**\n* Document symbol builder.\n*\n* @param name symbol name.\n* @param kind symbol kind.\n* @param detail symbol detail.\n* @param range Range of the symbol.\n* @param selectionRange selection range of the symbol.\n* @param isDeprecated Whether the symbol is deprecated.\n* @param children Child document symbols.\n* @return\n*/\nprivate DocumentSymbol createDocumentSymbol(String name, SymbolKind kind,\nString detail, Range range,\nRange selectionRange, boolean isDeprecated,\nList children) {\nDocumentSymbol documentSymbol = new DocumentSymbol();\ndocumentSymbol.setName(name);\ndocumentSymbol.setKind(kind);\ndocumentSymbol.setDetail(detail);\ndocumentSymbol.setRange(range);\ndocumentSymbol.setSelectionRange(selectionRange);\nif (isDeprecated && this.context.deprecatedSupport()) {\ndocumentSymbol.setTags(List.of(SymbolTag.Deprecated));\n}\nif (this.context.getHierarchicalDocumentSymbolSupport()) {\ndocumentSymbol.setChildren(children);\n} else {\nthis.documentSymbolStore.add(documentSymbol);\n}\nreturn documentSymbol;\n}\n}" + }, + { + "comment": "```suggestion if (warehouse != null) { ```", + "method_body": "public void replayDropComputeNode(long computeNodeId) {\nLOG.debug(\"replayDropComputeNode: {}\", computeNodeId);\nMap copiedComputeNodes = Maps.newHashMap(idToComputeNodeRef);\nComputeNode cn = copiedComputeNodes.remove(computeNodeId);\nidToComputeNodeRef = ImmutableMap.copyOf(copiedComputeNodes);\nfinal Cluster cluster = GlobalStateMgr.getCurrentState().getCluster();\nif (null != cluster) {\ncluster.removeComputeNode(computeNodeId);\n} else {\nLOG.error(\"Cluster DEFAULT_CLUSTER \" + DEFAULT_CLUSTER + \" no exist.\");\n}\nif (Config.only_use_compute_node) {\nWarehouse warehouse = GlobalStateMgr.getCurrentWarehouseMgr().\ngetWarehouse(WarehouseManager.DEFAULT_WAREHOUSE_NAME);\nif (null != warehouse) {\nwarehouse.getAnyAvailableCluster().dropNode(cn.getId());\n}\n}\n}", + "target_code": "if (null != warehouse) {", + "method_body_after": "public void replayDropComputeNode(long computeNodeId) {\nLOG.debug(\"replayDropComputeNode: {}\", computeNodeId);\nMap copiedComputeNodes = Maps.newHashMap(idToComputeNodeRef);\nComputeNode cn = copiedComputeNodes.remove(computeNodeId);\nidToComputeNodeRef = ImmutableMap.copyOf(copiedComputeNodes);\nfinal Cluster cluster = GlobalStateMgr.getCurrentState().getCluster();\nif (null != cluster) {\ncluster.removeComputeNode(computeNodeId);\n} else {\nLOG.error(\"Cluster DEFAULT_CLUSTER \" + DEFAULT_CLUSTER + \" no exist.\");\n}\nif (Config.only_use_compute_node) {\nWarehouse warehouse = GlobalStateMgr.getCurrentWarehouseMgr().\ngetWarehouse(WarehouseManager.DEFAULT_WAREHOUSE_NAME);\nif (warehouse != null) {\nwarehouse.getAnyAvailableCluster().dropNode(cn.getId());\n}\n}\n}", + "context_before": "class 
SerializeData {\n@SerializedName(\"computeNodes\")\npublic List computeNodes;\n}", + "context_after": "class SerializeData {\n@SerializedName(\"computeNodes\")\npublic List computeNodes;\n}" + }, + { + "comment": "I updated the test code once more. Essentially, it will show the stacktrace of a generic error-case-specific `KeeperException`. For the `testShutdownWithFailureDueToExistingChildNodes` generates the following stacktrace: ``` java.util.concurrent.CompletionException: org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.KeeperException$NotEmptyException: KeeperErrorCode = Directory not empty for /checkpoint_id_counter \tat java.util.concurrent.CompletableFuture.reportJoin(CompletableFuture.java:375) \tat java.util.concurrent.CompletableFuture.join(CompletableFuture.java:1947) \tat org.apache.flink.runtime.checkpoint.ZooKeeperCheckpointIDCounterITCase.testShutdownWithFailureDueToExistingChildNodes(ZooKeeperCheckpointIDCounterITCase.java:114) \tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) \tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) \tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) [...] \tat com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33) \tat com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:221) \tat com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:54) Caused by: org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.KeeperException$NotEmptyException: KeeperErrorCode = Directory not empty for /checkpoint_id_counter \tat org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.KeeperException.create(KeeperException.java:132) \tat org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.KeeperException.create(KeeperException.java:54) \tat org.apache.flink.runtime.checkpoint.ZooKeeperCheckpointIDCounter.handleDeletionOfCounterPath(ZooKeeperCheckpointIDCounter.java:167) \tat org.apache.flink.runtime.checkpoint.ZooKeeperCheckpointIDCounter.lambda$shutdown$0(ZooKeeperCheckpointIDCounter.java:131) \tat org.apache.flink.shaded.curator5.org.apache.curator.framework.imps.CuratorFrameworkImpl.sendToBackgroundCallback(CuratorFrameworkImpl.java:926) \tat org.apache.flink.shaded.curator5.org.apache.curator.framework.imps.CuratorFrameworkImpl.processBackgroundOperation(CuratorFrameworkImpl.java:683) \tat org.apache.flink.shaded.curator5.org.apache.curator.framework.imps.DeleteBuilderImpl$2.processResult(DeleteBuilderImpl.java:207) \tat org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.ClientCnxn$EventThread.processEvent(ClientCnxn.java:675) \tat org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:510) ``` There's a log message printed on INFO beforehand in [ZooKeeperCheckpointIDCounter:L113](https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/ZooKeeperCheckpointIDCounter.java#L113)", + "method_body": "public CompletableFuture shutdown(JobStatus jobStatus) {\nsynchronized (startStopLock) {\nif (isStarted) {\nLOG.info(\"Shutting down.\");\ntry {\nsharedCount.close();\n} catch (IOException e) {\nreturn FutureUtils.completedExceptionally(e);\n}\nclient.getConnectionStateListenable().removeListener(connectionStateListener);\nif (jobStatus.isGloballyTerminalState()) {\nLOG.info(\"Removing {} from ZooKeeper\", counterPath);\nfinal CompletableFuture deletionFuture = new CompletableFuture<>();\ntry 
{\nclient.delete()\n.inBackground(\n(curatorFramework, curatorEvent) -> {\nPreconditions.checkArgument(\ncuratorEvent.getType()\n== CuratorEventType.DELETE,\n\"An unexpected CuratorEvent was monitored: \"\n+ curatorEvent.getType());\nPreconditions.checkArgument(\ncounterPath.equals(curatorEvent.getPath()),\n\"An unexpected path was selected for deletion: \"\n+ curatorEvent.getPath());\nfinal KeeperException.Code eventCode =\nKeeperException.Code.get(\ncuratorEvent.getResultCode());\nif (Sets.immutableEnumSet(\nKeeperException.Code.OK,\nKeeperException.Code.NONODE)\n.contains(eventCode)) {\ndeletionFuture.complete(null);\n} else {\ndeletionFuture.completeExceptionally(\nKeeperException.create(\nKeeperException.Code.get(\ncuratorEvent\n.getResultCode())));\n}\n})\n.forPath(counterPath);\n} catch (Exception e) {\nPreconditions.checkState(\n!deletionFuture.isDone(),\n\"The background task was triggered even though the curator call failed.\");\ndeletionFuture.completeExceptionally(e);\n}\nreturn deletionFuture;\n}\nisStarted = false;\n}\n}\nreturn FutureUtils.completedVoidFuture();\n}", + "target_code": ".getResultCode())));", + "method_body_after": "public CompletableFuture shutdown(JobStatus jobStatus) {\nsynchronized (startStopLock) {\nif (isStarted) {\nLOG.info(\"Shutting down.\");\ntry {\nsharedCount.close();\n} catch (IOException e) {\nreturn FutureUtils.completedExceptionally(e);\n}\nclient.getConnectionStateListenable().removeListener(connectionStateListener);\nif (jobStatus.isGloballyTerminalState()) {\nLOG.info(\"Removing {} from ZooKeeper\", counterPath);\ntry {\nfinal CompletableFuture deletionFuture = new CompletableFuture<>();\nclient.delete()\n.inBackground(\n(curatorFramework, curatorEvent) ->\nhandleDeletionOfCounterPath(\ncuratorEvent, deletionFuture))\n.forPath(counterPath);\nreturn deletionFuture;\n} catch (Exception e) {\nreturn FutureUtils.completedExceptionally(e);\n}\n}\nisStarted = false;\n}\n}\nreturn FutureUtils.completedVoidFuture();\n}", + "context_before": "class ZooKeeperCheckpointIDCounter implements CheckpointIDCounter {\nprivate static final Logger LOG = LoggerFactory.getLogger(ZooKeeperCheckpointIDCounter.class);\n/** Curator ZooKeeper client. */\nprivate final CuratorFramework client;\n/** Path of the shared count. */\nprivate final String counterPath;\n/** Curator recipe for shared counts. 
*/\nprivate final SharedCount sharedCount;\nprivate final LastStateConnectionStateListener connectionStateListener;\nprivate final Object startStopLock = new Object();\n@GuardedBy(\"startStopLock\")\nprivate boolean isStarted;\n/**\n* Creates a {@link ZooKeeperCheckpointIDCounter} instance.\n*\n* @param client Curator ZooKeeper client\n*/\npublic ZooKeeperCheckpointIDCounter(\nCuratorFramework client, LastStateConnectionStateListener connectionStateListener) {\nthis.client = checkNotNull(client, \"Curator client\");\nthis.counterPath = ZooKeeperUtils.getCheckpointIdCounterPath();\nthis.sharedCount = new SharedCount(client, counterPath, INITIAL_CHECKPOINT_ID);\nthis.connectionStateListener = connectionStateListener;\n}\n@Override\npublic void start() throws Exception {\nsynchronized (startStopLock) {\nif (!isStarted) {\nsharedCount.start();\nclient.getConnectionStateListenable().addListener(connectionStateListener);\nisStarted = true;\n}\n}\n}\n@Override\n@Override\npublic long getAndIncrement() throws Exception {\nwhile (true) {\ncheckConnectionState();\nVersionedValue current = sharedCount.getVersionedValue();\nint newCount = current.getValue() + 1;\nif (newCount < 0) {\nthrow new Exception(\n\"Checkpoint counter overflow. ZooKeeper checkpoint counter only supports \"\n+ \"checkpoints Ids up to \"\n+ Integer.MAX_VALUE);\n}\nif (sharedCount.trySetCount(current, newCount)) {\nreturn current.getValue();\n}\n}\n}\n@Override\npublic long get() {\ncheckConnectionState();\nreturn sharedCount.getVersionedValue().getValue();\n}\n@Override\npublic void setCount(long newId) throws Exception {\ncheckConnectionState();\nif (newId > Integer.MAX_VALUE) {\nthrow new IllegalArgumentException(\n\"ZooKeeper checkpoint counter only supports \"\n+ \"checkpoints Ids up to \"\n+ Integer.MAX_VALUE\n+ \", but given value is\"\n+ newId);\n}\nsharedCount.setCount((int) newId);\n}\nprivate void checkConnectionState() {\nfinal Optional optionalLastState = connectionStateListener.getLastState();\noptionalLastState.ifPresent(\nlastState -> {\nif (lastState != ConnectionState.CONNECTED\n&& lastState != ConnectionState.RECONNECTED) {\nthrow new IllegalStateException(\"Connection state: \" + lastState);\n}\n});\n}\n@VisibleForTesting\nString getPath() {\nreturn counterPath;\n}\n}", + "context_after": "class ZooKeeperCheckpointIDCounter implements CheckpointIDCounter {\nprivate static final Logger LOG = LoggerFactory.getLogger(ZooKeeperCheckpointIDCounter.class);\n/** Curator ZooKeeper client. */\nprivate final CuratorFramework client;\n/** Path of the shared count. */\nprivate final String counterPath;\n/** Curator recipe for shared counts. 
*/\nprivate final SharedCount sharedCount;\nprivate final LastStateConnectionStateListener connectionStateListener;\nprivate final Object startStopLock = new Object();\n@GuardedBy(\"startStopLock\")\nprivate boolean isStarted;\n/**\n* Creates a {@link ZooKeeperCheckpointIDCounter} instance.\n*\n* @param client Curator ZooKeeper client\n*/\npublic ZooKeeperCheckpointIDCounter(\nCuratorFramework client, LastStateConnectionStateListener connectionStateListener) {\nthis.client = checkNotNull(client, \"Curator client\");\nthis.counterPath = ZooKeeperUtils.getCheckpointIdCounterPath();\nthis.sharedCount = new SharedCount(client, counterPath, INITIAL_CHECKPOINT_ID);\nthis.connectionStateListener = connectionStateListener;\n}\n@Override\npublic void start() throws Exception {\nsynchronized (startStopLock) {\nif (!isStarted) {\nsharedCount.start();\nclient.getConnectionStateListenable().addListener(connectionStateListener);\nisStarted = true;\n}\n}\n}\n@Override\nprivate void handleDeletionOfCounterPath(\nCuratorEvent curatorEvent, CompletableFuture deletionFuture) {\nPreconditions.checkArgument(\ncuratorEvent.getType() == CuratorEventType.DELETE,\n\"An unexpected CuratorEvent was monitored: \" + curatorEvent.getType());\nPreconditions.checkArgument(\ncounterPath.endsWith(curatorEvent.getPath()),\n\"An unexpected path was selected for deletion: \" + curatorEvent.getPath());\nfinal KeeperException.Code eventCode =\nKeeperException.Code.get(curatorEvent.getResultCode());\nif (Sets.immutableEnumSet(KeeperException.Code.OK, KeeperException.Code.NONODE)\n.contains(eventCode)) {\ndeletionFuture.complete(null);\n} else {\nfinal String namespacedCounterPath =\nZooKeeperUtils.generateZookeeperPath(client.getNamespace(), counterPath);\ndeletionFuture.completeExceptionally(\nnew FlinkException(\nString.format(\n\"An error occurred while shutting down the CheckpointIDCounter in path '%s'.\",\nnamespacedCounterPath),\nKeeperException.create(eventCode, namespacedCounterPath)));\n}\n}\n@Override\npublic long getAndIncrement() throws Exception {\nwhile (true) {\ncheckConnectionState();\nVersionedValue current = sharedCount.getVersionedValue();\nint newCount = current.getValue() + 1;\nif (newCount < 0) {\nthrow new Exception(\n\"Checkpoint counter overflow. 
ZooKeeper checkpoint counter only supports \"\n+ \"checkpoints Ids up to \"\n+ Integer.MAX_VALUE);\n}\nif (sharedCount.trySetCount(current, newCount)) {\nreturn current.getValue();\n}\n}\n}\n@Override\npublic long get() {\ncheckConnectionState();\nreturn sharedCount.getVersionedValue().getValue();\n}\n@Override\npublic void setCount(long newId) throws Exception {\ncheckConnectionState();\nif (newId > Integer.MAX_VALUE) {\nthrow new IllegalArgumentException(\n\"ZooKeeper checkpoint counter only supports \"\n+ \"checkpoints Ids up to \"\n+ Integer.MAX_VALUE\n+ \", but given value is\"\n+ newId);\n}\nsharedCount.setCount((int) newId);\n}\nprivate void checkConnectionState() {\nfinal Optional optionalLastState = connectionStateListener.getLastState();\noptionalLastState.ifPresent(\nlastState -> {\nif (lastState != ConnectionState.CONNECTED\n&& lastState != ConnectionState.RECONNECTED) {\nthrow new IllegalStateException(\"Connection state: \" + lastState);\n}\n});\n}\n@VisibleForTesting\nString getPath() {\nreturn counterPath;\n}\n}" + }, + { + "comment": "We should check StorageAccountManager bean here and for below test.", + "method_body": "public void testEventHubOperationProvidedNotStorageUnderMSI() {\nthis.contextRunner.withUserConfiguration(\nTestConfigWithAzureResourceManagerAndConnectionProvider.class,\nAzureEventHubAutoConfiguration.class)\n.withPropertyValues(\nAZURE_PROPERTY_PREFIX + \"resource-group=rg1\",\nAZURE_PROPERTY_PREFIX + \"msi-enabled=true\",\nEVENT_HUB_PROPERTY_PREFIX + \"namespace=ns1\",\nAZURE_PROPERTY_PREFIX + \"subscription-id=sub\"\n)\n.run(context -> {\nassertThat(context).hasSingleBean(EventHubNamespaceManager.class);\nassertThat(context).hasSingleBean(EventHubOperation.class);\n});\n}", + "target_code": "assertThat(context).hasSingleBean(EventHubNamespaceManager.class);", + "method_body_after": "public void testEventHubOperationProvidedNotStorageUnderMSI() {\nthis.contextRunner.withUserConfiguration(\nTestConfigWithAzureResourceManagerAndConnectionProvider.class,\nAzureEventHubAutoConfiguration.class)\n.withPropertyValues(\nAZURE_PROPERTY_PREFIX + \"resource-group=rg1\",\nAZURE_PROPERTY_PREFIX + \"msi-enabled=true\",\nEVENT_HUB_PROPERTY_PREFIX + \"namespace=ns1\",\nAZURE_PROPERTY_PREFIX + \"subscription-id=sub\"\n)\n.run(context -> {\nassertThat(context).hasSingleBean(EventHubNamespaceManager.class);\nassertThat(context).hasSingleBean(EventHubOperation.class);\nassertThat(context).doesNotHaveBean(StorageAccountManager.class);\n});\n}", + "context_before": "class AzureEventHubAutoConfigurationTest {\nprivate static final String EVENT_HUB_PROPERTY_PREFIX = \"spring.cloud.azure.eventhub.\";\nprivate static final String AZURE_PROPERTY_PREFIX = \"spring.cloud.azure.\";\nprivate ApplicationContextRunner contextRunner = new ApplicationContextRunner()\n.withConfiguration(AutoConfigurations.of(AzureEventHubAutoConfiguration.class));\n@Test\npublic void testAzureEventHubDisabled() {\nthis.contextRunner.withPropertyValues(EVENT_HUB_PROPERTY_PREFIX + \"enabled=false\")\n.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubProperties.class));\n}\n@Test\npublic void testWithoutEventHubClient() {\nthis.contextRunner.withClassLoader(new FilteredClassLoader(EventHubConsumerAsyncClient.class))\n.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubProperties.class));\n}\n@Test\npublic void testAzureEventHubPropertiesStorageAccountIllegal() {\nthis.contextRunner.withPropertyValues(EVENT_HUB_PROPERTY_PREFIX + \"checkpoint-storage-account=1\")\n.run(context -> 
assertThrows(IllegalStateException.class,\n() -> context.getBean(AzureEventHubProperties.class)));\n}\n@Test\npublic void testAzureEventHubPropertiesConfigured() {\nthis.contextRunner.withPropertyValues(\nEVENT_HUB_PROPERTY_PREFIX + \"namespace=ns1\",\nEVENT_HUB_PROPERTY_PREFIX + \"checkpoint-storage-account=sa1\",\nEVENT_HUB_PROPERTY_PREFIX + \"connection-string=str1\")\n.run(context -> {\nassertThat(context).hasSingleBean(AzureEventHubProperties.class);\nassertThat(context.getBean(AzureEventHubProperties.class).getNamespace()).isEqualTo(\n\"ns1\");\nassertThat(context.getBean(AzureEventHubProperties.class).getConnectionString()).isEqualTo(\"str1\");\nassertThat(context.getBean(AzureEventHubProperties.class).getCheckpointStorageAccount()).isEqualTo(\"sa1\");\n});\n}\n@Test\npublic void testConnectionStringProvided() {\nthis.contextRunner.withPropertyValues(EVENT_HUB_PROPERTY_PREFIX + \"connection-string=str1\")\n.run(context -> {\nassertThat(context.getBean(EventHubConnectionStringProvider.class).getConnectionString()).isEqualTo(\"str1\");\nassertThat(context).hasSingleBean(EventHubClientFactory.class);\nassertThat(context).hasSingleBean(EventHubOperation.class);\nassertThat(context).doesNotHaveBean(EventHubNamespaceManager.class);\nassertThat(context).doesNotHaveBean(StorageAccountManager.class);\n});\n}\n@Test\npublic void testResourceManagerProvided() {\nthis.contextRunner.withUserConfiguration(\nTestConfigWithAzureResourceManagerAndConnectionProvider.class,\nAzureEventHubAutoConfiguration.class)\n.withPropertyValues(\nAZURE_PROPERTY_PREFIX + \"resource-group=rg1\",\nEVENT_HUB_PROPERTY_PREFIX + \"namespace=ns1\",\nEVENT_HUB_PROPERTY_PREFIX + \"checkpoint-storage-account=sa1\"\n)\n.run(context -> {\nassertThat(context).hasSingleBean(EventHubClientFactory.class);\nassertThat(context).hasSingleBean(EventHubOperation.class);\nassertThat(context).hasSingleBean(EventHubNamespaceManager.class);\nassertThat(context).hasSingleBean(StorageAccountManager.class);\n});\n}\n@Test\npublic void testEventHubOperationProvidedNotStorageUnder () {\nthis.contextRunner.withUserConfiguration(\nTestConfigWithAzureResourceManagerAndConnectionProvider.class,\nAzureEventHubAutoConfiguration.class)\n.withPropertyValues(\nAZURE_PROPERTY_PREFIX + \"resource-group=rg1\",\nEVENT_HUB_PROPERTY_PREFIX + \"namespace=ns1\"\n)\n.run(context -> {\nassertThat(context).hasSingleBean(EventHubNamespaceManager.class);\nassertThat(context).hasSingleBean(EventHubOperation.class);\n});\n}\n@Test\n@Configuration\n@EnableConfigurationProperties(AzureProperties.class)\npublic static class TestConfigWithAzureResourceManagerAndConnectionProvider {\n@Bean\npublic AzureResourceManager azureResourceManager() {\nfinal AzureResourceManager mockResourceManager = mock(AzureResourceManager.class);\nfinal StorageManager mockStorageManager = mock(StorageManager.class);\nfinal StorageAccounts mockStorageAccounts = mock(StorageAccounts.class);\nfinal StorageAccount mockStorageAccount = mock(StorageAccount.class);\nfinal List mockStorageAccountKeys = singletonList(mock(StorageAccountKey.class));\nwhen(mockResourceManager.storageAccounts()).thenReturn(mockStorageAccounts);\nwhen(mockStorageAccounts.getByResourceGroup(anyString(), anyString())).thenReturn(mockStorageAccount);\nwhen(mockStorageAccount.getKeys()).thenReturn(mockStorageAccountKeys);\nwhen(mockStorageAccount.manager()).thenReturn(mockStorageManager);\nwhen(mockStorageManager.environment()).thenReturn(AzureEnvironment.AZURE);\nreturn mockResourceManager;\n}\n@Bean\npublic 
EventHubConnectionStringProvider eventHubConnectionStringProvider() {\nreturn new EventHubConnectionStringProvider(\"fake-string\");\n}\n}\n}", + "context_after": "class AzureEventHubAutoConfigurationTest {\nprivate static final String EVENT_HUB_PROPERTY_PREFIX = \"spring.cloud.azure.eventhub.\";\nprivate static final String AZURE_PROPERTY_PREFIX = \"spring.cloud.azure.\";\nprivate ApplicationContextRunner contextRunner = new ApplicationContextRunner()\n.withConfiguration(AutoConfigurations.of(AzureEventHubAutoConfiguration.class));\n@Test\npublic void testAzureEventHubDisabled() {\nthis.contextRunner.withPropertyValues(EVENT_HUB_PROPERTY_PREFIX + \"enabled=false\")\n.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubProperties.class));\n}\n@Test\npublic void testWithoutEventHubClient() {\nthis.contextRunner.withClassLoader(new FilteredClassLoader(EventHubConsumerAsyncClient.class))\n.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubProperties.class));\n}\n@Test\npublic void testAzureEventHubPropertiesStorageAccountIllegal() {\nthis.contextRunner.withPropertyValues(EVENT_HUB_PROPERTY_PREFIX + \"checkpoint-storage-account=1\")\n.run(context -> assertThrows(IllegalStateException.class,\n() -> context.getBean(AzureEventHubProperties.class)));\n}\n@Test\npublic void testAzureEventHubPropertiesConfigured() {\nthis.contextRunner.withPropertyValues(\nEVENT_HUB_PROPERTY_PREFIX + \"namespace=ns1\",\nEVENT_HUB_PROPERTY_PREFIX + \"checkpoint-storage-account=sa1\",\nEVENT_HUB_PROPERTY_PREFIX + \"connection-string=str1\")\n.run(context -> {\nassertThat(context).hasSingleBean(AzureEventHubProperties.class);\nassertThat(context.getBean(AzureEventHubProperties.class).getNamespace()).isEqualTo(\n\"ns1\");\nassertThat(context.getBean(AzureEventHubProperties.class).getConnectionString()).isEqualTo(\"str1\");\nassertThat(context.getBean(AzureEventHubProperties.class).getCheckpointStorageAccount()).isEqualTo(\"sa1\");\n});\n}\n@Test\npublic void testConnectionStringProvided() {\nthis.contextRunner.withPropertyValues(EVENT_HUB_PROPERTY_PREFIX + \"connection-string=str1\")\n.run(context -> {\nassertThat(context.getBean(EventHubConnectionStringProvider.class).getConnectionString()).isEqualTo(\"str1\");\nassertThat(context).hasSingleBean(EventHubClientFactory.class);\nassertThat(context).hasSingleBean(EventHubOperation.class);\nassertThat(context).doesNotHaveBean(EventHubNamespaceManager.class);\nassertThat(context).doesNotHaveBean(StorageAccountManager.class);\n});\n}\n@Test\npublic void testResourceManagerProvided() {\nthis.contextRunner.withUserConfiguration(\nTestConfigWithAzureResourceManagerAndConnectionProvider.class,\nAzureEventHubAutoConfiguration.class)\n.withPropertyValues(\nAZURE_PROPERTY_PREFIX + \"resource-group=rg1\",\nEVENT_HUB_PROPERTY_PREFIX + \"namespace=ns1\",\nEVENT_HUB_PROPERTY_PREFIX + \"checkpoint-storage-account=sa1\"\n)\n.run(context -> {\nassertThat(context).hasSingleBean(EventHubClientFactory.class);\nassertThat(context).hasSingleBean(EventHubOperation.class);\nassertThat(context).hasSingleBean(EventHubNamespaceManager.class);\nassertThat(context).hasSingleBean(StorageAccountManager.class);\n});\n}\n@Test\npublic void testEventHubOperationProvidedNotStorageUnderSP() {\nthis.contextRunner.withUserConfiguration(\nTestConfigWithAzureResourceManagerAndConnectionProvider.class,\nAzureEventHubAutoConfiguration.class)\n.withPropertyValues(\nAZURE_PROPERTY_PREFIX + \"resource-group=rg1\",\nEVENT_HUB_PROPERTY_PREFIX + \"namespace=ns1\"\n)\n.run(context -> 
{\nassertThat(context).hasSingleBean(EventHubNamespaceManager.class);\nassertThat(context).hasSingleBean(EventHubOperation.class);\nassertThat(context).doesNotHaveBean(StorageAccountManager.class);\n});\n}\n@Test\n@Configuration\n@EnableConfigurationProperties(AzureProperties.class)\npublic static class TestConfigWithAzureResourceManagerAndConnectionProvider {\n@Bean\npublic AzureResourceManager azureResourceManager() {\nfinal AzureResourceManager mockResourceManager = mock(AzureResourceManager.class);\nfinal StorageManager mockStorageManager = mock(StorageManager.class);\nfinal StorageAccounts mockStorageAccounts = mock(StorageAccounts.class);\nfinal StorageAccount mockStorageAccount = mock(StorageAccount.class);\nfinal List mockStorageAccountKeys = singletonList(mock(StorageAccountKey.class));\nwhen(mockResourceManager.storageAccounts()).thenReturn(mockStorageAccounts);\nwhen(mockStorageAccounts.getByResourceGroup(anyString(), anyString())).thenReturn(mockStorageAccount);\nwhen(mockStorageAccount.getKeys()).thenReturn(mockStorageAccountKeys);\nwhen(mockStorageAccount.manager()).thenReturn(mockStorageManager);\nwhen(mockStorageManager.environment()).thenReturn(AzureEnvironment.AZURE);\nreturn mockResourceManager;\n}\n@Bean\npublic EventHubConnectionStringProvider eventHubConnectionStringProvider() {\nreturn new EventHubConnectionStringProvider(\"fake-string\");\n}\n}\n}" + }, + { + "comment": "Shall we have an Option constructor taking the only type since we can set 0 as default?", + "method_body": "public Object[] getMainArgs() {\nmainArgs.add(null);\nif (option != null) {\nBMap recordVal = option.parseRecord(args);\nprocessOperands(option.getOperandArgs());\nint optionLocation = option.getLocation() * 2 + 1;\nmainArgs.add(optionLocation, recordVal);\nmainArgs.add(optionLocation + 1, true);\n} else {\nRecordType type = TypeCreator.createRecordType(\"dummy\", null, 1, new HashMap<>(), null, true, 6);\nOption dummyOption = new Option(type, ValueCreator.createMapValue(type), 0);\ndummyOption.parseRecord(args);\nprocessOperands(dummyOption.getOperandArgs());\n}\nreturn mainArgs.toArray();\n}", + "target_code": "Option dummyOption = new Option(type, ValueCreator.createMapValue(type), 0);", + "method_body_after": "public Object[] getMainArgs() {\nmainArgs.add(null);\nif (option != null) {\nBMap recordVal = option.parseRecord(args);\nprocessOperands(option.getOperandArgs());\nint optionLocation = option.getLocation() * 2 + 1;\nmainArgs.add(optionLocation, recordVal);\nmainArgs.add(optionLocation + 1, true);\n} else {\nRecordType type = TypeCreator.createRecordType(\"dummy\", null, 1, new HashMap<>(), null, true, 6);\nOption dummyOption = new Option(type, ValueCreator.createMapValue(type));\ndummyOption.parseRecord(args);\nprocessOperands(dummyOption.getOperandArgs());\n}\nreturn mainArgs.toArray();\n}", + "context_before": "class CliSpec {\nprivate final Option option;\nprivate final Operand[] operands;\nprivate final List mainArgs;\nprivate final String[] args;\npublic CliSpec(Option option, Operand[] operands, String... 
args) {\nthis.option = option;\nthis.operands = operands;\nthis.args = args;\nmainArgs = new ArrayList<>();\n}\nprivate void processOperands(List operandArgs) {\nint opIndex = 0;\nObject bValue;\nint argIndex = 0;\nwhile (argIndex < operandArgs.size() && opIndex < operands.length) {\nOperand curOperand = operands[opIndex++];\nType typeOp = curOperand.type;\nif (typeOp.getTag() == TypeTags.ARRAY_TAG) {\nArrayType arrayType = (ArrayType) typeOp;\nBArray bArray = ValueCreator.createArrayValue(arrayType, -1);\nType elementType = arrayType.getElementType();\nint elementCount = getElementCount(operands, opIndex);\nwhile (argIndex < operandArgs.size() - elementCount) {\ntry {\nbArray.append(CliUtil.getBValue(elementType, operandArgs.get(argIndex++), curOperand.name));\n} catch (BError error) {\nargIndex--;\nbreak;\n}\n}\nbValue = bArray;\n} else {\nbValue = CliUtil.getBValueWithUnionValue(curOperand.type, operandArgs.get(argIndex++), curOperand.name);\n}\nmainArgs.add(bValue);\nmainArgs.add(true);\n}\nif (argIndex < operandArgs.size()) {\nthrow ErrorCreator.createError(StringUtils.fromString(\"all operand arguments are not matched\"));\n}\nhandleMainParametersAtTheEnd(opIndex);\n}\nprivate void handleMainParametersAtTheEnd(int opIndex) {\nwhile (opIndex < operands.length) {\nOperand operand = operands[opIndex++];\nType opType = operand.type;\nif (operand.hasDefaultable) {\nmainArgs.add(getDefaultBValue(opType));\nmainArgs.add(false);\n} else if (isSupportedArrayType(opType)) {\nmainArgs.add(ValueCreator.createArrayValue((ArrayType) opType, -1));\nmainArgs.add(true);\n} else if ((CliUtil.isUnionWithNil(opType))) {\nmainArgs.add(null);\nmainArgs.add(true);\n} else {\nthrow ErrorCreator.createError(StringUtils.fromString(\n\"missing operand arguments for parameter '\" + operand.name + \"' of type '\" + opType + \"'\"));\n}\n}\n}\nprivate boolean isSupportedArrayType(Type opType) {\nif (opType.getTag() == TypeTags.ARRAY_TAG) {\nType elementType = ((ArrayType) opType).getElementType();\nreturn CliUtil.isSupportedType(elementType.getTag());\n}\nreturn false;\n}\nprivate static Object getDefaultBValue(Type type) {\nswitch (type.getTag()) {\ncase TypeTags.INT_TAG:\ncase TypeTags.FLOAT_TAG:\ncase TypeTags.DECIMAL_TAG:\ncase TypeTags.BYTE_TAG:\nreturn 0;\ncase TypeTags.BOOLEAN_TAG:\nreturn false;\ndefault:\nreturn null;\n}\n}\nprivate int getElementCount(Operand[] operands, int opIndex) {\nint count = 0;\nwhile (opIndex < operands.length && operands[opIndex++].type.getTag() != TypeTags.RECORD_TYPE_TAG) {\ncount++;\n}\nreturn count;\n}\n}", + "context_after": "class CliSpec {\nprivate final Option option;\nprivate final Operand[] operands;\nprivate final List mainArgs;\nprivate final String[] args;\npublic CliSpec(Option option, Operand[] operands, String... 
args) {\nthis.option = option;\nthis.operands = operands;\nthis.args = args;\nmainArgs = new ArrayList<>();\n}\nprivate void processOperands(List operandArgs) {\nint opIndex = 0;\nObject bValue;\nint argIndex = 0;\nwhile (argIndex < operandArgs.size() && opIndex < operands.length) {\nOperand curOperand = operands[opIndex++];\nType typeOp = curOperand.type;\nif (typeOp.getTag() == TypeTags.ARRAY_TAG) {\nArrayType arrayType = (ArrayType) typeOp;\nBArray bArray = ValueCreator.createArrayValue(arrayType, -1);\nType elementType = arrayType.getElementType();\nint elementCount = getElementCount(operands, opIndex);\nwhile (argIndex < operandArgs.size() - elementCount) {\ntry {\nbArray.append(CliUtil.getBValue(elementType, operandArgs.get(argIndex++), curOperand.name));\n} catch (BError error) {\nargIndex--;\nbreak;\n}\n}\nbValue = bArray;\n} else {\nbValue = CliUtil.getBValueWithUnionValue(curOperand.type, operandArgs.get(argIndex++), curOperand.name);\n}\nmainArgs.add(bValue);\nmainArgs.add(true);\n}\nif (argIndex < operandArgs.size()) {\nthrow ErrorCreator.createError(StringUtils.fromString(\"all operand arguments are not matched\"));\n}\nhandleMainParametersAtTheEnd(opIndex);\n}\nprivate void handleMainParametersAtTheEnd(int opIndex) {\nwhile (opIndex < operands.length) {\nOperand operand = operands[opIndex++];\nType opType = operand.type;\nif (operand.hasDefaultable) {\nmainArgs.add(getDefaultBValue(opType));\nmainArgs.add(false);\n} else if (isSupportedArrayType(opType)) {\nmainArgs.add(ValueCreator.createArrayValue((ArrayType) opType, -1));\nmainArgs.add(true);\n} else if ((CliUtil.isUnionWithNil(opType))) {\nmainArgs.add(null);\nmainArgs.add(true);\n} else {\nthrow ErrorCreator.createError(StringUtils.fromString(\n\"missing operand arguments for parameter '\" + operand.name + \"' of type '\" + opType + \"'\"));\n}\n}\n}\nprivate boolean isSupportedArrayType(Type opType) {\nif (opType.getTag() == TypeTags.ARRAY_TAG) {\nType elementType = ((ArrayType) opType).getElementType();\nreturn CliUtil.isSupportedType(elementType.getTag());\n}\nreturn false;\n}\nprivate static Object getDefaultBValue(Type type) {\nswitch (type.getTag()) {\ncase TypeTags.INT_TAG:\ncase TypeTags.FLOAT_TAG:\ncase TypeTags.DECIMAL_TAG:\ncase TypeTags.BYTE_TAG:\nreturn 0;\ncase TypeTags.BOOLEAN_TAG:\nreturn false;\ndefault:\nreturn null;\n}\n}\nprivate int getElementCount(Operand[] operands, int opIndex) {\nint count = 0;\nwhile (opIndex < operands.length && operands[opIndex++].type.getTag() != TypeTags.RECORD_TYPE_TAG) {\ncount++;\n}\nreturn count;\n}\n}" + }, + { + "comment": "I can replace the \"isCopied\" or \"isCopyable\" with \"isBufferShared\" which may be easier to understand.", + "method_body": "BufferAndBacklog pollBuffer(boolean isLocalChannel) {\nsynchronized (buffers) {\nBuffer buffer = null;\nif (buffers.isEmpty()) {\nflushRequested = false;\n}\nwhile (!buffers.isEmpty()) {\nBufferConsumer bufferConsumer = buffers.peek();\nbuffer = bufferConsumer.build();\nif (!isLocalChannel && !bufferConsumer.isCopied() && canBeCompressed(buffer)) {\nbuffer = parent.bufferCompressor.compressToOriginalBuffer(buffer);\n}\ncheckState(bufferConsumer.isFinished() || buffers.size() == 1,\n\"When there are multiple buffers, an unfinished bufferConsumer can not be at the head of the buffers queue.\");\nif (buffers.size() == 1) {\nflushRequested = false;\n}\nif (bufferConsumer.isFinished()) {\nbuffers.pop().close();\ndecreaseBuffersInBacklogUnsafe(bufferConsumer.isBuffer());\n}\nif (buffer.readableBytes() > 0) 
{\nbreak;\n}\nbuffer.recycleBuffer();\nbuffer = null;\nif (!bufferConsumer.isFinished()) {\nbreak;\n}\n}\nif (buffer == null) {\nreturn null;\n}\nupdateStatistics(buffer);\nreturn new BufferAndBacklog(\nbuffer,\nisAvailableUnsafe(),\ngetBuffersInBacklog(),\nnextBufferIsEventUnsafe());\n}\n}", + "target_code": "if (!isLocalChannel && !bufferConsumer.isCopied() && canBeCompressed(buffer)) {", + "method_body_after": "BufferAndBacklog pollBuffer(boolean isLocalChannel) {\nsynchronized (buffers) {\nBuffer buffer = null;\nif (buffers.isEmpty()) {\nflushRequested = false;\n}\nwhile (!buffers.isEmpty()) {\nBufferConsumer bufferConsumer = buffers.peek();\nbuffer = bufferConsumer.build();\nif (!isLocalChannel && !bufferConsumer.isShareable() && canBeCompressed(buffer)) {\nbuffer = parent.bufferCompressor.compressToOriginalBuffer(buffer);\n}\ncheckState(bufferConsumer.isFinished() || buffers.size() == 1,\n\"When there are multiple buffers, an unfinished bufferConsumer can not be at the head of the buffers queue.\");\nif (buffers.size() == 1) {\nflushRequested = false;\n}\nif (bufferConsumer.isFinished()) {\nbuffers.pop().close();\ndecreaseBuffersInBacklogUnsafe(bufferConsumer.isBuffer());\n}\nif (buffer.readableBytes() > 0) {\nbreak;\n}\nbuffer.recycleBuffer();\nbuffer = null;\nif (!bufferConsumer.isFinished()) {\nbreak;\n}\n}\nif (buffer == null) {\nreturn null;\n}\nupdateStatistics(buffer);\nreturn new BufferAndBacklog(\nbuffer,\nisAvailableUnsafe(),\ngetBuffersInBacklog(),\nnextBufferIsEventUnsafe());\n}\n}", + "context_before": "class PipelinedSubpartition extends ResultSubpartition {\nprivate static final Logger LOG = LoggerFactory.getLogger(PipelinedSubpartition.class);\n/** All buffers of this subpartition. Access to the buffers is synchronized on this object. */\nprivate final ArrayDeque buffers = new ArrayDeque<>();\n/** The number of non-event buffers currently in this subpartition. */\n@GuardedBy(\"buffers\")\nprivate int buffersInBacklog;\n/** The read view to consume this subpartition. */\nprivate PipelinedSubpartitionView readView;\n/** Flag indicating whether the subpartition has been finished. */\nprivate boolean isFinished;\n@GuardedBy(\"buffers\")\nprivate boolean flushRequested;\n/** Flag indicating whether the subpartition has been released. */\nprivate volatile boolean isReleased;\n/** The total number of buffers (both data and event buffers). */\nprivate long totalNumberOfBuffers;\n/** The total number of bytes (both data and event buffers). 
*/\nprivate long totalNumberOfBytes;\nPipelinedSubpartition(int index, ResultPartition parent) {\nsuper(index, parent);\n}\n@Override\npublic boolean add(BufferConsumer bufferConsumer) {\nreturn add(bufferConsumer, false);\n}\n@Override\npublic void finish() throws IOException {\nadd(EventSerializer.toBufferConsumer(EndOfPartitionEvent.INSTANCE), true);\nLOG.debug(\"{}: Finished {}.\", parent.getOwningTaskName(), this);\n}\nprivate boolean add(BufferConsumer bufferConsumer, boolean finish) {\ncheckNotNull(bufferConsumer);\nfinal boolean notifyDataAvailable;\nsynchronized (buffers) {\nif (isFinished || isReleased) {\nbufferConsumer.close();\nreturn false;\n}\nbuffers.add(bufferConsumer);\nupdateStatistics(bufferConsumer);\nincreaseBuffersInBacklog(bufferConsumer);\nnotifyDataAvailable = shouldNotifyDataAvailable() || finish;\nisFinished |= finish;\n}\nif (notifyDataAvailable) {\nnotifyDataAvailable();\n}\nreturn true;\n}\n@Override\npublic void release() {\nfinal PipelinedSubpartitionView view;\nsynchronized (buffers) {\nif (isReleased) {\nreturn;\n}\nfor (BufferConsumer buffer : buffers) {\nbuffer.close();\n}\nbuffers.clear();\nview = readView;\nreadView = null;\nisReleased = true;\n}\nLOG.debug(\"{}: Released {}.\", parent.getOwningTaskName(), this);\nif (view != null) {\nview.releaseAllResources();\n}\n}\n@Nullable\nboolean nextBufferIsEvent() {\nsynchronized (buffers) {\nreturn nextBufferIsEventUnsafe();\n}\n}\nprivate boolean nextBufferIsEventUnsafe() {\nassert Thread.holdsLock(buffers);\nreturn !buffers.isEmpty() && !buffers.peekFirst().isBuffer();\n}\n@Override\npublic int releaseMemory() {\nreturn 0;\n}\n@Override\npublic boolean isReleased() {\nreturn isReleased;\n}\n@Override\npublic PipelinedSubpartitionView createReadView(BufferAvailabilityListener availabilityListener) throws IOException {\nfinal boolean notifyDataAvailable;\nsynchronized (buffers) {\ncheckState(!isReleased);\ncheckState(readView == null,\n\"Subpartition %s of is being (or already has been) consumed, \" +\n\"but pipelined subpartitions can only be consumed once.\", index, parent.getPartitionId());\nLOG.debug(\"{}: Creating read view for subpartition {} of partition {}.\",\nparent.getOwningTaskName(), index, parent.getPartitionId());\nreadView = new PipelinedSubpartitionView(this, availabilityListener);\nnotifyDataAvailable = !buffers.isEmpty();\n}\nif (notifyDataAvailable) {\nnotifyDataAvailable();\n}\nreturn readView;\n}\npublic boolean isAvailable() {\nsynchronized (buffers) {\nreturn isAvailableUnsafe();\n}\n}\nprivate boolean isAvailableUnsafe() {\nreturn flushRequested || getNumberOfFinishedBuffers() > 0;\n}\nint getCurrentNumberOfBuffers() {\nreturn buffers.size();\n}\n@Override\npublic String toString() {\nfinal long numBuffers;\nfinal long numBytes;\nfinal boolean finished;\nfinal boolean hasReadView;\nsynchronized (buffers) {\nnumBuffers = getTotalNumberOfBuffers();\nnumBytes = getTotalNumberOfBytes();\nfinished = isFinished;\nhasReadView = readView != null;\n}\nreturn String.format(\n\"PipelinedSubpartition#%d [number of buffers: %d (%d bytes), number of buffers in backlog: %d, finished? %s, read view? %s]\",\nindex, numBuffers, numBytes, getBuffersInBacklog(), finished, hasReadView);\n}\n@Override\npublic int unsynchronizedGetNumberOfQueuedBuffers() {\nreturn Math.max(buffers.size(), 0);\n}\n@Override\npublic void flush() {\nfinal boolean notifyDataAvailable;\nsynchronized (buffers) {\nif (buffers.isEmpty()) {\nreturn;\n}\nnotifyDataAvailable = !flushRequested && buffers.size() == 1 && buffers.peek().isDataAvailable();\nflushRequested = flushRequested || buffers.size() > 1 || 
notifyDataAvailable;\n}\nif (notifyDataAvailable) {\nnotifyDataAvailable();\n}\n}\n@Override\nprotected long getTotalNumberOfBuffers() {\nreturn totalNumberOfBuffers;\n}\n@Override\nprotected long getTotalNumberOfBytes() {\nreturn totalNumberOfBytes;\n}\nThrowable getFailureCause() {\nreturn parent.getFailureCause();\n}\nprivate void updateStatistics(BufferConsumer buffer) {\ntotalNumberOfBuffers++;\n}\nprivate void updateStatistics(Buffer buffer) {\ntotalNumberOfBytes += buffer.getSize();\n}\n@GuardedBy(\"buffers\")\nprivate void decreaseBuffersInBacklogUnsafe(boolean isBuffer) {\nassert Thread.holdsLock(buffers);\nif (isBuffer) {\nbuffersInBacklog--;\n}\n}\n/**\n* Increases the number of non-event buffers by one after adding a non-event\n* buffer into this subpartition.\n*/\n@GuardedBy(\"buffers\")\nprivate void increaseBuffersInBacklog(BufferConsumer buffer) {\nassert Thread.holdsLock(buffers);\nif (buffer != null && buffer.isBuffer()) {\nbuffersInBacklog++;\n}\n}\n/**\n* Gets the number of non-event buffers in this subpartition.\n*\n*
Beware: This method should only be used in tests in non-concurrent access\n* scenarios since it does not make any concurrency guarantees.\n*/\n@SuppressWarnings(\"FieldAccessNotGuarded\")\n@VisibleForTesting\npublic int getBuffersInBacklog() {\nif (flushRequested || isFinished) {\nreturn buffersInBacklog;\n} else {\nreturn Math.max(buffersInBacklog - 1, 0);\n}\n}\nprivate boolean shouldNotifyDataAvailable() {\nreturn readView != null && !flushRequested && getNumberOfFinishedBuffers() == 1;\n}\nprivate void notifyDataAvailable() {\nif (readView != null) {\nreadView.notifyDataAvailable();\n}\n}\nprivate int getNumberOfFinishedBuffers() {\nassert Thread.holdsLock(buffers);\nif (buffers.size() == 1 && buffers.peekLast().isFinished()) {\nreturn 1;\n}\nreturn Math.max(0, buffers.size() - 1);\n}\n}", + "context_after": "class PipelinedSubpartition extends ResultSubpartition {\nprivate static final Logger LOG = LoggerFactory.getLogger(PipelinedSubpartition.class);\n/** All buffers of this subpartition. Access to the buffers is synchronized on this object. */\nprivate final ArrayDeque buffers = new ArrayDeque<>();\n/** The number of non-event buffers currently in this subpartition. */\n@GuardedBy(\"buffers\")\nprivate int buffersInBacklog;\n/** The read view to consume this subpartition. */\nprivate PipelinedSubpartitionView readView;\n/** Flag indicating whether the subpartition has been finished. */\nprivate boolean isFinished;\n@GuardedBy(\"buffers\")\nprivate boolean flushRequested;\n/** Flag indicating whether the subpartition has been released. */\nprivate volatile boolean isReleased;\n/** The total number of buffers (both data and event buffers). */\nprivate long totalNumberOfBuffers;\n/** The total number of bytes (both data and event buffers). */\nprivate long totalNumberOfBytes;\nPipelinedSubpartition(int index, ResultPartition parent) {\nsuper(index, parent);\n}\n@Override\npublic boolean add(BufferConsumer bufferConsumer) {\nreturn add(bufferConsumer, false);\n}\n@Override\npublic void finish() throws IOException {\nadd(EventSerializer.toBufferConsumer(EndOfPartitionEvent.INSTANCE, false), true);\nLOG.debug(\"{}: Finished {}.\", parent.getOwningTaskName(), this);\n}\nprivate boolean add(BufferConsumer bufferConsumer, boolean finish) {\ncheckNotNull(bufferConsumer);\nfinal boolean notifyDataAvailable;\nsynchronized (buffers) {\nif (isFinished || isReleased) {\nbufferConsumer.close();\nreturn false;\n}\nbuffers.add(bufferConsumer);\nupdateStatistics(bufferConsumer);\nincreaseBuffersInBacklog(bufferConsumer);\nnotifyDataAvailable = shouldNotifyDataAvailable() || finish;\nisFinished |= finish;\n}\nif (notifyDataAvailable) {\nnotifyDataAvailable();\n}\nreturn true;\n}\n@Override\npublic void release() {\nfinal PipelinedSubpartitionView view;\nsynchronized (buffers) {\nif (isReleased) {\nreturn;\n}\nfor (BufferConsumer buffer : buffers) {\nbuffer.close();\n}\nbuffers.clear();\nview = readView;\nreadView = null;\nisReleased = true;\n}\nLOG.debug(\"{}: Released {}.\", parent.getOwningTaskName(), this);\nif (view != null) {\nview.releaseAllResources();\n}\n}\n@Nullable\nboolean nextBufferIsEvent() {\nsynchronized (buffers) {\nreturn nextBufferIsEventUnsafe();\n}\n}\nprivate boolean nextBufferIsEventUnsafe() {\nassert Thread.holdsLock(buffers);\nreturn !buffers.isEmpty() && !buffers.peekFirst().isBuffer();\n}\n@Override\npublic int releaseMemory() {\nreturn 0;\n}\n@Override\npublic boolean isReleased() {\nreturn isReleased;\n}\n@Override\npublic PipelinedSubpartitionView 
createReadView(BufferAvailabilityListener availabilityListener) throws IOException {\nfinal boolean notifyDataAvailable;\nsynchronized (buffers) {\ncheckState(!isReleased);\ncheckState(readView == null,\n\"Subpartition %s of is being (or already has been) consumed, \" +\n\"but pipelined subpartitions can only be consumed once.\", index, parent.getPartitionId());\nLOG.debug(\"{}: Creating read view for subpartition {} of partition {}.\",\nparent.getOwningTaskName(), index, parent.getPartitionId());\nreadView = new PipelinedSubpartitionView(this, availabilityListener);\nnotifyDataAvailable = !buffers.isEmpty();\n}\nif (notifyDataAvailable) {\nnotifyDataAvailable();\n}\nreturn readView;\n}\npublic boolean isAvailable() {\nsynchronized (buffers) {\nreturn isAvailableUnsafe();\n}\n}\nprivate boolean isAvailableUnsafe() {\nreturn flushRequested || getNumberOfFinishedBuffers() > 0;\n}\nint getCurrentNumberOfBuffers() {\nreturn buffers.size();\n}\n@Override\npublic String toString() {\nfinal long numBuffers;\nfinal long numBytes;\nfinal boolean finished;\nfinal boolean hasReadView;\nsynchronized (buffers) {\nnumBuffers = getTotalNumberOfBuffers();\nnumBytes = getTotalNumberOfBytes();\nfinished = isFinished;\nhasReadView = readView != null;\n}\nreturn String.format(\n\"PipelinedSubpartition\nindex, numBuffers, numBytes, getBuffersInBacklog(), finished, hasReadView);\n}\n@Override\npublic int unsynchronizedGetNumberOfQueuedBuffers() {\nreturn Math.max(buffers.size(), 0);\n}\n@Override\npublic void flush() {\nfinal boolean notifyDataAvailable;\nsynchronized (buffers) {\nif (buffers.isEmpty()) {\nreturn;\n}\nnotifyDataAvailable = !flushRequested && buffers.size() == 1 && buffers.peek().isDataAvailable();\nflushRequested = flushRequested || buffers.size() > 1 || notifyDataAvailable;\n}\nif (notifyDataAvailable) {\nnotifyDataAvailable();\n}\n}\n@Override\nprotected long getTotalNumberOfBuffers() {\nreturn totalNumberOfBuffers;\n}\n@Override\nprotected long getTotalNumberOfBytes() {\nreturn totalNumberOfBytes;\n}\nThrowable getFailureCause() {\nreturn parent.getFailureCause();\n}\nprivate void updateStatistics(BufferConsumer buffer) {\ntotalNumberOfBuffers++;\n}\nprivate void updateStatistics(Buffer buffer) {\ntotalNumberOfBytes += buffer.getSize();\n}\n@GuardedBy(\"buffers\")\nprivate void decreaseBuffersInBacklogUnsafe(boolean isBuffer) {\nassert Thread.holdsLock(buffers);\nif (isBuffer) {\nbuffersInBacklog--;\n}\n}\n/**\n* Increases the number of non-event buffers by one after adding a non-event\n* buffer into this subpartition.\n*/\n@GuardedBy(\"buffers\")\nprivate void increaseBuffersInBacklog(BufferConsumer buffer) {\nassert Thread.holdsLock(buffers);\nif (buffer != null && buffer.isBuffer()) {\nbuffersInBacklog++;\n}\n}\n/**\n* Gets the number of non-event buffers in this subpartition.\n*\n*
Beware: This method should only be used in tests in non-concurrent access\n* scenarios since it does not make any concurrency guarantees.\n*/\n@SuppressWarnings(\"FieldAccessNotGuarded\")\n@VisibleForTesting\npublic int getBuffersInBacklog() {\nif (flushRequested || isFinished) {\nreturn buffersInBacklog;\n} else {\nreturn Math.max(buffersInBacklog - 1, 0);\n}\n}\nprivate boolean shouldNotifyDataAvailable() {\nreturn readView != null && !flushRequested && getNumberOfFinishedBuffers() == 1;\n}\nprivate void notifyDataAvailable() {\nif (readView != null) {\nreadView.notifyDataAvailable();\n}\n}\nprivate int getNumberOfFinishedBuffers() {\nassert Thread.holdsLock(buffers);\nif (buffers.size() == 1 && buffers.peekLast().isFinished()) {\nreturn 1;\n}\nreturn Math.max(0, buffers.size() - 1);\n}\n}" + }, + { + "comment": "These changes are already contained in #6638. You could base this PR on #6638.", + "method_body": "private void scheduleRelease(JobInfo jobInfo) {\nWrappedContext wrapper = getCache().get(jobInfo.jobId());\nPreconditions.checkState(\nwrapper != null, \"Releasing context for unknown job: \" + jobInfo.jobId());\nPipelineOptions pipelineOptions =\nPipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions());\nint environmentCacheTTLMillis =\npipelineOptions.as(PortablePipelineOptions.class).getEnvironmentCacheMillis();\nif (environmentCacheTTLMillis > 0) {\ngetExecutor()\n.schedule(() -> release(wrapper), environmentCacheTTLMillis, TimeUnit.MILLISECONDS);\n} else {\nrelease(wrapper);\n}\n}", + "target_code": ".schedule(() -> release(wrapper), environmentCacheTTLMillis, TimeUnit.MILLISECONDS);", + "method_body_after": "private void scheduleRelease(JobInfo jobInfo) {\nWrappedContext wrapper = getCache().get(jobInfo.jobId());\nPreconditions.checkState(\nwrapper != null, \"Releasing context for unknown job: \" + jobInfo.jobId());\nPipelineOptions pipelineOptions =\nPipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions());\nint environmentCacheTTLMillis =\npipelineOptions.as(PortablePipelineOptions.class).getEnvironmentCacheMillis();\nif (environmentCacheTTLMillis > 0) {\nif (this.getClass().getClassLoader() != ExecutionEnvironment.class.getClassLoader()) {\nLOG.warn(\n\"{} is not loaded on parent Flink classloader. 
\"\n+ \"Falling back to synchronous environment release for job {}.\",\nthis.getClass(),\njobInfo.jobId());\nrelease(wrapper);\n} else {\ngetExecutor()\n.schedule(() -> release(wrapper), environmentCacheTTLMillis, TimeUnit.MILLISECONDS);\n}\n} else {\nrelease(wrapper);\n}\n}", + "context_before": "class ReferenceCountingFlinkExecutableStageContextFactory\nimplements FlinkExecutableStageContext.Factory {\nprivate static final Logger LOG =\nLoggerFactory.getLogger(ReferenceCountingFlinkExecutableStageContextFactory.class);\nprivate static final int MAX_RETRY = 3;\nprivate final Creator creator;\nprivate transient volatile ScheduledExecutorService executor;\nprivate transient volatile ConcurrentHashMap keyRegistry;\npublic static ReferenceCountingFlinkExecutableStageContextFactory create(Creator creator) {\nreturn new ReferenceCountingFlinkExecutableStageContextFactory(creator);\n}\nprivate ReferenceCountingFlinkExecutableStageContextFactory(Creator creator) {\nthis.creator = creator;\n}\n@Override\npublic FlinkExecutableStageContext get(JobInfo jobInfo) {\nfor (int retry = 0; retry < MAX_RETRY; retry++) {\nWrappedContext wrapper =\ngetCache()\n.computeIfAbsent(\njobInfo.jobId(),\njobId -> {\ntry {\nreturn new WrappedContext(jobInfo, creator.apply(jobInfo));\n} catch (Exception e) {\nthrow new RuntimeException(\n\"Unable to create context for job \" + jobInfo.jobId(), e);\n}\n});\nsynchronized (wrapper) {\nif (wrapper.referenceCount != null) {\nwrapper.referenceCount.incrementAndGet();\nreturn wrapper;\n}\n}\n}\nthrow new RuntimeException(\nString.format(\n\"Max retry %s exhausted while creating Context for job %s\",\nMAX_RETRY, jobInfo.jobId()));\n}\n@SuppressWarnings(\"FutureReturnValueIgnored\")\nprivate ConcurrentHashMap getCache() {\nif (keyRegistry != null) {\nreturn keyRegistry;\n}\nsynchronized (this) {\nif (keyRegistry == null) {\nkeyRegistry = new ConcurrentHashMap<>();\n}\nreturn keyRegistry;\n}\n}\nprivate ScheduledExecutorService getExecutor() {\nif (executor != null) {\nreturn executor;\n}\nsynchronized (this) {\nif (executor == null) {\nexecutor =\nExecutors.newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true).build());\n}\nreturn executor;\n}\n}\n@VisibleForTesting\nvoid release(FlinkExecutableStageContext context) {\n@SuppressWarnings({\"unchecked\", \"Not exected to be called from outside.\"})\nWrappedContext wrapper = (WrappedContext) context;\nsynchronized (wrapper) {\nif (wrapper.referenceCount.decrementAndGet() == 0) {\nwrapper.referenceCount = null;\nif (getCache().remove(wrapper.jobInfo.jobId(), wrapper)) {\ntry {\nwrapper.closeActual();\n} catch (Throwable t) {\nLOG.error(\"Unable to close FlinkExecutableStageContext.\", t);\n}\n}\n}\n}\n}\n/**\n* {@link WrappedContext} does not expose equals of actual {@link FlinkExecutableStageContext}.\n*/\nprivate class WrappedContext implements FlinkExecutableStageContext {\nprivate JobInfo jobInfo;\nprivate AtomicInteger referenceCount;\nprivate FlinkExecutableStageContext context;\n/** {@link WrappedContext\nWrappedContext(JobInfo jobInfo, FlinkExecutableStageContext context) {\nthis.jobInfo = jobInfo;\nthis.context = context;\nthis.referenceCount = new AtomicInteger(0);\n}\n@Override\npublic StageBundleFactory getStageBundleFactory(ExecutableStage executableStage) {\nreturn context.getStageBundleFactory(executableStage);\n}\n@Override\npublic void close() {\nscheduleRelease(jobInfo);\n}\nprivate void closeActual() throws Exception {\ncontext.close();\n}\n@Override\npublic boolean equals(Object o) {\nif (this == 
o) {\nreturn true;\n}\nif (o == null || getClass() != o.getClass()) {\nreturn false;\n}\nWrappedContext that = (WrappedContext) o;\nreturn Objects.equals(jobInfo.jobId(), that.jobInfo.jobId());\n}\n@Override\npublic int hashCode() {\nreturn Objects.hash(jobInfo);\n}\n@Override\npublic String toString() {\nreturn \"ContextWrapper{\"\n+ \"jobId='\"\n+ jobInfo\n+ '\\''\n+ \", referenceCount=\"\n+ referenceCount\n+ '}';\n}\n}\n/** Interface for creator which extends Serializable. */\npublic interface Creator\nextends ThrowingFunction, Serializable {}\n}", + "context_after": "class ReferenceCountingFlinkExecutableStageContextFactory\nimplements FlinkExecutableStageContext.Factory {\nprivate static final Logger LOG =\nLoggerFactory.getLogger(ReferenceCountingFlinkExecutableStageContextFactory.class);\nprivate static final int MAX_RETRY = 3;\nprivate final Creator creator;\nprivate transient volatile ScheduledExecutorService executor;\nprivate transient volatile ConcurrentHashMap keyRegistry;\npublic static ReferenceCountingFlinkExecutableStageContextFactory create(Creator creator) {\nreturn new ReferenceCountingFlinkExecutableStageContextFactory(creator);\n}\nprivate ReferenceCountingFlinkExecutableStageContextFactory(Creator creator) {\nthis.creator = creator;\n}\n@Override\npublic FlinkExecutableStageContext get(JobInfo jobInfo) {\nfor (int retry = 0; retry < MAX_RETRY; retry++) {\nWrappedContext wrapper =\ngetCache()\n.computeIfAbsent(\njobInfo.jobId(),\njobId -> {\ntry {\nreturn new WrappedContext(jobInfo, creator.apply(jobInfo));\n} catch (Exception e) {\nthrow new RuntimeException(\n\"Unable to create context for job \" + jobInfo.jobId(), e);\n}\n});\nsynchronized (wrapper) {\nif (wrapper.referenceCount != null) {\nwrapper.referenceCount.incrementAndGet();\nreturn wrapper;\n}\n}\n}\nthrow new RuntimeException(\nString.format(\n\"Max retry %s exhausted while creating Context for job %s\",\nMAX_RETRY, jobInfo.jobId()));\n}\n@SuppressWarnings(\"FutureReturnValueIgnored\")\nprivate ConcurrentHashMap getCache() {\nif (keyRegistry != null) {\nreturn keyRegistry;\n}\nsynchronized (this) {\nif (keyRegistry == null) {\nkeyRegistry = new ConcurrentHashMap<>();\n}\nreturn keyRegistry;\n}\n}\nprivate ScheduledExecutorService getExecutor() {\nif (executor != null) {\nreturn executor;\n}\nsynchronized (this) {\nif (executor == null) {\nexecutor =\nExecutors.newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true).build());\n}\nreturn executor;\n}\n}\n@VisibleForTesting\nvoid release(FlinkExecutableStageContext context) {\n@SuppressWarnings({\"unchecked\", \"Not exected to be called from outside.\"})\nWrappedContext wrapper = (WrappedContext) context;\nsynchronized (wrapper) {\nif (wrapper.referenceCount.decrementAndGet() == 0) {\nwrapper.referenceCount = null;\nif (getCache().remove(wrapper.jobInfo.jobId(), wrapper)) {\ntry {\nwrapper.closeActual();\n} catch (Throwable t) {\nLOG.error(\"Unable to close FlinkExecutableStageContext.\", t);\n}\n}\n}\n}\n}\n/**\n* {@link WrappedContext} does not expose equals of actual {@link FlinkExecutableStageContext}.\n*/\nprivate class WrappedContext implements FlinkExecutableStageContext {\nprivate JobInfo jobInfo;\nprivate AtomicInteger referenceCount;\nprivate FlinkExecutableStageContext context;\n/** {@link WrappedContext\nWrappedContext(JobInfo jobInfo, FlinkExecutableStageContext context) {\nthis.jobInfo = jobInfo;\nthis.context = context;\nthis.referenceCount = new AtomicInteger(0);\n}\n@Override\npublic StageBundleFactory 
getStageBundleFactory(ExecutableStage executableStage) {\nreturn context.getStageBundleFactory(executableStage);\n}\n@Override\npublic void close() {\nscheduleRelease(jobInfo);\n}\nprivate void closeActual() throws Exception {\ncontext.close();\n}\n@Override\npublic boolean equals(Object o) {\nif (this == o) {\nreturn true;\n}\nif (o == null || getClass() != o.getClass()) {\nreturn false;\n}\nWrappedContext that = (WrappedContext) o;\nreturn Objects.equals(jobInfo.jobId(), that.jobInfo.jobId());\n}\n@Override\npublic int hashCode() {\nreturn Objects.hash(jobInfo);\n}\n@Override\npublic String toString() {\nreturn \"ContextWrapper{\"\n+ \"jobId='\"\n+ jobInfo\n+ '\\''\n+ \", referenceCount=\"\n+ referenceCount\n+ '}';\n}\n}\n/** Interface for creator which extends Serializable. */\npublic interface Creator\nextends ThrowingFunction, Serializable {}\n}" + }, + { + "comment": "```suggestion \"function mocking is not supported with standalone Ballerina files\"); ```", + "method_body": "public void process(SimpleVariableNode simpleVariableNode, List annotations) {\nBLangPackage parent = (BLangPackage) ((BLangSimpleVariable) simpleVariableNode).parent;\nString packageName = getPackageName(parent);\nannotations = annotations.stream().distinct().collect(Collectors.toList());\nfor (AnnotationAttachmentNode attachmentNode : annotations) {\nif (packageName.equals(DOT)) {\ndiagnosticLog.logDiagnostic(\nDiagnosticSeverity.ERROR, (ModuleDescriptor) null, attachmentNode.getPosition(),\n\"function mocking is not supported for single file projects\");\nreturn;\n}\nString annotationName = attachmentNode.getAnnotationName().getValue();\nif (MOCK_ANNOTATION_NAME.equals(annotationName)) {\nString type = ((BLangUserDefinedType) ((BLangSimpleVariable) simpleVariableNode).typeNode).\ntypeName.getValue();\nif (type.equals(\"MockFunction\")) {\nString mockFnObjectName = simpleVariableNode.getName().getValue();\nString[] annotationValues = new String[2];\nannotationValues[0] = packageName;\nif (null == attachmentNode.getExpression()\n|| attachmentNode.getExpression().getKind() != NodeKind.RECORD_LITERAL_EXPR) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"annotation should be a record with 'functionName' and 'moduleName'(optional) fields\");\ncontinue;\n}\nList fields =\n((BLangRecordLiteral) attachmentNode.getExpression()).getFields();\nsetAnnotationValues(fields, annotationValues, attachmentNode, parent);\nPackageID functionToMockID = getPackageID(annotationValues[0]);\nboolean validFunctionName = isValidFunctionName(\nannotationValues[1], annotationValues[0], functionToMockID, attachmentNode);\nif (!validFunctionName) {\nreturn;\n}\nBLangTestablePackage bLangTestablePackage =\n(BLangTestablePackage) ((BLangSimpleVariable) simpleVariableNode).parent;\nbLangTestablePackage.addMockFunction(\nfunctionToMockID + MOCK_FN_DELIMITER + annotationValues[1],\nmockFnObjectName);\nif (functionToMockID != null) {\nString className = getQualifiedClassName(bLangTestablePackage,\nfunctionToMockID.toString(), annotationValues[1]);\nregistry.addMockFunctionsSourceMap(bLangTestablePackage.packageID.getName().toString()\n+ MODULE_DELIMITER + className + MOCK_FN_DELIMITER + annotationValues[1],\nmockFnObjectName);\n}\n} else {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"Annotation can only be attached to a test:MockFunction object\");\n}\n}\n}\n}", + "target_code": "\"function mocking is not supported for single file projects\");", + 
"method_body_after": "public void process(SimpleVariableNode simpleVariableNode, List annotations) {\nBLangPackage parent = (BLangPackage) ((BLangSimpleVariable) simpleVariableNode).parent;\nString packageName = getPackageName(parent);\nannotations = annotations.stream().distinct().collect(Collectors.toList());\nfor (AnnotationAttachmentNode attachmentNode : annotations) {\nif (packageName.equals(DOT)) {\ndiagnosticLog.logDiagnostic(\nDiagnosticSeverity.ERROR, (ModuleDescriptor) null, attachmentNode.getPosition(),\n\"function mocking is not supported with standalone Ballerina files\");\nreturn;\n}\nString annotationName = attachmentNode.getAnnotationName().getValue();\nif (MOCK_ANNOTATION_NAME.equals(annotationName)) {\nString type = ((BLangUserDefinedType) ((BLangSimpleVariable) simpleVariableNode).typeNode).\ntypeName.getValue();\nif (type.equals(\"MockFunction\")) {\nString mockFnObjectName = simpleVariableNode.getName().getValue();\nString[] annotationValues = new String[2];\nannotationValues[0] = packageName;\nif (null == attachmentNode.getExpression()\n|| attachmentNode.getExpression().getKind() != NodeKind.RECORD_LITERAL_EXPR) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"missing required 'functionName' field\");\ncontinue;\n}\nList fields =\n((BLangRecordLiteral) attachmentNode.getExpression()).getFields();\nsetAnnotationValues(fields, annotationValues, attachmentNode, parent);\nPackageID functionToMockID = getPackageID(annotationValues[0]);\nboolean validFunctionName = isValidFunctionName(\nannotationValues[1], annotationValues[0], functionToMockID, attachmentNode);\nif (!validFunctionName) {\nreturn;\n}\nBLangTestablePackage bLangTestablePackage =\n(BLangTestablePackage) ((BLangSimpleVariable) simpleVariableNode).parent;\nbLangTestablePackage.addMockFunction(\nfunctionToMockID + MOCK_FN_DELIMITER + annotationValues[1],\nmockFnObjectName);\nif (functionToMockID != null) {\nString className = getQualifiedClassName(bLangTestablePackage,\nfunctionToMockID.toString(), annotationValues[1]);\nregistry.addMockFunctionsSourceMap(bLangTestablePackage.packageID.getName().toString()\n+ MODULE_DELIMITER + className + MOCK_FN_DELIMITER + annotationValues[1],\nmockFnObjectName);\n}\n} else {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"Annotation can only be attached to a test:MockFunction object\");\n}\n}\n}\n}", + "context_before": "class MockAnnotationProcessor extends AbstractCompilerPlugin {\nprivate static final String MOCK_ANNOTATION_NAME = \"Mock\";\nprivate static final String MODULE = \"moduleName\";\nprivate static final String FUNCTION = \"functionName\";\nprivate static final String MOCK_FN_DELIMITER = \"\nprivate static final String MOCK_LEGACY_DELIMITER = \"~\";\nprivate static final String MODULE_DELIMITER = \"\u00a7\";\nprivate CompilerContext compilerContext;\nprivate DiagnosticLog diagnosticLog;\nprivate PackageCache packageCache;\nprivate Map packageEnvironmentMap;\nprivate SymbolResolver symbolResolver;\nprivate Types typeChecker;\nprivate final TesterinaRegistry registry = TesterinaRegistry.getInstance();\n/**\n* this property is used as a work-around to initialize test suites only once for a package as Compiler\n* Annotation currently emits package import events too to the process method.\n*/\n@Override\npublic void init(DiagnosticLog diagnosticLog) {\nthis.diagnosticLog = diagnosticLog;\nthis.packageEnvironmentMap = SymbolTable.getInstance(compilerContext).pkgEnvMap;\nthis.packageCache 
= PackageCache.getInstance(compilerContext);\nthis.symbolResolver = SymbolResolver.getInstance(compilerContext);\nthis.typeChecker = Types.getInstance(compilerContext);\n}\n@Override\npublic void setCompilerContext(CompilerContext context) {\nthis.compilerContext = context;\n}\n@Override\n@Override\npublic void process(FunctionNode functionNode, List annotations) {\nBLangPackage parent = (BLangPackage) ((BLangFunction) functionNode).parent;\nString packageName = getPackageName(parent);\nannotations = annotations.stream().distinct().collect(Collectors.toList());\nfor (AnnotationAttachmentNode attachmentNode : annotations) {\nString annotationName = attachmentNode.getAnnotationName().getValue();\nString functionName = functionNode.getName().getValue();\nif (MOCK_ANNOTATION_NAME.equals(annotationName)) {\nString[] vals = new String[2];\nvals[0] = packageName;\nvals[1] = \"\";\nif (attachmentNode.getExpression() instanceof BLangRecordLiteral) {\nList attributes = ((BLangRecordLiteral) attachmentNode\n.getExpression()).getFields();\nattributes.forEach(field -> {\nString name;\nBLangExpression valueExpr;\nif (field.isKeyValueField()) {\nBLangRecordLiteral.BLangRecordKeyValueField attributeNode =\n(BLangRecordLiteral.BLangRecordKeyValueField) field;\nname = attributeNode.getKey().toString();\nvalueExpr = attributeNode.getValue();\n} else {\nBLangRecordLiteral.BLangRecordVarNameField varNameField =\n(BLangRecordLiteral.BLangRecordVarNameField) field;\nname = varNameField.variableName.value;\nvalueExpr = varNameField;\n}\nString value = valueExpr.toString();\nif (MODULE.equals(name)) {\nvalue = formatPackageName(value, parent);\nvals[0] = value;\n} else if (FUNCTION.equals(name)) {\nvals[1] = value;\n}\n});\nif (vals[1].isEmpty()) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"function name cannot be empty\");\nbreak;\n}\nPackageID functionToMockID = getPackageID(vals[0]);\nif (functionToMockID == null) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"could not find specified module '\" + vals[0] + \"'\");\nbreak;\n}\nBType functionToMockType = getFunctionType(packageEnvironmentMap, functionToMockID, vals[1]);\nBType mockFunctionType = getFunctionType(packageEnvironmentMap, parent.packageID,\n((BLangFunction) functionNode).name.toString());\nif (functionToMockType != null && mockFunctionType != null) {\nif (!typeChecker.isAssignable(mockFunctionType, functionToMockType)) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, ((BLangFunction) functionNode).pos,\n\"incompatible types: expected \" + functionToMockType\n+ \" but found \" + mockFunctionType);\nbreak;\n}\n} else {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"could not find function '\" + vals[1] + \"' in module '\" + vals[0] + \"'\");\nbreak;\n}\nBLangTestablePackage bLangTestablePackage =\n(BLangTestablePackage) ((BLangFunction) functionNode).parent;\nbLangTestablePackage.addMockFunction(functionToMockID + MOCK_LEGACY_DELIMITER + vals[1],\nfunctionName);\nString className = getQualifiedClassName(bLangTestablePackage,\nfunctionToMockID.toString(), vals[1]);\nvals[1] = vals[1].replaceAll(\"\\\\\\\\\", \"\");\nregistry.addMockFunctionsSourceMap(bLangTestablePackage.packageID.getName().toString()\n+ MODULE_DELIMITER + className + MOCK_LEGACY_DELIMITER + vals[1], functionName);\n} else {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"annotation should be a record with 
'functionName' and 'moduleName'(optional) fields\");\n}\n}\n}\n}\n/**\n* Iterate through each field and assign the annotation values for moduleName and functionName.\n*\n* @param fields list of fields\n* @param annotationValues Array of annotation values\n* @param attachmentNode AnnotationAttachmentNode\n* @param parent BLangPackage\n*/\nprivate void setAnnotationValues(List fields, String[] annotationValues,\nAnnotationAttachmentNode attachmentNode, BLangPackage parent) {\nfields.forEach(field -> {\nString name;\nBLangExpression valueExpr;\nif (field.isKeyValueField()) {\nBLangRecordLiteral.BLangRecordKeyValueField attributeNode =\n(BLangRecordLiteral.BLangRecordKeyValueField) field;\nname = attributeNode.getKey().toString();\nvalueExpr = attributeNode.getValue();\nString value = valueExpr.toString();\nif (MODULE.equals(name)) {\nvalue = formatPackageName(value, parent);\nannotationValues[0] = value;\n} else if (FUNCTION.equals(name)) {\nannotationValues[1] = value;\n}\n} else {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"Annotation fields must be key-value pairs\");\n}\n});\n}\n/**\n* Returns a PackageID for the passed moduleName.\n*\n* @param moduleName Module name passed via function annotation\n* @return Module packageID\n*/\nprivate PackageID getPackageID(String moduleName) {\nif (packageCache.getSymbol(moduleName) != null) {\nreturn packageCache.getSymbol(moduleName).pkgID;\n} else {\nreturn null;\n}\n}\n/**\n* Formats the package name obtained from the mock annotation.\n* Checks for empty, '.', or single module names and replaces them.\n* Ballerina modules and fully qualified packages are simply returned\n*\n* @param value package name\n* @return formatted package name\n*/\nprivate String formatPackageName(String value, BLangPackage parent) {\nif (value.isEmpty() || value.equals(Names.DOT.value)) {\nvalue = parent.packageID.toString();\n} else if (!value.contains(Names.ORG_NAME_SEPARATOR.value) && !value.contains(Names.VERSION_SEPARATOR.value)) {\nvalue = new PackageID(parent.packageID.orgName, new Name(value),\nparent.packageID.version).toString();\n}\nreturn value;\n}\n/**\n* Validates the function name provided in the annotation.\n*\n* @param functionName Name of the function to mock\n* @param attachmentNode MockFunction object attachment node\n* @return true if the provided function name valid\n*/\nprivate boolean isValidFunctionName(String functionName, String moduleName, PackageID functionToMockID,\nAnnotationAttachmentNode attachmentNode) {\nif (functionToMockID == null) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"could not find specified module '\" + moduleName + \"'\");\n} else {\nif (functionName == null) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"function name cannot be empty\");\n} else {\nfor (Map.Entry entry : this.packageEnvironmentMap.entrySet()) {\nif (entry.getKey().pkgID.equals(functionToMockID)) {\nif (entry.getValue().scope.entries.containsKey(new Name(functionName))) {\nreturn true;\n}\n}\n}\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"could not find function '\" + functionName + \"' in module '\" + moduleName + \"'\");\n}\n}\nreturn false;\n}\n/**\n* Get Package Name.\n* @param packageNode PackageNode instance\n* @return package name\n*/\nprivate String getPackageName(PackageNode packageNode) {\nBLangPackage bLangPackage = ((BLangPackage) packageNode);\nreturn 
bLangPackage.packageID.toString();\n}\n/**\n* Get the function type by iterating through the packageEnvironmentMap.\n*\n* @param pkgEnvMap map of BPackageSymbol and its respective SymbolEnv\n* @param packageID Fully qualified package ID of the respective function\n* @param functionName Name of the function\n* @return Function type if found, null if not found\n*/\nprivate BType getFunctionType(Map pkgEnvMap, PackageID packageID, String functionName) {\nfor (Map.Entry entry : pkgEnvMap.entrySet()) {\nif (entry.getKey().pkgID.equals(packageID)) {\nBSymbol symbol = symbolResolver.lookupSymbolInMainSpace(entry.getValue(), new Name(functionName));\nif (!symbol.getType().toString().equals(\"other\")) {\nreturn symbol.getType();\n}\n}\n}\nreturn null;\n}\nprivate String getQualifiedClassName(BLangTestablePackage bLangTestablePackage,\nString pkgId, String functionName) {\nString className;\nif (bLangTestablePackage.packageID.toString().equals(pkgId)) {\nif (bLangTestablePackage.symbol.scope.entries.containsKey(new Name(functionName))) {\nBSymbol symbol = bLangTestablePackage.symbol.scope.entries.get(new Name(functionName)).symbol;\nclassName = getClassName(bLangTestablePackage.symbol, symbol.getPosition());\n} else {\nBLangPackage parentPkg = bLangTestablePackage.parent;\nBSymbol symbol = parentPkg.symbol.scope.entries.get(new Name(functionName)).symbol;\nclassName = getClassName(parentPkg.symbol, symbol.getPosition());\n}\n} else {\nclassName = getImportedFunctionClassName(bLangTestablePackage,\npkgId, functionName);\n}\nreturn className;\n}\nprivate String getImportedFunctionClassName(BLangTestablePackage bLangTestablePackage,\nString pkgId, String functionName) {\nString className = getClassName(bLangTestablePackage.getImports(), pkgId, functionName);\nif (className == null) {\nclassName = getClassName(bLangTestablePackage.parent.getImports(), pkgId, functionName);\n}\nreturn className;\n}\nprivate String getClassName(List imports, String pkgId, String functionName) {\nfor (BLangImportPackage importPackage : imports) {\nif (importPackage.symbol.pkgID.toString().equals(pkgId)) {\nBSymbol bInvokableSymbol = importPackage.symbol.scope.entries\n.get(new Name(functionName)).symbol;\nreturn getClassName(importPackage.symbol, bInvokableSymbol.getPosition());\n}\n}\nreturn null;\n}\nprivate String getClassName(BPackageSymbol bPackageSymbol, Location pos) {\nreturn JarResolver.getQualifiedClassName(\nbPackageSymbol.pkgID.orgName.getValue(),\nbPackageSymbol.pkgID.name.getValue(),\nbPackageSymbol.pkgID.version.getValue(),\npos.lineRange().filePath()\n.replace(ProjectConstants.BLANG_SOURCE_EXT, \"\")\n.replace(ProjectConstants.DOT, FILE_NAME_PERIOD_SEPARATOR)\n.replace(\"/\", ProjectConstants.DOT));\n}\n}", + "context_after": "class MockAnnotationProcessor extends AbstractCompilerPlugin {\nprivate static final String MOCK_ANNOTATION_NAME = \"Mock\";\nprivate static final String MODULE = \"moduleName\";\nprivate static final String FUNCTION = \"functionName\";\nprivate static final String MOCK_FN_DELIMITER = \"\nprivate static final String MOCK_LEGACY_DELIMITER = \"~\";\nprivate static final String MODULE_DELIMITER = \"\u00a7\";\nprivate CompilerContext compilerContext;\nprivate DiagnosticLog diagnosticLog;\nprivate PackageCache packageCache;\nprivate Map packageEnvironmentMap;\nprivate SymbolResolver symbolResolver;\nprivate Types typeChecker;\nprivate final TesterinaRegistry registry = TesterinaRegistry.getInstance();\n/**\n* this property is used as a work-around to initialize test suites only once for a 
package as Compiler\n* Annotation currently emits package import events too to the process method.\n*/\n@Override\npublic void init(DiagnosticLog diagnosticLog) {\nthis.diagnosticLog = diagnosticLog;\nthis.packageEnvironmentMap = SymbolTable.getInstance(compilerContext).pkgEnvMap;\nthis.packageCache = PackageCache.getInstance(compilerContext);\nthis.symbolResolver = SymbolResolver.getInstance(compilerContext);\nthis.typeChecker = Types.getInstance(compilerContext);\n}\n@Override\npublic void setCompilerContext(CompilerContext context) {\nthis.compilerContext = context;\n}\n@Override\n@Override\npublic void process(FunctionNode functionNode, List annotations) {\nBLangPackage parent = (BLangPackage) ((BLangFunction) functionNode).parent;\nString packageName = getPackageName(parent);\nannotations = annotations.stream().distinct().collect(Collectors.toList());\nfor (AnnotationAttachmentNode attachmentNode : annotations) {\nString annotationName = attachmentNode.getAnnotationName().getValue();\nString functionName = functionNode.getName().getValue();\nif (MOCK_ANNOTATION_NAME.equals(annotationName)) {\nString[] vals = new String[2];\nvals[0] = packageName;\nvals[1] = \"\";\nif (attachmentNode.getExpression() instanceof BLangRecordLiteral) {\nList attributes = ((BLangRecordLiteral) attachmentNode\n.getExpression()).getFields();\nattributes.forEach(field -> {\nString name;\nBLangExpression valueExpr;\nif (field.isKeyValueField()) {\nBLangRecordLiteral.BLangRecordKeyValueField attributeNode =\n(BLangRecordLiteral.BLangRecordKeyValueField) field;\nname = attributeNode.getKey().toString();\nvalueExpr = attributeNode.getValue();\n} else {\nBLangRecordLiteral.BLangRecordVarNameField varNameField =\n(BLangRecordLiteral.BLangRecordVarNameField) field;\nname = varNameField.variableName.value;\nvalueExpr = varNameField;\n}\nString value = valueExpr.toString();\nif (MODULE.equals(name)) {\nvalue = formatPackageName(value, parent);\nvals[0] = value;\n} else if (FUNCTION.equals(name)) {\nvals[1] = value;\n}\n});\nif (vals[1].isEmpty()) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"function name cannot be empty\");\nbreak;\n}\nPackageID functionToMockID = getPackageID(vals[0]);\nif (functionToMockID == null) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"cannot find the specified module '\" + vals[0] + \"'\");\nbreak;\n}\nBType functionToMockType = getFunctionType(packageEnvironmentMap, functionToMockID, vals[1]);\nBType mockFunctionType = getFunctionType(packageEnvironmentMap, parent.packageID,\n((BLangFunction) functionNode).name.toString());\nif (functionToMockType != null && mockFunctionType != null) {\nif (!typeChecker.isAssignable(mockFunctionType, functionToMockType)) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, ((BLangFunction) functionNode).pos,\n\"incompatible types: expected \" + functionToMockType\n+ \" but found \" + mockFunctionType);\nbreak;\n}\n} else {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"cannot find the function '\" + vals[1] + \"' in module '\" + vals[0] + \"'\");\nbreak;\n}\nBLangTestablePackage bLangTestablePackage =\n(BLangTestablePackage) ((BLangFunction) functionNode).parent;\nbLangTestablePackage.addMockFunction(functionToMockID + MOCK_LEGACY_DELIMITER + vals[1],\nfunctionName);\nString className = getQualifiedClassName(bLangTestablePackage,\nfunctionToMockID.toString(), vals[1]);\nvals[1] = vals[1].replaceAll(\"\\\\\\\\\", 
\"\");\nregistry.addMockFunctionsSourceMap(bLangTestablePackage.packageID.getName().toString()\n+ MODULE_DELIMITER + className + MOCK_LEGACY_DELIMITER + vals[1], functionName);\n} else {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"missing required 'functionName' field\");\n}\n}\n}\n}\n/**\n* Iterate through each field and assign the annotation values for moduleName and functionName.\n*\n* @param fields list of fields\n* @param annotationValues Array of annotation values\n* @param attachmentNode AnnotationAttachmentNode\n* @param parent BLangPackage\n*/\nprivate void setAnnotationValues(List fields, String[] annotationValues,\nAnnotationAttachmentNode attachmentNode, BLangPackage parent) {\nfields.forEach(field -> {\nString name;\nBLangExpression valueExpr;\nif (field.isKeyValueField()) {\nBLangRecordLiteral.BLangRecordKeyValueField attributeNode =\n(BLangRecordLiteral.BLangRecordKeyValueField) field;\nname = attributeNode.getKey().toString();\nvalueExpr = attributeNode.getValue();\nString value = valueExpr.toString();\nif (MODULE.equals(name)) {\nvalue = formatPackageName(value, parent);\nannotationValues[0] = value;\n} else if (FUNCTION.equals(name)) {\nannotationValues[1] = value;\n}\n} else {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"Annotation fields must be key-value pairs\");\n}\n});\n}\n/**\n* Returns a PackageID for the passed moduleName.\n*\n* @param moduleName Module name passed via function annotation\n* @return Module packageID\n*/\nprivate PackageID getPackageID(String moduleName) {\nif (packageCache.getSymbol(moduleName) != null) {\nreturn packageCache.getSymbol(moduleName).pkgID;\n} else {\nreturn null;\n}\n}\n/**\n* Formats the package name obtained from the mock annotation.\n* Checks for empty, '.', or single module names and replaces them.\n* Ballerina modules and fully qualified packages are simply returned\n*\n* @param value package name\n* @return formatted package name\n*/\nprivate String formatPackageName(String value, BLangPackage parent) {\nif (value.isEmpty() || value.equals(Names.DOT.value)) {\nvalue = parent.packageID.toString();\n} else if (!value.contains(Names.ORG_NAME_SEPARATOR.value) && !value.contains(Names.VERSION_SEPARATOR.value)) {\nvalue = new PackageID(parent.packageID.orgName, new Name(value),\nparent.packageID.version).toString();\n}\nreturn value;\n}\n/**\n* Validates the function name provided in the annotation.\n*\n* @param functionName Name of the function to mock\n* @param attachmentNode MockFunction object attachment node\n* @return true if the provided function name valid\n*/\nprivate boolean isValidFunctionName(String functionName, String moduleName, PackageID functionToMockID,\nAnnotationAttachmentNode attachmentNode) {\nif (functionToMockID == null) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"cannot find the specified module '\" + moduleName + \"'\");\n} else {\nif (functionName == null) {\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"function name cannot be empty\");\n} else {\nfor (Map.Entry entry : this.packageEnvironmentMap.entrySet()) {\nif (entry.getKey().pkgID.equals(functionToMockID)) {\nif (entry.getValue().scope.entries.containsKey(new Name(functionName))) {\nreturn true;\n}\n}\n}\ndiagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(),\n\"cannot find the function '\" + functionName + \"' in module '\" + moduleName + 
\"'\");\n}\n}\nreturn false;\n}\n/**\n* Get Package Name.\n* @param packageNode PackageNode instance\n* @return package name\n*/\nprivate String getPackageName(PackageNode packageNode) {\nBLangPackage bLangPackage = ((BLangPackage) packageNode);\nreturn bLangPackage.packageID.toString();\n}\n/**\n* Get the function type by iterating through the packageEnvironmentMap.\n*\n* @param pkgEnvMap map of BPackageSymbol and its respective SymbolEnv\n* @param packageID Fully qualified package ID of the respective function\n* @param functionName Name of the function\n* @return Function type if found, null if not found\n*/\nprivate BType getFunctionType(Map pkgEnvMap, PackageID packageID, String functionName) {\nfor (Map.Entry entry : pkgEnvMap.entrySet()) {\nif (entry.getKey().pkgID.equals(packageID)) {\nBSymbol symbol = symbolResolver.lookupSymbolInMainSpace(entry.getValue(), new Name(functionName));\nif (!symbol.getType().toString().equals(\"other\")) {\nreturn symbol.getType();\n}\n}\n}\nreturn null;\n}\nprivate String getQualifiedClassName(BLangTestablePackage bLangTestablePackage,\nString pkgId, String functionName) {\nString className;\nif (bLangTestablePackage.packageID.toString().equals(pkgId)) {\nif (bLangTestablePackage.symbol.scope.entries.containsKey(new Name(functionName))) {\nBSymbol symbol = bLangTestablePackage.symbol.scope.entries.get(new Name(functionName)).symbol;\nclassName = getClassName(bLangTestablePackage.symbol, symbol.getPosition());\n} else {\nBLangPackage parentPkg = bLangTestablePackage.parent;\nBSymbol symbol = parentPkg.symbol.scope.entries.get(new Name(functionName)).symbol;\nclassName = getClassName(parentPkg.symbol, symbol.getPosition());\n}\n} else {\nclassName = getImportedFunctionClassName(bLangTestablePackage,\npkgId, functionName);\n}\nreturn className;\n}\nprivate String getImportedFunctionClassName(BLangTestablePackage bLangTestablePackage,\nString pkgId, String functionName) {\nString className = getClassName(bLangTestablePackage.getImports(), pkgId, functionName);\nif (className == null) {\nclassName = getClassName(bLangTestablePackage.parent.getImports(), pkgId, functionName);\n}\nreturn className;\n}\nprivate String getClassName(List imports, String pkgId, String functionName) {\nfor (BLangImportPackage importPackage : imports) {\nif (importPackage.symbol.pkgID.toString().equals(pkgId)) {\nBSymbol bInvokableSymbol = importPackage.symbol.scope.entries\n.get(new Name(functionName)).symbol;\nreturn getClassName(importPackage.symbol, bInvokableSymbol.getPosition());\n}\n}\nreturn null;\n}\nprivate String getClassName(BPackageSymbol bPackageSymbol, Location pos) {\nreturn JarResolver.getQualifiedClassName(\nbPackageSymbol.pkgID.orgName.getValue(),\nbPackageSymbol.pkgID.name.getValue(),\nbPackageSymbol.pkgID.version.getValue(),\npos.lineRange().filePath()\n.replace(ProjectConstants.BLANG_SOURCE_EXT, \"\")\n.replace(ProjectConstants.DOT, FILE_NAME_PERIOD_SEPARATOR)\n.replace(\"/\", ProjectConstants.DOT));\n}\n}" + }, + { + "comment": "if status == null, I think we should throw something because this should not happen. 
Not sure what you'd do with a null status.", "method_body": "public static OperationStatus fromString(String name, boolean isComplete) {\nOperationStatus status = fromString(name, OperationStatus.class);\nif (status != null) {\nfor (OperationStatus opStatus : values(OperationStatus.class)) {\nif (opStatus.toString().equals(name)) {\nif (!(opStatus.isComplete() == isComplete)) {\nthrow new IllegalArgumentException(String.format(\"Cannot set complete status %s for\"\n+ \" operation status %s\", isComplete, name));\n}\n}\n}\nstatus.completed = isComplete;\n}\nreturn status;\n}", "target_code": "if (status != null) {", "method_body_after": "public static OperationStatus fromString(String name, boolean isComplete) {\nOperationStatus status = fromString(name, OperationStatus.class);\nif (status != null) {\nif (operationStatusMap != null && operationStatusMap.containsKey(name)) {\nOperationStatus operationStatus = operationStatusMap.get(name);\nif (operationStatus.isComplete() != isComplete) {\nthrow new IllegalArgumentException(String.format(\"Cannot set complete status %s for operation\"\n+ \"status %s\", isComplete, name));\n}\n}\nstatus.completed = isComplete;\n}\nreturn status;\n}", "context_before": "class OperationStatus extends ExpandableStringEnum {\nprivate boolean completed;\n/** Represents that polling has not yet started for this long-running operation. */\npublic static final OperationStatus NOT_STARTED = fromString(\"NOT_STARTED\", false);\n/** Represents that this long-running operation is in progress and not yet complete. */\npublic static final OperationStatus IN_PROGRESS = fromString(\"IN_PROGRESS\", false);\n/** Represent that this long-running operation is completed successfully. */\npublic static final OperationStatus SUCCESSFULLY_COMPLETED = fromString(\"SUCCESSFULLY_COMPLETED\",\ntrue);\n/**\n* Represents that this long-running operation has failed to successfully complete, however this is still\n* considered as complete long-running operation, meaning that the {@link Poller} instance will report that it\n* is complete.\n*/\npublic static final OperationStatus FAILED = fromString(\"FAILED\", true);\n/**\n* Represents that this long-running operation is cancelled by user, however this is still\n* considered as complete long-running operation.\n*/\npublic static final OperationStatus USER_CANCELLED = fromString(\"USER_CANCELLED\", true);\n/**\n* Creates or finds a {@link OperationStatus} from its string representation.\n* @param name a name to look for\n* @param isComplete a status to indicate if the operation is complete or not.\n* @throws IllegalArgumentException if invalid {@code isComplete} is provided for a pre-configured\n* {@link OperationStatus} with {@code name}\n* @return the corresponding {@link OperationStatus}\n*/\npublic boolean isComplete() {\nreturn completed;\n}\n}", "context_after": "class OperationStatus extends ExpandableStringEnum {\nprivate boolean completed;\n/** Represents that polling has not yet started for this long-running operation. */\npublic static final OperationStatus NOT_STARTED = fromString(\"NOT_STARTED\", false);\n/** Represents that this long-running operation is in progress and not yet complete. */\npublic static final OperationStatus IN_PROGRESS = fromString(\"IN_PROGRESS\", false);\n/** Represent that this long-running operation is completed successfully. 
*/\npublic static final OperationStatus SUCCESSFULLY_COMPLETED = fromString(\"SUCCESSFULLY_COMPLETED\",\ntrue);\n/**\n* Represents that this long-running operation has failed to successfully complete, however this is still\n* considered as complete long-running operation, meaning that the {@link Poller} instance will report that it\n* is complete.\n*/\npublic static final OperationStatus FAILED = fromString(\"FAILED\", true);\n/**\n* Represents that this long-running operation is cancelled by user, however this is still\n* considered as complete long-running operation.\n*/\npublic static final OperationStatus USER_CANCELLED = fromString(\"USER_CANCELLED\", true);\nprivate static Map operationStatusMap;\nstatic {\nMap opStatusMap = new HashMap<>();\nopStatusMap.put(NOT_STARTED.toString(), NOT_STARTED);\nopStatusMap.put(IN_PROGRESS.toString(), IN_PROGRESS);\nopStatusMap.put(SUCCESSFULLY_COMPLETED.toString(), SUCCESSFULLY_COMPLETED);\nopStatusMap.put(FAILED.toString(), FAILED);\nopStatusMap.put(USER_CANCELLED.toString(), USER_CANCELLED);\noperationStatusMap = Collections.unmodifiableMap(opStatusMap);\n}\n/**\n* Creates or finds a {@link OperationStatus} from its string representation.\n* @param name a name to look for\n* @param isComplete a status to indicate if the operation is complete or not.\n* @throws IllegalArgumentException if {@code name} matches a pre-configured {@link OperationStatus} but\n* {@code isComplete} doesn't match its pre-configured complete status.\n* @return the corresponding {@link OperationStatus}\n*/\npublic boolean isComplete() {\nreturn completed;\n}\n}" + }, + { + "comment": "Seems PostgreSQL and openGauss could not get `beforeRows` from `UpdateRowsEvent`, not like MySQL binlog. And update event include all columns. ``` scaling1=# update t_order_0 set status='ok1' where order_id=1; UPDATE 1 scaling1=# SELECT * FROM pg_logical_slot_get_changes('regression_slot', NULL, NULL); lsn | xid | data -----------+-----+-------------------------------------------------------------------------------------------------------- 0/17B18A8 | 599 | BEGIN 599 0/17B18A8 | 599 | table public.t_order_0: UPDATE: order_id[integer]:1 user_id[integer]:2 status[character varying]:'ok1' 0/17B1CB8 | 599 | COMMIT 599 (3 rows) ``` We'll make `Column.oldValue` not final for now.", + "method_body": "private void updateRecordOldValue(final Record record) {\nif (!(record instanceof DataRecord)) {\nreturn;\n}\nDataRecord dataRecord = (DataRecord) record;\nif (!ScalingConstant.UPDATE.equals(dataRecord.getType())) {\nreturn;\n}\nfor (Column col: dataRecord.getColumns()) {\nif (col.isPrimaryKey() && col.isUpdated()) {\ncol.setOldValue(col.getValue());\n}\n}\n}", + "target_code": "}", + "method_body_after": "private void updateRecordOldValue(final Record record) {\nif (!(record instanceof DataRecord)) {\nreturn;\n}\nDataRecord dataRecord = (DataRecord) record;\nif (!ScalingConstant.UPDATE.equals(dataRecord.getType())) {\nreturn;\n}\nfor (Column col: dataRecord.getColumns()) {\nif (col.isPrimaryKey() && col.isUpdated()) {\ncol.setOldValue(col.getValue());\n}\n}\n}", + "context_before": "class OpenGaussWalDumper extends AbstractScalingExecutor implements IncrementalDumper {\nprivate final WalPosition walPosition;\nprivate final DumperConfiguration dumperConfig;\nprivate final OpenGaussLogicalReplication logicalReplication = new OpenGaussLogicalReplication();\nprivate final WalEventConverter walEventConverter;\nprivate String slotName = OpenGaussLogicalReplication.SLOT_NAME_PREFIX;\n@Setter\nprivate Channel 
channel;\npublic OpenGaussWalDumper(final DumperConfiguration dumperConfig, final ScalingPosition position) {\nwalPosition = (WalPosition) position;\nif (!StandardJDBCDataSourceConfiguration.class.equals(dumperConfig.getDataSourceConfig().getClass())) {\nthrow new UnsupportedOperationException(\"PostgreSQLWalDumper only support JDBCDataSourceConfiguration\");\n}\nthis.dumperConfig = dumperConfig;\nwalEventConverter = new WalEventConverter(dumperConfig);\n}\n@Override\npublic void start() {\nsuper.start();\ndump();\n}\nprivate PgConnection getReplicationConn() throws SQLException {\nreturn logicalReplication\n.createPgConnection((StandardJDBCDataSourceConfiguration) dumperConfig.getDataSourceConfig())\n.unwrap(PgConnection.class);\n}\nprivate MppdbDecodingPlugin initReplication() {\nMppdbDecodingPlugin plugin = null;\ntry {\nDataSource dataSource = dumperConfig.getDataSourceConfig().toDataSource();\ntry (Connection conn = dataSource.getConnection()) {\nslotName = OpenGaussLogicalReplication.getUniqueSlotName(conn);\nOpenGaussLogicalReplication.createIfNotExists(conn);\nOpenGaussTimestampUtils utils = new OpenGaussTimestampUtils(conn.unwrap(PgConnection.class).getTimestampUtils());\nplugin = new MppdbDecodingPlugin(utils);\n}\n} catch (SQLException sqlExp) {\nlog.warn(\"create replication slot failed!\");\n}\nreturn plugin;\n}\nprivate void dump() {\nDecodingPlugin decodingPlugin = initReplication();\ntry (PgConnection pgConnection = getReplicationConn()) {\nPGReplicationStream stream = logicalReplication.createReplicationStream(pgConnection, walPosition.getLogSequenceNumber(), slotName);\nwhile (isRunning()) {\nByteBuffer message = stream.readPending();\nif (null == message) {\nThreadUtil.sleep(10L);\ncontinue;\n}\nAbstractWalEvent event = decodingPlugin.decode(message,\nnew OpenGaussLogSequenceNumber(stream.getLastReceiveLSN()));\nRecord record = walEventConverter.convert(event);\nif (!(event instanceof PlaceholderEvent) && log.isDebugEnabled()) {\nlog.debug(\"dump, event={}, record={}\", event, record);\n}\nupdateRecordOldValue(record);\npushRecord(record);\n}\n} catch (final SQLException ex) {\nif (ex.getMessage().contains(\"is already active\")) {\nreturn;\n}\nthrow new ScalingTaskExecuteException(ex);\n}\n}\nprivate void pushRecord(final Record record) {\ntry {\nchannel.pushRecord(record);\n} catch (final InterruptedException ignored) {\n}\n}\n}", + "context_after": "class OpenGaussWalDumper extends AbstractScalingExecutor implements IncrementalDumper {\nprivate final WalPosition walPosition;\nprivate final DumperConfiguration dumperConfig;\nprivate final OpenGaussLogicalReplication logicalReplication = new OpenGaussLogicalReplication();\nprivate final WalEventConverter walEventConverter;\nprivate String slotName = OpenGaussLogicalReplication.SLOT_NAME_PREFIX;\n@Setter\nprivate Channel channel;\npublic OpenGaussWalDumper(final DumperConfiguration dumperConfig, final ScalingPosition position) {\nwalPosition = (WalPosition) position;\nif (!StandardJDBCDataSourceConfiguration.class.equals(dumperConfig.getDataSourceConfig().getClass())) {\nthrow new UnsupportedOperationException(\"PostgreSQLWalDumper only support JDBCDataSourceConfiguration\");\n}\nthis.dumperConfig = dumperConfig;\nwalEventConverter = new WalEventConverter(dumperConfig);\n}\n@Override\npublic void start() {\nsuper.start();\ndump();\n}\nprivate PgConnection getReplicationConn() throws SQLException {\nreturn logicalReplication\n.createPgConnection((StandardJDBCDataSourceConfiguration) 
dumperConfig.getDataSourceConfig())\n.unwrap(PgConnection.class);\n}\nprivate MppdbDecodingPlugin initReplication() {\nMppdbDecodingPlugin plugin = null;\ntry {\nDataSource dataSource = dumperConfig.getDataSourceConfig().toDataSource();\ntry (Connection conn = dataSource.getConnection()) {\nslotName = OpenGaussLogicalReplication.getUniqueSlotName(conn);\nOpenGaussLogicalReplication.createIfNotExists(conn);\nOpenGaussTimestampUtils utils = new OpenGaussTimestampUtils(conn.unwrap(PgConnection.class).getTimestampUtils());\nplugin = new MppdbDecodingPlugin(utils);\n}\n} catch (SQLException sqlExp) {\nlog.warn(\"create replication slot failed!\");\n}\nreturn plugin;\n}\nprivate void dump() {\nDecodingPlugin decodingPlugin = initReplication();\ntry (PgConnection pgConnection = getReplicationConn()) {\nPGReplicationStream stream = logicalReplication.createReplicationStream(pgConnection, walPosition.getLogSequenceNumber(), slotName);\nwhile (isRunning()) {\nByteBuffer message = stream.readPending();\nif (null == message) {\nThreadUtil.sleep(10L);\ncontinue;\n}\nAbstractWalEvent event = decodingPlugin.decode(message,\nnew OpenGaussLogSequenceNumber(stream.getLastReceiveLSN()));\nRecord record = walEventConverter.convert(event);\nif (!(event instanceof PlaceholderEvent) && log.isDebugEnabled()) {\nlog.debug(\"dump, event={}, record={}\", event, record);\n}\nupdateRecordOldValue(record);\npushRecord(record);\n}\n} catch (final SQLException ex) {\nif (ex.getMessage().contains(\"is already active\")) {\nreturn;\n}\nthrow new ScalingTaskExecuteException(ex);\n}\n}\nprivate void pushRecord(final Record record) {\ntry {\nchannel.pushRecord(record);\n} catch (final InterruptedException ignored) {\n}\n}\n}" + }, + { + "comment": "remove unnecessary change", + "method_body": "public AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor executor) throws Exception {\nif (!ctx.getSessionVariable().isEnableNereidsDML()) {\ntry {\nctx.getSessionVariable().enableFallbackToOriginalPlannerOnce();\n} catch (Exception e) {\nthrow new AnalysisException(\"failed to set fallback to original planner to true\", e);\n}\nthrow new AnalysisException(\"Nereids DML is disabled, will try to fall back to the original planner\");\n}\nTableIf targetTableIf = InsertUtils.getTargetTable(logicalQuery, ctx);\nif (!Env.getCurrentEnv().getAccessManager()\n.checkTblPriv(ConnectContext.get(), targetTableIf.getDatabase().getCatalog().getName(),\ntargetTableIf.getDatabase().getFullName(), targetTableIf.getName(),\nPrivPredicate.LOAD)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, \"LOAD\",\nConnectContext.get().getQualifiedUser(), ConnectContext.get().getRemoteIP(),\ntargetTableIf.getDatabase().getFullName() + \".\" + targetTableIf.getName());\n}\nAbstractInsertExecutor insertExecutor;\ntargetTableIf.readLock();\ntry {\nthis.logicalQuery = (LogicalPlan) InsertUtils.normalizePlan(logicalQuery, targetTableIf);\nLogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext());\nNereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext());\nplanner.plan(logicalPlanAdapter, ctx.getSessionVariable().toThrift());\nexecutor.setPlanner(planner);\nexecutor.checkBlockRules();\nif (ctx.getMysqlChannel() != null) {\nctx.getMysqlChannel().reset();\n}\nOptional> plan = (planner.getPhysicalPlan()\n.>>collect(PhysicalSink.class::isInstance)).stream()\n.findAny();\nPreconditions.checkArgument(plan.isPresent(), \"insert into command must contain target 
table\");\nPhysicalSink physicalSink = plan.get();\nDataSink sink = planner.getFragments().get(0).getSink();\nString label = this.labelName.orElse(String.format(\"label_%x_%x\", ctx.queryId().hi, ctx.queryId().lo));\nif (physicalSink instanceof PhysicalOlapTableSink) {\nif (GroupCommitInserter.groupCommit(ctx, sink, physicalSink)) {\nthrow new AnalysisException(\"group commit is not supported in Nereids now\");\n}\nOlapTable olapTable = (OlapTable) targetTableIf;\ninsertExecutor = new OlapInsertExecutor(ctx, olapTable, label, planner, insertCtx);\nboolean isEnableMemtableOnSinkNode =\nolapTable.getTableProperty().getUseSchemaLightChange()\n? insertExecutor.getCoordinator().getQueryOptions().isEnableMemtableOnSinkNode()\n: false;\ninsertExecutor.getCoordinator().getQueryOptions()\n.setEnableMemtableOnSinkNode(isEnableMemtableOnSinkNode);\n} else if (physicalSink instanceof PhysicalHiveTableSink) {\nHMSExternalTable hiveExternalTable = (HMSExternalTable) targetTableIf;\ninsertExecutor = new HiveInsertExecutor(ctx, hiveExternalTable, label, planner, insertCtx);\n} else {\nthrow new AnalysisException(\"insert into command only support olap table\");\n}\ninsertExecutor.beginTransaction();\ninsertExecutor.finalizeSink(planner.getFragments().get(0), sink, physicalSink);\n} finally {\ntargetTableIf.readUnlock();\n}\nexecutor.setProfileType(ProfileType.LOAD);\nexecutor.setCoord(insertExecutor.getCoordinator());\nreturn insertExecutor;\n}", + "target_code": ".checkTblPriv(ConnectContext.get(), targetTableIf.getDatabase().getCatalog().getName(),", + "method_body_after": "public AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor executor) throws Exception {\nif (!ctx.getSessionVariable().isEnableNereidsDML()) {\ntry {\nctx.getSessionVariable().enableFallbackToOriginalPlannerOnce();\n} catch (Exception e) {\nthrow new AnalysisException(\"failed to set fallback to original planner to true\", e);\n}\nthrow new AnalysisException(\"Nereids DML is disabled, will try to fall back to the original planner\");\n}\nTableIf targetTableIf = InsertUtils.getTargetTable(logicalQuery, ctx);\nif (!Env.getCurrentEnv().getAccessManager()\n.checkTblPriv(ConnectContext.get(), targetTableIf.getDatabase().getCatalog().getName(),\ntargetTableIf.getDatabase().getFullName(), targetTableIf.getName(),\nPrivPredicate.LOAD)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, \"LOAD\",\nConnectContext.get().getQualifiedUser(), ConnectContext.get().getRemoteIP(),\ntargetTableIf.getDatabase().getFullName() + \".\" + targetTableIf.getName());\n}\nAbstractInsertExecutor insertExecutor;\ntargetTableIf.readLock();\ntry {\nthis.logicalQuery = (LogicalPlan) InsertUtils.normalizePlan(logicalQuery, targetTableIf);\nLogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext());\nNereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext());\nplanner.plan(logicalPlanAdapter, ctx.getSessionVariable().toThrift());\nexecutor.setPlanner(planner);\nexecutor.checkBlockRules();\nif (ctx.getMysqlChannel() != null) {\nctx.getMysqlChannel().reset();\n}\nOptional> plan = (planner.getPhysicalPlan()\n.>>collect(PhysicalSink.class::isInstance)).stream()\n.findAny();\nPreconditions.checkArgument(plan.isPresent(), \"insert into command must contain target table\");\nPhysicalSink physicalSink = plan.get();\nDataSink sink = planner.getFragments().get(0).getSink();\nString label = this.labelName.orElse(String.format(\"label_%x_%x\", ctx.queryId().hi, 
ctx.queryId().lo));\nif (physicalSink instanceof PhysicalOlapTableSink) {\nif (GroupCommitInserter.groupCommit(ctx, sink, physicalSink)) {\nthrow new AnalysisException(\"group commit is not supported in Nereids now\");\n}\nOlapTable olapTable = (OlapTable) targetTableIf;\ninsertExecutor = new OlapInsertExecutor(ctx, olapTable, label, planner, insertCtx);\nboolean isEnableMemtableOnSinkNode =\nolapTable.getTableProperty().getUseSchemaLightChange()\n? insertExecutor.getCoordinator().getQueryOptions().isEnableMemtableOnSinkNode()\n: false;\ninsertExecutor.getCoordinator().getQueryOptions()\n.setEnableMemtableOnSinkNode(isEnableMemtableOnSinkNode);\n} else if (physicalSink instanceof PhysicalHiveTableSink) {\nHMSExternalTable hiveExternalTable = (HMSExternalTable) targetTableIf;\ninsertExecutor = new HiveInsertExecutor(ctx, hiveExternalTable, label, planner, insertCtx);\n} else {\nthrow new AnalysisException(\"insert into command only support olap table\");\n}\ninsertExecutor.beginTransaction();\ninsertExecutor.finalizeSink(planner.getFragments().get(0), sink, physicalSink);\n} finally {\ntargetTableIf.readUnlock();\n}\nexecutor.setProfileType(ProfileType.LOAD);\nexecutor.setCoord(insertExecutor.getCoordinator());\nreturn insertExecutor;\n}", + "context_before": "class InsertIntoTableCommand extends Command implements ForwardWithSync, Explainable {\npublic static final Logger LOG = LogManager.getLogger(InsertIntoTableCommand.class);\nprivate LogicalPlan logicalQuery;\nprivate Optional labelName;\n/**\n* When source it's from job scheduler,it will be set.\n*/\nprivate long jobId;\nprivate Optional insertCtx;\n/**\n* constructor\n*/\npublic InsertIntoTableCommand(LogicalPlan logicalQuery, Optional labelName,\nOptional insertCtx) {\nsuper(PlanType.INSERT_INTO_TABLE_COMMAND);\nthis.logicalQuery = Objects.requireNonNull(logicalQuery, \"logicalQuery should not be null\");\nthis.labelName = Objects.requireNonNull(labelName, \"labelName should not be null\");\nthis.insertCtx = insertCtx;\n}\npublic Optional getLabelName() {\nreturn labelName;\n}\npublic void setLabelName(Optional labelName) {\nthis.labelName = labelName;\n}\npublic void setJobId(long jobId) {\nthis.jobId = jobId;\n}\n@Override\npublic void run(ConnectContext ctx, StmtExecutor executor) throws Exception {\nrunInternal(ctx, executor);\n}\npublic void runWithUpdateInfo(ConnectContext ctx, StmtExecutor executor,\nLoadStatistic loadStatistic) throws Exception {\nrunInternal(ctx, executor);\n}\n/**\n* This function is used to generate the plan for Nereids.\n* There are some load functions that only need to the plan, such as stream_load.\n* Therefore, this section will be presented separately.\n*/\nprivate void runInternal(ConnectContext ctx, StmtExecutor executor) throws Exception {\nAbstractInsertExecutor insertExecutor = initPlan(ctx, executor);\ninsertExecutor.executeSingleInsert(executor, jobId);\n}\n@Override\npublic Plan getExplainPlan(ConnectContext ctx) {\nreturn InsertUtils.getPlanForExplain(ctx, this.logicalQuery);\n}\n@Override\npublic R accept(PlanVisitor visitor, C context) {\nreturn visitor.visitInsertIntoTableCommand(this, context);\n}\n}", + "context_after": "class InsertIntoTableCommand extends Command implements ForwardWithSync, Explainable {\npublic static final Logger LOG = LogManager.getLogger(InsertIntoTableCommand.class);\nprivate LogicalPlan logicalQuery;\nprivate Optional labelName;\n/**\n* When source it's from job scheduler,it will be set.\n*/\nprivate long jobId;\nprivate Optional insertCtx;\n/**\n* 
constructor\n*/\npublic InsertIntoTableCommand(LogicalPlan logicalQuery, Optional labelName,\nOptional insertCtx) {\nsuper(PlanType.INSERT_INTO_TABLE_COMMAND);\nthis.logicalQuery = Objects.requireNonNull(logicalQuery, \"logicalQuery should not be null\");\nthis.labelName = Objects.requireNonNull(labelName, \"labelName should not be null\");\nthis.insertCtx = insertCtx;\n}\npublic Optional getLabelName() {\nreturn labelName;\n}\npublic void setLabelName(Optional labelName) {\nthis.labelName = labelName;\n}\npublic void setJobId(long jobId) {\nthis.jobId = jobId;\n}\n@Override\npublic void run(ConnectContext ctx, StmtExecutor executor) throws Exception {\nrunInternal(ctx, executor);\n}\npublic void runWithUpdateInfo(ConnectContext ctx, StmtExecutor executor,\nLoadStatistic loadStatistic) throws Exception {\nrunInternal(ctx, executor);\n}\n/**\n* This function is used to generate the plan for Nereids.\n* There are some load functions that only need to the plan, such as stream_load.\n* Therefore, this section will be presented separately.\n*/\nprivate void runInternal(ConnectContext ctx, StmtExecutor executor) throws Exception {\nAbstractInsertExecutor insertExecutor = initPlan(ctx, executor);\ninsertExecutor.executeSingleInsert(executor, jobId);\n}\n@Override\npublic Plan getExplainPlan(ConnectContext ctx) {\nreturn InsertUtils.getPlanForExplain(ctx, this.logicalQuery);\n}\n@Override\npublic R accept(PlanVisitor visitor, C context) {\nreturn visitor.visitInsertIntoTableCommand(this, context);\n}\n}" + }, + { + "comment": "loose -> lose", + "method_body": "void assertParkedCountsByApplication(long... nums) {\nlong expected = LongStream.of(nums).filter(value -> value > 0L).sum();\nlong actual = (long) nodeRepository.getNodes(Node.State.parked).size();\nassertEquals(expected, actual);\n}", + "target_code": "", + "method_body_after": "void assertParkedCountsByApplication(long... 
nums) {\nlong expected = LongStream.of(nums).filter(value -> value > 0L).sum();\nlong actual = (long) nodeRepository.getNodes(Node.State.parked).size();\nassertEquals(expected, actual);\n}", + "context_before": "class NodeRetirerTester {\npublic static final Zone zone = new Zone(Environment.prod, RegionName.from(\"us-east\"));\npublic final ManualClock clock = new ManualClock();\npublic final NodeRepository nodeRepository;\nprivate final FlavorSpareChecker flavorSpareChecker = mock(FlavorSpareChecker.class);\nprivate final MockDeployer deployer;\nprivate final JobControl jobControl;\nprivate final List flavors;\nprivate final Map apps = new LinkedHashMap<>();\nprivate final Orchestrator orchestrator = mock(Orchestrator.class);\nprivate RetiredExpirer retiredExpirer;\nprivate InactiveExpirer inactiveExpirer;\nprivate int nextNodeId = 0;\nNodeRetirerTester(NodeFlavors nodeFlavors) {\nCurator curator = new MockCurator();\nnodeRepository = new NodeRepository(nodeFlavors, curator, clock, zone, new MockNameResolver().mockAnyLookup(),\nnew DockerImage(\"docker-registry.domain.tld:8080/dist/vespa\"), true);\njobControl = new JobControl(nodeRepository.database());\nNodeRepositoryProvisioner provisioner = new NodeRepositoryProvisioner(nodeRepository, nodeFlavors, zone, new MockProvisionServiceProvider(), new InMemoryFlagSource());\ndeployer = new MockDeployer(provisioner, clock, apps);\nflavors = nodeFlavors.getFlavors().stream().sorted(Comparator.comparing(Flavor::name)).collect(Collectors.toList());\ntry {\ndoThrow(new RuntimeException()).when(orchestrator).acquirePermissionToRemove(any());\n} catch (OrchestrationException e) {\ne.printStackTrace();\n}\n}\nNodeRetirer makeNodeRetirer(RetirementPolicy policy) {\nreturn new NodeRetirer(nodeRepository, flavorSpareChecker, Duration.ofDays(1), deployer, jobControl, policy);\n}\nvoid createReadyNodesByFlavor(int... nums) {\nList nodes = new ArrayList<>();\nfor (int i = 0; i < nums.length; i++) {\nFlavor flavor = flavors.get(i);\nfor (int j = 0; j < nums[i]; j++) {\nint id = nextNodeId++;\nnodes.add(nodeRepository.createNode(\"node\" + id, \"host\" + id + \".test.yahoo.com\",\nCollections.singleton(\"::1\"), Optional.empty(), flavor, NodeType.tenant));\n}\n}\nnodes = nodeRepository.addNodes(nodes);\nnodes = nodeRepository.setDirty(nodes, Agent.system, getClass().getSimpleName());\nnodeRepository.setReady(nodes, Agent.system, getClass().getSimpleName());\n}\nvoid deployApp(String tenantName, String applicationName, int[] flavorIds, int[] numNodes) {\nfinal ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, \"default\");\nfinal List clusterContexts = new ArrayList<>();\nfor (int i = 0; i < flavorIds.length; i++) {\nFlavor flavor = flavors.get(flavorIds[i]);\nClusterSpec cluster = ClusterSpec.request(ClusterSpec.Type.container, ClusterSpec.Id.from(\"cluster-\" + i), Version.fromString(\"6.99\"), false, Collections.emptySet());\nCapacity capacity = Capacity.fromNodeCount(numNodes[i], Optional.of(flavor.name()), false, true);\nint numGroups = numNodes[i] % 2 == 0 ? 
2 : 1;\nclusterContexts.add(new MockDeployer.ClusterContext(applicationId, cluster, capacity, numGroups));\n}\napps.put(applicationId, new MockDeployer.ApplicationContext(applicationId, clusterContexts));\ndeployer.deployFromLocalActive(applicationId, Duration.ZERO).get().activate();\n}\nvoid iterateMaintainers() {\nif (retiredExpirer == null) {\nretiredExpirer = new RetiredExpirer(nodeRepository, orchestrator, deployer, clock, Duration.ofDays(30), Duration.ofMinutes(10), jobControl);\ninactiveExpirer = new InactiveExpirer(nodeRepository, clock, Duration.ofMinutes(10), jobControl);\n}\nclock.advance(Duration.ofMinutes(11));\nretiredExpirer.maintain();\nclock.advance(Duration.ofMinutes(11));\ninactiveExpirer.maintain();\n}\nvoid setNumberAllowedUnallocatedRetirementsPerFlavor(int... numAllowed) {\nfor (int i = 0; i < numAllowed.length; i++) {\nBoolean[] responses = new Boolean[numAllowed[i]];\nArrays.fill(responses, true);\nresponses[responses.length - 1 ] = false;\nwhen(flavorSpareChecker.canRetireUnallocatedNodeWithFlavor(eq(flavors.get(i)))).thenReturn(true, responses);\n}\n}\nvoid setNumberAllowedAllocatedRetirementsPerFlavor(int... numAllowed) {\nfor (int i = 0; i < numAllowed.length; i++) {\nBoolean[] responses = new Boolean[numAllowed[i]];\nArrays.fill(responses, true);\nresponses[responses.length - 1] = false;\nwhen(flavorSpareChecker.canRetireAllocatedNodeWithFlavor(eq(flavors.get(i)))).thenReturn(true, responses);\n}\n}\nvoid assertCountsForStateByFlavor(Node.State state, long... nums) {\nMap expected = expectedCountsByFlavor(nums);\nMap actual = nodeRepository.getNodes(state).stream()\n.collect(Collectors.groupingBy(Node::flavor, Collectors.counting()));\nassertEquals(expected, actual);\n}\nvoid assertRetiringCountsByApplication(long... nums) {\nMap expected = expectedCountsByApplication(nums);\nMap actual = nodeRepository.getNodes().stream()\n.filter(node -> node.status().wantToRetire())\n.filter(node -> node.allocation().isPresent())\n.filter(node -> node.allocation().get().membership().retired())\n.filter(node -> node.state() != Node.State.parked)\n.collect(Collectors.groupingBy(node -> node.allocation().get().owner(), Collectors.counting()));\nassertEquals(expected, actual);\n}\nprivate Map expectedCountsByFlavor(long... nums) {\nMap countsByFlavor = new HashMap<>();\nfor (int i = 0; i < nums.length; i++) {\nif (nums[i] < 0) continue;\nFlavor flavor = flavors.get(i);\ncountsByFlavor.put(flavor, nums[i]);\n}\nreturn countsByFlavor;\n}\nprivate Map expectedCountsByApplication(long... nums) {\nMap countsByApplicationId = new HashMap<>();\nIterator iterator = apps.keySet().iterator();\nfor (int i = 0; iterator.hasNext(); i++) {\nApplicationId applicationId = iterator.next();\nif (nums[i] < 0) continue;\ncountsByApplicationId.put(applicationId, nums[i]);\n}\nreturn countsByApplicationId;\n}\nstatic NodeFlavors makeFlavors(int numFlavors) {\nFlavorConfigBuilder flavorConfigBuilder = new FlavorConfigBuilder();\nfor (int i = 0; i < numFlavors; i++) {\nflavorConfigBuilder.addFlavor(\"flavor-\" + i, 1. /* cpu*/, 3. /* mem GB*/, 2. 
/*disk GB*/, Flavor.Type.BARE_METAL);\n}\nreturn new NodeFlavors(flavorConfigBuilder.build());\n}\n}", + "context_after": "class NodeRetirerTester {\npublic static final Zone zone = new Zone(Environment.prod, RegionName.from(\"us-east\"));\npublic final ManualClock clock = new ManualClock();\npublic final NodeRepository nodeRepository;\nprivate final FlavorSpareChecker flavorSpareChecker = mock(FlavorSpareChecker.class);\nprivate final MockDeployer deployer;\nprivate final JobControl jobControl;\nprivate final List flavors;\nprivate final Map apps = new LinkedHashMap<>();\nprivate final Orchestrator orchestrator = mock(Orchestrator.class);\nprivate RetiredExpirer retiredExpirer;\nprivate InactiveExpirer inactiveExpirer;\nprivate int nextNodeId = 0;\nNodeRetirerTester(NodeFlavors nodeFlavors) {\nCurator curator = new MockCurator();\nnodeRepository = new NodeRepository(nodeFlavors, curator, clock, zone, new MockNameResolver().mockAnyLookup(),\nnew DockerImage(\"docker-registry.domain.tld:8080/dist/vespa\"), true);\njobControl = new JobControl(nodeRepository.database());\nNodeRepositoryProvisioner provisioner = new NodeRepositoryProvisioner(nodeRepository, nodeFlavors, zone, new MockProvisionServiceProvider(), new InMemoryFlagSource());\ndeployer = new MockDeployer(provisioner, clock, apps);\nflavors = nodeFlavors.getFlavors().stream().sorted(Comparator.comparing(Flavor::name)).collect(Collectors.toList());\ntry {\ndoThrow(new RuntimeException()).when(orchestrator).acquirePermissionToRemove(any());\n} catch (OrchestrationException e) {\ne.printStackTrace();\n}\n}\nNodeRetirer makeNodeRetirer(RetirementPolicy policy) {\nreturn new NodeRetirer(nodeRepository, flavorSpareChecker, Duration.ofDays(1), deployer, jobControl, policy);\n}\nvoid createReadyNodesByFlavor(int... nums) {\nList nodes = new ArrayList<>();\nfor (int i = 0; i < nums.length; i++) {\nFlavor flavor = flavors.get(i);\nfor (int j = 0; j < nums[i]; j++) {\nint id = nextNodeId++;\nnodes.add(nodeRepository.createNode(\"node\" + id, \"host\" + id + \".test.yahoo.com\",\nCollections.singleton(\"::1\"), Optional.empty(), flavor, NodeType.tenant));\n}\n}\nnodes = nodeRepository.addNodes(nodes);\nnodes = nodeRepository.setDirty(nodes, Agent.system, getClass().getSimpleName());\nnodeRepository.setReady(nodes, Agent.system, getClass().getSimpleName());\n}\nvoid deployApp(String tenantName, String applicationName, int[] flavorIds, int[] numNodes) {\nfinal ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, \"default\");\nfinal List clusterContexts = new ArrayList<>();\nfor (int i = 0; i < flavorIds.length; i++) {\nFlavor flavor = flavors.get(flavorIds[i]);\nClusterSpec cluster = ClusterSpec.request(ClusterSpec.Type.container, ClusterSpec.Id.from(\"cluster-\" + i), Version.fromString(\"6.99\"), false, Collections.emptySet());\nCapacity capacity = Capacity.fromNodeCount(numNodes[i], Optional.of(flavor.name()), false, true);\nint numGroups = numNodes[i] % 2 == 0 ? 
2 : 1;\nclusterContexts.add(new MockDeployer.ClusterContext(applicationId, cluster, capacity, numGroups));\n}\napps.put(applicationId, new MockDeployer.ApplicationContext(applicationId, clusterContexts));\ndeployer.deployFromLocalActive(applicationId, Duration.ZERO).get().activate();\n}\nvoid iterateMaintainers() {\nif (retiredExpirer == null) {\nretiredExpirer = new RetiredExpirer(nodeRepository, orchestrator, deployer, clock, Duration.ofDays(30), Duration.ofMinutes(10), jobControl);\ninactiveExpirer = new InactiveExpirer(nodeRepository, clock, Duration.ofMinutes(10), jobControl);\n}\nclock.advance(Duration.ofMinutes(11));\nretiredExpirer.maintain();\nclock.advance(Duration.ofMinutes(11));\ninactiveExpirer.maintain();\n}\nvoid setNumberAllowedUnallocatedRetirementsPerFlavor(int... numAllowed) {\nfor (int i = 0; i < numAllowed.length; i++) {\nBoolean[] responses = new Boolean[numAllowed[i]];\nArrays.fill(responses, true);\nresponses[responses.length - 1 ] = false;\nwhen(flavorSpareChecker.canRetireUnallocatedNodeWithFlavor(eq(flavors.get(i)))).thenReturn(true, responses);\n}\n}\nvoid setNumberAllowedAllocatedRetirementsPerFlavor(int... numAllowed) {\nfor (int i = 0; i < numAllowed.length; i++) {\nBoolean[] responses = new Boolean[numAllowed[i]];\nArrays.fill(responses, true);\nresponses[responses.length - 1] = false;\nwhen(flavorSpareChecker.canRetireAllocatedNodeWithFlavor(eq(flavors.get(i)))).thenReturn(true, responses);\n}\n}\nvoid assertCountsForStateByFlavor(Node.State state, long... nums) {\nMap expected = expectedCountsByFlavor(nums);\nMap actual = nodeRepository.getNodes(state).stream()\n.collect(Collectors.groupingBy(Node::flavor, Collectors.counting()));\nassertEquals(expected, actual);\n}\nvoid assertRetiringCountsByApplication(long... nums) {\nMap expected = expectedCountsByApplication(nums);\nMap actual = nodeRepository.getNodes().stream()\n.filter(node -> node.status().wantToRetire())\n.filter(node -> node.allocation().isPresent())\n.filter(node -> node.allocation().get().membership().retired())\n.filter(node -> node.state() != Node.State.parked)\n.collect(Collectors.groupingBy(node -> node.allocation().get().owner(), Collectors.counting()));\nassertEquals(expected, actual);\n}\nprivate Map expectedCountsByFlavor(long... nums) {\nMap countsByFlavor = new HashMap<>();\nfor (int i = 0; i < nums.length; i++) {\nif (nums[i] < 0) continue;\nFlavor flavor = flavors.get(i);\ncountsByFlavor.put(flavor, nums[i]);\n}\nreturn countsByFlavor;\n}\nprivate Map expectedCountsByApplication(long... nums) {\nMap countsByApplicationId = new HashMap<>();\nIterator iterator = apps.keySet().iterator();\nfor (int i = 0; iterator.hasNext(); i++) {\nApplicationId applicationId = iterator.next();\nif (nums[i] < 0) continue;\ncountsByApplicationId.put(applicationId, nums[i]);\n}\nreturn countsByApplicationId;\n}\nstatic NodeFlavors makeFlavors(int numFlavors) {\nFlavorConfigBuilder flavorConfigBuilder = new FlavorConfigBuilder();\nfor (int i = 0; i < numFlavors; i++) {\nflavorConfigBuilder.addFlavor(\"flavor-\" + i, 1. /* cpu*/, 3. /* mem GB*/, 2. /*disk GB*/, Flavor.Type.BARE_METAL);\n}\nreturn new NodeFlavors(flavorConfigBuilder.build());\n}\n}" + }, + { + "comment": "Another observation is that in `NotifyingTransactionManager#commit()` we use `this.getTransaction()` for the payload in the beforeDestroyed event but we use `destroyed(this);` for the payload in the destroyed event. 
If the payload is meant to be the same (which I think it should) then I think the call to `this.getTransaction()` should be cached and the same object used for both events", + "method_body": "public void begin() throws NotSupportedException, SystemException {\ndelegate.begin();\ninitialized(delegate);\n}", + "target_code": "initialized(delegate);", + "method_body_after": "public void begin() throws NotSupportedException, SystemException {\ndelegate.begin();\ninitialized(getTransactionId());\n}", + "context_before": "class NotifyingUserTransaction extends TransactionScopedNotifier implements UserTransaction {\nprivate static final Logger LOG = Logger.getLogger(NotifyingUserTransaction.class);\nprivate final UserTransaction delegate;\npublic NotifyingUserTransaction(UserTransaction delegate) {\nthis.delegate = delegate;\n}\n@Override\n@Override\npublic void commit() throws RollbackException, HeuristicMixedException, HeuristicRollbackException, SecurityException,\nIllegalStateException, SystemException {\nbeforeDestroyed(delegate);\ntry {\ndelegate.commit();\n} finally {\ndestroyed(delegate);\n}\n}\n@Override\npublic void rollback() throws IllegalStateException, SecurityException, SystemException {\ntry {\nbeforeDestroyed(delegate);\n} catch (Throwable t) {\nLOG.error(\"Failed to fire @BeforeDestroyed(TransactionScoped.class)\", t);\n}\ntry {\ndelegate.rollback();\n} finally {\ndestroyed(delegate);\n}\n}\n@Override\npublic void setRollbackOnly() throws IllegalStateException, SystemException {\ndelegate.setRollbackOnly();\n}\n@Override\npublic int getStatus() throws SystemException {\nreturn delegate.getStatus();\n}\n@Override\npublic void setTransactionTimeout(int seconds) throws SystemException {\ndelegate.setTransactionTimeout(seconds);\n}\n}", + "context_after": "class NotifyingUserTransaction extends TransactionScopedNotifier implements UserTransaction {\nprivate static final Logger LOG = Logger.getLogger(NotifyingUserTransaction.class);\nprivate final UserTransaction delegate;\npublic NotifyingUserTransaction(UserTransaction delegate) {\nthis.delegate = delegate;\n}\n@Override\n@Override\npublic void commit() throws RollbackException, HeuristicMixedException, HeuristicRollbackException, SecurityException,\nIllegalStateException, SystemException {\nTransactionId id = getTransactionId();\nbeforeDestroyed(id);\ntry {\ndelegate.commit();\n} finally {\ndestroyed(id);\n}\n}\n@Override\npublic void rollback() throws IllegalStateException, SecurityException, SystemException {\nTransactionId id = getTransactionId();\ntry {\nbeforeDestroyed(id);\n} catch (Throwable t) {\nLOG.error(\"Failed to fire @BeforeDestroyed(TransactionScoped.class)\", t);\n}\ntry {\ndelegate.rollback();\n} finally {\ndestroyed(id);\n}\n}\n@Override\npublic void setRollbackOnly() throws IllegalStateException, SystemException {\ndelegate.setRollbackOnly();\n}\n@Override\npublic int getStatus() throws SystemException {\nreturn delegate.getStatus();\n}\n@Override\npublic void setTransactionTimeout(int seconds) throws SystemException {\ndelegate.setTransactionTimeout(seconds);\n}\n}" + }, + { + "comment": "Yes, the `targetValue == null` means that the default value is used (`TemplateData.class` in this particular case). The default values are stored in the annotation defining class, not in the annotation instance. See also the `org.jboss.jandex.AnnotationInstance.valueWithDefault(IndexView, String)` method and friends. 
", + "method_body": "public boolean isTargetAnnotatedType() {\nAnnotationValue targetValue = annotationInstance.value(ValueResolverGenerator.TARGET);\nreturn targetValue == null || targetValue.asClass().name().equals(ValueResolverGenerator.TEMPLATE_DATA);\n}", + "target_code": "return targetValue == null || targetValue.asClass().name().equals(ValueResolverGenerator.TEMPLATE_DATA);", + "method_body_after": "public boolean isTargetAnnotatedType() {\nAnnotationValue targetValue = annotationInstance.value(ValueResolverGenerator.TARGET);\nreturn targetValue == null || targetValue.asClass().name().equals(ValueResolverGenerator.TEMPLATE_DATA);\n}", + "context_before": "class TemplateDataBuildItem extends MultiBuildItem {\nprivate final ClassInfo targetClass;\nprivate final String namespace;\nprivate final String[] ignore;\nprivate final Pattern[] ignorePatterns;\nprivate final boolean ignoreSuperclasses;\nprivate final boolean properties;\nprivate final AnnotationInstance annotationInstance;\nTemplateDataBuildItem(AnnotationInstance annotationInstance, ClassInfo targetClass) {\nthis.annotationInstance = annotationInstance;\nAnnotationValue ignoreValue = annotationInstance.value(ValueResolverGenerator.IGNORE);\nAnnotationValue propertiesValue = annotationInstance.value(ValueResolverGenerator.PROPERTIES);\nAnnotationValue namespaceValue = annotationInstance.value(ValueResolverGenerator.NAMESPACE);\nAnnotationValue ignoreSuperclassesValue = annotationInstance.value(ValueResolverGenerator.IGNORE_SUPERCLASSES);\nthis.targetClass = targetClass;\nString namespace = namespaceValue != null ? namespaceValue.asString() : TemplateData.UNDERSCORED_FQCN;\nif (namespace.equals(TemplateData.UNDERSCORED_FQCN)) {\nnamespace = ValueResolverGenerator\n.underscoredFullyQualifiedName(targetClass.name().toString());\n} else if (namespace.equals(TemplateData.SIMPLENAME)) {\nnamespace = ValueResolverGenerator.simpleName(targetClass);\n}\nthis.namespace = namespace;\nthis.ignore = ignoreValue != null ? ignoreValue.asStringArray() : new String[] {};\nif (ignore.length > 0) {\nignorePatterns = new Pattern[ignore.length];\nfor (int i = 0; i < ignore.length; i++) {\nignorePatterns[i] = Pattern.compile(ignore[i]);\n}\n} else {\nignorePatterns = null;\n}\nthis.ignoreSuperclasses = ignoreSuperclassesValue != null ? ignoreSuperclassesValue.asBoolean() : false;\nthis.properties = propertiesValue != null ? 
propertiesValue.asBoolean() : false;\n}\npublic ClassInfo getTargetClass() {\nreturn targetClass;\n}\npublic boolean hasNamespace() {\nreturn namespace != null;\n}\npublic String getNamespace() {\nreturn namespace;\n}\npublic String[] getIgnore() {\nreturn ignore;\n}\npublic boolean isIgnoreSuperclasses() {\nreturn ignoreSuperclasses;\n}\npublic boolean isProperties() {\nreturn properties;\n}\npublic AnnotationInstance getAnnotationInstance() {\nreturn annotationInstance;\n}\nboolean filter(AnnotationTarget target) {\nString name = null;\nif (target.kind() == Kind.METHOD) {\nMethodInfo method = target.asMethod();\nif (properties && !method.parameterTypes().isEmpty()) {\nreturn false;\n}\nname = method.name();\n} else if (target.kind() == Kind.FIELD) {\nFieldInfo field = target.asField();\nname = field.name();\n}\nif (ignorePatterns != null) {\nfor (Pattern ignorePattern : ignorePatterns) {\nif (ignorePattern.matcher(name).matches()) {\nreturn false;\n}\n}\n}\nreturn true;\n}\n@Override\npublic String toString() {\nreturn \"TemplateDataBuildItem [targetClass=\" + targetClass + \", namespace=\" + namespace + \", ignore=\"\n+ Arrays.toString(ignore) + \", ignorePatterns=\" + Arrays.toString(ignorePatterns) + \", ignoreSuperclasses=\"\n+ ignoreSuperclasses + \", properties=\" + properties + \"]\";\n}\n}", + "context_after": "class TemplateDataBuildItem extends MultiBuildItem {\nprivate final ClassInfo targetClass;\nprivate final String namespace;\nprivate final String[] ignore;\nprivate final Pattern[] ignorePatterns;\nprivate final boolean ignoreSuperclasses;\nprivate final boolean properties;\nprivate final AnnotationInstance annotationInstance;\nTemplateDataBuildItem(AnnotationInstance annotationInstance, ClassInfo targetClass) {\nthis.annotationInstance = annotationInstance;\nAnnotationValue ignoreValue = annotationInstance.value(ValueResolverGenerator.IGNORE);\nAnnotationValue propertiesValue = annotationInstance.value(ValueResolverGenerator.PROPERTIES);\nAnnotationValue namespaceValue = annotationInstance.value(ValueResolverGenerator.NAMESPACE);\nAnnotationValue ignoreSuperclassesValue = annotationInstance.value(ValueResolverGenerator.IGNORE_SUPERCLASSES);\nthis.targetClass = targetClass;\nString namespace = namespaceValue != null ? namespaceValue.asString() : TemplateData.UNDERSCORED_FQCN;\nif (namespace.equals(TemplateData.UNDERSCORED_FQCN)) {\nnamespace = ValueResolverGenerator\n.underscoredFullyQualifiedName(targetClass.name().toString());\n} else if (namespace.equals(TemplateData.SIMPLENAME)) {\nnamespace = ValueResolverGenerator.simpleName(targetClass);\n}\nthis.namespace = namespace;\nthis.ignore = ignoreValue != null ? ignoreValue.asStringArray() : new String[] {};\nif (ignore.length > 0) {\nignorePatterns = new Pattern[ignore.length];\nfor (int i = 0; i < ignore.length; i++) {\nignorePatterns[i] = Pattern.compile(ignore[i]);\n}\n} else {\nignorePatterns = null;\n}\nthis.ignoreSuperclasses = ignoreSuperclassesValue != null ? ignoreSuperclassesValue.asBoolean() : false;\nthis.properties = propertiesValue != null ? 
propertiesValue.asBoolean() : false;\n}\npublic ClassInfo getTargetClass() {\nreturn targetClass;\n}\npublic boolean hasNamespace() {\nreturn namespace != null;\n}\npublic String getNamespace() {\nreturn namespace;\n}\npublic String[] getIgnore() {\nreturn ignore;\n}\npublic boolean isIgnoreSuperclasses() {\nreturn ignoreSuperclasses;\n}\npublic boolean isProperties() {\nreturn properties;\n}\npublic AnnotationInstance getAnnotationInstance() {\nreturn annotationInstance;\n}\nboolean filter(AnnotationTarget target) {\nString name = null;\nif (target.kind() == Kind.METHOD) {\nMethodInfo method = target.asMethod();\nif (properties && !method.parameterTypes().isEmpty()) {\nreturn false;\n}\nname = method.name();\n} else if (target.kind() == Kind.FIELD) {\nFieldInfo field = target.asField();\nname = field.name();\n}\nif (ignorePatterns != null) {\nfor (Pattern ignorePattern : ignorePatterns) {\nif (ignorePattern.matcher(name).matches()) {\nreturn false;\n}\n}\n}\nreturn true;\n}\n@Override\npublic String toString() {\nreturn \"TemplateDataBuildItem [targetClass=\" + targetClass + \", namespace=\" + namespace + \", ignore=\"\n+ Arrays.toString(ignore) + \", ignorePatterns=\" + Arrays.toString(ignorePatterns) + \", ignoreSuperclasses=\"\n+ ignoreSuperclasses + \", properties=\" + properties + \"]\";\n}\n}" + }, + { + "comment": "https://sonarcloud.io/project/issues?id=gwenneg_quarkus&open=AXAxfMfPgbHBif7sECGm&resolutions=FIXED&severities=CRITICAL The `DEFAULT` constant is actually defined in `ByteBufAllocator` which is implemented by `PartialPooledByteBufAllocator`.", + "method_body": "public ByteBuf allocateBuffer(boolean direct) {\nif (direct) {\nreturn ByteBufAllocator.DEFAULT.directBuffer(defaultBufferSize);\n} else {\nreturn ByteBufAllocator.DEFAULT.heapBuffer(defaultBufferSize);\n}\n}", + "target_code": "return ByteBufAllocator.DEFAULT.heapBuffer(defaultBufferSize);", + "method_body_after": "public ByteBuf allocateBuffer(boolean direct) {\nif (direct) {\nreturn ByteBufAllocator.DEFAULT.directBuffer(defaultBufferSize);\n} else {\nreturn ByteBufAllocator.DEFAULT.heapBuffer(defaultBufferSize);\n}\n}", + "context_before": "class UndertowBufferAllocator implements BufferAllocator {\nprivate final boolean defaultDirectBuffers;\nprivate final int defaultBufferSize;\nprivate UndertowBufferAllocator(boolean defaultDirectBuffers, int defaultBufferSize) {\nthis.defaultDirectBuffers = defaultDirectBuffers;\nthis.defaultBufferSize = defaultBufferSize;\n}\n@Override\npublic ByteBuf allocateBuffer() {\nreturn allocateBuffer(defaultDirectBuffers);\n}\n@Override\n@Override\npublic ByteBuf allocateBuffer(int bufferSize) {\nreturn allocateBuffer(defaultDirectBuffers, bufferSize);\n}\n@Override\npublic ByteBuf allocateBuffer(boolean direct, int bufferSize) {\nif (direct) {\nreturn ByteBufAllocator.DEFAULT.directBuffer(bufferSize);\n} else {\nreturn ByteBufAllocator.DEFAULT.heapBuffer(bufferSize);\n}\n}\n@Override\npublic int getBufferSize() {\nreturn defaultBufferSize;\n}\n}", + "context_after": "class UndertowBufferAllocator implements BufferAllocator {\nprivate final boolean defaultDirectBuffers;\nprivate final int defaultBufferSize;\nprivate UndertowBufferAllocator(boolean defaultDirectBuffers, int defaultBufferSize) {\nthis.defaultDirectBuffers = defaultDirectBuffers;\nthis.defaultBufferSize = defaultBufferSize;\n}\n@Override\npublic ByteBuf allocateBuffer() {\nreturn allocateBuffer(defaultDirectBuffers);\n}\n@Override\n@Override\npublic ByteBuf allocateBuffer(int bufferSize) {\nreturn 
allocateBuffer(defaultDirectBuffers, bufferSize);\n}\n@Override\npublic ByteBuf allocateBuffer(boolean direct, int bufferSize) {\nif (direct) {\nreturn ByteBufAllocator.DEFAULT.directBuffer(bufferSize);\n} else {\nreturn ByteBufAllocator.DEFAULT.heapBuffer(bufferSize);\n}\n}\n@Override\npublic int getBufferSize() {\nreturn defaultBufferSize;\n}\n}" + }, + { + "comment": "```suggestion * After computing the number of lines before each range, we can find the line number in original file as numLinesBeforeOffset + lineNumInCurrentOffset ```", + "method_body": "public PCollection expand(PCollection records) {\n/*\n* At this point the line number in RecordWithMetadata contains the relative line offset from the beginning of the read range.\n*\n* To compute the absolute position from the beginning of the input we group the lines within the same ranges, and evaluate the size of each range.\n*/\nTrigger currentTrigger = records.getWindowingStrategy().getTrigger();\nSet allowedTriggers =\nImmutableSet.of(\nRepeatedly.forever(AfterWatermark.pastEndOfWindow()), DefaultTrigger.of());\nPreconditions.checkArgument(\nallowedTriggers.contains(currentTrigger),\nString.format(\n\"getWithRecordNumMetadata only support the default trigger not. %s\", currentTrigger));\nPCollection, Row>> recordsGroupedByFileAndRange =\nrecords\n.apply(\"AddFileNameAndRange\", ParDo.of(new AddFileNameAndRange()))\n.setCoder(\nKvCoder.of(\nKvCoder.of(StringUtf8Coder.of(), BigEndianLongCoder.of()),\nRowCoder.of(RecordWithMetadata.getSchema())));\nPCollectionView, Long>> rangeSizes =\nrecordsGroupedByFileAndRange\n.apply(\"CountRecordsForEachFileRange\", Count.perKey())\n.apply(\"SizesAsView\", View.asMap());\nPCollection singletonPcoll =\nrecords.getPipeline().apply(\"CreateSingletonPcoll\", Create.of(Arrays.asList(1)));\n/*\n* For each (File, Offset) pair, calculate the number of lines occurring before the Range for each file\n*\n* After computing the number of lines before each range, we can find the line number in original file as numLiesBeforeOffset + lineNumInCurrentOffset\n*/\nPCollectionView, Long>> numRecordsBeforeEachRange =\nsingletonPcoll\n.apply(\n\"ComputeNumRecordsBeforeRange\",\nParDo.of(new ComputeRecordsBeforeEachRange(rangeSizes))\n.withSideInputs(rangeSizes))\n.apply(\"NumRecordsBeforeEachRangeAsView\", View.asMap());\nreturn recordsGroupedByFileAndRange\n.apply(\n\"AssignLineNums\",\nParDo.of(new AssignRecordNums(numRecordsBeforeEachRange))\n.withSideInputs(numRecordsBeforeEachRange))\n.setRowSchema(RecordWithMetadata.getSchema());\n}", + "target_code": "* After computing the number of lines before each range, we can find the line number in original file as numLiesBeforeOffset + lineNumInCurrentOffset", + "method_body_after": "public PCollection expand(PCollection records) {\n/*\n* At this point the line number in RecordWithMetadata contains the relative line offset from the beginning of the read range.\n*\n* To compute the absolute position from the beginning of the input we group the lines within the same ranges, and evaluate the size of each range.\n*/\nTrigger currentTrigger = records.getWindowingStrategy().getTrigger();\nSet allowedTriggers =\nImmutableSet.of(\nRepeatedly.forever(AfterWatermark.pastEndOfWindow()), DefaultTrigger.of());\nPreconditions.checkArgument(\nallowedTriggers.contains(currentTrigger),\nString.format(\n\"getWithRecordNumMetadata(true) only supports the default trigger not: %s\",\ncurrentTrigger));\nPCollection, Row>> recordsGroupedByFileAndRange 
=\nrecords\n.apply(\"AddFileNameAndRange\", ParDo.of(new AddFileNameAndRange()))\n.setCoder(\nKvCoder.of(\nKvCoder.of(StringUtf8Coder.of(), BigEndianLongCoder.of()),\nRowCoder.of(RecordWithMetadata.getSchema())));\nPCollectionView>>> rangeSizes =\nrecordsGroupedByFileAndRange\n.apply(\"CountRecordsForEachFileRange\", Count.perKey())\n.apply(\nMapElements.into(\nTypeDescriptors.kvs(\nTypeDescriptors.strings(),\nTypeDescriptors.kvs(\nTypeDescriptors.longs(), TypeDescriptors.longs())))\n., Long>>via(\nx ->\nKV.of(\nx.getKey().getKey(), KV.of(x.getKey().getValue(), x.getValue()))))\n.apply(\"SizesAsView\", View.asMultimap());\nPCollection singletonPcoll =\nrecords.getPipeline().apply(\"CreateSingletonPcoll\", Create.of(Arrays.asList(1)));\n/*\n* For each (File, Offset) pair, calculate the number of lines occurring before the Range for each file\n*\n* After computing the number of lines before each range, we can find the line number in original file as numLinesBeforeOffset + lineNumInCurrentOffset\n*/\nPCollectionView, Long>> numRecordsBeforeEachRange =\nsingletonPcoll\n.apply(\n\"ComputeNumRecordsBeforeRange\",\nParDo.of(new ComputeRecordsBeforeEachRange(rangeSizes))\n.withSideInputs(rangeSizes))\n.apply(\"NumRecordsBeforeEachRangeAsView\", View.asMap());\nreturn recordsGroupedByFileAndRange\n.apply(\n\"AssignLineNums\",\nParDo.of(new AssignRecordNums(numRecordsBeforeEachRange))\n.withSideInputs(numRecordsBeforeEachRange))\n.setRowSchema(RecordWithMetadata.getSchema());\n}", + "context_before": "class ProcessRecordNumbers extends PTransform, PCollection> {\n@Override\n}", + "context_after": "class ProcessRecordNumbers extends PTransform, PCollection> {\n@Override\n}" + }, + { + "comment": "The order does not make any differences. ", + "method_body": "public static void configureMapper(ObjectMapper mapper) {\nmapper.registerModule(new JavaTimeModule());\nmapper.disable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE);\nUntypedObjectDeserializer defaultDeserializer = new UntypedObjectDeserializer(null, null);\nGeoPointDeserializer geoPointDeserializer = new GeoPointDeserializer(defaultDeserializer);\nIso8601DateDeserializer iso8601DateDeserializer = new Iso8601DateDeserializer(geoPointDeserializer);\nSimpleModule module = new SimpleModule();\nmodule.addDeserializer(Object.class, iso8601DateDeserializer);\nmapper.registerModule(Iso8601DateSerializer.getModule());\nmapper.registerModule(module);\n}", + "target_code": "Iso8601DateDeserializer iso8601DateDeserializer = new Iso8601DateDeserializer(geoPointDeserializer);", + "method_body_after": "public static void configureMapper(ObjectMapper mapper) {\nmapper.registerModule(new JavaTimeModule());\nmapper.disable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE);\nUntypedObjectDeserializer defaultDeserializer = new UntypedObjectDeserializer(null, null);\nGeoPointDeserializer geoPointDeserializer = new GeoPointDeserializer(defaultDeserializer);\nIso8601DateDeserializer iso8601DateDeserializer = new Iso8601DateDeserializer(geoPointDeserializer);\nSimpleModule module = new SimpleModule();\nmodule.addDeserializer(Object.class, iso8601DateDeserializer);\nmapper.registerModule(Iso8601DateSerializer.getModule());\nmapper.registerModule(module);\n}", + "context_before": "class SerializationUtil {\n/**\n* Configures an {@link ObjectMapper} with custom behavior needed to work with the Azure Cognitive Search REST API.\n*\n* @param mapper the mapper to be configured\n*/\n}", + "context_after": "class SerializationUtil {\n/**\n* Configures an 
{@link ObjectMapper} with custom behavior needed to work with the Azure Cognitive Search REST API.\n*\n* @param mapper the mapper to be configured\n*/\n}" + }, + { + "comment": "This is practically the same, we can revert this.", + "method_body": "public void canUpdateJobState() throws Exception {\nString jobId = getStringIdWithUserNamePrefix(\"-Job-CanUpdateState\");\nPoolInformation poolInfo = new PoolInformation();\npoolInfo.withPoolId(poolId);\nbatchClient.jobOperations().createJob(jobId, poolInfo);\ntry {\nCloudJob job = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(JobState.ACTIVE, job.state());\nJobUpdateParameter updateParam = new JobUpdateParameter();\nInteger maxTaskRetryCount = 3;\nInteger priority = 500;\nupdateParam.withPoolInfo(poolInfo).withPriority(priority).withConstraints(new JobConstraints().withMaxTaskRetryCount(maxTaskRetryCount));\nbatchClient.jobOperations().updateJob(jobId, updateParam);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(priority, job.priority());\nAssert.assertEquals(maxTaskRetryCount, job.constraints().maxTaskRetryCount());\nbatchClient.jobOperations().disableJob(jobId, DisableJobOption.REQUEUE.REQUEUE);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertTrue(JobState.DISABLING == job.state() || JobState.DISABLED == job.state());\nThread.sleep(5 * 1000);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(JobState.DISABLED, job.state());\nAssert.assertEquals(OnAllTasksComplete.NO_ACTION, job.onAllTasksComplete());\nbatchClient.jobOperations().patchJob(jobId, OnAllTasksComplete.TERMINATE_JOB);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(OnAllTasksComplete.TERMINATE_JOB, job.onAllTasksComplete());\nbatchClient.jobOperations().enableJob(jobId);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(JobState.ACTIVE, job.state());\nbatchClient.jobOperations().terminateJob(jobId, \"myreason\");\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertTrue(JobState.TERMINATING == job.state() || JobState.COMPLETED == job.state());\nThread.sleep(2 * 1000);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(JobState.COMPLETED, job.state());\n}\nfinally {\ntry {\nbatchClient.jobOperations().deleteJob(jobId);\n}\ncatch (Exception e) {\n}\n}\n}", + "target_code": "Assert.assertTrue(JobState.DISABLING == job.state() || JobState.DISABLED == job.state());", + "method_body_after": "public void canUpdateJobState() throws Exception {\nString jobId = getStringIdWithUserNamePrefix(\"-Job-CanUpdateState\");\nPoolInformation poolInfo = new PoolInformation();\npoolInfo.withPoolId(poolId);\nbatchClient.jobOperations().createJob(jobId, poolInfo);\ntry {\nCloudJob job = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(JobState.ACTIVE, job.state());\nJobUpdateParameter updateParam = new JobUpdateParameter();\nInteger maxTaskRetryCount = 3;\nInteger priority = 500;\nupdateParam.withPoolInfo(poolInfo).withPriority(priority).withConstraints(new JobConstraints().withMaxTaskRetryCount(maxTaskRetryCount));\nbatchClient.jobOperations().updateJob(jobId, updateParam);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(priority, job.priority());\nAssert.assertEquals(maxTaskRetryCount, job.constraints().maxTaskRetryCount());\nbatchClient.jobOperations().disableJob(jobId, DisableJobOption.REQUEUE.REQUEUE);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertTrue(JobState.DISABLING == job.state() || 
JobState.DISABLED == job.state());\nThread.sleep(5 * 1000);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(JobState.DISABLED, job.state());\nAssert.assertEquals(OnAllTasksComplete.NO_ACTION, job.onAllTasksComplete());\nbatchClient.jobOperations().patchJob(jobId, OnAllTasksComplete.TERMINATE_JOB);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(OnAllTasksComplete.TERMINATE_JOB, job.onAllTasksComplete());\nbatchClient.jobOperations().enableJob(jobId);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(JobState.ACTIVE, job.state());\nbatchClient.jobOperations().terminateJob(jobId, \"myreason\");\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertTrue(JobState.TERMINATING == job.state() || JobState.COMPLETED == job.state());\nThread.sleep(2 * 1000);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals(JobState.COMPLETED, job.state());\n}\nfinally {\ntry {\nbatchClient.jobOperations().deleteJob(jobId);\n}\ncatch (Exception e) {\n}\n}\n}", + "context_before": "class JobTests extends BatchIntegrationTestBase {\nprivate static CloudPool livePool;\nstatic String poolId;\n@BeforeClass\npublic static void setup() throws Exception {\npoolId = getStringIdWithUserNamePrefix(\"-testpool\");\nif(isRecordMode()) {\ncreateClient(AuthMode.AAD);\nlivePool = createIfNotExistIaaSPool(poolId);\nAssert.assertNotNull(livePool);\n}\n}\n@AfterClass\npublic static void cleanup() throws Exception {\ntry {\n}\ncatch (Exception e) {\n}\n}\n@Test\npublic void canCRUDJob() throws Exception {\nString jobId = getStringIdWithUserNamePrefix(\"-Job-canCRUD\");\nPoolInformation poolInfo = new PoolInformation();\npoolInfo.withPoolId(poolId);\nbatchClient.jobOperations().createJob(jobId, poolInfo);\ntry {\nCloudJob job = batchClient.jobOperations().getJob(jobId);\nAssert.assertNotNull(job);\nAssert.assertNotNull(job.allowTaskPreemption());\nAssert.assertEquals(-1, (int) job.maxParallelTasks());\nAssert.assertEquals(jobId, job.id());\nAssert.assertEquals((Integer) 0, job.priority());\nList jobs = batchClient.jobOperations().listJobs();\nAssert.assertNotNull(jobs);\nAssert.assertTrue(jobs.size() > 0);\nboolean found = false;\nfor (CloudJob j : jobs) {\nif (j.id().equals(jobId)) {\nfound = true;\nbreak;\n}\n}\nAssert.assertTrue(found);\nbatchClient.jobOperations().updateJob(jobId, poolInfo, 1, null, null, null);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals((Integer) 1, job.priority());\nbatchClient.jobOperations().deleteJob(jobId);\ntry {\nbatchClient.jobOperations().getJob(jobId);\nAssert.assertTrue(\"Shouldn't be here, the job should be deleted\", true);\n} catch (BatchErrorException err) {\nif (!err.body().code().equals(BatchErrorCodeStrings.JobNotFound)) {\nthrow err;\n}\n}\nThread.sleep(1 * 1000);\n}\nfinally {\ntry {\nbatchClient.jobOperations().deleteJob(jobId);\n}\ncatch (Exception e) {\n}\n}\n}\n@Test\n@Test\npublic void canCRUDJobWithPoolNodeCommunicationMode() throws Exception {\nString jobId = getStringIdWithUserNamePrefix(\"-Job-canCRUDWithPoolNodeComm\");\nNodeCommunicationMode targetMode = NodeCommunicationMode.SIMPLIFIED;\nImageReference imgRef = new ImageReference().withPublisher(\"Canonical\").withOffer(\"UbuntuServer\")\n.withSku(\"18.04-LTS\").withVersion(\"latest\");\nVirtualMachineConfiguration configuration = new VirtualMachineConfiguration();\nconfiguration.withNodeAgentSKUId(\"batch.node.ubuntu 18.04\").withImageReference(imgRef);\nPoolSpecification poolSpec = new 
PoolSpecification()\n.withVmSize(\"STANDARD_D1_V2\")\n.withVirtualMachineConfiguration(configuration)\n.withTargetNodeCommunicationMode(targetMode);\nPoolInformation poolInfo = new PoolInformation();\npoolInfo.withAutoPoolSpecification(new AutoPoolSpecification().withPool(poolSpec).withPoolLifetimeOption(PoolLifetimeOption.JOB));\nbatchClient.jobOperations().createJob(jobId, poolInfo);\ntry {\nCloudJob job = batchClient.jobOperations().getJob(jobId);\nAssert.assertNotNull(job);\nAssert.assertEquals(jobId, job.id());\nAssert.assertEquals(targetMode, job.poolInfo().autoPoolSpecification().pool().targetNodeCommunicationMode());\nbatchClient.jobOperations().deleteJob(jobId);\ntry {\nbatchClient.jobOperations().getJob(jobId);\nAssert.assertTrue(\"Shouldn't be here, the job should be deleted\", true);\n} catch (BatchErrorException err) {\nif (!err.body().code().equals(BatchErrorCodeStrings.JobNotFound)) {\nthrow err;\n}\n}\nThread.sleep(1 * 1000);\n}\nfinally {\ntry {\nbatchClient.jobOperations().deleteJob(jobId);\n}\ncatch (Exception e) {\n}\n}\n}\n}", + "context_after": "class JobTests extends BatchIntegrationTestBase {\nprivate static CloudPool livePool;\nstatic String poolId;\n@BeforeClass\npublic static void setup() throws Exception {\npoolId = getStringIdWithUserNamePrefix(\"-testpool\");\nif(isRecordMode()) {\ncreateClient(AuthMode.AAD);\nlivePool = createIfNotExistIaaSPool(poolId);\nAssert.assertNotNull(livePool);\n}\n}\n@AfterClass\npublic static void cleanup() throws Exception {\ntry {\n}\ncatch (Exception e) {\n}\n}\n@Test\npublic void canCRUDJob() throws Exception {\nString jobId = getStringIdWithUserNamePrefix(\"-Job-canCRUD\");\nPoolInformation poolInfo = new PoolInformation();\npoolInfo.withPoolId(poolId);\nbatchClient.jobOperations().createJob(jobId, poolInfo);\ntry {\nCloudJob job = batchClient.jobOperations().getJob(jobId);\nAssert.assertNotNull(job);\nAssert.assertNotNull(job.allowTaskPreemption());\nAssert.assertEquals(-1, (int) job.maxParallelTasks());\nAssert.assertEquals(jobId, job.id());\nAssert.assertEquals((Integer) 0, job.priority());\nList jobs = batchClient.jobOperations().listJobs();\nAssert.assertNotNull(jobs);\nAssert.assertTrue(jobs.size() > 0);\nboolean found = false;\nfor (CloudJob j : jobs) {\nif (j.id().equals(jobId)) {\nfound = true;\nbreak;\n}\n}\nAssert.assertTrue(found);\nbatchClient.jobOperations().updateJob(jobId, poolInfo, 1, null, null, null);\njob = batchClient.jobOperations().getJob(jobId);\nAssert.assertEquals((Integer) 1, job.priority());\nbatchClient.jobOperations().deleteJob(jobId);\ntry {\nbatchClient.jobOperations().getJob(jobId);\nAssert.assertTrue(\"Shouldn't be here, the job should be deleted\", true);\n} catch (BatchErrorException err) {\nif (!err.body().code().equals(BatchErrorCodeStrings.JobNotFound)) {\nthrow err;\n}\n}\nThread.sleep(1 * 1000);\n}\nfinally {\ntry {\nbatchClient.jobOperations().deleteJob(jobId);\n}\ncatch (Exception e) {\n}\n}\n}\n@Test\n@Test\npublic void canCRUDJobWithPoolNodeCommunicationMode() throws Exception {\nString jobId = getStringIdWithUserNamePrefix(\"-Job-canCRUDWithPoolNodeComm\");\nNodeCommunicationMode targetMode = NodeCommunicationMode.SIMPLIFIED;\nImageReference imgRef = new ImageReference().withPublisher(\"Canonical\").withOffer(\"UbuntuServer\")\n.withSku(\"18.04-LTS\").withVersion(\"latest\");\nVirtualMachineConfiguration configuration = new VirtualMachineConfiguration();\nconfiguration.withNodeAgentSKUId(\"batch.node.ubuntu 18.04\").withImageReference(imgRef);\nPoolSpecification poolSpec = new 
PoolSpecification()\n.withVmSize(\"STANDARD_D1_V2\")\n.withVirtualMachineConfiguration(configuration)\n.withTargetNodeCommunicationMode(targetMode);\nPoolInformation poolInfo = new PoolInformation();\npoolInfo.withAutoPoolSpecification(new AutoPoolSpecification().withPool(poolSpec).withPoolLifetimeOption(PoolLifetimeOption.JOB));\nbatchClient.jobOperations().createJob(jobId, poolInfo);\ntry {\nCloudJob job = batchClient.jobOperations().getJob(jobId);\nAssert.assertNotNull(job);\nAssert.assertEquals(jobId, job.id());\nAssert.assertEquals(targetMode, job.poolInfo().autoPoolSpecification().pool().targetNodeCommunicationMode());\nbatchClient.jobOperations().deleteJob(jobId);\ntry {\nbatchClient.jobOperations().getJob(jobId);\nAssert.assertTrue(\"Shouldn't be here, the job should be deleted\", true);\n} catch (BatchErrorException err) {\nif (!err.body().code().equals(BatchErrorCodeStrings.JobNotFound)) {\nthrow err;\n}\n}\nThread.sleep(1 * 1000);\n}\nfinally {\ntry {\nbatchClient.jobOperations().deleteJob(jobId);\n}\ncatch (Exception e) {\n}\n}\n}\n}" + }, + { + "comment": "Should look into having a ConnectionStringParser class in Azure Core, I've been seeing this functionality in a lot of places.", + "method_body": "private void getEndPointFromConnectionString(String connectionString) {\nHashMap connectionStringPieces = new HashMap<>();\nfor (String connectionStringPiece : connectionString.split(\";\")) {\nString[] kvp = connectionStringPiece.split(\"=\", 2);\nconnectionStringPieces.put(kvp[0].toLowerCase(Locale.ROOT), kvp[1]);\n}\nString accountName = connectionStringPieces.get(ACCOUNT_NAME);\ntry {\nthis.endpoint = new URL(String.format(\"https:\n} catch (MalformedURLException e) {\nthrow new IllegalArgumentException(String.format(\"There is no valid account for the connection string. \"\n+ \"Connection String: %s\", connectionString));\n}\n}", + "target_code": "for (String connectionStringPiece : connectionString.split(\";\")) {", + "method_body_after": "private void getEndPointFromConnectionString(String connectionString) {\nHashMap connectionStringPieces = new HashMap<>();\nfor (String connectionStringPiece : connectionString.split(\";\")) {\nString[] kvp = connectionStringPiece.split(\"=\", 2);\nconnectionStringPieces.put(kvp[0].toLowerCase(Locale.ROOT), kvp[1]);\n}\nString accountName = connectionStringPieces.get(ACCOUNT_NAME);\ntry {\nthis.endpoint = new URL(String.format(\"https:\n} catch (MalformedURLException e) {\nLOGGER.asError().log(\"There is no valid account for the connection string. \"\n+ \"Connection String: %s\", connectionString);\nthrow new IllegalArgumentException(String.format(\"There is no valid account for the connection string. 
\"\n+ \"Connection String: %s\", connectionString));\n}\n}", + "context_before": "class QueueClientBuilder {\nprivate static final String ACCOUNT_NAME = \"accountname\";\nprivate final List policies;\nprivate URL endpoint;\nprivate String queueName;\nprivate SASTokenCredential sasTokenCredential;\nprivate SharedKeyCredential sharedKeyCredential;\nprivate HttpClient httpClient;\nprivate HttpPipeline pipeline;\nprivate HttpLogDetailLevel logLevel;\nprivate RetryPolicy retryPolicy;\nprivate Configuration configuration;\n/**\n* Creates a builder instance that is able to configure and construct {@link QueueClient QueueClients}\n* and {@link QueueAsyncClient QueueAsyncClients}.\n*/\npublic QueueClientBuilder() {\nretryPolicy = new RetryPolicy();\nlogLevel = HttpLogDetailLevel.NONE;\npolicies = new ArrayList<>();\nconfiguration = ConfigurationManager.getConfiguration();\n}\n/**\n* Creates a {@link QueueClient} based on options set in the builder. Every time {@code buildClient()} is\n* called a new instance of {@link QueueClient} is created.\n*\n*
\n* If {@link QueueClientBuilder\n* {@link QueueClientBuilder\n* {@link QueueClientBuilder\n* All other builder settings are ignored.\n*
\n*\n* @return A QueueClient with the options set from the builder.\n* @throws NullPointerException If {@code endpoint} or {@code queueName} have not been set.\n* @throws IllegalStateException If neither a {@link SharedKeyCredential} or {@link SASTokenCredential} has been set.\n*/\npublic QueueClient buildClient() {\nreturn new QueueClient(buildAsyncClient());\n}\n/**\n* Creates a {@link QueueAsyncClient} based on options set in the builder. Every time {@code buildAsyncClient()} is\n* called a new instance of {@link QueueAsyncClient} is created.\n*\n*
\n* If {@link QueueClientBuilder\n* {@link QueueClientBuilder\n* {@link QueueClientBuilder\n* All other builder settings are ignored.\n*
\n*\n* @return A QueueAsyncClient with the options set from the builder.\n* @throws NullPointerException If {@code endpoint} or {@code queueName} have not been set.\n* @throws IllegalArgumentException If neither a {@link SharedKeyCredential} or {@link SASTokenCredential} has been set.\n*/\npublic QueueAsyncClient buildAsyncClient() {\nObjects.requireNonNull(endpoint);\nObjects.requireNonNull(queueName);\nif (pipeline != null) {\nreturn new QueueAsyncClient(endpoint, pipeline, queueName);\n}\nif (sasTokenCredential == null && sharedKeyCredential == null) {\nthrow new IllegalArgumentException(\"Credentials are required for authorization\");\n}\nfinal List policies = new ArrayList<>();\npolicies.add(new UserAgentPolicy(QueueConfiguration.NAME, QueueConfiguration.VERSION, configuration));\npolicies.add(new RequestIdPolicy());\npolicies.add(new AddDatePolicy());\nif (sharedKeyCredential != null) {\npolicies.add(new SharedKeyCredentialPolicy(sharedKeyCredential));\n} else {\npolicies.add(new SASTokenCredentialPolicy(sasTokenCredential));\n}\nHttpPolicyProviders.addBeforeRetryPolicies(policies);\npolicies.add(retryPolicy);\npolicies.addAll(this.policies);\nHttpPolicyProviders.addAfterRetryPolicies(policies);\npolicies.add(new HttpLoggingPolicy(logLevel));\nHttpPipeline pipeline = HttpPipeline.builder()\n.policies(policies.toArray(new HttpPipelinePolicy[0]))\n.httpClient(httpClient)\n.build();\nreturn new QueueAsyncClient(endpoint, pipeline, queueName);\n}\n/**\n* Sets the endpoint for the Azure Storage Queue instance that the client will interact with.\n*\n*
The first path segment, if the endpoint contains path segments, will be assumed to be the name of the queue\n* that the client will interact with.
\n*\n*
Query parameters of the endpoint will be parsed using {@link SASTokenCredential\n* attempt to generate a {@link SASTokenCredential} to authenticate requests sent to the service.
\n*\n* @param endpoint The URL of the Azure Storage Queue instance to send service requests to and receive responses from.\n* @return the updated QueueClientBuilder object\n* @throws IllegalArgumentException If {@code endpoint} isn't a proper URL\n*/\npublic QueueClientBuilder endpoint(String endpoint) {\nObjects.requireNonNull(endpoint);\ntry {\nURL fullURL = new URL(endpoint);\nthis.endpoint = new URL(fullURL.getProtocol() + \":\nString[] pathSegments = fullURL.getPath().split(\"/\", 2);\nif (pathSegments.length == 2 && !ImplUtils.isNullOrEmpty(pathSegments[1])) {\nthis.queueName = pathSegments[1];\n}\nSASTokenCredential credential = SASTokenCredential.fromQuery(fullURL.getQuery());\nif (credential != null) {\nthis.sasTokenCredential = credential;\n}\n} catch (MalformedURLException ex) {\nthrow new IllegalArgumentException(\"The Azure Storage Queue endpoint url is malformed.\");\n}\nreturn this;\n}\n/**\n* Sets the name of the queue that the client will interact with.\n*\n* @param queueName Name of the queue\n* @return the updated QueueClientBuilder object\n* @throws NullPointerException If {@code queueName} is {@code null}.\n*/\npublic QueueClientBuilder queueName(String queueName) {\nthis.queueName = Objects.requireNonNull(queueName);\nreturn this;\n}\n/**\n* Sets the {@link SASTokenCredential} used to authenticate requests sent to the Queue service.\n*\n* @param credential SAS token credential generated from the Storage account that authorizes requests\n* @return the updated QueueClientBuilder object\n* @throws NullPointerException If {@code credential} is {@code null}.\n*/\npublic QueueClientBuilder credential(SASTokenCredential credential) {\nthis.sasTokenCredential = Objects.requireNonNull(credential);\nreturn this;\n}\n/**\n* Creates a {@link SharedKeyCredential} from the {@code connectionString} used to authenticate requests sent to the\n* Queue service.\n*\n* @param connectionString Connection string from the Access Keys section in the Storage account\n* @return the updated QueueClientBuilder object\n* @throws NullPointerException If {@code connectionString} is {@code null}.\n*/\npublic QueueClientBuilder connectionString(String connectionString) {\nObjects.requireNonNull(connectionString);\nthis.sharedKeyCredential = SharedKeyCredential.fromConnectionString(connectionString);\nObjects.requireNonNull(connectionString);\nthis.sharedKeyCredential = SharedKeyCredential.fromConnectionString(connectionString);\ngetEndPointFromConnectionString(connectionString);\nreturn this;\n}\n/**\n* Sets the HTTP client to use for sending and receiving requests to and from the service.\n*\n* @param httpClient The HTTP client to use for requests.\n* @return The updated QueueClientBuilder object.\n* @throws NullPointerException If {@code httpClient} is {@code null}.\n*/\npublic QueueClientBuilder httpClient(HttpClient httpClient) {\nthis.httpClient = Objects.requireNonNull(httpClient);\nreturn this;\n}\n/**\n* Adds a policy to the set of existing policies that are executed after the {@link RetryPolicy}.\n*\n* @param pipelinePolicy The retry policy for service requests.\n* @return The updated QueueClientBuilder object.\n* @throws NullPointerException If {@code pipelinePolicy} is {@code null}.\n*/\npublic QueueClientBuilder addPolicy(HttpPipelinePolicy pipelinePolicy) {\nObjects.requireNonNull(pipelinePolicy);\nthis.policies.add(pipelinePolicy);\nreturn this;\n}\n/**\n* Sets the logging level for HTTP requests and responses.\n*\n* @param logLevel The amount of logging output when sending and receiving 
HTTP requests/responses.\n* @return The updated QueueClientBuilder object.\n*/\npublic QueueClientBuilder httpLogDetailLevel(HttpLogDetailLevel logLevel) {\nthis.logLevel = logLevel;\nreturn this;\n}\n/**\n* Sets the HTTP pipeline to use for the service client.\n*\n* If {@code pipeline} is set, all other settings are ignored, aside from {@link QueueClientBuilder\n* and {@link QueueClientBuilder\n*\n* @param pipeline The HTTP pipeline to use for sending service requests and receiving responses.\n* @return The updated QueueClientBuilder object.\n* @throws NullPointerException If {@code pipeline} is {@code null}.\n*/\npublic QueueClientBuilder pipeline(HttpPipeline pipeline) {\nObjects.requireNonNull(pipeline);\nthis.pipeline = pipeline;\nreturn this;\n}\n/**\n* Sets the configuration store that is used during construction of the service client.\n*\n* The default configuration store is a clone of the {@link ConfigurationManager\n* configuration store}, use {@link Configuration\n*\n* @param configuration The configuration store used to\n* @return The updated QueueClientBuilder object.\n*/\npublic QueueClientBuilder configuration(Configuration configuration) {\nthis.configuration = configuration;\nreturn this;\n}\n}", + "context_after": "class QueueClientBuilder {\nprivate static final ClientLogger LOGGER = new ClientLogger(QueueClientBuilder.class);\nprivate static final String ACCOUNT_NAME = \"accountname\";\nprivate final List policies;\nprivate URL endpoint;\nprivate String queueName;\nprivate SASTokenCredential sasTokenCredential;\nprivate SharedKeyCredential sharedKeyCredential;\nprivate HttpClient httpClient;\nprivate HttpPipeline pipeline;\nprivate HttpLogDetailLevel logLevel;\nprivate RetryPolicy retryPolicy;\nprivate Configuration configuration;\n/**\n* Creates a builder instance that is able to configure and construct {@link QueueClient QueueClients}\n* and {@link QueueAsyncClient QueueAsyncClients}.\n*/\npublic QueueClientBuilder() {\nretryPolicy = new RetryPolicy();\nlogLevel = HttpLogDetailLevel.NONE;\npolicies = new ArrayList<>();\nconfiguration = ConfigurationManager.getConfiguration();\n}\n/**\n* Creates a {@link QueueClient} based on options set in the builder. Every time {@code buildClient()} is\n* called a new instance of {@link QueueClient} is created.\n*\n*
\n* If {@link QueueClientBuilder\n* {@link QueueClientBuilder\n* {@link QueueClientBuilder\n* All other builder settings are ignored.\n*
\n*\n* @return A QueueClient with the options set from the builder.\n* @throws NullPointerException If {@code endpoint} or {@code queueName} have not been set.\n* @throws IllegalStateException If neither a {@link SharedKeyCredential} nor a {@link SASTokenCredential} has been set.\n*/\npublic QueueClient buildClient() {\nreturn new QueueClient(buildAsyncClient());\n}\n/**\n* Creates a {@link QueueAsyncClient} based on options set in the builder. Every time {@code buildAsyncClient()} is\n* called a new instance of {@link QueueAsyncClient} is created.\n*\n*
\n* If {@link QueueClientBuilder\n* {@link QueueClientBuilder\n* {@link QueueClientBuilder\n* All other builder settings are ignored.\n*
\n*\n* @return A QueueAsyncClient with the options set from the builder.\n* @throws NullPointerException If {@code endpoint} or {@code queueName} have not been set.\n* @throws IllegalArgumentException If neither a {@link SharedKeyCredential} nor a {@link SASTokenCredential} has been set.\n*/\npublic QueueAsyncClient buildAsyncClient() {\nObjects.requireNonNull(endpoint);\nObjects.requireNonNull(queueName);\nif (sasTokenCredential == null && sharedKeyCredential == null) {\nLOGGER.asError().log(\"Credentials are required for authorization\");\nthrow new IllegalArgumentException(\"Credentials are required for authorization\");\n}\nif (pipeline != null) {\nreturn new QueueAsyncClient(endpoint, pipeline, queueName);\n}\nfinal List policies = new ArrayList<>();\npolicies.add(new UserAgentPolicy(QueueConfiguration.NAME, QueueConfiguration.VERSION, configuration));\npolicies.add(new RequestIdPolicy());\npolicies.add(new AddDatePolicy());\nif (sharedKeyCredential != null) {\npolicies.add(new SharedKeyCredentialPolicy(sharedKeyCredential));\n} else {\npolicies.add(new SASTokenCredentialPolicy(sasTokenCredential));\n}\nHttpPolicyProviders.addBeforeRetryPolicies(policies);\npolicies.add(retryPolicy);\npolicies.addAll(this.policies);\nHttpPolicyProviders.addAfterRetryPolicies(policies);\npolicies.add(new HttpLoggingPolicy(logLevel));\nHttpPipeline pipeline = HttpPipeline.builder()\n.policies(policies.toArray(new HttpPipelinePolicy[0]))\n.httpClient(httpClient)\n.build();\nreturn new QueueAsyncClient(endpoint, pipeline, queueName);\n}\n/**\n* Sets the endpoint for the Azure Storage Queue instance that the client will interact with.\n*\n*
If the endpoint contains path segments, the first segment is assumed to be the name of the queue\n* that the client will interact with.
\n*\n*
Query parameters of the endpoint will be parsed using {@link SASTokenCredential#fromQuery(String) fromQuery} in an\n* attempt to generate a {@link SASTokenCredential} to authenticate requests sent to the service.
\n*\n* @param endpoint The URL of the Azure Storage Queue instance to send service requests to and receive responses from.\n* @return the updated QueueClientBuilder object\n* @throws IllegalArgumentException If {@code endpoint} isn't a proper URL\n*/\npublic QueueClientBuilder endpoint(String endpoint) {\nObjects.requireNonNull(endpoint);\ntry {\nURL fullURL = new URL(endpoint);\nthis.endpoint = new URL(fullURL.getProtocol() + \":\nString[] pathSegments = fullURL.getPath().split(\"/\", 2);\nif (pathSegments.length == 2 && !ImplUtils.isNullOrEmpty(pathSegments[1])) {\nthis.queueName = pathSegments[1];\n}\nSASTokenCredential credential = SASTokenCredential.fromQuery(fullURL.getQuery());\nif (credential != null) {\nthis.sasTokenCredential = credential;\n}\n} catch (MalformedURLException ex) {\nLOGGER.asError().log(\"The Azure Storage Queue endpoint url is malformed. Endpoint: \" + endpoint);\nthrow new IllegalArgumentException(\"The Azure Storage Queue endpoint url is malformed. Endpoint: \" + endpoint);\n}\nreturn this;\n}\n/**\n* Sets the name of the queue that the client will interact with.\n*\n* @param queueName Name of the queue\n* @return the updated QueueClientBuilder object\n* @throws NullPointerException If {@code queueName} is {@code null}.\n*/\npublic QueueClientBuilder queueName(String queueName) {\nthis.queueName = Objects.requireNonNull(queueName);\nreturn this;\n}\n/**\n* Sets the {@link SASTokenCredential} used to authenticate requests sent to the Queue.\n*\n* @param credential SAS token credential generated from the Storage account that authorizes requests\n* @return the updated QueueClientBuilder object\n* @throws NullPointerException If {@code credential} is {@code null}.\n*/\npublic QueueClientBuilder credential(SASTokenCredential credential) {\nthis.sasTokenCredential = Objects.requireNonNull(credential);\nreturn this;\n}\n/**\n* Sets the {@link SharedKeyCredential} used to authenticate requests sent to the Queue.\n*\n* @param credential Shared key credential can retrieve from the Storage account that authorizes requests\n* @return the updated QueueServiceClientBuilder object\n* @throws NullPointerException If {@code credential} is {@code null}.\n*/\npublic QueueClientBuilder credential(SharedKeyCredential credential) {\nthis.sharedKeyCredential = Objects.requireNonNull(credential);\nreturn this;\n}\n/**\n* Creates a {@link SharedKeyCredential} from the {@code connectionString} used to authenticate requests sent to the\n* Queue service.\n*\n* @param connectionString Connection string from the Access Keys section in the Storage account\n* @return the updated QueueClientBuilder object\n* @throws NullPointerException If {@code connectionString} is {@code null}.\n*/\npublic QueueClientBuilder connectionString(String connectionString) {\nObjects.requireNonNull(connectionString);\nthis.sharedKeyCredential = SharedKeyCredential.fromConnectionString(connectionString);\ngetEndPointFromConnectionString(connectionString);\nreturn this;\n}\n/**\n* Sets the HTTP client to use for sending and receiving requests to and from the service.\n*\n* @param httpClient The HTTP client to use for requests.\n* @return The updated QueueClientBuilder object.\n* @throws NullPointerException If {@code httpClient} is {@code null}.\n*/\npublic QueueClientBuilder httpClient(HttpClient httpClient) {\nthis.httpClient = Objects.requireNonNull(httpClient);\nreturn this;\n}\n/**\n* Adds a policy to the set of existing policies that are executed after the {@link RetryPolicy}.\n*\n* @param pipelinePolicy The 
retry policy for service requests.\n* @return The updated QueueClientBuilder object.\n* @throws NullPointerException If {@code pipelinePolicy} is {@code null}.\n*/\npublic QueueClientBuilder addPolicy(HttpPipelinePolicy pipelinePolicy) {\nObjects.requireNonNull(pipelinePolicy);\nthis.policies.add(pipelinePolicy);\nreturn this;\n}\n/**\n* Sets the logging level for HTTP requests and responses.\n*\n* @param logLevel The amount of logging output when sending and receiving HTTP requests/responses.\n* @return The updated QueueClientBuilder object.\n*/\npublic QueueClientBuilder httpLogDetailLevel(HttpLogDetailLevel logLevel) {\nthis.logLevel = logLevel;\nreturn this;\n}\n/**\n* Sets the HTTP pipeline to use for the service client.\n*\n* If {@code pipeline} is set, all other settings are ignored, aside from {@link QueueClientBuilder\n* and {@link QueueClientBuilder\n*\n* @param pipeline The HTTP pipeline to use for sending service requests and receiving responses.\n* @return The updated QueueClientBuilder object.\n* @throws NullPointerException If {@code pipeline} is {@code null}.\n*/\npublic QueueClientBuilder pipeline(HttpPipeline pipeline) {\nObjects.requireNonNull(pipeline);\nthis.pipeline = pipeline;\nreturn this;\n}\n/**\n* Sets the configuration store that is used during construction of the service client.\n*\n* The default configuration store is a clone of the {@link ConfigurationManager\n* configuration store}, use {@link Configuration\n*\n* @param configuration The configuration store used to\n* @return The updated QueueClientBuilder object.\n*/\npublic QueueClientBuilder configuration(Configuration configuration) {\nthis.configuration = configuration;\nreturn this;\n}\n}" + }, + { + "comment": "Afaik a method lambda should also work (if not I'm happy to learn that ;)), so you wouldn't need to change that part.", + "method_body": "public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {\nfinal SerializationSchema keySerialization =\ncreateSerialization(context, keyEncodingFormat, keyProjection, keyPrefix);\nfinal SerializationSchema valueSerialization =\ncreateSerialization(context, valueEncodingFormat, valueProjection, null);\nfinal KafkaSinkBuilder sinkBuilder = KafkaSink.builder();\nfinal List physicalChildren = physicalDataType.getLogicalType().getChildren();\nif (transactionalIdPrefix != null) {\nsinkBuilder.setTransactionalIdPrefix(transactionalIdPrefix);\n}\nfinal KafkaSink kafkaSink =\nsinkBuilder\n.setDeliverGuarantee(deliveryGuarantee)\n.setBootstrapServers(\nproperties.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG).toString())\n.setKafkaProducerConfig(properties)\n.setRecordSerializer(\nnew DynamicKafkaRecordSerializationSchema(\ntopic,\npartitioner,\nkeySerialization,\nvalueSerialization,\ngetFieldGetters(physicalChildren, keyProjection),\ngetFieldGetters(physicalChildren, valueProjection),\nhasMetadata(),\ngetMetadataPositions(physicalChildren),\nupsertMode))\n.build();\nif (flushMode.isEnabled() && upsertMode) {\nreturn (DataStreamSinkProvider)\ndataStream -> {\nfinal boolean objectReuse =\ndataStream\n.getExecutionEnvironment()\n.getConfig()\n.isObjectReuseEnabled();\nfinal ReducingUpsertSink sink =\nnew ReducingUpsertSink<>(\nkafkaSink,\nphysicalDataType,\nkeyProjection,\nflushMode,\nobjectReuse\n? 
(rowData) ->\ncreateRowDataTypeSerializer(\ncontext,\ndataStream\n.getExecutionConfig())\n.copy(rowData)\n: rowData -> rowData);\nfinal DataStreamSink end = dataStream.sinkTo(sink);\nif (parallelism != null) {\nend.setParallelism(parallelism);\n}\nreturn end;\n};\n}\nreturn SinkProvider.of(kafkaSink, parallelism);\n}", + "target_code": ".copy(rowData)", + "method_body_after": "public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {\nfinal SerializationSchema keySerialization =\ncreateSerialization(context, keyEncodingFormat, keyProjection, keyPrefix);\nfinal SerializationSchema valueSerialization =\ncreateSerialization(context, valueEncodingFormat, valueProjection, null);\nfinal KafkaSinkBuilder sinkBuilder = KafkaSink.builder();\nfinal List physicalChildren = physicalDataType.getLogicalType().getChildren();\nif (transactionalIdPrefix != null) {\nsinkBuilder.setTransactionalIdPrefix(transactionalIdPrefix);\n}\nfinal KafkaSink kafkaSink =\nsinkBuilder\n.setDeliverGuarantee(deliveryGuarantee)\n.setBootstrapServers(\nproperties.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG).toString())\n.setKafkaProducerConfig(properties)\n.setRecordSerializer(\nnew DynamicKafkaRecordSerializationSchema(\ntopic,\npartitioner,\nkeySerialization,\nvalueSerialization,\ngetFieldGetters(physicalChildren, keyProjection),\ngetFieldGetters(physicalChildren, valueProjection),\nhasMetadata(),\ngetMetadataPositions(physicalChildren),\nupsertMode))\n.build();\nif (flushMode.isEnabled() && upsertMode) {\nreturn (DataStreamSinkProvider)\ndataStream -> {\nfinal boolean objectReuse =\ndataStream\n.getExecutionEnvironment()\n.getConfig()\n.isObjectReuseEnabled();\nfinal ReducingUpsertSink sink =\nnew ReducingUpsertSink<>(\nkafkaSink,\nphysicalDataType,\nkeyProjection,\nflushMode,\nobjectReuse\n? createRowDataTypeSerializer(\ncontext,\ndataStream.getExecutionConfig())\n::copy\n: rowData -> rowData);\nfinal DataStreamSink end = dataStream.sinkTo(sink);\nif (parallelism != null) {\nend.setParallelism(parallelism);\n}\nreturn end;\n};\n}\nreturn SinkProvider.of(kafkaSink, parallelism);\n}", + "context_before": "class KafkaDynamicSink implements DynamicTableSink, SupportsWritingMetadata {\n/** Metadata that is appended at the end of a physical sink row. */\nprotected List metadataKeys;\n/** Data type of consumed data type. */\nprotected DataType consumedDataType;\n/** Data type to configure the formats. */\nprotected final DataType physicalDataType;\n/** Optional format for encoding keys to Kafka. */\nprotected final @Nullable EncodingFormat> keyEncodingFormat;\n/** Format for encoding values to Kafka. */\nprotected final EncodingFormat> valueEncodingFormat;\n/** Indices that determine the key fields and the source position in the consumed row. */\nprotected final int[] keyProjection;\n/** Indices that determine the value fields and the source position in the consumed row. */\nprotected final int[] valueProjection;\n/** Prefix that needs to be removed from fields when constructing the physical data type. */\nprotected final @Nullable String keyPrefix;\n/** The defined delivery guarantee. */\nprivate final DeliveryGuarantee deliveryGuarantee;\n/**\n* If the {@link\n* prefix for all ids of opened Kafka transactions.\n*/\n@Nullable private final String transactionalIdPrefix;\n/** The Kafka topic to write to. */\nprotected final String topic;\n/** Properties for the Kafka producer. */\nprotected final Properties properties;\n/** Partitioner to select Kafka partition for each item. 
*/\nprotected final @Nullable FlinkKafkaPartitioner partitioner;\n/**\n* Flag to determine sink mode. In upsert mode sink transforms the delete/update-before message\n* to tombstone message.\n*/\nprotected final boolean upsertMode;\n/** Sink buffer flush config which only supported in upsert mode now. */\nprotected final SinkBufferFlushMode flushMode;\n/** Parallelism of the physical Kafka producer. * */\nprotected final @Nullable Integer parallelism;\npublic KafkaDynamicSink(\nDataType consumedDataType,\nDataType physicalDataType,\n@Nullable EncodingFormat> keyEncodingFormat,\nEncodingFormat> valueEncodingFormat,\nint[] keyProjection,\nint[] valueProjection,\n@Nullable String keyPrefix,\nString topic,\nProperties properties,\n@Nullable FlinkKafkaPartitioner partitioner,\nDeliveryGuarantee deliveryGuarantee,\nboolean upsertMode,\nSinkBufferFlushMode flushMode,\n@Nullable Integer parallelism,\n@Nullable String transactionalIdPrefix) {\nthis.consumedDataType =\ncheckNotNull(consumedDataType, \"Consumed data type must not be null.\");\nthis.physicalDataType =\ncheckNotNull(physicalDataType, \"Physical data type must not be null.\");\nthis.keyEncodingFormat = keyEncodingFormat;\nthis.valueEncodingFormat =\ncheckNotNull(valueEncodingFormat, \"Value encoding format must not be null.\");\nthis.keyProjection = checkNotNull(keyProjection, \"Key projection must not be null.\");\nthis.valueProjection = checkNotNull(valueProjection, \"Value projection must not be null.\");\nthis.keyPrefix = keyPrefix;\nthis.transactionalIdPrefix = transactionalIdPrefix;\nthis.metadataKeys = Collections.emptyList();\nthis.topic = checkNotNull(topic, \"Topic must not be null.\");\nthis.properties = checkNotNull(properties, \"Properties must not be null.\");\nthis.partitioner = partitioner;\nthis.deliveryGuarantee =\ncheckNotNull(deliveryGuarantee, \"DeliveryGuarantee must not be null.\");\nthis.upsertMode = upsertMode;\nthis.flushMode = checkNotNull(flushMode);\nif (flushMode.isEnabled() && !upsertMode) {\nthrow new IllegalArgumentException(\n\"Sink buffer flush is only supported in upsert-kafka.\");\n}\nthis.parallelism = parallelism;\n}\n@Override\npublic ChangelogMode getChangelogMode(ChangelogMode requestedMode) {\nreturn valueEncodingFormat.getChangelogMode();\n}\n@Override\n@Override\npublic Map listWritableMetadata() {\nfinal Map metadataMap = new LinkedHashMap<>();\nStream.of(WritableMetadata.values())\n.forEachOrdered(m -> metadataMap.put(m.key, m.dataType));\nreturn metadataMap;\n}\n@Override\npublic void applyWritableMetadata(List metadataKeys, DataType consumedDataType) {\nthis.metadataKeys = metadataKeys;\nthis.consumedDataType = consumedDataType;\n}\n@Override\npublic DynamicTableSink copy() {\nfinal KafkaDynamicSink copy =\nnew KafkaDynamicSink(\nconsumedDataType,\nphysicalDataType,\nkeyEncodingFormat,\nvalueEncodingFormat,\nkeyProjection,\nvalueProjection,\nkeyPrefix,\ntopic,\nproperties,\npartitioner,\ndeliveryGuarantee,\nupsertMode,\nflushMode,\nparallelism,\ntransactionalIdPrefix);\ncopy.metadataKeys = metadataKeys;\nreturn copy;\n}\n@Override\npublic String asSummaryString() {\nreturn \"Kafka table sink\";\n}\n@Override\npublic boolean equals(Object o) {\nif (this == o) {\nreturn true;\n}\nif (o == null || getClass() != o.getClass()) {\nreturn false;\n}\nfinal KafkaDynamicSink that = (KafkaDynamicSink) o;\nreturn Objects.equals(metadataKeys, that.metadataKeys)\n&& Objects.equals(consumedDataType, that.consumedDataType)\n&& Objects.equals(physicalDataType, that.physicalDataType)\n&& 
Objects.equals(keyEncodingFormat, that.keyEncodingFormat)\n&& Objects.equals(valueEncodingFormat, that.valueEncodingFormat)\n&& Arrays.equals(keyProjection, that.keyProjection)\n&& Arrays.equals(valueProjection, that.valueProjection)\n&& Objects.equals(keyPrefix, that.keyPrefix)\n&& Objects.equals(topic, that.topic)\n&& Objects.equals(properties, that.properties)\n&& Objects.equals(partitioner, that.partitioner)\n&& Objects.equals(deliveryGuarantee, that.deliveryGuarantee)\n&& Objects.equals(upsertMode, that.upsertMode)\n&& Objects.equals(flushMode, that.flushMode)\n&& Objects.equals(transactionalIdPrefix, that.transactionalIdPrefix)\n&& Objects.equals(parallelism, that.parallelism);\n}\n@Override\npublic int hashCode() {\nreturn Objects.hash(\nmetadataKeys,\nconsumedDataType,\nphysicalDataType,\nkeyEncodingFormat,\nvalueEncodingFormat,\nkeyProjection,\nvalueProjection,\nkeyPrefix,\ntopic,\nproperties,\npartitioner,\ndeliveryGuarantee,\nupsertMode,\nflushMode,\ntransactionalIdPrefix,\nparallelism);\n}\nprivate TypeSerializer createRowDataTypeSerializer(\nContext context, ExecutionConfig executionConfig) {\nfinal TypeInformation typeInformation =\ncontext.createTypeInformation(consumedDataType);\nreturn typeInformation.createSerializer(executionConfig);\n}\nprivate int[] getMetadataPositions(List physicalChildren) {\nreturn Stream.of(WritableMetadata.values())\n.mapToInt(\nm -> {\nfinal int pos = metadataKeys.indexOf(m.key);\nif (pos < 0) {\nreturn -1;\n}\nreturn physicalChildren.size() + pos;\n})\n.toArray();\n}\nprivate boolean hasMetadata() {\nreturn metadataKeys.size() > 0;\n}\nprivate RowData.FieldGetter[] getFieldGetters(\nList physicalChildren, int[] keyProjection) {\nreturn Arrays.stream(keyProjection)\n.mapToObj(\ntargetField ->\nRowData.createFieldGetter(\nphysicalChildren.get(targetField), targetField))\n.toArray(RowData.FieldGetter[]::new);\n}\nprivate @Nullable SerializationSchema createSerialization(\nDynamicTableSink.Context context,\n@Nullable EncodingFormat> format,\nint[] projection,\n@Nullable String prefix) {\nif (format == null) {\nreturn null;\n}\nDataType physicalFormatDataType =\nDataTypeUtils.projectRow(this.physicalDataType, projection);\nif (prefix != null) {\nphysicalFormatDataType = DataTypeUtils.stripRowPrefix(physicalFormatDataType, prefix);\n}\nreturn format.createRuntimeEncoder(context, physicalFormatDataType);\n}\nenum WritableMetadata {\nHEADERS(\n\"headers\",\nDataTypes.MAP(DataTypes.STRING().nullable(), DataTypes.BYTES().nullable())\n.nullable(),\nnew MetadataConverter() {\nprivate static final long serialVersionUID = 1L;\n@Override\npublic Object read(RowData row, int pos) {\nif (row.isNullAt(pos)) {\nreturn null;\n}\nfinal MapData map = row.getMap(pos);\nfinal ArrayData keyArray = map.keyArray();\nfinal ArrayData valueArray = map.valueArray();\nfinal List
headers = new ArrayList<>();\nfor (int i = 0; i < keyArray.size(); i++) {\nif (!keyArray.isNullAt(i) && !valueArray.isNullAt(i)) {\nfinal String key = keyArray.getString(i).toString();\nfinal byte[] value = valueArray.getBinary(i);\nheaders.add(new KafkaHeader(key, value));\n}\n}\nreturn headers;\n}\n}),\nTIMESTAMP(\n\"timestamp\",\nDataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).nullable(),\nnew MetadataConverter() {\nprivate static final long serialVersionUID = 1L;\n@Override\npublic Object read(RowData row, int pos) {\nif (row.isNullAt(pos)) {\nreturn null;\n}\nreturn row.getTimestamp(pos, 3).getMillisecond();\n}\n});\nfinal String key;\nfinal DataType dataType;\nfinal MetadataConverter converter;\nWritableMetadata(String key, DataType dataType, MetadataConverter converter) {\nthis.key = key;\nthis.dataType = dataType;\nthis.converter = converter;\n}\n}\ninterface MetadataConverter extends Serializable {\nObject read(RowData consumedRow, int pos);\n}\nprivate static class KafkaHeader implements Header {\nprivate final String key;\nprivate final byte[] value;\nKafkaHeader(String key, byte[] value) {\nthis.key = key;\nthis.value = value;\n}\n@Override\npublic String key() {\nreturn key;\n}\n@Override\npublic byte[] value() {\nreturn value;\n}\n}\n}", + "context_after": "class KafkaDynamicSink implements DynamicTableSink, SupportsWritingMetadata {\n/** Metadata that is appended at the end of a physical sink row. */\nprotected List metadataKeys;\n/** Data type of consumed data type. */\nprotected DataType consumedDataType;\n/** Data type to configure the formats. */\nprotected final DataType physicalDataType;\n/** Optional format for encoding keys to Kafka. */\nprotected final @Nullable EncodingFormat> keyEncodingFormat;\n/** Format for encoding values to Kafka. */\nprotected final EncodingFormat> valueEncodingFormat;\n/** Indices that determine the key fields and the source position in the consumed row. */\nprotected final int[] keyProjection;\n/** Indices that determine the value fields and the source position in the consumed row. */\nprotected final int[] valueProjection;\n/** Prefix that needs to be removed from fields when constructing the physical data type. */\nprotected final @Nullable String keyPrefix;\n/** The defined delivery guarantee. */\nprivate final DeliveryGuarantee deliveryGuarantee;\n/**\n* If the {@link\n* prefix for all ids of opened Kafka transactions.\n*/\n@Nullable private final String transactionalIdPrefix;\n/** The Kafka topic to write to. */\nprotected final String topic;\n/** Properties for the Kafka producer. */\nprotected final Properties properties;\n/** Partitioner to select Kafka partition for each item. */\nprotected final @Nullable FlinkKafkaPartitioner partitioner;\n/**\n* Flag to determine sink mode. In upsert mode sink transforms the delete/update-before message\n* to tombstone message.\n*/\nprotected final boolean upsertMode;\n/** Sink buffer flush config which only supported in upsert mode now. */\nprotected final SinkBufferFlushMode flushMode;\n/** Parallelism of the physical Kafka producer. 
* */\nprotected final @Nullable Integer parallelism;\npublic KafkaDynamicSink(\nDataType consumedDataType,\nDataType physicalDataType,\n@Nullable EncodingFormat> keyEncodingFormat,\nEncodingFormat> valueEncodingFormat,\nint[] keyProjection,\nint[] valueProjection,\n@Nullable String keyPrefix,\nString topic,\nProperties properties,\n@Nullable FlinkKafkaPartitioner partitioner,\nDeliveryGuarantee deliveryGuarantee,\nboolean upsertMode,\nSinkBufferFlushMode flushMode,\n@Nullable Integer parallelism,\n@Nullable String transactionalIdPrefix) {\nthis.consumedDataType =\ncheckNotNull(consumedDataType, \"Consumed data type must not be null.\");\nthis.physicalDataType =\ncheckNotNull(physicalDataType, \"Physical data type must not be null.\");\nthis.keyEncodingFormat = keyEncodingFormat;\nthis.valueEncodingFormat =\ncheckNotNull(valueEncodingFormat, \"Value encoding format must not be null.\");\nthis.keyProjection = checkNotNull(keyProjection, \"Key projection must not be null.\");\nthis.valueProjection = checkNotNull(valueProjection, \"Value projection must not be null.\");\nthis.keyPrefix = keyPrefix;\nthis.transactionalIdPrefix = transactionalIdPrefix;\nthis.metadataKeys = Collections.emptyList();\nthis.topic = checkNotNull(topic, \"Topic must not be null.\");\nthis.properties = checkNotNull(properties, \"Properties must not be null.\");\nthis.partitioner = partitioner;\nthis.deliveryGuarantee =\ncheckNotNull(deliveryGuarantee, \"DeliveryGuarantee must not be null.\");\nthis.upsertMode = upsertMode;\nthis.flushMode = checkNotNull(flushMode);\nif (flushMode.isEnabled() && !upsertMode) {\nthrow new IllegalArgumentException(\n\"Sink buffer flush is only supported in upsert-kafka.\");\n}\nthis.parallelism = parallelism;\n}\n@Override\npublic ChangelogMode getChangelogMode(ChangelogMode requestedMode) {\nreturn valueEncodingFormat.getChangelogMode();\n}\n@Override\n@Override\npublic Map listWritableMetadata() {\nfinal Map metadataMap = new LinkedHashMap<>();\nStream.of(WritableMetadata.values())\n.forEachOrdered(m -> metadataMap.put(m.key, m.dataType));\nreturn metadataMap;\n}\n@Override\npublic void applyWritableMetadata(List metadataKeys, DataType consumedDataType) {\nthis.metadataKeys = metadataKeys;\nthis.consumedDataType = consumedDataType;\n}\n@Override\npublic DynamicTableSink copy() {\nfinal KafkaDynamicSink copy =\nnew KafkaDynamicSink(\nconsumedDataType,\nphysicalDataType,\nkeyEncodingFormat,\nvalueEncodingFormat,\nkeyProjection,\nvalueProjection,\nkeyPrefix,\ntopic,\nproperties,\npartitioner,\ndeliveryGuarantee,\nupsertMode,\nflushMode,\nparallelism,\ntransactionalIdPrefix);\ncopy.metadataKeys = metadataKeys;\nreturn copy;\n}\n@Override\npublic String asSummaryString() {\nreturn \"Kafka table sink\";\n}\n@Override\npublic boolean equals(Object o) {\nif (this == o) {\nreturn true;\n}\nif (o == null || getClass() != o.getClass()) {\nreturn false;\n}\nfinal KafkaDynamicSink that = (KafkaDynamicSink) o;\nreturn Objects.equals(metadataKeys, that.metadataKeys)\n&& Objects.equals(consumedDataType, that.consumedDataType)\n&& Objects.equals(physicalDataType, that.physicalDataType)\n&& Objects.equals(keyEncodingFormat, that.keyEncodingFormat)\n&& Objects.equals(valueEncodingFormat, that.valueEncodingFormat)\n&& Arrays.equals(keyProjection, that.keyProjection)\n&& Arrays.equals(valueProjection, that.valueProjection)\n&& Objects.equals(keyPrefix, that.keyPrefix)\n&& Objects.equals(topic, that.topic)\n&& Objects.equals(properties, that.properties)\n&& Objects.equals(partitioner, that.partitioner)\n&& 
Objects.equals(deliveryGuarantee, that.deliveryGuarantee)\n&& Objects.equals(upsertMode, that.upsertMode)\n&& Objects.equals(flushMode, that.flushMode)\n&& Objects.equals(transactionalIdPrefix, that.transactionalIdPrefix)\n&& Objects.equals(parallelism, that.parallelism);\n}\n@Override\npublic int hashCode() {\nreturn Objects.hash(\nmetadataKeys,\nconsumedDataType,\nphysicalDataType,\nkeyEncodingFormat,\nvalueEncodingFormat,\nkeyProjection,\nvalueProjection,\nkeyPrefix,\ntopic,\nproperties,\npartitioner,\ndeliveryGuarantee,\nupsertMode,\nflushMode,\ntransactionalIdPrefix,\nparallelism);\n}\nprivate TypeSerializer createRowDataTypeSerializer(\nContext context, ExecutionConfig executionConfig) {\nfinal TypeInformation typeInformation =\ncontext.createTypeInformation(consumedDataType);\nreturn typeInformation.createSerializer(executionConfig);\n}\nprivate int[] getMetadataPositions(List physicalChildren) {\nreturn Stream.of(WritableMetadata.values())\n.mapToInt(\nm -> {\nfinal int pos = metadataKeys.indexOf(m.key);\nif (pos < 0) {\nreturn -1;\n}\nreturn physicalChildren.size() + pos;\n})\n.toArray();\n}\nprivate boolean hasMetadata() {\nreturn metadataKeys.size() > 0;\n}\nprivate RowData.FieldGetter[] getFieldGetters(\nList physicalChildren, int[] keyProjection) {\nreturn Arrays.stream(keyProjection)\n.mapToObj(\ntargetField ->\nRowData.createFieldGetter(\nphysicalChildren.get(targetField), targetField))\n.toArray(RowData.FieldGetter[]::new);\n}\nprivate @Nullable SerializationSchema createSerialization(\nDynamicTableSink.Context context,\n@Nullable EncodingFormat> format,\nint[] projection,\n@Nullable String prefix) {\nif (format == null) {\nreturn null;\n}\nDataType physicalFormatDataType =\nDataTypeUtils.projectRow(this.physicalDataType, projection);\nif (prefix != null) {\nphysicalFormatDataType = DataTypeUtils.stripRowPrefix(physicalFormatDataType, prefix);\n}\nreturn format.createRuntimeEncoder(context, physicalFormatDataType);\n}\nenum WritableMetadata {\nHEADERS(\n\"headers\",\nDataTypes.MAP(DataTypes.STRING().nullable(), DataTypes.BYTES().nullable())\n.nullable(),\nnew MetadataConverter() {\nprivate static final long serialVersionUID = 1L;\n@Override\npublic Object read(RowData row, int pos) {\nif (row.isNullAt(pos)) {\nreturn null;\n}\nfinal MapData map = row.getMap(pos);\nfinal ArrayData keyArray = map.keyArray();\nfinal ArrayData valueArray = map.valueArray();\nfinal List
headers = new ArrayList<>();\nfor (int i = 0; i < keyArray.size(); i++) {\nif (!keyArray.isNullAt(i) && !valueArray.isNullAt(i)) {\nfinal String key = keyArray.getString(i).toString();\nfinal byte[] value = valueArray.getBinary(i);\nheaders.add(new KafkaHeader(key, value));\n}\n}\nreturn headers;\n}\n}),\nTIMESTAMP(\n\"timestamp\",\nDataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).nullable(),\nnew MetadataConverter() {\nprivate static final long serialVersionUID = 1L;\n@Override\npublic Object read(RowData row, int pos) {\nif (row.isNullAt(pos)) {\nreturn null;\n}\nreturn row.getTimestamp(pos, 3).getMillisecond();\n}\n});\nfinal String key;\nfinal DataType dataType;\nfinal MetadataConverter converter;\nWritableMetadata(String key, DataType dataType, MetadataConverter converter) {\nthis.key = key;\nthis.dataType = dataType;\nthis.converter = converter;\n}\n}\ninterface MetadataConverter extends Serializable {\nObject read(RowData consumedRow, int pos);\n}\nprivate static class KafkaHeader implements Header {\nprivate final String key;\nprivate final byte[] value;\nKafkaHeader(String key, byte[] value) {\nthis.key = key;\nthis.value = value;\n}\n@Override\npublic String key() {\nreturn key;\n}\n@Override\npublic byte[] value() {\nreturn value;\n}\n}\n}" + }, + { + "comment": "These shouldn't have to change. Input-output models should have setter overloads for both varargs and List. ", + "method_body": "public static AnalyzeTextOptions map(com.azure.search.documents.indexes.implementation.models.AnalyzeRequest obj) {\nif (obj == null) {\nreturn null;\n}\nAnalyzeTextOptions analyzeTextOptions = null;\nif (obj.getTokenizer() != null) {\nLexicalTokenizerName tokenizer = LexicalTokenizerNameConverter.map(obj.getTokenizer());\nanalyzeTextOptions = new AnalyzeTextOptions(obj.getText(), tokenizer);\n} else {\nLexicalAnalyzerName analyzer = LexicalAnalyzerNameConverter.map(obj.getAnalyzer());\nanalyzeTextOptions = new AnalyzeTextOptions(obj.getText(), analyzer);\n}\nif (obj.getCharFilters() != null) {\nCharFilterName[] charFilters = obj.getCharFilters().stream()\n.map(CharFilterNameConverter::map)\n.toArray(CharFilterName[]::new);\nanalyzeTextOptions.setCharFilters(charFilters);\n}\nif (obj.getTokenFilters() != null) {\nTokenFilterName[] tokenFilters = obj.getTokenFilters().stream()\n.map(TokenFilterNameConverter::map)\n.toArray(TokenFilterName[]::new);\nanalyzeTextOptions.setTokenFilters(tokenFilters);\n}\nreturn analyzeTextOptions;\n}", + "target_code": ".toArray(TokenFilterName[]::new);", + "method_body_after": "public static AnalyzeTextOptions map(com.azure.search.documents.indexes.implementation.models.AnalyzeRequest obj) {\nif (obj == null) {\nreturn null;\n}\nAnalyzeTextOptions analyzeTextOptions = null;\nif (obj.getTokenizer() != null) {\nLexicalTokenizerName tokenizer = LexicalTokenizerNameConverter.map(obj.getTokenizer());\nanalyzeTextOptions = new AnalyzeTextOptions(obj.getText(), tokenizer);\n} else {\nLexicalAnalyzerName analyzer = LexicalAnalyzerNameConverter.map(obj.getAnalyzer());\nanalyzeTextOptions = new AnalyzeTextOptions(obj.getText(), analyzer);\n}\nif (obj.getCharFilters() != null) {\nanalyzeTextOptions.setCharFilters(obj.getCharFilters().stream()\n.map(CharFilterNameConverter::map)\n.toArray(CharFilterName[]::new));\n}\nif (obj.getTokenFilters() != null) {\nanalyzeTextOptions.setTokenFilters(obj.getTokenFilters().stream()\n.map(TokenFilterNameConverter::map)\n.toArray(TokenFilterName[]::new));\n}\nreturn analyzeTextOptions;\n}", + "context_before": "class AnalyzeRequestConverter 
{\n/**\n* Maps from {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest} to {@link AnalyzeTextOptions}.\n*/\n/**\n* Maps from {@link AnalyzeTextOptions} to {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest}.\n*/\npublic static com.azure.search.documents.indexes.implementation.models.AnalyzeRequest map(AnalyzeTextOptions obj) {\nif (obj == null) {\nreturn null;\n}\ncom.azure.search.documents.indexes.implementation.models.AnalyzeRequest analyzeRequest =\nnew com.azure.search.documents.indexes.implementation.models.AnalyzeRequest(obj.getText());\nif (obj.getCharFilters() != null) {\nList charFilters =\nobj.getCharFilters().stream().map(CharFilterNameConverter::map).collect(Collectors.toList());\nanalyzeRequest.setCharFilters(charFilters);\n}\nif (obj.getAnalyzerName() != null) {\ncom.azure.search.documents.indexes.implementation.models.LexicalAnalyzerName analyzer =\nLexicalAnalyzerNameConverter.map(obj.getAnalyzerName());\nanalyzeRequest.setAnalyzer(analyzer);\n}\nif (obj.getTokenFilters() != null) {\nList tokenFilters =\nobj.getTokenFilters().stream().map(TokenFilterNameConverter::map).collect(Collectors.toList());\nanalyzeRequest.setTokenFilters(tokenFilters);\n}\nif (obj.getTokenizerName() != null) {\ncom.azure.search.documents.indexes.implementation.models.LexicalTokenizerName tokenizer =\nLexicalTokenizerNameConverter.map(obj.getTokenizerName());\nanalyzeRequest.setTokenizer(tokenizer);\n}\nanalyzeRequest.validate();\nreturn analyzeRequest;\n}\nprivate AnalyzeRequestConverter() {\n}\n}", + "context_after": "class AnalyzeRequestConverter {\n/**\n* Maps from {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest} to {@link AnalyzeTextOptions}.\n*/\n/**\n* Maps from {@link AnalyzeTextOptions} to {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest}.\n*/\npublic static com.azure.search.documents.indexes.implementation.models.AnalyzeRequest map(AnalyzeTextOptions obj) {\nif (obj == null) {\nreturn null;\n}\ncom.azure.search.documents.indexes.implementation.models.AnalyzeRequest analyzeRequest =\nnew com.azure.search.documents.indexes.implementation.models.AnalyzeRequest(obj.getText());\nif (obj.getCharFilters() != null) {\nList charFilters =\nobj.getCharFilters().stream().map(CharFilterNameConverter::map).collect(Collectors.toList());\nanalyzeRequest.setCharFilters(charFilters);\n}\nif (obj.getAnalyzerName() != null) {\ncom.azure.search.documents.indexes.implementation.models.LexicalAnalyzerName analyzer =\nLexicalAnalyzerNameConverter.map(obj.getAnalyzerName());\nanalyzeRequest.setAnalyzer(analyzer);\n}\nif (obj.getTokenFilters() != null) {\nList tokenFilters =\nobj.getTokenFilters().stream().map(TokenFilterNameConverter::map).collect(Collectors.toList());\nanalyzeRequest.setTokenFilters(tokenFilters);\n}\nif (obj.getTokenizerName() != null) {\ncom.azure.search.documents.indexes.implementation.models.LexicalTokenizerName tokenizer =\nLexicalTokenizerNameConverter.map(obj.getTokenizerName());\nanalyzeRequest.setTokenizer(tokenizer);\n}\nanalyzeRequest.validate();\nreturn analyzeRequest;\n}\nprivate AnalyzeRequestConverter() {\n}\n}" + }, + { + "comment": "updated.", + "method_body": "public void addLinkTest() {\nfinal RecordEventsSpanImpl testSpan =\n(RecordEventsSpanImpl) tracer.spanBuilder(\"new-test-span\").startSpan();\nfinal Context traceContext = tracingContext.addData(SPAN_CONTEXT_KEY, testSpan.getContext());\nfinal RecordEventsSpanImpl parentSpanImpl = (RecordEventsSpanImpl) 
parentSpan;\nopenCensusTracer.addLink(traceContext);\nAssert.assertEquals(parentSpanImpl.toSpanData().getContext().getTraceId(),\ntestSpan.toSpanData().getContext().getTraceId());\n}", + "target_code": "Assert.assertEquals(parentSpanImpl.toSpanData().getContext().getTraceId(),", + "method_body_after": "public void addLinkTest() {\nfinal RecordEventsSpanImpl testSpan =\n(RecordEventsSpanImpl) tracer.spanBuilder(\"new-test-span\").startSpan();\nfinal Context traceContext = tracingContext.addData(SPAN_CONTEXT_KEY, testSpan.getContext());\nfinal RecordEventsSpanImpl parentSpanImpl = (RecordEventsSpanImpl) parentSpan;\nfinal Link expectedLink = Link.fromSpanContext(testSpan.getContext(), Link.Type.PARENT_LINKED_SPAN);\nopenCensusTracer.addLink(traceContext);\nLink createdLink = parentSpanImpl.toSpanData().getLinks().getLinks().get(0);\nAssert.assertEquals(expectedLink.getTraceId(), createdLink.getTraceId());\nAssert.assertEquals(expectedLink.getSpanId(), createdLink.getSpanId());\n}", + "context_before": "class OpenCensusTracerTest {\nprivate static final String METHOD_NAME = \"Azure.eventhubs.send\";\nprivate static final String HOSTNAME_VALUE = \"testEventDataNameSpace.servicebus.windows.net\";\nprivate static final String ENTITY_PATH_VALUE = \"test\";\nprivate static final String COMPONENT_VALUE = \"eventhubs\";\nprivate OpenCensusTracer openCensusTracer;\nprivate Tracer tracer;\nprivate Context tracingContext;\nprivate Span parentSpan;\nprivate io.opencensus.common.Scope scope;\n@Before\npublic void setUp() {\nSystem.out.println(\"Running: setUp\");\nopenCensusTracer = new OpenCensusTracer();\nfinal TraceConfig traceConfig = Tracing.getTraceConfig();\nfinal TraceParams activeTraceParams = traceConfig.getActiveTraceParams();\ntraceConfig.updateActiveTraceParams(activeTraceParams.toBuilder().setSampler(Samplers.alwaysSample()).build());\ntracer = Tracing.getTracer();\nscope = tracer.spanBuilder(PARENT_SPAN_KEY).startScopedSpan();\nparentSpan = tracer.getCurrentSpan();\ntracingContext = new Context(PARENT_SPAN_KEY, parentSpan);\n}\n@After\npublic void tearDown() {\nSystem.out.println(\"Running: tearDown\");\ntracer = null;\ntracingContext = null;\nAssert.assertNull(tracer);\nAssert.assertNull(tracingContext);\nscope.close();\n}\n@Test(expected = NullPointerException.class)\npublic void startSpanNullPointerException() {\nopenCensusTracer.start(\"\", null);\n}\n@Test\npublic void startSpanParentContextFlowTest() {\nfinal SpanId parentSpanId = parentSpan.getContext().getSpanId();\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, tracingContext);\nassertSpanWithExplicitParent(updatedContext, parentSpanId);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertNull(recordEventsSpan.getKind());\n}\n@Test\npublic void startSpanTestNoUserParent() {\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, Context.NONE);\nAssert.assertNotNull(updatedContext.getData(PARENT_SPAN_KEY));\nAssert.assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof RecordEventsSpanImpl);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertEquals(METHOD_NAME, recordEventsSpan.getName());\nAssert.assertFalse(recordEventsSpan.toSpanData().getHasRemoteParent());\nAssert.assertNotNull(recordEventsSpan.toSpanData().getParentSpanId());\n}\n@Test\npublic void startSpanProcessKindSend() {\nfinal SpanId parentSpanId = 
parentSpan.getContext().getSpanId();\nfinal Context traceContext = tracingContext.addData(ENTITY_PATH_KEY, ENTITY_PATH_VALUE)\n.addData(HOST_NAME_KEY, HOSTNAME_VALUE);\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, traceContext, ProcessKind.SEND);\nassertSpanWithExplicitParent(updatedContext, parentSpanId);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertEquals(Span.Kind.CLIENT, recordEventsSpan.getKind());\nfinal Map attributeMap = recordEventsSpan.toSpanData().getAttributes()\n.getAttributeMap();\nAssert.assertEquals(attributeMap.get(COMPONENT), AttributeValue.stringAttributeValue(COMPONENT_VALUE));\nAssert.assertEquals(attributeMap.get(MESSAGE_BUS_DESTINATION),\nAttributeValue.stringAttributeValue(ENTITY_PATH_VALUE));\nAssert.assertEquals(attributeMap.get(PEER_ENDPOINT), AttributeValue.stringAttributeValue(HOSTNAME_VALUE));\n}\n@Test\npublic void startSpanProcessKindReceive() {\nfinal SpanId parentSpanId = parentSpan.getContext().getSpanId();\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, tracingContext, ProcessKind.RECEIVE);\nassertSpanWithExplicitParent(updatedContext, parentSpanId);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertNull(recordEventsSpan.getKind());\nAssert.assertNotNull(updatedContext.getData(SPAN_CONTEXT_KEY).get());\nAssert.assertNotNull(updatedContext.getData(DIAGNOSTIC_ID_KEY).get());\n}\n@Test\npublic void startSpanProcessKindProcess() {\nfinal SpanId parentSpanId = parentSpan.getContext().getSpanId();\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, tracingContext, ProcessKind.PROCESS);\nAssert.assertFalse(\"When no parent span passed in context information\",\ntracingContext.getData(SPAN_CONTEXT_KEY).isPresent());\nassertSpanWithExplicitParent(updatedContext, parentSpanId);\nAssert.assertNotNull(updatedContext.getData(\"scope\").get());\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertEquals(Span.Kind.SERVER, recordEventsSpan.getKind());\n}\n@Test\npublic void startProcessSpanWithRemoteParent() {\nfinal Span testSpan = tracer.spanBuilder(\"child-span\").startSpan();\nfinal SpanId testSpanId = testSpan.getContext().getSpanId();\nfinal Context traceContext = tracingContext.addData(SPAN_CONTEXT_KEY, testSpan.getContext());\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, traceContext, ProcessKind.PROCESS);\nAssert.assertNotNull(updatedContext.getData(\"scope\").get());\nassertSpanWithRemoteParent(updatedContext, testSpanId);\n}\n@Test(expected = NullPointerException.class)\npublic void startSpanOverloadNullPointerException() {\nopenCensusTracer.start(\"\", Context.NONE, null);\n}\n@Test\n@Test\npublic void endSpanNoSuccessErrorMessageTest() {\nfinal RecordEventsSpanImpl recordEventsSpan = (RecordEventsSpanImpl) tracer.getCurrentSpan();\nfinal String expectedStatus = \"UNKNOWN\";\nopenCensusTracer.end(null, null, tracingContext);\nAssert.assertEquals(expectedStatus, recordEventsSpan.getStatus().getCanonicalCode().toString());\n}\n@Test\npublic void endSpanErrorMessageTest() {\nfinal RecordEventsSpanImpl recordEventsSpan = (RecordEventsSpanImpl) tracer.getCurrentSpan();\nfinal String throwableMessage = \"custom error message\";\nfinal String expectedStatus = \"UNKNOWN\";\nopenCensusTracer.end(null, new Throwable(throwableMessage), 
tracingContext);\nAssert.assertEquals(expectedStatus, recordEventsSpan.getStatus().getCanonicalCode().toString());\nAssert.assertEquals(throwableMessage, recordEventsSpan.getStatus().getDescription());\n}\n@Test\npublic void endSpanTestThrowableResponseCode() {\nfinal RecordEventsSpanImpl recordEventsSpan = (RecordEventsSpanImpl) tracer.getCurrentSpan();\nfinal String throwableMessage = \"Resource not found\";\nfinal String expectedStatus = \"NOT_FOUND\";\nopenCensusTracer.end(404, new Throwable(throwableMessage), tracingContext);\nAssert.assertEquals(expectedStatus, recordEventsSpan.getStatus().getCanonicalCode().toString());\nAssert.assertEquals(throwableMessage, recordEventsSpan.getStatus().getDescription());\n}\nprivate static void assertSpanWithExplicitParent(Context updatedContext, SpanId parentSpanId) {\nAssert.assertNotNull(updatedContext.getData(PARENT_SPAN_KEY));\nAssert.assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof RecordEventsSpanImpl);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertEquals(METHOD_NAME, recordEventsSpan.getName());\nAssert.assertFalse(recordEventsSpan.toSpanData().getHasRemoteParent());\nAssert.assertEquals(parentSpanId, recordEventsSpan.toSpanData().getParentSpanId());\n}\nprivate static void assertSpanWithRemoteParent(Context updatedContext, SpanId parentSpanId) {\nAssert.assertNotNull(updatedContext.getData(PARENT_SPAN_KEY));\nAssert.assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof RecordEventsSpanImpl);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertEquals(METHOD_NAME, recordEventsSpan.getName());\nAssert.assertEquals(Span.Kind.SERVER, recordEventsSpan.getKind());\nAssert.assertTrue(recordEventsSpan.toSpanData().getHasRemoteParent());\nAssert.assertEquals(parentSpanId, recordEventsSpan.toSpanData().getParentSpanId());\n}\n}", + "context_after": "class OpenCensusTracerTest {\nprivate static final String METHOD_NAME = \"Azure.eventhubs.send\";\nprivate static final String HOSTNAME_VALUE = \"testEventDataNameSpace.servicebus.windows.net\";\nprivate static final String ENTITY_PATH_VALUE = \"test\";\nprivate static final String COMPONENT_VALUE = \"eventhubs\";\nprivate OpenCensusTracer openCensusTracer;\nprivate Tracer tracer;\nprivate Context tracingContext;\nprivate Span parentSpan;\nprivate io.opencensus.common.Scope scope;\n@Before\npublic void setUp() {\nSystem.out.println(\"Running: setUp\");\nopenCensusTracer = new OpenCensusTracer();\nfinal TraceConfig traceConfig = Tracing.getTraceConfig();\nfinal TraceParams activeTraceParams = traceConfig.getActiveTraceParams();\ntraceConfig.updateActiveTraceParams(activeTraceParams.toBuilder().setSampler(Samplers.alwaysSample()).build());\ntracer = Tracing.getTracer();\nscope = tracer.spanBuilder(PARENT_SPAN_KEY).startScopedSpan();\nparentSpan = tracer.getCurrentSpan();\ntracingContext = new Context(PARENT_SPAN_KEY, parentSpan);\n}\n@After\npublic void tearDown() {\nSystem.out.println(\"Running: tearDown\");\ntracer = null;\ntracingContext = null;\nAssert.assertNull(tracer);\nAssert.assertNull(tracingContext);\nscope.close();\n}\n@Test(expected = NullPointerException.class)\npublic void startSpanNullPointerException() {\nopenCensusTracer.start(\"\", null);\n}\n@Test\npublic void startSpanParentContextFlowTest() {\nfinal SpanId parentSpanId = parentSpan.getContext().getSpanId();\nfinal Context updatedContext = 
openCensusTracer.start(METHOD_NAME, tracingContext);\nassertSpanWithExplicitParent(updatedContext, parentSpanId);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertNull(recordEventsSpan.getKind());\n}\n@Test\npublic void startSpanTestNoUserParent() {\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, Context.NONE);\nAssert.assertNotNull(updatedContext.getData(PARENT_SPAN_KEY));\nAssert.assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof RecordEventsSpanImpl);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertEquals(METHOD_NAME, recordEventsSpan.getName());\nAssert.assertFalse(recordEventsSpan.toSpanData().getHasRemoteParent());\nAssert.assertNotNull(recordEventsSpan.toSpanData().getParentSpanId());\n}\n@Test\npublic void startSpanProcessKindSend() {\nfinal SpanId parentSpanId = parentSpan.getContext().getSpanId();\nfinal Context traceContext = tracingContext.addData(ENTITY_PATH_KEY, ENTITY_PATH_VALUE)\n.addData(HOST_NAME_KEY, HOSTNAME_VALUE);\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, traceContext, ProcessKind.SEND);\nassertSpanWithExplicitParent(updatedContext, parentSpanId);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertEquals(Span.Kind.CLIENT, recordEventsSpan.getKind());\nfinal Map attributeMap = recordEventsSpan.toSpanData().getAttributes()\n.getAttributeMap();\nAssert.assertEquals(attributeMap.get(COMPONENT), AttributeValue.stringAttributeValue(COMPONENT_VALUE));\nAssert.assertEquals(attributeMap.get(MESSAGE_BUS_DESTINATION),\nAttributeValue.stringAttributeValue(ENTITY_PATH_VALUE));\nAssert.assertEquals(attributeMap.get(PEER_ENDPOINT), AttributeValue.stringAttributeValue(HOSTNAME_VALUE));\n}\n@Test\npublic void startSpanProcessKindMessage() {\nfinal SpanId parentSpanId = parentSpan.getContext().getSpanId();\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, tracingContext, ProcessKind.MESSAGE);\nassertSpanWithExplicitParent(updatedContext, parentSpanId);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertNull(recordEventsSpan.getKind());\nAssert.assertNotNull(updatedContext.getData(SPAN_CONTEXT_KEY).get());\nAssert.assertNotNull(updatedContext.getData(DIAGNOSTIC_ID_KEY).get());\n}\n@Test\npublic void startSpanProcessKindProcess() {\nfinal SpanId parentSpanId = parentSpan.getContext().getSpanId();\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, tracingContext, ProcessKind.PROCESS);\nAssert.assertFalse(\"When no parent span passed in context information\",\ntracingContext.getData(SPAN_CONTEXT_KEY).isPresent());\nassertSpanWithExplicitParent(updatedContext, parentSpanId);\nAssert.assertNotNull(updatedContext.getData(\"scope\").get());\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertEquals(Span.Kind.SERVER, recordEventsSpan.getKind());\n}\n@Test\npublic void startProcessSpanWithRemoteParent() {\nfinal Span testSpan = tracer.spanBuilder(\"child-span\").startSpan();\nfinal SpanId testSpanId = testSpan.getContext().getSpanId();\nfinal Context traceContext = tracingContext.addData(SPAN_CONTEXT_KEY, testSpan.getContext());\nfinal Context updatedContext = openCensusTracer.start(METHOD_NAME, 
traceContext, ProcessKind.PROCESS);\nAssert.assertNotNull(updatedContext.getData(\"scope\").get());\nassertSpanWithRemoteParent(updatedContext, testSpanId);\n}\n@Test(expected = NullPointerException.class)\npublic void startSpanOverloadNullPointerException() {\nopenCensusTracer.start(\"\", Context.NONE, null);\n}\n@Test\n@Test\npublic void endSpanNoSuccessErrorMessageTest() {\nfinal RecordEventsSpanImpl recordEventsSpan = (RecordEventsSpanImpl) tracer.getCurrentSpan();\nfinal String expectedStatus = \"UNKNOWN\";\nopenCensusTracer.end(null, null, tracingContext);\nAssert.assertEquals(expectedStatus, recordEventsSpan.getStatus().getCanonicalCode().toString());\n}\n@Test\npublic void endSpanErrorMessageTest() {\nfinal RecordEventsSpanImpl recordEventsSpan = (RecordEventsSpanImpl) tracer.getCurrentSpan();\nfinal String throwableMessage = \"custom error message\";\nfinal String expectedStatus = \"UNKNOWN\";\nopenCensusTracer.end(null, new Throwable(throwableMessage), tracingContext);\nAssert.assertEquals(expectedStatus, recordEventsSpan.getStatus().getCanonicalCode().toString());\nAssert.assertEquals(throwableMessage, recordEventsSpan.getStatus().getDescription());\n}\n@Test\npublic void endSpanTestThrowableResponseCode() {\nfinal RecordEventsSpanImpl recordEventsSpan = (RecordEventsSpanImpl) tracer.getCurrentSpan();\nfinal String throwableMessage = \"Resource not found\";\nfinal String expectedStatus = \"NOT_FOUND\";\nopenCensusTracer.end(404, new Throwable(throwableMessage), tracingContext);\nAssert.assertEquals(expectedStatus, recordEventsSpan.getStatus().getCanonicalCode().toString());\nAssert.assertEquals(throwableMessage, recordEventsSpan.getStatus().getDescription());\n}\nprivate static void assertSpanWithExplicitParent(Context updatedContext, SpanId parentSpanId) {\nAssert.assertNotNull(updatedContext.getData(PARENT_SPAN_KEY));\nAssert.assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof RecordEventsSpanImpl);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertEquals(METHOD_NAME, recordEventsSpan.getName());\nAssert.assertFalse(recordEventsSpan.toSpanData().getHasRemoteParent());\nAssert.assertEquals(parentSpanId, recordEventsSpan.toSpanData().getParentSpanId());\n}\nprivate static void assertSpanWithRemoteParent(Context updatedContext, SpanId parentSpanId) {\nAssert.assertNotNull(updatedContext.getData(PARENT_SPAN_KEY));\nAssert.assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof RecordEventsSpanImpl);\nfinal RecordEventsSpanImpl recordEventsSpan =\n(RecordEventsSpanImpl) updatedContext.getData(PARENT_SPAN_KEY).get();\nAssert.assertEquals(METHOD_NAME, recordEventsSpan.getName());\nAssert.assertEquals(Span.Kind.SERVER, recordEventsSpan.getKind());\nAssert.assertTrue(recordEventsSpan.toSpanData().getHasRemoteParent());\nAssert.assertEquals(parentSpanId, recordEventsSpan.toSpanData().getParentSpanId());\n}\n}" + }, + { + "comment": "Who will handle the exception\uff1freturn failed response?", + "method_body": "public TBeginRemoteTxnResponse beginRemoteTxn(TBeginRemoteTxnRequest request) throws TException {\nTBeginRemoteTxnResponse response = new TBeginRemoteTxnResponse();\nCatalog catalog = Catalog.getCurrentCatalog();\nif (!catalog.isMaster()) {\ntry {\nTNetworkAddress addr = masterAddr();\nLOG.info(\"beginRemoteTxn as follower, forward it to master. 
Label: {}, master: {}\",\nrequest.getLabel(), addr.toString());\nFrontendService.Client client = ClientPool.frontendPool.borrowObject(addr, 1000);\nresponse = client.beginRemoteTxn(request);\nClientPool.frontendPool.returnObject(addr, client);\nreturn response;\n} catch (Exception e) {\nLOG.warn(\"create thrift client failed during beginRemoteTxn, label: {}, exception: {}\", request.getLabel(), e);\nthrow new TException(e.getMessage());\n}\n}\nDatabase db = catalog.getDb(request.getDb_id());\nif (db == null) {\nTStatus status = new TStatus(TStatusCode.NOT_FOUND);\nstatus.setError_msgs(Lists.newArrayList(\"db not exist\"));\nresponse.setStatus(status);\nreturn response;\n}\nlong txnId;\ntry {\ntxnId = Catalog.getCurrentGlobalTransactionMgr().beginTransaction(db.getId(),\nrequest.getTable_ids(), request.getLabel(),\nnew TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),\nLoadJobSourceType.valueOf(request.getSource_type()), request.getTimeout_second());\n} catch (Exception e) {\nLOG.info(\"begin remote txn error, label {}, msg {}\", request.getLabel(), e.getStackTrace());\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(e.getMessage()));\nresponse.setStatus(status);\nreturn response;\n}\nTStatus status = new TStatus(TStatusCode.OK);\nresponse.setStatus(status);\nresponse.setTxn_id(txnId);\nresponse.setTxn_label(request.getLabel());\nLOG.info(\"begin remote txn, label: {}, txn_id: {}\", request.getLabel(), txnId);\nreturn response;\n}", + "target_code": "throw new TException(e.getMessage());", + "method_body_after": "public TBeginRemoteTxnResponse beginRemoteTxn(TBeginRemoteTxnRequest request) throws TException {\nTBeginRemoteTxnResponse response = new TBeginRemoteTxnResponse();\nCatalog catalog = Catalog.getCurrentCatalog();\nif (!catalog.isMaster()) {\nTNetworkAddress addr = masterAddr();\nFrontendService.Client client = null;\ntry {\nLOG.info(\"beginRemoteTxn as follower, forward it to master. 
Label: {}, master: {}\",\nrequest.getLabel(), addr.toString());\nclient = ClientPool.frontendPool.borrowObject(addr, 1000);\nresponse = client.beginRemoteTxn(request);\nClientPool.frontendPool.returnObject(addr, client);\n} catch (Exception e) {\nLOG.warn(\"create thrift client failed during beginRemoteTxn, label: {}, exception: {}\", request.getLabel(), e);\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(\"forward request to fe master failed\"));\nresponse.setStatus(status);\nClientPool.frontendPool.invalidateObject(addr, client);\n} finally {\nreturn response;\n}\n}\nDatabase db = catalog.getDb(request.getDb_id());\nif (db == null) {\nTStatus status = new TStatus(TStatusCode.NOT_FOUND);\nstatus.setError_msgs(Lists.newArrayList(\"db not exist\"));\nresponse.setStatus(status);\nreturn response;\n}\nlong txnId;\ntry {\ntxnId = Catalog.getCurrentGlobalTransactionMgr().beginTransaction(db.getId(),\nrequest.getTable_ids(), request.getLabel(),\nnew TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),\nLoadJobSourceType.valueOf(request.getSource_type()), request.getTimeout_second());\n} catch (Exception e) {\nLOG.info(\"begin remote txn error, label {}, msg {}\", request.getLabel(), e.getStackTrace());\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(e.getMessage()));\nresponse.setStatus(status);\nreturn response;\n}\nTStatus status = new TStatus(TStatusCode.OK);\nresponse.setStatus(status);\nresponse.setTxn_id(txnId);\nresponse.setTxn_label(request.getLabel());\nLOG.info(\"begin remote txn, label: {}, txn_id: {}\", request.getLabel(), txnId);\nreturn response;\n}", + "context_before": "class MasterImpl {\nprivate static final Logger LOG = LogManager.getLogger(MasterImpl.class);\nprivate ReportHandler reportHandler = new ReportHandler();\npublic MasterImpl() {\nreportHandler.start();\n}\npublic TMasterResult finishTask(TFinishTaskRequest request) {\nTMasterResult result = new TMasterResult();\nTStatus tStatus = new TStatus(TStatusCode.OK);\nresult.setStatus(tStatus);\nTStatus taskStatus = request.getTask_status();\nLOG.debug(\"get task report: {}\", request.toString());\nif (taskStatus.getStatus_code() != TStatusCode.OK) {\nLOG.warn(\"finish task reports bad. request: {}\", request.toString());\n}\nTBackend tBackend = request.getBackend();\nString host = tBackend.getHost();\nint bePort = tBackend.getBe_port();\nBackend backend = Catalog.getCurrentSystemInfo().getBackendWithBePort(host, bePort);\nif (backend == null) {\ntStatus.setStatus_code(TStatusCode.CANCELLED);\nList errorMsgs = new ArrayList<>();\nerrorMsgs.add(\"backend not exist.\");\ntStatus.setError_msgs(errorMsgs);\nLOG.warn(\"backend does not found. host: {}, be port: {}. task: {}\", host, bePort, request.toString());\nreturn result;\n}\nlong backendId = backend.getId();\nTTaskType taskType = request.getTask_type();\nlong signature = request.getSignature();\nAgentTask task = AgentTaskQueue.getTask(backendId, taskType, signature);\nif (task == null) {\nif (taskType != TTaskType.DROP && taskType != TTaskType.STORAGE_MEDIUM_MIGRATE\n&& taskType != TTaskType.RELEASE_SNAPSHOT && taskType != TTaskType.CLEAR_TRANSACTION_TASK) {\nString errMsg = \"cannot find task. 
type: \" + taskType + \", backendId: \" + backendId\n+ \", signature: \" + signature;\nLOG.warn(errMsg);\ntStatus.setStatus_code(TStatusCode.CANCELLED);\nList errorMsgs = new ArrayList();\nerrorMsgs.add(errMsg);\ntStatus.setError_msgs(errorMsgs);\n}\nif (taskType != TTaskType.STORAGE_MEDIUM_MIGRATE) {\nreturn result;\n}\n} else {\nif (taskStatus.getStatus_code() != TStatusCode.OK) {\ntask.failed();\nString errMsg = \"task type: \" + taskType + \", status_code: \" + taskStatus.getStatus_code().toString() +\n\", backendId: \" + backend + \", signature: \" + signature;\ntask.setErrorMsg(errMsg);\nif (taskType != TTaskType.MAKE_SNAPSHOT && taskType != TTaskType.UPLOAD\n&& taskType != TTaskType.DOWNLOAD && taskType != TTaskType.MOVE\n&& taskType != TTaskType.CLONE && taskType != TTaskType.PUBLISH_VERSION\n&& taskType != TTaskType.CREATE && taskType != TTaskType.UPDATE_TABLET_META_INFO) {\nreturn result;\n}\n}\n}\ntry {\nList finishTabletInfos;\nswitch (taskType) {\ncase CREATE:\nPreconditions.checkState(request.isSetReport_version());\nfinishCreateReplica(task, request);\nbreak;\ncase PUSH:\ncheckHasTabletInfo(request);\nPreconditions.checkState(request.isSetReport_version());\nfinishPush(task, request);\nbreak;\ncase REALTIME_PUSH:\ncheckHasTabletInfo(request);\nPreconditions.checkState(request.isSetReport_version());\nfinishRealtimePush(task, request);\nbreak;\ncase PUBLISH_VERSION:\nfinishPublishVersion(task, request);\nbreak;\ncase CLEAR_ALTER_TASK:\nfinishClearAlterTask(task, request);\nbreak;\ncase DROP:\nfinishDropReplica(task);\nbreak;\ncase SCHEMA_CHANGE:\nPreconditions.checkState(request.isSetReport_version());\ncheckHasTabletInfo(request);\nfinishTabletInfos = request.getFinish_tablet_infos();\nfinishSchemaChange(task, finishTabletInfos, request.getReport_version());\nbreak;\ncase ROLLUP:\ncheckHasTabletInfo(request);\nfinishTabletInfos = request.getFinish_tablet_infos();\nfinishRollup(task, finishTabletInfos);\nbreak;\ncase CLONE:\nfinishClone(task, request);\nbreak;\ncase STORAGE_MEDIUM_MIGRATE:\nfinishStorageMigration(backendId, request);\nbreak;\ncase CHECK_CONSISTENCY:\nfinishConsistenctCheck(task, request);\nbreak;\ncase MAKE_SNAPSHOT:\nfinishMakeSnapshot(task, request);\nbreak;\ncase UPLOAD:\nfinishUpload(task, request);\nbreak;\ncase DOWNLOAD:\nfinishDownloadTask(task, request);\nbreak;\ncase MOVE:\nfinishMoveDirTask(task, request);\nbreak;\ncase RECOVER_TABLET:\nfinishRecoverTablet(task);\nbreak;\ncase ALTER:\nfinishAlterTask(task);\nbreak;\ncase UPDATE_TABLET_META_INFO:\nfinishUpdateTabletMeta(task, request);\nbreak;\ndefault:\nbreak;\n}\n} catch (Exception e) {\ntStatus.setStatus_code(TStatusCode.CANCELLED);\nString errMsg = \"finish agent task error.\";\nLOG.warn(errMsg, e);\nList errorMsgs = new ArrayList();\nerrorMsgs.add(errMsg);\ntStatus.setError_msgs(errorMsgs);\n}\nif (tStatus.getStatus_code() == TStatusCode.OK) {\nLOG.debug(\"report task success. 
{}\", request.toString());\n}\nreturn result;\n}\nprivate void checkHasTabletInfo(TFinishTaskRequest request) throws Exception {\nif (!request.isSetFinish_tablet_infos() || request.getFinish_tablet_infos().isEmpty()) {\nthrow new Exception(\"tablet info is not set\");\n}\n}\nprivate void finishCreateReplica(AgentTask task, TFinishTaskRequest request) {\ntry {\nCreateReplicaTask createReplicaTask = (CreateReplicaTask) task;\nif (request.getTask_status().getStatus_code() != TStatusCode.OK) {\ncreateReplicaTask.countDownToZero(\ntask.getBackendId() + \": \" + request.getTask_status().getError_msgs().toString());\n} else {\nlong tabletId = createReplicaTask.getTabletId();\nTabletInvertedIndex invertedIndex = Catalog.getCurrentInvertedIndex();\nTabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);\nif (!tabletMeta.isUseStarOS() && request.isSetFinish_tablet_infos()) {\nReplica replica = Catalog.getCurrentInvertedIndex().getReplica(tabletId, createReplicaTask.getBackendId());\nreplica.setPathHash(request.getFinish_tablet_infos().get(0).getPath_hash());\nif (createReplicaTask.isRecoverTask()) {\n/*\n* This create replica task may be generated by recovery(See comment of Config.recover_with_empty_tablet)\n* So we set replica back to good.\n*/\nreplica.setBad(false);\nLOG.info(\n\"finish recover create replica task. set replica to good. tablet {}, replica {}, backend {}\",\ntabletId, task.getBackendId(), replica.getId());\n}\n}\nCatalog.getCurrentSystemInfo()\n.updateBackendReportVersion(task.getBackendId(), request.getReport_version(), task.getDbId());\ncreateReplicaTask.countDownLatch(task.getBackendId(), task.getSignature());\nLOG.debug(\"finish create replica. tablet id: {}, be: {}, report version: {}\",\ntabletId, task.getBackendId(), request.getReport_version());\n}\n} finally {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.CREATE, task.getSignature());\n}\n}\nprivate void finishUpdateTabletMeta(AgentTask task, TFinishTaskRequest request) {\ntry {\nUpdateTabletMetaInfoTask tabletTask = (UpdateTabletMetaInfoTask) task;\nif (request.getTask_status().getStatus_code() != TStatusCode.OK) {\ntabletTask.countDownToZero(\ntask.getBackendId() + \": \" + request.getTask_status().getError_msgs().toString());\n} else {\ntabletTask.countDownLatch(task.getBackendId(), tabletTask.getTablets());\nLOG.debug(\"finish update tablet meta. 
tablet id: {}, be: {}\", tabletTask.getTablets(),\ntask.getBackendId());\n}\n} finally {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.UPDATE_TABLET_META_INFO, task.getSignature());\n}\n}\nprivate void finishRealtimePush(AgentTask task, TFinishTaskRequest request) {\nList finishTabletInfos = request.getFinish_tablet_infos();\nPreconditions.checkState(finishTabletInfos != null && !finishTabletInfos.isEmpty());\nPushTask pushTask = (PushTask) task;\nlong dbId = pushTask.getDbId();\nlong backendId = pushTask.getBackendId();\nlong signature = task.getSignature();\nlong transactionId = ((PushTask) task).getTransactionId();\nDatabase db = Catalog.getCurrentCatalog().getDb(dbId);\nif (db == null) {\nAgentTaskQueue.removeTask(backendId, TTaskType.REALTIME_PUSH, signature);\nreturn;\n}\nlong tableId = pushTask.getTableId();\nlong partitionId = pushTask.getPartitionId();\nlong pushIndexId = pushTask.getIndexId();\nlong pushTabletId = pushTask.getTabletId();\nPartitionState pushState = null;\nif (finishTabletInfos.size() == 1) {\npushState = PartitionState.NORMAL;\n} else if (finishTabletInfos.size() == 2) {\nif (finishTabletInfos.get(0).getTablet_id() == finishTabletInfos.get(1).getTablet_id()) {\npushState = PartitionState.SCHEMA_CHANGE;\n} else {\npushState = PartitionState.ROLLUP;\n}\n} else {\nLOG.warn(\"invalid push report infos. finishTabletInfos' size: \" + finishTabletInfos.size());\nreturn;\n}\nLOG.debug(\"push report state: {}\", pushState.name());\ndb.writeLock();\ntry {\nOlapTable olapTable = (OlapTable) db.getTable(tableId);\nif (olapTable == null) {\nthrow new MetaNotFoundException(\"cannot find table[\" + tableId + \"] when push finished\");\n}\nPartition partition = olapTable.getPartition(partitionId);\nif (partition == null) {\nthrow new MetaNotFoundException(\"cannot find partition[\" + partitionId + \"] when push finished\");\n}\nMaterializedIndex pushIndex = partition.getIndex(pushIndexId);\nif (pushIndex == null) {\nthrow new MetaNotFoundException(\"cannot find index[\" + pushIndex + \"] when push finished\");\n}\nlong reportVersion = request.getReport_version();\nCatalog.getCurrentSystemInfo().updateBackendReportVersion(task.getBackendId(), reportVersion,\ntask.getDbId());\nList tabletIds = finishTabletInfos.stream().map(\ntTabletInfo -> tTabletInfo.getTablet_id()).collect(Collectors.toList());\nList tabletMetaList = Catalog.getCurrentInvertedIndex().getTabletMetaList(tabletIds);\nif (pushTask.getPushType() == TPushType.LOAD || pushTask.getPushType() == TPushType.LOAD_DELETE) {\nPreconditions.checkArgument(false, \"LOAD and LOAD_DELETE not supported\");\n} else if (pushTask.getPushType() == TPushType.DELETE) {\nDeleteJob deleteJob = Catalog.getCurrentCatalog().getDeleteHandler().getDeleteJob(transactionId);\nif (deleteJob == null) {\nthrow new MetaNotFoundException(\"cannot find delete job, job[\" + transactionId + \"]\");\n}\nfor (int i = 0; i < tabletMetaList.size(); i++) {\nTabletMeta tabletMeta = tabletMetaList.get(i);\nlong tabletId = tabletIds.get(i);\nReplica replica = findRelatedReplica(olapTable, partition,\nbackendId, tabletId, tabletMeta.getIndexId());\nif (replica != null) {\ndeleteJob.addFinishedReplica(partitionId, pushTabletId, replica);\npushTask.countDownLatch(backendId, pushTabletId);\n}\n}\n} else if (pushTask.getPushType() == TPushType.LOAD_V2) {\nlong loadJobId = pushTask.getLoadJobId();\ncom.starrocks.load.loadv2.LoadJob job =\nCatalog.getCurrentCatalog().getLoadManager().getLoadJob(loadJobId);\nif (job == null) {\nthrow new 
MetaNotFoundException(\"cannot find load job, job[\" + loadJobId + \"]\");\n}\nfor (int i = 0; i < tabletMetaList.size(); i++) {\nTabletMeta tabletMeta = tabletMetaList.get(i);\ncheckReplica(finishTabletInfos.get(i), tabletMeta);\nlong tabletId = tabletIds.get(i);\nReplica replica =\nfindRelatedReplica(olapTable, partition, backendId, tabletId, tabletMeta.getIndexId());\nif (replica != null) {\n((SparkLoadJob) job).addFinishedReplica(replica.getId(), pushTabletId, backendId);\n}\n}\n}\nAgentTaskQueue.removeTask(backendId, TTaskType.REALTIME_PUSH, signature);\nLOG.debug(\"finish push replica. tabletId: {}, backendId: {}\", pushTabletId, backendId);\n} catch (MetaNotFoundException e) {\nAgentTaskQueue.removeTask(backendId, TTaskType.REALTIME_PUSH, signature);\nLOG.warn(\"finish push replica error\", e);\n} finally {\ndb.writeUnlock();\n}\n}\nprivate void checkReplica(TTabletInfo tTabletInfo, TabletMeta tabletMeta)\nthrows MetaNotFoundException {\nlong tabletId = tTabletInfo.getTablet_id();\nint schemaHash = tTabletInfo.getSchema_hash();\nif (tabletMeta == null || tabletMeta == TabletInvertedIndex.NOT_EXIST_TABLET_META) {\nthrow new MetaNotFoundException(\"tablet \" + tabletId + \" does not exist\");\n}\nif (!tabletMeta.containsSchemaHash(schemaHash)) {\nthrow new MetaNotFoundException(\"tablet[\" + tabletId\n+ \"] schemaHash is not equal to index's switchSchemaHash. \"\n+ tabletMeta.toString() + \" vs. \" + schemaHash);\n}\n}\nprivate Replica findRelatedReplica(OlapTable olapTable, Partition partition,\nlong backendId, long tabletId, long indexId)\nthrows MetaNotFoundException {\nif (indexId == TabletInvertedIndex.NOT_EXIST_VALUE) {\nLOG.warn(\"tablet[{}] may be dropped. push index[{}]\", tabletId, indexId);\nreturn null;\n}\nMaterializedIndex index = partition.getIndex(indexId);\nif (index == null) {\nMaterializedViewHandler materializedViewHandler = Catalog.getCurrentCatalog().getRollupHandler();\nAlterJob alterJob = materializedViewHandler.getAlterJob(olapTable.getId());\nif (alterJob == null && olapTable.getState() == OlapTableState.ROLLUP) {\nLOG.warn(\"Cannot find table[{}].\", olapTable.getId());\nreturn null;\n}\nRollupJob rollupJob = (RollupJob) alterJob;\nMaterializedIndex rollupIndex = rollupJob.getRollupIndex(partition.getId());\nif (rollupIndex == null) {\nLOG.warn(\"could not find index for tablet {}\", tabletId);\nreturn null;\n}\nindex = rollupIndex;\n}\nLocalTablet tablet = (LocalTablet) index.getTablet(tabletId);\nif (tablet == null) {\nLOG.warn(\"could not find tablet {} in rollup index {} \", tabletId, indexId);\nreturn null;\n}\nReplica replica = tablet.getReplicaByBackendId(backendId);\nif (replica == null) {\nLOG.warn(\"could not find replica with backend {} in tablet {} in rollup index {} \",\nbackendId, tabletId, indexId);\n}\nreturn replica;\n}\nprivate void finishPush(AgentTask task, TFinishTaskRequest request) {\nList finishTabletInfos = request.getFinish_tablet_infos();\nPreconditions.checkState(finishTabletInfos != null && !finishTabletInfos.isEmpty());\nPushTask pushTask = (PushTask) task;\nlong finishVersion = finishTabletInfos.get(0).getVersion();\nlong taskVersion = pushTask.getVersion();\nif (finishVersion != taskVersion) {\nLOG.debug(\"finish tablet version is not consistent with task. 
\"\n+ \"finish version: {}, task: {}\",\nfinishVersion, pushTask);\nreturn;\n}\nlong dbId = pushTask.getDbId();\nlong backendId = pushTask.getBackendId();\nlong signature = task.getSignature();\nDatabase db = Catalog.getCurrentCatalog().getDb(dbId);\nif (db == null) {\nAgentTaskQueue.removePushTask(backendId, signature, finishVersion,\npushTask.getPushType(), pushTask.getTaskType());\nreturn;\n}\nlong tableId = pushTask.getTableId();\nlong partitionId = pushTask.getPartitionId();\nlong pushIndexId = pushTask.getIndexId();\nlong pushTabletId = pushTask.getTabletId();\nPartitionState pushState = null;\nif (finishTabletInfos.size() == 1) {\npushState = PartitionState.NORMAL;\n} else if (finishTabletInfos.size() == 2) {\nif (finishTabletInfos.get(0).getTablet_id() == finishTabletInfos.get(1).getTablet_id()) {\npushState = PartitionState.SCHEMA_CHANGE;\n} else {\npushState = PartitionState.ROLLUP;\n}\n} else {\nLOG.warn(\"invalid push report infos. finishTabletInfos' size: \" + finishTabletInfos.size());\nreturn;\n}\nLOG.debug(\"push report state: {}\", pushState.name());\ndb.writeLock();\ntry {\nOlapTable olapTable = (OlapTable) db.getTable(tableId);\nif (olapTable == null) {\nthrow new MetaNotFoundException(\"cannot find table[\" + tableId + \"] when push finished\");\n}\nPartition partition = olapTable.getPartition(partitionId);\nif (partition == null) {\nthrow new MetaNotFoundException(\"cannot find partition[\" + partitionId + \"] when push finished\");\n}\nList infos = new LinkedList();\nList tabletIds = finishTabletInfos.stream().map(\nfinishTabletInfo -> finishTabletInfo.getTablet_id()).collect(Collectors.toList());\nList tabletMetaList = Catalog.getCurrentInvertedIndex().getTabletMetaList(tabletIds);\nfor (int i = 0; i < tabletMetaList.size(); i++) {\nTabletMeta tabletMeta = tabletMetaList.get(i);\nTTabletInfo tTabletInfo = finishTabletInfos.get(i);\nlong indexId = tabletMeta.getIndexId();\nReplicaPersistInfo info = updateReplicaInfo(olapTable, partition,\nbackendId, pushIndexId, indexId,\ntTabletInfo, pushState);\nif (info != null) {\ninfos.add(info);\n}\n}\nlong reportVersion = request.getReport_version();\nCatalog.getCurrentSystemInfo().updateBackendReportVersion(task.getBackendId(), reportVersion,\ntask.getDbId());\nif (pushTask.getPushType() == TPushType.LOAD || pushTask.getPushType() == TPushType.LOAD_DELETE) {\nPreconditions.checkArgument(false, \"LOAD and LOAD_DELETE not supported\");\n} else if (pushTask.getPushType() == TPushType.DELETE) {\nif (pushTask.getVersion() != request.getRequest_version()) {\nthrow new MetaNotFoundException(\"delete task is not match. [\" + pushTask.getVersion() + \"-\"\n+ request.getRequest_version() + \"]\");\n}\nPreconditions.checkArgument(pushTask.isSyncDelete(), \"Async DELETE not supported\");\npushTask.countDownLatch(backendId, signature);\n}\nAgentTaskQueue.removePushTask(backendId, signature, finishVersion,\npushTask.getPushType(), pushTask.getTaskType());\nLOG.debug(\"finish push replica. 
tabletId: {}, backendId: {}\", pushTabletId, backendId);\n} catch (MetaNotFoundException e) {\nAgentTaskQueue.removePushTask(backendId, signature, finishVersion,\npushTask.getPushType(), pushTask.getTaskType());\nLOG.warn(\"finish push replica error\", e);\n} finally {\ndb.writeUnlock();\n}\n}\nprivate void finishClearAlterTask(AgentTask task, TFinishTaskRequest request) {\nClearAlterTask clearAlterTask = (ClearAlterTask) task;\nclearAlterTask.setFinished(true);\nAgentTaskQueue.removeTask(task.getBackendId(), task.getTaskType(), task.getSignature());\n}\nprivate void finishPublishVersion(AgentTask task, TFinishTaskRequest request) {\nList errorTabletIds = null;\nif (request.isSetError_tablet_ids()) {\nerrorTabletIds = request.getError_tablet_ids();\n}\nif (request.isSetReport_version()) {\nlong reportVersion = request.getReport_version();\nCatalog.getCurrentSystemInfo()\n.updateBackendReportVersion(task.getBackendId(), reportVersion, task.getDbId());\n}\nPublishVersionTask publishVersionTask = (PublishVersionTask) task;\npublishVersionTask.addErrorTablets(errorTabletIds);\npublishVersionTask.setIsFinished(true);\nif (request.getTask_status().getStatus_code() != TStatusCode.OK) {\nreturn;\n}\nAgentTaskQueue.removeTask(publishVersionTask.getBackendId(),\npublishVersionTask.getTaskType(),\npublishVersionTask.getSignature());\n}\nprivate ReplicaPersistInfo updateReplicaInfo(OlapTable olapTable, Partition partition,\nlong backendId, long pushIndexId, long indexId,\nTTabletInfo tTabletInfo, PartitionState pushState)\nthrows MetaNotFoundException {\nlong tabletId = tTabletInfo.getTablet_id();\nint schemaHash = tTabletInfo.getSchema_hash();\nlong version = tTabletInfo.getVersion();\nlong rowCount = tTabletInfo.getRow_count();\nlong dataSize = tTabletInfo.getData_size();\nif (indexId != pushIndexId) {\nif (pushState != PartitionState.ROLLUP && indexId != TabletInvertedIndex.NOT_EXIST_VALUE) {\nLOG.warn(\"push task report tablet[{}] with different index[{}] and is not in ROLLUP. push index[{}]\",\ntabletId, indexId, pushIndexId);\nreturn null;\n}\nif (indexId == TabletInvertedIndex.NOT_EXIST_VALUE) {\nLOG.warn(\"tablet[{}] may be dropped. push index[{}]\", tabletId, pushIndexId);\nreturn null;\n}\nMaterializedViewHandler materializedViewHandler = Catalog.getCurrentCatalog().getRollupHandler();\nAlterJob alterJob = materializedViewHandler.getAlterJob(olapTable.getId());\nif (alterJob == null) {\nLOG.warn(\"Cannot find table[{}].\", olapTable.getId());\nreturn null;\n}\n((RollupJob) alterJob).updateRollupReplicaInfo(partition.getId(), indexId, tabletId, backendId,\nschemaHash, version, rowCount, dataSize);\nreturn null;\n}\nint currentSchemaHash = olapTable.getSchemaHashByIndexId(pushIndexId);\nif (schemaHash != currentSchemaHash) {\nif (pushState == PartitionState.SCHEMA_CHANGE) {\nSchemaChangeHandler schemaChangeHandler = Catalog.getCurrentCatalog().getSchemaChangeHandler();\nAlterJob alterJob = schemaChangeHandler.getAlterJob(olapTable.getId());\nif (alterJob != null &&\nschemaHash != ((SchemaChangeJob) alterJob).getSchemaHashByIndexId(pushIndexId)) {\nthrow new MetaNotFoundException(\"tablet[\" + tabletId\n+ \"] schemaHash is not equal to index's switchSchemaHash. \"\n+ ((SchemaChangeJob) alterJob).getSchemaHashByIndexId(pushIndexId) + \" vs. \" + schemaHash);\n}\n} else {\nthrow new MetaNotFoundException(\"Diff tablet[\" + tabletId + \"] schemaHash. index[\" + pushIndexId + \"]: \"\n+ currentSchemaHash + \" vs. 
\" + schemaHash);\n}\n}\nMaterializedIndex materializedIndex = partition.getIndex(pushIndexId);\nif (materializedIndex == null) {\nthrow new MetaNotFoundException(\"Cannot find index[\" + pushIndexId + \"]\");\n}\nLocalTablet tablet = (LocalTablet) materializedIndex.getTablet(tabletId);\nif (tablet == null) {\nthrow new MetaNotFoundException(\"Cannot find tablet[\" + tabletId + \"]\");\n}\nReplica replica = tablet.getReplicaByBackendId(backendId);\nif (replica == null) {\nthrow new MetaNotFoundException(\"cannot find replica in tablet[\" + tabletId + \"], backend[\" + backendId\n+ \"]\");\n}\nreplica.updateRowCount(version, dataSize, rowCount);\nLOG.debug(\"replica[{}] report schemaHash:{}\", replica.getId(), schemaHash);\nreturn ReplicaPersistInfo.createForLoad(olapTable.getId(), partition.getId(), pushIndexId, tabletId,\nreplica.getId(), version, schemaHash, dataSize, rowCount);\n}\nprivate void finishDropReplica(AgentTask task) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.DROP, task.getSignature());\n}\nprivate void finishSchemaChange(AgentTask task, List finishTabletInfos, long reportVersion)\nthrows MetaNotFoundException {\nPreconditions.checkArgument(finishTabletInfos != null && !finishTabletInfos.isEmpty());\nPreconditions.checkArgument(finishTabletInfos.size() == 1);\nSchemaChangeTask schemaChangeTask = (SchemaChangeTask) task;\nSchemaChangeHandler schemaChangeHandler = Catalog.getCurrentCatalog().getSchemaChangeHandler();\nschemaChangeHandler.handleFinishedReplica(schemaChangeTask, finishTabletInfos.get(0), reportVersion);\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.SCHEMA_CHANGE, task.getSignature());\n}\nprivate void finishRollup(AgentTask task, List finishTabletInfos)\nthrows MetaNotFoundException {\nPreconditions.checkArgument(finishTabletInfos != null && !finishTabletInfos.isEmpty());\nPreconditions.checkArgument(finishTabletInfos.size() == 1);\nCreateRollupTask createRollupTask = (CreateRollupTask) task;\nMaterializedViewHandler materializedViewHandler = Catalog.getCurrentCatalog().getRollupHandler();\nmaterializedViewHandler.handleFinishedReplica(createRollupTask, finishTabletInfos.get(0), -1L);\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.ROLLUP, task.getSignature());\n}\nprivate void finishClone(AgentTask task, TFinishTaskRequest request) {\nCloneTask cloneTask = (CloneTask) task;\nif (cloneTask.getTaskVersion() == CloneTask.VERSION_2) {\nCatalog.getCurrentCatalog().getTabletScheduler().finishCloneTask(cloneTask, request);\n} else {\nLOG.warn(\"invalid clone task, ignore it. {}\", task);\n}\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.CLONE, task.getSignature());\n}\nprivate void finishStorageMigration(long backendId, TFinishTaskRequest request) {\nif (request.getTask_status().getStatus_code() != TStatusCode.OK) {\nLOG.warn(\"tablet migrate failed. signature: {}, error msg: {}\", request.getSignature(),\nrequest.getTask_status().error_msgs);\nreturn;\n}\nif (!request.isSetFinish_tablet_infos() || request.getFinish_tablet_infos().isEmpty()) {\nLOG.warn(\"migration finish tablet infos not set. signature: {}\", request.getSignature());\nreturn;\n}\nTTabletInfo reportedTablet = request.getFinish_tablet_infos().get(0);\nlong tabletId = reportedTablet.getTablet_id();\nTabletMeta tabletMeta = Catalog.getCurrentInvertedIndex().getTabletMeta(tabletId);\nif (tabletMeta == null) {\nLOG.warn(\"tablet meta does not exist. 
tablet id: {}\", tabletId);\nreturn;\n}\nlong dbId = tabletMeta.getDbId();\nDatabase db = Catalog.getCurrentCatalog().getDb(dbId);\nif (db == null) {\nLOG.warn(\"db does not exist. db id: {}\", dbId);\nreturn;\n}\ndb.writeLock();\ntry {\nReplica replica = Catalog.getCurrentInvertedIndex().getReplica(tabletId, backendId);\nPreconditions.checkArgument(reportedTablet.isSetPath_hash());\nreplica.setPathHash(reportedTablet.getPath_hash());\n} finally {\ndb.writeUnlock();\n}\n}\nprivate void finishConsistenctCheck(AgentTask task, TFinishTaskRequest request) {\nCheckConsistencyTask checkConsistencyTask = (CheckConsistencyTask) task;\nif (checkConsistencyTask.getVersion() != request.getRequest_version()) {\nLOG.warn(\"check consisteny task is not match. [{}-{}]\",\ncheckConsistencyTask.getVersion(), request.getRequest_version());\nreturn;\n}\nCatalog.getCurrentCatalog().getConsistencyChecker().handleFinishedConsistencyCheck(checkConsistencyTask,\nrequest.getTablet_checksum());\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.CHECK_CONSISTENCY, task.getSignature());\n}\nprivate void finishMakeSnapshot(AgentTask task, TFinishTaskRequest request) {\nSnapshotTask snapshotTask = (SnapshotTask) task;\nif (Catalog.getCurrentCatalog().getBackupHandler().handleFinishedSnapshotTask(snapshotTask, request)) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.MAKE_SNAPSHOT, task.getSignature());\n}\n}\nprivate void finishUpload(AgentTask task, TFinishTaskRequest request) {\nUploadTask uploadTask = (UploadTask) task;\nif (Catalog.getCurrentCatalog().getBackupHandler().handleFinishedSnapshotUploadTask(uploadTask, request)) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.UPLOAD, task.getSignature());\n}\n}\nprivate void finishDownloadTask(AgentTask task, TFinishTaskRequest request) {\nDownloadTask downloadTask = (DownloadTask) task;\nif (Catalog.getCurrentCatalog().getBackupHandler().handleDownloadSnapshotTask(downloadTask, request)) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.DOWNLOAD, task.getSignature());\n}\n}\nprivate void finishMoveDirTask(AgentTask task, TFinishTaskRequest request) {\nDirMoveTask dirMoveTask = (DirMoveTask) task;\nif (Catalog.getCurrentCatalog().getBackupHandler().handleDirMoveTask(dirMoveTask, request)) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.MOVE, task.getSignature());\n}\n}\nprivate void finishRecoverTablet(AgentTask task) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.RECOVER_TABLET, task.getSignature());\n}\npublic TMasterResult report(TReportRequest request) throws TException {\nreturn reportHandler.handleReport(request);\n}\npublic TFetchResourceResult fetchResource() {\nreturn Catalog.getCurrentCatalog().getAuth().toResourceThrift();\n}\nprivate void finishAlterTask(AgentTask task) {\nAlterReplicaTask alterTask = (AlterReplicaTask) task;\ntry {\nif (alterTask.getJobType() == JobType.ROLLUP) {\nCatalog.getCurrentCatalog().getRollupHandler().handleFinishAlterTask(alterTask);\n} else if (alterTask.getJobType() == JobType.SCHEMA_CHANGE) {\nCatalog.getCurrentCatalog().getSchemaChangeHandler().handleFinishAlterTask(alterTask);\n}\nalterTask.setFinished(true);\n} catch (MetaNotFoundException e) {\nLOG.warn(\"failed to handle finish alter task: {}, {}\", task.getSignature(), e.getMessage());\n}\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.ALTER, task.getSignature());\n}\npublic TGetTableMetaResponse getTableMeta(TGetTableMetaRequest request) {\nString dbName = request.getDb_name();\nString tableName 
= request.getTable_name();\nTTableMeta tableMeta;\nTGetTableMetaResponse response = new TGetTableMetaResponse();\nif (Strings.isNullOrEmpty(dbName) || Strings.isNullOrEmpty(tableName)) {\nTStatus status = new TStatus(TStatusCode.INVALID_ARGUMENT);\nstatus.setError_msgs(Lists.newArrayList(\"missing db or table name\"));\nresponse.setStatus(status);\nreturn response;\n}\nString fullDbName = ClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, dbName);\nDatabase db = Catalog.getCurrentCatalog().getDb(fullDbName);\nif (db == null) {\nTStatus status = new TStatus(TStatusCode.NOT_FOUND);\nstatus.setError_msgs(Lists.newArrayList(\"db not exist\"));\nresponse.setStatus(status);\nreturn response;\n}\ntry {\ndb.readLock();\nTable table = db.getTable(tableName);\nif (table == null) {\nTStatus status = new TStatus(TStatusCode.NOT_FOUND);\nstatus.setError_msgs(Lists.newArrayList(\"table \" + tableName + \" not exist\"));\nresponse.setStatus(status);\nreturn response;\n}\nif (!(table instanceof OlapTable)) {\nTStatus status = new TStatus(TStatusCode.NOT_IMPLEMENTED_ERROR);\nstatus.setError_msgs(Lists.newArrayList(\"only olap table supported\"));\nresponse.setStatus(status);\nreturn response;\n}\nOlapTable olapTable = (OlapTable) table;\ntableMeta = new TTableMeta();\ntableMeta.setTable_id(table.getId());\ntableMeta.setTable_name(tableName);\ntableMeta.setDb_id(db.getId());\ntableMeta.setDb_name(dbName);\ntableMeta.setCluster_id(Catalog.getCurrentCatalog().getClusterId());\ntableMeta.setState(olapTable.getState().name());\ntableMeta.setBloomfilter_fpp(olapTable.getBfFpp());\nif (olapTable.getCopiedBfColumns() != null) {\nfor (String bfColumn : olapTable.getCopiedBfColumns()) {\ntableMeta.addToBloomfilter_columns(bfColumn);\n}\n}\ntableMeta.setBase_index_id(olapTable.getBaseIndexId());\ntableMeta.setColocate_group(olapTable.getColocateGroup());\ntableMeta.setKey_type(olapTable.getKeysType().name());\nTDistributionDesc distributionDesc = new TDistributionDesc();\nDistributionInfo distributionInfo = olapTable.getDefaultDistributionInfo();\ndistributionDesc.setDistribution_type(distributionInfo.getType().name());\nif (distributionInfo.getType() == DistributionInfoType.HASH) {\nTHashDistributionInfo tHashDistributionInfo = new THashDistributionInfo();\nHashDistributionInfo hashDistributionInfo = (HashDistributionInfo) distributionInfo;\ntHashDistributionInfo.setBucket_num(hashDistributionInfo.getBucketNum());\nfor (Column column : hashDistributionInfo.getDistributionColumns()) {\ntHashDistributionInfo.addToDistribution_columns(column.getName());\n}\ndistributionDesc.setHash_distribution(tHashDistributionInfo);\n} else {\nTRandomDistributionInfo tRandomDistributionInfo = new TRandomDistributionInfo();\nRandomDistributionInfo randomDistributionInfo = (RandomDistributionInfo) distributionInfo;\ntRandomDistributionInfo.setBucket_num(randomDistributionInfo.getBucketNum());\ndistributionDesc.setRandom_distribution(tRandomDistributionInfo);\n}\ntableMeta.setDistribution_desc(distributionDesc);\nTableProperty tableProperty = olapTable.getTableProperty();\nfor (Map.Entry property : tableProperty.getProperties().entrySet()) {\ntableMeta.putToProperties(property.getKey(), property.getValue());\n}\nPartitionInfo partitionInfo = olapTable.getPartitionInfo();\nTBasePartitionDesc basePartitionDesc = new TBasePartitionDesc();\nfor (Partition partition : olapTable.getAllPartitions()) {\nTPartitionMeta partitionMeta = new 
TPartitionMeta();\npartitionMeta.setPartition_id(partition.getId());\npartitionMeta.setPartition_name(partition.getName());\npartitionMeta.setState(partition.getState().name());\npartitionMeta.setVisible_version(partition.getVisibleVersion());\npartitionMeta.setVisible_time(partition.getVisibleVersionTime());\npartitionMeta.setNext_version(partition.getNextVersion());\ntableMeta.addToPartitions(partitionMeta);\nShort replicaNum = partitionInfo.getReplicationNum(partition.getId());\nboolean inMemory = partitionInfo.getIsInMemory(partition.getId());\nbasePartitionDesc.putToReplica_num_map(partition.getId(), replicaNum);\nbasePartitionDesc.putToIn_memory_map(partition.getId(), inMemory);\nDataProperty dataProperty = partitionInfo.getDataProperty(partition.getId());\nTDataProperty thriftDataProperty = new TDataProperty();\nthriftDataProperty.setStorage_medium(dataProperty.getStorageMedium());\nthriftDataProperty.setCold_time(dataProperty.getCooldownTimeMs());\nbasePartitionDesc.putToData_property(partition.getId(), thriftDataProperty);\n}\nTPartitionInfo tPartitionInfo = new TPartitionInfo();\ntPartitionInfo.setType(partitionInfo.getType().toThrift());\nif (partitionInfo.getType() == PartitionType.RANGE) {\nTRangePartitionDesc rangePartitionDesc = new TRangePartitionDesc();\nRangePartitionInfo rangePartitionInfo = (RangePartitionInfo) partitionInfo;\nfor (Column column : rangePartitionInfo.getPartitionColumns()) {\nTColumnMeta columnMeta = new TColumnMeta();\ncolumnMeta.setColumnName(column.getName());\ncolumnMeta.setColumnType(column.getType().toThrift());\ncolumnMeta.setKey(column.isKey());\nif (column.getAggregationType() != null) {\ncolumnMeta.setAggregationType(column.getAggregationType().name());\n}\ncolumnMeta.setComment(column.getComment());\nrangePartitionDesc.addToColumns(columnMeta);\n}\nMap> ranges = rangePartitionInfo.getIdToRange(false);\nfor (Map.Entry> range : ranges.entrySet()) {\nTRange tRange = new TRange();\ntRange.setPartition_id(range.getKey());\nByteArrayOutputStream output = new ByteArrayOutputStream();\nDataOutputStream stream = new DataOutputStream(output);\nrange.getValue().lowerEndpoint().write(stream);\ntRange.setStart_key(output.toByteArray());\noutput = new ByteArrayOutputStream();\nstream = new DataOutputStream(output);\nrange.getValue().upperEndpoint().write(stream);\ntRange.setEnd_key(output.toByteArray());\ntRange.setBase_desc(basePartitionDesc);\nrangePartitionDesc.putToRanges(range.getKey(), tRange);\n}\ntPartitionInfo.setRange_partition_desc(rangePartitionDesc);\n} else if (partitionInfo.getType() == PartitionType.UNPARTITIONED) {\nTSinglePartitionDesc singlePartitionDesc = new TSinglePartitionDesc();\nsinglePartitionDesc.setBase_desc(basePartitionDesc);\ntPartitionInfo.setSingle_partition_desc(singlePartitionDesc);\n} else {\nLOG.info(\"invalid partition type {}\", partitionInfo.getType());\nreturn null;\n}\ntableMeta.setPartition_info(tPartitionInfo);\nfor (Index index : olapTable.getIndexes()) {\nTIndexInfo indexInfo = new TIndexInfo();\nindexInfo.setIndex_name(index.getIndexName());\nindexInfo.setIndex_type(index.getIndexType().name());\nindexInfo.setComment(index.getComment());\nfor (String column : index.getColumns()) {\nindexInfo.addToColumns(column);\n}\ntableMeta.addToIndex_infos(indexInfo);\n}\nfor (Partition partition : olapTable.getPartitions()) {\nList indexes = partition.getMaterializedIndices(IndexExtState.ALL);\nfor (MaterializedIndex index : indexes) {\nTIndexMeta indexMeta = new 
TIndexMeta();\nindexMeta.setIndex_id(index.getId());\nindexMeta.setPartition_id(partition.getId());\nindexMeta.setIndex_state(index.getState().toThrift());\nindexMeta.setRow_count(index.getRowCount());\nindexMeta.setRollup_index_id(index.getRollupIndexId());\nindexMeta.setRollup_finished_version(index.getRollupFinishedVersion());\nTSchemaMeta schemaMeta = new TSchemaMeta();\nMaterializedIndexMeta materializedIndexMeta = olapTable.getIndexMetaByIndexId(index.getId());\nschemaMeta.setSchema_version(materializedIndexMeta.getSchemaVersion());\nschemaMeta.setSchema_hash(materializedIndexMeta.getSchemaHash());\nschemaMeta.setShort_key_col_count(materializedIndexMeta.getShortKeyColumnCount());\nschemaMeta.setStorage_type(materializedIndexMeta.getStorageType());\nschemaMeta.setKeys_type(materializedIndexMeta.getKeysType().name());\nfor (Column column : materializedIndexMeta.getSchema()) {\nTColumnMeta columnMeta = new TColumnMeta();\ncolumnMeta.setColumnName(column.getName());\ncolumnMeta.setColumnType(column.getType().toThrift());\ncolumnMeta.setKey(column.isKey());\ncolumnMeta.setAllowNull(column.isAllowNull());\nif (column.getAggregationType() != null) {\ncolumnMeta.setAggregationType(column.getAggregationType().name());\n}\ncolumnMeta.setComment(column.getComment());\ncolumnMeta.setDefaultValue(column.getDefaultValue());\nschemaMeta.addToColumns(columnMeta);\n}\nindexMeta.setSchema_meta(schemaMeta);\nfor (Tablet tablet : index.getTablets()) {\nLocalTablet localTablet = (LocalTablet) tablet;\nTTabletMeta tTabletMeta = new TTabletMeta();\ntTabletMeta.setTablet_id(tablet.getId());\ntTabletMeta.setChecked_version(localTablet.getCheckedVersion());\ntTabletMeta.setConsistent(localTablet.isConsistent());\nTabletMeta tabletMeta = Catalog.getCurrentInvertedIndex().getTabletMeta(tablet.getId());\ntTabletMeta.setDb_id(tabletMeta.getDbId());\ntTabletMeta.setTable_id(tabletMeta.getTableId());\ntTabletMeta.setPartition_id(tabletMeta.getPartitionId());\ntTabletMeta.setIndex_id(tabletMeta.getIndexId());\ntTabletMeta.setStorage_medium(tabletMeta.getStorageMedium());\ntTabletMeta.setOld_schema_hash(tabletMeta.getOldSchemaHash());\ntTabletMeta.setNew_schema_hash(tabletMeta.getNewSchemaHash());\nfor (Replica replica : localTablet.getReplicas()) {\nTReplicaMeta replicaMeta = new TReplicaMeta();\nreplicaMeta.setReplica_id(replica.getId());\nreplicaMeta.setBackend_id(replica.getBackendId());\nreplicaMeta.setSchema_hash(replica.getSchemaHash());\nreplicaMeta.setVersion(replica.getVersion());\nreplicaMeta.setData_size(replica.getDataSize());\nreplicaMeta.setRow_count(replica.getRowCount());\nreplicaMeta.setState(replica.getState().name());\nreplicaMeta.setLast_failed_version(replica.getLastFailedVersion());\nreplicaMeta.setLast_failed_time(replica.getLastFailedTimestamp());\nreplicaMeta.setLast_success_version(replica.getLastSuccessVersion());\nreplicaMeta.setVersion_count(replica.getVersionCount());\nreplicaMeta.setPath_hash(replica.getPathHash());\nreplicaMeta.setBad(replica.isBad());\ntTabletMeta.addToReplicas(replicaMeta);\n}\nindexMeta.addToTablets(tTabletMeta);\n}\ntableMeta.addToIndexes(indexMeta);\n}\n}\nList backends = new ArrayList<>();\nfor (Backend backend : Catalog.getCurrentCatalog().getCurrentSystemInfo()\n.getClusterBackends(db.getClusterName())) {\nTBackendMeta backendMeta = new 
TBackendMeta();\nbackendMeta.setBackend_id(backend.getId());\nbackendMeta.setHost(backend.getHost());\nbackendMeta.setBe_port(backend.getBeRpcPort());\nbackendMeta.setRpc_port(backend.getBrpcPort());\nbackendMeta.setHttp_port(backend.getHttpPort());\nbackendMeta.setAlive(backend.isAlive());\nbackendMeta.setState(backend.getBackendState().ordinal());\nbackends.add(backendMeta);\n}\nresponse.setStatus(new TStatus(TStatusCode.OK));\nresponse.setTable_meta(tableMeta);\nresponse.setBackends(backends);\n} catch (Exception e) {\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(e.getMessage()));\nLOG.info(\"exception: {}\", e.getStackTrace());\nresponse.setStatus(status);\n} finally {\ndb.readUnlock();\n}\nreturn response;\n}\npublic TNetworkAddress masterAddr() {\nString masterHost = Catalog.getCurrentCatalog().getMasterIp();\nint masterRpcPort = Catalog.getCurrentCatalog().getMasterRpcPort();\nreturn new TNetworkAddress(masterHost, masterRpcPort);\n}\npublic TCommitRemoteTxnResponse commitRemoteTxn(TCommitRemoteTxnRequest request) throws TException {\nTCommitRemoteTxnResponse response = new TCommitRemoteTxnResponse();\nCatalog catalog = Catalog.getCurrentCatalog();\nif (!catalog.isMaster()) {\ntry {\nTNetworkAddress addr = masterAddr();\nLOG.info(\"commitRemoteTxn as follower, forward it to master. txn_id: {}, master: {}\",\nrequest.getTxn_id(), addr.toString());\nFrontendService.Client client = ClientPool.frontendPool.borrowObject(addr, 1000);\nresponse = client.commitRemoteTxn(request);\nClientPool.frontendPool.returnObject(addr, client);\nreturn response;\n} catch (Exception e) {\nLOG.warn(\"create thrift client failed during commitRemoteTxn, txn_id: {}, exception: {}\", request.getTxn_id(), e);\nthrow new TException(e.getMessage());\n}\n}\nDatabase db = catalog.getDb(request.getDb_id());\nif (db == null) {\nTStatus status = new TStatus(TStatusCode.NOT_FOUND);\nstatus.setError_msgs(Lists.newArrayList(\"db not exist or already deleted\"));\nresponse.setStatus(status);\nreturn response;\n}\ntry {\nTxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.getCommit_attachment());\nlong timeoutMs = request.isSetCommit_timeout_ms() ? request.getCommit_timeout_ms() : 5000;\ntimeoutMs = timeoutMs * 3 / 4;\nboolean ret = Catalog.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(\ndb, request.getTxn_id(),\nTabletCommitInfo.fromThrift(request.getCommit_infos()),\ntimeoutMs, attachment);\nif (!ret) {\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(\"commit and publish txn failed\"));\nresponse.setStatus(status);\nreturn response;\n}\n} catch (UserException e) {\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(e.getMessage()));\nresponse.setStatus(status);\nreturn response;\n}\nTStatus status = new TStatus(TStatusCode.OK);\nresponse.setStatus(status);\nLOG.info(\"commit remote transaction: {} success\", request.getTxn_id());\nreturn response;\n}\npublic TAbortRemoteTxnResponse abortRemoteTxn(TAbortRemoteTxnRequest request) throws TException {\nTAbortRemoteTxnResponse response = new TAbortRemoteTxnResponse();\nCatalog catalog = Catalog.getCurrentCatalog();\nif (!catalog.isMaster()) {\ntry {\nTNetworkAddress addr = masterAddr();\nLOG.info(\"abortRemoteTxn as follower, forward it to master. 
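One detail in `commitRemoteTxn` above deserves a note: the handler spends only three quarters of the client-supplied commit timeout (default 5000 ms) waiting for commit-and-publish, leaving headroom to build and send a response before the caller's own deadline expires. A sketch of that budgeting, where `waitForPublish` is a hypothetical stand-in for `commitAndPublishTransaction` with a simplified signature:

```java
// Sketch of the timeout budgeting in commitRemoteTxn: reserve ~25% of the
// caller's deadline so the FE can still answer before the client times out.
// The 3/4 ratio and the 5000 ms default come from the snippet above;
// waitForPublish is a hypothetical stand-in for commitAndPublishTransaction.
long timeoutMs = request.isSetCommit_timeout_ms() ? request.getCommit_timeout_ms() : 5000;
long publishBudgetMs = timeoutMs * 3 / 4;  // e.g. 5000 ms -> 3750 ms of wait budget
boolean published = waitForPublish(request.getTxn_id(), publishBudgetMs);
```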
txn_id: {}, master: {}\",\nrequest.getTxn_id(), addr.toString());\nFrontendService.Client client = ClientPool.frontendPool.borrowObject(addr, 1000);\nresponse = client.abortRemoteTxn(request);\nClientPool.frontendPool.returnObject(addr, client);\nreturn response;\n} catch (Exception e) {\nLOG.warn(\"create thrift client failed during abortRemoteTxn, txn_id: {}, exception: {}\", request.getTxn_id(), e);\nthrow new TException(e.getMessage());\n}\n}\nDatabase db = catalog.getDb(request.getDb_id());\nif (db == null) {\nTStatus status = new TStatus(TStatusCode.NOT_FOUND);\nstatus.setError_msgs(Lists.newArrayList(\"db not exist or already deleted\"));\nresponse.setStatus(status);\nreturn response;\n}\ntry {\nCatalog.getCurrentGlobalTransactionMgr().abortTransaction(\nrequest.getDb_id(), request.getTxn_id(), request.getError_msg());\n} catch (Exception e) {\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(e.getMessage()));\nresponse.setStatus(status);\nreturn response;\n}\nTStatus status = new TStatus(TStatusCode.OK);\nresponse.setStatus(status);\nreturn response;\n}\n}", + "context_after": "class MasterImpl {\nprivate static final Logger LOG = LogManager.getLogger(MasterImpl.class);\nprivate ReportHandler reportHandler = new ReportHandler();\npublic MasterImpl() {\nreportHandler.start();\n}\npublic TMasterResult finishTask(TFinishTaskRequest request) {\nTMasterResult result = new TMasterResult();\nTStatus tStatus = new TStatus(TStatusCode.OK);\nresult.setStatus(tStatus);\nTStatus taskStatus = request.getTask_status();\nLOG.debug(\"get task report: {}\", request.toString());\nif (taskStatus.getStatus_code() != TStatusCode.OK) {\nLOG.warn(\"finish task reports bad. request: {}\", request.toString());\n}\nTBackend tBackend = request.getBackend();\nString host = tBackend.getHost();\nint bePort = tBackend.getBe_port();\nBackend backend = Catalog.getCurrentSystemInfo().getBackendWithBePort(host, bePort);\nif (backend == null) {\ntStatus.setStatus_code(TStatusCode.CANCELLED);\nList errorMsgs = new ArrayList<>();\nerrorMsgs.add(\"backend not exist.\");\ntStatus.setError_msgs(errorMsgs);\nLOG.warn(\"backend does not found. host: {}, be port: {}. task: {}\", host, bePort, request.toString());\nreturn result;\n}\nlong backendId = backend.getId();\nTTaskType taskType = request.getTask_type();\nlong signature = request.getSignature();\nAgentTask task = AgentTaskQueue.getTask(backendId, taskType, signature);\nif (task == null) {\nif (taskType != TTaskType.DROP && taskType != TTaskType.STORAGE_MEDIUM_MIGRATE\n&& taskType != TTaskType.RELEASE_SNAPSHOT && taskType != TTaskType.CLEAR_TRANSACTION_TASK) {\nString errMsg = \"cannot find task. 
type: \" + taskType + \", backendId: \" + backendId\n+ \", signature: \" + signature;\nLOG.warn(errMsg);\ntStatus.setStatus_code(TStatusCode.CANCELLED);\nList errorMsgs = new ArrayList();\nerrorMsgs.add(errMsg);\ntStatus.setError_msgs(errorMsgs);\n}\nif (taskType != TTaskType.STORAGE_MEDIUM_MIGRATE) {\nreturn result;\n}\n} else {\nif (taskStatus.getStatus_code() != TStatusCode.OK) {\ntask.failed();\nString errMsg = \"task type: \" + taskType + \", status_code: \" + taskStatus.getStatus_code().toString() +\n\", backendId: \" + backend + \", signature: \" + signature;\ntask.setErrorMsg(errMsg);\nif (taskType != TTaskType.MAKE_SNAPSHOT && taskType != TTaskType.UPLOAD\n&& taskType != TTaskType.DOWNLOAD && taskType != TTaskType.MOVE\n&& taskType != TTaskType.CLONE && taskType != TTaskType.PUBLISH_VERSION\n&& taskType != TTaskType.CREATE && taskType != TTaskType.UPDATE_TABLET_META_INFO) {\nreturn result;\n}\n}\n}\ntry {\nList finishTabletInfos;\nswitch (taskType) {\ncase CREATE:\nPreconditions.checkState(request.isSetReport_version());\nfinishCreateReplica(task, request);\nbreak;\ncase PUSH:\ncheckHasTabletInfo(request);\nPreconditions.checkState(request.isSetReport_version());\nfinishPush(task, request);\nbreak;\ncase REALTIME_PUSH:\ncheckHasTabletInfo(request);\nPreconditions.checkState(request.isSetReport_version());\nfinishRealtimePush(task, request);\nbreak;\ncase PUBLISH_VERSION:\nfinishPublishVersion(task, request);\nbreak;\ncase CLEAR_ALTER_TASK:\nfinishClearAlterTask(task, request);\nbreak;\ncase DROP:\nfinishDropReplica(task);\nbreak;\ncase SCHEMA_CHANGE:\nPreconditions.checkState(request.isSetReport_version());\ncheckHasTabletInfo(request);\nfinishTabletInfos = request.getFinish_tablet_infos();\nfinishSchemaChange(task, finishTabletInfos, request.getReport_version());\nbreak;\ncase ROLLUP:\ncheckHasTabletInfo(request);\nfinishTabletInfos = request.getFinish_tablet_infos();\nfinishRollup(task, finishTabletInfos);\nbreak;\ncase CLONE:\nfinishClone(task, request);\nbreak;\ncase STORAGE_MEDIUM_MIGRATE:\nfinishStorageMigration(backendId, request);\nbreak;\ncase CHECK_CONSISTENCY:\nfinishConsistenctCheck(task, request);\nbreak;\ncase MAKE_SNAPSHOT:\nfinishMakeSnapshot(task, request);\nbreak;\ncase UPLOAD:\nfinishUpload(task, request);\nbreak;\ncase DOWNLOAD:\nfinishDownloadTask(task, request);\nbreak;\ncase MOVE:\nfinishMoveDirTask(task, request);\nbreak;\ncase RECOVER_TABLET:\nfinishRecoverTablet(task);\nbreak;\ncase ALTER:\nfinishAlterTask(task);\nbreak;\ncase UPDATE_TABLET_META_INFO:\nfinishUpdateTabletMeta(task, request);\nbreak;\ndefault:\nbreak;\n}\n} catch (Exception e) {\ntStatus.setStatus_code(TStatusCode.CANCELLED);\nString errMsg = \"finish agent task error.\";\nLOG.warn(errMsg, e);\nList errorMsgs = new ArrayList();\nerrorMsgs.add(errMsg);\ntStatus.setError_msgs(errorMsgs);\n}\nif (tStatus.getStatus_code() == TStatusCode.OK) {\nLOG.debug(\"report task success. 
{}\", request.toString());\n}\nreturn result;\n}\nprivate void checkHasTabletInfo(TFinishTaskRequest request) throws Exception {\nif (!request.isSetFinish_tablet_infos() || request.getFinish_tablet_infos().isEmpty()) {\nthrow new Exception(\"tablet info is not set\");\n}\n}\nprivate void finishCreateReplica(AgentTask task, TFinishTaskRequest request) {\ntry {\nCreateReplicaTask createReplicaTask = (CreateReplicaTask) task;\nif (request.getTask_status().getStatus_code() != TStatusCode.OK) {\ncreateReplicaTask.countDownToZero(\ntask.getBackendId() + \": \" + request.getTask_status().getError_msgs().toString());\n} else {\nlong tabletId = createReplicaTask.getTabletId();\nTabletInvertedIndex invertedIndex = Catalog.getCurrentInvertedIndex();\nTabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);\nif (!tabletMeta.isUseStarOS() && request.isSetFinish_tablet_infos()) {\nReplica replica = Catalog.getCurrentInvertedIndex().getReplica(tabletId, createReplicaTask.getBackendId());\nreplica.setPathHash(request.getFinish_tablet_infos().get(0).getPath_hash());\nif (createReplicaTask.isRecoverTask()) {\n/*\n* This create replica task may be generated by recovery(See comment of Config.recover_with_empty_tablet)\n* So we set replica back to good.\n*/\nreplica.setBad(false);\nLOG.info(\n\"finish recover create replica task. set replica to good. tablet {}, replica {}, backend {}\",\ntabletId, task.getBackendId(), replica.getId());\n}\n}\nCatalog.getCurrentSystemInfo()\n.updateBackendReportVersion(task.getBackendId(), request.getReport_version(), task.getDbId());\ncreateReplicaTask.countDownLatch(task.getBackendId(), task.getSignature());\nLOG.debug(\"finish create replica. tablet id: {}, be: {}, report version: {}\",\ntabletId, task.getBackendId(), request.getReport_version());\n}\n} finally {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.CREATE, task.getSignature());\n}\n}\nprivate void finishUpdateTabletMeta(AgentTask task, TFinishTaskRequest request) {\ntry {\nUpdateTabletMetaInfoTask tabletTask = (UpdateTabletMetaInfoTask) task;\nif (request.getTask_status().getStatus_code() != TStatusCode.OK) {\ntabletTask.countDownToZero(\ntask.getBackendId() + \": \" + request.getTask_status().getError_msgs().toString());\n} else {\ntabletTask.countDownLatch(task.getBackendId(), tabletTask.getTablets());\nLOG.debug(\"finish update tablet meta. 
tablet id: {}, be: {}\", tabletTask.getTablets(),\ntask.getBackendId());\n}\n} finally {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.UPDATE_TABLET_META_INFO, task.getSignature());\n}\n}\nprivate void finishRealtimePush(AgentTask task, TFinishTaskRequest request) {\nList finishTabletInfos = request.getFinish_tablet_infos();\nPreconditions.checkState(finishTabletInfos != null && !finishTabletInfos.isEmpty());\nPushTask pushTask = (PushTask) task;\nlong dbId = pushTask.getDbId();\nlong backendId = pushTask.getBackendId();\nlong signature = task.getSignature();\nlong transactionId = ((PushTask) task).getTransactionId();\nDatabase db = Catalog.getCurrentCatalog().getDb(dbId);\nif (db == null) {\nAgentTaskQueue.removeTask(backendId, TTaskType.REALTIME_PUSH, signature);\nreturn;\n}\nlong tableId = pushTask.getTableId();\nlong partitionId = pushTask.getPartitionId();\nlong pushIndexId = pushTask.getIndexId();\nlong pushTabletId = pushTask.getTabletId();\nPartitionState pushState = null;\nif (finishTabletInfos.size() == 1) {\npushState = PartitionState.NORMAL;\n} else if (finishTabletInfos.size() == 2) {\nif (finishTabletInfos.get(0).getTablet_id() == finishTabletInfos.get(1).getTablet_id()) {\npushState = PartitionState.SCHEMA_CHANGE;\n} else {\npushState = PartitionState.ROLLUP;\n}\n} else {\nLOG.warn(\"invalid push report infos. finishTabletInfos' size: \" + finishTabletInfos.size());\nreturn;\n}\nLOG.debug(\"push report state: {}\", pushState.name());\ndb.writeLock();\ntry {\nOlapTable olapTable = (OlapTable) db.getTable(tableId);\nif (olapTable == null) {\nthrow new MetaNotFoundException(\"cannot find table[\" + tableId + \"] when push finished\");\n}\nPartition partition = olapTable.getPartition(partitionId);\nif (partition == null) {\nthrow new MetaNotFoundException(\"cannot find partition[\" + partitionId + \"] when push finished\");\n}\nMaterializedIndex pushIndex = partition.getIndex(pushIndexId);\nif (pushIndex == null) {\nthrow new MetaNotFoundException(\"cannot find index[\" + pushIndex + \"] when push finished\");\n}\nlong reportVersion = request.getReport_version();\nCatalog.getCurrentSystemInfo().updateBackendReportVersion(task.getBackendId(), reportVersion,\ntask.getDbId());\nList tabletIds = finishTabletInfos.stream().map(\ntTabletInfo -> tTabletInfo.getTablet_id()).collect(Collectors.toList());\nList tabletMetaList = Catalog.getCurrentInvertedIndex().getTabletMetaList(tabletIds);\nif (pushTask.getPushType() == TPushType.LOAD || pushTask.getPushType() == TPushType.LOAD_DELETE) {\nPreconditions.checkArgument(false, \"LOAD and LOAD_DELETE not supported\");\n} else if (pushTask.getPushType() == TPushType.DELETE) {\nDeleteJob deleteJob = Catalog.getCurrentCatalog().getDeleteHandler().getDeleteJob(transactionId);\nif (deleteJob == null) {\nthrow new MetaNotFoundException(\"cannot find delete job, job[\" + transactionId + \"]\");\n}\nfor (int i = 0; i < tabletMetaList.size(); i++) {\nTabletMeta tabletMeta = tabletMetaList.get(i);\nlong tabletId = tabletIds.get(i);\nReplica replica = findRelatedReplica(olapTable, partition,\nbackendId, tabletId, tabletMeta.getIndexId());\nif (replica != null) {\ndeleteJob.addFinishedReplica(partitionId, pushTabletId, replica);\npushTask.countDownLatch(backendId, pushTabletId);\n}\n}\n} else if (pushTask.getPushType() == TPushType.LOAD_V2) {\nlong loadJobId = pushTask.getLoadJobId();\ncom.starrocks.load.loadv2.LoadJob job =\nCatalog.getCurrentCatalog().getLoadManager().getLoadJob(loadJobId);\nif (job == null) {\nthrow new 
MetaNotFoundException(\"cannot find load job, job[\" + loadJobId + \"]\");\n}\nfor (int i = 0; i < tabletMetaList.size(); i++) {\nTabletMeta tabletMeta = tabletMetaList.get(i);\ncheckReplica(finishTabletInfos.get(i), tabletMeta);\nlong tabletId = tabletIds.get(i);\nReplica replica =\nfindRelatedReplica(olapTable, partition, backendId, tabletId, tabletMeta.getIndexId());\nif (replica != null) {\n((SparkLoadJob) job).addFinishedReplica(replica.getId(), pushTabletId, backendId);\n}\n}\n}\nAgentTaskQueue.removeTask(backendId, TTaskType.REALTIME_PUSH, signature);\nLOG.debug(\"finish push replica. tabletId: {}, backendId: {}\", pushTabletId, backendId);\n} catch (MetaNotFoundException e) {\nAgentTaskQueue.removeTask(backendId, TTaskType.REALTIME_PUSH, signature);\nLOG.warn(\"finish push replica error\", e);\n} finally {\ndb.writeUnlock();\n}\n}\nprivate void checkReplica(TTabletInfo tTabletInfo, TabletMeta tabletMeta)\nthrows MetaNotFoundException {\nlong tabletId = tTabletInfo.getTablet_id();\nint schemaHash = tTabletInfo.getSchema_hash();\nif (tabletMeta == null || tabletMeta == TabletInvertedIndex.NOT_EXIST_TABLET_META) {\nthrow new MetaNotFoundException(\"tablet \" + tabletId + \" does not exist\");\n}\nif (!tabletMeta.containsSchemaHash(schemaHash)) {\nthrow new MetaNotFoundException(\"tablet[\" + tabletId\n+ \"] schemaHash is not equal to index's switchSchemaHash. \"\n+ tabletMeta.toString() + \" vs. \" + schemaHash);\n}\n}\nprivate Replica findRelatedReplica(OlapTable olapTable, Partition partition,\nlong backendId, long tabletId, long indexId)\nthrows MetaNotFoundException {\nif (indexId == TabletInvertedIndex.NOT_EXIST_VALUE) {\nLOG.warn(\"tablet[{}] may be dropped. push index[{}]\", tabletId, indexId);\nreturn null;\n}\nMaterializedIndex index = partition.getIndex(indexId);\nif (index == null) {\nMaterializedViewHandler materializedViewHandler = Catalog.getCurrentCatalog().getRollupHandler();\nAlterJob alterJob = materializedViewHandler.getAlterJob(olapTable.getId());\nif (alterJob == null && olapTable.getState() == OlapTableState.ROLLUP) {\nLOG.warn(\"Cannot find table[{}].\", olapTable.getId());\nreturn null;\n}\nRollupJob rollupJob = (RollupJob) alterJob;\nMaterializedIndex rollupIndex = rollupJob.getRollupIndex(partition.getId());\nif (rollupIndex == null) {\nLOG.warn(\"could not find index for tablet {}\", tabletId);\nreturn null;\n}\nindex = rollupIndex;\n}\nLocalTablet tablet = (LocalTablet) index.getTablet(tabletId);\nif (tablet == null) {\nLOG.warn(\"could not find tablet {} in rollup index {} \", tabletId, indexId);\nreturn null;\n}\nReplica replica = tablet.getReplicaByBackendId(backendId);\nif (replica == null) {\nLOG.warn(\"could not find replica with backend {} in tablet {} in rollup index {} \",\nbackendId, tabletId, indexId);\n}\nreturn replica;\n}\nprivate void finishPush(AgentTask task, TFinishTaskRequest request) {\nList finishTabletInfos = request.getFinish_tablet_infos();\nPreconditions.checkState(finishTabletInfos != null && !finishTabletInfos.isEmpty());\nPushTask pushTask = (PushTask) task;\nlong finishVersion = finishTabletInfos.get(0).getVersion();\nlong taskVersion = pushTask.getVersion();\nif (finishVersion != taskVersion) {\nLOG.debug(\"finish tablet version is not consistent with task. 
\"\n+ \"finish version: {}, task: {}\",\nfinishVersion, pushTask);\nreturn;\n}\nlong dbId = pushTask.getDbId();\nlong backendId = pushTask.getBackendId();\nlong signature = task.getSignature();\nDatabase db = Catalog.getCurrentCatalog().getDb(dbId);\nif (db == null) {\nAgentTaskQueue.removePushTask(backendId, signature, finishVersion,\npushTask.getPushType(), pushTask.getTaskType());\nreturn;\n}\nlong tableId = pushTask.getTableId();\nlong partitionId = pushTask.getPartitionId();\nlong pushIndexId = pushTask.getIndexId();\nlong pushTabletId = pushTask.getTabletId();\nPartitionState pushState = null;\nif (finishTabletInfos.size() == 1) {\npushState = PartitionState.NORMAL;\n} else if (finishTabletInfos.size() == 2) {\nif (finishTabletInfos.get(0).getTablet_id() == finishTabletInfos.get(1).getTablet_id()) {\npushState = PartitionState.SCHEMA_CHANGE;\n} else {\npushState = PartitionState.ROLLUP;\n}\n} else {\nLOG.warn(\"invalid push report infos. finishTabletInfos' size: \" + finishTabletInfos.size());\nreturn;\n}\nLOG.debug(\"push report state: {}\", pushState.name());\ndb.writeLock();\ntry {\nOlapTable olapTable = (OlapTable) db.getTable(tableId);\nif (olapTable == null) {\nthrow new MetaNotFoundException(\"cannot find table[\" + tableId + \"] when push finished\");\n}\nPartition partition = olapTable.getPartition(partitionId);\nif (partition == null) {\nthrow new MetaNotFoundException(\"cannot find partition[\" + partitionId + \"] when push finished\");\n}\nList infos = new LinkedList();\nList tabletIds = finishTabletInfos.stream().map(\nfinishTabletInfo -> finishTabletInfo.getTablet_id()).collect(Collectors.toList());\nList tabletMetaList = Catalog.getCurrentInvertedIndex().getTabletMetaList(tabletIds);\nfor (int i = 0; i < tabletMetaList.size(); i++) {\nTabletMeta tabletMeta = tabletMetaList.get(i);\nTTabletInfo tTabletInfo = finishTabletInfos.get(i);\nlong indexId = tabletMeta.getIndexId();\nReplicaPersistInfo info = updateReplicaInfo(olapTable, partition,\nbackendId, pushIndexId, indexId,\ntTabletInfo, pushState);\nif (info != null) {\ninfos.add(info);\n}\n}\nlong reportVersion = request.getReport_version();\nCatalog.getCurrentSystemInfo().updateBackendReportVersion(task.getBackendId(), reportVersion,\ntask.getDbId());\nif (pushTask.getPushType() == TPushType.LOAD || pushTask.getPushType() == TPushType.LOAD_DELETE) {\nPreconditions.checkArgument(false, \"LOAD and LOAD_DELETE not supported\");\n} else if (pushTask.getPushType() == TPushType.DELETE) {\nif (pushTask.getVersion() != request.getRequest_version()) {\nthrow new MetaNotFoundException(\"delete task is not match. [\" + pushTask.getVersion() + \"-\"\n+ request.getRequest_version() + \"]\");\n}\nPreconditions.checkArgument(pushTask.isSyncDelete(), \"Async DELETE not supported\");\npushTask.countDownLatch(backendId, signature);\n}\nAgentTaskQueue.removePushTask(backendId, signature, finishVersion,\npushTask.getPushType(), pushTask.getTaskType());\nLOG.debug(\"finish push replica. 
tabletId: {}, backendId: {}\", pushTabletId, backendId);\n} catch (MetaNotFoundException e) {\nAgentTaskQueue.removePushTask(backendId, signature, finishVersion,\npushTask.getPushType(), pushTask.getTaskType());\nLOG.warn(\"finish push replica error\", e);\n} finally {\ndb.writeUnlock();\n}\n}\nprivate void finishClearAlterTask(AgentTask task, TFinishTaskRequest request) {\nClearAlterTask clearAlterTask = (ClearAlterTask) task;\nclearAlterTask.setFinished(true);\nAgentTaskQueue.removeTask(task.getBackendId(), task.getTaskType(), task.getSignature());\n}\nprivate void finishPublishVersion(AgentTask task, TFinishTaskRequest request) {\nList errorTabletIds = null;\nif (request.isSetError_tablet_ids()) {\nerrorTabletIds = request.getError_tablet_ids();\n}\nif (request.isSetReport_version()) {\nlong reportVersion = request.getReport_version();\nCatalog.getCurrentSystemInfo()\n.updateBackendReportVersion(task.getBackendId(), reportVersion, task.getDbId());\n}\nPublishVersionTask publishVersionTask = (PublishVersionTask) task;\npublishVersionTask.addErrorTablets(errorTabletIds);\npublishVersionTask.setIsFinished(true);\nif (request.getTask_status().getStatus_code() != TStatusCode.OK) {\nreturn;\n}\nAgentTaskQueue.removeTask(publishVersionTask.getBackendId(),\npublishVersionTask.getTaskType(),\npublishVersionTask.getSignature());\n}\nprivate ReplicaPersistInfo updateReplicaInfo(OlapTable olapTable, Partition partition,\nlong backendId, long pushIndexId, long indexId,\nTTabletInfo tTabletInfo, PartitionState pushState)\nthrows MetaNotFoundException {\nlong tabletId = tTabletInfo.getTablet_id();\nint schemaHash = tTabletInfo.getSchema_hash();\nlong version = tTabletInfo.getVersion();\nlong rowCount = tTabletInfo.getRow_count();\nlong dataSize = tTabletInfo.getData_size();\nif (indexId != pushIndexId) {\nif (pushState != PartitionState.ROLLUP && indexId != TabletInvertedIndex.NOT_EXIST_VALUE) {\nLOG.warn(\"push task report tablet[{}] with different index[{}] and is not in ROLLUP. push index[{}]\",\ntabletId, indexId, pushIndexId);\nreturn null;\n}\nif (indexId == TabletInvertedIndex.NOT_EXIST_VALUE) {\nLOG.warn(\"tablet[{}] may be dropped. push index[{}]\", tabletId, pushIndexId);\nreturn null;\n}\nMaterializedViewHandler materializedViewHandler = Catalog.getCurrentCatalog().getRollupHandler();\nAlterJob alterJob = materializedViewHandler.getAlterJob(olapTable.getId());\nif (alterJob == null) {\nLOG.warn(\"Cannot find table[{}].\", olapTable.getId());\nreturn null;\n}\n((RollupJob) alterJob).updateRollupReplicaInfo(partition.getId(), indexId, tabletId, backendId,\nschemaHash, version, rowCount, dataSize);\nreturn null;\n}\nint currentSchemaHash = olapTable.getSchemaHashByIndexId(pushIndexId);\nif (schemaHash != currentSchemaHash) {\nif (pushState == PartitionState.SCHEMA_CHANGE) {\nSchemaChangeHandler schemaChangeHandler = Catalog.getCurrentCatalog().getSchemaChangeHandler();\nAlterJob alterJob = schemaChangeHandler.getAlterJob(olapTable.getId());\nif (alterJob != null &&\nschemaHash != ((SchemaChangeJob) alterJob).getSchemaHashByIndexId(pushIndexId)) {\nthrow new MetaNotFoundException(\"tablet[\" + tabletId\n+ \"] schemaHash is not equal to index's switchSchemaHash. \"\n+ ((SchemaChangeJob) alterJob).getSchemaHashByIndexId(pushIndexId) + \" vs. \" + schemaHash);\n}\n} else {\nthrow new MetaNotFoundException(\"Diff tablet[\" + tabletId + \"] schemaHash. index[\" + pushIndexId + \"]: \"\n+ currentSchemaHash + \" vs. 
\" + schemaHash);\n}\n}\nMaterializedIndex materializedIndex = partition.getIndex(pushIndexId);\nif (materializedIndex == null) {\nthrow new MetaNotFoundException(\"Cannot find index[\" + pushIndexId + \"]\");\n}\nLocalTablet tablet = (LocalTablet) materializedIndex.getTablet(tabletId);\nif (tablet == null) {\nthrow new MetaNotFoundException(\"Cannot find tablet[\" + tabletId + \"]\");\n}\nReplica replica = tablet.getReplicaByBackendId(backendId);\nif (replica == null) {\nthrow new MetaNotFoundException(\"cannot find replica in tablet[\" + tabletId + \"], backend[\" + backendId\n+ \"]\");\n}\nreplica.updateRowCount(version, dataSize, rowCount);\nLOG.debug(\"replica[{}] report schemaHash:{}\", replica.getId(), schemaHash);\nreturn ReplicaPersistInfo.createForLoad(olapTable.getId(), partition.getId(), pushIndexId, tabletId,\nreplica.getId(), version, schemaHash, dataSize, rowCount);\n}\nprivate void finishDropReplica(AgentTask task) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.DROP, task.getSignature());\n}\nprivate void finishSchemaChange(AgentTask task, List finishTabletInfos, long reportVersion)\nthrows MetaNotFoundException {\nPreconditions.checkArgument(finishTabletInfos != null && !finishTabletInfos.isEmpty());\nPreconditions.checkArgument(finishTabletInfos.size() == 1);\nSchemaChangeTask schemaChangeTask = (SchemaChangeTask) task;\nSchemaChangeHandler schemaChangeHandler = Catalog.getCurrentCatalog().getSchemaChangeHandler();\nschemaChangeHandler.handleFinishedReplica(schemaChangeTask, finishTabletInfos.get(0), reportVersion);\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.SCHEMA_CHANGE, task.getSignature());\n}\nprivate void finishRollup(AgentTask task, List finishTabletInfos)\nthrows MetaNotFoundException {\nPreconditions.checkArgument(finishTabletInfos != null && !finishTabletInfos.isEmpty());\nPreconditions.checkArgument(finishTabletInfos.size() == 1);\nCreateRollupTask createRollupTask = (CreateRollupTask) task;\nMaterializedViewHandler materializedViewHandler = Catalog.getCurrentCatalog().getRollupHandler();\nmaterializedViewHandler.handleFinishedReplica(createRollupTask, finishTabletInfos.get(0), -1L);\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.ROLLUP, task.getSignature());\n}\nprivate void finishClone(AgentTask task, TFinishTaskRequest request) {\nCloneTask cloneTask = (CloneTask) task;\nif (cloneTask.getTaskVersion() == CloneTask.VERSION_2) {\nCatalog.getCurrentCatalog().getTabletScheduler().finishCloneTask(cloneTask, request);\n} else {\nLOG.warn(\"invalid clone task, ignore it. {}\", task);\n}\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.CLONE, task.getSignature());\n}\nprivate void finishStorageMigration(long backendId, TFinishTaskRequest request) {\nif (request.getTask_status().getStatus_code() != TStatusCode.OK) {\nLOG.warn(\"tablet migrate failed. signature: {}, error msg: {}\", request.getSignature(),\nrequest.getTask_status().error_msgs);\nreturn;\n}\nif (!request.isSetFinish_tablet_infos() || request.getFinish_tablet_infos().isEmpty()) {\nLOG.warn(\"migration finish tablet infos not set. signature: {}\", request.getSignature());\nreturn;\n}\nTTabletInfo reportedTablet = request.getFinish_tablet_infos().get(0);\nlong tabletId = reportedTablet.getTablet_id();\nTabletMeta tabletMeta = Catalog.getCurrentInvertedIndex().getTabletMeta(tabletId);\nif (tabletMeta == null) {\nLOG.warn(\"tablet meta does not exist. 
tablet id: {}\", tabletId);\nreturn;\n}\nlong dbId = tabletMeta.getDbId();\nDatabase db = Catalog.getCurrentCatalog().getDb(dbId);\nif (db == null) {\nLOG.warn(\"db does not exist. db id: {}\", dbId);\nreturn;\n}\ndb.writeLock();\ntry {\nReplica replica = Catalog.getCurrentInvertedIndex().getReplica(tabletId, backendId);\nPreconditions.checkArgument(reportedTablet.isSetPath_hash());\nreplica.setPathHash(reportedTablet.getPath_hash());\n} finally {\ndb.writeUnlock();\n}\n}\nprivate void finishConsistenctCheck(AgentTask task, TFinishTaskRequest request) {\nCheckConsistencyTask checkConsistencyTask = (CheckConsistencyTask) task;\nif (checkConsistencyTask.getVersion() != request.getRequest_version()) {\nLOG.warn(\"check consisteny task is not match. [{}-{}]\",\ncheckConsistencyTask.getVersion(), request.getRequest_version());\nreturn;\n}\nCatalog.getCurrentCatalog().getConsistencyChecker().handleFinishedConsistencyCheck(checkConsistencyTask,\nrequest.getTablet_checksum());\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.CHECK_CONSISTENCY, task.getSignature());\n}\nprivate void finishMakeSnapshot(AgentTask task, TFinishTaskRequest request) {\nSnapshotTask snapshotTask = (SnapshotTask) task;\nif (Catalog.getCurrentCatalog().getBackupHandler().handleFinishedSnapshotTask(snapshotTask, request)) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.MAKE_SNAPSHOT, task.getSignature());\n}\n}\nprivate void finishUpload(AgentTask task, TFinishTaskRequest request) {\nUploadTask uploadTask = (UploadTask) task;\nif (Catalog.getCurrentCatalog().getBackupHandler().handleFinishedSnapshotUploadTask(uploadTask, request)) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.UPLOAD, task.getSignature());\n}\n}\nprivate void finishDownloadTask(AgentTask task, TFinishTaskRequest request) {\nDownloadTask downloadTask = (DownloadTask) task;\nif (Catalog.getCurrentCatalog().getBackupHandler().handleDownloadSnapshotTask(downloadTask, request)) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.DOWNLOAD, task.getSignature());\n}\n}\nprivate void finishMoveDirTask(AgentTask task, TFinishTaskRequest request) {\nDirMoveTask dirMoveTask = (DirMoveTask) task;\nif (Catalog.getCurrentCatalog().getBackupHandler().handleDirMoveTask(dirMoveTask, request)) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.MOVE, task.getSignature());\n}\n}\nprivate void finishRecoverTablet(AgentTask task) {\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.RECOVER_TABLET, task.getSignature());\n}\npublic TMasterResult report(TReportRequest request) throws TException {\nreturn reportHandler.handleReport(request);\n}\npublic TFetchResourceResult fetchResource() {\nreturn Catalog.getCurrentCatalog().getAuth().toResourceThrift();\n}\nprivate void finishAlterTask(AgentTask task) {\nAlterReplicaTask alterTask = (AlterReplicaTask) task;\ntry {\nif (alterTask.getJobType() == JobType.ROLLUP) {\nCatalog.getCurrentCatalog().getRollupHandler().handleFinishAlterTask(alterTask);\n} else if (alterTask.getJobType() == JobType.SCHEMA_CHANGE) {\nCatalog.getCurrentCatalog().getSchemaChangeHandler().handleFinishAlterTask(alterTask);\n}\nalterTask.setFinished(true);\n} catch (MetaNotFoundException e) {\nLOG.warn(\"failed to handle finish alter task: {}, {}\", task.getSignature(), e.getMessage());\n}\nAgentTaskQueue.removeTask(task.getBackendId(), TTaskType.ALTER, task.getSignature());\n}\npublic TGetTableMetaResponse getTableMeta(TGetTableMetaRequest request) {\nString dbName = request.getDb_name();\nString tableName 
= request.getTable_name();\nTTableMeta tableMeta;\nTGetTableMetaResponse response = new TGetTableMetaResponse();\nif (Strings.isNullOrEmpty(dbName) || Strings.isNullOrEmpty(tableName)) {\nTStatus status = new TStatus(TStatusCode.INVALID_ARGUMENT);\nstatus.setError_msgs(Lists.newArrayList(\"missing db or table name\"));\nresponse.setStatus(status);\nreturn response;\n}\nString fullDbName = ClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, dbName);\nDatabase db = Catalog.getCurrentCatalog().getDb(fullDbName);\nif (db == null) {\nTStatus status = new TStatus(TStatusCode.NOT_FOUND);\nstatus.setError_msgs(Lists.newArrayList(\"db not exist\"));\nresponse.setStatus(status);\nreturn response;\n}\ntry {\ndb.readLock();\nTable table = db.getTable(tableName);\nif (table == null) {\nTStatus status = new TStatus(TStatusCode.NOT_FOUND);\nstatus.setError_msgs(Lists.newArrayList(\"table \" + tableName + \" not exist\"));\nresponse.setStatus(status);\nreturn response;\n}\nif (!(table instanceof OlapTable)) {\nTStatus status = new TStatus(TStatusCode.NOT_IMPLEMENTED_ERROR);\nstatus.setError_msgs(Lists.newArrayList(\"only olap table supported\"));\nresponse.setStatus(status);\nreturn response;\n}\nOlapTable olapTable = (OlapTable) table;\ntableMeta = new TTableMeta();\ntableMeta.setTable_id(table.getId());\ntableMeta.setTable_name(tableName);\ntableMeta.setDb_id(db.getId());\ntableMeta.setDb_name(dbName);\ntableMeta.setCluster_id(Catalog.getCurrentCatalog().getClusterId());\ntableMeta.setState(olapTable.getState().name());\ntableMeta.setBloomfilter_fpp(olapTable.getBfFpp());\nif (olapTable.getCopiedBfColumns() != null) {\nfor (String bfColumn : olapTable.getCopiedBfColumns()) {\ntableMeta.addToBloomfilter_columns(bfColumn);\n}\n}\ntableMeta.setBase_index_id(olapTable.getBaseIndexId());\ntableMeta.setColocate_group(olapTable.getColocateGroup());\ntableMeta.setKey_type(olapTable.getKeysType().name());\nTDistributionDesc distributionDesc = new TDistributionDesc();\nDistributionInfo distributionInfo = olapTable.getDefaultDistributionInfo();\ndistributionDesc.setDistribution_type(distributionInfo.getType().name());\nif (distributionInfo.getType() == DistributionInfoType.HASH) {\nTHashDistributionInfo tHashDistributionInfo = new THashDistributionInfo();\nHashDistributionInfo hashDistributionInfo = (HashDistributionInfo) distributionInfo;\ntHashDistributionInfo.setBucket_num(hashDistributionInfo.getBucketNum());\nfor (Column column : hashDistributionInfo.getDistributionColumns()) {\ntHashDistributionInfo.addToDistribution_columns(column.getName());\n}\ndistributionDesc.setHash_distribution(tHashDistributionInfo);\n} else {\nTRandomDistributionInfo tRandomDistributionInfo = new TRandomDistributionInfo();\nRandomDistributionInfo randomDistributionInfo = (RandomDistributionInfo) distributionInfo;\ntRandomDistributionInfo.setBucket_num(randomDistributionInfo.getBucketNum());\ndistributionDesc.setRandom_distribution(tRandomDistributionInfo);\n}\ntableMeta.setDistribution_desc(distributionDesc);\nTableProperty tableProperty = olapTable.getTableProperty();\nfor (Map.Entry property : tableProperty.getProperties().entrySet()) {\ntableMeta.putToProperties(property.getKey(), property.getValue());\n}\nPartitionInfo partitionInfo = olapTable.getPartitionInfo();\nTBasePartitionDesc basePartitionDesc = new TBasePartitionDesc();\nfor (Partition partition : olapTable.getAllPartitions()) {\nTPartitionMeta partitionMeta = new 
TPartitionMeta();\npartitionMeta.setPartition_id(partition.getId());\npartitionMeta.setPartition_name(partition.getName());\npartitionMeta.setState(partition.getState().name());\npartitionMeta.setVisible_version(partition.getVisibleVersion());\npartitionMeta.setVisible_time(partition.getVisibleVersionTime());\npartitionMeta.setNext_version(partition.getNextVersion());\ntableMeta.addToPartitions(partitionMeta);\nShort replicaNum = partitionInfo.getReplicationNum(partition.getId());\nboolean inMemory = partitionInfo.getIsInMemory(partition.getId());\nbasePartitionDesc.putToReplica_num_map(partition.getId(), replicaNum);\nbasePartitionDesc.putToIn_memory_map(partition.getId(), inMemory);\nDataProperty dataProperty = partitionInfo.getDataProperty(partition.getId());\nTDataProperty thriftDataProperty = new TDataProperty();\nthriftDataProperty.setStorage_medium(dataProperty.getStorageMedium());\nthriftDataProperty.setCold_time(dataProperty.getCooldownTimeMs());\nbasePartitionDesc.putToData_property(partition.getId(), thriftDataProperty);\n}\nTPartitionInfo tPartitionInfo = new TPartitionInfo();\ntPartitionInfo.setType(partitionInfo.getType().toThrift());\nif (partitionInfo.getType() == PartitionType.RANGE) {\nTRangePartitionDesc rangePartitionDesc = new TRangePartitionDesc();\nRangePartitionInfo rangePartitionInfo = (RangePartitionInfo) partitionInfo;\nfor (Column column : rangePartitionInfo.getPartitionColumns()) {\nTColumnMeta columnMeta = new TColumnMeta();\ncolumnMeta.setColumnName(column.getName());\ncolumnMeta.setColumnType(column.getType().toThrift());\ncolumnMeta.setKey(column.isKey());\nif (column.getAggregationType() != null) {\ncolumnMeta.setAggregationType(column.getAggregationType().name());\n}\ncolumnMeta.setComment(column.getComment());\nrangePartitionDesc.addToColumns(columnMeta);\n}\nMap> ranges = rangePartitionInfo.getIdToRange(false);\nfor (Map.Entry> range : ranges.entrySet()) {\nTRange tRange = new TRange();\ntRange.setPartition_id(range.getKey());\nByteArrayOutputStream output = new ByteArrayOutputStream();\nDataOutputStream stream = new DataOutputStream(output);\nrange.getValue().lowerEndpoint().write(stream);\ntRange.setStart_key(output.toByteArray());\noutput = new ByteArrayOutputStream();\nstream = new DataOutputStream(output);\nrange.getValue().upperEndpoint().write(stream);\ntRange.setEnd_key(output.toByteArray());\ntRange.setBase_desc(basePartitionDesc);\nrangePartitionDesc.putToRanges(range.getKey(), tRange);\n}\ntPartitionInfo.setRange_partition_desc(rangePartitionDesc);\n} else if (partitionInfo.getType() == PartitionType.UNPARTITIONED) {\nTSinglePartitionDesc singlePartitionDesc = new TSinglePartitionDesc();\nsinglePartitionDesc.setBase_desc(basePartitionDesc);\ntPartitionInfo.setSingle_partition_desc(singlePartitionDesc);\n} else {\nLOG.info(\"invalid partition type {}\", partitionInfo.getType());\nreturn null;\n}\ntableMeta.setPartition_info(tPartitionInfo);\nfor (Index index : olapTable.getIndexes()) {\nTIndexInfo indexInfo = new TIndexInfo();\nindexInfo.setIndex_name(index.getIndexName());\nindexInfo.setIndex_type(index.getIndexType().name());\nindexInfo.setComment(index.getComment());\nfor (String column : index.getColumns()) {\nindexInfo.addToColumns(column);\n}\ntableMeta.addToIndex_infos(indexInfo);\n}\nfor (Partition partition : olapTable.getPartitions()) {\nList indexes = partition.getMaterializedIndices(IndexExtState.ALL);\nfor (MaterializedIndex index : indexes) {\nTIndexMeta indexMeta = new 
TIndexMeta();\nindexMeta.setIndex_id(index.getId());\nindexMeta.setPartition_id(partition.getId());\nindexMeta.setIndex_state(index.getState().toThrift());\nindexMeta.setRow_count(index.getRowCount());\nindexMeta.setRollup_index_id(index.getRollupIndexId());\nindexMeta.setRollup_finished_version(index.getRollupFinishedVersion());\nTSchemaMeta schemaMeta = new TSchemaMeta();\nMaterializedIndexMeta materializedIndexMeta = olapTable.getIndexMetaByIndexId(index.getId());\nschemaMeta.setSchema_version(materializedIndexMeta.getSchemaVersion());\nschemaMeta.setSchema_hash(materializedIndexMeta.getSchemaHash());\nschemaMeta.setShort_key_col_count(materializedIndexMeta.getShortKeyColumnCount());\nschemaMeta.setStorage_type(materializedIndexMeta.getStorageType());\nschemaMeta.setKeys_type(materializedIndexMeta.getKeysType().name());\nfor (Column column : materializedIndexMeta.getSchema()) {\nTColumnMeta columnMeta = new TColumnMeta();\ncolumnMeta.setColumnName(column.getName());\ncolumnMeta.setColumnType(column.getType().toThrift());\ncolumnMeta.setKey(column.isKey());\ncolumnMeta.setAllowNull(column.isAllowNull());\nif (column.getAggregationType() != null) {\ncolumnMeta.setAggregationType(column.getAggregationType().name());\n}\ncolumnMeta.setComment(column.getComment());\ncolumnMeta.setDefaultValue(column.getDefaultValue());\nschemaMeta.addToColumns(columnMeta);\n}\nindexMeta.setSchema_meta(schemaMeta);\nfor (Tablet tablet : index.getTablets()) {\nLocalTablet localTablet = (LocalTablet) tablet;\nTTabletMeta tTabletMeta = new TTabletMeta();\ntTabletMeta.setTablet_id(tablet.getId());\ntTabletMeta.setChecked_version(localTablet.getCheckedVersion());\ntTabletMeta.setConsistent(localTablet.isConsistent());\nTabletMeta tabletMeta = Catalog.getCurrentInvertedIndex().getTabletMeta(tablet.getId());\ntTabletMeta.setDb_id(tabletMeta.getDbId());\ntTabletMeta.setTable_id(tabletMeta.getTableId());\ntTabletMeta.setPartition_id(tabletMeta.getPartitionId());\ntTabletMeta.setIndex_id(tabletMeta.getIndexId());\ntTabletMeta.setStorage_medium(tabletMeta.getStorageMedium());\ntTabletMeta.setOld_schema_hash(tabletMeta.getOldSchemaHash());\ntTabletMeta.setNew_schema_hash(tabletMeta.getNewSchemaHash());\nfor (Replica replica : localTablet.getReplicas()) {\nTReplicaMeta replicaMeta = new TReplicaMeta();\nreplicaMeta.setReplica_id(replica.getId());\nreplicaMeta.setBackend_id(replica.getBackendId());\nreplicaMeta.setSchema_hash(replica.getSchemaHash());\nreplicaMeta.setVersion(replica.getVersion());\nreplicaMeta.setData_size(replica.getDataSize());\nreplicaMeta.setRow_count(replica.getRowCount());\nreplicaMeta.setState(replica.getState().name());\nreplicaMeta.setLast_failed_version(replica.getLastFailedVersion());\nreplicaMeta.setLast_failed_time(replica.getLastFailedTimestamp());\nreplicaMeta.setLast_success_version(replica.getLastSuccessVersion());\nreplicaMeta.setVersion_count(replica.getVersionCount());\nreplicaMeta.setPath_hash(replica.getPathHash());\nreplicaMeta.setBad(replica.isBad());\ntTabletMeta.addToReplicas(replicaMeta);\n}\nindexMeta.addToTablets(tTabletMeta);\n}\ntableMeta.addToIndexes(indexMeta);\n}\n}\nList backends = new ArrayList<>();\nfor (Backend backend : Catalog.getCurrentCatalog().getCurrentSystemInfo()\n.getClusterBackends(db.getClusterName())) {\nTBackendMeta backendMeta = new 
TBackendMeta();\nbackendMeta.setBackend_id(backend.getId());\nbackendMeta.setHost(backend.getHost());\nbackendMeta.setBe_port(backend.getBeRpcPort());\nbackendMeta.setRpc_port(backend.getBrpcPort());\nbackendMeta.setHttp_port(backend.getHttpPort());\nbackendMeta.setAlive(backend.isAlive());\nbackendMeta.setState(backend.getBackendState().ordinal());\nbackends.add(backendMeta);\n}\nresponse.setStatus(new TStatus(TStatusCode.OK));\nresponse.setTable_meta(tableMeta);\nresponse.setBackends(backends);\n} catch (Exception e) {\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(e.getMessage()));\nLOG.info(\"exception: {}\", e.getStackTrace());\nresponse.setStatus(status);\n} finally {\ndb.readUnlock();\n}\nreturn response;\n}\npublic TNetworkAddress masterAddr() {\nString masterHost = Catalog.getCurrentCatalog().getMasterIp();\nint masterRpcPort = Catalog.getCurrentCatalog().getMasterRpcPort();\nreturn new TNetworkAddress(masterHost, masterRpcPort);\n}\npublic TCommitRemoteTxnResponse commitRemoteTxn(TCommitRemoteTxnRequest request) throws TException {\nTCommitRemoteTxnResponse response = new TCommitRemoteTxnResponse();\nCatalog catalog = Catalog.getCurrentCatalog();\nif (!catalog.isMaster()) {\nTNetworkAddress addr = masterAddr();\nFrontendService.Client client = null;\ntry {\nLOG.info(\"commitRemoteTxn as follower, forward it to master. txn_id: {}, master: {}\",\nrequest.getTxn_id(), addr.toString());\nclient = ClientPool.frontendPool.borrowObject(addr, 1000);\nresponse = client.commitRemoteTxn(request);\nClientPool.frontendPool.returnObject(addr, client);\n} catch (Exception e) {\nLOG.warn(\"create thrift client failed during commitRemoteTxn, txn_id: {}, exception: {}\", request.getTxn_id(), e);\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(\"forward request to fe master failed\"));\nresponse.setStatus(status);\nClientPool.frontendPool.invalidateObject(addr, client);\n} finally {\nreturn response;\n}\n}\nDatabase db = catalog.getDb(request.getDb_id());\nif (db == null) {\nTStatus status = new TStatus(TStatusCode.NOT_FOUND);\nstatus.setError_msgs(Lists.newArrayList(\"db not exist or already deleted\"));\nresponse.setStatus(status);\nreturn response;\n}\ntry {\nTxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.getCommit_attachment());\nlong timeoutMs = request.isSetCommit_timeout_ms() ? 
request.getCommit_timeout_ms() : 5000;\ntimeoutMs = timeoutMs * 3 / 4;\nboolean ret = Catalog.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(\ndb, request.getTxn_id(),\nTabletCommitInfo.fromThrift(request.getCommit_infos()),\ntimeoutMs, attachment);\nif (!ret) {\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(\"commit and publish txn failed\"));\nresponse.setStatus(status);\nreturn response;\n}\n} catch (UserException e) {\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(e.getMessage()));\nresponse.setStatus(status);\nreturn response;\n}\nTStatus status = new TStatus(TStatusCode.OK);\nresponse.setStatus(status);\nLOG.info(\"commit remote transaction: {} success\", request.getTxn_id());\nreturn response;\n}\npublic TAbortRemoteTxnResponse abortRemoteTxn(TAbortRemoteTxnRequest request) throws TException {\nTAbortRemoteTxnResponse response = new TAbortRemoteTxnResponse();\nCatalog catalog = Catalog.getCurrentCatalog();\nif (!catalog.isMaster()) {\nTNetworkAddress addr = masterAddr();\nFrontendService.Client client = null;\ntry {\nLOG.info(\"abortRemoteTxn as follower, forward it to master. txn_id: {}, master: {}\",\nrequest.getTxn_id(), addr.toString());\nclient = ClientPool.frontendPool.borrowObject(addr, 1000);\nresponse = client.abortRemoteTxn(request);\nClientPool.frontendPool.returnObject(addr, client);\n} catch (Exception e) {\nLOG.warn(\"create thrift client failed during abortRemoteTxn, txn_id: {}, exception: {}\", request.getTxn_id(), e);\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(\"forward request to fe master failed\"));\nresponse.setStatus(status);\nClientPool.frontendPool.invalidateObject(addr, client);\n} finally {\nreturn response;\n}\n}\nDatabase db = catalog.getDb(request.getDb_id());\nif (db == null) {\nTStatus status = new TStatus(TStatusCode.NOT_FOUND);\nstatus.setError_msgs(Lists.newArrayList(\"db not exist or already deleted\"));\nresponse.setStatus(status);\nreturn response;\n}\ntry {\nCatalog.getCurrentGlobalTransactionMgr().abortTransaction(\nrequest.getDb_id(), request.getTxn_id(), request.getError_msg());\n} catch (Exception e) {\nTStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\nstatus.setError_msgs(Lists.newArrayList(e.getMessage()));\nresponse.setStatus(status);\nreturn response;\n}\nTStatus status = new TStatus(TStatusCode.OK);\nresponse.setStatus(status);\nreturn response;\n}\n}" + }, + { + "comment": "OK, thanks, I merged it.", + "method_body": "private static void logGuessedPath(String guessedPath) {\nSystem.err.println(\"======================================================================================\");\nSystem.err.println(\" native.image.path was not set, making a guess for the correct path of native image\");\nSystem.err.println(\" guessed path: \" + guessedPath);\nSystem.err.println(\"======================================================================================\");\n}", + "target_code": "System.err.println(\"======================================================================================\");", + "method_body_after": "private static void logGuessedPath(String guessedPath) {\nSystem.err.println(\"======================================================================================\");\nSystem.err.println(\" native.image.path was not set, making a guess for the correct path of native image\");\nSystem.err.println(\" guessed path: \" + 
guessedPath);\nSystem.err.println(\"======================================================================================\");\n}", + "context_before": "class NativeImageLauncher implements Closeable {\nprivate static final int DEFAULT_PORT = 8081;\nprivate static final long DEFAULT_IMAGE_WAIT_TIME = 60;\nprivate final Class testClass;\nprivate final String profile;\nprivate Process quarkusProcess;\nprivate final int port;\nprivate final long imageWaitTime;\nprivate final Map systemProps = new HashMap<>();\nprivate List startedNotifiers;\npublic NativeImageLauncher(Class testClass) {\nthis(testClass,\nConfigProvider.getConfig().getOptionalValue(\"quarkus.http.test-port\", Integer.class).orElse(DEFAULT_PORT),\nConfigProvider.getConfig().getOptionalValue(\"quarkus.test.native-image-wait-time\", Long.class)\n.orElse(DEFAULT_IMAGE_WAIT_TIME),\nConfigProvider.getConfig().getOptionalValue(\"quarkus.test.native-image-profile\", String.class)\n.orElse(null));\n}\npublic NativeImageLauncher(Class testClass, int port, long imageWaitTime, String profile) {\nthis.testClass = testClass;\nthis.port = port;\nthis.imageWaitTime = imageWaitTime;\nList startedNotifiers = new ArrayList<>();\nfor (NativeImageStartedNotifier i : ServiceLoader.load(NativeImageStartedNotifier.class)) {\nstartedNotifiers.add(i);\n}\nthis.startedNotifiers = startedNotifiers;\nthis.profile = profile;\n}\npublic void start() throws IOException {\nSystem.setProperty(\"test.url\", TestHTTPResourceManager.getUri());\nString path = System.getProperty(\"native.image.path\");\nif (path == null) {\npath = guessPath(testClass);\n}\nList args = new ArrayList<>();\nargs.add(path);\nargs.add(\"-Dquarkus.http.port=\" + port);\nargs.add(\"-Dtest.url=\" + TestHTTPResourceManager.getUri());\nargs.add(\"-Dquarkus.log.file.path=\" + PropertyTestUtil.getLogFileLocation());\nif (profile != null) {\nargs.add(\"-Dquarkus.profile=\" + profile);\n}\nfor (Map.Entry e : systemProps.entrySet()) {\nargs.add(\"-D\" + e.getKey() + \"=\" + e.getValue());\n}\nSystem.out.println(\"Executing \" + args);\nquarkusProcess = Runtime.getRuntime().exec(args.toArray(new String[args.size()]));\nnew Thread(new ProcessReader(quarkusProcess.getInputStream())).start();\nnew Thread(new ProcessReader(quarkusProcess.getErrorStream())).start();\nwaitForQuarkus();\n}\nprivate static String guessPath(Class testClass) {\nClassLoader cl = testClass.getClassLoader();\nif (cl instanceof URLClassLoader) {\nURL[] urls = ((URLClassLoader) cl).getURLs();\nfor (URL url : urls) {\nif (url.getProtocol().equals(\"file\") && url.getPath().endsWith(\"test-classes/\")) {\nFile testClasses = new File(url.getPath());\nfor (File file : testClasses.getParentFile().listFiles()) {\nif (file.getName().endsWith(\"-runner\")) {\nlogGuessedPath(file.getAbsolutePath());\nreturn file.getAbsolutePath();\n}\n}\n} else if (url.getProtocol().equals(\"file\") && url.getPath().endsWith(\"test/\")) {\nFile testClasses = new File(url.getPath());\nfor (File file : testClasses.getParentFile().getParentFile().getParentFile().listFiles()) {\nif (file.getName().endsWith(\"-runner\")) {\nlogGuessedPath(file.getAbsolutePath());\nreturn file.getAbsolutePath();\n}\n}\n} else if (url.getProtocol().equals(\"file\") && url.getPath().contains(\"/target/surefire/\")) {\nString path = url.getPath();\nint index = path.lastIndexOf(\"/target/\");\nFile targetDir = new File(path.substring(0, index) + \"/target/\");\nfor (File file : targetDir.listFiles()) {\nif (file.getName().endsWith(\"-runner\")) 
{\nlogGuessedPath(file.getAbsolutePath());\nreturn file.getAbsolutePath();\n}\n}\n}\n}\n}\nthrow new RuntimeException(\n\"Unable to automatically find native image, please set the native.image.path to the native executable you wish to test\");\n}\nprivate void waitForQuarkus() {\nlong bailout = System.currentTimeMillis() + imageWaitTime * 1000;\nwhile (System.currentTimeMillis() < bailout) {\nif (!quarkusProcess.isAlive()) {\nthrow new RuntimeException(\"Failed to start native image, process has exited\");\n}\ntry {\nThread.sleep(100);\nfor (NativeImageStartedNotifier i : startedNotifiers) {\nif (i.isNativeImageStarted()) {\nreturn;\n}\n}\ntry (Socket s = new Socket()) {\ns.connect(new InetSocketAddress(\"localhost\", port));\nreturn;\n}\n} catch (Exception expected) {\n}\n}\nthrow new RuntimeException(\"Unable to start native image in \" + imageWaitTime + \"s\");\n}\npublic void addSystemProperties(Map systemProps) {\nthis.systemProps.putAll(systemProps);\n}\nprivate static final class ProcessReader implements Runnable {\nprivate final InputStream inputStream;\nprivate ProcessReader(InputStream inputStream) {\nthis.inputStream = inputStream;\n}\n@Override\npublic void run() {\nbyte[] b = new byte[100];\nint i;\ntry {\nwhile ((i = inputStream.read(b)) > 0) {\nSystem.out.print(new String(b, 0, i, StandardCharsets.UTF_8));\n}\n} catch (IOException e) {\n}\n}\n}\n@Override\npublic void close() {\nquarkusProcess.destroy();\n}\n}", + "context_after": "class NativeImageLauncher implements Closeable {\nprivate static final int DEFAULT_PORT = 8081;\nprivate static final long DEFAULT_IMAGE_WAIT_TIME = 60;\nprivate final Class testClass;\nprivate final String profile;\nprivate Process quarkusProcess;\nprivate final int port;\nprivate final long imageWaitTime;\nprivate final Map systemProps = new HashMap<>();\nprivate List startedNotifiers;\npublic NativeImageLauncher(Class testClass) {\nthis(testClass,\nConfigProvider.getConfig().getOptionalValue(\"quarkus.http.test-port\", Integer.class).orElse(DEFAULT_PORT),\nConfigProvider.getConfig().getOptionalValue(\"quarkus.test.native-image-wait-time\", Long.class)\n.orElse(DEFAULT_IMAGE_WAIT_TIME),\nConfigProvider.getConfig().getOptionalValue(\"quarkus.test.native-image-profile\", String.class)\n.orElse(null));\n}\npublic NativeImageLauncher(Class testClass, int port, long imageWaitTime, String profile) {\nthis.testClass = testClass;\nthis.port = port;\nthis.imageWaitTime = imageWaitTime;\nList startedNotifiers = new ArrayList<>();\nfor (NativeImageStartedNotifier i : ServiceLoader.load(NativeImageStartedNotifier.class)) {\nstartedNotifiers.add(i);\n}\nthis.startedNotifiers = startedNotifiers;\nthis.profile = profile;\n}\npublic void start() throws IOException {\nSystem.setProperty(\"test.url\", TestHTTPResourceManager.getUri());\nString path = System.getProperty(\"native.image.path\");\nif (path == null) {\npath = guessPath(testClass);\n}\nList args = new ArrayList<>();\nargs.add(path);\nargs.add(\"-Dquarkus.http.port=\" + port);\nargs.add(\"-Dtest.url=\" + TestHTTPResourceManager.getUri());\nargs.add(\"-Dquarkus.log.file.path=\" + PropertyTestUtil.getLogFileLocation());\nif (profile != null) {\nargs.add(\"-Dquarkus.profile=\" + profile);\n}\nfor (Map.Entry e : systemProps.entrySet()) {\nargs.add(\"-D\" + e.getKey() + \"=\" + e.getValue());\n}\nSystem.out.println(\"Executing \" + args);\nquarkusProcess = Runtime.getRuntime().exec(args.toArray(new String[args.size()]));\nnew Thread(new ProcessReader(quarkusProcess.getInputStream())).start();\nnew Thread(new 
ProcessReader(quarkusProcess.getErrorStream())).start();\nwaitForQuarkus();\n}\nprivate static String guessPath(Class testClass) {\nClassLoader cl = testClass.getClassLoader();\nif (cl instanceof URLClassLoader) {\nURL[] urls = ((URLClassLoader) cl).getURLs();\nfor (URL url : urls) {\nif (url.getProtocol().equals(\"file\") && url.getPath().endsWith(\"test-classes/\")) {\nFile testClasses = new File(url.getPath());\nfor (File file : testClasses.getParentFile().listFiles()) {\nif (file.getName().endsWith(\"-runner\")) {\nlogGuessedPath(file.getAbsolutePath());\nreturn file.getAbsolutePath();\n}\n}\n} else if (url.getProtocol().equals(\"file\") && url.getPath().endsWith(\"test/\")) {\nFile testClasses = new File(url.getPath());\nfor (File file : testClasses.getParentFile().getParentFile().getParentFile().listFiles()) {\nif (file.getName().endsWith(\"-runner\")) {\nlogGuessedPath(file.getAbsolutePath());\nreturn file.getAbsolutePath();\n}\n}\n} else if (url.getProtocol().equals(\"file\") && url.getPath().contains(\"/target/surefire/\")) {\nString path = url.getPath();\nint index = path.lastIndexOf(\"/target/\");\nFile targetDir = new File(path.substring(0, index) + \"/target/\");\nfor (File file : targetDir.listFiles()) {\nif (file.getName().endsWith(\"-runner\")) {\nlogGuessedPath(file.getAbsolutePath());\nreturn file.getAbsolutePath();\n}\n}\n}\n}\n}\nthrow new RuntimeException(\n\"Unable to automatically find native image, please set the native.image.path to the native executable you wish to test\");\n}\nprivate void waitForQuarkus() {\nlong bailout = System.currentTimeMillis() + imageWaitTime * 1000;\nwhile (System.currentTimeMillis() < bailout) {\nif (!quarkusProcess.isAlive()) {\nthrow new RuntimeException(\"Failed to start native image, process has exited\");\n}\ntry {\nThread.sleep(100);\nfor (NativeImageStartedNotifier i : startedNotifiers) {\nif (i.isNativeImageStarted()) {\nreturn;\n}\n}\ntry (Socket s = new Socket()) {\ns.connect(new InetSocketAddress(\"localhost\", port));\nreturn;\n}\n} catch (Exception expected) {\n}\n}\nthrow new RuntimeException(\"Unable to start native image in \" + imageWaitTime + \"s\");\n}\npublic void addSystemProperties(Map systemProps) {\nthis.systemProps.putAll(systemProps);\n}\nprivate static final class ProcessReader implements Runnable {\nprivate final InputStream inputStream;\nprivate ProcessReader(InputStream inputStream) {\nthis.inputStream = inputStream;\n}\n@Override\npublic void run() {\nbyte[] b = new byte[100];\nint i;\ntry {\nwhile ((i = inputStream.read(b)) > 0) {\nSystem.out.print(new String(b, 0, i, StandardCharsets.UTF_8));\n}\n} catch (IOException e) {\n}\n}\n}\n@Override\npublic void close() {\nquarkusProcess.destroy();\n}\n}" + }, + { + "comment": "Could you add some test to verify the case when result?", + "method_body": "void testPivot() throws Exception {\nString sql = \"select * from t0 pivot (sum(c1) for c2 in (1, 2, 3)) order by c0\";\nString columns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals(\"c0,1,2,3\", columns);\nsql = \"select * from t0 pivot (sum(c1) for c2 in (1 as a, 2 as b, 3)) order by c0\";\ncolumns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals(\"c0,a,b,3\", columns);\nsql = \"select * from t0 pivot (sum(c1), avg(c1) as avg for c2 in (1 as a, 2 as b, 3)) order by c0\";\ncolumns = 
String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals(\"c0,a_sum(db1.t0.c1),a_avg,b_sum(db1.t0.c1),b_avg,3_sum(db1.t0.c1),3_avg\", columns);\nsql = \"select * from t0 pivot (sum(c1) as sum, avg(c1) as avg for c2 in (1 as a, 2 as b, 3)) order by c0\";\ncolumns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals( \"c0,a_sum,a_avg,b_sum,b_avg,3_sum,3_avg\", columns);\nsql = \"select * from t0 join tbl1 \"\n+ \"pivot (sum(t0.c1) as s, avg(t0.c2) as a \"\n+ \"for (k1, k2) \"\n+ \"in (('a', 'a'), ('b', 'b'), ('c', 'c'))) order by t0.c0\";\ncolumns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals(\n\"c0,k3,k4,{'a','a'}_s,{'a','a'}_a,{'b','b'}_s,{'b','b'}_a,{'c','c'}_s,{'c','c'}_a\", columns);\nsql = \"select * from t0 join tbl1 \"\n+ \"pivot (sum(t0.c1) as s, avg(t0.c2) as a \"\n+ \"for (k1, k2) \"\n+ \"in (('a', 'a') as aa, ('b', 'b') as bb, ('c', 'c') as cc, ('d', 'd') as dd)) order by t0.c0\";\ncolumns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals(\n\"c0,k3,k4,aa_s,aa_a,bb_s,bb_a,cc_s,cc_a,dd_s,dd_a\", columns);\n}", + "target_code": "\"c0,k3,k4,aa_s,aa_a,bb_s,bb_a,cc_s,cc_a,dd_s,dd_a\", columns);", + "method_body_after": "void testPivot() throws Exception {\nString sql = \"select * from t0 pivot (sum(c1) for c2 in (1, 2, 3)) order by c0\";\nString columns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals(\"c0,1,2,3\", columns);\nsql = \"select * from t0 pivot (sum(c1) for c2 in (1 as a, 2 as b, 3)) order by c0\";\ncolumns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals(\"c0,a,b,3\", columns);\nsql = \"select * from t0 pivot (sum(c1), avg(c1) as avg for c2 in (1 as a, 2 as b, 3)) order by c0\";\ncolumns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals(\"c0,a_sum(db1.t0.c1),a_avg,b_sum(db1.t0.c1),b_avg,3_sum(db1.t0.c1),3_avg\", columns);\nsql = \"select * from t0 pivot (sum(c1) as sum, avg(c1) as avg for c2 in (1 as a, 2 as b, 3)) order by c0\";\ncolumns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals( \"c0,a_sum,a_avg,b_sum,b_avg,3_sum,3_avg\", columns);\nsql = \"select * from t0 join tbl1 \"\n+ \"pivot (sum(t0.c1) as s, avg(t0.c2) as a \"\n+ \"for (k1, k2) \"\n+ \"in (('a', 'a'), ('b', 'b'), ('c', 'c'))) order by t0.c0\";\ncolumns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals(\n\"c0,k3,k4,{'a','a'}_s,{'a','a'}_a,{'b','b'}_s,{'b','b'}_a,{'c','c'}_s,{'c','c'}_a\", columns);\nsql = \"select * from t0 join tbl1 \"\n+ \"pivot (sum(t0.c1) as s, avg(t0.c2) as a \"\n+ \"for (k1, k2) \"\n+ \"in (('a', 'a') as aa, ('b', 'b') as bb, ('c', 'c') as cc, ('d', 'd') as dd)) order by t0.c0\";\ncolumns = String.join(\",\",\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second.getColNames());\nAssertions.assertEquals(\n\"c0,k3,k4,aa_s,aa_a,bb_s,bb_a,cc_s,cc_a,dd_s,dd_a\", columns);\n}", + "context_before": "class SelectStmtTest {\nprivate static 
StarRocksAssert starRocksAssert;\n@BeforeAll\npublic static void setUp() throws Exception {\nUtFrameUtils.createMinStarRocksCluster();\nString createTblStmtStr = \"create table db1.tbl1(k1 varchar(32), k2 varchar(32), k3 varchar(32), k4 int) \"\n+ \"AGGREGATE KEY(k1, k2,k3,k4) distributed by hash(k1) buckets 3 properties('replication_num' = '1');\";\nString createBaseAllStmtStr = \"create table db1.baseall(k1 int) distributed by hash(k1) \"\n+ \"buckets 3 properties('replication_num' = '1');\";\nString createDateTblStmtStr = \"create table db1.t(k1 int, dt date) \"\n+ \"DUPLICATE KEY(k1) distributed by hash(k1) buckets 3 properties('replication_num' = '1');\";\nString createPartitionTableStr = \"CREATE TABLE db1.partition_table (\\n\" +\n\"datekey int(11) NULL COMMENT \\\"datekey\\\",\\n\" +\n\"poi_id bigint(20) NULL COMMENT \\\"poi_id\\\"\\n\" +\n\") ENGINE=OLAP\\n\" +\n\"AGGREGATE KEY(datekey, poi_id)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"PARTITION BY RANGE(datekey)\\n\" +\n\"(PARTITION p20200727 VALUES [(\\\"20200726\\\"), (\\\"20200727\\\")),\\n\" +\n\"PARTITION p20200728 VALUES [(\\\"20200727\\\"), (\\\"20200728\\\")))\\n\" +\n\"DISTRIBUTED BY HASH(poi_id) BUCKETS 2\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"storage_type\\\" = \\\"COLUMN\\\",\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\");\";\nString createTable1 = \"CREATE TABLE `t0` (\\n\" +\n\" `c0` varchar(24) NOT NULL COMMENT \\\"\\\",\\n\" +\n\" `c1` decimal128(24, 5) NOT NULL COMMENT \\\"\\\",\\n\" +\n\" `c2` decimal128(24, 2) NOT NULL COMMENT \\\"\\\"\\n\" +\n\") ENGINE=OLAP \\n\" +\n\"DUPLICATE KEY(`c0`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`c0`) BUCKETS 1 \\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\",\\n\" +\n\"\\\"storage_format\\\" = \\\"DEFAULT\\\",\\n\" +\n\"\\\"enable_persistent_index\\\" = \\\"false\\\",\\n\" +\n\"\\\"replicated_storage\\\" = \\\"true\\\",\\n\" +\n\"\\\"compression\\\" = \\\"LZ4\\\"\\n\" +\n\"); \";\nstarRocksAssert = new StarRocksAssert();\nstarRocksAssert.withDatabase(\"db1\").useDatabase(\"db1\");\nstarRocksAssert.withTable(createTblStmtStr)\n.withTable(createBaseAllStmtStr)\n.withTable(createDateTblStmtStr)\n.withTable(createPartitionTableStr)\n.withTable(createTable1);\nFeConstants.enablePruneEmptyOutputScan = false;\n}\n@Test\n@Test\nvoid testGroupByConstantExpression() throws Exception {\nString sql = \"SELECT k1 - 4*60*60 FROM baseall GROUP BY k1 - 4*60*60\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testWithWithoutDatabase() throws Exception {\nString sql = \"with tmp as (select count(*) from db1.tbl1) select * from tmp;\";\nstarRocksAssert.withoutUseDatabase();\nstarRocksAssert.query(sql).explainQuery();\nsql = \"with tmp as (select * from db1.tbl1) \" +\n\"select a.k1, b.k2, a.k3 from (select k1, k3 from tmp) a \" +\n\"left join (select k1, k2 from tmp) b on a.k1 = b.k1;\";\nstarRocksAssert.withoutUseDatabase();\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testDataGripSupport() throws Exception {\nString sql = \"select schema();\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select\\n\" +\n\"collation_name,\\n\" +\n\"character_set_name,\\n\" +\n\"is_default collate utf8_general_ci = 'Yes' as is_default\\n\" +\n\"from information_schema.collations\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testNavicatBinarySupport() throws Exception {\nString sql = \"SELECT ACTION_ORDER, \\n\" +\n\" EVENT_OBJECT_TABLE, \\n\" +\n\" TRIGGER_NAME, 
\\n\" +\n\" EVENT_MANIPULATION, \\n\" +\n\" EVENT_OBJECT_TABLE, \\n\" +\n\" DEFINER, \\n\" +\n\" ACTION_STATEMENT, \\n\" +\n\" ACTION_TIMING\\n\" +\n\"FROM information_schema.triggers\\n\" +\n\"WHERE BINARY event_object_schema = 'test_ods_inceptor' \\n\" +\n\" AND BINARY event_object_table = 'cus_ast_total_d_p' \\n\" +\n\"ORDER BY event_object_table\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testEqualExprNotMonotonic() throws Exception {\nConnectContext ctx = UtFrameUtils.createDefaultCtx();\nString sql = \"select k1 from db1.baseall where (k1=10) = true\";\nString expectString =\n\"[TPlanNode(node_id:0, node_type:OLAP_SCAN_NODE, num_children:0, limit:-1, row_tuples:[0], \" +\n\"nullable_tuples:[false], conjuncts:[TExpr(nodes:[TExprNode(node_type:BINARY_PRED, \" +\n\"type:TTypeDesc(types:[TTypeNode(type:SCALAR, scalar_type:TScalarType(type:BOOLEAN))]), \" +\n\"opcode:EQ, num_children:2, output_scale:-1, vector_opcode:INVALID_OPCODE, child_type:INT, \" +\n\"has_nullable_child:true, is_nullable:true, is_monotonic:false,\";\nString thrift = UtFrameUtils.getPlanThriftString(ctx, sql);\nAssert.assertTrue(thrift, thrift.contains(expectString));\n}\n@Test\nvoid testCurrentUserFunSupport() throws Exception {\nString sql = \"select current_user()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_user\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testTimeFunSupport() throws Exception {\nString sql = \"select current_timestamp()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_timestamp\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_time()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_time\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_date()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_date\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select localtime()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select localtime\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select localtimestamp()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select localtimestamp\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testDateTruncUpperCase() throws Exception {\nString sql = \"select date_trunc('MONTH', CAST('2020-11-04 11:12:13' AS DATE));\";\nConnectContext ctx = starRocksAssert.getCtx();\nUtFrameUtils.parseStmtWithNewParser(sql, ctx);\n}\n@Test\nvoid testSelectFromTabletIds() throws Exception {\nFeConstants.runningUnitTest = true;\nShowResultSet tablets = starRocksAssert.showTablet(\"db1\", \"partition_table\");\nList tabletIds = tablets.getResultRows().stream().map(r -> r.get(0)).collect(Collectors.toList());\nAssert.assertEquals(tabletIds.size(), 4);\nString tabletCsv = String.join(\",\", tabletIds);\nString sql = String.format(\"select count(1) from db1.partition_table tablet (%s)\", tabletCsv);\nString explain = starRocksAssert.query(sql).explainQuery();\nAssert.assertTrue(explain.contains(tabletCsv));\nString invalidTabletCsv = tabletIds.stream().map(id -> id + \"0\").collect(Collectors.joining(\",\"));\nString invalidSql = String.format(\"select count(1) from db1.partition_table tablet (%s)\", invalidTabletCsv);\ntry {\nstarRocksAssert.query(invalidSql).explainQuery();\n} catch (Throwable ex) {\nAssert.assertTrue(ex.getMessage().contains(\"Invalid tablet\"));\n}\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testNegateEqualForNullInWhereClause() throws Exception {\nString[] 
queryList = {\n\"select * from db1.tbl1 where not(k1 <=> NULL)\",\n\"select * from db1.tbl1 where not(k1 <=> k2)\",\n\"select * from db1.tbl1 where not(k1 <=> 'abc-def')\",\n};\nPattern re = Pattern.compile(\"PREDICATES: NOT.*<=>.*\");\nfor (String q : queryList) {\nString s = starRocksAssert.query(q).explainQuery();\nAssert.assertTrue(re.matcher(s).find());\n}\n}\n@Test\nvoid testSimplifiedPredicateRuleApplyToNegateEuqualForNull() throws Exception {\nString[] queryList = {\n\"select not(k1 <=> NULL) from db1.tbl1\",\n\"select not(NULL <=> k1) from db1.tbl1\",\n\"select not(k1 <=> 'abc-def') from db1.tbl1\",\n};\nPattern re = Pattern.compile(\"NOT.*<=>.*\");\nfor (String q : queryList) {\nString s = starRocksAssert.query(q).explainQuery();\nAssert.assertTrue(re.matcher(s).find());\n}\n}\nprivate void assertNoCastStringAsStringInPlan(String sql) throws Exception {\nExecPlan execPlan = UtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second;\nList operators = execPlan.getPhysicalPlan().getInputs().stream().flatMap(input ->\ninput.getOp().getProjection().getColumnRefMap().values().stream()).collect(Collectors.toList());\nAssert.assertTrue(operators.stream().noneMatch(op -> (op instanceof CastOperator) &&\nop.getType().isStringType() &&\nop.getChild(0).getType().isStringType()));\n}\n@Test\nvoid testFoldCastOfChildExprsOfSetOperation() throws Exception {\nString sql0 = \"select cast('abcdefg' as varchar(2)) a, cast('abc' as varchar(3)) b\\n\" +\n\"intersect\\n\" +\n\"select cast('aa123456789' as varchar) a, cast('abcd' as varchar(4)) b\";\nString sql1 = \"select k1, group_concat(k2) as k2 from db1.tbl1 group by k1 \\n\" +\n\"except\\n\" +\n\"select k1, cast(k4 as varchar(255)) from db1.tbl1\";\nString sql2 = \"select k1, k2 from db1.tbl1\\n\" +\n\"union all\\n\" +\n\"select cast(concat(k1, 'abc') as varchar(256)) as k1, cast(concat(k2, 'abc') as varchar(256)) as k2 \" +\n\"from db1.tbl1\\n\" +\n\"union all\\n\" +\n\"select cast('abcdef' as varchar) k1, cast('deadbeef' as varchar(1999)) k2\";\nfor (String sql : Arrays.asList(sql0, sql1, sql2)) {\nassertNoCastStringAsStringInPlan(sql);\n}\n}\n@Test\nvoid testCatalogFunSupport() throws Exception {\nString sql = \"select catalog()\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testBanSubqueryAppearsInLeftSideChildOfInPredicates() {\nString sql = \"select k1, count(k2) from db1.tbl1 group by k1 \" +\n\"having (exists (select k1 from db1.tbl1 where NULL)) in (select k1 from db1.tbl1 where NULL);\";\ntry {\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql);\n} catch (Exception e) {\nAssert.assertTrue(e.getMessage().contains(\"Subquery in left-side child of in-predicate is not supported\"));\n}\n}\n@Test\nvoid testGroupByCountDistinctWithSkewHint() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql =\n\"select cast(k1 as int), count(distinct [skew] cast(k2 as int)) from db1.tbl1 group by cast(k1 as int)\";\nString s = starRocksAssert.query(sql).explainQuery();\nAssert.assertTrue(s, s.contains(\" 3:Project\\n\" +\n\" | : 5: cast\\n\" +\n\" | : 6: cast\\n\" +\n\" | : CAST(murmur_hash3_32(CAST(6: cast AS VARCHAR)) % 512 AS SMALLINT)\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testGroupByCountDistinctArrayWithSkewHint() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql =\n\"select b1, count(distinct [skew] a1) as cnt from (select split('a,b,c', ',') as a1, 'aaa' as b1) t1 group by b1\";\nString s = starRocksAssert.query(sql).explainQuery();\nAssert.assertTrue(s, 
s.contains(\"PLAN FRAGMENT 0\\n\" +\n\" OUTPUT EXPRS:3: expr | 4: count\\n\" +\n\" PARTITION: UNPARTITIONED\\n\" +\n\"\\n\" +\n\" RESULT SINK\\n\" +\n\"\\n\" +\n\" 5:AGGREGATE (merge finalize)\\n\" +\n\" | output: count(4: count)\\n\" +\n\" | group by: 3: expr\\n\" +\n\" | \\n\" +\n\" 4:AGGREGATE (update serialize)\\n\" +\n\" | STREAMING\\n\" +\n\" | output: count(2: split)\\n\" +\n\" | group by: 3: expr\\n\" +\n\" | \\n\" +\n\" 3:Project\\n\" +\n\" | : 2: split\\n\" +\n\" | : 'aaa'\\n\" +\n\" | \\n\" +\n\" 2:AGGREGATE (update serialize)\\n\" +\n\" | group by: 2: split\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | : split('a,b,c', ',')\\n\" +\n\" | : 'aaa'\\n\" +\n\" | \\n\" +\n\" 0:UNION\\n\" +\n\" constant exprs: \\n\" +\n\" NULL\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testGroupByMultiColumnCountDistinctWithSkewHint() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql =\n\"select cast(k1 as int), k3, count(distinct [skew] cast(k2 as int)) from db1.tbl1 group by cast(k1 as int), k3\";\nString s = starRocksAssert.query(sql).explainQuery();\nAssert.assertTrue(s, s.contains(\" 3:Project\\n\" +\n\" | : 3: k3\\n\" +\n\" | : 5: cast\\n\" +\n\" | : 6: cast\\n\" +\n\" | : CAST(murmur_hash3_32(CAST(6: cast AS VARCHAR)) % 512 AS SMALLINT)\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testGroupByMultiColumnMultiCountDistinctWithSkewHint() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql =\n\"select k1, k3, count(distinct [skew] k2), count(distinct k4) from db1.tbl1 group by k1, k3\";\nString s = starRocksAssert.query(sql).explainQuery();\nAssert.assertTrue(s, s.contains(\" 4:Project\\n\" +\n\" | : 7: k1\\n\" +\n\" | : 8: k2\\n\" +\n\" | : 9: k3\\n\" +\n\" | : CAST(murmur_hash3_32(8: k2) % 512 AS SMALLINT)\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testGroupByCountDistinctUseTheSameColumn()\nthrows Exception {\nFeConstants.runningUnitTest = true;\nString sql =\n\"select k3, count(distinct [skew] k3) from db1.tbl1 group by k3\";\nString s = starRocksAssert.query(sql).explainQuery();\nAssert.assertFalse(s, s.contains(\"murmur_hash3_32\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testScalarCorrelatedSubquery() throws Exception {\n{\nString sql = \"select *, (select [a.k1,a.k2] from db1.tbl1 a where a.k4 = b.k1) as r from db1.baseall b;\";\nString plan = UtFrameUtils.getFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan.contains(\"any_value([2: k1,3: k2])\"));\n}\ntry {\nString sql = \"select *, (select a.k1 from db1.tbl1 a where a.k4 = b.k1) as r from db1.baseall b;\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\"assert_true[((7: countRows IS NULL) OR (7: countRows <= 1)\"));\n} catch (Exception e) {\nAssert.fail(\"Should not throw an exception\");\n}\n}\n@ParameterizedTest\n@MethodSource(\"multiDistinctMultiColumnWithLimitSqls\")\nvoid testMultiDistinctMultiColumnWithLimit(String sql, String pattern) throws Exception {\nstarRocksAssert.getCtx().getSessionVariable().setOptimizerExecuteTimeout(30000000);\nString plan = UtFrameUtils.getFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(pattern));\n}\n@Test\npublic void testSingleMultiColumnDistinct() throws Exception {\nstarRocksAssert.getCtx().getSessionVariable().setOptimizerExecuteTimeout(30000000);\nString plan = UtFrameUtils.getFragmentPlan(starRocksAssert.getCtx(),\n\"select count(distinct k1, k2), count(distinct k3) from 
db1.tbl1 limit 1\");\nAssert.assertTrue(plan, plan.contains(\"18:NESTLOOP JOIN\\n\" +\n\" | join op: CROSS JOIN\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" |----17:EXCHANGE\"));\n}\nprivate static Stream multiDistinctMultiColumnWithLimitSqls() {\nString[][] sqlList = {\n{\"select count(distinct k1, k2), count(distinct k3) from db1.tbl1 limit 1\",\n\"18:NESTLOOP JOIN\\n\" +\n\" | join op: CROSS JOIN\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" |----17:EXCHANGE\"},\n{\"select * from (select count(distinct k1, k2), count(distinct k3) from db1.tbl1) t1 limit 1\",\n\"18:NESTLOOP JOIN\\n\" +\n\" | join op: CROSS JOIN\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" |----17:EXCHANGE\"\n},\n{\"with t1 as (select count(distinct k1, k2) as a, count(distinct k3) as b from db1.tbl1) \" +\n\"select * from t1 limit 1\",\n\"18:NESTLOOP JOIN\\n\" +\n\" | join op: CROSS JOIN\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" |----17:EXCHANGE\"\n},\n{\"select count(distinct k1, k2), count(distinct k3) from db1.tbl1 group by k4 limit 1\",\n\"14:Project\\n\" +\n\" | : 5: count\\n\" +\n\" | : 6: count\\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" 13:HASH JOIN\\n\" +\n\" | join op: INNER JOIN (BUCKET_SHUFFLE(S))\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | equal join conjunct: 9: k4 <=> 11: k4\\n\" +\n\" | limit: 1\"\n},\n{\"select * from (select count(distinct k1, k2), count(distinct k3) from db1.tbl1 group by k4, k3) t1\" +\n\" limit 1\",\n\"14:Project\\n\" +\n\" | : 5: count\\n\" +\n\" | : 6: count\\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" 13:HASH JOIN\\n\" +\n\" | join op: INNER JOIN (BUCKET_SHUFFLE(S))\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | equal join conjunct: 10: k4 <=> 12: k4\\n\" +\n\" | equal join conjunct: 9: k3 <=> 11: k3\\n\" +\n\" | limit: 1\"\n},\n{\"with t1 as (select count(distinct k1, k2) as a, count(distinct k3) as b from db1.tbl1 \" +\n\"group by k2, k3, k4) select * from t1 limit 1\",\n\"14:Project\\n\" +\n\" | : 5: count\\n\" +\n\" | : 6: count\\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" 13:HASH JOIN\\n\" +\n\" | join op: INNER JOIN (BUCKET_SHUFFLE(S))\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | equal join conjunct: 8: k2 <=> 11: k2\\n\" +\n\" | equal join conjunct: 9: k3 <=> 12: k3\\n\" +\n\" | equal join conjunct: 10: k4 <=> 13: k4\\n\" +\n\" | limit: 1\"\n}\n};\nreturn Arrays.stream(sqlList).map(e -> Arguments.of(e[0], e[1]));\n}\n@Test\nvoid testSubstringConstantFolding() {\ntry {\nString sql =\n\"select * from db1.t where dt = \\\"2022-01-02\\\" or dt = cast(substring(\\\"2022-01-03\\\", 1, 10) as date);\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\"dt IN ('2022-01-02', '2022-01-03')\"));\n} catch (Exception e) {\nAssert.fail(\"Should not throw an exception\");\n}\n}\n@Test\nvoid testAnalyzeDecimalArithmeticExprIdempotently()\nthrows Exception {\n{\nString sql = \"select c0, sum(c2/(1+c1)) as a, sum(c2/(1+c1)) as b from t0 group by c0;\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\"PLAN FRAGMENT 0(F00)\\n\" +\n\" Output Exprs:1: c0 | 5: sum | 5: sum\\n\" +\n\" Input Partition: RANDOM\\n\" +\n\" RESULT SINK\\n\" +\n\"\\n\" +\n\" 2:AGGREGATE (update finalize)\\n\" +\n\" | aggregate: sum[([4: expr, DECIMAL128(38,8), true]); \" +\n\"args: 
DECIMAL128; result: DECIMAL128(38,8); args nullable: true; \" +\n\"result nullable: true]\\n\" +\n\" | group by: [1: c0, VARCHAR, false]\\n\" +\n\" | cardinality: 1\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | output columns:\\n\" +\n\" | 1 <-> [1: c0, VARCHAR, false]\\n\" +\n\" | 4 <-> [3: c2, DECIMAL128(24,2), false] / 1 + [2: c1, DECIMAL128(24,5), false]\\n\" +\n\" | cardinality: 1\"));\n}\n{\nString sql = \" select c0, sum(1/(1+cast(substr('1.12',1,4) as decimal(24,4)))) as a, \" +\n\"sum(1/(1+cast(substr('1.12',1,4) as decimal(24,4)))) as b from t0 group by c0;\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\" Output Exprs:1: c0 | 4: sum | 4: sum\\n\" +\n\" Input Partition: RANDOM\\n\" +\n\" RESULT SINK\\n\" +\n\"\\n\" +\n\" 1:AGGREGATE (update finalize)\\n\" +\n\" | aggregate: sum[(1 / 2.1200); args: DECIMAL128; result: DECIMAL128(38,6);\" +\n\" args nullable: true; result nullable: true]\\n\" +\n\" | group by: [1: c0, VARCHAR, false]\\n\" +\n\" | cardinality: 1\"));\n}\n{\nString sql = \"select c0, sum(cast(c2 as decimal(38,19))/(1+c1)) as a, \" +\n\"sum(cast(c2 as decimal(38,19))/(1+c1)) as b from t0 group by c0;\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\"PLAN FRAGMENT 0(F00)\\n\" +\n\" Output Exprs:1: c0 | 5: sum | 5: sum\\n\" +\n\" Input Partition: RANDOM\\n\" +\n\" RESULT SINK\\n\" +\n\"\\n\" +\n\" 2:AGGREGATE (update finalize)\\n\" +\n\" | aggregate: sum[(cast([4: expr, DECIMAL128(38,19), true] as DECIMAL128(38,18))); \" +\n\"args: DECIMAL128; result: DECIMAL128(38,18); args nullable: true; result nullable: true]\\n\" +\n\" | group by: [1: c0, VARCHAR, false]\\n\" +\n\" | cardinality: 1\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | output columns:\\n\" +\n\" | 1 <-> [1: c0, VARCHAR, false]\\n\" +\n\" | 4 <-> cast([3: c2, DECIMAL128(24,2), false] as DECIMAL128(38,19)) / 1 + \" +\n\"[2: c1, DECIMAL128(24,5), false]\\n\" +\n\" | cardinality: 1\"));\n}\n}\n@Test\nvoid testArraySubfieldsPrune() {\ntry {\nString sql = \"select str_to_map('age=18&sex=1&gender=1','&','=')['age'] AS age, \" +\n\"str_to_map('age=18&sex=1&gender=1','&','=')['sex'] AS sex;\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\"2 <-> 4: str_to_map['age']\\n\" +\n\" | 3 <-> 4: str_to_map['sex']\\n\" +\n\" | common expressions:\\n\" +\n\" | 4 <-> str_to_map[('age=18&sex=1&gender=1', '&', '='); \" +\n\"args: VARCHAR,VARCHAR,VARCHAR; \" +\n\"result: MAP; args \" +\n\"nullable: false; result nullable: true]\"));\n} catch (Exception e) {\nAssert.fail(\"Should not throw an exception\");\n}\n}\n@Test\npublic void testMergeLimitAfterPruneGroupByKeys() throws Exception {\nString sql = \"SELECT\\n\" +\n\" name\\n\" +\n\"FROM\\n\" +\n\" (\\n\" +\n\" select\\n\" +\n\" case\\n\" +\n\" when a.emp_name in('Alice', 'Bob') then 'RD'\\n\" +\n\" when a.emp_name in('Bob', 'Charlie') then 'QA'\\n\" +\n\" else 'BD'\\n\" +\n\" end as role,\\n\" +\n\" a.emp_name as name\\n\" +\n\" from\\n\" +\n\" (\\n\" +\n\" select 'Alice' as emp_name\\n\" +\n\" union all\\n\" +\n\" select 'Bob' as emp_name\\n\" +\n\" union all\\n\" +\n\" select 'Charlie' as emp_name\\n\" +\n\" ) a\\n\" +\n\" ) SUB_QRY\\n\" +\n\"WHERE name IS NOT NULL AND role IN ('QA')\\n\" +\n\"GROUP BY name\\n\" +\n\"ORDER BY name ASC\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, 
plan.contains(\"PLAN FRAGMENT 0(F00)\\n\" +\n\" Output Exprs:7: expr\\n\" +\n\" Input Partition: UNPARTITIONED\\n\" +\n\" RESULT SINK\\n\" +\n\"\\n\" +\n\" 2:SORT\\n\" +\n\" | order by: [7, VARCHAR, false] ASC\\n\" +\n\" | offset: 0\\n\" +\n\" | cardinality: 1\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | output columns:\\n\" +\n\" | 7 <-> 'Charlie'\\n\" +\n\" | limit: 1\\n\" +\n\" | cardinality: 1\\n\" +\n\" | \\n\" +\n\" 0:UNION\\n\" +\n\" constant exprs: \\n\" +\n\" NULL\\n\" +\n\" limit: 1\\n\" +\n\" cardinality: 1\\n\"));\n}\n}", + "context_after": "class SelectStmtTest {\nprivate static StarRocksAssert starRocksAssert;\n@BeforeAll\npublic static void setUp() throws Exception {\nUtFrameUtils.createMinStarRocksCluster();\nString createTblStmtStr = \"create table db1.tbl1(k1 varchar(32), k2 varchar(32), k3 varchar(32), k4 int) \"\n+ \"AGGREGATE KEY(k1, k2,k3,k4) distributed by hash(k1) buckets 3 properties('replication_num' = '1');\";\nString createBaseAllStmtStr = \"create table db1.baseall(k1 int) distributed by hash(k1) \"\n+ \"buckets 3 properties('replication_num' = '1');\";\nString createDateTblStmtStr = \"create table db1.t(k1 int, dt date) \"\n+ \"DUPLICATE KEY(k1) distributed by hash(k1) buckets 3 properties('replication_num' = '1');\";\nString createPratitionTableStr = \"CREATE TABLE db1.partition_table (\\n\" +\n\"datekey int(11) NULL COMMENT \\\"datekey\\\",\\n\" +\n\"poi_id bigint(20) NULL COMMENT \\\"poi_id\\\"\\n\" +\n\") ENGINE=OLAP\\n\" +\n\"AGGREGATE KEY(datekey, poi_id)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"PARTITION BY RANGE(datekey)\\n\" +\n\"(PARTITION p20200727 VALUES [(\\\"20200726\\\"), (\\\"20200727\\\")),\\n\" +\n\"PARTITION p20200728 VALUES [(\\\"20200727\\\"), (\\\"20200728\\\")))\\n\" +\n\"DISTRIBUTED BY HASH(poi_id) BUCKETS 2\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"storage_type\\\" = \\\"COLUMN\\\",\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\"\\n\" +\n\");\";\nString createTable1 = \"CREATE TABLE `t0` (\\n\" +\n\" `c0` varchar(24) NOT NULL COMMENT \\\"\\\",\\n\" +\n\" `c1` decimal128(24, 5) NOT NULL COMMENT \\\"\\\",\\n\" +\n\" `c2` decimal128(24, 2) NOT NULL COMMENT \\\"\\\"\\n\" +\n\") ENGINE=OLAP \\n\" +\n\"DUPLICATE KEY(`c0`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`c0`) BUCKETS 1 \\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\",\\n\" +\n\"\\\"storage_format\\\" = \\\"DEFAULT\\\",\\n\" +\n\"\\\"enable_persistent_index\\\" = \\\"false\\\",\\n\" +\n\"\\\"replicated_storage\\\" = \\\"true\\\",\\n\" +\n\"\\\"compression\\\" = \\\"LZ4\\\"\\n\" +\n\"); \";\nstarRocksAssert = new StarRocksAssert();\nstarRocksAssert.withDatabase(\"db1\").useDatabase(\"db1\");\nstarRocksAssert.withTable(createTblStmtStr)\n.withTable(createBaseAllStmtStr)\n.withTable(createDateTblStmtStr)\n.withTable(createPratitionTableStr)\n.withTable(createTable1);\nFeConstants.enablePruneEmptyOutputScan = false;\n}\n@Test\n@Test\nvoid testGroupByConstantExpression() throws Exception {\nString sql = \"SELECT k1 - 4*60*60 FROM baseall GROUP BY k1 - 4*60*60\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testWithWithoutDatabase() throws Exception {\nString sql = \"with tmp as (select count(*) from db1.tbl1) select * from tmp;\";\nstarRocksAssert.withoutUseDatabase();\nstarRocksAssert.query(sql).explainQuery();\nsql = \"with tmp as (select * from db1.tbl1) \" +\n\"select a.k1, b.k2, a.k3 from (select k1, k3 from tmp) a \" +\n\"left join (select k1, k2 from tmp) b on a.k1 = 
b.k1;\";\nstarRocksAssert.withoutUseDatabase();\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testDataGripSupport() throws Exception {\nString sql = \"select schema();\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select\\n\" +\n\"collation_name,\\n\" +\n\"character_set_name,\\n\" +\n\"is_default collate utf8_general_ci = 'Yes' as is_default\\n\" +\n\"from information_schema.collations\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testNavicatBinarySupport() throws Exception {\nString sql = \"SELECT ACTION_ORDER, \\n\" +\n\" EVENT_OBJECT_TABLE, \\n\" +\n\" TRIGGER_NAME, \\n\" +\n\" EVENT_MANIPULATION, \\n\" +\n\" EVENT_OBJECT_TABLE, \\n\" +\n\" DEFINER, \\n\" +\n\" ACTION_STATEMENT, \\n\" +\n\" ACTION_TIMING\\n\" +\n\"FROM information_schema.triggers\\n\" +\n\"WHERE BINARY event_object_schema = 'test_ods_inceptor' \\n\" +\n\" AND BINARY event_object_table = 'cus_ast_total_d_p' \\n\" +\n\"ORDER BY event_object_table\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testEqualExprNotMonotonic() throws Exception {\nConnectContext ctx = UtFrameUtils.createDefaultCtx();\nString sql = \"select k1 from db1.baseall where (k1=10) = true\";\nString expectString =\n\"[TPlanNode(node_id:0, node_type:OLAP_SCAN_NODE, num_children:0, limit:-1, row_tuples:[0], \" +\n\"nullable_tuples:[false], conjuncts:[TExpr(nodes:[TExprNode(node_type:BINARY_PRED, \" +\n\"type:TTypeDesc(types:[TTypeNode(type:SCALAR, scalar_type:TScalarType(type:BOOLEAN))]), \" +\n\"opcode:EQ, num_children:2, output_scale:-1, vector_opcode:INVALID_OPCODE, child_type:INT, \" +\n\"has_nullable_child:true, is_nullable:true, is_monotonic:false,\";\nString thrift = UtFrameUtils.getPlanThriftString(ctx, sql);\nAssert.assertTrue(thrift, thrift.contains(expectString));\n}\n@Test\nvoid testCurrentUserFunSupport() throws Exception {\nString sql = \"select current_user()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_user\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testTimeFunSupport() throws Exception {\nString sql = \"select current_timestamp()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_timestamp\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_time()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_time\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_date()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select current_date\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select localtime()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select localtime\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select localtimestamp()\";\nstarRocksAssert.query(sql).explainQuery();\nsql = \"select localtimestamp\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testDateTruncUpperCase() throws Exception {\nString sql = \"select date_trunc('MONTH', CAST('2020-11-04 11:12:13' AS DATE));\";\nConnectContext ctx = starRocksAssert.getCtx();\nUtFrameUtils.parseStmtWithNewParser(sql, ctx);\n}\n@Test\nvoid testSelectFromTabletIds() throws Exception {\nFeConstants.runningUnitTest = true;\nShowResultSet tablets = starRocksAssert.showTablet(\"db1\", \"partition_table\");\nList tabletIds = tablets.getResultRows().stream().map(r -> r.get(0)).collect(Collectors.toList());\nAssert.assertEquals(tabletIds.size(), 4);\nString tabletCsv = String.join(\",\", tabletIds);\nString sql = String.format(\"select count(1) from db1.partition_table tablet 
(%s)\", tabletCsv);\nString explain = starRocksAssert.query(sql).explainQuery();\nAssert.assertTrue(explain.contains(tabletCsv));\nString invalidTabletCsv = tabletIds.stream().map(id -> id + \"0\").collect(Collectors.joining(\",\"));\nString invalidSql = String.format(\"select count(1) from db1.partition_table tablet (%s)\", invalidTabletCsv);\ntry {\nstarRocksAssert.query(invalidSql).explainQuery();\n} catch (Throwable ex) {\nAssert.assertTrue(ex.getMessage().contains(\"Invalid tablet\"));\n}\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testNegateEqualForNullInWhereClause() throws Exception {\nString[] queryList = {\n\"select * from db1.tbl1 where not(k1 <=> NULL)\",\n\"select * from db1.tbl1 where not(k1 <=> k2)\",\n\"select * from db1.tbl1 where not(k1 <=> 'abc-def')\",\n};\nPattern re = Pattern.compile(\"PREDICATES: NOT.*<=>.*\");\nfor (String q : queryList) {\nString s = starRocksAssert.query(q).explainQuery();\nAssert.assertTrue(re.matcher(s).find());\n}\n}\n@Test\nvoid testSimplifiedPredicateRuleApplyToNegateEuqualForNull() throws Exception {\nString[] queryList = {\n\"select not(k1 <=> NULL) from db1.tbl1\",\n\"select not(NULL <=> k1) from db1.tbl1\",\n\"select not(k1 <=> 'abc-def') from db1.tbl1\",\n};\nPattern re = Pattern.compile(\"NOT.*<=>.*\");\nfor (String q : queryList) {\nString s = starRocksAssert.query(q).explainQuery();\nAssert.assertTrue(re.matcher(s).find());\n}\n}\nprivate void assertNoCastStringAsStringInPlan(String sql) throws Exception {\nExecPlan execPlan = UtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql).second;\nList operators = execPlan.getPhysicalPlan().getInputs().stream().flatMap(input ->\ninput.getOp().getProjection().getColumnRefMap().values().stream()).collect(Collectors.toList());\nAssert.assertTrue(operators.stream().noneMatch(op -> (op instanceof CastOperator) &&\nop.getType().isStringType() &&\nop.getChild(0).getType().isStringType()));\n}\n@Test\nvoid testFoldCastOfChildExprsOfSetOperation() throws Exception {\nString sql0 = \"select cast('abcdefg' as varchar(2)) a, cast('abc' as varchar(3)) b\\n\" +\n\"intersect\\n\" +\n\"select cast('aa123456789' as varchar) a, cast('abcd' as varchar(4)) b\";\nString sql1 = \"select k1, group_concat(k2) as k2 from db1.tbl1 group by k1 \\n\" +\n\"except\\n\" +\n\"select k1, cast(k4 as varchar(255)) from db1.tbl1\";\nString sql2 = \"select k1, k2 from db1.tbl1\\n\" +\n\"union all\\n\" +\n\"select cast(concat(k1, 'abc') as varchar(256)) as k1, cast(concat(k2, 'abc') as varchar(256)) as k2 \" +\n\"from db1.tbl1\\n\" +\n\"union all\\n\" +\n\"select cast('abcdef' as varchar) k1, cast('deadbeef' as varchar(1999)) k2\";\nfor (String sql : Arrays.asList(sql0, sql1, sql2)) {\nassertNoCastStringAsStringInPlan(sql);\n}\n}\n@Test\nvoid testCatalogFunSupport() throws Exception {\nString sql = \"select catalog()\";\nstarRocksAssert.query(sql).explainQuery();\n}\n@Test\nvoid testBanSubqueryAppearsInLeftSideChildOfInPredicates() {\nString sql = \"select k1, count(k2) from db1.tbl1 group by k1 \" +\n\"having (exists (select k1 from db1.tbl1 where NULL)) in (select k1 from db1.tbl1 where NULL);\";\ntry {\nUtFrameUtils.getPlanAndFragment(starRocksAssert.getCtx(), sql);\n} catch (Exception e) {\nAssert.assertTrue(e.getMessage().contains(\"Subquery in left-side child of in-predicate is not supported\"));\n}\n}\n@Test\nvoid testGroupByCountDistinctWithSkewHint() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql =\n\"select cast(k1 as int), count(distinct [skew] cast(k2 as int)) from db1.tbl1 group 
by cast(k1 as int)\";\nString s = starRocksAssert.query(sql).explainQuery();\nAssert.assertTrue(s, s.contains(\" 3:Project\\n\" +\n\" | : 5: cast\\n\" +\n\" | : 6: cast\\n\" +\n\" | : CAST(murmur_hash3_32(CAST(6: cast AS VARCHAR)) % 512 AS SMALLINT)\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testGroupByCountDistinctArrayWithSkewHint() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql =\n\"select b1, count(distinct [skew] a1) as cnt from (select split('a,b,c', ',') as a1, 'aaa' as b1) t1 group by b1\";\nString s = starRocksAssert.query(sql).explainQuery();\nAssert.assertTrue(s, s.contains(\"PLAN FRAGMENT 0\\n\" +\n\" OUTPUT EXPRS:3: expr | 4: count\\n\" +\n\" PARTITION: UNPARTITIONED\\n\" +\n\"\\n\" +\n\" RESULT SINK\\n\" +\n\"\\n\" +\n\" 5:AGGREGATE (merge finalize)\\n\" +\n\" | output: count(4: count)\\n\" +\n\" | group by: 3: expr\\n\" +\n\" | \\n\" +\n\" 4:AGGREGATE (update serialize)\\n\" +\n\" | STREAMING\\n\" +\n\" | output: count(2: split)\\n\" +\n\" | group by: 3: expr\\n\" +\n\" | \\n\" +\n\" 3:Project\\n\" +\n\" | : 2: split\\n\" +\n\" | : 'aaa'\\n\" +\n\" | \\n\" +\n\" 2:AGGREGATE (update serialize)\\n\" +\n\" | group by: 2: split\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | : split('a,b,c', ',')\\n\" +\n\" | : 'aaa'\\n\" +\n\" | \\n\" +\n\" 0:UNION\\n\" +\n\" constant exprs: \\n\" +\n\" NULL\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testGroupByMultiColumnCountDistinctWithSkewHint() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql =\n\"select cast(k1 as int), k3, count(distinct [skew] cast(k2 as int)) from db1.tbl1 group by cast(k1 as int), k3\";\nString s = starRocksAssert.query(sql).explainQuery();\nAssert.assertTrue(s, s.contains(\" 3:Project\\n\" +\n\" | : 3: k3\\n\" +\n\" | : 5: cast\\n\" +\n\" | : 6: cast\\n\" +\n\" | : CAST(murmur_hash3_32(CAST(6: cast AS VARCHAR)) % 512 AS SMALLINT)\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testGroupByMultiColumnMultiCountDistinctWithSkewHint() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql =\n\"select k1, k3, count(distinct [skew] k2), count(distinct k4) from db1.tbl1 group by k1, k3\";\nString s = starRocksAssert.query(sql).explainQuery();\nAssert.assertTrue(s, s.contains(\" 4:Project\\n\" +\n\" | : 7: k1\\n\" +\n\" | : 8: k2\\n\" +\n\" | : 9: k3\\n\" +\n\" | : CAST(murmur_hash3_32(8: k2) % 512 AS SMALLINT)\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testGroupByCountDistinctUseTheSameColumn()\nthrows Exception {\nFeConstants.runningUnitTest = true;\nString sql =\n\"select k3, count(distinct [skew] k3) from db1.tbl1 group by k3\";\nString s = starRocksAssert.query(sql).explainQuery();\nAssert.assertFalse(s, s.contains(\"murmur_hash3_32\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\nvoid testScalarCorrelatedSubquery() throws Exception {\n{\nString sql = \"select *, (select [a.k1,a.k2] from db1.tbl1 a where a.k4 = b.k1) as r from db1.baseall b;\";\nString plan = UtFrameUtils.getFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan.contains(\"any_value([2: k1,3: k2])\"));\n}\ntry {\nString sql = \"select *, (select a.k1 from db1.tbl1 a where a.k4 = b.k1) as r from db1.baseall b;\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\"assert_true[((7: countRows IS NULL) OR (7: countRows <= 1)\"));\n} catch (Exception e) {\nAssert.fail(\"Should not throw an 
exception\");\n}\n}\n@ParameterizedTest\n@MethodSource(\"multiDistinctMultiColumnWithLimitSqls\")\nvoid testMultiDistinctMultiColumnWithLimit(String sql, String pattern) throws Exception {\nstarRocksAssert.getCtx().getSessionVariable().setOptimizerExecuteTimeout(30000000);\nString plan = UtFrameUtils.getFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(pattern));\n}\n@Test\npublic void testSingleMultiColumnDistinct() throws Exception {\nstarRocksAssert.getCtx().getSessionVariable().setOptimizerExecuteTimeout(30000000);\nString plan = UtFrameUtils.getFragmentPlan(starRocksAssert.getCtx(),\n\"select count(distinct k1, k2), count(distinct k3) from db1.tbl1 limit 1\");\nAssert.assertTrue(plan, plan.contains(\"18:NESTLOOP JOIN\\n\" +\n\" | join op: CROSS JOIN\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" |----17:EXCHANGE\"));\n}\nprivate static Stream multiDistinctMultiColumnWithLimitSqls() {\nString[][] sqlList = {\n{\"select count(distinct k1, k2), count(distinct k3) from db1.tbl1 limit 1\",\n\"18:NESTLOOP JOIN\\n\" +\n\" | join op: CROSS JOIN\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" |----17:EXCHANGE\"},\n{\"select * from (select count(distinct k1, k2), count(distinct k3) from db1.tbl1) t1 limit 1\",\n\"18:NESTLOOP JOIN\\n\" +\n\" | join op: CROSS JOIN\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" |----17:EXCHANGE\"\n},\n{\"with t1 as (select count(distinct k1, k2) as a, count(distinct k3) as b from db1.tbl1) \" +\n\"select * from t1 limit 1\",\n\"18:NESTLOOP JOIN\\n\" +\n\" | join op: CROSS JOIN\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" |----17:EXCHANGE\"\n},\n{\"select count(distinct k1, k2), count(distinct k3) from db1.tbl1 group by k4 limit 1\",\n\"14:Project\\n\" +\n\" | : 5: count\\n\" +\n\" | : 6: count\\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" 13:HASH JOIN\\n\" +\n\" | join op: INNER JOIN (BUCKET_SHUFFLE(S))\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | equal join conjunct: 9: k4 <=> 11: k4\\n\" +\n\" | limit: 1\"\n},\n{\"select * from (select count(distinct k1, k2), count(distinct k3) from db1.tbl1 group by k4, k3) t1\" +\n\" limit 1\",\n\"14:Project\\n\" +\n\" | : 5: count\\n\" +\n\" | : 6: count\\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" 13:HASH JOIN\\n\" +\n\" | join op: INNER JOIN (BUCKET_SHUFFLE(S))\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | equal join conjunct: 10: k4 <=> 12: k4\\n\" +\n\" | equal join conjunct: 9: k3 <=> 11: k3\\n\" +\n\" | limit: 1\"\n},\n{\"with t1 as (select count(distinct k1, k2) as a, count(distinct k3) as b from db1.tbl1 \" +\n\"group by k2, k3, k4) select * from t1 limit 1\",\n\"14:Project\\n\" +\n\" | : 5: count\\n\" +\n\" | : 6: count\\n\" +\n\" | limit: 1\\n\" +\n\" | \\n\" +\n\" 13:HASH JOIN\\n\" +\n\" | join op: INNER JOIN (BUCKET_SHUFFLE(S))\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | equal join conjunct: 8: k2 <=> 11: k2\\n\" +\n\" | equal join conjunct: 9: k3 <=> 12: k3\\n\" +\n\" | equal join conjunct: 10: k4 <=> 13: k4\\n\" +\n\" | limit: 1\"\n}\n};\nreturn Arrays.stream(sqlList).map(e -> Arguments.of(e[0], e[1]));\n}\n@Test\nvoid testSubstringConstantFolding() {\ntry {\nString sql =\n\"select * from db1.t where dt = \\\"2022-01-02\\\" or dt = cast(substring(\\\"2022-01-03\\\", 1, 10) as date);\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, 
plan.contains(\"dt IN ('2022-01-02', '2022-01-03')\"));\n} catch (Exception e) {\nAssert.fail(\"Should not throw an exception\");\n}\n}\n@Test\nvoid testAnalyzeDecimalArithmeticExprIdempotently()\nthrows Exception {\n{\nString sql = \"select c0, sum(c2/(1+c1)) as a, sum(c2/(1+c1)) as b from t0 group by c0;\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\"PLAN FRAGMENT 0(F00)\\n\" +\n\" Output Exprs:1: c0 | 5: sum | 5: sum\\n\" +\n\" Input Partition: RANDOM\\n\" +\n\" RESULT SINK\\n\" +\n\"\\n\" +\n\" 2:AGGREGATE (update finalize)\\n\" +\n\" | aggregate: sum[([4: expr, DECIMAL128(38,8), true]); \" +\n\"args: DECIMAL128; result: DECIMAL128(38,8); args nullable: true; \" +\n\"result nullable: true]\\n\" +\n\" | group by: [1: c0, VARCHAR, false]\\n\" +\n\" | cardinality: 1\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | output columns:\\n\" +\n\" | 1 <-> [1: c0, VARCHAR, false]\\n\" +\n\" | 4 <-> [3: c2, DECIMAL128(24,2), false] / 1 + [2: c1, DECIMAL128(24,5), false]\\n\" +\n\" | cardinality: 1\"));\n}\n{\nString sql = \" select c0, sum(1/(1+cast(substr('1.12',1,4) as decimal(24,4)))) as a, \" +\n\"sum(1/(1+cast(substr('1.12',1,4) as decimal(24,4)))) as b from t0 group by c0;\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\" Output Exprs:1: c0 | 4: sum | 4: sum\\n\" +\n\" Input Partition: RANDOM\\n\" +\n\" RESULT SINK\\n\" +\n\"\\n\" +\n\" 1:AGGREGATE (update finalize)\\n\" +\n\" | aggregate: sum[(1 / 2.1200); args: DECIMAL128; result: DECIMAL128(38,6);\" +\n\" args nullable: true; result nullable: true]\\n\" +\n\" | group by: [1: c0, VARCHAR, false]\\n\" +\n\" | cardinality: 1\"));\n}\n{\nString sql = \"select c0, sum(cast(c2 as decimal(38,19))/(1+c1)) as a, \" +\n\"sum(cast(c2 as decimal(38,19))/(1+c1)) as b from t0 group by c0;\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\"PLAN FRAGMENT 0(F00)\\n\" +\n\" Output Exprs:1: c0 | 5: sum | 5: sum\\n\" +\n\" Input Partition: RANDOM\\n\" +\n\" RESULT SINK\\n\" +\n\"\\n\" +\n\" 2:AGGREGATE (update finalize)\\n\" +\n\" | aggregate: sum[(cast([4: expr, DECIMAL128(38,19), true] as DECIMAL128(38,18))); \" +\n\"args: DECIMAL128; result: DECIMAL128(38,18); args nullable: true; result nullable: true]\\n\" +\n\" | group by: [1: c0, VARCHAR, false]\\n\" +\n\" | cardinality: 1\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | output columns:\\n\" +\n\" | 1 <-> [1: c0, VARCHAR, false]\\n\" +\n\" | 4 <-> cast([3: c2, DECIMAL128(24,2), false] as DECIMAL128(38,19)) / 1 + \" +\n\"[2: c1, DECIMAL128(24,5), false]\\n\" +\n\" | cardinality: 1\"));\n}\n}\n@Test\nvoid testArraySubfieldsPrune() {\ntry {\nString sql = \"select str_to_map('age=18&sex=1&gender=1','&','=')['age'] AS age, \" +\n\"str_to_map('age=18&sex=1&gender=1','&','=')['sex'] AS sex;\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\"2 <-> 4: str_to_map['age']\\n\" +\n\" | 3 <-> 4: str_to_map['sex']\\n\" +\n\" | common expressions:\\n\" +\n\" | 4 <-> str_to_map[('age=18&sex=1&gender=1', '&', '='); \" +\n\"args: VARCHAR,VARCHAR,VARCHAR; \" +\n\"result: MAP; args \" +\n\"nullable: false; result nullable: true]\"));\n} catch (Exception e) {\nAssert.fail(\"Should not throw an exception\");\n}\n}\n@Test\npublic void testMergeLimitAfterPruneGroupByKeys() throws Exception {\nString sql = \"SELECT\\n\" 
+\n\" name\\n\" +\n\"FROM\\n\" +\n\" (\\n\" +\n\" select\\n\" +\n\" case\\n\" +\n\" when a.emp_name in('Alice', 'Bob') then 'RD'\\n\" +\n\" when a.emp_name in('Bob', 'Charlie') then 'QA'\\n\" +\n\" else 'BD'\\n\" +\n\" end as role,\\n\" +\n\" a.emp_name as name\\n\" +\n\" from\\n\" +\n\" (\\n\" +\n\" select 'Alice' as emp_name\\n\" +\n\" union all\\n\" +\n\" select 'Bob' as emp_name\\n\" +\n\" union all\\n\" +\n\" select 'Charlie' as emp_name\\n\" +\n\" ) a\\n\" +\n\" ) SUB_QRY\\n\" +\n\"WHERE name IS NOT NULL AND role IN ('QA')\\n\" +\n\"GROUP BY name\\n\" +\n\"ORDER BY name ASC\";\nString plan = UtFrameUtils.getVerboseFragmentPlan(starRocksAssert.getCtx(), sql);\nAssert.assertTrue(plan, plan.contains(\"PLAN FRAGMENT 0(F00)\\n\" +\n\" Output Exprs:7: expr\\n\" +\n\" Input Partition: UNPARTITIONED\\n\" +\n\" RESULT SINK\\n\" +\n\"\\n\" +\n\" 2:SORT\\n\" +\n\" | order by: [7, VARCHAR, false] ASC\\n\" +\n\" | offset: 0\\n\" +\n\" | cardinality: 1\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | output columns:\\n\" +\n\" | 7 <-> 'Charlie'\\n\" +\n\" | limit: 1\\n\" +\n\" | cardinality: 1\\n\" +\n\" | \\n\" +\n\" 0:UNION\\n\" +\n\" constant exprs: \\n\" +\n\" NULL\\n\" +\n\" limit: 1\\n\" +\n\" cardinality: 1\\n\"));\n}\n}" + }, + { + "comment": "Currently the `throwable` is handled.", + "method_body": "private void startJobMasterServiceProcessAsync(UUID leaderSessionId) {\nboolean isValid;\nsynchronized (lock) {\nisValid = isValidLeader(leaderSessionId);\n}\nif (isValid) {\nsequentialOperation =\nsequentialOperation.thenCompose(\nunused ->\nrunIfValidLeader(\nleaderSessionId,\n() ->\njobResultStore.hasJobResultEntryAsync(\ngetJobID()),\n\"verify jbb result entry\")\n.handle(\n(hasJobResult, throwable) -> {\nif (hasJobResult == null) {\nreturn null;\n}\nif (hasJobResult) {\nrunIfValidLeader(\nleaderSessionId,\n() -> {\njobAlreadyDone(\nleaderSessionId);\nreturn CompletableFuture\n.completedFuture(\nnull);\n},\n\"check completed job\");\n} else {\nrunIfValidLeader(\nleaderSessionId,\n() -> {\nThrowingRunnable.unchecked(\n() ->\ncreateNewJobMasterServiceProcess(\nleaderSessionId))\n.run();\nreturn CompletableFuture\n.completedFuture(\nnull);\n},\n\"create new job master service process\");\n}\nreturn null;\n}));\n} else {\nLOG.trace(\n\"Ignore leader action '{}' because the leadership runner is no longer the valid leader for {}.\",\n\"verify job scheduling status and create JobMasterServiceProcess\",\nleaderSessionId);\n}\nhandleAsyncOperationError(sequentialOperation, \"Could not start the job manager.\");\n}", + "target_code": "if (hasJobResult == null) {", + "method_body_after": "private void startJobMasterServiceProcessAsync(UUID leaderSessionId) {\nsequentialOperation =\nsequentialOperation.thenCompose(\nunused ->\nsupplyAsyncIfValidLeader(\nleaderSessionId,\n() ->\njobResultStore.hasJobResultEntryAsync(\ngetJobID()),\n() ->\nFutureUtils.completedExceptionally(\nnew LeadershipLostException(\n\"The leadership is lost.\")))\n.handle(\n(hasJobResult, throwable) -> {\nif (throwable\ninstanceof LeadershipLostException) {\nprintLogIfNotValidLeader(\n\"verify job result entry\",\nleaderSessionId);\nreturn null;\n} else if (throwable != null) {\nExceptionUtils.rethrow(throwable);\n}\nif (hasJobResult) {\nhandleJobAlreadyDoneIfValidLeader(\nleaderSessionId);\n} else {\ncreateNewJobMasterServiceProcessIfValidLeader(\nleaderSessionId);\n}\nreturn null;\n}));\nhandleAsyncOperationError(sequentialOperation, \"Could not start the job manager.\");\n}", + "context_before": "class 
JobMasterServiceLeadershipRunner implements JobManagerRunner, LeaderContender {\nprivate static final Logger LOG =\nLoggerFactory.getLogger(JobMasterServiceLeadershipRunner.class);\nprivate final Object lock = new Object();\nprivate final JobMasterServiceProcessFactory jobMasterServiceProcessFactory;\nprivate final LeaderElection leaderElection;\nprivate final JobResultStore jobResultStore;\nprivate final LibraryCacheManager.ClassLoaderLease classLoaderLease;\nprivate final FatalErrorHandler fatalErrorHandler;\nprivate final CompletableFuture terminationFuture = new CompletableFuture<>();\nprivate final CompletableFuture resultFuture =\nnew CompletableFuture<>();\n@GuardedBy(\"lock\")\nprivate State state = State.RUNNING;\n@GuardedBy(\"lock\")\nprivate CompletableFuture sequentialOperation = FutureUtils.completedVoidFuture();\n@GuardedBy(\"lock\")\nprivate JobMasterServiceProcess jobMasterServiceProcess =\nJobMasterServiceProcess.waitingForLeadership();\n@GuardedBy(\"lock\")\nprivate CompletableFuture jobMasterGatewayFuture = new CompletableFuture<>();\n@GuardedBy(\"lock\")\nprivate boolean hasCurrentLeaderBeenCancelled = false;\npublic JobMasterServiceLeadershipRunner(\nJobMasterServiceProcessFactory jobMasterServiceProcessFactory,\nLeaderElection leaderElection,\nJobResultStore jobResultStore,\nLibraryCacheManager.ClassLoaderLease classLoaderLease,\nFatalErrorHandler fatalErrorHandler) {\nthis.jobMasterServiceProcessFactory = jobMasterServiceProcessFactory;\nthis.leaderElection = leaderElection;\nthis.jobResultStore = jobResultStore;\nthis.classLoaderLease = classLoaderLease;\nthis.fatalErrorHandler = fatalErrorHandler;\n}\n@Override\npublic CompletableFuture closeAsync() {\nfinal CompletableFuture processTerminationFuture;\nsynchronized (lock) {\nif (state == State.STOPPED) {\nreturn terminationFuture;\n}\nstate = State.STOPPED;\nLOG.debug(\"Terminating the leadership runner for job {}.\", getJobID());\njobMasterGatewayFuture.completeExceptionally(\nnew FlinkException(\n\"JobMasterServiceLeadershipRunner is closed. 
Therefore, the corresponding JobMaster will never acquire the leadership.\"));\nresultFuture.complete(\nJobManagerRunnerResult.forSuccess(\ncreateExecutionGraphInfoWithJobStatus(JobStatus.SUSPENDED)));\nprocessTerminationFuture = jobMasterServiceProcess.closeAsync();\n}\nfinal CompletableFuture serviceTerminationFuture =\nFutureUtils.runAfterwards(\nprocessTerminationFuture,\n() -> {\nclassLoaderLease.release();\nleaderElection.close();\n});\nFutureUtils.forward(serviceTerminationFuture, terminationFuture);\nterminationFuture.whenComplete(\n(unused, throwable) ->\nLOG.debug(\"Leadership runner for job {} has been terminated.\", getJobID()));\nreturn terminationFuture;\n}\n@Override\npublic void start() throws Exception {\nLOG.debug(\"Start leadership runner for job {}.\", getJobID());\nleaderElection.startLeaderElection(this);\n}\n@VisibleForTesting\npublic LeaderElection getLeaderElection() {\nreturn leaderElection;\n}\n@Override\npublic CompletableFuture getJobMasterGateway() {\nsynchronized (lock) {\nreturn jobMasterGatewayFuture;\n}\n}\n@Override\npublic CompletableFuture getResultFuture() {\nreturn resultFuture;\n}\n@Override\npublic JobID getJobID() {\nreturn jobMasterServiceProcessFactory.getJobId();\n}\n@Override\npublic CompletableFuture cancel(Time timeout) {\nsynchronized (lock) {\nhasCurrentLeaderBeenCancelled = true;\nreturn getJobMasterGateway()\n.thenCompose(jobMasterGateway -> jobMasterGateway.cancel(timeout))\n.exceptionally(\ne -> {\nthrow new CompletionException(\nnew JobCancellationFailedException(\n\"Cancellation failed.\",\nExceptionUtils.stripCompletionException(e)));\n});\n}\n}\n@Override\npublic CompletableFuture requestJobStatus(Time timeout) {\nreturn requestJob(timeout)\n.thenApply(\nexecutionGraphInfo ->\nexecutionGraphInfo.getArchivedExecutionGraph().getState());\n}\n@Override\npublic CompletableFuture requestJobDetails(Time timeout) {\nreturn requestJob(timeout)\n.thenApply(\nexecutionGraphInfo ->\nJobDetails.createDetailsForJob(\nexecutionGraphInfo.getArchivedExecutionGraph()));\n}\n@Override\npublic CompletableFuture requestJob(Time timeout) {\nsynchronized (lock) {\nif (state == State.RUNNING) {\nif (jobMasterServiceProcess.isInitializedAndRunning()) {\nreturn getJobMasterGateway()\n.thenCompose(jobMasterGateway -> jobMasterGateway.requestJob(timeout));\n} else {\nreturn CompletableFuture.completedFuture(\ncreateExecutionGraphInfoWithJobStatus(\nhasCurrentLeaderBeenCancelled\n? 
JobStatus.CANCELLING\n: JobStatus.INITIALIZING));\n}\n} else {\nreturn resultFuture.thenApply(JobManagerRunnerResult::getExecutionGraphInfo);\n}\n}\n}\n@Override\npublic boolean isInitialized() {\nsynchronized (lock) {\nreturn jobMasterServiceProcess.isInitializedAndRunning();\n}\n}\n@Override\npublic void grantLeadership(UUID leaderSessionID) {\nrunIfStateRunning(\n() -> startJobMasterServiceProcessAsync(leaderSessionID),\n\"starting a new JobMasterServiceProcess\");\n}\n@GuardedBy(\"lock\")\n@GuardedBy(\"lock\")\nprivate void verifyJobSchedulingStatusAndCreateJobMasterServiceProcess(UUID leaderSessionId)\nthrows FlinkException, ExecutionException, InterruptedException {\nif (jobResultStore.hasJobResultEntryAsync(getJobID()).get()) {\njobAlreadyDone(leaderSessionId);\n} else {\ncreateNewJobMasterServiceProcess(leaderSessionId);\n}\n}\nprivate ExecutionGraphInfo createExecutionGraphInfoWithJobStatus(JobStatus jobStatus) {\nreturn new ExecutionGraphInfo(\njobMasterServiceProcessFactory.createArchivedExecutionGraph(jobStatus, null));\n}\nprivate void jobAlreadyDone(UUID leaderSessionId) {\nLOG.info(\n\"{} for job {} was granted leadership with leader id {}, but job was already done.\",\ngetClass().getSimpleName(),\ngetJobID(),\nleaderSessionId);\nresultFuture.complete(\nJobManagerRunnerResult.forSuccess(\nnew ExecutionGraphInfo(\njobMasterServiceProcessFactory.createArchivedExecutionGraph(\nJobStatus.FAILED,\nnew JobAlreadyDoneException(getJobID())))));\n}\n@GuardedBy(\"lock\")\nprivate void createNewJobMasterServiceProcess(UUID leaderSessionId) throws FlinkException {\nPreconditions.checkState(jobMasterServiceProcess.closeAsync().isDone());\nLOG.info(\n\"{} for job {} was granted leadership with leader id {}. Creating new {}.\",\ngetClass().getSimpleName(),\ngetJobID(),\nleaderSessionId,\nJobMasterServiceProcess.class.getSimpleName());\njobMasterServiceProcess = jobMasterServiceProcessFactory.create(leaderSessionId);\nforwardIfValidLeader(\nleaderSessionId,\njobMasterServiceProcess.getJobMasterGatewayFuture(),\njobMasterGatewayFuture,\n\"JobMasterGatewayFuture from JobMasterServiceProcess\");\nforwardResultFuture(leaderSessionId, jobMasterServiceProcess.getResultFuture());\nconfirmLeadership(leaderSessionId, jobMasterServiceProcess.getLeaderAddressFuture());\n}\nprivate void confirmLeadership(\nUUID leaderSessionId, CompletableFuture leaderAddressFuture) {\nFutureUtils.assertNoException(\nleaderAddressFuture.thenAccept(\naddress -> {\nsynchronized (lock) {\nif (isValidLeader(leaderSessionId)) {\nLOG.debug(\"Confirm leadership {}.\", leaderSessionId);\nleaderElection.confirmLeadership(leaderSessionId, address);\n} else {\nLOG.trace(\n\"Ignore confirming leadership because the leader {} is no longer valid.\",\nleaderSessionId);\n}\n}\n}));\n}\nprivate void forwardResultFuture(\nUUID leaderSessionId, CompletableFuture resultFuture) {\nresultFuture.whenComplete(\n(jobManagerRunnerResult, throwable) -> {\nsynchronized (lock) {\nif (isValidLeader(leaderSessionId)) {\nonJobCompletion(jobManagerRunnerResult, throwable);\n} else {\nLOG.trace(\n\"Ignore result future forwarding because the leader {} is no longer valid.\",\nleaderSessionId);\n}\n}\n});\n}\n@GuardedBy(\"lock\")\nprivate void onJobCompletion(\nJobManagerRunnerResult jobManagerRunnerResult, Throwable throwable) {\nstate = State.JOB_COMPLETED;\nLOG.debug(\"Completing the result for job {}.\", getJobID());\nif (throwable != null) {\nresultFuture.completeExceptionally(throwable);\njobMasterGatewayFuture.completeExceptionally(\nnew 
FlinkException(\n\"Could not retrieve JobMasterGateway because the JobMaster failed.\",\nthrowable));\n} else {\nif (!jobManagerRunnerResult.isSuccess()) {\njobMasterGatewayFuture.completeExceptionally(\nnew FlinkException(\n\"Could not retrieve JobMasterGateway because the JobMaster initialization failed.\",\njobManagerRunnerResult.getInitializationFailure()));\n}\nresultFuture.complete(jobManagerRunnerResult);\n}\n}\n@Override\npublic void revokeLeadership() {\nrunIfStateRunning(\nthis::stopJobMasterServiceProcessAsync,\n\"revoke leadership from JobMasterServiceProcess\");\n}\n@GuardedBy(\"lock\")\nprivate void stopJobMasterServiceProcessAsync() {\nsequentialOperation =\nsequentialOperation.thenCompose(\nignored ->\ncallIfRunning(\nthis::stopJobMasterServiceProcess,\n\"stop leading JobMasterServiceProcess\")\n.orElse(FutureUtils.completedVoidFuture()));\nhandleAsyncOperationError(sequentialOperation, \"Could not suspend the job manager.\");\n}\n@GuardedBy(\"lock\")\nprivate CompletableFuture stopJobMasterServiceProcess() {\nLOG.info(\n\"{} for job {} was revoked leadership with leader id {}. Stopping current {}.\",\ngetClass().getSimpleName(),\ngetJobID(),\njobMasterServiceProcess.getLeaderSessionId(),\nJobMasterServiceProcess.class.getSimpleName());\njobMasterGatewayFuture.completeExceptionally(\nnew FlinkException(\n\"Cannot obtain JobMasterGateway because the JobMaster lost leadership.\"));\njobMasterGatewayFuture = new CompletableFuture<>();\nhasCurrentLeaderBeenCancelled = false;\nreturn jobMasterServiceProcess.closeAsync();\n}\n@Override\npublic void handleError(Exception exception) {\nfatalErrorHandler.onFatalError(exception);\n}\nprivate void handleAsyncOperationError(CompletableFuture operation, String message) {\noperation.whenComplete(\n(unused, throwable) -> {\nif (throwable != null) {\nrunIfStateRunning(\n() ->\nhandleJobMasterServiceLeadershipRunnerError(\nnew FlinkException(message, throwable)),\n\"handle JobMasterServiceLeadershipRunner error\");\n}\n});\n}\nprivate void handleJobMasterServiceLeadershipRunnerError(Throwable cause) {\nif (ExceptionUtils.isJvmFatalError(cause)) {\nfatalErrorHandler.onFatalError(cause);\n} else {\nresultFuture.completeExceptionally(cause);\n}\n}\nprivate void runIfStateRunning(Runnable action, String actionDescription) {\nsynchronized (lock) {\nif (isRunning()) {\naction.run();\n} else {\nLOG.trace(\n\"Ignore '{}' because the leadership runner is no longer running.\",\nactionDescription);\n}\n}\n}\nprivate Optional callIfRunning(\nSupplier supplier, String supplierDescription) {\nsynchronized (lock) {\nif (isRunning()) {\nreturn Optional.of(supplier.get());\n} else {\nLOG.trace(\n\"Ignore '{}' because the leadership runner is no longer running.\",\nsupplierDescription);\nreturn Optional.empty();\n}\n}\n}\n@GuardedBy(\"lock\")\nprivate boolean isRunning() {\nreturn state == State.RUNNING;\n}\nprivate CompletableFuture runIfValidLeader(\nUUID expectedLeaderId,\nSupplier> supplier,\nString actionDescription) {\nsynchronized (lock) {\nif (isValidLeader(expectedLeaderId)) {\nreturn supplier.get();\n} else {\nLOG.trace(\n\"Ignore leader action '{}' because the leadership runner is no longer the valid leader for {}.\",\nactionDescription,\nexpectedLeaderId);\n}\n}\nreturn CompletableFuture.completedFuture(null);\n}\n@GuardedBy(\"lock\")\nprivate boolean isValidLeader(UUID expectedLeaderId) {\nreturn isRunning()\n&& leaderElection != null\n&& leaderElection.hasLeadership(expectedLeaderId);\n}\nprivate void forwardIfValidLeader(\nUUID 
expectedLeaderId,\nCompletableFuture source,\nCompletableFuture target,\nString forwardDescription) {\nsource.whenComplete(\n(t, throwable) -> {\nsynchronized (lock) {\nif (isValidLeader(expectedLeaderId)) {\nif (throwable != null) {\ntarget.completeExceptionally(throwable);\n} else {\ntarget.complete(t);\n}\n} else {\nLOG.trace(\n\"Ignore forwarding '{}' because the leadership runner is no longer the valid leader for {}.\",\nforwardDescription,\nexpectedLeaderId);\n}\n}\n});\n}\nenum State {\nRUNNING,\nSTOPPED,\nJOB_COMPLETED,\n}\n}", + "context_after": "class JobMasterServiceLeadershipRunner implements JobManagerRunner, LeaderContender {\nprivate static final Logger LOG =\nLoggerFactory.getLogger(JobMasterServiceLeadershipRunner.class);\nprivate final Object lock = new Object();\nprivate final JobMasterServiceProcessFactory jobMasterServiceProcessFactory;\nprivate final LeaderElection leaderElection;\nprivate final JobResultStore jobResultStore;\nprivate final LibraryCacheManager.ClassLoaderLease classLoaderLease;\nprivate final FatalErrorHandler fatalErrorHandler;\nprivate final CompletableFuture terminationFuture = new CompletableFuture<>();\nprivate final CompletableFuture resultFuture =\nnew CompletableFuture<>();\n@GuardedBy(\"lock\")\nprivate State state = State.RUNNING;\n@GuardedBy(\"lock\")\nprivate CompletableFuture sequentialOperation = FutureUtils.completedVoidFuture();\n@GuardedBy(\"lock\")\nprivate JobMasterServiceProcess jobMasterServiceProcess =\nJobMasterServiceProcess.waitingForLeadership();\n@GuardedBy(\"lock\")\nprivate CompletableFuture jobMasterGatewayFuture = new CompletableFuture<>();\n@GuardedBy(\"lock\")\nprivate boolean hasCurrentLeaderBeenCancelled = false;\npublic JobMasterServiceLeadershipRunner(\nJobMasterServiceProcessFactory jobMasterServiceProcessFactory,\nLeaderElection leaderElection,\nJobResultStore jobResultStore,\nLibraryCacheManager.ClassLoaderLease classLoaderLease,\nFatalErrorHandler fatalErrorHandler) {\nthis.jobMasterServiceProcessFactory = jobMasterServiceProcessFactory;\nthis.leaderElection = leaderElection;\nthis.jobResultStore = jobResultStore;\nthis.classLoaderLease = classLoaderLease;\nthis.fatalErrorHandler = fatalErrorHandler;\n}\n@Override\npublic CompletableFuture closeAsync() {\nfinal CompletableFuture processTerminationFuture;\nsynchronized (lock) {\nif (state == State.STOPPED) {\nreturn terminationFuture;\n}\nstate = State.STOPPED;\nLOG.debug(\"Terminating the leadership runner for job {}.\", getJobID());\njobMasterGatewayFuture.completeExceptionally(\nnew FlinkException(\n\"JobMasterServiceLeadershipRunner is closed. 
Therefore, the corresponding JobMaster will never acquire the leadership.\"));\nresultFuture.complete(\nJobManagerRunnerResult.forSuccess(\ncreateExecutionGraphInfoWithJobStatus(JobStatus.SUSPENDED)));\nprocessTerminationFuture = jobMasterServiceProcess.closeAsync();\n}\nfinal CompletableFuture serviceTerminationFuture =\nFutureUtils.runAfterwards(\nprocessTerminationFuture,\n() -> {\nclassLoaderLease.release();\nleaderElection.close();\n});\nFutureUtils.forward(serviceTerminationFuture, terminationFuture);\nterminationFuture.whenComplete(\n(unused, throwable) ->\nLOG.debug(\"Leadership runner for job {} has been terminated.\", getJobID()));\nreturn terminationFuture;\n}\n@Override\npublic void start() throws Exception {\nLOG.debug(\"Start leadership runner for job {}.\", getJobID());\nleaderElection.startLeaderElection(this);\n}\n@Override\npublic CompletableFuture getJobMasterGateway() {\nsynchronized (lock) {\nreturn jobMasterGatewayFuture;\n}\n}\n@Override\npublic CompletableFuture getResultFuture() {\nreturn resultFuture;\n}\n@Override\npublic JobID getJobID() {\nreturn jobMasterServiceProcessFactory.getJobId();\n}\n@Override\npublic CompletableFuture cancel(Time timeout) {\nsynchronized (lock) {\nhasCurrentLeaderBeenCancelled = true;\nreturn getJobMasterGateway()\n.thenCompose(jobMasterGateway -> jobMasterGateway.cancel(timeout))\n.exceptionally(\ne -> {\nthrow new CompletionException(\nnew JobCancellationFailedException(\n\"Cancellation failed.\",\nExceptionUtils.stripCompletionException(e)));\n});\n}\n}\n@Override\npublic CompletableFuture requestJobStatus(Time timeout) {\nreturn requestJob(timeout)\n.thenApply(\nexecutionGraphInfo ->\nexecutionGraphInfo.getArchivedExecutionGraph().getState());\n}\n@Override\npublic CompletableFuture requestJobDetails(Time timeout) {\nreturn requestJob(timeout)\n.thenApply(\nexecutionGraphInfo ->\nJobDetails.createDetailsForJob(\nexecutionGraphInfo.getArchivedExecutionGraph()));\n}\n@Override\npublic CompletableFuture requestJob(Time timeout) {\nsynchronized (lock) {\nif (state == State.RUNNING) {\nif (jobMasterServiceProcess.isInitializedAndRunning()) {\nreturn getJobMasterGateway()\n.thenCompose(jobMasterGateway -> jobMasterGateway.requestJob(timeout));\n} else {\nreturn CompletableFuture.completedFuture(\ncreateExecutionGraphInfoWithJobStatus(\nhasCurrentLeaderBeenCancelled\n? 
JobStatus.CANCELLING\n: JobStatus.INITIALIZING));\n}\n} else {\nreturn resultFuture.thenApply(JobManagerRunnerResult::getExecutionGraphInfo);\n}\n}\n}\n@Override\npublic boolean isInitialized() {\nsynchronized (lock) {\nreturn jobMasterServiceProcess.isInitializedAndRunning();\n}\n}\n@Override\npublic void grantLeadership(UUID leaderSessionID) {\nrunIfStateRunning(\n() -> startJobMasterServiceProcessAsync(leaderSessionID),\n\"starting a new JobMasterServiceProcess\");\n}\n@GuardedBy(\"lock\")\nprivate void handleJobAlreadyDoneIfValidLeader(UUID leaderSessionId) {\nrunIfValidLeader(\nleaderSessionId, () -> jobAlreadyDone(leaderSessionId), \"check completed job\");\n}\nprivate void createNewJobMasterServiceProcessIfValidLeader(UUID leaderSessionId) {\nrunIfValidLeader(\nleaderSessionId,\n() ->\nThrowingRunnable.unchecked(\n() -> createNewJobMasterServiceProcess(leaderSessionId))\n.run(),\n\"create new job master service process\");\n}\nprivate void printLogIfNotValidLeader(String actionDescription, UUID leaderSessionId) {\nLOG.debug(\n\"Ignore leader action '{}' because the leadership runner is no longer the valid leader for {}.\",\nactionDescription,\nleaderSessionId);\n}\nprivate ExecutionGraphInfo createExecutionGraphInfoWithJobStatus(JobStatus jobStatus) {\nreturn new ExecutionGraphInfo(\njobMasterServiceProcessFactory.createArchivedExecutionGraph(jobStatus, null));\n}\nprivate void jobAlreadyDone(UUID leaderSessionId) {\nLOG.info(\n\"{} for job {} was granted leadership with leader id {}, but job was already done.\",\ngetClass().getSimpleName(),\ngetJobID(),\nleaderSessionId);\nresultFuture.complete(\nJobManagerRunnerResult.forSuccess(\nnew ExecutionGraphInfo(\njobMasterServiceProcessFactory.createArchivedExecutionGraph(\nJobStatus.FAILED,\nnew JobAlreadyDoneException(getJobID())))));\n}\n@GuardedBy(\"lock\")\nprivate void createNewJobMasterServiceProcess(UUID leaderSessionId) throws FlinkException {\nPreconditions.checkState(jobMasterServiceProcess.closeAsync().isDone());\nLOG.info(\n\"{} for job {} was granted leadership with leader id {}. 
Creating new {}.\",\ngetClass().getSimpleName(),\ngetJobID(),\nleaderSessionId,\nJobMasterServiceProcess.class.getSimpleName());\njobMasterServiceProcess = jobMasterServiceProcessFactory.create(leaderSessionId);\nforwardIfValidLeader(\nleaderSessionId,\njobMasterServiceProcess.getJobMasterGatewayFuture(),\njobMasterGatewayFuture,\n\"JobMasterGatewayFuture from JobMasterServiceProcess\");\nforwardResultFuture(leaderSessionId, jobMasterServiceProcess.getResultFuture());\nconfirmLeadership(leaderSessionId, jobMasterServiceProcess.getLeaderAddressFuture());\n}\nprivate void confirmLeadership(\nUUID leaderSessionId, CompletableFuture leaderAddressFuture) {\nFutureUtils.assertNoException(\nleaderAddressFuture.thenAccept(\naddress ->\nrunIfValidLeader(\nleaderSessionId,\n() -> {\nLOG.debug(\"Confirm leadership {}.\", leaderSessionId);\nleaderElection.confirmLeadership(\nleaderSessionId, address);\n},\n\"confirming leadership\")));\n}\nprivate void forwardResultFuture(\nUUID leaderSessionId, CompletableFuture resultFuture) {\nresultFuture.whenComplete(\n(jobManagerRunnerResult, throwable) ->\nrunIfValidLeader(\nleaderSessionId,\n() -> onJobCompletion(jobManagerRunnerResult, throwable),\n\"result future forwarding\"));\n}\n@GuardedBy(\"lock\")\nprivate void onJobCompletion(\nJobManagerRunnerResult jobManagerRunnerResult, Throwable throwable) {\nstate = State.JOB_COMPLETED;\nLOG.debug(\"Completing the result for job {}.\", getJobID());\nif (throwable != null) {\nresultFuture.completeExceptionally(throwable);\njobMasterGatewayFuture.completeExceptionally(\nnew FlinkException(\n\"Could not retrieve JobMasterGateway because the JobMaster failed.\",\nthrowable));\n} else {\nif (!jobManagerRunnerResult.isSuccess()) {\njobMasterGatewayFuture.completeExceptionally(\nnew FlinkException(\n\"Could not retrieve JobMasterGateway because the JobMaster initialization failed.\",\njobManagerRunnerResult.getInitializationFailure()));\n}\nresultFuture.complete(jobManagerRunnerResult);\n}\n}\n@Override\npublic void revokeLeadership() {\nrunIfStateRunning(\nthis::stopJobMasterServiceProcessAsync,\n\"revoke leadership from JobMasterServiceProcess\");\n}\n@GuardedBy(\"lock\")\nprivate void stopJobMasterServiceProcessAsync() {\nsequentialOperation =\nsequentialOperation.thenCompose(\nignored ->\ncallIfRunning(\nthis::stopJobMasterServiceProcess,\n\"stop leading JobMasterServiceProcess\")\n.orElse(FutureUtils.completedVoidFuture()));\nhandleAsyncOperationError(sequentialOperation, \"Could not suspend the job manager.\");\n}\n@GuardedBy(\"lock\")\nprivate CompletableFuture stopJobMasterServiceProcess() {\nLOG.info(\n\"{} for job {} was revoked leadership with leader id {}. 
Stopping current {}.\",\ngetClass().getSimpleName(),\ngetJobID(),\njobMasterServiceProcess.getLeaderSessionId(),\nJobMasterServiceProcess.class.getSimpleName());\njobMasterGatewayFuture.completeExceptionally(\nnew FlinkException(\n\"Cannot obtain JobMasterGateway because the JobMaster lost leadership.\"));\njobMasterGatewayFuture = new CompletableFuture<>();\nhasCurrentLeaderBeenCancelled = false;\nreturn jobMasterServiceProcess.closeAsync();\n}\n@Override\npublic void handleError(Exception exception) {\nfatalErrorHandler.onFatalError(exception);\n}\nprivate void handleAsyncOperationError(CompletableFuture operation, String message) {\noperation.whenComplete(\n(unused, throwable) -> {\nif (throwable != null) {\nrunIfStateRunning(\n() ->\nhandleJobMasterServiceLeadershipRunnerError(\nnew FlinkException(message, throwable)),\n\"handle JobMasterServiceLeadershipRunner error\");\n}\n});\n}\nprivate void handleJobMasterServiceLeadershipRunnerError(Throwable cause) {\nif (ExceptionUtils.isJvmFatalError(cause)) {\nfatalErrorHandler.onFatalError(cause);\n} else {\nresultFuture.completeExceptionally(cause);\n}\n}\nprivate void runIfStateRunning(Runnable action, String actionDescription) {\nsynchronized (lock) {\nif (isRunning()) {\naction.run();\n} else {\nLOG.debug(\n\"Ignore '{}' because the leadership runner is no longer running.\",\nactionDescription);\n}\n}\n}\nprivate Optional callIfRunning(\nSupplier supplier, String supplierDescription) {\nsynchronized (lock) {\nif (isRunning()) {\nreturn Optional.of(supplier.get());\n} else {\nLOG.debug(\n\"Ignore '{}' because the leadership runner is no longer running.\",\nsupplierDescription);\nreturn Optional.empty();\n}\n}\n}\n@GuardedBy(\"lock\")\nprivate boolean isRunning() {\nreturn state == State.RUNNING;\n}\nprivate void runIfValidLeader(\nUUID expectedLeaderId, Runnable action, Runnable noLeaderFallback) {\nsynchronized (lock) {\nif (isValidLeader(expectedLeaderId)) {\naction.run();\n} else {\nnoLeaderFallback.run();\n}\n}\n}\nprivate void runIfValidLeader(\nUUID expectedLeaderId, Runnable action, String noLeaderFallbackCommandDescription) {\nrunIfValidLeader(\nexpectedLeaderId,\naction,\n() ->\nprintLogIfNotValidLeader(\nnoLeaderFallbackCommandDescription, expectedLeaderId));\n}\nprivate CompletableFuture supplyAsyncIfValidLeader(\nUUID expectedLeaderId,\nSupplier> supplier,\nSupplier> noLeaderFallback) {\nfinal CompletableFuture resultFuture = new CompletableFuture<>();\nrunIfValidLeader(\nexpectedLeaderId,\n() -> FutureUtils.forward(supplier.get(), resultFuture),\n() -> FutureUtils.forward(noLeaderFallback.get(), resultFuture));\nreturn resultFuture;\n}\n@GuardedBy(\"lock\")\nprivate boolean isValidLeader(UUID expectedLeaderId) {\nreturn isRunning()\n&& leaderElection != null\n&& leaderElection.hasLeadership(expectedLeaderId);\n}\nprivate void forwardIfValidLeader(\nUUID expectedLeaderId,\nCompletableFuture source,\nCompletableFuture target,\nString forwardDescription) {\nsource.whenComplete(\n(t, throwable) ->\nrunIfValidLeader(\nexpectedLeaderId,\n() -> {\nif (throwable != null) {\ntarget.completeExceptionally(throwable);\n} else {\ntarget.complete(t);\n}\n},\nforwardDescription));\n}\nenum State {\nRUNNING,\nSTOPPED,\nJOB_COMPLETED,\n}\n}" + }, + { + "comment": "\"false\" has to be a constant. 
", + "method_body": "private void addPackageInfo(BPackageSymbol packageSymbol, ProgramFile programFile) {\nBLangPackage pkgNode = this.packageCache.get(packageSymbol.pkgID);\nif (pkgNode == null) {\npackageSymbol.imports.forEach(importPkdSymbol -> addPackageInfo(importPkdSymbol, programFile));\nif (!programFile.packageFileMap.containsKey(packageSymbol.pkgID.toString())\n&& !packageSymbol.pkgID.orgName.equals(Names.BUILTIN_ORG)) {\nprogramFile.packageFileMap.put(packageSymbol.pkgID.toString(), packageSymbol.packageFile);\n}\nreturn;\n}\nHashSet importPkgList = new HashSet<>();\nimportPkgList.addAll(pkgNode.imports);\nString testsEnabled = options.get(CompilerOptionName.SKIP_TESTS);\nif (testsEnabled != null && testsEnabled.equals(\"false\")) {\npkgNode.getTestablePkgs().forEach(testablePackage -> importPkgList.addAll(testablePackage.imports));\n}\nimportPkgList.forEach(importPkdNode -> addPackageInfo(importPkdNode.symbol, programFile));\nif (!programFile.packageFileMap.containsKey(packageSymbol.pkgID.toString())\n&& !packageSymbol.pkgID.orgName.equals(Names.BUILTIN_ORG)) {\nprogramFile.packageFileMap.put(packageSymbol.pkgID.toString(), packageSymbol.packageFile);\n}\n}", + "target_code": "if (testsEnabled != null && testsEnabled.equals(\"false\")) {", + "method_body_after": "private void addPackageInfo(BPackageSymbol packageSymbol, ProgramFile programFile) {\nBLangPackage pkgNode = this.packageCache.get(packageSymbol.pkgID);\nif (pkgNode == null) {\npackageSymbol.imports.forEach(importPkdSymbol -> addPackageInfo(importPkdSymbol, programFile));\nif (!programFile.packageFileMap.containsKey(packageSymbol.pkgID.toString())\n&& !packageSymbol.pkgID.orgName.equals(Names.BUILTIN_ORG)) {\nprogramFile.packageFileMap.put(packageSymbol.pkgID.toString(), packageSymbol.packageFile);\n}\nreturn;\n}\nHashSet importPkgList = new HashSet<>();\nimportPkgList.addAll(pkgNode.imports);\nString testsEnabled = options.get(CompilerOptionName.SKIP_TESTS);\nif (testsEnabled != null && testsEnabled.equals(\"false\")) {\npkgNode.getTestablePkgs().forEach(testablePackage -> importPkgList.addAll(testablePackage.imports));\n}\nimportPkgList.forEach(importPkdNode -> addPackageInfo(importPkdNode.symbol, programFile));\nif (!programFile.packageFileMap.containsKey(packageSymbol.pkgID.toString())\n&& !packageSymbol.pkgID.orgName.equals(Names.BUILTIN_ORG)) {\nprogramFile.packageFileMap.put(packageSymbol.pkgID.toString(), packageSymbol.packageFile);\n}\n}", + "context_before": "class VariableIndex {\npublic enum Kind {\nLOCAL,\nFIELD,\nPACKAGE,\nREG\n}\nint tInt = -1;\nint tFloat = -1;\nint tString = -1;\nint tBoolean = -1;\nint tRef = -1;\nKind kind;\nVariableIndex(Kind kind) {\nthis.kind = kind;\n}\npublic int[] toArray() {\nint[] result = new int[5];\nresult[0] = this.tInt;\nresult[1] = this.tFloat;\nresult[2] = this.tString;\nresult[3] = this.tBoolean;\nresult[4] = this.tRef;\nreturn result;\n}\n}", + "context_after": "class VariableIndex {\npublic enum Kind {\nLOCAL,\nFIELD,\nPACKAGE,\nREG\n}\nint tInt = -1;\nint tFloat = -1;\nint tString = -1;\nint tBoolean = -1;\nint tRef = -1;\nKind kind;\nVariableIndex(Kind kind) {\nthis.kind = kind;\n}\npublic int[] toArray() {\nint[] result = new int[5];\nresult[0] = this.tInt;\nresult[1] = this.tFloat;\nresult[2] = this.tString;\nresult[3] = this.tBoolean;\nresult[4] = this.tRef;\nreturn result;\n}\n}" + }, + { + "comment": "Before this change, the following didn't apply the actual config: 
https://github.com/quarkusio/quarkus/blob/2b4496718ec46836e2775f669c7c8c5f970ccaa1/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java#L73-L74 because `quarkus.log-build-time.min-level` was looked up, which obviously doesn't exist.", + "method_body": "public static void handleObject(Object o) {\nfinal SmallRyeConfig config = (SmallRyeConfig) ConfigProvider.getConfig();\nfinal String clsNameSuffix = getClassNameSuffix(o);\nif (clsNameSuffix == null) {\nreturn;\n}\nfinal Class cls = o.getClass();\nfinal String name;\nConfigRoot configRoot = cls.getAnnotation(ConfigRoot.class);\nif (configRoot != null && !configRoot.name().equals(ConfigItem.HYPHENATED_ELEMENT_NAME)) {\nname = configRoot.name();\nif (name.startsWith(\"<<\")) {\nthrow new IllegalArgumentException(\"Found unsupported @ConfigRoot.name = \" + name + \" on \" + cls);\n}\n} else {\nname = dashify(cls.getSimpleName().substring(0, cls.getSimpleName().length() - clsNameSuffix.length()));\n}\nhandleObject(QUARKUS_PROPERTY_PREFIX + name, o, config, gatherQuarkusPropertyNames(config));\n}", + "target_code": "final Class cls = o.getClass();", + "method_body_after": "public static void handleObject(Object o) {\nfinal SmallRyeConfig config = (SmallRyeConfig) ConfigProvider.getConfig();\nfinal String clsNameSuffix = getClassNameSuffix(o);\nif (clsNameSuffix == null) {\nreturn;\n}\nfinal Class cls = o.getClass();\nfinal String name;\nConfigRoot configRoot = cls.getAnnotation(ConfigRoot.class);\nif (configRoot != null && !configRoot.name().equals(ConfigItem.HYPHENATED_ELEMENT_NAME)) {\nname = configRoot.name();\nif (name.startsWith(\"<<\")) {\nthrow new IllegalArgumentException(\"Found unsupported @ConfigRoot.name = \" + name + \" on \" + cls);\n}\n} else {\nname = dashify(cls.getSimpleName().substring(0, cls.getSimpleName().length() - clsNameSuffix.length()));\n}\nhandleObject(QUARKUS_PROPERTY_PREFIX + name, o, config, gatherQuarkusPropertyNames(config));\n}", + "context_before": "class ConfigInstantiator {\nprivate static Set SUPPORTED_CLASS_NAME_SUFFIXES = Set.of(\"Config\", \"Configuration\");\nprivate static final String QUARKUS_PROPERTY_PREFIX = \"quarkus.\";\nprivate static final Pattern SEGMENT_EXTRACTION_PATTERN = Pattern.compile(\"(\\\"[^\\\"]+\\\"|[^.\\\"]+).*\");\npublic static T handleObject(Supplier supplier) {\nT o = supplier.get();\nhandleObject(o);\nreturn o;\n}\nprivate static List gatherQuarkusPropertyNames(SmallRyeConfig config) {\nvar names = new ArrayList(50);\nfor (String name : config.getPropertyNames()) {\nif (name.startsWith(QUARKUS_PROPERTY_PREFIX)) {\nnames.add(name);\n}\n}\nreturn names;\n}\nprivate static void handleObject(String prefix, Object o, SmallRyeConfig config, List quarkusPropertyNames) {\ntry {\nfinal Class cls = o.getClass();\nif (!isClassNameSuffixSupported(o)) {\nreturn;\n}\nfor (Field field : cls.getDeclaredFields()) {\nif (field.isSynthetic() || Modifier.isFinal(field.getModifiers())) {\ncontinue;\n}\nfield.setAccessible(true);\nConfigItem configItem = field.getDeclaredAnnotation(ConfigItem.class);\nfinal Class fieldClass = field.getType();\nif (configItem == null || fieldClass.isAnnotationPresent(ConfigGroup.class)) {\nConstructor constructor = fieldClass.getConstructor();\nconstructor.setAccessible(true);\nObject newInstance = constructor.newInstance();\nfield.set(o, newInstance);\nhandleObject(prefix + \".\" + dashify(field.getName()), newInstance, config, quarkusPropertyNames);\n} else {\nString name = configItem.name();\nif 
(name.equals(ConfigItem.HYPHENATED_ELEMENT_NAME)) {\nname = dashify(field.getName());\n} else if (name.equals(ConfigItem.ELEMENT_NAME)) {\nname = field.getName();\n}\nString fullName = prefix + \".\" + name;\nfinal Type genericType = field.getGenericType();\nif (fieldClass == Map.class) {\nfield.set(o, handleMap(fullName, genericType, config, quarkusPropertyNames));\ncontinue;\n}\nfinal Converter conv = getConverterFor(genericType, config);\ntry {\nOptional value = config.getOptionalValue(fullName, conv);\nif (value.isPresent()) {\nfield.set(o, value.get());\n} else if (!configItem.defaultValue().equals(ConfigItem.NO_DEFAULT)) {\nfield.set(o, conv.convert(configItem.defaultValue()));\n}\n} catch (NoSuchElementException ignored) {\n}\n}\n}\n} catch (Exception e) {\nthrow new RuntimeException(e);\n}\n}\nprivate static Map handleMap(String fullName, Type genericType, SmallRyeConfig config,\nList quarkusPropertyNames) throws ReflectiveOperationException {\nvar map = new HashMap<>();\nif (typeOfParameter(genericType, 0) != String.class) {\nreturn map;\n}\nvar processedSegments = new HashSet();\nfor (String propertyName : quarkusPropertyNames) {\nvar fullNameWithDot = fullName + \".\";\nString withoutPrefix = propertyName.replace(fullNameWithDot, \"\");\nif (withoutPrefix.equals(propertyName)) {\ncontinue;\n}\nMatcher matcher = SEGMENT_EXTRACTION_PATTERN.matcher(withoutPrefix);\nif (!matcher.find()) {\ncontinue;\n}\nvar segment = matcher.group(1);\nif (!processedSegments.add(segment)) {\ncontinue;\n}\nvar mapKey = segment.replace(\"\\\"\", \"\");\nvar nextFullName = fullNameWithDot + segment;\nvar mapValueType = typeOfParameter(genericType, 1);\nObject mapValue;\nif (mapValueType instanceof ParameterizedType\n&& ((ParameterizedType) mapValueType).getRawType().equals(Map.class)) {\nmapValue = handleMap(nextFullName, mapValueType, config, quarkusPropertyNames);\n} else {\nClass mapValueClass = mapValueType instanceof Class ? 
(Class) mapValueType : null;\nif (mapValueClass != null && mapValueClass.isAnnotationPresent(ConfigGroup.class)) {\nConstructor constructor = mapValueClass.getConstructor();\nconstructor.setAccessible(true);\nmapValue = constructor.newInstance();\nhandleObject(nextFullName, mapValue, config, quarkusPropertyNames);\n} else {\nfinal Converter conv = getConverterFor(mapValueType, config);\nmapValue = config.getOptionalValue(nextFullName, conv).orElse(null);\n}\n}\nmap.put(mapKey, mapValue);\n}\nreturn map;\n}\nprivate static Converter getConverterFor(Type type, SmallRyeConfig config) {\nClass rawType = rawTypeOf(type);\nif (Enum.class.isAssignableFrom(rawType)) {\nreturn new HyphenateEnumConverter(rawType);\n} else if (rawType == Optional.class) {\nreturn Converters.newOptionalConverter(getConverterFor(typeOfParameter(type, 0), config));\n} else if (rawType == List.class) {\nreturn Converters.newCollectionConverter(getConverterFor(typeOfParameter(type, 0), config), ArrayList::new);\n} else {\nreturn config.requireConverter(rawTypeOf(type));\n}\n}\nprivate static Class rawTypeOf(final Type type) {\nif (type instanceof Class) {\nreturn (Class) type;\n} else if (type instanceof ParameterizedType) {\nreturn rawTypeOf(((ParameterizedType) type).getRawType());\n} else if (type instanceof GenericArrayType) {\nreturn Array.newInstance(rawTypeOf(((GenericArrayType) type).getGenericComponentType()), 0).getClass();\n} else {\nthrow new IllegalArgumentException(\"Type has no raw type class: \" + type);\n}\n}\nstatic Type typeOfParameter(final Type type, final int paramIdx) {\nif (type instanceof ParameterizedType) {\nreturn ((ParameterizedType) type).getActualTypeArguments()[paramIdx];\n} else {\nthrow new IllegalArgumentException(\"Type is not parameterized: \" + type);\n}\n}\nprivate static String dashify(String substring) {\nfinal StringBuilder ret = new StringBuilder();\nfinal char[] chars = substring.toCharArray();\nfor (int i = 0; i < chars.length; i++) {\nfinal char c = chars[i];\nif (i != 0 && i != (chars.length - 1) && c >= 'A' && c <= 'Z') {\nret.append('-');\n}\nret.append(Character.toLowerCase(c));\n}\nreturn ret.toString();\n}\nprivate static String getClassNameSuffix(final Object o) {\nif (o == null) {\nreturn null;\n}\nfinal String klassName = o.getClass().getName();\nfor (final String supportedSuffix : SUPPORTED_CLASS_NAME_SUFFIXES) {\nif (klassName.endsWith(supportedSuffix)) {\nreturn supportedSuffix;\n}\n}\nreturn null;\n}\nprivate static boolean isClassNameSuffixSupported(final Object o) {\nif (o == null) {\nreturn false;\n}\nfinal String klassName = o.getClass().getName();\nfor (final String supportedSuffix : SUPPORTED_CLASS_NAME_SUFFIXES) {\nif (klassName.endsWith(supportedSuffix)) {\nreturn true;\n}\n}\nreturn false;\n}\n}", + "context_after": "class ConfigInstantiator {\nprivate static Set SUPPORTED_CLASS_NAME_SUFFIXES = Set.of(\"Config\", \"Configuration\");\nprivate static final String QUARKUS_PROPERTY_PREFIX = \"quarkus.\";\nprivate static final Pattern SEGMENT_EXTRACTION_PATTERN = Pattern.compile(\"(\\\"[^\\\"]+\\\"|[^.\\\"]+).*\");\npublic static T handleObject(Supplier supplier) {\nT o = supplier.get();\nhandleObject(o);\nreturn o;\n}\nprivate static List gatherQuarkusPropertyNames(SmallRyeConfig config) {\nvar names = new ArrayList(50);\nfor (String name : config.getPropertyNames()) {\nif (name.startsWith(QUARKUS_PROPERTY_PREFIX)) {\nnames.add(name);\n}\n}\nreturn names;\n}\nprivate static void handleObject(String prefix, Object o, SmallRyeConfig config, List 
quarkusPropertyNames) {\ntry {\nfinal Class cls = o.getClass();\nif (!isClassNameSuffixSupported(o)) {\nreturn;\n}\nfor (Field field : cls.getDeclaredFields()) {\nif (field.isSynthetic() || Modifier.isFinal(field.getModifiers())) {\ncontinue;\n}\nfield.setAccessible(true);\nConfigItem configItem = field.getDeclaredAnnotation(ConfigItem.class);\nfinal Class fieldClass = field.getType();\nif (configItem == null || fieldClass.isAnnotationPresent(ConfigGroup.class)) {\nConstructor constructor = fieldClass.getConstructor();\nconstructor.setAccessible(true);\nObject newInstance = constructor.newInstance();\nfield.set(o, newInstance);\nhandleObject(prefix + \".\" + dashify(field.getName()), newInstance, config, quarkusPropertyNames);\n} else {\nString name = configItem.name();\nif (name.equals(ConfigItem.HYPHENATED_ELEMENT_NAME)) {\nname = dashify(field.getName());\n} else if (name.equals(ConfigItem.ELEMENT_NAME)) {\nname = field.getName();\n}\nString fullName = prefix + \".\" + name;\nfinal Type genericType = field.getGenericType();\nif (fieldClass == Map.class) {\nfield.set(o, handleMap(fullName, genericType, config, quarkusPropertyNames));\ncontinue;\n}\nfinal Converter conv = getConverterFor(genericType, config);\ntry {\nOptional value = config.getOptionalValue(fullName, conv);\nif (value.isPresent()) {\nfield.set(o, value.get());\n} else if (!configItem.defaultValue().equals(ConfigItem.NO_DEFAULT)) {\nfield.set(o, conv.convert(configItem.defaultValue()));\n}\n} catch (NoSuchElementException ignored) {\n}\n}\n}\n} catch (Exception e) {\nthrow new RuntimeException(e);\n}\n}\nprivate static Map handleMap(String fullName, Type genericType, SmallRyeConfig config,\nList quarkusPropertyNames) throws ReflectiveOperationException {\nvar map = new HashMap<>();\nif (typeOfParameter(genericType, 0) != String.class) {\nreturn map;\n}\nvar processedSegments = new HashSet();\nfor (String propertyName : quarkusPropertyNames) {\nvar fullNameWithDot = fullName + \".\";\nString withoutPrefix = propertyName.replace(fullNameWithDot, \"\");\nif (withoutPrefix.equals(propertyName)) {\ncontinue;\n}\nMatcher matcher = SEGMENT_EXTRACTION_PATTERN.matcher(withoutPrefix);\nif (!matcher.find()) {\ncontinue;\n}\nvar segment = matcher.group(1);\nif (!processedSegments.add(segment)) {\ncontinue;\n}\nvar mapKey = segment.replace(\"\\\"\", \"\");\nvar nextFullName = fullNameWithDot + segment;\nvar mapValueType = typeOfParameter(genericType, 1);\nObject mapValue;\nif (mapValueType instanceof ParameterizedType\n&& ((ParameterizedType) mapValueType).getRawType().equals(Map.class)) {\nmapValue = handleMap(nextFullName, mapValueType, config, quarkusPropertyNames);\n} else {\nClass mapValueClass = mapValueType instanceof Class ? 
(Class) mapValueType : null;\nif (mapValueClass != null && mapValueClass.isAnnotationPresent(ConfigGroup.class)) {\nConstructor constructor = mapValueClass.getConstructor();\nconstructor.setAccessible(true);\nmapValue = constructor.newInstance();\nhandleObject(nextFullName, mapValue, config, quarkusPropertyNames);\n} else {\nfinal Converter conv = getConverterFor(mapValueType, config);\nmapValue = config.getOptionalValue(nextFullName, conv).orElse(null);\n}\n}\nmap.put(mapKey, mapValue);\n}\nreturn map;\n}\nprivate static Converter getConverterFor(Type type, SmallRyeConfig config) {\nClass rawType = rawTypeOf(type);\nif (Enum.class.isAssignableFrom(rawType)) {\nreturn new HyphenateEnumConverter(rawType);\n} else if (rawType == Optional.class) {\nreturn Converters.newOptionalConverter(getConverterFor(typeOfParameter(type, 0), config));\n} else if (rawType == List.class) {\nreturn Converters.newCollectionConverter(getConverterFor(typeOfParameter(type, 0), config), ArrayList::new);\n} else {\nreturn config.requireConverter(rawTypeOf(type));\n}\n}\nprivate static Class rawTypeOf(final Type type) {\nif (type instanceof Class) {\nreturn (Class) type;\n} else if (type instanceof ParameterizedType) {\nreturn rawTypeOf(((ParameterizedType) type).getRawType());\n} else if (type instanceof GenericArrayType) {\nreturn Array.newInstance(rawTypeOf(((GenericArrayType) type).getGenericComponentType()), 0).getClass();\n} else {\nthrow new IllegalArgumentException(\"Type has no raw type class: \" + type);\n}\n}\nstatic Type typeOfParameter(final Type type, final int paramIdx) {\nif (type instanceof ParameterizedType) {\nreturn ((ParameterizedType) type).getActualTypeArguments()[paramIdx];\n} else {\nthrow new IllegalArgumentException(\"Type is not parameterized: \" + type);\n}\n}\nprivate static String dashify(String substring) {\nfinal StringBuilder ret = new StringBuilder();\nfinal char[] chars = substring.toCharArray();\nfor (int i = 0; i < chars.length; i++) {\nfinal char c = chars[i];\nif (i != 0 && i != (chars.length - 1) && c >= 'A' && c <= 'Z') {\nret.append('-');\n}\nret.append(Character.toLowerCase(c));\n}\nreturn ret.toString();\n}\nprivate static String getClassNameSuffix(final Object o) {\nif (o == null) {\nreturn null;\n}\nfinal String klassName = o.getClass().getName();\nfor (final String supportedSuffix : SUPPORTED_CLASS_NAME_SUFFIXES) {\nif (klassName.endsWith(supportedSuffix)) {\nreturn supportedSuffix;\n}\n}\nreturn null;\n}\nprivate static boolean isClassNameSuffixSupported(final Object o) {\nif (o == null) {\nreturn false;\n}\nfinal String klassName = o.getClass().getName();\nfor (final String supportedSuffix : SUPPORTED_CLASS_NAME_SUFFIXES) {\nif (klassName.endsWith(supportedSuffix)) {\nreturn true;\n}\n}\nreturn false;\n}\n}" + }, + { + "comment": "In this [comment](https://github.com/apache/beam/pull/25927#discussion_r1174141095)'s context, could we do this? I agree it looks like a game with the checker framework but the benefit of having the checker framework and removing the SuppressWarnings outweighs the cost of this I'd argue. Even if one checks nullorempty in a prior step and still calls getSchema, the checker framework complains. ``` if (!Strings.isNullOrEmpty(configuration.getSchema())) { String schema = configuration.getSafeSchema(); ... 
} ```", + "method_body": "public PCollectionRowTuple expand(PCollectionRowTuple input) {\ncheckArgument(\ninput.getAll().isEmpty() ^ Strings.isNullOrEmpty(configuration.getFilepattern()),\n\"Either an input PCollection of file patterns or the filepattern parameter must be set,\"\n+ \"but not both.\");\nString schema = configuration.getSchema();\nif (!Strings.isNullOrEmpty(schema)) {\nschema = resolveSchemaStringOrFilePath(schema);\nconfiguration = configuration.toBuilder().setSchema(schema).build();\n}\nPCollection files;\nif (!Strings.isNullOrEmpty(configuration.getFilepattern())) {\nPipeline p = input.getPipeline();\nFileIO.Match matchFiles = FileIO.match().filepattern(configuration.getFilepattern());\nmatchFiles = (FileIO.Match) maybeApplyStreaming(matchFiles);\nfiles = p.apply(matchFiles);\n} else {\nFileIO.MatchAll matchAllFiles = FileIO.matchAll();\nmatchAllFiles = (FileIO.MatchAll) maybeApplyStreaming(matchAllFiles);\nfiles =\ninput\n.get(INPUT_TAG)\n.apply(\n\"Get filepatterns\",\nMapElements.into(TypeDescriptors.strings())\n.via((row) -> row.getString(\"filepattern\")))\n.apply(\"Match files\", matchAllFiles);\n}\nPCollection output =\nfiles\n.apply(FileIO.readMatches())\n.apply(\"Read files\", getProvider().buildTransform(configuration));\nreturn PCollectionRowTuple.of(OUTPUT_TAG, output);\n}", + "target_code": "String schema = configuration.getSchema();", + "method_body_after": "public PCollectionRowTuple expand(PCollectionRowTuple input) {\ncheckArgument(\ninput.getAll().isEmpty() ^ Strings.isNullOrEmpty(configuration.getFilepattern()),\n\"Either an input PCollection of file patterns or the filepattern parameter must be set,\"\n+ \"but not both.\");\nString schema = configuration.getSchema();\nif (!Strings.isNullOrEmpty(schema)) {\nschema = resolveSchemaStringOrFilePath(configuration.getSafeSchema());\nconfiguration = configuration.toBuilder().setSchema(schema).build();\n}\nPCollection files;\nif (useInputPCollection) {\nfiles =\ninput\n.get(INPUT_TAG)\n.apply(\n\"Get filepatterns\",\nMapElements.into(TypeDescriptors.strings())\n.via(\n(Row row) ->\nObjects.requireNonNull(row.getString(FILEPATTERN_ROW_FIELD_NAME)))\n.exceptionsInto(TypeDescriptors.nulls())\n.exceptionsVia(\nexceptionElement -> {\nString faultyFilepattern =\nOptional.ofNullable(\nexceptionElement\n.element()\n.getString(FILEPATTERN_ROW_FIELD_NAME))\n.orElse(\"[null filepattern]\");\nLOG.warn(\n\"Could not acquire a faulty filepattern: {}. 
This will be skipped.\",\nfaultyFilepattern);\nreturn null;\n}))\n.output()\n.apply(\"Match files\", (FileIO.MatchAll) buildMatchTransform());\n} else {\nPipeline p = input.getPipeline();\nfiles = p.apply((FileIO.Match) buildMatchTransform());\n}\nPCollection output =\nfiles\n.apply(FileIO.readMatches())\n.apply(\"Read files\", getProvider().buildTransform(configuration));\nreturn PCollectionRowTuple.of(OUTPUT_TAG, output);\n}", + "context_before": "class FileReadSchemaTransform\nextends PTransform implements SchemaTransform {\nprivate FileReadSchemaTransformConfiguration configuration;\nFileReadSchemaTransform(FileReadSchemaTransformConfiguration configuration) {\nconfiguration.validate();\nthis.configuration = configuration;\n}\n@Override\npublic PTransform> maybeApplyStreaming(\nPTransform> matchTransform) {\nLong terminateAfterSeconds = configuration.getTerminateAfterSecondsSinceNewOutput();\nLong pollIntervalMillis = configuration.getPollIntervalMillis();\nif (pollIntervalMillis != null && pollIntervalMillis > 0L) {\nDuration pollDuration = Duration.millis(pollIntervalMillis);\nTerminationCondition terminationCondition = Growth.never();\nif (terminateAfterSeconds != null && terminateAfterSeconds > 0L) {\nterminationCondition =\nGrowth.afterTimeSinceNewOutput(Duration.standardSeconds(terminateAfterSeconds));\n}\nif (matchTransform instanceof FileIO.Match) {\nmatchTransform =\n((FileIO.Match) matchTransform).continuously(pollDuration, terminationCondition);\n} else if (matchTransform instanceof FileIO.MatchAll) {\nmatchTransform =\n((FileIO.MatchAll) matchTransform).continuously(pollDuration, terminationCondition);\n}\n}\nreturn matchTransform;\n}\npublic String resolveSchemaStringOrFilePath(String schema) {\ntry {\nMatchResult result;\ntry {\nLOG.info(\"Attempting to locate input schema as a file path.\");\nresult = FileSystems.match(schema);\ncheckArgument(result.status() == MatchResult.Status.OK);\n} catch (IllegalArgumentException e) {\nLOG.info(\n\"Input schema is not a valid file path. Will attempt to use it as a schema string.\");\nreturn schema;\n}\ncheckArgument(\n!result.metadata().isEmpty(),\n\"Failed to match any files for the input schema file path.\");\nList resource =\nresult.metadata().stream()\n.map(MatchResult.Metadata::resourceId)\n.collect(Collectors.toList());\ncheckArgument(\nresource.size() == 1,\n\"Expected exactly 1 schema file, but got \" + resource.size() + \" files.\");\nReadableByteChannel byteChannel = FileSystems.open(resource.get(0));\nReader reader = Channels.newReader(byteChannel, UTF_8.name());\nreturn CharStreams.toString(reader);\n} catch (IOException e) {\nthrow new RuntimeException(\"Error when parsing input schema file: \", e);\n}\n}\nprivate FileReadSchemaTransformFormatProvider getProvider() {\nString format = configuration.getFormat();\nMap providers =\nProviders.loadProviders(FileReadSchemaTransformFormatProvider.class);\ncheckArgument(\nproviders.containsKey(format),\nString.format(\n\"Received unsupported file format: %s. 
Supported formats are %s\",\nformat, providers.keySet()));\nreturn providers.get(format);\n}\n@Override\npublic PTransform buildTransform() {\nreturn this;\n}\n}", + "context_after": "class FileReadSchemaTransform extends PTransform\nimplements SchemaTransform {\nprivate FileReadSchemaTransformConfiguration configuration;\nprivate boolean useInputPCollection;\nFileReadSchemaTransform(FileReadSchemaTransformConfiguration configuration) {\nthis.configuration = configuration;\nuseInputPCollection = Strings.isNullOrEmpty(configuration.getFilepattern());\n}\n@Override\n@VisibleForTesting\nPTransform> buildMatchTransform() {\nPTransform> matchTransform =\nuseInputPCollection\n? FileIO.matchAll()\n: FileIO.match().filepattern(configuration.getSafeFilepattern());\nLong terminateAfterSeconds = configuration.getTerminateAfterSecondsSinceNewOutput();\nLong pollIntervalMillis = configuration.getPollIntervalMillis();\nif (pollIntervalMillis != null && pollIntervalMillis > 0L) {\nDuration pollDuration = Duration.millis(pollIntervalMillis);\nTerminationCondition terminationCondition =\n(terminateAfterSeconds != null && terminateAfterSeconds > 0L)\n? Growth.afterTimeSinceNewOutput(Duration.standardSeconds(terminateAfterSeconds))\n: Growth.never();\nmatchTransform =\nuseInputPCollection\n? FileIO.matchAll().continuously(pollDuration, terminationCondition)\n: FileIO.match()\n.filepattern(configuration.getSafeFilepattern())\n.continuously(pollDuration, terminationCondition);\n}\nreturn matchTransform;\n}\nprivate String resolveSchemaStringOrFilePath(String schema) {\ntry {\nMatchResult result;\ntry {\nLOG.info(\"Attempting to locate input schema as a file path.\");\nresult = FileSystems.match(schema);\ncheckArgument(result.status() == MatchResult.Status.OK);\n} catch (IllegalArgumentException e) {\nLOG.info(\n\"Input schema is not a valid file path. Will attempt to use it as a schema string.\");\nreturn schema;\n}\ncheckArgument(\n!result.metadata().isEmpty(),\n\"Failed to match any files for the input schema file path.\");\nList resource =\nresult.metadata().stream()\n.map(MatchResult.Metadata::resourceId)\n.collect(Collectors.toList());\ncheckArgument(\nresource.size() == 1,\n\"Expected exactly 1 schema file, but got \" + resource.size() + \" files.\");\nReadableByteChannel byteChannel = FileSystems.open(resource.get(0));\nReader reader = Channels.newReader(byteChannel, UTF_8.name());\nreturn CharStreams.toString(reader);\n} catch (IOException e) {\nthrow new RuntimeException(\"Error when parsing input schema file: \", e);\n}\n}\nprivate FileReadSchemaTransformFormatProvider getProvider() {\nString format = configuration.getFormat();\nMap providers =\nProviders.loadProviders(FileReadSchemaTransformFormatProvider.class);\ncheckArgument(\nproviders.containsKey(format),\nString.format(\n\"Received unsupported file format: %s. 
Supported formats are %s\",\nformat, providers.keySet()));\nOptional provider =\nOptional.ofNullable(providers.get(format));\ncheckState(provider.isPresent());\nreturn provider.get();\n}\n@Override\npublic PTransform buildTransform() {\nreturn this;\n}\n}" + }, + { + "comment": "move these lines out, then the next comment line is unnecesary", + "method_body": "void testSchemaIncludeOption() {\nMap options = getAllOptions();\noptions.put(\"debezium-json.schema-include\", \"true\");\nfinal DebeziumJsonDeserializationSchema expectedDeser =\nnew DebeziumJsonDeserializationSchema(\nPHYSICAL_DATA_TYPE,\nCollections.emptyList(),\nInternalTypeInfo.of(PHYSICAL_DATA_TYPE.getLogicalType()),\ntrue,\ntrue,\nTimestampFormat.ISO_8601);\nfinal DynamicTableSource actualSource = createTableSource(SCHEMA, options);\nTestDynamicTableFactory.DynamicTableSourceMock scanSourceMock =\n(TestDynamicTableFactory.DynamicTableSourceMock) actualSource;\nDeserializationSchema actualDeser =\nscanSourceMock.valueFormat.createRuntimeDecoder(\nScanRuntimeProviderContext.INSTANCE, PHYSICAL_DATA_TYPE);\nassertThat(actualDeser).isEqualTo(expectedDeser);\nassertThatThrownBy(\n() -> {\nfinal DynamicTableSink actualSink = createTableSink(SCHEMA, options);\nTestDynamicTableFactory.DynamicTableSinkMock sinkMock =\n(TestDynamicTableFactory.DynamicTableSinkMock) actualSink;\nsinkMock.valueFormat.createRuntimeEncoder(\nnew SinkRuntimeProviderContext(false), PHYSICAL_DATA_TYPE);\nfail();\n})\n.satisfies(\nanyCauseMatches(\nRuntimeException.class,\n\"Debezium JSON serialization doesn't support \"\n+ \"'debezium-json.schema-include' option been set to true.\"));\n}", + "target_code": "(TestDynamicTableFactory.DynamicTableSinkMock) actualSink;", + "method_body_after": "void testSchemaIncludeOption() {\nMap options = getAllOptions();\noptions.put(\"debezium-json.schema-include\", \"true\");\nfinal DebeziumJsonDeserializationSchema expectedDeser =\nnew DebeziumJsonDeserializationSchema(\nPHYSICAL_DATA_TYPE,\nCollections.emptyList(),\nInternalTypeInfo.of(PHYSICAL_DATA_TYPE.getLogicalType()),\ntrue,\ntrue,\nTimestampFormat.ISO_8601);\nfinal DynamicTableSource actualSource = createTableSource(SCHEMA, options);\nTestDynamicTableFactory.DynamicTableSourceMock scanSourceMock =\n(TestDynamicTableFactory.DynamicTableSourceMock) actualSource;\nDeserializationSchema actualDeser =\nscanSourceMock.valueFormat.createRuntimeDecoder(\nScanRuntimeProviderContext.INSTANCE, PHYSICAL_DATA_TYPE);\nassertThat(actualDeser).isEqualTo(expectedDeser);\nassertThatThrownBy(\n() -> {\nfinal DynamicTableSink actualSink = createTableSink(SCHEMA, options);\nTestDynamicTableFactory.DynamicTableSinkMock sinkMock =\n(TestDynamicTableFactory.DynamicTableSinkMock) actualSink;\nsinkMock.valueFormat.createRuntimeEncoder(\nnew SinkRuntimeProviderContext(false), PHYSICAL_DATA_TYPE);\n})\n.satisfies(\nanyCauseMatches(\nRuntimeException.class,\n\"Debezium JSON serialization doesn't support \"\n+ \"'debezium-json.schema-include' option been set to true.\"));\n}", + "context_before": "class DebeziumJsonFormatFactoryTest {\n@Test\nvoid testSeDeSchema() {\nfinal DebeziumJsonDeserializationSchema expectedDeser =\nnew DebeziumJsonDeserializationSchema(\nPHYSICAL_DATA_TYPE,\nCollections.emptyList(),\nInternalTypeInfo.of(PHYSICAL_TYPE),\nfalse,\ntrue,\nTimestampFormat.ISO_8601);\nfinal Map options = getAllOptions();\nfinal DynamicTableSource actualSource = createTableSource(SCHEMA, options);\nassert actualSource instanceof 
TestDynamicTableFactory.DynamicTableSourceMock;\nTestDynamicTableFactory.DynamicTableSourceMock scanSourceMock =\n(TestDynamicTableFactory.DynamicTableSourceMock) actualSource;\nDeserializationSchema actualDeser =\nscanSourceMock.valueFormat.createRuntimeDecoder(\nScanRuntimeProviderContext.INSTANCE, PHYSICAL_DATA_TYPE);\nassertThat(actualDeser).isEqualTo(expectedDeser);\nfinal DebeziumJsonSerializationSchema expectedSer =\nnew DebeziumJsonSerializationSchema(\n(RowType) PHYSICAL_DATA_TYPE.getLogicalType(),\nTimestampFormat.ISO_8601,\nJsonFormatOptions.MapNullKeyMode.LITERAL,\n\"null\",\ntrue);\nfinal DynamicTableSink actualSink = createTableSink(SCHEMA, options);\nassert actualSink instanceof TestDynamicTableFactory.DynamicTableSinkMock;\nTestDynamicTableFactory.DynamicTableSinkMock sinkMock =\n(TestDynamicTableFactory.DynamicTableSinkMock) actualSink;\nSerializationSchema actualSer =\nsinkMock.valueFormat.createRuntimeEncoder(\nnew SinkRuntimeProviderContext(false), PHYSICAL_DATA_TYPE);\nassertThat(actualSer).isEqualTo(expectedSer);\n}\n@Test\nvoid testInvalidIgnoreParseError() {\nfinal Map options =\ngetModifiedOptions(opts -> opts.put(\"debezium-json.ignore-parse-errors\", \"abc\"));\nassertThatThrownBy(() -> createTableSource(SCHEMA, options))\n.satisfies(\nanyCauseMatches(\nIllegalArgumentException.class,\n\"Unrecognized option for boolean: abc. \"\n+ \"Expected either true or false(case insensitive)\"));\n}\n@Test\nvoid testInvalidOptionForTimestampFormat() {\nfinal Map tableOptions =\ngetModifiedOptions(\nopts -> opts.put(\"debezium-json.timestamp-format.standard\", \"test\"));\nassertThatThrownBy(() -> createTableSource(SCHEMA, tableOptions))\n.isInstanceOf(ValidationException.class)\n.satisfies(\nanyCauseMatches(\nValidationException.class,\n\"Unsupported value 'test' for timestamp-format.standard. \"\n+ \"Supported values are [SQL, ISO-8601].\"));\n}\n@Test\n@Test\nvoid testInvalidOptionForMapNullKeyMode() {\nfinal Map tableOptions =\ngetModifiedOptions(opts -> opts.put(\"debezium-json.map-null-key.mode\", \"invalid\"));\nassertThatThrownBy(() -> createTableSink(SCHEMA, tableOptions))\n.isInstanceOf(ValidationException.class)\n.satisfies(\nanyCauseMatches(\nValidationException.class,\n\"Unsupported value 'invalid' for option map-null-key.mode. 
\"\n+ \"Supported values are [LITERAL, FAIL, DROP].\"));\n}\n/**\n* Returns the full options modified by the given consumer {@code optionModifier}.\n*\n* @param optionModifier Consumer to modify the options\n*/\nprivate Map getModifiedOptions(Consumer> optionModifier) {\nMap options = getAllOptions();\noptionModifier.accept(options);\nreturn options;\n}\nprivate Map getAllOptions() {\nfinal Map options = new HashMap<>();\noptions.put(\"connector\", TestDynamicTableFactory.IDENTIFIER);\noptions.put(\"target\", \"MyTarget\");\noptions.put(\"buffer-size\", \"1000\");\noptions.put(\"format\", \"debezium-json\");\noptions.put(\"debezium-json.ignore-parse-errors\", \"true\");\noptions.put(\"debezium-json.timestamp-format.standard\", \"ISO-8601\");\noptions.put(\"debezium-json.map-null-key.mode\", \"LITERAL\");\noptions.put(\"debezium-json.map-null-key.literal\", \"null\");\noptions.put(\"debezium-json.encode.decimal-as-plain-number\", \"true\");\nreturn options;\n}\n}", + "context_after": "class DebeziumJsonFormatFactoryTest {\n@Test\nvoid testSeDeSchema() {\nfinal DebeziumJsonDeserializationSchema expectedDeser =\nnew DebeziumJsonDeserializationSchema(\nPHYSICAL_DATA_TYPE,\nCollections.emptyList(),\nInternalTypeInfo.of(PHYSICAL_TYPE),\nfalse,\ntrue,\nTimestampFormat.ISO_8601);\nfinal Map options = getAllOptions();\nfinal DynamicTableSource actualSource = createTableSource(SCHEMA, options);\nassert actualSource instanceof TestDynamicTableFactory.DynamicTableSourceMock;\nTestDynamicTableFactory.DynamicTableSourceMock scanSourceMock =\n(TestDynamicTableFactory.DynamicTableSourceMock) actualSource;\nDeserializationSchema actualDeser =\nscanSourceMock.valueFormat.createRuntimeDecoder(\nScanRuntimeProviderContext.INSTANCE, PHYSICAL_DATA_TYPE);\nassertThat(actualDeser).isEqualTo(expectedDeser);\nfinal DebeziumJsonSerializationSchema expectedSer =\nnew DebeziumJsonSerializationSchema(\n(RowType) PHYSICAL_DATA_TYPE.getLogicalType(),\nTimestampFormat.ISO_8601,\nJsonFormatOptions.MapNullKeyMode.LITERAL,\n\"null\",\ntrue);\nfinal DynamicTableSink actualSink = createTableSink(SCHEMA, options);\nassert actualSink instanceof TestDynamicTableFactory.DynamicTableSinkMock;\nTestDynamicTableFactory.DynamicTableSinkMock sinkMock =\n(TestDynamicTableFactory.DynamicTableSinkMock) actualSink;\nSerializationSchema actualSer =\nsinkMock.valueFormat.createRuntimeEncoder(\nnew SinkRuntimeProviderContext(false), PHYSICAL_DATA_TYPE);\nassertThat(actualSer).isEqualTo(expectedSer);\n}\n@Test\nvoid testInvalidIgnoreParseError() {\nfinal Map options =\ngetModifiedOptions(opts -> opts.put(\"debezium-json.ignore-parse-errors\", \"abc\"));\nassertThatThrownBy(() -> createTableSource(SCHEMA, options))\n.satisfies(\nanyCauseMatches(\nIllegalArgumentException.class,\n\"Unrecognized option for boolean: abc. \"\n+ \"Expected either true or false(case insensitive)\"));\n}\n@Test\n@Test\nvoid testInvalidOptionForTimestampFormat() {\nfinal Map tableOptions =\ngetModifiedOptions(\nopts -> opts.put(\"debezium-json.timestamp-format.standard\", \"test\"));\nassertThatThrownBy(() -> createTableSource(SCHEMA, tableOptions))\n.isInstanceOf(ValidationException.class)\n.satisfies(\nanyCauseMatches(\nValidationException.class,\n\"Unsupported value 'test' for timestamp-format.standard. 
\"\n+ \"Supported values are [SQL, ISO-8601].\"));\n}\n@Test\nvoid testInvalidOptionForMapNullKeyMode() {\nfinal Map tableOptions =\ngetModifiedOptions(opts -> opts.put(\"debezium-json.map-null-key.mode\", \"invalid\"));\nassertThatThrownBy(() -> createTableSink(SCHEMA, tableOptions))\n.isInstanceOf(ValidationException.class)\n.satisfies(\nanyCauseMatches(\nValidationException.class,\n\"Unsupported value 'invalid' for option map-null-key.mode. \"\n+ \"Supported values are [LITERAL, FAIL, DROP].\"));\n}\n/**\n* Returns the full options modified by the given consumer {@code optionModifier}.\n*\n* @param optionModifier Consumer to modify the options\n*/\nprivate Map getModifiedOptions(Consumer> optionModifier) {\nMap options = getAllOptions();\noptionModifier.accept(options);\nreturn options;\n}\nprivate Map getAllOptions() {\nfinal Map options = new HashMap<>();\noptions.put(\"connector\", TestDynamicTableFactory.IDENTIFIER);\noptions.put(\"target\", \"MyTarget\");\noptions.put(\"buffer-size\", \"1000\");\noptions.put(\"format\", \"debezium-json\");\noptions.put(\"debezium-json.ignore-parse-errors\", \"true\");\noptions.put(\"debezium-json.timestamp-format.standard\", \"ISO-8601\");\noptions.put(\"debezium-json.map-null-key.mode\", \"LITERAL\");\noptions.put(\"debezium-json.map-null-key.literal\", \"null\");\noptions.put(\"debezium-json.encode.decimal-as-plain-number\", \"true\");\nreturn options;\n}\n}" + }, + { + "comment": "no", + "method_body": "private void doPreHeat() {\nList recentStatsUpdatedCols = null;\nlong retryTimes = 0;\nwhile (retryTimes < StatisticConstants.PRELOAD_RETRY_TIMES) {\ntry {\nrecentStatsUpdatedCols = StatisticsRepository.fetchRecentStatsUpdatedCol();\nbreak;\n} catch (Throwable t) {\n}\nretryTimes++;\ntry {\nThread.sleep(StatisticConstants.PRELOAD_RETRY_INTERVAL_IN_SECONDS);\n} catch (Throwable t) {\n}\n}\nif (CollectionUtils.isEmpty(recentStatsUpdatedCols)) {\nreturn;\n}\nfor (ResultRow r : recentStatsUpdatedCols) {\ntry {\nString tblId = r.getColumnValue(\"tbl_id\");\nString idxId = r.getColumnValue(\"idx_id\");\nString colId = r.getColumnValue(\"col_id\");\nfinal StatisticsCacheKey k =\nnew StatisticsCacheKey(Long.parseLong(tblId), Long.parseLong(idxId), colId);\nfinal ColumnStatistic c = ColumnStatistic.fromResultRow(r);\nCompletableFuture> f = new CompletableFuture>() {\n@Override\npublic Optional get() throws InterruptedException, ExecutionException {\nreturn Optional.of(c);\n}\n@Override\npublic boolean isDone() {\nreturn true;\n}\n@Override\npublic boolean complete(Optional value) {\nreturn true;\n}\n@Override\npublic Optional join() {\nreturn Optional.of(c);\n}\n};\ncolumnStatisticsCache.put(k, f);\n} catch (Throwable t) {\nLOG.warn(\"Error when preheating stats cache\", t);\n}\n}\n}", + "target_code": "try {", + "method_body_after": "private void doPreHeat() {\nList recentStatsUpdatedCols = null;\nlong retryTimes = 0;\nwhile (retryTimes < StatisticConstants.PRELOAD_RETRY_TIMES) {\ntry {\nrecentStatsUpdatedCols = StatisticsRepository.fetchRecentStatsUpdatedCol();\nbreak;\n} catch (Throwable t) {\n}\nretryTimes++;\ntry {\nThread.sleep(StatisticConstants.PRELOAD_RETRY_INTERVAL_IN_SECONDS);\n} catch (Throwable t) {\n}\n}\nif (CollectionUtils.isEmpty(recentStatsUpdatedCols)) {\nreturn;\n}\nfor (ResultRow r : recentStatsUpdatedCols) {\ntry {\nString tblId = r.getColumnValue(\"tbl_id\");\nString idxId = r.getColumnValue(\"idx_id\");\nString colId = r.getColumnValue(\"col_id\");\nfinal StatisticsCacheKey k =\nnew StatisticsCacheKey(Long.parseLong(tblId), 
Long.parseLong(idxId), colId);\nfinal ColumnStatistic c = ColumnStatistic.fromResultRow(r);\nCompletableFuture> f = new CompletableFuture>() {\n@Override\npublic Optional get() throws InterruptedException, ExecutionException {\nreturn Optional.of(c);\n}\n@Override\npublic boolean isDone() {\nreturn true;\n}\n@Override\npublic boolean complete(Optional value) {\nreturn true;\n}\n@Override\npublic Optional join() {\nreturn Optional.of(c);\n}\n};\ncolumnStatisticsCache.put(k, f);\n} catch (Throwable t) {\nLOG.warn(\"Error when preheating stats cache\", t);\n}\n}\n}", + "context_before": "class StatisticsCache {\nprivate static final Logger LOG = LogManager.getLogger(StatisticsCache.class);\n/**\n* Use a standalone thread pool to avoid interference between this and any other jdk function\n* that use the thread of ForkJoinPool\n*/\nprivate final ThreadPoolExecutor threadPool\n= ThreadPoolManager.newDaemonFixedThreadPool(\n10, Integer.MAX_VALUE, \"STATS_FETCH\", true);\nprivate final ColumnStatisticsCacheLoader columnStatisticsCacheLoader = new ColumnStatisticsCacheLoader();\nprivate final HistogramCacheLoader histogramCacheLoader = new HistogramCacheLoader();\nprivate final AsyncLoadingCache> columnStatisticsCache =\nCaffeine.newBuilder()\n.maximumSize(StatisticConstants.STATISTICS_RECORDS_CACHE_SIZE)\n.expireAfterAccess(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_VALID_DURATION_IN_HOURS))\n.refreshAfterWrite(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_REFRESH_INTERVAL))\n.executor(threadPool)\n.buildAsync(columnStatisticsCacheLoader);\nprivate final AsyncLoadingCache> histogramCache =\nCaffeine.newBuilder()\n.maximumSize(StatisticConstants.STATISTICS_RECORDS_CACHE_SIZE)\n.expireAfterAccess(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_VALID_DURATION_IN_HOURS))\n.refreshAfterWrite(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_REFRESH_INTERVAL))\n.executor(threadPool)\n.buildAsync(histogramCacheLoader);\n{\nthreadPool.submit(() -> {\nwhile (true) {\ntry {\ncolumnStatisticsCacheLoader.removeExpiredInProgressing();\nhistogramCacheLoader.removeExpiredInProgressing();\n} catch (Throwable t) {\n}\nThread.sleep(TimeUnit.MINUTES.toMillis(15));\n}\n});\n}\npublic ColumnStatistic getColumnStatistics(long tblId, String colName) {\nreturn getColumnStatistics(tblId, -1, colName).orElse(ColumnStatistic.UNKNOWN);\n}\npublic Optional getColumnStatistics(long tblId, long idxId, String colName) {\nConnectContext ctx = ConnectContext.get();\nif (ctx != null && ctx.getSessionVariable().internalSession) {\nreturn null;\n}\nStatisticsCacheKey k = new StatisticsCacheKey(tblId, idxId, colName);\ntry {\nCompletableFuture> f = columnStatisticsCache.get(k);\nif (f.isDone() && f.get() != null) {\nreturn f.get();\n}\n} catch (Exception e) {\nLOG.warn(\"Unexpected exception while returning ColumnStatistic\", e);\n}\nreturn Optional.empty();\n}\npublic Histogram getHistogram(long tblId, String colName) {\nreturn getHistogram(tblId, -1, colName).orElse(null);\n}\npublic Optional getHistogram(long tblId, long idxId, String colName) {\nConnectContext ctx = ConnectContext.get();\nif (ctx != null && ctx.getSessionVariable().internalSession) {\nreturn Optional.empty();\n}\nStatisticsCacheKey k = new StatisticsCacheKey(tblId, idxId, colName);\ntry {\nCompletableFuture> f = histogramCache.get(k);\nif (f.isDone() && f.get() != null) {\nreturn f.get();\n}\n} catch (Exception e) {\nLOG.warn(\"Unexpected exception while returning Histogram\", e);\n}\nreturn Optional.empty();\n}\npublic void 
eraseExpiredCache(long tblId, long idxId, String colName) {\ncolumnStatisticsCache.synchronous().invalidate(new StatisticsCacheKey(tblId, idxId, colName));\n}\npublic void updateCache(long tblId, long idxId, String colName, ColumnStatistic statistic) {\ncolumnStatisticsCache.synchronous().put(new StatisticsCacheKey(tblId, idxId, colName), Optional.of(statistic));\n}\npublic void refreshSync(long tblId, long idxId, String colName) {\ncolumnStatisticsCache.synchronous().refresh(new StatisticsCacheKey(tblId, idxId, colName));\n}\npublic void preHeat() {\nthreadPool.submit(this::doPreHeat);\n}\n}", + "context_after": "class StatisticsCache {\nprivate static final Logger LOG = LogManager.getLogger(StatisticsCache.class);\n/**\n* Use a standalone thread pool to avoid interference between this and any other jdk function\n* that use the thread of ForkJoinPool\n*/\nprivate final ThreadPoolExecutor threadPool\n= ThreadPoolManager.newDaemonFixedThreadPool(\n10, Integer.MAX_VALUE, \"STATS_FETCH\", true);\nprivate final ColumnStatisticsCacheLoader columnStatisticsCacheLoader = new ColumnStatisticsCacheLoader();\nprivate final HistogramCacheLoader histogramCacheLoader = new HistogramCacheLoader();\nprivate final AsyncLoadingCache> columnStatisticsCache =\nCaffeine.newBuilder()\n.maximumSize(StatisticConstants.STATISTICS_RECORDS_CACHE_SIZE)\n.expireAfterAccess(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_VALID_DURATION_IN_HOURS))\n.refreshAfterWrite(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_REFRESH_INTERVAL))\n.executor(threadPool)\n.buildAsync(columnStatisticsCacheLoader);\nprivate final AsyncLoadingCache> histogramCache =\nCaffeine.newBuilder()\n.maximumSize(StatisticConstants.STATISTICS_RECORDS_CACHE_SIZE)\n.expireAfterAccess(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_VALID_DURATION_IN_HOURS))\n.refreshAfterWrite(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_REFRESH_INTERVAL))\n.executor(threadPool)\n.buildAsync(histogramCacheLoader);\n{\nthreadPool.submit(() -> {\nwhile (true) {\ntry {\ncolumnStatisticsCacheLoader.removeExpiredInProgressing();\nhistogramCacheLoader.removeExpiredInProgressing();\n} catch (Throwable t) {\n}\nThread.sleep(TimeUnit.MINUTES.toMillis(15));\n}\n});\n}\npublic ColumnStatistic getColumnStatistics(long tblId, String colName) {\nreturn getColumnStatistics(tblId, -1, colName).orElse(ColumnStatistic.UNKNOWN);\n}\npublic Optional getColumnStatistics(long tblId, long idxId, String colName) {\nConnectContext ctx = ConnectContext.get();\nif (ctx != null && ctx.getSessionVariable().internalSession) {\nreturn Optional.empty();\n}\nStatisticsCacheKey k = new StatisticsCacheKey(tblId, idxId, colName);\ntry {\nCompletableFuture> f = columnStatisticsCache.get(k);\nif (f.isDone() && f.get() != null) {\nreturn f.get();\n}\n} catch (Exception e) {\nLOG.warn(\"Unexpected exception while returning ColumnStatistic\", e);\n}\nreturn Optional.empty();\n}\npublic Histogram getHistogram(long tblId, String colName) {\nreturn getHistogram(tblId, -1, colName).orElse(null);\n}\npublic Optional getHistogram(long tblId, long idxId, String colName) {\nConnectContext ctx = ConnectContext.get();\nif (ctx != null && ctx.getSessionVariable().internalSession) {\nreturn Optional.empty();\n}\nStatisticsCacheKey k = new StatisticsCacheKey(tblId, idxId, colName);\ntry {\nCompletableFuture> f = histogramCache.get(k);\nif (f.isDone() && f.get() != null) {\nreturn f.get();\n}\n} catch (Exception e) {\nLOG.warn(\"Unexpected exception while returning Histogram\", e);\n}\nreturn 
Optional.empty();\n}\npublic void eraseExpiredCache(long tblId, long idxId, String colName) {\ncolumnStatisticsCache.synchronous().invalidate(new StatisticsCacheKey(tblId, idxId, colName));\n}\npublic void updateColStatsCache(long tblId, long idxId, String colName, ColumnStatistic statistic) {\ncolumnStatisticsCache.synchronous().put(new StatisticsCacheKey(tblId, idxId, colName), Optional.of(statistic));\n}\npublic void refreshColStatsSync(long tblId, long idxId, String colName) {\ncolumnStatisticsCache.synchronous().refresh(new StatisticsCacheKey(tblId, idxId, colName));\n}\npublic void refreshHistogramSync(long tblId, long idxId, String colName) {\nhistogramCache.synchronous().refresh(new StatisticsCacheKey(tblId, idxId, colName));\n}\npublic void preHeat() {\nthreadPool.submit(this::doPreHeat);\n}\n}" + }, + { + "comment": "```suggestion throw new WebSocketServerException(\"Unable to obtain the connection from the Vert.x duplicated context\"); ```", + "method_body": "public Supplier connectionSupplier() {\nreturn new Supplier() {\n@Override\npublic Object get() {\nContext context = Vertx.currentContext();\nif (context != null && VertxContext.isDuplicatedContext(context)) {\nObject connection = context.getLocal(WEB_SOCKET_CONN_KEY);\nif (connection != null) {\nreturn connection;\n}\n}\nthrow new WebSocketServerException(\"Unable to obtain the connection from the Vertx duplicated context\");\n}\n};\n}", + "target_code": "throw new WebSocketServerException(\"Unable to obtain the connection from the Vertx duplicated context\");", + "method_body_after": "public Supplier connectionSupplier() {\nreturn new Supplier() {\n@Override\npublic Object get() {\nContext context = Vertx.currentContext();\nif (context != null && VertxContext.isDuplicatedContext(context)) {\nObject connection = context.getLocal(WEB_SOCKET_CONN_KEY);\nif (connection != null) {\nreturn connection;\n}\n}\nthrow new WebSocketServerException(\"Unable to obtain the connection from the Vert.x duplicated context\");\n}\n};\n}", + "context_before": "class WebSocketServerRecorder {\nprivate static final Logger LOG = Logger.getLogger(WebSocketServerRecorder.class);\nstatic final String WEB_SOCKET_CONN_KEY = WebSocketServerConnection.class.getName();\nprivate final WebSocketRuntimeConfig config;\npublic WebSocketServerRecorder(WebSocketRuntimeConfig config) {\nthis.config = config;\n}\npublic Handler createEndpointHandler(String endpointClass) {\nArcContainer container = Arc.container();\nConnectionManager connectionManager = container.instance(ConnectionManager.class).get();\nCodecs codecs = container.instance(Codecs.class).get();\nreturn new Handler() {\n@Override\npublic void handle(RoutingContext ctx) {\nFuture future = ctx.request().toWebSocket();\nfuture.onSuccess(ws -> {\nContext context = VertxCoreRecorder.getVertx().get().getOrCreateContext();\nWebSocketServerConnection connection = new WebSocketServerConnectionImpl(endpointClass, ws,\nconnectionManager, codecs, ctx);\nconnectionManager.add(endpointClass, connection);\nLOG.debugf(\"Connnected: %s\", connection);\nWebSocketSessionContext sessionContext = sessionContext(container);\nSessionContextState sessionContextState = sessionContext.initializeContextState();\nContextSupport contextSupport = new ContextSupport(connection, sessionContextState,\nsessionContext(container),\ncontainer.requestContext());\nWebSocketEndpoint endpoint = createEndpoint(endpointClass, context, connection, codecs, config,\ncontextSupport);\nBroadcastProcessor broadcastProcessor = 
endpoint.consumedMultiType() != null\n? BroadcastProcessor.create()\n: null;\nContext onOpenContext = ContextSupport.createNewDuplicatedContext(context, connection);\nonOpenContext.runOnContext(new Handler() {\n@Override\npublic void handle(Void event) {\nendpoint.onOpen().onComplete(r -> {\nif (r.succeeded()) {\nLOG.debugf(\"@OnOpen callback completed: %s\", connection);\nif (broadcastProcessor != null) {\nMulti multi = broadcastProcessor.onCancellation().call(connection::close);\nonOpenContext.runOnContext(new Handler() {\n@Override\npublic void handle(Void event) {\nendpoint.onMessage(multi).onComplete(r -> {\nif (r.succeeded()) {\nLOG.debugf(\"@OnMessage callback consuming Multi completed: %s\",\nconnection);\n} else {\nLOG.errorf(r.cause(),\n\"Unable to complete @OnMessage callback consuming Multi: %s\",\nconnection);\n}\n});\n}\n});\n}\n} else {\nLOG.errorf(r.cause(), \"Unable to complete @OnOpen callback: %s\", connection);\n}\n});\n}\n});\nif (broadcastProcessor == null) {\nmessageHandlers(connection, endpoint, ws, context, m -> {\nendpoint.onMessage(m).onComplete(r -> {\nif (r.succeeded()) {\nLOG.debugf(\"@OnMessage callback consumed binary message: %s\", connection);\n} else {\nLOG.errorf(r.cause(), \"Unable to consume binary message in @OnMessage callback: %s\",\nconnection);\n}\n});\n}, m -> {\nendpoint.onMessage(m).onComplete(r -> {\nif (r.succeeded()) {\nLOG.debugf(\"@OnMessage callback consumed text message: %s\", connection);\n} else {\nLOG.errorf(r.cause(), \"Unable to consume text message in @OnMessage callback: %s\",\nconnection);\n}\n});\n}, true);\n} else {\nmessageHandlers(connection, endpoint, ws, onOpenContext, m -> {\ncontextSupport.start();\nbroadcastProcessor.onNext(endpoint.decodeMultiItem(m));\nLOG.debugf(\"Binary message >> Multi: %s\", connection);\ncontextSupport.end(false);\n}, m -> {\ncontextSupport.start();\nbroadcastProcessor.onNext(endpoint.decodeMultiItem(m));\nLOG.debugf(\"Text message >> Multi: %s\", connection);\ncontextSupport.end(false);\n}, false);\n}\nws.closeHandler(new Handler() {\n@Override\npublic void handle(Void event) {\nContextSupport.createNewDuplicatedContext(context, connection).runOnContext(new Handler() {\n@Override\npublic void handle(Void event) {\nendpoint.onClose().onComplete(r -> {\nif (r.succeeded()) {\nLOG.debugf(\"@OnClose callback completed: %s\", connection);\n} else {\nLOG.errorf(r.cause(), \"Unable to complete @OnClose callback: %s\", connection);\n}\nconnectionManager.remove(endpointClass, connection);\n});\n}\n});\n}\n});\n});\n}\n};\n}\nprivate void messageHandlers(WebSocketServerConnection connection, WebSocketEndpoint endpoint, ServerWebSocket ws,\nContext context, Consumer binaryAction, Consumer textAction, boolean newDuplicatedContext) {\nif (endpoint.consumedMessageType() == MessageType.BINARY) {\nws.binaryMessageHandler(new Handler() {\n@Override\npublic void handle(Buffer message) {\nContext duplicatedContext = newDuplicatedContext\n? ContextSupport.createNewDuplicatedContext(context, connection)\n: context;\nduplicatedContext.runOnContext(new Handler() {\n@Override\npublic void handle(Void event) {\nbinaryAction.accept(message);\n}\n});\n}\n});\n} else if (endpoint.consumedMessageType() == MessageType.TEXT) {\nws.textMessageHandler(new Handler() {\n@Override\npublic void handle(String message) {\nContext duplicatedContext = newDuplicatedContext\n? 
ContextSupport.createNewDuplicatedContext(context, connection)\n: context;\nduplicatedContext.runOnContext(new Handler() {\n@Override\npublic void handle(Void event) {\ntextAction.accept(message);\n}\n});\n}\n});\n}\n}\nprivate WebSocketEndpoint createEndpoint(String endpointClassName, Context context, WebSocketServerConnection connection,\nCodecs codecs, WebSocketRuntimeConfig config, ContextSupport contextSupport) {\ntry {\nClassLoader cl = Thread.currentThread().getContextClassLoader();\nif (cl == null) {\ncl = WebSocketServerRecorder.class.getClassLoader();\n}\n@SuppressWarnings(\"unchecked\")\nClass endpointClazz = (Class) cl\n.loadClass(endpointClassName);\nWebSocketEndpoint endpoint = (WebSocketEndpoint) endpointClazz\n.getDeclaredConstructor(WebSocketServerConnection.class, Codecs.class,\nWebSocketRuntimeConfig.class, ContextSupport.class)\n.newInstance(connection, codecs, config, contextSupport);\nreturn endpoint;\n} catch (Exception e) {\nthrow new WebSocketServerException(\"Unable to create endpoint instance: \" + endpointClassName, e);\n}\n}\nprivate static WebSocketSessionContext sessionContext(ArcContainer container) {\nfor (InjectableContext injectableContext : container.getContexts(SessionScoped.class)) {\nif (WebSocketSessionContext.class.equals(injectableContext.getClass())) {\nreturn (WebSocketSessionContext) injectableContext;\n}\n}\nthrow new WebSocketServerException(\"CDI session context not registered\");\n}\n}", + "context_after": "class WebSocketServerRecorder {\nprivate static final Logger LOG = Logger.getLogger(WebSocketServerRecorder.class);\nstatic final String WEB_SOCKET_CONN_KEY = WebSocketServerConnection.class.getName();\nprivate final WebSocketsRuntimeConfig config;\npublic WebSocketServerRecorder(WebSocketsRuntimeConfig config) {\nthis.config = config;\n}\npublic Handler createEndpointHandler(String endpointClass) {\nArcContainer container = Arc.container();\nConnectionManager connectionManager = container.instance(ConnectionManager.class).get();\nCodecs codecs = container.instance(Codecs.class).get();\nreturn new Handler() {\n@Override\npublic void handle(RoutingContext ctx) {\nFuture future = ctx.request().toWebSocket();\nfuture.onSuccess(ws -> {\nContext context = VertxCoreRecorder.getVertx().get().getOrCreateContext();\nWebSocketServerConnection connection = new WebSocketServerConnectionImpl(endpointClass, ws,\nconnectionManager, codecs, ctx);\nconnectionManager.add(endpointClass, connection);\nLOG.debugf(\"Connnected: %s\", connection);\nWebSocketSessionContext sessionContext = sessionContext(container);\nSessionContextState sessionContextState = sessionContext.initializeContextState();\nContextSupport contextSupport = new ContextSupport(connection, sessionContextState,\nsessionContext(container),\ncontainer.requestContext());\nWebSocketEndpoint endpoint = createEndpoint(endpointClass, context, connection, codecs, config,\ncontextSupport);\nBroadcastProcessor broadcastProcessor = endpoint.consumedMultiType() != null\n? 
BroadcastProcessor.create()\n: null;\nContext onOpenContext = ContextSupport.createNewDuplicatedContext(context, connection);\nonOpenContext.runOnContext(new Handler() {\n@Override\npublic void handle(Void event) {\nendpoint.onOpen().onComplete(r -> {\nif (r.succeeded()) {\nLOG.debugf(\"@OnOpen callback completed: %s\", connection);\nif (broadcastProcessor != null) {\nMulti multi = broadcastProcessor.onCancellation().call(connection::close);\nonOpenContext.runOnContext(new Handler() {\n@Override\npublic void handle(Void event) {\nendpoint.onMessage(multi).onComplete(r -> {\nif (r.succeeded()) {\nLOG.debugf(\"@OnMessage callback consuming Multi completed: %s\",\nconnection);\n} else {\nLOG.errorf(r.cause(),\n\"Unable to complete @OnMessage callback consuming Multi: %s\",\nconnection);\n}\n});\n}\n});\n}\n} else {\nLOG.errorf(r.cause(), \"Unable to complete @OnOpen callback: %s\", connection);\n}\n});\n}\n});\nif (broadcastProcessor == null) {\nmessageHandlers(connection, endpoint, ws, context, m -> {\nendpoint.onMessage(m).onComplete(r -> {\nif (r.succeeded()) {\nLOG.debugf(\"@OnMessage callback consumed binary message: %s\", connection);\n} else {\nLOG.errorf(r.cause(), \"Unable to consume binary message in @OnMessage callback: %s\",\nconnection);\n}\n});\n}, m -> {\nendpoint.onMessage(m).onComplete(r -> {\nif (r.succeeded()) {\nLOG.debugf(\"@OnMessage callback consumed text message: %s\", connection);\n} else {\nLOG.errorf(r.cause(), \"Unable to consume text message in @OnMessage callback: %s\",\nconnection);\n}\n});\n}, true);\n} else {\nmessageHandlers(connection, endpoint, ws, onOpenContext, m -> {\ncontextSupport.start();\nbroadcastProcessor.onNext(endpoint.decodeMultiItem(m));\nLOG.debugf(\"Binary message >> Multi: %s\", connection);\ncontextSupport.end(false);\n}, m -> {\ncontextSupport.start();\nbroadcastProcessor.onNext(endpoint.decodeMultiItem(m));\nLOG.debugf(\"Text message >> Multi: %s\", connection);\ncontextSupport.end(false);\n}, false);\n}\nws.closeHandler(new Handler() {\n@Override\npublic void handle(Void event) {\nContextSupport.createNewDuplicatedContext(context, connection).runOnContext(new Handler() {\n@Override\npublic void handle(Void event) {\nendpoint.onClose().onComplete(r -> {\nif (r.succeeded()) {\nLOG.debugf(\"@OnClose callback completed: %s\", connection);\n} else {\nLOG.errorf(r.cause(), \"Unable to complete @OnClose callback: %s\", connection);\n}\nconnectionManager.remove(endpointClass, connection);\n});\n}\n});\n}\n});\n});\n}\n};\n}\nprivate void messageHandlers(WebSocketServerConnection connection, WebSocketEndpoint endpoint, ServerWebSocket ws,\nContext context, Consumer binaryAction, Consumer textAction, boolean newDuplicatedContext) {\nif (endpoint.consumedMessageType() == MessageType.BINARY) {\nws.binaryMessageHandler(new Handler() {\n@Override\npublic void handle(Buffer message) {\nContext duplicatedContext = newDuplicatedContext\n? ContextSupport.createNewDuplicatedContext(context, connection)\n: context;\nduplicatedContext.runOnContext(new Handler() {\n@Override\npublic void handle(Void event) {\nbinaryAction.accept(message);\n}\n});\n}\n});\n} else if (endpoint.consumedMessageType() == MessageType.TEXT) {\nws.textMessageHandler(new Handler() {\n@Override\npublic void handle(String message) {\nContext duplicatedContext = newDuplicatedContext\n? 
ContextSupport.createNewDuplicatedContext(context, connection)\n: context;\nduplicatedContext.runOnContext(new Handler() {\n@Override\npublic void handle(Void event) {\ntextAction.accept(message);\n}\n});\n}\n});\n}\n}\nprivate WebSocketEndpoint createEndpoint(String endpointClassName, Context context, WebSocketServerConnection connection,\nCodecs codecs, WebSocketsRuntimeConfig config, ContextSupport contextSupport) {\ntry {\nClassLoader cl = Thread.currentThread().getContextClassLoader();\nif (cl == null) {\ncl = WebSocketServerRecorder.class.getClassLoader();\n}\n@SuppressWarnings(\"unchecked\")\nClass endpointClazz = (Class) cl\n.loadClass(endpointClassName);\nWebSocketEndpoint endpoint = (WebSocketEndpoint) endpointClazz\n.getDeclaredConstructor(WebSocketServerConnection.class, Codecs.class,\nWebSocketsRuntimeConfig.class, ContextSupport.class)\n.newInstance(connection, codecs, config, contextSupport);\nreturn endpoint;\n} catch (Exception e) {\nthrow new WebSocketServerException(\"Unable to create endpoint instance: \" + endpointClassName, e);\n}\n}\nprivate static WebSocketSessionContext sessionContext(ArcContainer container) {\nfor (InjectableContext injectableContext : container.getContexts(SessionScoped.class)) {\nif (WebSocketSessionContext.class.equals(injectableContext.getClass())) {\nreturn (WebSocketSessionContext) injectableContext;\n}\n}\nthrow new WebSocketServerException(\"CDI session context not registered\");\n}\n}" + }, + { + "comment": "this isn't the same, since `locationIp` may or may not be assigned in the if condition", + "method_body": "private TelemetryItem exportRequest(SpanData span, long itemCount) {\nRequestTelemetryBuilder telemetryBuilder = RequestTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nAttributes attributes = span.getAttributes();\nlong startEpochNanos = span.getStartEpochNanos();\ntelemetryBuilder.setId(span.getSpanId());\nsetTime(telemetryBuilder, startEpochNanos);\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(attributes, telemetryBuilder);\naddLinks(telemetryBuilder, span.getLinks());\nString operationName = getOperationName(span);\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());\nString aiLegacyParentId = span.getAttributes().get(AiSemanticAttributes.LEGACY_PARENT_ID);\nif (aiLegacyParentId != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);\n} else if (span.getParentSpanContext().isValid()) {\ntelemetryBuilder.addTag(\nContextTagKeys.AI_OPERATION_PARENT_ID.toString(),\nspan.getParentSpanContext().getSpanId());\n}\nString aiLegacyRootId = span.getAttributes().get(AiSemanticAttributes.LEGACY_ROOT_ID);\nif (aiLegacyRootId != null) {\ntelemetryBuilder.addTag(\"ai_legacyRootID\", aiLegacyRootId);\n}\ntelemetryBuilder.setName(operationName);\ntelemetryBuilder.setDuration(\nFormattedDuration.fromNanos(span.getEndEpochNanos() - startEpochNanos));\ntelemetryBuilder.setSuccess(getSuccess(span));\nString httpUrl = getHttpUrlFromServerSpan(attributes);\nif (httpUrl != null) {\ntelemetryBuilder.setUrl(httpUrl);\n}\nLong httpStatusCode = getStableAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nif (httpStatusCode == null) {\nhttpStatusCode = attributes.get(SemanticAttributes.RPC_GRPC_STATUS_CODE);\n}\nif (httpStatusCode != null) 
{\ntelemetryBuilder.setResponseCode(Long.toString(httpStatusCode));\n} else {\ntelemetryBuilder.setResponseCode(\"0\");\n}\nString locationIp = getStableAttribute(attributes, SemanticAttributes.CLIENT_ADDRESS, SemanticAttributes.HTTP_CLIENT_IP);\nif (locationIp == null) {\nlocationIp = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);\n}\nif (locationIp != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_LOCATION_IP.toString(), locationIp);\n}\ntelemetryBuilder.setSource(getSource(attributes));\nString sessionId = attributes.get(AiSemanticAttributes.SESSION_ID);\nif (sessionId != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_SESSION_ID.toString(), sessionId);\n}\nString deviceOs = attributes.get(AiSemanticAttributes.DEVICE_OS);\nif (deviceOs != null) {\ntelemetryBuilder.addTag(AI_DEVICE_OS.toString(), deviceOs);\n}\nString deviceOsVersion = attributes.get(AiSemanticAttributes.DEVICE_OS_VERSION);\nif (deviceOsVersion != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_DEVICE_OS_VERSION.toString(), deviceOsVersion);\n}\nif (checkIsPreAggregatedStandardMetric(span)) {\ntelemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, \"True\");\n}\nLong enqueuedTime = attributes.get(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME);\nif (enqueuedTime != null) {\nlong timeSinceEnqueuedMillis =\nMath.max(\n0L, NANOSECONDS.toMillis(span.getStartEpochNanos()) - SECONDS.toMillis(enqueuedTime));\ntelemetryBuilder.addMeasurement(\"timeSinceEnqueued\", (double) timeSinceEnqueuedMillis);\n}\nLong timeSinceEnqueuedMillis = attributes.get(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS);\nif (timeSinceEnqueuedMillis != null) {\ntelemetryBuilder.addMeasurement(\"timeSinceEnqueued\", (double) timeSinceEnqueuedMillis);\n}\nreturn telemetryBuilder.build();\n}", + "target_code": "if (locationIp != null) {", + "method_body_after": "private TelemetryItem exportRequest(SpanData span, long itemCount) {\nRequestTelemetryBuilder telemetryBuilder = RequestTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nAttributes attributes = span.getAttributes();\nlong startEpochNanos = span.getStartEpochNanos();\ntelemetryBuilder.setId(span.getSpanId());\nsetTime(telemetryBuilder, startEpochNanos);\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(attributes, telemetryBuilder);\naddLinks(telemetryBuilder, span.getLinks());\nString operationName = getOperationName(span);\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());\nString aiLegacyParentId = span.getAttributes().get(AiSemanticAttributes.LEGACY_PARENT_ID);\nif (aiLegacyParentId != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);\n} else if (span.getParentSpanContext().isValid()) {\ntelemetryBuilder.addTag(\nContextTagKeys.AI_OPERATION_PARENT_ID.toString(),\nspan.getParentSpanContext().getSpanId());\n}\nString aiLegacyRootId = span.getAttributes().get(AiSemanticAttributes.LEGACY_ROOT_ID);\nif (aiLegacyRootId != null) {\ntelemetryBuilder.addTag(\"ai_legacyRootID\", aiLegacyRootId);\n}\ntelemetryBuilder.setName(operationName);\ntelemetryBuilder.setDuration(\nFormattedDuration.fromNanos(span.getEndEpochNanos() - startEpochNanos));\ntelemetryBuilder.setSuccess(getSuccess(span));\nString httpUrl = getHttpUrlFromServerSpan(attributes);\nif (httpUrl != null) {\ntelemetryBuilder.setUrl(httpUrl);\n}\nLong httpStatusCode = 
getStableOrOldAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nif (httpStatusCode == null) {\nhttpStatusCode = attributes.get(SemanticAttributes.RPC_GRPC_STATUS_CODE);\n}\nif (httpStatusCode != null) {\ntelemetryBuilder.setResponseCode(Long.toString(httpStatusCode));\n} else {\ntelemetryBuilder.setResponseCode(\"0\");\n}\nString locationIp = getStableOrOldAttribute(attributes, SemanticAttributes.CLIENT_ADDRESS, SemanticAttributes.HTTP_CLIENT_IP);\nif (locationIp == null) {\nlocationIp = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);\n}\nif (locationIp != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_LOCATION_IP.toString(), locationIp);\n}\ntelemetryBuilder.setSource(getSource(attributes));\nString sessionId = attributes.get(AiSemanticAttributes.SESSION_ID);\nif (sessionId != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_SESSION_ID.toString(), sessionId);\n}\nString deviceOs = attributes.get(AiSemanticAttributes.DEVICE_OS);\nif (deviceOs != null) {\ntelemetryBuilder.addTag(AI_DEVICE_OS.toString(), deviceOs);\n}\nString deviceOsVersion = attributes.get(AiSemanticAttributes.DEVICE_OS_VERSION);\nif (deviceOsVersion != null) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_DEVICE_OS_VERSION.toString(), deviceOsVersion);\n}\nif (checkIsPreAggregatedStandardMetric(span)) {\ntelemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, \"True\");\n}\nLong enqueuedTime = attributes.get(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME);\nif (enqueuedTime != null) {\nlong timeSinceEnqueuedMillis =\nMath.max(\n0L, NANOSECONDS.toMillis(span.getStartEpochNanos()) - SECONDS.toMillis(enqueuedTime));\ntelemetryBuilder.addMeasurement(\"timeSinceEnqueued\", (double) timeSinceEnqueuedMillis);\n}\nLong timeSinceEnqueuedMillis = attributes.get(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS);\nif (timeSinceEnqueuedMillis != null) {\ntelemetryBuilder.addMeasurement(\"timeSinceEnqueued\", (double) timeSinceEnqueuedMillis);\n}\nreturn telemetryBuilder.build();\n}", + "context_before": "class SpanDataMapper {\npublic static final String MS_PROCESSED_BY_METRIC_EXTRACTORS = \"_MS.ProcessedByMetricExtractors\";\nprivate static final Set SQL_DB_SYSTEMS =\nnew HashSet<>(\nasList(\nSemanticAttributes.DbSystemValues.DB2,\nSemanticAttributes.DbSystemValues.DERBY,\nSemanticAttributes.DbSystemValues.MARIADB,\nSemanticAttributes.DbSystemValues.MSSQL,\nSemanticAttributes.DbSystemValues.MYSQL,\nSemanticAttributes.DbSystemValues.ORACLE,\nSemanticAttributes.DbSystemValues.POSTGRESQL,\nSemanticAttributes.DbSystemValues.SQLITE,\nSemanticAttributes.DbSystemValues.OTHER_SQL,\nSemanticAttributes.DbSystemValues.HSQLDB,\nSemanticAttributes.DbSystemValues.H2));\nprivate static final String COSMOS = \"Cosmos\";\nprivate static final Mappings MAPPINGS;\nprivate static final ContextTagKeys AI_DEVICE_OS = ContextTagKeys.fromString(\"ai.device.os\");\nstatic {\nMappingsBuilder mappingsBuilder =\nnew MappingsBuilder()\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_NAMESPACE.getKey())\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION.getKey())\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME.getKey())\n.ignoreExact(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS.getKey())\n.ignoreExact(AiSemanticAttributes.KAFKA_OFFSET.getKey())\n.exact(\nSemanticAttributes.USER_AGENT_ORIGINAL.getKey(),\n(builder, value) -> {\nif (value instanceof String) {\nbuilder.addTag(\"ai.user.userAgent\", (String) 
value);\n}\n})\n.ignorePrefix(\"applicationinsights.internal.\")\n.prefix(\n\"http.request.header.\",\n(telemetryBuilder, key, value) -> {\nif (value instanceof List) {\ntelemetryBuilder.addProperty(key, Mappings.join((List) value));\n}\n})\n.prefix(\n\"http.response.header.\",\n(telemetryBuilder, key, value) -> {\nif (value instanceof List) {\ntelemetryBuilder.addProperty(key, Mappings.join((List) value));\n}\n});\napplyCommonTags(mappingsBuilder);\nMAPPINGS = mappingsBuilder.build();\n}\nprivate final boolean captureHttpServer4xxAsError;\nprivate final BiConsumer telemetryInitializer;\nprivate final BiPredicate eventSuppressor;\nprivate final BiPredicate shouldSuppress;\npublic SpanDataMapper(\nboolean captureHttpServer4xxAsError,\nBiConsumer telemetryInitializer,\nBiPredicate eventSuppressor,\nBiPredicate shouldSuppress) {\nthis.captureHttpServer4xxAsError = captureHttpServer4xxAsError;\nthis.telemetryInitializer = telemetryInitializer;\nthis.eventSuppressor = eventSuppressor;\nthis.shouldSuppress = shouldSuppress;\n}\npublic TelemetryItem map(SpanData span) {\nlong itemCount = getItemCount(span);\nreturn map(span, itemCount);\n}\npublic void map(SpanData span, Consumer consumer) {\nlong itemCount = getItemCount(span);\nTelemetryItem telemetryItem = map(span, itemCount);\nconsumer.accept(telemetryItem);\nexportEvents(\nspan,\ntelemetryItem.getTags().get(ContextTagKeys.AI_OPERATION_NAME.toString()),\nitemCount,\nconsumer);\n}\npublic TelemetryItem map(SpanData span, long itemCount) {\nif (RequestChecker.isRequest(span)) {\nreturn exportRequest(span, itemCount);\n} else {\nreturn exportRemoteDependency(span, span.getKind() == SpanKind.INTERNAL, itemCount);\n}\n}\nprivate static boolean checkIsPreAggregatedStandardMetric(SpanData span) {\nBoolean isPreAggregatedStandardMetric =\nspan.getAttributes().get(AiSemanticAttributes.IS_PRE_AGGREGATED);\nreturn isPreAggregatedStandardMetric != null && isPreAggregatedStandardMetric;\n}\nprivate TelemetryItem exportRemoteDependency(SpanData span, boolean inProc, long itemCount) {\nRemoteDependencyTelemetryBuilder telemetryBuilder = RemoteDependencyTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationTags(telemetryBuilder, span);\nsetTime(telemetryBuilder, span.getStartEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(span.getAttributes(), telemetryBuilder);\naddLinks(telemetryBuilder, span.getLinks());\ntelemetryBuilder.setId(span.getSpanId());\ntelemetryBuilder.setName(getDependencyName(span));\ntelemetryBuilder.setDuration(\nFormattedDuration.fromNanos(span.getEndEpochNanos() - span.getStartEpochNanos()));\ntelemetryBuilder.setSuccess(getSuccess(span));\nif (inProc) {\ntelemetryBuilder.setType(\"InProc\");\n} else {\napplySemanticConventions(telemetryBuilder, span);\n}\nif (checkIsPreAggregatedStandardMetric(span)) {\ntelemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, \"True\");\n}\nreturn telemetryBuilder.build();\n}\nprivate static final Set DEFAULT_HTTP_SPAN_NAMES =\nnew HashSet<>(\nasList(\"OPTIONS\", \"GET\", \"HEAD\", \"POST\", \"PUT\", \"DELETE\", \"TRACE\", \"CONNECT\", \"PATCH\"));\nprivate static String getDependencyName(SpanData span) {\nString name = span.getName();\nString method = getStableAttribute(span.getAttributes(), SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);\nif (method == null) {\nreturn name;\n}\nif (!DEFAULT_HTTP_SPAN_NAMES.contains(name)) {\nreturn name;\n}\nString url = 
getStableAttribute(span.getAttributes(), SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);\nif (url == null) {\nreturn name;\n}\nString path = UrlParser.getPath(url);\nif (path == null) {\nreturn name;\n}\nreturn path.isEmpty() ? method + \" /\" : method + \" \" + path;\n}\nprivate static void applySemanticConventions(\nRemoteDependencyTelemetryBuilder telemetryBuilder, SpanData span) {\nAttributes attributes = span.getAttributes();\nString httpMethod = getStableAttribute(attributes, SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);\nif (httpMethod != null) {\napplyHttpClientSpan(telemetryBuilder, attributes);\nreturn;\n}\nString rpcSystem = attributes.get(SemanticAttributes.RPC_SYSTEM);\nif (rpcSystem != null) {\napplyRpcClientSpan(telemetryBuilder, rpcSystem, attributes);\nreturn;\n}\nString dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);\nif (dbSystem == null) {\ndbSystem = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_TYPE);\n}\nif (dbSystem != null) {\napplyDatabaseClientSpan(telemetryBuilder, dbSystem, attributes);\nreturn;\n}\nString messagingSystem = getMessagingSystem(attributes);\nif (messagingSystem != null) {\napplyMessagingClientSpan(telemetryBuilder, span.getKind(), messagingSystem, attributes);\nreturn;\n}\nString target = getTargetOrDefault(attributes, Integer.MAX_VALUE, null);\nif (target != null) {\ntelemetryBuilder.setTarget(target);\nreturn;\n}\ntelemetryBuilder.setType(\"InProc\");\n}\n@Nullable\nprivate static String getMessagingSystem(Attributes attributes) {\nString azureNamespace = attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE);\nif (isAzureSdkMessaging(azureNamespace)) {\nreturn azureNamespace;\n}\nreturn attributes.get(SemanticAttributes.MESSAGING_SYSTEM);\n}\nprivate static void setOperationTags(AbstractTelemetryBuilder telemetryBuilder, SpanData span) {\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getParentSpanContext().getSpanId());\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nprivate static void setOperationId(AbstractTelemetryBuilder telemetryBuilder, String traceId) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), traceId);\n}\nprivate static void setOperationParentId(\nAbstractTelemetryBuilder telemetryBuilder, String parentSpanId) {\nif (SpanId.isValid(parentSpanId)) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);\n}\n}\nprivate static void setOperationName(\nAbstractTelemetryBuilder telemetryBuilder, Attributes attributes) {\nString operationName = attributes.get(AiSemanticAttributes.OPERATION_NAME);\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n}\n}\nprivate static void setOperationName(\nAbstractTelemetryBuilder telemetryBuilder, String operationName) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);\n}\nprivate static void applyHttpClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, Attributes attributes) {\nString httpUrl = getStableAttribute(attributes, SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);\nint defaultPort = getDefaultPortForHttpUrl(httpUrl);\nString target = getTargetOrDefault(attributes, defaultPort, \"Http\");\ntelemetryBuilder.setType(\"Http\");\ntelemetryBuilder.setTarget(target);\nLong httpStatusCode = getStableAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nif (httpStatusCode != null) 
{\ntelemetryBuilder.setResultCode(Long.toString(httpStatusCode));\n} else {\ntelemetryBuilder.setResultCode(\"0\");\n}\ntelemetryBuilder.setData(httpUrl);\n}\nprivate static void applyRpcClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, String rpcSystem, Attributes attributes) {\ntelemetryBuilder.setType(rpcSystem);\nString target = getTargetOrDefault(attributes, Integer.MAX_VALUE, rpcSystem);\ntelemetryBuilder.setTarget(target);\n}\nprivate static int getDefaultPortForHttpUrl(@Nullable String httpUrl) {\nif (httpUrl == null) {\nreturn Integer.MAX_VALUE;\n}\nif (httpUrl.startsWith(\"https:\nreturn 443;\n}\nif (httpUrl.startsWith(\"http:\nreturn 80;\n}\nreturn Integer.MAX_VALUE;\n}\npublic static String getTargetOrDefault(\nAttributes attributes, int defaultPort, String defaultTarget) {\nString target = getTargetOrNullStableSemconv(attributes, defaultPort);\nif (target != null) {\nreturn target;\n}\ntarget = getTargetOrNullOldSemconv(attributes, defaultPort);\nif (target != null) {\nreturn target;\n}\nreturn defaultTarget;\n}\n@Nullable\nprivate static String getTargetOrNullStableSemconv(Attributes attributes, int defaultPort) {\nString peerService = attributes.get(SemanticAttributes.PEER_SERVICE);\nif (peerService != null) {\nreturn peerService;\n}\nString host = attributes.get(SemanticAttributes.SERVER_ADDRESS);\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.SERVER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nreturn null;\n}\n@Nullable\nprivate static String getTargetOrNullOldSemconv(Attributes attributes, int defaultPort) {\nString peerService = attributes.get(SemanticAttributes.PEER_SERVICE);\nif (peerService != null) {\nreturn peerService;\n}\nString host = attributes.get(SemanticAttributes.NET_PEER_NAME);\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.NET_PEER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nhost = attributes.get(SemanticAttributes.NET_SOCK_PEER_NAME);\nif (host == null) {\nhost = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);\n}\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.NET_SOCK_PEER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nString httpUrl = attributes.get(SemanticAttributes.HTTP_URL);\nif (httpUrl != null) {\nreturn UrlParser.getTarget(httpUrl);\n}\nreturn null;\n}\nprivate static String getTarget(String host, @Nullable Long port, int defaultPort) {\nif (port != null && port != defaultPort) {\nreturn host + \":\" + port;\n} else {\nreturn host;\n}\n}\nprivate static void applyDatabaseClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, String dbSystem, Attributes attributes) {\nString dbStatement = attributes.get(SemanticAttributes.DB_STATEMENT);\nif (dbStatement == null) {\ndbStatement = attributes.get(SemanticAttributes.DB_OPERATION);\n}\nString type;\nif (SQL_DB_SYSTEMS.contains(dbSystem)) {\nif (dbSystem.equals(SemanticAttributes.DbSystemValues.MYSQL)) {\ntype = \"mysql\";\n} else if (dbSystem.equals(SemanticAttributes.DbSystemValues.POSTGRESQL)) {\ntype = \"postgresql\";\n} else {\ntype = \"SQL\";\n}\n} else if (dbSystem.equals(COSMOS)) {\ntype = \"Microsoft.DocumentDb\";\n} else {\ntype = dbSystem;\n}\ntelemetryBuilder.setType(type);\ntelemetryBuilder.setData(dbStatement);\nString target;\nString dbName;\nif (dbSystem.equals(COSMOS)) {\nString dbUrl = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_URL);\nif (dbUrl != null) {\ntarget = UrlParser.getTarget(dbUrl);\n} else {\ntarget = null;\n}\ndbName = 
attributes.get(AiSemanticAttributes.AZURE_SDK_DB_INSTANCE);\n} else {\ntarget = getTargetOrDefault(attributes, getDefaultPortForDbSystem(dbSystem), dbSystem);\ndbName = attributes.get(SemanticAttributes.DB_NAME);\n}\ntarget = nullAwareConcat(target, dbName, \" | \");\nif (target == null) {\ntarget = dbSystem;\n}\ntelemetryBuilder.setTarget(target);\n}\nprivate static void applyMessagingClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder,\nSpanKind spanKind,\nString messagingSystem,\nAttributes attributes) {\nif (spanKind == SpanKind.PRODUCER) {\ntelemetryBuilder.setType(\"Queue Message | \" + messagingSystem);\n} else {\ntelemetryBuilder.setType(messagingSystem);\n}\ntelemetryBuilder.setTarget(getMessagingTargetSource(attributes));\n}\nprivate static int getDefaultPortForDbSystem(String dbSystem) {\nswitch (dbSystem) {\ncase SemanticAttributes.DbSystemValues.MONGODB:\nreturn 27017;\ncase SemanticAttributes.DbSystemValues.CASSANDRA:\nreturn 9042;\ncase SemanticAttributes.DbSystemValues.REDIS:\nreturn 6379;\ncase SemanticAttributes.DbSystemValues.MARIADB:\ncase SemanticAttributes.DbSystemValues.MYSQL:\nreturn 3306;\ncase SemanticAttributes.DbSystemValues.MSSQL:\nreturn 1433;\ncase SemanticAttributes.DbSystemValues.DB2:\nreturn 50000;\ncase SemanticAttributes.DbSystemValues.ORACLE:\nreturn 1521;\ncase SemanticAttributes.DbSystemValues.H2:\nreturn 8082;\ncase SemanticAttributes.DbSystemValues.DERBY:\nreturn 1527;\ncase SemanticAttributes.DbSystemValues.POSTGRESQL:\nreturn 5432;\ndefault:\nreturn Integer.MAX_VALUE;\n}\n}\nprivate boolean getSuccess(SpanData span) {\nswitch (span.getStatus().getStatusCode()) {\ncase ERROR:\nreturn false;\ncase OK:\nreturn true;\ncase UNSET:\nif (captureHttpServer4xxAsError) {\nLong statusCode = getStableAttribute(span.getAttributes(), SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nreturn statusCode == null || statusCode < 400;\n}\nreturn true;\n}\nreturn true;\n}\n@Nullable\npublic static String getHttpUrlFromServerSpan(Attributes attributes) {\nString httpUrl = getHttpUrlFromServerSpanStableSemconv(attributes);\nif (httpUrl != null) {\nreturn httpUrl;\n}\nreturn getHttpUrlFromServerSpanOldSemconv(attributes);\n}\n@Nullable\nprivate static String getHttpUrlFromServerSpanStableSemconv(Attributes attributes) {\nString scheme = attributes.get(SemanticAttributes.URL_SCHEME);\nif (scheme == null) {\nreturn null;\n}\nString path = attributes.get(SemanticAttributes.URL_PATH);\nif (path == null) {\nreturn null;\n}\nString host = attributes.get(SemanticAttributes.SERVER_ADDRESS);\nif (host == null) {\nreturn null;\n}\nLong port = attributes.get(SemanticAttributes.SERVER_PORT);\nif (port != null && port > 0) {\nreturn scheme + \":\n}\nreturn scheme + \":\n}\n@Nullable\nprivate static String getHttpUrlFromServerSpanOldSemconv(Attributes attributes) {\nString httpUrl = attributes.get(SemanticAttributes.HTTP_URL);\nif (httpUrl != null) {\nreturn httpUrl;\n}\nString scheme = attributes.get(SemanticAttributes.HTTP_SCHEME);\nif (scheme == null) {\nreturn null;\n}\nString target = attributes.get(SemanticAttributes.HTTP_TARGET);\nif (target == null) {\nreturn null;\n}\nString host = attributes.get(SemanticAttributes.NET_HOST_NAME);\nLong port = attributes.get(SemanticAttributes.NET_HOST_PORT);\nif (port != null && port > 0) {\nreturn scheme + \":\n}\nreturn scheme + \":\n}\n@Nullable\nprivate static String getSource(Attributes attributes) {\nString source = attributes.get(AiSemanticAttributes.SPAN_SOURCE);\nif (source != null) 
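Editorial aside: the `applyDatabaseClientSpan` code in this record assembles the dependency target from host, port (kept only when it differs from the db system's default), and database name. A standalone sketch of that assembly, with `getTarget`/`nullAwareConcat` copied in shape from the record and hard-coded sample values standing in for span attributes:

```java
// Target = host[:port only if non-default] + " | " + dbName, falling back to
// the db system name when both parts are missing. Sample values are
// illustrative; the real code reads them from span attributes.
public class DbTargetSketch {
    static String getTarget(String host, Long port, int defaultPort) {
        if (port != null && port != defaultPort) {
            return host + ":" + port;   // non-default port is kept visible
        }
        return host;                    // default port is suppressed
    }

    static String nullAwareConcat(String s1, String s2, String sep) {
        if (s1 == null) return s2;
        if (s2 == null) return s1;
        return s1 + sep + s2;
    }

    public static void main(String[] args) {
        // postgresql's default port (5432) is suppressed; a custom one is kept.
        System.out.println(nullAwareConcat(getTarget("db.example", 5432L, 5432),
                "orders", " | "));  // db.example | orders
        System.out.println(nullAwareConcat(getTarget("db.example", 6432L, 5432),
                "orders", " | "));  // db.example:6432 | orders
    }
}
```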
{\nreturn source;\n}\nreturn getMessagingTargetSource(attributes);\n}\n@Nullable\nprivate static String getMessagingTargetSource(Attributes attributes) {\nif (isAzureSdkMessaging(attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE))) {\nString peerAddress = attributes.get(AiSemanticAttributes.AZURE_SDK_PEER_ADDRESS);\nif (peerAddress != null) {\nString destination = attributes.get(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION);\nreturn peerAddress + \"/\" + destination;\n}\n}\nString messagingSystem = getMessagingSystem(attributes);\nif (messagingSystem == null) {\nreturn null;\n}\nString source =\nnullAwareConcat(\ngetTargetOrNullOldSemconv(attributes, Integer.MAX_VALUE),\nattributes.get(SemanticAttributes.MESSAGING_DESTINATION_NAME),\n\"/\");\nif (source != null) {\nreturn source;\n}\nreturn messagingSystem;\n}\nprivate static boolean isAzureSdkMessaging(String messagingSystem) {\nreturn \"Microsoft.EventHub\".equals(messagingSystem)\n|| \"Microsoft.ServiceBus\".equals(messagingSystem);\n}\nprivate static String getOperationName(SpanData span) {\nString operationName = span.getAttributes().get(AiSemanticAttributes.OPERATION_NAME);\nif (operationName != null) {\nreturn operationName;\n}\nreturn span.getName();\n}\nprivate static String nullAwareConcat(\n@Nullable String str1, @Nullable String str2, String separator) {\nif (str1 == null) {\nreturn str2;\n}\nif (str2 == null) {\nreturn str1;\n}\nreturn str1 + separator + str2;\n}\nprivate void exportEvents(\nSpanData span,\n@Nullable String operationName,\nlong itemCount,\nConsumer consumer) {\nfor (EventData event : span.getEvents()) {\nString instrumentationScopeName = span.getInstrumentationScopeInfo().getName();\nif (eventSuppressor.test(event, instrumentationScopeName)) {\ncontinue;\n}\nif (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null\n|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {\nSpanContext parentSpanContext = span.getParentSpanContext();\nif (!parentSpanContext.isValid() || parentSpanContext.isRemote()) {\nString stacktrace = event.getAttributes().get(SemanticAttributes.EXCEPTION_STACKTRACE);\nif (stacktrace != null && !shouldSuppress.test(span, event)) {\nconsumer.accept(\ncreateExceptionTelemetryItem(stacktrace, span, operationName, itemCount));\n}\n}\nreturn;\n}\nMessageTelemetryBuilder telemetryBuilder = MessageTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getSpanId());\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n} else {\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nsetTime(telemetryBuilder, event.getEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(event.getAttributes(), telemetryBuilder);\ntelemetryBuilder.setMessage(event.getName());\nconsumer.accept(telemetryBuilder.build());\n}\n}\nprivate TelemetryItem createExceptionTelemetryItem(\nString errorStack, SpanData span, @Nullable String operationName, long itemCount) {\nExceptionTelemetryBuilder telemetryBuilder = ExceptionTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getSpanId());\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n} else {\nsetOperationName(telemetryBuilder, 
span.getAttributes());\n}\nsetTime(telemetryBuilder, span.getEndEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(span.getAttributes(), telemetryBuilder);\ntelemetryBuilder.setExceptions(Exceptions.minimalParse(errorStack));\nreturn telemetryBuilder.build();\n}\npublic static T getStableAttribute(Attributes attributes, AttributeKey stable, AttributeKey old) {\nT value = attributes.get(stable);\nif (value != null) {\nreturn value;\n}\nreturn attributes.get(old);\n}\nprivate static void setTime(AbstractTelemetryBuilder telemetryBuilder, long epochNanos) {\ntelemetryBuilder.setTime(FormattedTime.offSetDateTimeFromEpochNanos(epochNanos));\n}\nprivate static void setItemCount(AbstractTelemetryBuilder telemetryBuilder, long itemCount) {\nif (itemCount != 1) {\ntelemetryBuilder.setSampleRate(100.0f / itemCount);\n}\n}\nprivate static long getItemCount(SpanData span) {\nLong itemCount = span.getAttributes().get(AiSemanticAttributes.ITEM_COUNT);\nreturn itemCount == null ? 1 : itemCount;\n}\nprivate static void addLinks(AbstractTelemetryBuilder telemetryBuilder, List links) {\nif (links.isEmpty()) {\nreturn;\n}\nStringBuilder sb = new StringBuilder();\nsb.append(\"[\");\nboolean first = true;\nfor (LinkData link : links) {\nif (!first) {\nsb.append(\",\");\n}\nsb.append(\"{\\\"operation_Id\\\":\\\"\");\nsb.append(link.getSpanContext().getTraceId());\nsb.append(\"\\\",\\\"id\\\":\\\"\");\nsb.append(link.getSpanContext().getSpanId());\nsb.append(\"\\\"}\");\nfirst = false;\n}\nsb.append(\"]\");\ntelemetryBuilder.addProperty(\"_MS.links\", sb.toString());\n}\nstatic void applyCommonTags(MappingsBuilder mappingsBuilder) {\nmappingsBuilder\n.exact(\nSemanticAttributes.ENDUSER_ID.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_USER_ID.toString(), (String) value);\n}\n})\n.exact(\nAiSemanticAttributes.PREVIEW_APPLICATION_VERSION.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(\nContextTagKeys.AI_APPLICATION_VER.toString(), (String) value);\n}\n});\napplyConnectionStringAndRoleNameOverrides(mappingsBuilder);\n}\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger connectionStringAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"connectionStringOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger roleNameAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"roleNameOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger roleInstanceAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. 
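Editorial aside: both method bodies in this record hinge on the stable-then-old semantic-convention lookup (`getStableAttribute`, renamed `getStableOrOldAttribute` in the after-version): try the current OpenTelemetry key first, fall back to the deprecated one. A minimal sketch of that helper over a plain `Map`, since the real `AttributeKey`/`Attributes` types are not needed to show the fallback order:

```java
// Stable-first attribute lookup: try the current semantic-convention key,
// fall back to the deprecated one. A Map stands in for Attributes/AttributeKey.
import java.util.HashMap;
import java.util.Map;

public class SemconvFallbackSketch {
    static <T> T getStableOrOldAttribute(Map<String, T> attributes, String stable, String old) {
        T value = attributes.get(stable);
        if (value != null) {
            return value;
        }
        return attributes.get(old);
    }

    public static void main(String[] args) {
        Map<String, Long> attrs = new HashMap<>();
        attrs.put("http.status_code", 200L); // only the old key is present
        System.out.println(getStableOrOldAttribute(
                attrs, "http.response.status_code", "http.status_code")); // 200
    }
}
```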
Please reach out to\"\n+ \" https:\n+ \" case for this.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger instrumentationKeyAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"connectionStringOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nstatic void applyConnectionStringAndRoleNameOverrides(MappingsBuilder mappingsBuilder) {\nmappingsBuilder\n.exact(\nAiSemanticAttributes.INTERNAL_CONNECTION_STRING.getKey(),\n(telemetryBuilder, value) -> {\ntelemetryBuilder.setConnectionString(ConnectionString.parse((String) value));\n})\n.exact(\nAiSemanticAttributes.INTERNAL_ROLE_NAME.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_CLOUD_ROLE.toString(), (String) value);\n}\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey(),\n(telemetryBuilder, value) -> {\nconnectionStringAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey(),\n(telemetryBuilder, value) -> {\nroleNameAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey(),\n(telemetryBuilder, value) -> {\nroleInstanceAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey(),\n(telemetryBuilder, value) -> {\ninstrumentationKeyAttributeNoLongerSupported.recordWarning();\n});\n}\n}", + "context_after": "class SpanDataMapper {\npublic static final String MS_PROCESSED_BY_METRIC_EXTRACTORS = \"_MS.ProcessedByMetricExtractors\";\nprivate static final Set SQL_DB_SYSTEMS =\nnew HashSet<>(\nasList(\nSemanticAttributes.DbSystemValues.DB2,\nSemanticAttributes.DbSystemValues.DERBY,\nSemanticAttributes.DbSystemValues.MARIADB,\nSemanticAttributes.DbSystemValues.MSSQL,\nSemanticAttributes.DbSystemValues.MYSQL,\nSemanticAttributes.DbSystemValues.ORACLE,\nSemanticAttributes.DbSystemValues.POSTGRESQL,\nSemanticAttributes.DbSystemValues.SQLITE,\nSemanticAttributes.DbSystemValues.OTHER_SQL,\nSemanticAttributes.DbSystemValues.HSQLDB,\nSemanticAttributes.DbSystemValues.H2));\nprivate static final String COSMOS = \"Cosmos\";\nprivate static final Mappings MAPPINGS;\nprivate static final ContextTagKeys AI_DEVICE_OS = ContextTagKeys.fromString(\"ai.device.os\");\nstatic {\nMappingsBuilder mappingsBuilder =\nnew MappingsBuilder(SPAN)\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_NAMESPACE.getKey())\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION.getKey())\n.ignoreExact(AiSemanticAttributes.AZURE_SDK_ENQUEUED_TIME.getKey())\n.ignoreExact(AiSemanticAttributes.KAFKA_RECORD_QUEUE_TIME_MS.getKey())\n.ignoreExact(AiSemanticAttributes.KAFKA_OFFSET.getKey())\n.exact(\nSemanticAttributes.USER_AGENT_ORIGINAL.getKey(),\n(builder, value) -> {\nif (value instanceof String) {\nbuilder.addTag(\"ai.user.userAgent\", (String) value);\n}\n})\n.ignorePrefix(\"applicationinsights.internal.\")\n.prefix(\n\"http.request.header.\",\n(telemetryBuilder, key, value) -> {\nif (value instanceof List) {\ntelemetryBuilder.addProperty(key, Mappings.join((List) value));\n}\n})\n.prefix(\n\"http.response.header.\",\n(telemetryBuilder, key, value) -> {\nif (value instanceof List) {\ntelemetryBuilder.addProperty(key, 
Mappings.join((List) value));\n}\n});\napplyCommonTags(mappingsBuilder);\nMAPPINGS = mappingsBuilder.build();\n}\nprivate final boolean captureHttpServer4xxAsError;\nprivate final BiConsumer telemetryInitializer;\nprivate final BiPredicate eventSuppressor;\nprivate final BiPredicate shouldSuppress;\npublic SpanDataMapper(\nboolean captureHttpServer4xxAsError,\nBiConsumer telemetryInitializer,\nBiPredicate eventSuppressor,\nBiPredicate shouldSuppress) {\nthis.captureHttpServer4xxAsError = captureHttpServer4xxAsError;\nthis.telemetryInitializer = telemetryInitializer;\nthis.eventSuppressor = eventSuppressor;\nthis.shouldSuppress = shouldSuppress;\n}\npublic TelemetryItem map(SpanData span) {\nlong itemCount = getItemCount(span);\nreturn map(span, itemCount);\n}\npublic void map(SpanData span, Consumer consumer) {\nlong itemCount = getItemCount(span);\nTelemetryItem telemetryItem = map(span, itemCount);\nconsumer.accept(telemetryItem);\nexportEvents(\nspan,\ntelemetryItem.getTags().get(ContextTagKeys.AI_OPERATION_NAME.toString()),\nitemCount,\nconsumer);\n}\npublic TelemetryItem map(SpanData span, long itemCount) {\nif (RequestChecker.isRequest(span)) {\nreturn exportRequest(span, itemCount);\n} else {\nreturn exportRemoteDependency(span, span.getKind() == SpanKind.INTERNAL, itemCount);\n}\n}\nprivate static boolean checkIsPreAggregatedStandardMetric(SpanData span) {\nBoolean isPreAggregatedStandardMetric =\nspan.getAttributes().get(AiSemanticAttributes.IS_PRE_AGGREGATED);\nreturn isPreAggregatedStandardMetric != null && isPreAggregatedStandardMetric;\n}\nprivate TelemetryItem exportRemoteDependency(SpanData span, boolean inProc, long itemCount) {\nRemoteDependencyTelemetryBuilder telemetryBuilder = RemoteDependencyTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationTags(telemetryBuilder, span);\nsetTime(telemetryBuilder, span.getStartEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(span.getAttributes(), telemetryBuilder);\naddLinks(telemetryBuilder, span.getLinks());\ntelemetryBuilder.setId(span.getSpanId());\ntelemetryBuilder.setName(getDependencyName(span));\ntelemetryBuilder.setDuration(\nFormattedDuration.fromNanos(span.getEndEpochNanos() - span.getStartEpochNanos()));\ntelemetryBuilder.setSuccess(getSuccess(span));\nif (inProc) {\ntelemetryBuilder.setType(\"InProc\");\n} else {\napplySemanticConventions(telemetryBuilder, span);\n}\nif (checkIsPreAggregatedStandardMetric(span)) {\ntelemetryBuilder.addProperty(MS_PROCESSED_BY_METRIC_EXTRACTORS, \"True\");\n}\nreturn telemetryBuilder.build();\n}\nprivate static final Set DEFAULT_HTTP_SPAN_NAMES =\nnew HashSet<>(\nasList(\"OPTIONS\", \"GET\", \"HEAD\", \"POST\", \"PUT\", \"DELETE\", \"TRACE\", \"CONNECT\", \"PATCH\"));\nprivate static String getDependencyName(SpanData span) {\nString name = span.getName();\nString method = getStableOrOldAttribute(span.getAttributes(), SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);\nif (method == null) {\nreturn name;\n}\nif (!DEFAULT_HTTP_SPAN_NAMES.contains(name)) {\nreturn name;\n}\nString url = getStableOrOldAttribute(span.getAttributes(), SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);\nif (url == null) {\nreturn name;\n}\nString path = UrlParser.getPath(url);\nif (path == null) {\nreturn name;\n}\nreturn path.isEmpty() ? 
method + \" /\" : method + \" \" + path;\n}\nprivate static void applySemanticConventions(\nRemoteDependencyTelemetryBuilder telemetryBuilder, SpanData span) {\nAttributes attributes = span.getAttributes();\nString httpMethod = getStableOrOldAttribute(attributes, SemanticAttributes.HTTP_REQUEST_METHOD, SemanticAttributes.HTTP_METHOD);\nif (httpMethod != null) {\napplyHttpClientSpan(telemetryBuilder, attributes);\nreturn;\n}\nString rpcSystem = attributes.get(SemanticAttributes.RPC_SYSTEM);\nif (rpcSystem != null) {\napplyRpcClientSpan(telemetryBuilder, rpcSystem, attributes);\nreturn;\n}\nString dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);\nif (dbSystem == null) {\ndbSystem = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_TYPE);\n}\nif (dbSystem != null) {\napplyDatabaseClientSpan(telemetryBuilder, dbSystem, attributes);\nreturn;\n}\nString messagingSystem = getMessagingSystem(attributes);\nif (messagingSystem != null) {\napplyMessagingClientSpan(telemetryBuilder, span.getKind(), messagingSystem, attributes);\nreturn;\n}\nString target = getTargetOrDefault(attributes, Integer.MAX_VALUE, null);\nif (target != null) {\ntelemetryBuilder.setTarget(target);\nreturn;\n}\ntelemetryBuilder.setType(\"InProc\");\n}\n@Nullable\nprivate static String getMessagingSystem(Attributes attributes) {\nString azureNamespace = attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE);\nif (isAzureSdkMessaging(azureNamespace)) {\nreturn azureNamespace;\n}\nreturn attributes.get(SemanticAttributes.MESSAGING_SYSTEM);\n}\nprivate static void setOperationTags(AbstractTelemetryBuilder telemetryBuilder, SpanData span) {\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getParentSpanContext().getSpanId());\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nprivate static void setOperationId(AbstractTelemetryBuilder telemetryBuilder, String traceId) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_ID.toString(), traceId);\n}\nprivate static void setOperationParentId(\nAbstractTelemetryBuilder telemetryBuilder, String parentSpanId) {\nif (SpanId.isValid(parentSpanId)) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);\n}\n}\nprivate static void setOperationName(\nAbstractTelemetryBuilder telemetryBuilder, Attributes attributes) {\nString operationName = attributes.get(AiSemanticAttributes.OPERATION_NAME);\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n}\n}\nprivate static void setOperationName(\nAbstractTelemetryBuilder telemetryBuilder, String operationName) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_OPERATION_NAME.toString(), operationName);\n}\nprivate static void applyHttpClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, Attributes attributes) {\nString httpUrl = getStableOrOldAttribute(attributes, SemanticAttributes.URL_FULL, SemanticAttributes.HTTP_URL);\nint defaultPort = getDefaultPortForHttpUrl(httpUrl);\nString target = getTargetOrDefault(attributes, defaultPort, \"Http\");\ntelemetryBuilder.setType(\"Http\");\ntelemetryBuilder.setTarget(target);\nLong httpStatusCode = getStableOrOldAttribute(attributes, SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nif (httpStatusCode != null) {\ntelemetryBuilder.setResultCode(Long.toString(httpStatusCode));\n} else {\ntelemetryBuilder.setResultCode(\"0\");\n}\ntelemetryBuilder.setData(httpUrl);\n}\nprivate static void 
applyRpcClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, String rpcSystem, Attributes attributes) {\ntelemetryBuilder.setType(rpcSystem);\nString target = getTargetOrDefault(attributes, Integer.MAX_VALUE, rpcSystem);\ntelemetryBuilder.setTarget(target);\n}\nprivate static int getDefaultPortForHttpUrl(@Nullable String httpUrl) {\nif (httpUrl == null) {\nreturn Integer.MAX_VALUE;\n}\nif (httpUrl.startsWith(\"https:\nreturn 443;\n}\nif (httpUrl.startsWith(\"http:\nreturn 80;\n}\nreturn Integer.MAX_VALUE;\n}\npublic static String getTargetOrDefault(\nAttributes attributes, int defaultPort, String defaultTarget) {\nString target = getTargetOrNullStableSemconv(attributes, defaultPort);\nif (target != null) {\nreturn target;\n}\ntarget = getTargetOrNullOldSemconv(attributes, defaultPort);\nif (target != null) {\nreturn target;\n}\nreturn defaultTarget;\n}\n@Nullable\nprivate static String getTargetOrNullStableSemconv(Attributes attributes, int defaultPort) {\nString peerService = attributes.get(SemanticAttributes.PEER_SERVICE);\nif (peerService != null) {\nreturn peerService;\n}\nString host = attributes.get(SemanticAttributes.SERVER_ADDRESS);\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.SERVER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nreturn null;\n}\n@Nullable\nprivate static String getTargetOrNullOldSemconv(Attributes attributes, int defaultPort) {\nString peerService = attributes.get(SemanticAttributes.PEER_SERVICE);\nif (peerService != null) {\nreturn peerService;\n}\nString host = attributes.get(SemanticAttributes.NET_PEER_NAME);\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.NET_PEER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nhost = attributes.get(SemanticAttributes.NET_SOCK_PEER_NAME);\nif (host == null) {\nhost = attributes.get(SemanticAttributes.NET_SOCK_PEER_ADDR);\n}\nif (host != null) {\nLong port = attributes.get(SemanticAttributes.NET_SOCK_PEER_PORT);\nreturn getTarget(host, port, defaultPort);\n}\nString httpUrl = attributes.get(SemanticAttributes.HTTP_URL);\nif (httpUrl != null) {\nreturn UrlParser.getTarget(httpUrl);\n}\nreturn null;\n}\nprivate static String getTarget(String host, @Nullable Long port, int defaultPort) {\nif (port != null && port != defaultPort) {\nreturn host + \":\" + port;\n} else {\nreturn host;\n}\n}\nprivate static void applyDatabaseClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder, String dbSystem, Attributes attributes) {\nString dbStatement = attributes.get(SemanticAttributes.DB_STATEMENT);\nif (dbStatement == null) {\ndbStatement = attributes.get(SemanticAttributes.DB_OPERATION);\n}\nString type;\nif (SQL_DB_SYSTEMS.contains(dbSystem)) {\nif (dbSystem.equals(SemanticAttributes.DbSystemValues.MYSQL)) {\ntype = \"mysql\";\n} else if (dbSystem.equals(SemanticAttributes.DbSystemValues.POSTGRESQL)) {\ntype = \"postgresql\";\n} else {\ntype = \"SQL\";\n}\n} else if (dbSystem.equals(COSMOS)) {\ntype = \"Microsoft.DocumentDb\";\n} else {\ntype = dbSystem;\n}\ntelemetryBuilder.setType(type);\ntelemetryBuilder.setData(dbStatement);\nString target;\nString dbName;\nif (dbSystem.equals(COSMOS)) {\nString dbUrl = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_URL);\nif (dbUrl != null) {\ntarget = UrlParser.getTarget(dbUrl);\n} else {\ntarget = null;\n}\ndbName = attributes.get(AiSemanticAttributes.AZURE_SDK_DB_INSTANCE);\n} else {\ntarget = getTargetOrDefault(attributes, getDefaultPortForDbSystem(dbSystem), dbSystem);\ndbName = 
attributes.get(SemanticAttributes.DB_NAME);\n}\ntarget = nullAwareConcat(target, dbName, \" | \");\nif (target == null) {\ntarget = dbSystem;\n}\ntelemetryBuilder.setTarget(target);\n}\nprivate static void applyMessagingClientSpan(\nRemoteDependencyTelemetryBuilder telemetryBuilder,\nSpanKind spanKind,\nString messagingSystem,\nAttributes attributes) {\nif (spanKind == SpanKind.PRODUCER) {\ntelemetryBuilder.setType(\"Queue Message | \" + messagingSystem);\n} else {\ntelemetryBuilder.setType(messagingSystem);\n}\ntelemetryBuilder.setTarget(getMessagingTargetSource(attributes));\n}\nprivate static int getDefaultPortForDbSystem(String dbSystem) {\nswitch (dbSystem) {\ncase SemanticAttributes.DbSystemValues.MONGODB:\nreturn 27017;\ncase SemanticAttributes.DbSystemValues.CASSANDRA:\nreturn 9042;\ncase SemanticAttributes.DbSystemValues.REDIS:\nreturn 6379;\ncase SemanticAttributes.DbSystemValues.MARIADB:\ncase SemanticAttributes.DbSystemValues.MYSQL:\nreturn 3306;\ncase SemanticAttributes.DbSystemValues.MSSQL:\nreturn 1433;\ncase SemanticAttributes.DbSystemValues.DB2:\nreturn 50000;\ncase SemanticAttributes.DbSystemValues.ORACLE:\nreturn 1521;\ncase SemanticAttributes.DbSystemValues.H2:\nreturn 8082;\ncase SemanticAttributes.DbSystemValues.DERBY:\nreturn 1527;\ncase SemanticAttributes.DbSystemValues.POSTGRESQL:\nreturn 5432;\ndefault:\nreturn Integer.MAX_VALUE;\n}\n}\nprivate boolean getSuccess(SpanData span) {\nswitch (span.getStatus().getStatusCode()) {\ncase ERROR:\nreturn false;\ncase OK:\nreturn true;\ncase UNSET:\nif (captureHttpServer4xxAsError) {\nLong statusCode = getStableOrOldAttribute(span.getAttributes(), SemanticAttributes.HTTP_RESPONSE_STATUS_CODE, SemanticAttributes.HTTP_STATUS_CODE);\nreturn statusCode == null || statusCode < 400;\n}\nreturn true;\n}\nreturn true;\n}\n@Nullable\npublic static String getHttpUrlFromServerSpan(Attributes attributes) {\nString httpUrl = getHttpUrlFromServerSpanStableSemconv(attributes);\nif (httpUrl != null) {\nreturn httpUrl;\n}\nreturn getHttpUrlFromServerSpanOldSemconv(attributes);\n}\n@Nullable\nprivate static String getHttpUrlFromServerSpanStableSemconv(Attributes attributes) {\nString scheme = attributes.get(SemanticAttributes.URL_SCHEME);\nif (scheme == null) {\nreturn null;\n}\nString path = attributes.get(SemanticAttributes.URL_PATH);\nif (path == null) {\nreturn null;\n}\nString host = attributes.get(SemanticAttributes.SERVER_ADDRESS);\nif (host == null) {\nreturn null;\n}\nLong port = attributes.get(SemanticAttributes.SERVER_PORT);\nif (port != null && port > 0) {\nreturn scheme + \":\n}\nreturn scheme + \":\n}\n@Nullable\nprivate static String getHttpUrlFromServerSpanOldSemconv(Attributes attributes) {\nString httpUrl = attributes.get(SemanticAttributes.HTTP_URL);\nif (httpUrl != null) {\nreturn httpUrl;\n}\nString scheme = attributes.get(SemanticAttributes.HTTP_SCHEME);\nif (scheme == null) {\nreturn null;\n}\nString target = attributes.get(SemanticAttributes.HTTP_TARGET);\nif (target == null) {\nreturn null;\n}\nString host = attributes.get(SemanticAttributes.NET_HOST_NAME);\nLong port = attributes.get(SemanticAttributes.NET_HOST_PORT);\nif (port != null && port > 0) {\nreturn scheme + \":\n}\nreturn scheme + \":\n}\n@Nullable\nprivate static String getSource(Attributes attributes) {\nString source = attributes.get(AiSemanticAttributes.SPAN_SOURCE);\nif (source != null) {\nreturn source;\n}\nreturn getMessagingTargetSource(attributes);\n}\n@Nullable\nprivate static String getMessagingTargetSource(Attributes attributes) {\nif 
(isAzureSdkMessaging(attributes.get(AiSemanticAttributes.AZURE_SDK_NAMESPACE))) {\nString peerAddress = attributes.get(AiSemanticAttributes.AZURE_SDK_PEER_ADDRESS);\nif (peerAddress != null) {\nString destination = attributes.get(AiSemanticAttributes.AZURE_SDK_MESSAGE_BUS_DESTINATION);\nreturn peerAddress + \"/\" + destination;\n}\n}\nString messagingSystem = getMessagingSystem(attributes);\nif (messagingSystem == null) {\nreturn null;\n}\nString source =\nnullAwareConcat(\ngetTargetOrNullOldSemconv(attributes, Integer.MAX_VALUE),\nattributes.get(SemanticAttributes.MESSAGING_DESTINATION_NAME),\n\"/\");\nif (source != null) {\nreturn source;\n}\nreturn messagingSystem;\n}\nprivate static boolean isAzureSdkMessaging(String messagingSystem) {\nreturn \"Microsoft.EventHub\".equals(messagingSystem)\n|| \"Microsoft.ServiceBus\".equals(messagingSystem);\n}\nprivate static String getOperationName(SpanData span) {\nString operationName = span.getAttributes().get(AiSemanticAttributes.OPERATION_NAME);\nif (operationName != null) {\nreturn operationName;\n}\nreturn span.getName();\n}\nprivate static String nullAwareConcat(\n@Nullable String str1, @Nullable String str2, String separator) {\nif (str1 == null) {\nreturn str2;\n}\nif (str2 == null) {\nreturn str1;\n}\nreturn str1 + separator + str2;\n}\nprivate void exportEvents(\nSpanData span,\n@Nullable String operationName,\nlong itemCount,\nConsumer consumer) {\nfor (EventData event : span.getEvents()) {\nString instrumentationScopeName = span.getInstrumentationScopeInfo().getName();\nif (eventSuppressor.test(event, instrumentationScopeName)) {\ncontinue;\n}\nif (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null\n|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {\nSpanContext parentSpanContext = span.getParentSpanContext();\nif (!parentSpanContext.isValid() || parentSpanContext.isRemote()) {\nString stacktrace = event.getAttributes().get(SemanticAttributes.EXCEPTION_STACKTRACE);\nif (stacktrace != null && !shouldSuppress.test(span, event)) {\nconsumer.accept(\ncreateExceptionTelemetryItem(stacktrace, span, operationName, itemCount));\n}\n}\nreturn;\n}\nMessageTelemetryBuilder telemetryBuilder = MessageTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getSpanId());\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n} else {\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nsetTime(telemetryBuilder, event.getEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(event.getAttributes(), telemetryBuilder);\ntelemetryBuilder.setMessage(event.getName());\nconsumer.accept(telemetryBuilder.build());\n}\n}\nprivate TelemetryItem createExceptionTelemetryItem(\nString errorStack, SpanData span, @Nullable String operationName, long itemCount) {\nExceptionTelemetryBuilder telemetryBuilder = ExceptionTelemetryBuilder.create();\ntelemetryInitializer.accept(telemetryBuilder, span.getResource());\nsetOperationId(telemetryBuilder, span.getTraceId());\nsetOperationParentId(telemetryBuilder, span.getSpanId());\nif (operationName != null) {\nsetOperationName(telemetryBuilder, operationName);\n} else {\nsetOperationName(telemetryBuilder, span.getAttributes());\n}\nsetTime(telemetryBuilder, span.getEndEpochNanos());\nsetItemCount(telemetryBuilder, itemCount);\nMAPPINGS.map(span.getAttributes(), 
telemetryBuilder);\ntelemetryBuilder.setExceptions(Exceptions.minimalParse(errorStack));\nreturn telemetryBuilder.build();\n}\npublic static T getStableOrOldAttribute(Attributes attributes, AttributeKey stable, AttributeKey old) {\nT value = attributes.get(stable);\nif (value != null) {\nreturn value;\n}\nreturn attributes.get(old);\n}\nprivate static void setTime(AbstractTelemetryBuilder telemetryBuilder, long epochNanos) {\ntelemetryBuilder.setTime(FormattedTime.offSetDateTimeFromEpochNanos(epochNanos));\n}\nprivate static void setItemCount(AbstractTelemetryBuilder telemetryBuilder, long itemCount) {\nif (itemCount != 1) {\ntelemetryBuilder.setSampleRate(100.0f / itemCount);\n}\n}\nprivate static long getItemCount(SpanData span) {\nLong itemCount = span.getAttributes().get(AiSemanticAttributes.ITEM_COUNT);\nreturn itemCount == null ? 1 : itemCount;\n}\nprivate static void addLinks(AbstractTelemetryBuilder telemetryBuilder, List links) {\nif (links.isEmpty()) {\nreturn;\n}\nStringBuilder sb = new StringBuilder();\nsb.append(\"[\");\nboolean first = true;\nfor (LinkData link : links) {\nif (!first) {\nsb.append(\",\");\n}\nsb.append(\"{\\\"operation_Id\\\":\\\"\");\nsb.append(link.getSpanContext().getTraceId());\nsb.append(\"\\\",\\\"id\\\":\\\"\");\nsb.append(link.getSpanContext().getSpanId());\nsb.append(\"\\\"}\");\nfirst = false;\n}\nsb.append(\"]\");\ntelemetryBuilder.addProperty(\"_MS.links\", sb.toString());\n}\nstatic void applyCommonTags(MappingsBuilder mappingsBuilder) {\nmappingsBuilder\n.exact(\nSemanticAttributes.ENDUSER_ID.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_USER_ID.toString(), (String) value);\n}\n})\n.exact(\nAiSemanticAttributes.PREVIEW_APPLICATION_VERSION.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(\nContextTagKeys.AI_APPLICATION_VER.toString(), (String) value);\n}\n});\napplyConnectionStringAndRoleNameOverrides(mappingsBuilder);\n}\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger connectionStringAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"connectionStringOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger roleNameAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"roleNameOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger roleInstanceAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. 
Please reach out to\"\n+ \" https:\n+ \" case for this.\");\n@SuppressWarnings(\"deprecation\")\nprivate static final WarningLogger instrumentationKeyAttributeNoLongerSupported =\nnew WarningLogger(\nSpanDataMapper.class,\nAiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey()\n+ \" is no longer supported because it\"\n+ \" is incompatible with pre-aggregated standard metrics. Please use\"\n+ \" \\\"connectionStringOverrides\\\" configuration, or reach out to\"\n+ \" https:\n+ \" different use case.\");\n@SuppressWarnings(\"deprecation\")\nstatic void applyConnectionStringAndRoleNameOverrides(MappingsBuilder mappingsBuilder) {\nmappingsBuilder\n.exact(\nAiSemanticAttributes.INTERNAL_CONNECTION_STRING.getKey(),\n(telemetryBuilder, value) -> {\ntelemetryBuilder.setConnectionString(ConnectionString.parse((String) value));\n})\n.exact(\nAiSemanticAttributes.INTERNAL_ROLE_NAME.getKey(),\n(telemetryBuilder, value) -> {\nif (value instanceof String) {\ntelemetryBuilder.addTag(ContextTagKeys.AI_CLOUD_ROLE.toString(), (String) value);\n}\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_CONNECTION_STRING.getKey(),\n(telemetryBuilder, value) -> {\nconnectionStringAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_ROLE_NAME.getKey(),\n(telemetryBuilder, value) -> {\nroleNameAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_ROLE_INSTANCE.getKey(),\n(telemetryBuilder, value) -> {\nroleInstanceAttributeNoLongerSupported.recordWarning();\n})\n.exact(\nAiSemanticAttributes.DEPRECATED_INSTRUMENTATION_KEY.getKey(),\n(telemetryBuilder, value) -> {\ninstrumentationKeyAttributeNoLongerSupported.recordWarning();\n});\n}\n}" + }, + { + "comment": ">MSICredentials() [](start = 49, length = 16) Replaced our custom implementations with this API. Verified it works on vm. 
#Resolved", + "method_body": "public CompletableFuture getSecurityTokenAsync(String audience) {\nCompletableFuture tokenGeneratingFuture = new CompletableFuture<>();\nMessagingFactory.INTERNAL_THREAD_POOL.execute(() -> {\ntry {\nMSICredentials credentials = new MSICredentials();\nString rawToken = credentials.getToken(SecurityConstants.SERVICEBUS_AAD_AUDIENCE_RESOURCE_URL);\nDate expiry = getExpirationDateTimeUtcFromToken(rawToken);\ntokenGeneratingFuture.complete(new SecurityToken(SecurityTokenType.JWT, audience, rawToken, Instant.now(), expiry.toInstant()));\n} catch (IOException e) {\nTRACE_LOGGER.error(\"ManagedIdentity token generation failed.\", e);\ntokenGeneratingFuture.completeExceptionally(e);\n} catch (ParseException e) {\nTRACE_LOGGER.error(\"Could not parse the expiry time from the Managed Identity token string.\", e);\ntokenGeneratingFuture.completeExceptionally(e);\n}\n});\nreturn tokenGeneratingFuture;\n}", + "target_code": "MSICredentials credentials = new MSICredentials();", + "method_body_after": "public CompletableFuture getSecurityTokenAsync(String audience) {\nCompletableFuture tokenGeneratingFuture = new CompletableFuture<>();\nMessagingFactory.INTERNAL_THREAD_POOL.execute(() -> {\ntry {\nMSICredentials credentials = new MSICredentials();\nString rawToken = credentials.getToken(SecurityConstants.SERVICEBUS_AAD_AUDIENCE_RESOURCE_URL);\nDate expiry = getExpirationDateTimeUtcFromToken(rawToken);\ntokenGeneratingFuture.complete(new SecurityToken(SecurityTokenType.JWT, audience, rawToken, Instant.now(), expiry.toInstant()));\n} catch (IOException e) {\nTRACE_LOGGER.error(\"ManagedIdentity token generation failed.\", e);\ntokenGeneratingFuture.completeExceptionally(e);\n} catch (ParseException e) {\nTRACE_LOGGER.error(\"Could not parse the expiry time from the Managed Identity token string.\", e);\ntokenGeneratingFuture.completeExceptionally(e);\n}\n});\nreturn tokenGeneratingFuture;\n}", + "context_before": "class ManagedIdentityTokenProvider extends TokenProvider {\nprivate static final Logger TRACE_LOGGER = LoggerFactory.getLogger(ManagedIdentityTokenProvider.class);\n@Override\nstatic Date getExpirationDateTimeUtcFromToken(String token) throws ParseException {\nJWT jwt = JWTParser.parse(token);\nJWTClaimsSet claims = jwt.getJWTClaimsSet();\nreturn claims.getExpirationTime();\n}\n}", + "context_after": "class ManagedIdentityTokenProvider extends TokenProvider {\nprivate static final Logger TRACE_LOGGER = LoggerFactory.getLogger(ManagedIdentityTokenProvider.class);\n@Override\nprivate static Date getExpirationDateTimeUtcFromToken(String token) throws ParseException {\nJWT jwt = JWTParser.parse(token);\nJWTClaimsSet claims = jwt.getJWTClaimsSet();\nreturn claims.getExpirationTime();\n}\n}" + }, + { + "comment": "Done", + "method_body": "public boolean replaceMatch(Pattern pattern, Function replacer) {\nOptional match = moveForwardToStartOfMatch(pattern);\nif (!match.isPresent()) {\nreturn false;\n}\ntextBuffer.delete(match.get().startOfMatch(), match.get().endOfMatch());\nwrite(replacer.apply(match.get()));\nreturn true;\n}", + "target_code": "", + "method_body_after": "public boolean replaceMatch(Pattern pattern, Function replacer) {\nOptional match = moveForwardToStartOfMatch(pattern);\nif (!match.isPresent()) {\nreturn false;\n}\ntextBuffer.delete(match.get().startOfMatch(), match.get().endOfMatch());\nwrite(replacer.apply(match.get()));\nreturn true;\n}", + "context_before": "class CursorImpl implements Cursor {\nprivate final TextBuffer textBuffer;\nprivate final 
Object unique = new Object();\nprivate Position position;\n/**\n* Creates a cursor to a text buffer.\n*\n* WARNING: The text buffer MUST NOT be accessed outside this cursor. This cursor\n* takes sole ownership of the text buffer.\n*\n* @param textBuffer the text buffer this cursor owns and operates on\n*/\nCursorImpl(TextBuffer textBuffer) {\nthis.textBuffer = textBuffer;\nposition = textBuffer.getStartOfText();\n}\n@Override\npublic Position getPosition() {\nreturn position;\n}\n@Override\npublic Mark createMark() {\nreturn new Mark(position, textBuffer.getVersion(), unique);\n}\n@Override\npublic String getBufferText() {\nreturn textBuffer.getString();\n}\n@Override\npublic String getLine() {\nreturn textBuffer.getLine(position);\n}\n@Override\npublic String getPrefix() {\nreturn textBuffer.getLinePrefix(position);\n}\n@Override\npublic String getSuffix() {\nreturn textBuffer.getLineSuffix(position);\n}\n@Override\npublic String getTextTo(Mark mark) {\nvalidateMark(mark);\nPosition start = mark.position();\nPosition end = position;\nif (start.isAfter(end)) {\nPosition tmp = start;\nstart = end;\nend = tmp;\n}\nreturn textBuffer.getSubstring(start, end);\n}\n@Override\npublic Cursor moveToStartOfBuffer() {\nposition = textBuffer.getStartOfText();\nreturn this;\n}\n@Override\npublic Cursor moveToEndOfBuffer() {\nposition = textBuffer.getEndOfText();\nreturn this;\n}\n@Override\npublic Cursor moveToStartOfLine() {\nposition = textBuffer.getStartOfLine(position);\nreturn this;\n}\n@Override\npublic Cursor moveToStartOfPreviousLine() {\nposition = textBuffer.getStartOfPreviousLine(position);\nreturn this;\n}\n@Override\npublic Cursor moveToStartOfNextLine() {\nposition = textBuffer.getStartOfNextLine(position);\nreturn this;\n}\n@Override\npublic Cursor moveToStartOf(int lineIndex) {\nvalidateLineIndex(lineIndex);\nposition = new Position(lineIndex, 0);\nreturn this;\n}\n@Override\npublic Cursor moveToEndOfLine() {\nposition = textBuffer.getEndOfLine(position);\nreturn this;\n}\n@Override\npublic Cursor moveToEndOfPreviousLine() {\nreturn moveToStartOfPreviousLine().moveToEndOfLine();\n}\n@Override\npublic Cursor moveToEndOfNextLine() {\nreturn moveToStartOfNextLine().moveToEndOfLine();\n}\n@Override\npublic Cursor moveToEndOf(int lineIndex) {\nreturn moveToStartOf(lineIndex).moveToEndOfLine();\n}\n@Override\npublic Cursor moveForward() {\nreturn moveForward(1);\n}\n@Override\npublic Cursor moveForward(int times) {\nposition = textBuffer.forward(position, times);\nreturn this;\n}\n@Override\npublic Cursor moveBackward() {\nreturn moveBackward(1);\n}\n@Override\npublic Cursor moveBackward(int times) {\nposition = textBuffer.backward(position, times);\nreturn this;\n}\n@Override\npublic Cursor moveTo(Mark mark) {\nvalidateMark(mark);\nposition = mark.position();\nreturn this;\n}\n@Override\npublic boolean skipBackward(String text) {\nString prefix = getPrefix();\nif (prefix.endsWith(text)) {\nposition = new Position(position.lineIndex(), position.columnIndex() - text.length());\nreturn true;\n} else {\nreturn false;\n}\n}\n@Override\npublic boolean skipForward(String text) {\nString suffix = getSuffix();\nif (suffix.startsWith(text)) {\nposition = new Position(position.lineIndex(), position.columnIndex() + text.length());\nreturn true;\n} else {\nreturn false;\n}\n}\n@Override\npublic Optional moveForwardToStartOfMatch(Pattern pattern) {\nreturn moveForwardToXOfMatch(pattern, match -> position = match.startOfMatch());\n}\n@Override\npublic Optional moveForwardToEndOfMatch(Pattern pattern) 
{\nreturn moveForwardToXOfMatch(pattern, match -> position = match.endOfMatch());\n}\nprivate Optional moveForwardToXOfMatch(Pattern pattern, Consumer callback) {\nOptional match = textBuffer.findForward(position, pattern);\nmatch.ifPresent(callback);\nreturn match;\n}\n@Override\npublic Cursor moveTo(Position position) {\nvalidatePosition(position);\nthis.position = position;\nreturn this;\n}\n@Override\npublic Cursor moveTo(int lineIndex, int columnIndex) {\nreturn moveTo(new Position(lineIndex, columnIndex));\n}\n@Override\npublic Cursor write(String text) {\nposition = textBuffer.write(position, text);\nreturn this;\n}\n@Override\npublic Cursor writeLine(String line) {\nreturn write(line).write(\"\\n\");\n}\n@Override\npublic Cursor writeLines(String... lines) {\nreturn writeLines(Arrays.asList(lines));\n}\n@Override\npublic Cursor writeLines(List lines) {\nreturn writeLine(String.join(\"\\n\", lines));\n}\n@Override\npublic Cursor writeNewline() {\nreturn write(\"\\n\");\n}\n@Override\npublic Cursor writeNewlineAfter() {\nreturn writeNewline().moveBackward();\n}\n@Override\npublic Cursor deleteAll() {\nmoveToStartOfBuffer();\ntextBuffer.clear();\nreturn this;\n}\n@Override\npublic Cursor deleteLine() {\nmoveToStartOfLine();\ntextBuffer.delete(position, textBuffer.getStartOfNextLine(position));\nreturn this;\n}\n@Override\npublic Cursor deletePrefix() {\nPosition originalPosition = position;\nmoveToStartOfLine();\ntextBuffer.delete(position, originalPosition);\nreturn this;\n}\n@Override\npublic Cursor deleteSuffix() {\ntextBuffer.delete(position, textBuffer.getEndOfLine(position));\nreturn this;\n}\n@Override\npublic Cursor deleteForward() {\nreturn deleteForward(1);\n}\n@Override\npublic Cursor deleteForward(int times) {\nPosition end = textBuffer.forward(position, times);\ntextBuffer.delete(position, end);\nreturn this;\n}\n@Override\npublic Cursor deleteBackward() {\nreturn deleteBackward(1);\n}\n@Override\npublic Cursor deleteBackward(int times) {\nPosition end = position;\nmoveBackward(times);\ntextBuffer.delete(position, end);\nreturn this;\n}\n@Override\npublic Cursor deleteTo(Mark mark) {\nPosition start = mark.position();\nPosition end = position;\nif (start.isAfter(end)) {\nPosition tmp = start;\nstart = end;\nend = tmp;\n}\ntextBuffer.delete(start, end);\nreturn this;\n}\n@Override\n@Override\npublic int replaceMatches(Pattern pattern, Function replacer) {\nint count = 0;\nfor (; replaceMatch(pattern, replacer); ++count) {\n}\nreturn count;\n}\nprivate void validatePosition(Position position) {\nvalidateLineIndex(position.lineIndex());\nint maxColumnIndex = textBuffer.getLine(position.lineIndex()).length();\nif (position.columnIndex() < 0 || position.columnIndex() > maxColumnIndex) {\nthrow new IndexOutOfBoundsException(\"Column index of \" + position.coordinateString() +\n\" is not in permitted range [0,\" + maxColumnIndex + \"]\");\n}\n}\nprivate void validateLineIndex(int lineIndex) {\nint maxLineIndex = textBuffer.getMaxLineIndex();\nif (lineIndex < 0 || lineIndex > maxLineIndex) {\nthrow new IndexOutOfBoundsException(\"Line index \" + lineIndex +\n\" not in permitted range [0,\" + maxLineIndex + \"]\");\n}\n}\nprivate void validateMark(Mark mark) {\nif (mark.secret() != unique) {\nthrow new IllegalArgumentException(\"Unknown mark \" + mark);\n}\nif (!mark.version().equals(textBuffer.getVersion())) {\nthrow new IllegalArgumentException(\"Mark \" + mark + \" is outdated\");\n}\n}\n}", + "context_after": "class CursorImpl implements Cursor {\nprivate final TextBuffer 
textBuffer;\nprivate final Object unique = new Object();\nprivate Position position;\n/**\n* Creates a cursor to a text buffer.\n*\n* WARNING: The text buffer MUST NOT be accessed outside this cursor. This cursor\n* takes sole ownership of the text buffer.\n*\n* @param textBuffer the text buffer this cursor owns and operates on\n*/\nCursorImpl(TextBuffer textBuffer) {\nthis.textBuffer = textBuffer;\nposition = textBuffer.getStartOfText();\n}\n@Override\npublic Position getPosition() {\nreturn position;\n}\n@Override\npublic Mark createMark() {\nreturn new Mark(position, textBuffer.getVersion(), unique);\n}\n@Override\npublic String getBufferText() {\nreturn textBuffer.getString();\n}\n@Override\npublic String getLine() {\nreturn textBuffer.getLine(position);\n}\n@Override\npublic String getPrefix() {\nreturn textBuffer.getLinePrefix(position);\n}\n@Override\npublic String getSuffix() {\nreturn textBuffer.getLineSuffix(position);\n}\n@Override\npublic String getTextTo(Mark mark) {\nvalidateMark(mark);\nPosition start = min(mark.position(), position);\nPosition end = max(mark.position(), position);\nreturn textBuffer.getSubstring(start, end);\n}\n@Override\npublic Cursor moveToStartOfBuffer() {\nposition = textBuffer.getStartOfText();\nreturn this;\n}\n@Override\npublic Cursor moveToEndOfBuffer() {\nposition = textBuffer.getEndOfText();\nreturn this;\n}\n@Override\npublic Cursor moveToStartOfLine() {\nposition = textBuffer.getStartOfLine(position);\nreturn this;\n}\n@Override\npublic Cursor moveToStartOfPreviousLine() {\nposition = textBuffer.getStartOfPreviousLine(position);\nreturn this;\n}\n@Override\npublic Cursor moveToStartOfNextLine() {\nposition = textBuffer.getStartOfNextLine(position);\nreturn this;\n}\n@Override\npublic Cursor moveToStartOf(int lineIndex) {\nvalidateLineIndex(lineIndex);\nposition = new Position(lineIndex, 0);\nreturn this;\n}\n@Override\npublic Cursor moveToEndOfLine() {\nposition = textBuffer.getEndOfLine(position);\nreturn this;\n}\n@Override\npublic Cursor moveToEndOfPreviousLine() {\nreturn moveToStartOfPreviousLine().moveToEndOfLine();\n}\n@Override\npublic Cursor moveToEndOfNextLine() {\nreturn moveToStartOfNextLine().moveToEndOfLine();\n}\n@Override\npublic Cursor moveToEndOf(int lineIndex) {\nreturn moveToStartOf(lineIndex).moveToEndOfLine();\n}\n@Override\npublic Cursor moveForward() {\nreturn moveForward(1);\n}\n@Override\npublic Cursor moveForward(int times) {\nposition = textBuffer.forward(position, times);\nreturn this;\n}\n@Override\npublic Cursor moveBackward() {\nreturn moveBackward(1);\n}\n@Override\npublic Cursor moveBackward(int times) {\nposition = textBuffer.backward(position, times);\nreturn this;\n}\n@Override\npublic Cursor moveTo(Mark mark) {\nvalidateMark(mark);\nposition = mark.position();\nreturn this;\n}\n@Override\npublic boolean skipBackward(String text) {\nString prefix = getPrefix();\nif (prefix.endsWith(text)) {\nposition = new Position(position.lineIndex(), position.columnIndex() - text.length());\nreturn true;\n} else {\nreturn false;\n}\n}\n@Override\npublic boolean skipForward(String text) {\nString suffix = getSuffix();\nif (suffix.startsWith(text)) {\nposition = new Position(position.lineIndex(), position.columnIndex() + text.length());\nreturn true;\n} else {\nreturn false;\n}\n}\n@Override\npublic Optional moveForwardToStartOfMatch(Pattern pattern) {\nreturn moveForwardToXOfMatch(pattern, match -> position = match.startOfMatch());\n}\n@Override\npublic Optional moveForwardToEndOfMatch(Pattern pattern) {\nreturn 
moveForwardToXOfMatch(pattern, match -> position = match.endOfMatch());\n}\nprivate Optional moveForwardToXOfMatch(Pattern pattern, Consumer callback) {\nOptional match = textBuffer.findForward(position, pattern);\nmatch.ifPresent(callback);\nreturn match;\n}\n@Override\npublic Cursor moveTo(Position position) {\nvalidatePosition(position);\nthis.position = position;\nreturn this;\n}\n@Override\npublic Cursor moveTo(int lineIndex, int columnIndex) {\nreturn moveTo(new Position(lineIndex, columnIndex));\n}\n@Override\npublic Cursor write(String text) {\nposition = textBuffer.write(position, text);\nreturn this;\n}\n@Override\npublic Cursor writeLine(String line) {\nreturn write(line).write(\"\\n\");\n}\n@Override\npublic Cursor writeLines(String... lines) {\nreturn writeLines(Arrays.asList(lines));\n}\n@Override\npublic Cursor writeLines(Iterable lines) {\nreturn writeLine(String.join(\"\\n\", lines));\n}\n@Override\npublic Cursor writeNewline() {\nreturn write(\"\\n\");\n}\n@Override\npublic Cursor writeNewlineAfter() {\nreturn writeNewline().moveBackward();\n}\n@Override\npublic Cursor deleteAll() {\nmoveToStartOfBuffer();\ntextBuffer.clear();\nreturn this;\n}\n@Override\npublic Cursor deleteLine() {\nmoveToStartOfLine();\ntextBuffer.delete(position, textBuffer.getStartOfNextLine(position));\nreturn this;\n}\n@Override\npublic Cursor deletePrefix() {\nPosition originalPosition = position;\nmoveToStartOfLine();\ntextBuffer.delete(position, originalPosition);\nreturn this;\n}\n@Override\npublic Cursor deleteSuffix() {\ntextBuffer.delete(position, textBuffer.getEndOfLine(position));\nreturn this;\n}\n@Override\npublic Cursor deleteForward() {\nreturn deleteForward(1);\n}\n@Override\npublic Cursor deleteForward(int times) {\nPosition end = textBuffer.forward(position, times);\ntextBuffer.delete(position, end);\nreturn this;\n}\n@Override\npublic Cursor deleteBackward() {\nreturn deleteBackward(1);\n}\n@Override\npublic Cursor deleteBackward(int times) {\nPosition end = position;\nmoveBackward(times);\ntextBuffer.delete(position, end);\nreturn this;\n}\n@Override\npublic Cursor deleteTo(Mark mark) {\nvalidateMark(mark);\nPosition start = min(mark.position(), position);\nPosition end = max(mark.position(), position);\ntextBuffer.delete(start, end);\nreturn this;\n}\n@Override\n@Override\npublic int replaceMatches(Pattern pattern, Function replacer) {\nint count = 0;\nfor (; replaceMatch(pattern, replacer); ++count) {\n}\nreturn count;\n}\nprivate void validatePosition(Position position) {\nvalidateLineIndex(position.lineIndex());\nint maxColumnIndex = textBuffer.getLine(position.lineIndex()).length();\nif (position.columnIndex() < 0 || position.columnIndex() > maxColumnIndex) {\nthrow new IndexOutOfBoundsException(\"Column index of \" + position.coordinateString() +\n\" is not in permitted range [0,\" + maxColumnIndex + \"]\");\n}\n}\nprivate void validateLineIndex(int lineIndex) {\nint maxLineIndex = textBuffer.getMaxLineIndex();\nif (lineIndex < 0 || lineIndex > maxLineIndex) {\nthrow new IndexOutOfBoundsException(\"Line index \" + lineIndex +\n\" not in permitted range [0,\" + maxLineIndex + \"]\");\n}\n}\nprivate void validateMark(Mark mark) {\nif (mark.secret() != unique) {\nthrow new IllegalArgumentException(\"Unknown mark \" + mark);\n}\nif (!mark.version().equals(textBuffer.getVersion())) {\nthrow new IllegalArgumentException(\"Mark \" + mark + \" is outdated\");\n}\n}\n}" + }, + { + "comment": "Done", + "method_body": "new BundleProgressHandler() {\n@Override\npublic void 
onProgress(ProcessBundleProgressResponse progress) {\nif (progress.hasSplit()) {\nfeeder.split(progress.getSplit());\n}\n}\n@Override\npublic void onCompleted(ProcessBundleResponse response) {\nif (response.hasSplit()) {\nfeeder.split(response.getSplit());\n}\n}\n});", + "target_code": "feeder.split(progress.getSplit());", + "method_body_after": "new BundleProgressHandler() {\n@Override\npublic void onProgress(ProcessBundleProgressResponse progress) {\nif (progress.hasSplit()) {\nfeeder.split(progress.getSplit());\n}\n}\n@Override\npublic void onCompleted(ProcessBundleResponse response) {\nif (response.hasSplit()) {\nfeeder.split(response.getSplit());\n}\n}\n}", + "context_before": "class SplittableRemoteStageEvaluator\nimplements TransformEvaluator>> {\nprivate final PTransformNode transform;\nprivate final ExecutableStage stage;\nprivate final CopyOnAccessInMemoryStateInternals stateInternals;\nprivate final DirectTimerInternals timerInternals;\nprivate final RemoteBundle> bundle;\nprivate final Collection> outputs;\nprivate final SDFFeederViaStateAndTimers feeder;\nprivate SplittableRemoteStageEvaluator(\nBundleFactory bundleFactory,\nJobBundleFactory jobBundleFactory,\nStepStateAndTimers stp,\nPTransformNode transform)\nthrows Exception {\nthis.stateInternals = stp.stateInternals();\nthis.timerInternals = stp.timerInternals();\nthis.transform = transform;\nthis.stage =\nExecutableStage.fromPayload(\nExecutableStagePayload.parseFrom(transform.getTransform().getSpec().getPayload()));\nthis.outputs = new ArrayList<>();\nthis.bundle =\njobBundleFactory\n.>forStage(stage)\n.getBundle(\nBundleFactoryOutputReceiverFactory.create(\nbundleFactory, stage.getComponents(), outputs::add),\nStateRequestHandler.unsupported(),\n);\nFullWindowedValueCoder> windowedValueCoder =\n(FullWindowedValueCoder>)\nWireCoders.>instantiateRunnerWireCoder(\nstage.getInputPCollection(), stage.getComponents());\nKvCoder kvCoder =\n((KvCoder) windowedValueCoder.getValueCoder());\nthis.feeder =\nnew SDFFeederViaStateAndTimers<>(\nstateInternals,\ntimerInternals,\nkvCoder.getKeyCoder(),\nkvCoder.getValueCoder(),\n(Coder) windowedValueCoder.getWindowCoder());\n}\n@Override\npublic void processElement(\nWindowedValue>> windowedWorkItem)\nthrows Exception {\nKeyedWorkItem> kwi = windowedWorkItem.getValue();\nWindowedValue> elementRestriction =\nIterables.getOnlyElement(kwi.elementsIterable(), null);\nif (elementRestriction != null) {\nfeeder.seed(elementRestriction);\n} else {\nelementRestriction = feeder.resume(Iterables.getOnlyElement(kwi.timersIterable()));\n}\nbundle.getInputReceiver().accept(elementRestriction);\n}\n@Override\npublic TransformResult>> finishBundle()\nthrows Exception {\nbundle.close();\nfeeder.commit();\nCopyOnAccessInMemoryStateInternals state = stateInternals.commit();\nStepTransformResult.Builder>> result =\nStepTransformResult.withHold(transform, state.getEarliestWatermarkHold());\nreturn result\n.addOutput(outputs)\n.withState(state)\n.withTimerUpdate(timerInternals.getTimerUpdate())\n.build();\n}\n}", + "context_after": "class SplittableRemoteStageEvaluator\nimplements TransformEvaluator>> {\nprivate final PTransformNode transform;\nprivate final ExecutableStage stage;\nprivate final CopyOnAccessInMemoryStateInternals stateInternals;\nprivate final DirectTimerInternals timerInternals;\nprivate final RemoteBundle> bundle;\nprivate final Collection> outputs;\nprivate final SDFFeederViaStateAndTimers feeder;\nprivate SplittableRemoteStageEvaluator(\nBundleFactory bundleFactory,\nJobBundleFactory 
jobBundleFactory,\nStepStateAndTimers stp,\nPTransformNode transform)\nthrows Exception {\nthis.stateInternals = stp.stateInternals();\nthis.timerInternals = stp.timerInternals();\nthis.transform = transform;\nthis.stage =\nExecutableStage.fromPayload(\nExecutableStagePayload.parseFrom(transform.getTransform().getSpec().getPayload()));\nthis.outputs = new ArrayList<>();\nFullWindowedValueCoder> windowedValueCoder =\n(FullWindowedValueCoder>)\nWireCoders.>instantiateRunnerWireCoder(\nstage.getInputPCollection(), stage.getComponents());\nKvCoder kvCoder =\n((KvCoder) windowedValueCoder.getValueCoder());\nthis.feeder =\nnew SDFFeederViaStateAndTimers<>(\nstateInternals,\ntimerInternals,\nkvCoder.getKeyCoder(),\nkvCoder.getValueCoder(),\n(Coder) windowedValueCoder.getWindowCoder());\nthis.bundle =\njobBundleFactory\n.>forStage(stage)\n.getBundle(\nBundleFactoryOutputReceiverFactory.create(\nbundleFactory, stage.getComponents(), outputs::add),\nStateRequestHandler.unsupported(),\n);\n}\n@Override\npublic void processElement(\nWindowedValue>> windowedWorkItem)\nthrows Exception {\nKeyedWorkItem> kwi = windowedWorkItem.getValue();\nWindowedValue> elementRestriction =\nIterables.getOnlyElement(kwi.elementsIterable(), null);\nif (elementRestriction != null) {\nfeeder.seed(elementRestriction);\n} else {\nelementRestriction = feeder.resume(Iterables.getOnlyElement(kwi.timersIterable()));\n}\nbundle.getInputReceiver().accept(elementRestriction);\n}\n@Override\npublic TransformResult>> finishBundle()\nthrows Exception {\nbundle.close();\nfeeder.commit();\nCopyOnAccessInMemoryStateInternals state = stateInternals.commit();\nStepTransformResult.Builder>> result =\nStepTransformResult.withHold(transform, state.getEarliestWatermarkHold());\nreturn result\n.addOutput(outputs)\n.withState(state)\n.withTimerUpdate(timerInternals.getTimerUpdate())\n.build();\n}\n}" + }, + { + "comment": "see below", + "method_body": "void testFailover() throws Exception {\nOperatorSubtaskState state;\ntry (OneInputStreamOperatorTestHarness harness = create()) {\nharness.setup();\nharness.initializeEmptyState();\nharness.open();\nharness.processElement(row(\"1\"), 0);\nharness.processElement(row(\"2\"), 0);\nharness.processElement(row(\"2\"), 0);\nstate = harness.snapshot(1, 1);\nharness.processElement(row(\"3\"), 0);\nharness.processElement(row(\"4\"), 0);\nharness.notifyOfCompletedCheckpoint(1);\nList partitions = collect(harness);\nassertThat(partitions).isEqualTo(Arrays.asList(\"1\", \"2\"));\n}\ntry (OneInputStreamOperatorTestHarness harness = create()) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(\"3\"), 0);\nharness.processElement(row(\"4\"), 0);\nstate = harness.snapshot(2, 2);\nharness.notifyOfCompletedCheckpoint(2);\nList partitions = collect(harness);\nassertThat(partitions).isEqualTo(Arrays.asList(\"1\", \"2\", \"3\", \"4\"));\n}\ntry (OneInputStreamOperatorTestHarness harness = create()) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(\"4\"), 0);\nharness.processElement(row(\"5\"), 0);\nstate = harness.snapshot(3, 3);\nharness.notifyOfCompletedCheckpoint(3);\nList partitions = collect(harness);\nassertThat(partitions).isEqualTo(Arrays.asList(\"3\", \"4\", \"5\"));\n}\ntry (OneInputStreamOperatorTestHarness harness = create()) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(\"6\"), 0);\nharness.processElement(row(\"7\"), 0);\nharness.snapshot(4, 
4);\nharness.processElement(row(\"8\"), 0);\nharness.snapshot(5, 5);\nharness.processElement(row(\"9\"), 0);\nharness.snapshot(6, 6);\nharness.notifyOfCompletedCheckpoint(5);\nList partitions = collect(harness);\nassertThat(partitions).isEqualTo(Arrays.asList(\"4\", \"5\", \"6\", \"7\", \"8\"));\n}\n}", + "target_code": "assertThat(partitions).isEqualTo(Arrays.asList(\"4\", \"5\", \"6\", \"7\", \"8\"));", + "method_body_after": "void testFailover() throws Exception {\nOperatorSubtaskState state;\ntry (OneInputStreamOperatorTestHarness harness = create()) {\nharness.setup();\nharness.initializeEmptyState();\nharness.open();\nharness.processElement(row(\"1\"), 0);\nharness.processElement(row(\"2\"), 0);\nharness.processElement(row(\"2\"), 0);\nstate = harness.snapshot(1, 1);\nharness.processElement(row(\"3\"), 0);\nharness.processElement(row(\"4\"), 0);\nharness.notifyOfCompletedCheckpoint(1);\nList partitions = collect(harness);\nassertThat(partitions).containsExactly(\"1\", \"2\");\n}\ntry (OneInputStreamOperatorTestHarness harness = create()) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(\"3\"), 0);\nharness.processElement(row(\"4\"), 0);\nstate = harness.snapshot(2, 2);\nharness.notifyOfCompletedCheckpoint(2);\nList partitions = collect(harness);\nassertThat(partitions).containsExactly(\"1\", \"2\", \"3\", \"4\");\n}\ntry (OneInputStreamOperatorTestHarness harness = create()) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(\"4\"), 0);\nharness.processElement(row(\"5\"), 0);\nstate = harness.snapshot(3, 3);\nharness.notifyOfCompletedCheckpoint(3);\nList partitions = collect(harness);\nassertThat(partitions).containsExactly(\"3\", \"4\", \"5\");\n}\ntry (OneInputStreamOperatorTestHarness harness = create()) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(\"6\"), 0);\nharness.processElement(row(\"7\"), 0);\nharness.snapshot(4, 4);\nharness.processElement(row(\"8\"), 0);\nharness.snapshot(5, 5);\nharness.processElement(row(\"9\"), 0);\nharness.snapshot(6, 6);\nharness.notifyOfCompletedCheckpoint(5);\nList partitions = collect(harness);\nassertThat(partitions).containsExactly(\"4\", \"5\", \"6\", \"7\", \"8\");\n}\n}", + "context_before": "class StreamingFileWriterTest {\nprivate final OutputFileConfig outputFileConfig = OutputFileConfig.builder().build();\nprivate final DateFormat dateFormat = new SimpleDateFormat(\"yyyy-MM-dd\");\n@TempDir private java.nio.file.Path tmpDir;\nprivate Path path;\n@BeforeEach\nvoid before() throws IOException {\nFile file = tmpDir.toFile();\nfile.delete();\npath = new Path(file.toURI());\n}\n@Test\n@Test\nvoid testCommitImmediately() throws Exception {\ntry (OneInputStreamOperatorTestHarness harness = create()) {\nharness.setup();\nharness.initializeEmptyState();\nharness.open();\nharness.processElement(row(\"1\"), 0);\nharness.processElement(row(\"2\"), 0);\nharness.processElement(row(\"2\"), 0);\nharness.snapshot(1, 1);\nharness.processElement(row(\"1\"), 0);\nharness.processElement(row(\"3\"), 0);\nharness.processElement(row(\"4\"), 0);\nharness.notifyOfCompletedCheckpoint(1);\nList partitions = collect(harness);\nassertThat(partitions).isEqualTo(Arrays.asList(\"1\", \"2\"));\n}\n}\n@Test\nvoid testCommitFileWhenPartitionIsCommittableByProcessTime() throws Exception {\nFileSystemTableSink.TableRollingPolicy tableRollingPolicy =\nnew 
FileSystemTableSink.TableRollingPolicy(\nfalse,\nLong.MAX_VALUE,\nDuration.ofDays(1).toMillis(),\nDuration.ofDays(1).toMillis());\nList partitionKeys = Collections.singletonList(\"d\");\nConfiguration conf = getProcTimeCommitTriggerConf(Duration.ofSeconds(1).toMillis());\nOperatorSubtaskState state;\nlong currentTimeMillis = System.currentTimeMillis();\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeEmptyState();\nharness.open();\nharness.setProcessingTime(currentTimeMillis);\nharness.processElement(row(\"1\"), 0);\nharness.processElement(row(\"2\"), 0);\nstate = harness.snapshot(1, 1);\nharness.processElement(row(\"3\"), 0);\nharness.notifyOfCompletedCheckpoint(1);\nassertThat(isPartitionFileCommitted(\"1\", 0, 0)).isFalse();\nassertThat(isPartitionFileCommitted(\"2\", 0, 1)).isFalse();\n}\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(\"3\"), 0);\ncurrentTimeMillis += Duration.ofSeconds(2).toMillis();\nharness.setProcessingTime(currentTimeMillis);\nharness.processElement(row(\"4\"), 0);\nharness.snapshot(2, 2);\nharness.notifyOfCompletedCheckpoint(2);\nassertThat(isPartitionFileCommitted(\"3\", 0, 2)).isTrue();\nassertThat(isPartitionFileCommitted(\"4\", 0, 3)).isFalse();\ncurrentTimeMillis += Duration.ofSeconds(2).toMillis();\nharness.setProcessingTime(currentTimeMillis);\nstate = harness.snapshot(3, 3);\nharness.notifyOfCompletedCheckpoint(3);\nassertThat(isPartitionFileCommitted(\"4\", 0, 3)).isTrue();\n}\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(\"4\"), 0);\nharness.processElement(row(\"4\"), 0);\nharness.snapshot(4, 4);\nharness.processElement(row(\"5\"), 5);\nharness.endInput();\nassertThat(isPartitionFileCommitted(\"4\", 0, 4)).isTrue();\nassertThat(isPartitionFileCommitted(\"5\", 0, 5)).isTrue();\n}\n}\n@Test\nvoid testCommitFileWhenPartitionIsCommittableByPartitionTime() throws Exception {\nFileSystemTableSink.TableRollingPolicy tableRollingPolicy =\nnew FileSystemTableSink.TableRollingPolicy(\nfalse,\nLong.MAX_VALUE,\nDuration.ofDays(1).toMillis(),\nDuration.ofDays(1).toMillis());\nList partitionKeys = Collections.singletonList(\"d\");\nConfiguration conf = getPartitionCommitTriggerConf(Duration.ofDays(1).toMillis());\nlong currentTimeMillis = System.currentTimeMillis();\nDate nextYear = new Date(currentTimeMillis + Duration.ofDays(365).toMillis());\nString nextYearPartition = \"d=\" + dateFormat.format(nextYear);\nDate yesterday = new Date(currentTimeMillis - Duration.ofDays(1).toMillis());\nString yesterdayPartition = \"d=\" + dateFormat.format(yesterday);\nDate today = new Date(currentTimeMillis);\nString todayPartition = \"d=\" + dateFormat.format(today);\nDate tomorrow = new Date(currentTimeMillis + Duration.ofDays(1).toMillis());\nString tomorrowPartition = \"d=\" + dateFormat.format(tomorrow);\nOperatorSubtaskState state;\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeEmptyState();\nharness.open();\nharness.processElement(row(yesterdayPartition), 0);\nharness.processWatermark(currentTimeMillis);\nstate = harness.snapshot(1, 
1);\nharness.notifyOfCompletedCheckpoint(1);\nassertThat(isPartitionFileCommitted(yesterdayPartition, 0, 0)).isTrue();\n}\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(tomorrowPartition), 0);\nharness.processElement(row(todayPartition), 0);\ncurrentTimeMillis += Duration.ofDays(1).toMillis();\nharness.processWatermark(currentTimeMillis);\nharness.snapshot(2, 2);\nharness.notifyOfCompletedCheckpoint(2);\nassertThat(isPartitionFileCommitted(todayPartition, 0, 2)).isTrue();\nassertThat(isPartitionFileCommitted(tomorrowPartition, 0, 1)).isFalse();\ncurrentTimeMillis += Duration.ofDays(1).toMillis();\nharness.processWatermark(currentTimeMillis);\nstate = harness.snapshot(3, 3);\nharness.notifyOfCompletedCheckpoint(3);\nassertThat(isPartitionFileCommitted(tomorrowPartition, 0, 1)).isTrue();\nharness.processElement(row(nextYearPartition), 0);\n}\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(nextYearPartition), 0);\nharness.processElement(row(tomorrowPartition), 0);\nharness.endInput();\nassertThat(isPartitionFileCommitted(tomorrowPartition, 0, 4)).isTrue();\nassertThat(isPartitionFileCommitted(nextYearPartition, 0, 3)).isTrue();\n}\n}\nprivate static RowData row(String s) {\nreturn GenericRowData.of(StringData.fromString(s));\n}\nprivate static List collect(\nOneInputStreamOperatorTestHarness harness) {\nList parts = new ArrayList<>();\nharness.extractOutputValues().forEach(m -> parts.addAll(m.getPartitions()));\nreturn parts;\n}\nprivate OneInputStreamOperatorTestHarness create()\nthrows Exception {\nConfiguration configuration = new Configuration();\nconfiguration.setString(SINK_PARTITION_COMMIT_TRIGGER.key(), \"process-time\");\nreturn create(OnCheckpointRollingPolicy.build(), new ArrayList<>(), configuration);\n}\nprivate OneInputStreamOperatorTestHarness create(\nRollingPolicy rollingPolicy,\nList partitionKeys,\nConfiguration conf)\nthrows Exception {\nStreamingFileWriter writer =\nnew StreamingFileWriter<>(\n1000,\nStreamingFileSink.forRowFormat(\npath,\n(Encoder)\n(element, stream) ->\nstream.write(\n(element.getString(0) + \"\\n\")\n.getBytes(\nStandardCharsets\n.UTF_8)))\n.withBucketAssigner(\nnew BucketAssigner() {\n@Override\npublic String getBucketId(\nRowData element, Context context) {\nreturn element.getString(0).toString();\n}\n@Override\npublic SimpleVersionedSerializer\ngetSerializer() {\nreturn SimpleVersionedStringSerializer.INSTANCE;\n}\n})\n.withRollingPolicy(rollingPolicy),\npartitionKeys,\nconf);\nOneInputStreamOperatorTestHarness harness =\nnew OneInputStreamOperatorTestHarness<>(writer, 1, 1, 0);\nharness.getStreamConfig().setTimeCharacteristic(TimeCharacteristic.ProcessingTime);\nreturn harness;\n}\nprivate Configuration getPartitionCommitTriggerConf(long commitDelay) {\nConfiguration configuration = new Configuration();\nconfiguration.setString(SINK_PARTITION_COMMIT_POLICY_KIND, \"success-file\");\nconfiguration.setString(PARTITION_TIME_EXTRACTOR_TIMESTAMP_FORMATTER.key(), \"yyyy-MM-dd\");\nconfiguration.setString(SINK_PARTITION_COMMIT_TRIGGER.key(), \"partition-time\");\nconfiguration.setLong(SINK_PARTITION_COMMIT_DELAY.key(), commitDelay);\nconfiguration.setString(SINK_PARTITION_COMMIT_WATERMARK_TIME_ZONE.key(), \"UTC\");\nreturn configuration;\n}\nprivate 
Configuration getProcTimeCommitTriggerConf(long commitDelay) {\nConfiguration configuration = new Configuration();\nconfiguration.setString(SINK_PARTITION_COMMIT_POLICY_KIND, \"success-file\");\nconfiguration.setString(SINK_PARTITION_COMMIT_TRIGGER.key(), \"process-time\");\nconfiguration.setLong(SINK_PARTITION_COMMIT_DELAY.key(), commitDelay);\nconfiguration.setString(SINK_PARTITION_COMMIT_WATERMARK_TIME_ZONE.key(), \"UTC\");\nreturn configuration;\n}\nprivate boolean isPartitionFileCommitted(String partition, int subtaskIndex, int partCounter) {\njava.nio.file.Path bucketPath = Paths.get(path.getPath(), partition);\nString fileName =\noutputFileConfig.getPartPrefix()\n+ '-'\n+ subtaskIndex\n+ '-'\n+ partCounter\n+ outputFileConfig.getPartSuffix();\njava.nio.file.Path filePath = bucketPath.resolve(fileName);\nreturn filePath.toFile().exists();\n}\n}", + "context_after": "class StreamingFileWriterTest {\nprivate final OutputFileConfig outputFileConfig = OutputFileConfig.builder().build();\nprivate final DateFormat dateFormat = new SimpleDateFormat(\"yyyy-MM-dd\");\n@TempDir private java.nio.file.Path tmpDir;\nprivate Path path;\n@BeforeEach\nvoid before() throws IOException {\npath = new Path(tmpDir.resolve(\"tmp\").toUri());\n}\n@Test\n@Test\nvoid testCommitImmediately() throws Exception {\ntry (OneInputStreamOperatorTestHarness harness = create()) {\nharness.setup();\nharness.initializeEmptyState();\nharness.open();\nharness.processElement(row(\"1\"), 0);\nharness.processElement(row(\"2\"), 0);\nharness.processElement(row(\"2\"), 0);\nharness.snapshot(1, 1);\nharness.processElement(row(\"1\"), 0);\nharness.processElement(row(\"3\"), 0);\nharness.processElement(row(\"4\"), 0);\nharness.notifyOfCompletedCheckpoint(1);\nList partitions = collect(harness);\nassertThat(partitions).containsExactly(\"1\", \"2\");\n}\n}\n@Test\nvoid testCommitFileWhenPartitionIsCommittableByProcessTime() throws Exception {\nFileSystemTableSink.TableRollingPolicy tableRollingPolicy =\nnew FileSystemTableSink.TableRollingPolicy(\nfalse,\nLong.MAX_VALUE,\nDuration.ofDays(1).toMillis(),\nDuration.ofDays(1).toMillis());\nList partitionKeys = Collections.singletonList(\"d\");\nConfiguration conf = getProcTimeCommitTriggerConf(Duration.ofSeconds(1).toMillis());\nOperatorSubtaskState state;\nlong currentTimeMillis = System.currentTimeMillis();\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeEmptyState();\nharness.open();\nharness.setProcessingTime(currentTimeMillis);\nharness.processElement(row(\"1\"), 0);\nharness.processElement(row(\"2\"), 0);\nstate = harness.snapshot(1, 1);\nharness.processElement(row(\"3\"), 0);\nharness.notifyOfCompletedCheckpoint(1);\nassertThat(isPartitionFileCommitted(\"1\", 0, 0)).isFalse();\nassertThat(isPartitionFileCommitted(\"2\", 0, 1)).isFalse();\n}\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(\"3\"), 0);\ncurrentTimeMillis += Duration.ofSeconds(2).toMillis();\nharness.setProcessingTime(currentTimeMillis);\nharness.processElement(row(\"4\"), 0);\nharness.snapshot(2, 2);\nharness.notifyOfCompletedCheckpoint(2);\nassertThat(isPartitionFileCommitted(\"3\", 0, 2)).isTrue();\nassertThat(isPartitionFileCommitted(\"4\", 0, 3)).isFalse();\ncurrentTimeMillis += Duration.ofSeconds(2).toMillis();\nharness.setProcessingTime(currentTimeMillis);\nstate = 
harness.snapshot(3, 3);\nharness.notifyOfCompletedCheckpoint(3);\nassertThat(isPartitionFileCommitted(\"4\", 0, 3)).isTrue();\n}\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(\"4\"), 0);\nharness.processElement(row(\"4\"), 0);\nharness.snapshot(4, 4);\nharness.processElement(row(\"5\"), 5);\nharness.endInput();\nassertThat(isPartitionFileCommitted(\"4\", 0, 4)).isTrue();\nassertThat(isPartitionFileCommitted(\"5\", 0, 5)).isTrue();\n}\n}\n@Test\nvoid testCommitFileWhenPartitionIsCommittableByPartitionTime() throws Exception {\nFileSystemTableSink.TableRollingPolicy tableRollingPolicy =\nnew FileSystemTableSink.TableRollingPolicy(\nfalse,\nLong.MAX_VALUE,\nDuration.ofDays(1).toMillis(),\nDuration.ofDays(1).toMillis());\nList partitionKeys = Collections.singletonList(\"d\");\nConfiguration conf = getPartitionCommitTriggerConf(Duration.ofDays(1).toMillis());\nlong currentTimeMillis = System.currentTimeMillis();\nDate nextYear = new Date(currentTimeMillis + Duration.ofDays(365).toMillis());\nString nextYearPartition = \"d=\" + dateFormat.format(nextYear);\nDate yesterday = new Date(currentTimeMillis - Duration.ofDays(1).toMillis());\nString yesterdayPartition = \"d=\" + dateFormat.format(yesterday);\nDate today = new Date(currentTimeMillis);\nString todayPartition = \"d=\" + dateFormat.format(today);\nDate tomorrow = new Date(currentTimeMillis + Duration.ofDays(1).toMillis());\nString tomorrowPartition = \"d=\" + dateFormat.format(tomorrow);\nOperatorSubtaskState state;\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeEmptyState();\nharness.open();\nharness.processElement(row(yesterdayPartition), 0);\nharness.processWatermark(currentTimeMillis);\nstate = harness.snapshot(1, 1);\nharness.notifyOfCompletedCheckpoint(1);\nassertThat(isPartitionFileCommitted(yesterdayPartition, 0, 0)).isTrue();\n}\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(tomorrowPartition), 0);\nharness.processElement(row(todayPartition), 0);\ncurrentTimeMillis += Duration.ofDays(1).toMillis();\nharness.processWatermark(currentTimeMillis);\nharness.snapshot(2, 2);\nharness.notifyOfCompletedCheckpoint(2);\nassertThat(isPartitionFileCommitted(todayPartition, 0, 2)).isTrue();\nassertThat(isPartitionFileCommitted(tomorrowPartition, 0, 1)).isFalse();\ncurrentTimeMillis += Duration.ofDays(1).toMillis();\nharness.processWatermark(currentTimeMillis);\nstate = harness.snapshot(3, 3);\nharness.notifyOfCompletedCheckpoint(3);\nassertThat(isPartitionFileCommitted(tomorrowPartition, 0, 1)).isTrue();\nharness.processElement(row(nextYearPartition), 0);\n}\ntry (OneInputStreamOperatorTestHarness harness =\ncreate(tableRollingPolicy, partitionKeys, conf)) {\nharness.setup();\nharness.initializeState(state);\nharness.open();\nharness.processElement(row(nextYearPartition), 0);\nharness.processElement(row(tomorrowPartition), 0);\nharness.endInput();\nassertThat(isPartitionFileCommitted(tomorrowPartition, 0, 4)).isTrue();\nassertThat(isPartitionFileCommitted(nextYearPartition, 0, 3)).isTrue();\n}\n}\nprivate static RowData row(String s) {\nreturn GenericRowData.of(StringData.fromString(s));\n}\nprivate static List collect(\nOneInputStreamOperatorTestHarness 
harness) {\nList parts = new ArrayList<>();\nharness.extractOutputValues().forEach(m -> parts.addAll(m.getPartitions()));\nreturn parts;\n}\nprivate OneInputStreamOperatorTestHarness create()\nthrows Exception {\nConfiguration configuration = new Configuration();\nconfiguration.setString(SINK_PARTITION_COMMIT_TRIGGER.key(), \"process-time\");\nreturn create(OnCheckpointRollingPolicy.build(), new ArrayList<>(), configuration);\n}\nprivate OneInputStreamOperatorTestHarness create(\nRollingPolicy rollingPolicy,\nList partitionKeys,\nConfiguration conf)\nthrows Exception {\nStreamingFileWriter writer =\nnew StreamingFileWriter<>(\n1000,\nStreamingFileSink.forRowFormat(\npath,\n(Encoder)\n(element, stream) ->\nstream.write(\n(element.getString(0) + \"\\n\")\n.getBytes(\nStandardCharsets\n.UTF_8)))\n.withBucketAssigner(\nnew BucketAssigner() {\n@Override\npublic String getBucketId(\nRowData element, Context context) {\nreturn element.getString(0).toString();\n}\n@Override\npublic SimpleVersionedSerializer\ngetSerializer() {\nreturn SimpleVersionedStringSerializer.INSTANCE;\n}\n})\n.withRollingPolicy(rollingPolicy),\npartitionKeys,\nconf);\nOneInputStreamOperatorTestHarness harness =\nnew OneInputStreamOperatorTestHarness<>(writer, 1, 1, 0);\nharness.getStreamConfig().setTimeCharacteristic(TimeCharacteristic.ProcessingTime);\nreturn harness;\n}\nprivate Configuration getPartitionCommitTriggerConf(long commitDelay) {\nConfiguration configuration = new Configuration();\nconfiguration.setString(SINK_PARTITION_COMMIT_POLICY_KIND, \"success-file\");\nconfiguration.setString(PARTITION_TIME_EXTRACTOR_TIMESTAMP_FORMATTER.key(), \"yyyy-MM-dd\");\nconfiguration.setString(SINK_PARTITION_COMMIT_TRIGGER.key(), \"partition-time\");\nconfiguration.setLong(SINK_PARTITION_COMMIT_DELAY.key(), commitDelay);\nconfiguration.setString(SINK_PARTITION_COMMIT_WATERMARK_TIME_ZONE.key(), \"UTC\");\nreturn configuration;\n}\nprivate Configuration getProcTimeCommitTriggerConf(long commitDelay) {\nConfiguration configuration = new Configuration();\nconfiguration.setString(SINK_PARTITION_COMMIT_POLICY_KIND, \"success-file\");\nconfiguration.setString(SINK_PARTITION_COMMIT_TRIGGER.key(), \"process-time\");\nconfiguration.setLong(SINK_PARTITION_COMMIT_DELAY.key(), commitDelay);\nconfiguration.setString(SINK_PARTITION_COMMIT_WATERMARK_TIME_ZONE.key(), \"UTC\");\nreturn configuration;\n}\nprivate boolean isPartitionFileCommitted(String partition, int subtaskIndex, int partCounter) {\njava.nio.file.Path bucketPath = Paths.get(path.getPath(), partition);\nString fileName =\noutputFileConfig.getPartPrefix()\n+ '-'\n+ subtaskIndex\n+ '-'\n+ partCounter\n+ outputFileConfig.getPartSuffix();\njava.nio.file.Path filePath = bucketPath.resolve(fileName);\nreturn filePath.toFile().exists();\n}\n}" + }, + { + "comment": "`node.getKind()` can be extracted out to a variable.", + "method_body": "private void updateAndCleanPrevEnvsForNarrowedEnvFollowingIfWithoutElse(BLangNode node) {\nif (node.getKind() != NodeKind.IF && node.getKind() != NodeKind.BLOCK &&\nnode.getKind() != NodeKind.BLOCK_FUNCTION_BODY) {\nreturn;\n}\nif (node.getKind() == NodeKind.BLOCK || node.getKind() == NodeKind.BLOCK_FUNCTION_BODY) {\nif (this.prevEnvs.peek() != null && this.prevEnvs.peek().node == node) {\nthis.prevEnvs.pop();\n}\nreturn;\n}\nBLangIf ifNode = (BLangIf) node;\nif (ifNode.elseStmt == null && this.notCompletedNormally) {\nBLangExpression expr = ifNode.expr;\nboolean constTrueCondition =\nConditionResolver.checkConstCondition(types, symTable, expr) == 
symTable.trueType;\nif (!constTrueCondition) {\nSymbolEnv narrowedEnv = typeNarrower.evaluateFalsityFollowingIfWithoutElse(expr, env);\nthis.prevEnvs.push(narrowedEnv);\n}\nthis.notCompletedNormally = constTrueCondition;\n}\n}", + "target_code": "if (node.getKind() != NodeKind.IF && node.getKind() != NodeKind.BLOCK &&", + "method_body_after": "private void updateAndCleanPrevEnvsForNarrowedEnvFollowingIfWithoutElse(BLangNode node) {\nif (node.getKind() != NodeKind.IF && node.getKind() != NodeKind.BLOCK &&\nnode.getKind() != NodeKind.BLOCK_FUNCTION_BODY) {\nreturn;\n}\nif (node.getKind() == NodeKind.BLOCK || node.getKind() == NodeKind.BLOCK_FUNCTION_BODY) {\nif (this.prevEnvs.peek() != null && this.prevEnvs.peek().node == node) {\nthis.prevEnvs.pop();\n}\nreturn;\n}\nBLangIf ifNode = (BLangIf) node;\nif (ifNode.elseStmt == null && this.notCompletedNormally) {\nBLangExpression expr = ifNode.expr;\nboolean constTrueCondition =\nConditionResolver.checkConstCondition(types, symTable, expr) == symTable.trueType;\nif (!constTrueCondition) {\nSymbolEnv narrowedEnv = typeNarrower.evaluateFalsityFollowingIfWithoutElse(expr, env);\nthis.prevEnvs.push(narrowedEnv);\n}\nthis.notCompletedNormally = constTrueCondition;\n}\n}", + "context_before": "class representing a service-decl or object-ctor with service prefix\nAttachPoint.Point attachedPoint;\nSet flagSet = classDefinition.flagSet;\nif (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) {\nattachedPoint = AttachPoint.Point.SERVICE;\n}", + "context_after": "class representing a service-decl or object-ctor with service prefix\nAttachPoint.Point attachedPoint;\nSet flagSet = classDefinition.flagSet;\nif (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) {\nattachedPoint = AttachPoint.Point.SERVICE;\n}" + }, + { + "comment": "`cursorsOfFinishedSplits.keySet().removeAll(cursors.keySet())`", + "method_body": "public void notifyCheckpointComplete(long checkpointId) throws Exception {\nLOG.debug(\"Committing cursors for checkpoint {}\", checkpointId);\nMap cursors = cursorsToCommit.get(checkpointId);\ntry {\n((PulsarOrderedFetcherManager) splitFetcherManager).acknowledgeMessages(cursors);\nLOG.debug(\"Successfully acknowledge cursors for checkpoint {}\", checkpointId);\ncursorsOfFinishedSplits\n.entrySet()\n.removeIf(entry -> cursors.containsKey(entry.getKey()));\nwhile (!cursorsToCommit.isEmpty() && cursorsToCommit.firstKey() <= checkpointId) {\ncursorsToCommit.remove(cursorsToCommit.firstKey());\n}\n} catch (Exception e) {\nLOG.error(\"Failed to acknowledge cursors for checkpoint {}\", checkpointId, e);\n}\n}", + "target_code": ".removeIf(entry -> cursors.containsKey(entry.getKey()));", + "method_body_after": "public void notifyCheckpointComplete(long checkpointId) throws Exception {\nLOG.debug(\"Committing cursors for checkpoint {}\", checkpointId);\nMap cursors = cursorsToCommit.get(checkpointId);\ntry {\n((PulsarOrderedFetcherManager) splitFetcherManager).acknowledgeMessages(cursors);\nLOG.debug(\"Successfully acknowledge cursors for checkpoint {}\", checkpointId);\ncursorsOfFinishedSplits.keySet().removeAll(cursors.keySet());\ncursorsToCommit.headMap(checkpointId + 1).clear();\n} catch (Exception e) {\nLOG.error(\"Failed to acknowledge cursors for checkpoint {}\", checkpointId, e);\ncursorCommitThrowable.compareAndSet(null, e);\n}\n}", + "context_before": "class PulsarOrderedSourceReader extends PulsarSourceReaderBase {\nprivate static final Logger LOG = 
LoggerFactory.getLogger(PulsarOrderedSourceReader.class);\nprivate final SortedMap> cursorsToCommit;\nprivate final ConcurrentMap cursorsOfFinishedSplits;\nprivate ScheduledExecutorService cursorScheduler;\npublic PulsarOrderedSourceReader(\nFutureCompletingBlockingQueue>> elementsQueue,\nSupplier> splitReaderSupplier,\nConfiguration configuration,\nSourceReaderContext context,\nSourceConfiguration sourceConfiguration,\nPulsarClient pulsarClient,\nPulsarAdmin pulsarAdmin) {\nsuper(\nelementsQueue,\nnew PulsarOrderedFetcherManager<>(elementsQueue, splitReaderSupplier::get),\nconfiguration,\ncontext,\nsourceConfiguration,\npulsarClient,\npulsarAdmin);\nthis.cursorsToCommit = Collections.synchronizedSortedMap(new TreeMap<>());\nthis.cursorsOfFinishedSplits = new ConcurrentHashMap<>();\n}\n@Override\npublic void start() {\nsuper.start();\nif (sourceConfiguration.isEnableAutoAcknowledgeMessage()) {\nthis.cursorScheduler = Executors.newSingleThreadScheduledExecutor();\ncursorScheduler.scheduleAtFixedRate(\n() -> {\nMap cursors =\nnew HashMap<>(cursorsOfFinishedSplits);\nList splits = super.snapshotState(1L);\nfor (PulsarPartitionSplit split : splits) {\nMessageId latestConsumedId = split.getLatestConsumedId();\nif (latestConsumedId != null) {\ncursors.put(split.getPartition(), latestConsumedId);\n}\n}\ntry {\n((PulsarOrderedFetcherManager) splitFetcherManager)\n.acknowledgeMessages(cursors);\ncursorsOfFinishedSplits\n.entrySet()\n.removeIf(entry -> cursors.containsKey(entry.getKey()));\n} catch (Exception e) {\nLOG.error(\"Fail in auto cursor commit.\", e);\n}\n},\nsourceConfiguration.getMaxFetchTime().toMillis(),\nsourceConfiguration.getAutoCommitCursorInterval(),\nTimeUnit.MILLISECONDS);\n}\n}\n@Override\nprotected void onSplitFinished(Map finishedSplitIds) {\nif (LOG.isDebugEnabled()) {\nLOG.debug(\"onSplitFinished event: {}\", finishedSplitIds);\n}\nfor (Map.Entry entry : finishedSplitIds.entrySet()) {\nPulsarPartitionSplitState state = entry.getValue();\nMessageId latestConsumedId = state.getLatestConsumedId();\nif (latestConsumedId != null) {\ncursorsOfFinishedSplits.put(state.getPartition(), latestConsumedId);\n}\n}\n}\n@Override\npublic List snapshotState(long checkpointId) {\nList splits = super.snapshotState(checkpointId);\nif (splits.isEmpty() && cursorsOfFinishedSplits.isEmpty()) {\ncursorsToCommit.put(checkpointId, Collections.emptyMap());\n} else {\nMap cursors =\ncursorsToCommit.computeIfAbsent(checkpointId, id -> new HashMap<>());\nfor (PulsarPartitionSplit split : splits) {\nMessageId latestConsumedId = split.getLatestConsumedId();\nif (latestConsumedId != null) {\ncursors.put(split.getPartition(), latestConsumedId);\n}\n}\ncursors.putAll(cursorsOfFinishedSplits);\n}\nreturn splits;\n}\n@Override\n@Override\npublic void close() throws Exception {\nsuper.close();\ncursorScheduler.shutdown();\n}\n}", + "context_after": "class PulsarOrderedSourceReader extends PulsarSourceReaderBase {\nprivate static final Logger LOG = LoggerFactory.getLogger(PulsarOrderedSourceReader.class);\nprivate final SortedMap> cursorsToCommit;\nprivate final ConcurrentMap cursorsOfFinishedSplits;\nprivate final AtomicReference cursorCommitThrowable = new AtomicReference<>();\nprivate ScheduledExecutorService cursorScheduler;\npublic PulsarOrderedSourceReader(\nFutureCompletingBlockingQueue>> elementsQueue,\nSupplier> splitReaderSupplier,\nConfiguration configuration,\nSourceReaderContext context,\nSourceConfiguration sourceConfiguration,\nPulsarClient pulsarClient,\nPulsarAdmin pulsarAdmin) 
{\nsuper(\nelementsQueue,\nnew PulsarOrderedFetcherManager<>(elementsQueue, splitReaderSupplier::get),\nconfiguration,\ncontext,\nsourceConfiguration,\npulsarClient,\npulsarAdmin);\nthis.cursorsToCommit = Collections.synchronizedSortedMap(new TreeMap<>());\nthis.cursorsOfFinishedSplits = new ConcurrentHashMap<>();\n}\n@Override\npublic void start() {\nsuper.start();\nif (sourceConfiguration.isEnableAutoAcknowledgeMessage()) {\nthis.cursorScheduler = Executors.newSingleThreadScheduledExecutor();\ncursorScheduler.scheduleAtFixedRate(\nthis::cumulativeAcknowledgmentMessage,\nsourceConfiguration.getMaxFetchTime().toMillis(),\nsourceConfiguration.getAutoCommitCursorInterval(),\nTimeUnit.MILLISECONDS);\n}\n}\n@Override\npublic InputStatus pollNext(ReaderOutput output) throws Exception {\ncheckErrorAndRethrow();\nreturn super.pollNext(output);\n}\n@Override\nprotected void onSplitFinished(Map finishedSplitIds) {\nif (LOG.isDebugEnabled()) {\nLOG.debug(\"onSplitFinished event: {}\", finishedSplitIds);\n}\nfor (Map.Entry entry : finishedSplitIds.entrySet()) {\nPulsarPartitionSplitState state = entry.getValue();\nMessageId latestConsumedId = state.getLatestConsumedId();\nif (latestConsumedId != null) {\ncursorsOfFinishedSplits.put(state.getPartition(), latestConsumedId);\n}\n}\n}\n@Override\npublic List snapshotState(long checkpointId) {\nList splits = super.snapshotState(checkpointId);\nMap cursors =\ncursorsToCommit.computeIfAbsent(checkpointId, id -> new HashMap<>());\nfor (PulsarPartitionSplit split : splits) {\nMessageId latestConsumedId = split.getLatestConsumedId();\nif (latestConsumedId != null) {\ncursors.put(split.getPartition(), latestConsumedId);\n}\n}\ncursors.putAll(cursorsOfFinishedSplits);\nreturn splits;\n}\n@Override\n@Override\npublic void close() throws Exception {\nif (cursorScheduler != null) {\ncursorScheduler.shutdown();\n}\nsuper.close();\n}\nprivate void checkErrorAndRethrow() {\nThrowable cause = cursorCommitThrowable.get();\nif (cause != null) {\nthrow new RuntimeException(\"An error occurred in acknowledge message.\", cause);\n}\n}\n/** Acknowledge the pulsar topic partition cursor by the last consumed message id. 
*/\nprivate void cumulativeAcknowledgmentMessage() {\nMap cursors = new HashMap<>(cursorsOfFinishedSplits);\nList splits = super.snapshotState(1L);\nfor (PulsarPartitionSplit split : splits) {\nMessageId latestConsumedId = split.getLatestConsumedId();\nif (latestConsumedId != null) {\ncursors.put(split.getPartition(), latestConsumedId);\n}\n}\ntry {\n((PulsarOrderedFetcherManager) splitFetcherManager).acknowledgeMessages(cursors);\ncursorsOfFinishedSplits.keySet().removeAll(cursors.keySet());\n} catch (Exception e) {\nLOG.error(\"Fail in auto cursor commit.\", e);\ncursorCommitThrowable.compareAndSet(null, e);\n}\n}\n}" + }, + { + "comment": "See https://github.com/quarkusio/quarkus/blob/master/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeanBuildItem.java#L75 .", + "method_body": "public Supplier getCacheManagerSupplier(Set cacheNames) {\nObjects.requireNonNull(cacheNames);\nCacheManager cacheManager;\nif (cacheNames.isEmpty()) {\ncacheManager = new CacheManagerImpl(Collections.emptyMap());\n} else {\nMap caches = new HashMap<>(cacheNames.size() + 1, 1.0F);\nNoOpCache cache = new NoOpCache();\nfor (String cacheName : cacheNames) {\ncaches.put(cacheName, cache);\n}\ncacheManager = new CacheManagerImpl(caches);\n}\nreturn new Supplier() {\n@Override\npublic CacheManager get() {\nreturn cacheManager;\n}\n};\n}", + "target_code": "cacheManager = new CacheManagerImpl(caches);", + "method_body_after": "public Supplier getCacheManagerSupplier(Set cacheNames) {\nObjects.requireNonNull(cacheNames);\nreturn new Supplier() {\n@Override\npublic CacheManager get() {\nif (cacheNames.isEmpty()) {\nreturn new CacheManagerImpl(Collections.emptyMap());\n} else {\nMap caches = new HashMap<>(cacheNames.size() + 1, 1.0F);\nNoOpCache cache = new NoOpCache();\nfor (String cacheName : cacheNames) {\ncaches.put(cacheName, cache);\n}\nreturn new CacheManagerImpl(caches);\n}\n}\n};\n}", + "context_before": "class NoOpCacheBuildRecorder {\n}", + "context_after": "class NoOpCacheBuildRecorder {\n}" + }, + { + "comment": "could we simplify the case?", + "method_body": "public static void beforeClass() throws Exception {\nPlanTestBase.beforeClass();\nStarRocksAssert starRocksAssert = new StarRocksAssert(connectContext);\nstarRocksAssert.withTable(\"CREATE TABLE supplier_nullable ( S_SUPPKEY INTEGER NOT NULL,\\n\" +\n\" S_NAME CHAR(25) NOT NULL,\\n\" +\n\" S_ADDRESS VARCHAR(40), \\n\" +\n\" S_NATIONKEY INTEGER NOT NULL,\\n\" +\n\" S_PHONE CHAR(15) NOT NULL,\\n\" +\n\" S_ACCTBAL double NOT NULL,\\n\" +\n\" S_COMMENT VARCHAR(101) NOT NULL,\\n\" +\n\" PAD char(1) NOT NULL)\\n\" +\n\"ENGINE=OLAP\\n\" +\n\"DUPLICATE KEY(`s_suppkey`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 1\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\"\\n\" +\n\");\");\nstarRocksAssert.withTable(\"CREATE TABLE table_int (id_int INT, id_bigint BIGINT) \" +\n\"DUPLICATE KEY(`id_int`) \" +\n\"DISTRIBUTED BY HASH(`id_int`) BUCKETS 1 \" +\n\"PROPERTIES (\\\"replication_num\\\" = \\\"1\\\");\");\nstarRocksAssert.withTable(\"CREATE TABLE part_v2 ( P_PARTKEY INTEGER NOT NULL,\\n\" +\n\" P_NAME VARCHAR(55) NOT NULL,\\n\" +\n\" P_MFGR VARCHAR(25) NOT NULL,\\n\" +\n\" P_BRAND VARCHAR(10) NOT NULL,\\n\" +\n\" P_TYPE VARCHAR(25) NOT NULL,\\n\" +\n\" P_SIZE INTEGER NOT NULL,\\n\" +\n\" P_CONTAINER VARCHAR(10) NOT NULL,\\n\" +\n\" P_RETAILPRICE double NOT NULL,\\n\" +\n\" P_COMMENT VARCHAR(23) NOT NULL,\\n\" +\n\" PAD char(1) NOT 
NULL)\\n\" +\n\"ENGINE=OLAP\\n\" +\n\"DUPLICATE KEY(`p_partkey`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 10\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\"\\n\" +\n\");\");\nstarRocksAssert.withTable(\"CREATE TABLE lineorder_flat (\\n\" +\n\"LO_ORDERDATE date NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_ORDERKEY int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_LINENUMBER tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_CUSTKEY int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_PARTKEY int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_SUPPKEY int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_ORDERPRIORITY varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_SHIPPRIORITY tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_QUANTITY tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_EXTENDEDPRICE int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_ORDTOTALPRICE int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_DISCOUNT tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_REVENUE int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_SUPPLYCOST int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_TAX tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_COMMITDATE date NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_SHIPMODE varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_NAME varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_ADDRESS varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_CITY varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_NATION varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_REGION varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_PHONE varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_MKTSEGMENT varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_NAME varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_ADDRESS varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_CITY varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_NATION varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_REGION varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_PHONE varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_NAME varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_MFGR varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_CATEGORY varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_BRAND varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_COLOR varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_TYPE varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_SIZE tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_CONTAINER varchar(100) NOT NULL COMMENT \\\"\\\"\\n\" +\n\") ENGINE=OLAP\\n\" +\n\"DUPLICATE KEY(LO_ORDERDATE, LO_ORDERKEY)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(LO_ORDERKEY) BUCKETS 48\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\"\\n\" +\n\");\");\nstarRocksAssert.withTable(\"CREATE TABLE `low_card_t1` (\\n\" +\n\" `d_date` date ,\\n\" +\n\" `c_user` varchar(50) ,\\n\" +\n\" `c_dept` varchar(50) ,\\n\" +\n\" `c_par` varchar(50) ,\\n\" +\n\" `vst` varchar(3) ,\\n\" +\n\" `vc_busness_sec_type` varchar(5) ,\\n\" +\n\" `c_nodevalue` varchar(50) ,\\n\" +\n\" `c_brokername` varchar(50) ,\\n\" +\n\" `is_neworold` varchar(3) ,\\n\" +\n\" `f_asset` decimal128(20, 5) ,\\n\" +\n\" `f_asset_zb` decimal128(20, 5) ,\\n\" +\n\" `f_managerfee` decimal128(20, 5) ,\\n\" +\n\" `fee_zb` decimal128(20, 5) ,\\n\" +\n\" `f_icapital` decimal128(20, 5) ,\\n\" +\n\" `f_ocapital` decimal128(20, 5) ,\\n\" +\n\" `net_cap` decimal128(20, 5) ,\\n\" +\n\" 
`vc_custno_c` int(11) ,\\n\" +\n\" `c_fundacco_c` int(11) ,\\n\" +\n\" `c_new` int(11) ,\\n\" +\n\" `cpc` int(11) \\n\" +\n\") ENGINE=OLAP \\n\" +\n\"DUPLICATE KEY(`d_date`, `c_user`, `c_dept`, `c_par`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`d_date`, `c_user`, `c_dept`, `c_par`) BUCKETS 16 \\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\",\\n\" +\n\"\\\"storage_format\\\" = \\\"DEFAULT\\\",\\n\" +\n\"\\\"enable_persistent_index\\\" = \\\"false\\\",\\n\" +\n\"\\\"compression\\\" = \\\"LZ4\\\"\\n\" +\n\");\");\nstarRocksAssert.withTable(\"CREATE TABLE `low_card_t2` (\\n\" +\n\" `d_date` date ,\\n\" +\n\" `c_mr` varchar(40) ,\\n\" +\n\" `vst` varchar(3) ,\\n\" +\n\" `vc_busness_sec_type` varchar(5) ,\\n\" +\n\" `f_asset` decimal128(20, 5) ,\\n\" +\n\" `f_asset_zb` decimal128(20, 5) ,\\n\" +\n\" `f_managerfee` decimal128(20, 5) ,\\n\" +\n\" `fee_zb` decimal128(20, 5) ,\\n\" +\n\" `f_icapital` decimal128(20, 5) ,\\n\" +\n\" `f_ocapital` decimal128(20, 5) ,\\n\" +\n\" `net_cap` decimal128(20, 5) ,\\n\" +\n\" `vc_custno_c` int(11) ,\\n\" +\n\" `c_fundacco_c` int(11) ,\\n\" +\n\" `c_new` int(11) ,\\n\" +\n\" `cpc` int(11)\\n\" +\n\") ENGINE=OLAP \\n\" +\n\"DUPLICATE KEY(`d_date`, `c_mr`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`d_date`, `c_mr`) BUCKETS 16 \\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\",\\n\" +\n\"\\\"storage_format\\\" = \\\"DEFAULT\\\",\\n\" +\n\"\\\"enable_persistent_index\\\" = \\\"false\\\",\\n\" +\n\"\\\"compression\\\" = \\\"LZ4\\\"\\n\" +\n\");\");\nFeConstants.USE_MOCK_DICT_MANAGER = true;\nconnectContext.getSessionVariable().setSqlMode(2);\nconnectContext.getSessionVariable().setEnableLowCardinalityOptimize(true);\nconnectContext.getSessionVariable().setCboCteReuse(false);\n}", + "target_code": "starRocksAssert.withTable(\"CREATE TABLE `low_card_t1` (\\n\" +", + "method_body_after": "public static void beforeClass() throws Exception {\nPlanTestBase.beforeClass();\nStarRocksAssert starRocksAssert = new StarRocksAssert(connectContext);\nstarRocksAssert.withTable(\"CREATE TABLE supplier_nullable ( S_SUPPKEY INTEGER NOT NULL,\\n\" +\n\" S_NAME CHAR(25) NOT NULL,\\n\" +\n\" S_ADDRESS VARCHAR(40), \\n\" +\n\" S_NATIONKEY INTEGER NOT NULL,\\n\" +\n\" S_PHONE CHAR(15) NOT NULL,\\n\" +\n\" S_ACCTBAL double NOT NULL,\\n\" +\n\" S_COMMENT VARCHAR(101) NOT NULL,\\n\" +\n\" PAD char(1) NOT NULL)\\n\" +\n\"ENGINE=OLAP\\n\" +\n\"DUPLICATE KEY(`s_suppkey`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 1\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\"\\n\" +\n\");\");\nstarRocksAssert.withTable(\"CREATE TABLE table_int (id_int INT, id_bigint BIGINT) \" +\n\"DUPLICATE KEY(`id_int`) \" +\n\"DISTRIBUTED BY HASH(`id_int`) BUCKETS 1 \" +\n\"PROPERTIES (\\\"replication_num\\\" = \\\"1\\\");\");\nstarRocksAssert.withTable(\"CREATE TABLE part_v2 ( P_PARTKEY INTEGER NOT NULL,\\n\" +\n\" P_NAME VARCHAR(55) NOT NULL,\\n\" +\n\" P_MFGR VARCHAR(25) NOT NULL,\\n\" +\n\" P_BRAND VARCHAR(10) NOT NULL,\\n\" +\n\" P_TYPE VARCHAR(25) NOT NULL,\\n\" +\n\" P_SIZE INTEGER NOT NULL,\\n\" +\n\" P_CONTAINER VARCHAR(10) NOT NULL,\\n\" +\n\" P_RETAILPRICE double NOT NULL,\\n\" +\n\" P_COMMENT VARCHAR(23) NOT NULL,\\n\" +\n\" PAD char(1) NOT NULL)\\n\" +\n\"ENGINE=OLAP\\n\" +\n\"DUPLICATE KEY(`p_partkey`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" 
+\n\"DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 10\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\"\\n\" +\n\");\");\nstarRocksAssert.withTable(\"CREATE TABLE lineorder_flat (\\n\" +\n\"LO_ORDERDATE date NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_ORDERKEY int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_LINENUMBER tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_CUSTKEY int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_PARTKEY int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_SUPPKEY int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_ORDERPRIORITY varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_SHIPPRIORITY tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_QUANTITY tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_EXTENDEDPRICE int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_ORDTOTALPRICE int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_DISCOUNT tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_REVENUE int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_SUPPLYCOST int(11) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_TAX tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_COMMITDATE date NOT NULL COMMENT \\\"\\\",\\n\" +\n\"LO_SHIPMODE varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_NAME varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_ADDRESS varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_CITY varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_NATION varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_REGION varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_PHONE varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"C_MKTSEGMENT varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_NAME varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_ADDRESS varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_CITY varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_NATION varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_REGION varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"S_PHONE varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_NAME varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_MFGR varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_CATEGORY varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_BRAND varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_COLOR varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_TYPE varchar(100) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_SIZE tinyint(4) NOT NULL COMMENT \\\"\\\",\\n\" +\n\"P_CONTAINER varchar(100) NOT NULL COMMENT \\\"\\\"\\n\" +\n\") ENGINE=OLAP\\n\" +\n\"DUPLICATE KEY(LO_ORDERDATE, LO_ORDERKEY)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(LO_ORDERKEY) BUCKETS 48\\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\"\\n\" +\n\");\");\nstarRocksAssert.withTable(\"CREATE TABLE `low_card_t1` (\\n\" +\n\" `d_date` date ,\\n\" +\n\" `c_user` varchar(50) ,\\n\" +\n\" `c_dept` varchar(50) ,\\n\" +\n\" `c_par` varchar(50) ,\\n\" +\n\" `c_nodevalue` varchar(50) ,\\n\" +\n\" `c_brokername` varchar(50) ,\\n\" +\n\" `f_asset` decimal128(20, 5) ,\\n\" +\n\" `f_asset_zb` decimal128(20, 5) ,\\n\" +\n\" `f_managerfee` decimal128(20, 5) ,\\n\" +\n\" `fee_zb` decimal128(20, 5) ,\\n\" +\n\" `cpc` int(11) \\n\" +\n\") ENGINE=OLAP \\n\" +\n\"DUPLICATE KEY(`d_date`, `c_user`, `c_dept`, `c_par`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`d_date`, `c_user`, `c_dept`, `c_par`) BUCKETS 16 \\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\",\\n\" 
+\n\"\\\"storage_format\\\" = \\\"DEFAULT\\\",\\n\" +\n\"\\\"enable_persistent_index\\\" = \\\"false\\\",\\n\" +\n\"\\\"compression\\\" = \\\"LZ4\\\"\\n\" +\n\");\");\nstarRocksAssert.withTable(\"CREATE TABLE `low_card_t2` (\\n\" +\n\" `d_date` date ,\\n\" +\n\" `c_mr` varchar(40) ,\\n\" +\n\" `fee_zb` decimal128(20, 5) ,\\n\" +\n\" `c_new` int(11) ,\\n\" +\n\" `cpc` int(11)\\n\" +\n\") ENGINE=OLAP \\n\" +\n\"DUPLICATE KEY(`d_date`, `c_mr`)\\n\" +\n\"COMMENT \\\"OLAP\\\"\\n\" +\n\"DISTRIBUTED BY HASH(`d_date`, `c_mr`) BUCKETS 16 \\n\" +\n\"PROPERTIES (\\n\" +\n\"\\\"replication_num\\\" = \\\"1\\\",\\n\" +\n\"\\\"in_memory\\\" = \\\"false\\\",\\n\" +\n\"\\\"storage_format\\\" = \\\"DEFAULT\\\",\\n\" +\n\"\\\"enable_persistent_index\\\" = \\\"false\\\",\\n\" +\n\"\\\"compression\\\" = \\\"LZ4\\\"\\n\" +\n\");\");\nFeConstants.USE_MOCK_DICT_MANAGER = true;\nconnectContext.getSessionVariable().setSqlMode(2);\nconnectContext.getSessionVariable().setEnableLowCardinalityOptimize(true);\nconnectContext.getSessionVariable().setCboCteReuse(false);\n}", + "context_before": "class LowCardinalityTest extends PlanTestBase {\n@BeforeClass\n@AfterClass\npublic static void afterClass() {\nconnectContext.getSessionVariable().setSqlMode(0);\nconnectContext.getSessionVariable().setEnableLowCardinalityOptimize(false);\n}\n@Test\npublic void testOlapScanNodeOutputColumns() throws Exception {\nconnectContext.getSessionVariable().enableTrimOnlyFilteredColumnsInScanStage();\nString sql =\n\"SELECT C_CITY, S_CITY, year(LO_ORDERDATE) as year, sum(LO_REVENUE) AS revenue FROM lineorder_flat \" +\n\"WHERE C_CITY in ('UNITED KI1', 'UNITED KI5') AND S_CITY in ( 'UNITED KI1', 'UNITED\\n\" +\n\"KI5') AND LO_ORDERDATE >= '1997-12-01' AND LO_ORDERDATE <= '1997-12-31' GROUP BY C_CITY, S_CITY, year \" +\n\"ORDER BY year ASC, revenue DESC;\";\nString plan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\"unused_output_column_name:[]\"));\nconnectContext.getSessionVariable().disableTrimOnlyFilteredColumnsInScanStage();\n}\n@Test\npublic void testDecodeNodeRewrite() throws Exception {\nString sql = \"select\\n\" +\n\" 100.00 * sum(case\\n\" +\n\" when p_type like 'PROMO%'\\n\" +\n\" then l_extendedprice * (1 - l_discount)\\n\" +\n\" else 0\\n\" +\n\" end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue\\n\" +\n\"from\\n\" +\n\" lineitem,\\n\" +\n\" part\\n\" +\n\"where\\n\" +\n\" l_partkey = p_partkey\\n\" +\n\" and l_shipdate >= date '1997-02-01'\\n\" +\n\" and l_shipdate < date '1997-03-01';\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeNodeRewrite2() throws Exception {\nString sql = \"select\\n\" +\n\" p_brand,\\n\" +\n\" p_type,\\n\" +\n\" p_size,\\n\" +\n\" count(distinct ps_suppkey) as supplier_cnt\\n\" +\n\"from\\n\" +\n\" partsupp,\\n\" +\n\" part\\n\" +\n\"where\\n\" +\n\" p_partkey = ps_partkey\\n\" +\n\" and p_brand <> 'Brand\n\" and p_type not like 'PROMO BURNISHED%'\\n\" +\n\" and p_size in (31, 43, 9, 6, 18, 11, 25, 1)\\n\" +\n\" and ps_suppkey not in (\\n\" +\n\" select\\n\" +\n\" s_suppkey\\n\" +\n\" from\\n\" +\n\" supplier\\n\" +\n\" where\\n\" +\n\" s_comment like '%Customer%Complaints%'\\n\" +\n\")\\n\" +\n\"group by\\n\" +\n\" p_brand,\\n\" +\n\" p_type,\\n\" +\n\" p_size\\n;\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeNodeRewrite3() throws Exception {\nString sql = \"select L_COMMENT from lineitem group by L_COMMENT\";\nString plan = 
getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \\n\"));\n}\n@Test\npublic void testDecodeNodeRewrite4() throws Exception {\nString sql = \"select dept_name from dept group by dept_name,state\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 2:Project\\n\" +\n\" | : 4: dept_name\"));\n}\n@Test\npublic void testDecodeNodeRewrite5() throws Exception {\nString sql = \"select S_ADDRESS from supplier where S_ADDRESS \" +\n\"like '%Customer%Complaints%' group by S_ADDRESS \";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \"));\nAssert.assertTrue(\nplan.contains(\"PREDICATES: DictExpr(9: S_ADDRESS,[ LIKE '%Customer%Complaints%'])\"));\n}\n@Test\npublic void testDecodeNodeRewrite6() throws Exception {\nString sql = \"select count(S_ADDRESS) from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nAssert.assertTrue(plan.contains(\"count(10: S_ADDRESS)\"));\nsql = \"select count(distinct S_ADDRESS) from supplier\";\nconnectContext.getSessionVariable().setNewPlanerAggStage(4);\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nAssert.assertTrue(plan.contains(\"count(10: S_ADDRESS)\"));\nAssert.assertTrue(plan.contains(\"HASH_PARTITIONED: 10: S_ADDRESS\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testDecodeNodeRewriteMultiAgg()\nthrows Exception {\nboolean cboCteReuse = connectContext.getSessionVariable().isCboCteReuse();\nboolean enableLowCardinalityOptimize = connectContext.getSessionVariable().isEnableLowCardinalityOptimize();\nint newPlannerAggStage = connectContext.getSessionVariable().getNewPlannerAggStage();\nconnectContext.getSessionVariable().setCboCteReuse(false);\nconnectContext.getSessionVariable().setEnableLowCardinalityOptimize(true);\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\ntry {\nString sql = \"select count(distinct S_ADDRESS), count(distinct S_NATIONKEY) from supplier\";\nString plan = getVerboseExplain(sql);\nAssert.assertTrue(plan, plan.contains(\"dict_col=S_ADDRESS\"));\nsql = \"select count(distinct S_ADDRESS), count(distinct S_NATIONKEY) from supplier \" +\n\"having count(1) > 0\";\nplan = getVerboseExplain(sql);\nAssert.assertFalse(plan, plan.contains(\"dict_col=\"));\nAssert.assertFalse(plan, plan.contains(\"Decode\"));\n} finally {\nconnectContext.getSessionVariable().setCboCteReuse(cboCteReuse);\nconnectContext.getSessionVariable().setEnableLowCardinalityOptimize(enableLowCardinalityOptimize);\nconnectContext.getSessionVariable().setNewPlanerAggStage(newPlannerAggStage);\n}\n}\n@Test\npublic void testDecodeNodeRewrite7() throws Exception {\nString sql = \"select S_ADDRESS, count(S_ADDRESS) from supplier group by S_ADDRESS\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \"));\nString thrift = getThriftPlan(sql);\nAssert.assertTrue(thrift.contains(\"TGlobalDict(columnId:10, strings:[6D 6F 63 6B], ids:[1])\"));\n}\n@Test\npublic void testDecodeNodeRewrite8() throws Exception {\nString sql = \"select S_ADDRESS, count(S_ADDRESS) from supplier group by S_ADDRESS\";\nString plan = getCostExplain(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \\n\" +\n\" | cardinality: 1\\n\" +\n\" | column statistics: \\n\" +\n\" | * S_ADDRESS-->[-Infinity, Infinity, 0.0, 40.0, 10000.0] ESTIMATE\\n\" +\n\" | * count-->[0.0, 1.0, 0.0, 
8.0, 1.0] ESTIMATE\"));\n}\n@Test\npublic void testDecodeNodeRewrite9() throws Exception {\nString sql = \"select S_ADDRESS, upper(S_ADDRESS) from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" | : \\n\" +\n\" | : \"));\nString thriftPlan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\" | : \\n\" +\n\" | : \"));\nAssert.assertTrue(thriftPlan.contains(\"could_apply_dict_optimize:true\"));\nAssert.assertTrue(thriftPlan.contains(\"string_functions:{11=TExpr(nodes\"));\n}\n@Test\npublic void testDecodeRewrite9Scan() throws Exception {\nString sql = \"select S_ADDRESS from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeNodeRewrite10() throws Exception {\nString sql = \"select upper(S_ADDRESS) as a, count(*) from supplier group by a\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\" 3:Decode\\n\" +\n\" | : \"));\nAssert.assertTrue(plan.contains(\" : DictExpr(11: S_ADDRESS,[upper()])\"));\nsql = \"select S_ADDRESS, count(*) from supplier_nullable group by S_ADDRESS\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"group by: [10: S_ADDRESS, INT, true]\"));\n}\n@Test\npublic void testDecodeNodeRewriteMultiCountDistinct() throws Exception {\nString sql;\nString plan;\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql = \"select count(distinct a),count(distinct b) from (\" +\n\"select lower(upper(S_ADDRESS)) as a, upper(S_ADDRESS) as b, \" +\n\"count(*) from supplier group by a,b) as t \";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nAssert.assertTrue(plan.contains(\"7:AGGREGATE (merge finalize)\\n\" +\n\" | output: multi_distinct_count(12: count), multi_distinct_count(13: count)\"));\nsql = \"select count(distinct S_ADDRESS), count(distinct S_COMMENT) from supplier;\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" multi_distinct_count(11: S_ADDRESS), \" +\n\"multi_distinct_count(12: S_COMMENT)\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(3);\nsql = \"select max(S_ADDRESS), count(distinct S_ADDRESS) from supplier group by S_ADDRESS;\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 4:AGGREGATE (update finalize)\\n\" +\n\" | output: max(13: S_ADDRESS), count(11: S_ADDRESS)\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testDecodeNodeRewriteDistinct() throws Exception {\nString sql;\nString plan;\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\nsql = \"select count(distinct S_ADDRESS) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 1:AGGREGATE (update finalize)\\n\" +\n\" | aggregate: multi_distinct_count[([10: S_ADDRESS, INT, false]); \" +\n\"args: INT; result: BIGINT; args nullable: false; result nullable: false]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 3:AGGREGATE (merge finalize)\\n\" +\n\" | aggregate: multi_distinct_count[([9: count, VARBINARY, false]); \" +\n\"args: INT; result: BIGINT; args nullable: true; result nullable: false]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(3);\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 4:AGGREGATE (update serialize)\\n\" +\n\" | aggregate: count[([10: S_ADDRESS, INT, false]); args: INT; result: BIGINT; \" +\n\"args nullable: false; result nullable: 
false]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(4);\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 6:AGGREGATE (merge finalize)\\n\" +\n\" | aggregate: count[([9: count, BIGINT, false]); args: VARCHAR; result: BIGINT; \" +\n\"args nullable: true; result nullable: false]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\nsql = \"select count(distinct S_ADDRESS, S_COMMENT) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\"aggregate: count[(if[(3 IS NULL, NULL, [7, VARCHAR, false]); \" +\n\"args: BOOLEAN,VARCHAR,VARCHAR; result: VARCHAR; \" +\n\"args nullable: true; result nullable: true]); \" +\n\"args: VARCHAR; result: BIGINT; args nullable: true; result nullable: false]\\n\"));\nAssert.assertTrue(plan.contains(\" 4:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | cardinality: 1\"));\n}\n@Test\npublic void testDecodeNodeRewriteTwoPaseDistinct() throws Exception {\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nString sql = \"select count(distinct S_ADDRESS), count(distinct S_NATIONKEY) from supplier\";\nString plan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"3:AGGREGATE (merge finalize)\\n\" +\n\" | aggregate: multi_distinct_count[([9: count, VARBINARY, false]); \" +\n\"args: INT; result: BIGINT; args nullable: true; result nullable: false], \" +\n\"multi_distinct_count[([10: count, VARBINARY, false]); args: INT; result: BIGINT; \" +\n\"args nullable: true; result nullable: false]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testDecodeNodeRewriteTwoPhaseAgg() throws Exception {\nString sql = \"select lower(upper(S_ADDRESS)) as a, upper(S_ADDRESS) as b, count(*) from supplier group by a,b\";\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 1:Project\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[lower(upper())])\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[upper()])\"));\nAssert.assertFalse(plan.contains(\"common expressions\"));\nplan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\"global_dicts:[TGlobalDict(columnId:12, strings:[6D 6F 63 6B], ids:[1])]\"));\nAssert.assertTrue(plan.contains(\"global_dicts:[TGlobalDict(columnId:12, strings:[6D 6F 63 6B], ids:[1])]\"));\nsql = \"select count(*) from supplier group by S_ADDRESS\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nAssert.assertTrue(plan.contains(\" 3:AGGREGATE (merge finalize)\\n\" +\n\" | output: count(9: count)\\n\" +\n\" | group by: 10: S_ADDRESS\"));\nsql = \"select count(*) from supplier group by S_ADDRESS\";\nplan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\"global_dicts:[TGlobalDict(columnId:10, strings:[6D 6F 63 6B], ids:[1])\"));\nAssert.assertTrue(plan.contains(\"partition:TDataPartition(type:RANDOM, partition_exprs:[]), \" +\n\"query_global_dicts:[TGlobalDict(columnId:10, strings:[6D 6F 63 6B], ids:[1])\"));\nsql = \"select count(distinct S_NATIONKEY) from supplier group by S_ADDRESS\";\nplan = getThriftPlan(sql);\nSystem.out.println(plan);\nAssert.assertTrue(plan, plan.contains(\n\"partition:TDataPartition(type:RANDOM, partition_exprs:[]), \" +\n\"query_global_dicts:[TGlobalDict(columnId:10, strings:[6D 6F 63 6B], ids:[1])]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testDecodeRewriteTwoFunctions() throws Exception {\nString sql;\nString plan;\nsql = \"select substr(S_ADDRESS, 0, 
S_NATIONKEY), upper(S_ADDRESS) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select substr(S_ADDRESS, 0, 1), S_ADDRESS from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" | : \\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(10: S_ADDRESS,[substr(, 0, 1)])\"));\nsql = \"select substr(S_ADDRESS, 0, 1), lower(upper(S_ADDRESS)), S_ADDRESS from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(11: S_ADDRESS,[substr(, 0, 1)])\\n\" +\n\" | : DictExpr(11: S_ADDRESS,[lower(upper())])\"));\n}\n@Test\npublic void testDecodeRewrite1() throws Exception {\nString sql = \"select substr(S_ADDRESS, 0, S_NATIONKEY), S_ADDRESS from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeNodeTupleId() throws Exception {\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nString sql = \"select count(*), S_ADDRESS from supplier group by S_ADDRESS\";\nString plan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\"node_type:DECODE_NODE, num_children:1, limit:-1, row_tuples:[3]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testDecodeNodeRewrite11() throws Exception {\nString sql = \"select lower(upper(S_ADDRESS)) as a, count(*) from supplier group by a\";\nString plan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" : DictExpr(11: S_ADDRESS,[lower(upper())])\"));\nAssert.assertTrue(plan.contains(\"group by: [12: lower, INT, true]\"));\nsql = \"select lower(substr(S_ADDRESS, 0, 1)) as a, count(*) from supplier group by a\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(\nplan.contains(\" : DictExpr(11: S_ADDRESS,[lower(substr(, 0, 1))])\"));\nsql = \"select lower(upper(S_ADDRESS)) as a, upper(S_ADDRESS) as b, count(*) from supplier group by a,b\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[lower(upper())])\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[upper()])\"));\nsql = \"select lower(upper(S_ADDRESS)) as a, upper(S_ADDRESS) as b, count(*) from supplier group by S_ADDRESS\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[lower(upper())])\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[upper()])\"));\n}\n@Test\npublic void testDecodeNodeRewrite12() throws Exception {\nString sql;\nString plan;\nsql = \"select max(S_ADDRESS) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"Decode\"));\nsql = \"select min(S_ADDRESS) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"Decode\"));\nsql = \"select max(upper(S_ADDRESS)) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(11: S_ADDRESS,[upper()])\"));\nsql = \"select max(\\\"CONST\\\") from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeNodeRewrite13() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql;\nString plan;\nsql = \"select 
coalesce(l.S_ADDRESS,l.S_NATIONKEY) from supplier l join supplier r on l.s_suppkey = r.s_suppkey\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 4:Project\\n\" +\n\" | : coalesce(3, CAST(4: S_NATIONKEY AS VARCHAR))\"));\nAssert.assertTrue(plan.contains(\" 3:Decode\\n\" +\n\" | : \"));\nsql = \"select coalesce(l.S_ADDRESS,l.S_NATIONKEY),l.S_ADDRESS,r.S_ADDRESS \" +\n\"from supplier l join supplier r on l.s_suppkey = r.s_suppkey\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 4:Project\\n\" +\n\" | : 3\\n\" +\n\" | : 11\\n\" +\n\" | : coalesce(3, CAST(4: S_NATIONKEY AS VARCHAR))\"));\nAssert.assertTrue(plan.contains(\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | : \"));\nsql = \"select coalesce(l.S_ADDRESS,l.S_NATIONKEY), upper(l.S_ADDRESS), l.S_ADDRESS \" +\n\"from supplier l join supplier r on l.s_suppkey = r.s_suppkey\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select coalesce(l.S_ADDRESS,l.S_NATIONKEY), upper(r.P_MFGR),r.P_MFGR \" +\n\"from supplier l join part_v2 r on l.s_suppkey = r.P_PARTKEY\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\" 5:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(21: P_MFGR,[upper()])\"));\nAssert.assertTrue(plan.contains(\" 4:Project\\n\" +\n\" | : coalesce(3: S_ADDRESS, CAST(4: S_NATIONKEY AS VARCHAR))\\n\" +\n\" | : 21: P_MFGR\\n\" +\n\" | : DictExpr(21: P_MFGR,[upper()])\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\npublic void testDecodeNodeRewrite14() throws Exception {\nString sql;\nString plan;\nsql = \"select count(*), approx_count_distinct(S_ADDRESS) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select max(S_ADDRESS), approx_count_distinct(S_ADDRESS) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testWithCaseWhen() throws Exception {\nString sql;\nString plan;\nsql = \"select case when S_ADDRESS = 'key' then 1 else 0 end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"9 <-> DictExpr(10: S_ADDRESS,[if( = 'key', 1, 0)])\"));\nAssert.assertTrue(plan.contains(\"dict_col=S_ADDRESS\"));\nsql = \"select case when S_ADDRESS = 'key' then 1 when S_ADDRESS = '2' then 2 else 0 end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nsql =\n\"select case when S_ADDRESS = 'key' then 1 when S_ADDRESS = '2' then 2 else S_NATIONKEY end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nAssert.assertTrue(plan.contains(\n\" | 9 <-> CASE WHEN DictExpr(10: S_ADDRESS,[ = 'key']) \" +\n\"THEN 1 WHEN DictExpr(10: S_ADDRESS,[ = '2']) THEN 2 ELSE 4: S_NATIONKEY END\"));\nsql = \"select S_ADDRESS = 'key' , \" +\n\"case when S_ADDRESS = 'key' then 1 when S_ADDRESS = '2' then 2 else 3 end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 1:Project\\n\" +\n\" | output columns:\\n\" +\n\" | 9 <-> DictExpr(11: S_ADDRESS,[ = 'key'])\\n\" +\n\" | 10 <-> DictExpr(11: S_ADDRESS,[CASE WHEN = 'key' \" +\n\"THEN 1 WHEN = '2' THEN 2 ELSE 3 END])\\n\" +\n\" | cardinality: 1\"));\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nsql = \"select case when S_ADDRESS = 'key' then 'key1' when S_ADDRESS = '2' \" +\n\"then 'key2' else 'key3' end from supplier\";\nplan = 
getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \"));\nsql = \"select case when S_ADDRESS = 'key' then rand() when S_ADDRESS = '2' \" +\n\"then 'key2' else 'key3' end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" | 9 <-> CASE WHEN DictExpr(10: S_ADDRESS,[ = 'key']) \" +\n\"THEN CAST(rand() AS VARCHAR) \" +\n\"WHEN DictExpr(10: S_ADDRESS,[ = '2']) \" +\n\"THEN 'key2' ELSE 'key3' END\"));\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select if(S_ADDRESS = 'key', S_COMMENT, 'y') from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" | 9 <-> if[(DictExpr(10: S_ADDRESS,[ = 'key']), \" +\n\"DictExpr(11: S_COMMENT,[]), 'y'); \" +\n\"args: BOOLEAN,VARCHAR,VARCHAR; result: VARCHAR; args nullable: true; result nullable: true]\"));\n}\n@Test\npublic void testLeftJoinWithUnion() throws Exception {\nString sql;\nString plan;\nsql = \"SELECT subt1.S_ADDRESS\\n\" +\n\"FROM (\\n\" +\n\" SELECT S_ADDRESS, S_NATIONKEY\\n\" +\n\" FROM supplier\\n\" +\n\" ) subt1 LEFT ANTI\\n\" +\n\" JOIN (\\n\" +\n\" SELECT S_ADDRESS, S_NATIONKEY\\n\" +\n\" FROM supplier\\n\" +\n\" ) subt0 ON subt1.S_NATIONKEY = subt0.S_NATIONKEY \\n\" +\n\"WHERE true\\n\" +\n\"UNION ALL\\n\" +\n\"SELECT subt1.S_ADDRESS\\n\" +\n\"FROM (\\n\" +\n\" SELECT S_ADDRESS, S_NATIONKEY\\n\" +\n\" FROM supplier\\n\" +\n\" ) subt1 LEFT ANTI\\n\" +\n\" JOIN (\\n\" +\n\" SELECT S_ADDRESS, S_NATIONKEY\\n\" +\n\" FROM supplier\\n\" +\n\" ) subt0 ON subt1.S_NATIONKEY = subt0.S_NATIONKEY\\n\" +\n\"WHERE (NOT (true));\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 5:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 4:Project\\n\" +\n\" | : 34: S_ADDRESS\"));\n}\n@Test\npublic void testProject() throws Exception {\nString sql;\nString plan;\nsql = \"select cast (S_ADDRESS as datetime) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nsql = \"select substring(S_ADDRESS,1,2) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 11 <-> DictExpr(10: S_ADDRESS,[substring(, 1, 2)])\"));\nsql = \"select substring(S_ADDRESS, S_SUPPKEY, 2) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\"9 <-> substring[([3: S_ADDRESS, VARCHAR, false], [1: S_SUPPKEY, INT, false], 2); \" +\n\"args: VARCHAR,INT,INT; result: VARCHAR; args nullable: false; result nullable: true]\"));\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select substring(S_ADDRESS, S_ADDRESS, 1) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\"11 <-> DictExpr(10: S_ADDRESS,[substring(, CAST( AS INT), 1)])\"));\nsql = \"select substring(upper(S_ADDRESS), S_SUPPKEY, 2) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\"9 <-> substring[(DictExpr(10: S_ADDRESS,[upper()]), [1: S_SUPPKEY, INT, false], 2); \" +\n\"args: VARCHAR,INT,INT; result: VARCHAR; args nullable: true; result nullable: true]\"));\nsql = \"select concat(S_ADDRESS, S_COMMENT) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\" | 9 <-> concat[([3: S_ADDRESS, VARCHAR, false], [7: S_COMMENT, VARCHAR, false]); \" +\n\"args: VARCHAR; result: VARCHAR; args nullable: false; result nullable: true]\"));\nsql = \"select if(S_SUPPKEY='kks', upper(S_ADDRESS), S_COMMENT), upper(S_ADDRESS) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\" | 9 <-> 
if[(cast([1: S_SUPPKEY, INT, false] as VARCHAR(1048576)) = 'kks', \" +\n\"DictExpr(11: S_ADDRESS,[upper()]), DictExpr(12: S_COMMENT,[])); \" +\n\"args: BOOLEAN,VARCHAR,VARCHAR; result: VARCHAR; args nullable: true; result nullable: true]\\n\" +\n\" | 13 <-> DictExpr(11: S_ADDRESS,[upper()])\"));\nAssert.assertTrue(plan.contains(\"Decode\"));\nsql = \"select if(S_ADDRESS='kks', S_COMMENT, S_COMMENT) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\" | 9 <-> if[(DictExpr(10: S_ADDRESS,[ = 'kks']), [12: expr, VARCHAR(101), true], \" +\n\"[12: expr, VARCHAR(101), true]); args: BOOLEAN,VARCHAR,VARCHAR; result: VARCHAR; \" +\n\"args nullable: true; result nullable: true]\\n\" +\n\" | common expressions:\\n\" +\n\" | 12 <-> DictExpr(11: S_COMMENT,[])\"));\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql =\n\"select if(S_ADDRESS='kks', upper(S_COMMENT), S_COMMENT), concat(upper(S_COMMENT), S_ADDRESS) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" | output columns:\\n\" +\n\" | 9 <-> if[(DictExpr(11: S_ADDRESS,[ = 'kks']), [13: expr, VARCHAR, true], \" +\n\"DictExpr(12: S_COMMENT,[])); args: BOOLEAN,VARCHAR,VARCHAR; result: VARCHAR; \" +\n\"args nullable: true; result nullable: true]\\n\" +\n\" | 10 <-> concat[([13: expr, VARCHAR, true], DictExpr(11: S_ADDRESS,[])); \" +\n\"args: VARCHAR; result: VARCHAR; args nullable: true; result nullable: true]\"));\nAssert.assertTrue(plan.contains(\" | common expressions:\\n\" +\n\" | 13 <-> DictExpr(12: S_COMMENT,[upper()])\"));\nsql = \"select REVERSE(SUBSTR(LEFT(REVERSE(S_ADDRESS),INSTR(REVERSE(S_ADDRESS),'/')-1),5)) FROM supplier\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 1:Project\\n\" +\n\" | : reverse(substr(left(11: expr, CAST(CAST(instr(11: expr, '/') AS BIGINT)\" +\n\" - 1 AS INT)), 5))\\n\" +\n\" | common expressions:\\n\" +\n\" | : DictExpr(10: S_ADDRESS,[reverse()])\");\n}\n@Test\npublic void testScanPredicate() throws Exception {\nString sql;\nString plan;\nsql = \"select count(*) from \" +\n\"supplier where S_ADDRESS like '%A%' and S_ADDRESS not like '%B%'\";\nplan = getCostExplain(sql);\nAssert.assertFalse(plan.contains(\" dict_col=S_ADDRESS \"));\nsql = \"select * from supplier l join supplier r on \" +\n\"l.S_NAME = r.S_NAME where upper(l.S_ADDRESS) like '%A%' and upper(l.S_ADDRESS) not like '%B%'\";\nplan = getCostExplain(sql);\nassertContains(plan, \"0:OlapScanNode\\n\" +\n\" table: supplier, rollup: supplier\\n\" +\n\" preAggregation: on\\n\" +\n\" Predicates: upper(3: S_ADDRESS) LIKE '%A%', NOT (upper(3: S_ADDRESS) LIKE '%B%')\\n\" +\n\" dict_col=S_COMMENT\");\nsql = \"select count(*) from supplier where S_ADDRESS = 'kks' group by S_ADDRESS \";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"DictExpr(10: S_ADDRESS,[ = 'kks'])\"));\nAssert.assertTrue(plan.contains(\"group by: 10: S_ADDRESS\"));\nsql = \"select count(*) from supplier where S_ADDRESS + 2 > 'kks' group by S_ADDRESS\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"group by: 3: S_ADDRESS\"));\nsql = \"select count(*) from supplier where if(S_ADDRESS = 'kks', true, false)\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan,\nplan.contains(\"PREDICATES: DictExpr(12: S_ADDRESS,[if( = 'kks', TRUE, FALSE)])\"));\nsql = \"select count(*) from supplier where if(S_ADDRESS = 'kks', cast(S_ADDRESS as boolean), false)\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\n\"PREDICATES: DictExpr(12: S_ADDRESS,[if( = 'kks', CAST( AS BOOLEAN), 
FALSE)])\"));\nsql = \"select count(*) from supplier where if(S_ADDRESS = 'kks',cast(S_COMMENT as boolean), false)\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\n\"PREDICATES: if(DictExpr(12: S_ADDRESS,[ = 'kks']), \" +\n\"DictExpr(13: S_COMMENT,[CAST( AS BOOLEAN)]), FALSE)\"));\nsql = \"select count(*) from supplier where if(S_ADDRESS = 'kks',cast(S_NAME as boolean), false)\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\n\"PREDICATES: if(DictExpr(12: S_ADDRESS,[ = 'kks']), CAST(2: S_NAME AS BOOLEAN), FALSE)\"));\nsql = \"select count(*) from supplier where S_ADDRESS = 'kks' and S_COMMENT not like '%kks%'\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\n\"PREDICATES: DictExpr(12: S_ADDRESS,[ = 'kks']), NOT (7: S_COMMENT LIKE '%kks%')\"));\nsql = \"select count(*) from supplier where if(S_ADDRESS = 'kks',cast(S_COMMENT as boolean), false) \" +\n\"and S_COMMENT not like '%kks%'\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\n\"PREDICATES: if(DictExpr(12: S_ADDRESS,[ = 'kks']), \" +\n\"CAST(7: S_COMMENT AS BOOLEAN), FALSE), NOT (7: S_COMMENT LIKE '%kks%')\"));\n}\n@Test\npublic void testAggHaving() throws Exception {\nString sql = \"select count(*) from supplier group by S_ADDRESS having S_ADDRESS = 'kks' \";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"DictExpr(10: S_ADDRESS,[ = 'kks'])\"));\nAssert.assertTrue(plan.contains(\"group by: 10: S_ADDRESS\"));\nsql = \"select count(*) as b from supplier group by S_ADDRESS having b > 3\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\" | group by: 10: S_ADDRESS\\n\" +\n\" | having: 9: count > 3\"));\nsql = \"select sum(S_NATIONKEY) a, sum(S_ACCTBAL) as b, S_ADDRESS as c from supplier group by S_ADDRESS \" +\n\"having a < b*1.2 or c not like '%open%'\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"SELECT count(*) a FROM supplier having max(S_ADDRESS)='123'\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testJoin() throws Exception {\nString sql;\nString plan;\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql =\n\"select count(*) from supplier l \" +\n\"join [shuffle] (select max(S_ADDRESS) as S_ADDRESS from supplier) r \" +\n\"on l.S_ADDRESS = r.S_ADDRESS;\";\nplan = getVerboseExplain(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql =\n\"select count(*) from supplier l \" +\n\"join [broadcast] (select max(S_ADDRESS) as S_ADDRESS from supplier) r \" +\n\"on l.S_ADDRESS = r.S_ADDRESS;\";\nplan = getVerboseExplain(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select count(*) from supplier l \" +\n\"join [broadcast] (select max(id_int) as id_int from table_int) r \" +\n\"on l.S_ADDRESS = r.id_int where l.S_ADDRESS not like '%key%'\";\nplan = getVerboseExplain(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select *\\n\" +\n\"from(\\n\" +\n\" select S_SUPPKEY,\\n\" +\n\" S_NATIONKEY\\n\" +\n\" from supplier\\n\" +\n\" ) l\\n\" +\n\" right outer join [shuffle] (\\n\" +\n\" select S_SUPPKEY,\\n\" +\n\" max(S_ADDRESS) as MS\\n\" +\n\" from supplier_nullable\\n\" +\n\" group by S_SUPPKEY\\n\" +\n\" ) r on l.S_SUPPKEY = r.S_SUPPKEY\\n\" +\n\" and l.S_NATIONKEY = r.MS;\";\nplan = getVerboseExplain(sql);\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\nAssert.assertTrue(plan.contains(\"OutPut Partition: HASH_PARTITIONED: 9: S_SUPPKEY, 17\"));\nsql = \"select 
* from test.join1 right join test.join2 on join1.id = join2.id where round(2.0, 0) > 3.0\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 5:Decode\\n\" +\n\" | : \\n\" +\n\" | : \"));\nsql = \"SELECT * \\n\" +\n\"FROM emp \\n\" +\n\"WHERE EXISTS (SELECT dept.dept_id \\n\" +\n\" FROM dept \\n\" +\n\" WHERE emp.dept_id = dept.dept_id \\n\" +\n\" ORDER BY state) \\n\" +\n\"ORDER BY hiredate\";\nString planFragment = getFragmentPlan(sql);\nAssert.assertTrue(planFragment.contains(\" 5:Decode\\n\" +\n\" | : \"));\nsql = \"select * from join1 join pushdown_test on join1.id = pushdown_test.k1;\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 6:Decode\\n\" +\n\" | : \\n\" +\n\" | : \"));\nAssert.assertTrue(plan.contains(\"INNER JOIN (BROADCAST)\"));\nsql = \"select part_v2.p_partkey from lineitem join part_v2 on L_COMMENT = hex(P_NAME);\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select * from supplier l join supplier_nullable r where l.S_SUPPKEY = r.S_SUPPKEY \" +\n\"order by l.S_ADDRESS limit 10\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 4:TOP-N\\n\" +\n\" | order by: 17: S_ADDRESS ASC\\n\" +\n\" | offset: 0\\n\" +\n\" | limit: 10\\n\" +\n\" | \\n\" +\n\" 3:HASH JOIN\\n\" +\n\" | join op: INNER JOIN (BROADCAST)\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | equal join conjunct: 1: S_SUPPKEY = 9: S_SUPPKEY\");\nsql = \"select max(S_ADDRESS), max(S_COMMENT) from \" +\n\"( select l.S_ADDRESS as S_ADDRESS,r.S_COMMENT as S_COMMENT,l.S_SUPPKEY from supplier l \" +\n\"join supplier_nullable r \" +\n\" on l.S_SUPPKEY = r.S_SUPPKEY ) tb group by S_SUPPKEY\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 8:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 7:Project\\n\" +\n\" | : 21: S_ADDRESS\\n\" +\n\" | : 22: S_COMMENT\\n\" +\n\" | \\n\" +\n\" 6:AGGREGATE (update finalize)\\n\" +\n\" | output: max(19: S_ADDRESS), max(20: S_COMMENT)\\n\" +\n\" | group by: 1: S_SUPPKEY\");\nplan = getThriftPlan(sql);\nAssert.assertEquals(plan.split(\"\\n\").length, 3);\nassertContains(plan.split(\"\\n\")[0], \"query_global_dicts:\" +\n\"[TGlobalDict(columnId:19, strings:[6D 6F 63 6B], ids:[1]), \" +\n\"TGlobalDict(columnId:20, strings:[6D 6F 63 6B], ids:[1]), \" +\n\"TGlobalDict(columnId:21, strings:[6D 6F 63 6B], ids:[1]), \" +\n\"TGlobalDict(columnId:22, strings:[6D 6F 63 6B], ids:[1])])\");\nsql = \"select upper(ST_S_ADDRESS),\\n\" +\n\" upper(ST_S_COMMENT)\\n\" +\n\"from (\\n\" +\n\" select ST_S_ADDRESS, ST_S_COMMENT\\n\" +\n\" from (\\n\" +\n\" select l.S_ADDRESS as ST_S_ADDRESS,\\n\" +\n\" l.S_COMMENT ST_S_COMMENT,\\n\" +\n\" l.S_SUPPKEY S_SUPPKEY,\\n\" +\n\" l.S_NATIONKEY S_NATIONKEY\\n\" +\n\" from supplier l\\n\" +\n\" join [shuffle] supplier m on l.S_SUPPKEY = m.S_SUPPKEY\\n\" +\n\" order by l.S_ADDRESS\\n\" +\n\" limit 10\\n\" +\n\" ) star join [shuffle] supplier r on star.S_NATIONKEY = r.S_NATIONKEY\\n\" +\n\" union select 1,2\\n\" +\n\" ) sys\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 20:AGGREGATE (update serialize)\\n\" +\n\" | STREAMING\\n\" +\n\" | group by: 30: S_ADDRESS, 31: S_COMMENT\\n\" +\n\" | \\n\" +\n\" 0:UNION\\n\" +\n\" | \\n\" +\n\" |----19:EXCHANGE\\n\" +\n\" | \\n\" +\n\" 16:EXCHANGE\");\nassertContains(plan, \"Decode\");\nplan = getThriftPlan(sql);\nassertNotContains(plan.split(\"\\n\")[1], \"query_global_dicts\");\n}\n@Test\npublic void testJoinGlobalDict() throws Exception {\nString sql =\n\"select part_v2.P_COMMENT from lineitem join part_v2 \" 
+\n\"on L_PARTKEY = p_partkey where p_mfgr = 'MFGR\nString plan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\"dict_string_id_to_int_ids:{}\"));\nAssert.assertTrue(plan.contains(\"DictExpr(28: P_MFGR,[ IN ('MFGR\nAssert.assertTrue(plan.contains(\"RESULT_SINK, result_sink:TResultSink(type:MYSQL_PROTOCAL)), \" +\n\"partition:TDataPartition(type:RANDOM, partition_exprs:[]), \" +\n\"query_global_dicts:[TGlobalDict(columnId:28\"));\nAssert.assertTrue(\nplan.contains(\"TDataPartition(type:UNPARTITIONED, partition_exprs:[]), is_merge:false, dest_dop:0)), \" +\n\"partition:TDataPartition(type:RANDOM, partition_exprs:[]), \" +\n\"query_global_dicts:[TGlobalDict(columnId:28\"));\n}\n@Test\npublic void testCountDistinctMultiColumns() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql = \"select count(distinct S_SUPPKEY, S_COMMENT) from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"2:Decode\\n\" +\n\" | : \"));\nAssert.assertTrue(plan.contains(\":AGGREGATE (update serialize)\\n\" +\n\" | output: count(if(1: S_SUPPKEY IS NULL, NULL, 7))\"));\nsql = \"select count(distinct S_ADDRESS, S_COMMENT) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"4:Decode\\n\" +\n\" | : \\n\" +\n\" | : \"));\nAssert.assertTrue(plan.contains(\" 5:AGGREGATE (update serialize)\\n\" +\n\" | output: count(if(3 IS NULL, NULL, 7))\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\npublic void testGroupByWithOrderBy() throws Exception {\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nString sql;\nString plan;\nsql = \"select max(S_NAME) as b from supplier group by S_ADDRESS order by b\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"group by: 10: S_ADDRESS\"));\nsql = \"select S_ADDRESS from supplier order by S_ADDRESS\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 1:SORT\\n\" +\n\" | order by: [9, INT, false] ASC\"));\nsql = \"select S_NAME from supplier_nullable order by upper(S_ADDRESS), S_NAME\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 2:SORT\\n\" +\n\" | order by: [11, INT, true] ASC, [2, VARCHAR, false] ASC\"));\nsql = \"select substr(S_ADDRESS, 0, 1) from supplier group by substr(S_ADDRESS, 0, 1) \" +\n\"order by substr(S_ADDRESS, 0, 1)\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 7:Decode\\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(10: S_ADDRESS,[substr(, 0, 1)])\"));\nAssert.assertTrue(plan.contains(\" 5:SORT\\n\" +\n\" | order by: [11, INT, true] ASC\"));\nsql = \"select approx_count_distinct(S_ADDRESS), upper(S_ADDRESS) from supplier \" +\n\" group by upper(S_ADDRESS)\" +\n\"order by 2\";\nplan = getVerboseExplain(sql);\nassertContains(plan, \" 3:AGGREGATE (update serialize)\\n\" +\n\" | STREAMING\\n\" +\n\" | aggregate: approx_count_distinct[([3, VARCHAR, false]);\");\nassertContains(plan, \"2:Decode\\n\" +\n\" | : \\n\" +\n\" | : \");\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testAnalytic() throws Exception {\nString sql;\nString plan;\nsql = \"select sum(rm) from (\" +\n\"select row_number() over( partition by L_COMMENT order by L_PARTKEY) as rm from lineitem\" +\n\") t where rm < 10\";\nplan = getCostExplain(sql);\nAssert.assertTrue(plan.contains(\" 2:SORT\\n\" +\n\" | order by: [20, INT, false] ASC, [2, INT, false] ASC\"));\nAssert.assertTrue(plan.contains(\" 1:PARTITION-TOP-N\\n\" +\n\" | partition by: [20: L_COMMENT, INT, false] 
\"));\nAssert.assertTrue(plan.contains(\" | order by: [20, INT, false] ASC, [2, INT, false] ASC\"));\nsql = \"select * from (select L_COMMENT,l_quantity, row_number() over \" +\n\"(partition by L_COMMENT order by l_quantity desc) rn from lineitem )t where rn <= 10;\";\nplan = getCostExplain(sql);\nassertContains(plan, \" 1:PARTITION-TOP-N\\n\" +\n\" | partition by: [19: L_COMMENT, INT, false] \\n\" +\n\" | partition limit: 10\\n\" +\n\" | order by: [19, INT, false] ASC, [5, DOUBLE, false] DESC\\n\" +\n\" | offset: 0\");\nsql = \"select * from (select L_COMMENT,l_quantity, rank() over \" +\n\"(partition by L_COMMENT order by l_quantity desc) rn from lineitem )t where rn <= 10;\";\nplan = getCostExplain(sql);\nassertContains(plan, \" 1:PARTITION-TOP-N\\n\" +\n\" | type: RANK\\n\" +\n\" | partition by: [19: L_COMMENT, INT, false] \\n\" +\n\" | partition limit: 10\\n\" +\n\" | order by: [19, INT, false] ASC, [5, DOUBLE, false] DESC\");\nsql = \"select * from (select L_COMMENT,l_quantity, rank() over \" +\n\"(partition by L_COMMENT, l_shipmode order by l_quantity desc) rn from lineitem )t where rn <= 10;\";\nplan = getCostExplain(sql);\nassertContains(plan, \" 1:PARTITION-TOP-N\\n\" +\n\" | type: RANK\\n\" +\n\" | partition by: [19: L_COMMENT, INT, false] , [15: L_SHIPMODE, CHAR, false] \\n\" +\n\" | partition limit: 10\\n\" +\n\" | order by: [19, INT, false] ASC, [15, VARCHAR, false] ASC, [5, DOUBLE, false] DESC\\n\" +\n\" | offset: 0\");\n}\n@Test\npublic void testProjectionPredicate() throws Exception {\nString sql = \"select count(t.a) from(select S_ADDRESS in ('kks', 'kks2') as a from supplier) as t\";\nString plan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nAssert.assertTrue(plan.contains(\"9 <-> DictExpr(11: S_ADDRESS,[ IN ('kks', 'kks2')])\"));\nsql = \"select count(t.a) from(select S_ADDRESS = 'kks' as a from supplier) as t\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nAssert.assertTrue(plan.contains(\"9 <-> DictExpr(11: S_ADDRESS,[ = 'kks'])\"));\nsql = \"select count(t.a) from(select S_ADDRESS is null as a from supplier) as t\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nAssert.assertTrue(plan.contains(\"9 <-> DictExpr(11: S_ADDRESS,[ IS NULL])\"));\nsql = \"select count(t.a) from(select S_ADDRESS is not null as a from supplier) as t\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nAssert.assertTrue(plan.contains(\"9 <-> DictExpr(11: S_ADDRESS,[ IS NOT NULL])\"));\nsql = \"select count(t.a) from(select S_ADDRESS <=> 'kks' as a from supplier) as t\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"[3: S_ADDRESS, VARCHAR, false] <=> 'kks'\"));\nsql = \"select S_ADDRESS not like '%key%' from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertFalse(plan, plan.contains(\" dict_col=S_ADDRESS\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql = \"select count(distinct S_ADDRESS), count(distinct S_NAME) as a from supplier_nullable\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"multi_distinct_count[([9: count, VARBINARY, false]);\"));\nAssert.assertTrue(plan.contains(\"multi_distinct_count[([11: S_ADDRESS, INT, true]);\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testNestedExpressions() throws Exception {\nString sql;\nString 
plan;\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql = \"select upper(lower(S_ADDRESS)) from supplier group by lower(S_ADDRESS);\";\nplan = getVerboseExplain(sql);\nassertContains(plan, \"6:Project\\n\" +\n\" | output columns:\\n\" +\n\" | 10 <-> upper[([9, VARCHAR, true]); args: VARCHAR; result: VARCHAR; \" +\n\"args nullable: true; result nullable: true]\\n\" +\n\" | cardinality: 1\\n\" +\n\" | \\n\" +\n\" 5:Decode\\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(11: S_ADDRESS,[lower()])\");\nAssert.assertTrue(plan.contains(\" 4:AGGREGATE (merge finalize)\\n\" +\n\" | group by: [12: lower, INT, true]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testMultiMaxMin() throws Exception {\nString sql;\nString plan;\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql = \"select count(distinct S_ADDRESS), max(S_ADDRESS), count(distinct S_SUPPKEY) as a from supplier_nullable\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"1:AGGREGATE (update serialize)\\n\" +\n\" | aggregate: multi_distinct_count[([12: S_ADDRESS, INT, true]);\"));\nAssert.assertTrue(plan.contains(\"3:AGGREGATE (merge finalize)\\n\" +\n\" | aggregate: multi_distinct_count[([9: count, VARBINARY, false]);\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql = \"select min(distinct S_ADDRESS), max(S_ADDRESS) from supplier_nullable\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 1:AGGREGATE (update serialize)\\n\" +\n\" | output: min(11: S_ADDRESS), max(11: S_ADDRESS)\"));\nAssert.assertTrue(plan.contains(\" 3:AGGREGATE (merge finalize)\\n\" +\n\" | output: min(12: S_ADDRESS), max(13: S_ADDRESS)\"));\nAssert.assertTrue(plan.contains(\" 4:Decode\\n\" +\n\" | : \\n\" +\n\" | : \"));\nsql = \"select max(upper(S_ADDRESS)) from supplier_nullable\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 5:Decode\\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(11: S_ADDRESS,[upper()])\\n\" +\n\" | \"));\nsql = \"select max(if(S_ADDRESS='kks', upper(S_COMMENT), S_COMMENT)), \" +\n\"min(upper(S_COMMENT)) from supplier_nullable \" +\n\"group by upper(S_COMMENT)\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\"6:Decode\\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(14: S_COMMENT,[upper()])\\n\" +\n\" | \\n\" +\n\" 5:Project\\n\" +\n\" | : 11: max\\n\" +\n\" | : 17: upper\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testSubqueryWithLimit() throws Exception {\nString sql = \"select t0.S_ADDRESS from (select S_ADDRESS, S_NATIONKEY from supplier_nullable limit 10) t0\" +\n\" inner join supplier on t0.S_NATIONKEY = supplier.S_NATIONKEY;\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \" 2:Decode\\n\" +\n\" | : \\n\");\n}\n@Test\npublic void testDecodeWithCast() throws Exception {\nString sql = \"select reverse(conv(cast(S_ADDRESS as bigint), NULL, NULL)) from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nAssert.assertTrue(plan.contains(\"reverse(conv(CAST(3: S_ADDRESS AS BIGINT), NULL, NULL))\"));\n}\n@Test\npublic void testAssignWrongNullableProperty() throws Exception {\nString sql;\nString plan;\nsql = \"SELECT S_ADDRESS, Dense_rank() OVER ( ORDER BY S_SUPPKEY) \" +\n\"FROM supplier UNION SELECT S_ADDRESS, Dense_rank() OVER ( ORDER BY 
S_SUPPKEY) FROM supplier;\";\nplan = getCostExplain(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select rank() over (order by S_ADDRESS) as rk from supplier_nullable\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 4:ANALYTIC\\n\" +\n\" | functions: [, rank(), ]\\n\" +\n\" | order by: 3 ASC\\n\" +\n\" | window: RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW\\n\" +\n\" | \\n\" +\n\" 3:Decode\\n\" +\n\" | : \");\nsql = \"select S_ADDRESS, S_COMMENT from (select S_ADDRESS, \" +\n\"S_COMMENT from supplier_nullable order by S_COMMENT limit 10) tb where S_ADDRESS = 'SS' order by S_ADDRESS \";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 5:SORT\\n\" +\n\" | order by: 3 ASC\\n\" +\n\" | offset: 0\\n\" +\n\" | \\n\" +\n\" 4:SELECT\\n\" +\n\" | predicates: 3 = 'SS'\\n\" +\n\" | \\n\" +\n\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | : \");\n}\n@Test\npublic void testHavingAggFunctionOnConstant() throws Exception {\nString sql = \"select S_ADDRESS from supplier GROUP BY S_ADDRESS HAVING (cast(count(null) as string)) IN (\\\"\\\")\";\nString plan = getCostExplain(sql);\nassertContains(plan, \"1:AGGREGATE (update finalize)\\n\" +\n\" | aggregate: count[(NULL); args: BOOLEAN; result: BIGINT; args nullable: true; result nullable: false]\\n\" +\n\" | group by: [10: S_ADDRESS, INT, false]\");\nassertContains(plan, \" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | cardinality: 1\");\n}\n@Test\npublic void testDecodeWithLimit() throws Exception {\nString sql = \"select count(*), S_ADDRESS from supplier group by S_ADDRESS limit 10\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \" 3:Decode\\n\" +\n\" | : \\n\");\n}\n@Test\npublic void testNoDecode() throws Exception {\nString sql = \"select *, to_bitmap(S_SUPPKEY) from supplier limit 1\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select hex(10), s_address from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"SELECT SUM(count) FROM (SELECT CAST((CAST((((\\\"C\\\")||(CAST(s_address AS STRING ) ))) \" +\n\"BETWEEN (((\\\"T\\\")||(\\\"\\\"))) AND (\\\"\\\") AS BOOLEAN) = true) \" +\n\"AND (CAST((((\\\"C\\\")||(CAST(s_address AS STRING ) ))) BETWEEN (((\\\"T\\\")||(\\\"\\\"))) \" +\n\"AND (\\\"\\\") AS BOOLEAN) IS NOT NULL) AS INT) as count FROM supplier ) t;\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql =\n\"SELECT SUM(count) FROM (SELECT CAST((CAST((s_address) BETWEEN (((CAST(s_address AS STRING ) )||(\\\"\\\"))) \" +\n\"AND (s_address) AS BOOLEAN) = true) AND (CAST((s_address) \" +\n\"BETWEEN (((CAST(s_address AS STRING ) )||(\\\"\\\"))) AND (s_address) AS BOOLEAN) IS NOT NULL) AS INT) \" +\n\"as count FROM supplier ) t;\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeOnExchange() throws Exception {\nString sql = \" SELECT \\n\" +\n\" DISTINCT * \\n\" +\n\"FROM \\n\" +\n\" (\\n\" +\n\" SELECT \\n\" +\n\" DISTINCT t1.v4 \\n\" +\n\" FROM \\n\" +\n\" t1, \\n\" +\n\" test_all_type as t2, \\n\" +\n\" test_all_type as t0 \\n\" +\n\" WHERE \\n\" +\n\" NOT (\\n\" +\n\" (t2.t1a) != (\\n\" +\n\" concat(t0.t1a, \\\"ji\\\")\\n\" +\n\" )\\n\" +\n\" ) \\n\" +\n\" ) t;\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testProjectWithUnionEmptySet() throws Exception {\nString sql;\nString plan;\nsql = \"select t1a from test_all_type group by t1a union all select v4 from t1 
where false\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 3:Decode\\n\" +\n\" | : \"));\nAssert.assertTrue(plan.contains(\" 2:Project\\n\" +\n\" | : 16: t1a\"));\nsql = \"SELECT 'all', 'allx' where 1 = 2 union all select distinct S_ADDRESS, S_ADDRESS from supplier;\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 3:Project\\n\" +\n\" | : 8\\n\" +\n\" | : clone(8)\\n\" +\n\" | \\n\" +\n\" 2:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 1:AGGREGATE (update finalize)\\n\" +\n\" | group by: 16: S_ADDRESS\");\nsql = \"SELECT 'all', 'all', 'all', 'all' where 1 = 2 union all \" +\n\"select distinct S_ADDRESS, S_SUPPKEY + 1, S_SUPPKEY + 1, S_ADDRESS + 1 from supplier;\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 4:Project\\n\" +\n\" | : 9\\n\" +\n\" | : 25: cast\\n\" +\n\" | : CAST(15: expr AS VARCHAR)\\n\" +\n\" | : CAST(CAST(9 AS DOUBLE) + 1.0 AS VARCHAR)\\n\" +\n\" | common expressions:\\n\" +\n\" | : CAST(15: expr AS VARCHAR)\\n\" +\n\" | \\n\" +\n\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 2:AGGREGATE (update finalize)\\n\" +\n\" | group by: 24: S_ADDRESS, 15: expr\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | : CAST(7: S_SUPPKEY AS BIGINT) + 1\\n\" +\n\" | : 24: S_ADDRESS\");\n}\n@Test\npublic void testCTEWithDecode() throws Exception {\nconnectContext.getSessionVariable().setCboCteReuse(true);\nconnectContext.getSessionVariable().setEnablePipelineEngine(true);\nconnectContext.getSessionVariable().setCboCTERuseRatio(0);\nString sql = \"with v1 as( select S_ADDRESS a, count(*) b from supplier group by S_ADDRESS) \" +\n\"select x1.a, x1.b from v1 x1 join v1 x2 on x1.a=x2.a\";\nString plan = getThriftPlan(sql);\nAssert.assertTrue(\nplan.contains(\"query_global_dicts:[TGlobalDict(columnId:28, strings:[6D 6F 63 6B], ids:[1])\"));\nconnectContext.getSessionVariable().setCboCteReuse(false);\nconnectContext.getSessionVariable().setEnablePipelineEngine(false);\n}\n@Test\npublic void testMetaScan() throws Exception {\nString sql = \"select max(v1), min(v1) from t0 [_META_]\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 0:MetaScan\\n\" +\n\" Table: t0\\n\" +\n\" : max_v1\\n\" +\n\" : min_v1\"));\nString thrift = getThriftPlan(sql);\nAssert.assertTrue(thrift.contains(\"id_to_names:{6=max_v1, 7=min_v1}\"));\n}\n@Test\npublic void testMetaScan2() throws Exception {\nString sql = \"select max(t1c), min(t1d), dict_merge(t1a) from test_all_type [_META_]\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 0:MetaScan\\n\" +\n\" Table: test_all_type\\n\" +\n\" : dict_merge_t1a\\n\" +\n\" : max_t1c\\n\" +\n\" : min_t1d\"));\nString thrift = getThriftPlan(sql);\nAssert.assertTrue(thrift.contains(\"TFunctionName(function_name:dict_merge), \" +\n\"binary_type:BUILTIN, arg_types:[TTypeDesc(types:[TTypeNode(type:ARRAY), \" +\n\"TTypeNode(type:SCALAR, scalar_type:TScalarType(type:VARCHAR, len:-1))])]\"));\n}\n@Test\npublic void testMetaScan3() throws Exception {\nString sql = \"select max(t1c), min(t1d), dict_merge(t1a) from test_all_type [_META_]\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"1:AGGREGATE (update serialize)\\n\" +\n\" | output: max(max_t1c), min(min_t1d), dict_merge(dict_merge_t1a)\\n\" +\n\" | group by: \\n\" +\n\" | \\n\" +\n\" 0:MetaScan\\n\" +\n\" Table: test_all_type\\n\" +\n\" : dict_merge_t1a\\n\" +\n\" : max_t1c\\n\" +\n\" : min_t1d\");\n}\n@Test\npublic void testMetaScan4() throws Exception {\nString sql = \"select sum(t1c), min(t1d), t1a from test_all_type 
[_META_]\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"2:AGGREGATE (update serialize)\\n\" +\n\" | output: sum(3: t1c), min(4: t1d), any_value(1: t1a)\\n\" +\n\" | group by: \\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | : t1a\\n\" +\n\" | : t1c\\n\" +\n\" | : t1d\\n\" +\n\" | \\n\" +\n\" 0:MetaScan\\n\" +\n\" Table: test_all_type\");\nsql = \"select sum(t1c) from test_all_type [_META_] group by t1a\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \"2:AGGREGATE (update serialize)\\n\" +\n\" | STREAMING\\n\" +\n\" | output: sum(3: t1c)\\n\" +\n\" | group by: 1: t1a\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | : t1a\\n\" +\n\" | : t1c\\n\" +\n\" | \\n\" +\n\" 0:MetaScan\\n\" +\n\" Table: test_all_type\");\n}\n@Test\npublic void testHasGlobalDictButNotFound() throws Exception {\nIDictManager dictManager = IDictManager.getInstance();\nnew Expectations(dictManager) {\n{\ndictManager.hasGlobalDict(anyLong, \"S_ADDRESS\", anyLong);\nresult = true;\ndictManager.getGlobalDict(anyLong, \"S_ADDRESS\");\nresult = Optional.empty();\n}\n};\nString sql = \"select S_ADDRESS from supplier group by S_ADDRESS\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testExtractProject() throws Exception {\nString sql;\nString plan;\nsql = \"select max(upper(S_ADDRESS)), min(upper(S_ADDRESS)), max(S_ADDRESS), sum(S_SUPPKEY + 1) from supplier\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 2:AGGREGATE (update finalize)\\n\" +\n\" | output: max(18: upper), min(18: upper), max(17: S_ADDRESS), sum(1: S_SUPPKEY), count(*)\\n\" +\n\" | group by: \\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | : 1: S_SUPPKEY\\n\" +\n\" | : 17: S_ADDRESS\\n\" +\n\" | : DictExpr(17: S_ADDRESS,[upper()])\");\n}\n@Test\npublic void testCompoundPredicate() throws Exception {\nString sql = \"select count(*) from supplier group by S_ADDRESS having \" +\n\"if(S_ADDRESS > 'a' and S_ADDRESS < 'b', true, false)\";\nString plan = getVerboseExplain(sql);\nassertContains(plan,\n\"DictExpr(10: S_ADDRESS,[if(( > 'a') \" +\n\"AND ( < 'b'), TRUE, FALSE)])\");\nsql = \"select count(*) from supplier group by S_ADDRESS having \" +\n\"if(not S_ADDRESS like '%a%' and S_ADDRESS < 'b', true, false)\";\nplan = getVerboseExplain(sql);\nassertContains(plan,\n\"DictExpr(10: S_ADDRESS,[if((NOT ( LIKE '%a%')) \" +\n\"AND ( < 'b'), TRUE, FALSE)])\");\n}\n@Test\npublic void testComplexScalarOperator_1() throws Exception {\nString sql = \"select case when s_address = 'test' then 'a' \" +\n\"when s_phone = 'b' then 'b' \" +\n\"when coalesce(s_address, 'c') = 'c' then 'c' \" +\n\"else 'a' end from supplier; \";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"1:Project\\n\" +\n\" | : CASE WHEN DictExpr(10: S_ADDRESS,[ = 'test']) THEN 'a' \" +\n\"WHEN 5: S_PHONE = 'b' THEN 'b' \" +\n\"WHEN coalesce(DictExpr(10: S_ADDRESS,[]), 'c') = 'c' THEN 'c' \" +\n\"ELSE 'a' END\\n\" +\n\" |\");\nsql = \"select case when s_address = 'test' then 'a' \" +\n\"when s_phone = 'b' then 'b' \" +\n\"when upper(s_address) = 'c' then 'c' \" +\n\"else 'a' end from supplier; \";\nplan = getFragmentPlan(sql);\nassertContains(plan, \"1:Project\\n\" +\n\" | : CASE WHEN DictExpr(10: S_ADDRESS,[ = 'test']) THEN 'a' \" +\n\"WHEN 5: S_PHONE = 'b' THEN 'b' \" +\n\"WHEN DictExpr(10: S_ADDRESS,[upper()]) = 'c' THEN 'c' \" +\n\"ELSE 'a' END\\n\" +\n\" |\");\n}\n@Test\npublic void testComplexScalarOperator_2() throws Exception {\nString sql = \"select count(*) from supplier where s_phone = 'a' or 
coalesce(s_address, 'c') = 'c' \" +\n\"or s_address = 'address'\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"0:OlapScanNode\\n\" +\n\" TABLE: supplier\\n\" +\n\" PREAGGREGATION: ON\\n\" +\n\" PREDICATES: ((5: S_PHONE = 'a') OR (coalesce(DictExpr(12: S_ADDRESS,[]), 'c') = 'c')) \" +\n\"OR (DictExpr(12: S_ADDRESS,[ = 'address']))\");\nsql = \"select count(*) from supplier where s_phone = 'a' or upper(s_address) = 'c' \" +\n\"or s_address = 'address'\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \"0:OlapScanNode\\n\" +\n\" TABLE: supplier\\n\" +\n\" PREAGGREGATION: ON\\n\" +\n\" PREDICATES: ((5: S_PHONE = 'a') OR (DictExpr(12: S_ADDRESS,[upper()]) = 'c')) \" +\n\"OR (DictExpr(12: S_ADDRESS,[ = 'address']))\");\n}\n@Test\npublic void testAggWithProjection() throws Exception {\nString sql = \"select cast(max(s_address) as date) from supplier\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"2:Project\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[CAST( AS DATE)])\\n\" +\n\" | \\n\" +\n\" 1:AGGREGATE (update finalize)\\n\" +\n\" | output: max(11: S_ADDRESS)\\n\" +\n\" | group by: \");\n}\n@Test\npublic void testJoinWithProjection() throws Exception {\nString sql = \"select s_address, cast(t1.s_address as date), cast(t1.s_phone as date), upper(t1.s_address),\" +\n\" cast(t2.a as date), 123 from supplier t1 join (select max(s_address) a from supplier) t2 \";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"5:Project\\n\" +\n\" | : DictExpr(23: S_ADDRESS,[CAST( AS DATE)])\\n\" +\n\" | : CAST(5: S_PHONE AS DATE)\\n\" +\n\" | : DictExpr(25: S_ADDRESS,[CAST( AS DATE)])\\n\" +\n\" | : 123\\n\" +\n\" | : 23: S_ADDRESS\\n\" +\n\" | : DictExpr(23: S_ADDRESS,[upper()])\\n\" +\n\" | \\n\" +\n\" 4:NESTLOOP JOIN\\n\" +\n\" | join op: CROSS JOIN\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | \\n\" +\n\" |----3:EXCHANGE\");\n}\n@Test\npublic void testTopNWithProjection() throws Exception {\nString sql =\n\"select t2.s_address, cast(t1.a as date), concat(t1.b, '') from (select max(s_address) a, min(s_phone) b \" +\n\"from supplier group by s_address) t1 join (select s_address from supplier) t2 order by t1.a\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"10:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 9:Project\\n\" +\n\" | : 19: cast\\n\" +\n\" | : 20: concat\\n\" +\n\" | : 23: S_ADDRESS\\n\" +\n\" | \\n\" +\n\" 8:MERGING-EXCHANGE\");\n}\n@Test\npublic void testLogicalProperty() throws Exception {\nString sql = \"select cast(max(s_address) as date) from supplier where s_suppkey = 1 group by S_PHONE\";\nExecPlan execPlan = getExecPlan(sql);\nOlapScanNode olapScanNode = (OlapScanNode) execPlan.getScanNodes().get(0);\nAssert.assertEquals(0, olapScanNode.getBucketExprs().size());\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nassertContains(plan, \"3:Project\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[CAST( AS DATE)])\\n\" +\n\" | \\n\" +\n\" 2:AGGREGATE (update finalize)\\n\" +\n\" | output: max(11: S_ADDRESS)\\n\" +\n\" | group by: 5: S_PHONE\");\n}\n@Test\npublic void testLowCardForLimit() throws Exception {\nString sql = \"SELECT * from (SELECT t_a_0.`S_ADDRESS` AS f_ax_0, t_a_0.`S_ADDRESS` AS f_ax_1 FROM \" +\n\"(select * from (select * from supplier limit 20000) b) t_a_0) t_a_1 ORDER BY t_a_1.f_ax_0 desc LIMIT 0,20;\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"3:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 2:TOP-N\\n\" +\n\" | order by: 9: S_ADDRESS DESC\\n\" +\n\" | offset: 0\\n\" +\n\" | limit: 
20\");\n}\n@Test\npublic void testNeedDecode_1() throws Exception {\nString sql = \"with cte_1 as (\\n\" +\n\" select\\n\" +\n\" t0.P_NAME as a,\\n\" +\n\" t0.P_BRAND as b,\\n\" +\n\" t1.s_name as c,\\n\" +\n\" t1.s_address as d,\\n\" +\n\" t1.s_address as e,\\n\" +\n\" t1.s_nationkey as f\\n\" +\n\" from\\n\" +\n\" part_v2 t0\\n\" +\n\" left join supplier_nullable t1 on t0.P_SIZE > t1.s_suppkey\\n\" +\n\")\\n\" +\n\"select\\n\" +\n\" cte_1.b,\\n\" +\n\" if(\\n\" +\n\" cte_1.d in ('hz', 'bj'),\\n\" +\n\" cte_1.b,\\n\" +\n\" if (cte_1.e in ('hz'), 1035, cte_1.f)\\n\" +\n\" ),\\n\" +\n\" count(distinct if(cte_1.c = '', cte_1.e, null))\\n\" +\n\"from\\n\" +\n\" cte_1\\n\" +\n\"group by\\n\" +\n\" cte_1.b,\\n\" +\n\" if(\\n\" +\n\" cte_1.d in ('hz', 'bj'),\\n\" +\n\" cte_1.b,\\n\" +\n\" if (cte_1.e in ('hz'), 1035, cte_1.f)\\n\" +\n\" );\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"6:Project\\n\" +\n\" | : 22\\n\" +\n\" | : if(31: S_ADDRESS IN ('hz', 'bj'), 22, CAST(if(31: S_ADDRESS = 'hz', \" +\n\"1035, 32: S_NATIONKEY) AS VARCHAR))\\n\" +\n\" | : if(30: S_NAME = '', 31: S_ADDRESS, NULL)\\n\" +\n\" | \\n\" +\n\" 5:Decode\\n\" +\n\" | : \");\n}\n@Test\npublic void testNeedDecode_2() throws Exception {\nString sql = \"with cte_1 as (\\n\" +\n\" select\\n\" +\n\" t0.P_NAME as a,\\n\" +\n\" t0.P_BRAND as b,\\n\" +\n\" t1.s_name as c,\\n\" +\n\" t1.s_address as d,\\n\" +\n\" t1.s_address as e,\\n\" +\n\" t1.s_nationkey as f\\n\" +\n\" from\\n\" +\n\" part_v2 t0\\n\" +\n\" left join supplier_nullable t1 on t0.P_SIZE > t1.s_suppkey\\n\" +\n\")\\n\" +\n\"select\\n\" +\n\" if(\\n\" +\n\" cte_1.d in ('hz', 'bj'),\\n\" +\n\" cte_1.b,\\n\" +\n\" if (cte_1.e in ('hz'), 1035, cte_1.f)\\n\" +\n\" ),\\n\" +\n\" count(distinct if(cte_1.c = '', cte_1.e, null))\\n\" +\n\"from\\n\" +\n\" cte_1\\n\" +\n\"group by\\n\" +\n\" if(\\n\" +\n\" cte_1.d in ('hz', 'bj'),\\n\" +\n\" cte_1.b,\\n\" +\n\" if (cte_1.e in ('hz'), 1035, cte_1.f)\\n\" +\n\" );\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"6:Project\\n\" +\n\" | : if(31: S_ADDRESS IN ('hz', 'bj'), 22, CAST(if(31: S_ADDRESS = 'hz', 1035, \" +\n\"32: S_NATIONKEY) AS VARCHAR))\\n\" +\n\" | : if(30: S_NAME = '', 31: S_ADDRESS, NULL)\\n\" +\n\" | \\n\" +\n\" 5:Decode\\n\" +\n\" | : \");\n}\n@Test\npublic void testProjectionRewrite() throws Exception {\nString sql = \"SELECT '2023-03-26' D_DATE, c_user, concat(C_NODEVALUE, '-', C_BROKERNAME) AS C_NAME, c_dept, \" +\n\"c_par, '\u4eba', '1', '\u89c4', '1', 'KPI1' C_KPICODE, round(sum(if(c_par='01', F_ASSET_zb, F_ASSET))/100000000, 5) \" +\n\"F_CURRENTDAta FROM low_card_t1 WHERE c_par IN ( '02', '01' ) AND D_DATE='2023-03-26' \" +\n\"GROUP BY C_BROKERNAME, c_dept, c_par, c_user, C_NODEVALUE \" +\n\"union all \" +\n\"SELECT '2023-03-26' D_DATE, c_mr AS C_CODE, CASE WHEN c_mr = '01' THEN '\u90e8' ELSE '\u6237\u90e8' END C_NAME, \" +\n\"c_mr c_dept, c_mr c_par, '\u95e8' AS C_ROLE, '3' AS F_ROLERANK, '\u5165' AS C_KPITYPE, '2' AS F_KPIRANK, \" +\n\"'KPI2' C_KPICODE, ifnull(ROUND(SUM(fee_zb)/100000000, 5), 0) AS F_CURRENTDATA FROM low_card_t2 \" +\n\"WHERE c_mr IN ('02', '03') AND D_DATE>concat(year(str_to_date('2023-03-26', '%Y-%m-%d'))-1, '1231') \" +\n\"AND d_date<='2023-03-26' GROUP BY c_mr;\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"10:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 9:AGGREGATE (update finalize)\\n\" +\n\" | output: sum(38: fee_zb)\\n\" +\n\" | group by: 74: c_mr\");\n}\n}", + "context_after": "class LowCardinalityTest extends PlanTestBase 
{\n@BeforeClass\n@AfterClass\npublic static void afterClass() {\nconnectContext.getSessionVariable().setSqlMode(0);\nconnectContext.getSessionVariable().setEnableLowCardinalityOptimize(false);\n}\n@Test\npublic void testOlapScanNodeOutputColumns() throws Exception {\nconnectContext.getSessionVariable().enableTrimOnlyFilteredColumnsInScanStage();\nString sql =\n\"SELECT C_CITY, S_CITY, year(LO_ORDERDATE) as year, sum(LO_REVENUE) AS revenue FROM lineorder_flat \" +\n\"WHERE C_CITY in ('UNITED KI1', 'UNITED KI5') AND S_CITY in ( 'UNITED KI1', 'UNITED\\n\" +\n\"KI5') AND LO_ORDERDATE >= '1997-12-01' AND LO_ORDERDATE <= '1997-12-31' GROUP BY C_CITY, S_CITY, year \" +\n\"ORDER BY year ASC, revenue DESC;\";\nString plan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\"unused_output_column_name:[]\"));\nconnectContext.getSessionVariable().disableTrimOnlyFilteredColumnsInScanStage();\n}\n@Test\npublic void testDecodeNodeRewrite() throws Exception {\nString sql = \"select\\n\" +\n\" 100.00 * sum(case\\n\" +\n\" when p_type like 'PROMO%'\\n\" +\n\" then l_extendedprice * (1 - l_discount)\\n\" +\n\" else 0\\n\" +\n\" end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue\\n\" +\n\"from\\n\" +\n\" lineitem,\\n\" +\n\" part\\n\" +\n\"where\\n\" +\n\" l_partkey = p_partkey\\n\" +\n\" and l_shipdate >= date '1997-02-01'\\n\" +\n\" and l_shipdate < date '1997-03-01';\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeNodeRewrite2() throws Exception {\nString sql = \"select\\n\" +\n\" p_brand,\\n\" +\n\" p_type,\\n\" +\n\" p_size,\\n\" +\n\" count(distinct ps_suppkey) as supplier_cnt\\n\" +\n\"from\\n\" +\n\" partsupp,\\n\" +\n\" part\\n\" +\n\"where\\n\" +\n\" p_partkey = ps_partkey\\n\" +\n\" and p_brand <> 'Brand\n\" and p_type not like 'PROMO BURNISHED%'\\n\" +\n\" and p_size in (31, 43, 9, 6, 18, 11, 25, 1)\\n\" +\n\" and ps_suppkey not in (\\n\" +\n\" select\\n\" +\n\" s_suppkey\\n\" +\n\" from\\n\" +\n\" supplier\\n\" +\n\" where\\n\" +\n\" s_comment like '%Customer%Complaints%'\\n\" +\n\")\\n\" +\n\"group by\\n\" +\n\" p_brand,\\n\" +\n\" p_type,\\n\" +\n\" p_size\\n;\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeNodeRewrite3() throws Exception {\nString sql = \"select L_COMMENT from lineitem group by L_COMMENT\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \\n\"));\n}\n@Test\npublic void testDecodeNodeRewrite4() throws Exception {\nString sql = \"select dept_name from dept group by dept_name,state\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 2:Project\\n\" +\n\" | : 4: dept_name\"));\n}\n@Test\npublic void testDecodeNodeRewrite5() throws Exception {\nString sql = \"select S_ADDRESS from supplier where S_ADDRESS \" +\n\"like '%Customer%Complaints%' group by S_ADDRESS \";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \"));\nAssert.assertTrue(\nplan.contains(\"PREDICATES: DictExpr(9: S_ADDRESS,[ LIKE '%Customer%Complaints%'])\"));\n}\n@Test\npublic void testDecodeNodeRewrite6() throws Exception {\nString sql = \"select count(S_ADDRESS) from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nAssert.assertTrue(plan.contains(\"count(10: S_ADDRESS)\"));\nsql = \"select count(distinct S_ADDRESS) from 
supplier\";\nconnectContext.getSessionVariable().setNewPlanerAggStage(4);\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nAssert.assertTrue(plan.contains(\"count(10: S_ADDRESS)\"));\nAssert.assertTrue(plan.contains(\"HASH_PARTITIONED: 10: S_ADDRESS\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testDecodeNodeRewriteMultiAgg()\nthrows Exception {\nboolean cboCteReuse = connectContext.getSessionVariable().isCboCteReuse();\nboolean enableLowCardinalityOptimize = connectContext.getSessionVariable().isEnableLowCardinalityOptimize();\nint newPlannerAggStage = connectContext.getSessionVariable().getNewPlannerAggStage();\nconnectContext.getSessionVariable().setCboCteReuse(false);\nconnectContext.getSessionVariable().setEnableLowCardinalityOptimize(true);\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\ntry {\nString sql = \"select count(distinct S_ADDRESS), count(distinct S_NATIONKEY) from supplier\";\nString plan = getVerboseExplain(sql);\nAssert.assertTrue(plan, plan.contains(\"dict_col=S_ADDRESS\"));\nsql = \"select count(distinct S_ADDRESS), count(distinct S_NATIONKEY) from supplier \" +\n\"having count(1) > 0\";\nplan = getVerboseExplain(sql);\nAssert.assertFalse(plan, plan.contains(\"dict_col=\"));\nAssert.assertFalse(plan, plan.contains(\"Decode\"));\n} finally {\nconnectContext.getSessionVariable().setCboCteReuse(cboCteReuse);\nconnectContext.getSessionVariable().setEnableLowCardinalityOptimize(enableLowCardinalityOptimize);\nconnectContext.getSessionVariable().setNewPlanerAggStage(newPlannerAggStage);\n}\n}\n@Test\npublic void testDecodeNodeRewrite7() throws Exception {\nString sql = \"select S_ADDRESS, count(S_ADDRESS) from supplier group by S_ADDRESS\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \"));\nString thrift = getThriftPlan(sql);\nAssert.assertTrue(thrift.contains(\"TGlobalDict(columnId:10, strings:[6D 6F 63 6B], ids:[1])\"));\n}\n@Test\npublic void testDecodeNodeRewrite8() throws Exception {\nString sql = \"select S_ADDRESS, count(S_ADDRESS) from supplier group by S_ADDRESS\";\nString plan = getCostExplain(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \\n\" +\n\" | cardinality: 1\\n\" +\n\" | column statistics: \\n\" +\n\" | * S_ADDRESS-->[-Infinity, Infinity, 0.0, 40.0, 10000.0] ESTIMATE\\n\" +\n\" | * count-->[0.0, 1.0, 0.0, 8.0, 1.0] ESTIMATE\"));\n}\n@Test\npublic void testDecodeNodeRewrite9() throws Exception {\nString sql = \"select S_ADDRESS, upper(S_ADDRESS) from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" | : \\n\" +\n\" | : \"));\nString thriftPlan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\" | : \\n\" +\n\" | : \"));\nAssert.assertTrue(thriftPlan.contains(\"could_apply_dict_optimize:true\"));\nAssert.assertTrue(thriftPlan.contains(\"string_functions:{11=TExpr(nodes\"));\n}\n@Test\npublic void testDecodeRewrite9Scan() throws Exception {\nString sql = \"select S_ADDRESS from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeNodeRewrite10() throws Exception {\nString sql = \"select upper(S_ADDRESS) as a, count(*) from supplier group by a\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\" 3:Decode\\n\" +\n\" | : \"));\nAssert.assertTrue(plan.contains(\" : DictExpr(11: S_ADDRESS,[upper()])\"));\nsql = \"select S_ADDRESS, count(*) from 
supplier_nullable group by S_ADDRESS\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"group by: [10: S_ADDRESS, INT, true]\"));\n}\n@Test\npublic void testDecodeNodeRewriteMultiCountDistinct() throws Exception {\nString sql;\nString plan;\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql = \"select count(distinct a),count(distinct b) from (\" +\n\"select lower(upper(S_ADDRESS)) as a, upper(S_ADDRESS) as b, \" +\n\"count(*) from supplier group by a,b) as t \";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nAssert.assertTrue(plan.contains(\"7:AGGREGATE (merge finalize)\\n\" +\n\" | output: multi_distinct_count(12: count), multi_distinct_count(13: count)\"));\nsql = \"select count(distinct S_ADDRESS), count(distinct S_COMMENT) from supplier;\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" multi_distinct_count(11: S_ADDRESS), \" +\n\"multi_distinct_count(12: S_COMMENT)\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(3);\nsql = \"select max(S_ADDRESS), count(distinct S_ADDRESS) from supplier group by S_ADDRESS;\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 4:AGGREGATE (update finalize)\\n\" +\n\" | output: max(13: S_ADDRESS), count(11: S_ADDRESS)\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testDecodeNodeRewriteDistinct() throws Exception {\nString sql;\nString plan;\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\nsql = \"select count(distinct S_ADDRESS) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 1:AGGREGATE (update finalize)\\n\" +\n\" | aggregate: multi_distinct_count[([10: S_ADDRESS, INT, false]); \" +\n\"args: INT; result: BIGINT; args nullable: false; result nullable: false]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 3:AGGREGATE (merge finalize)\\n\" +\n\" | aggregate: multi_distinct_count[([9: count, VARBINARY, false]); \" +\n\"args: INT; result: BIGINT; args nullable: true; result nullable: false]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(3);\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 4:AGGREGATE (update serialize)\\n\" +\n\" | aggregate: count[([10: S_ADDRESS, INT, false]); args: INT; result: BIGINT; \" +\n\"args nullable: false; result nullable: false]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(4);\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 6:AGGREGATE (merge finalize)\\n\" +\n\" | aggregate: count[([9: count, BIGINT, false]); args: VARCHAR; result: BIGINT; \" +\n\"args nullable: true; result nullable: false]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\nsql = \"select count(distinct S_ADDRESS, S_COMMENT) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\"aggregate: count[(if[(3 IS NULL, NULL, [7, VARCHAR, false]); \" +\n\"args: BOOLEAN,VARCHAR,VARCHAR; result: VARCHAR; \" +\n\"args nullable: true; result nullable: true]); \" +\n\"args: VARCHAR; result: BIGINT; args nullable: true; result nullable: false]\\n\"));\nAssert.assertTrue(plan.contains(\" 4:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | cardinality: 1\"));\n}\n@Test\npublic void testDecodeNodeRewriteTwoPaseDistinct() throws Exception {\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nString sql = \"select count(distinct S_ADDRESS), 
count(distinct S_NATIONKEY) from supplier\";\nString plan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"3:AGGREGATE (merge finalize)\\n\" +\n\" | aggregate: multi_distinct_count[([9: count, VARBINARY, false]); \" +\n\"args: INT; result: BIGINT; args nullable: true; result nullable: false], \" +\n\"multi_distinct_count[([10: count, VARBINARY, false]); args: INT; result: BIGINT; \" +\n\"args nullable: true; result nullable: false]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testDecodeNodeRewriteTwoPhaseAgg() throws Exception {\nString sql = \"select lower(upper(S_ADDRESS)) as a, upper(S_ADDRESS) as b, count(*) from supplier group by a,b\";\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 1:Project\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[lower(upper())])\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[upper()])\"));\nAssert.assertFalse(plan.contains(\"common expressions\"));\nplan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\"global_dicts:[TGlobalDict(columnId:12, strings:[6D 6F 63 6B], ids:[1])]\"));\nAssert.assertTrue(plan.contains(\"global_dicts:[TGlobalDict(columnId:12, strings:[6D 6F 63 6B], ids:[1])]\"));\nsql = \"select count(*) from supplier group by S_ADDRESS\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nAssert.assertTrue(plan.contains(\" 3:AGGREGATE (merge finalize)\\n\" +\n\" | output: count(9: count)\\n\" +\n\" | group by: 10: S_ADDRESS\"));\nsql = \"select count(*) from supplier group by S_ADDRESS\";\nplan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\"global_dicts:[TGlobalDict(columnId:10, strings:[6D 6F 63 6B], ids:[1])\"));\nAssert.assertTrue(plan.contains(\"partition:TDataPartition(type:RANDOM, partition_exprs:[]), \" +\n\"query_global_dicts:[TGlobalDict(columnId:10, strings:[6D 6F 63 6B], ids:[1])\"));\nsql = \"select count(distinct S_NATIONKEY) from supplier group by S_ADDRESS\";\nplan = getThriftPlan(sql);\nSystem.out.println(plan);\nAssert.assertTrue(plan, plan.contains(\n\"partition:TDataPartition(type:RANDOM, partition_exprs:[]), \" +\n\"query_global_dicts:[TGlobalDict(columnId:10, strings:[6D 6F 63 6B], ids:[1])]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testDecodeRewriteTwoFunctions() throws Exception {\nString sql;\nString plan;\nsql = \"select substr(S_ADDRESS, 0, S_NATIONKEY), upper(S_ADDRESS) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select substr(S_ADDRESS, 0, 1), S_ADDRESS from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" | : \\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(10: S_ADDRESS,[substr(, 0, 1)])\"));\nsql = \"select substr(S_ADDRESS, 0, 1), lower(upper(S_ADDRESS)), S_ADDRESS from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(11: S_ADDRESS,[substr(, 0, 1)])\\n\" +\n\" | : DictExpr(11: S_ADDRESS,[lower(upper())])\"));\n}\n@Test\npublic void testDecodeRewrite1() throws Exception {\nString sql = \"select substr(S_ADDRESS, 0, S_NATIONKEY), S_ADDRESS from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeNodeTupleId() throws Exception 
{\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nString sql = \"select count(*), S_ADDRESS from supplier group by S_ADDRESS\";\nString plan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\"node_type:DECODE_NODE, num_children:1, limit:-1, row_tuples:[3]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testDecodeNodeRewrite11() throws Exception {\nString sql = \"select lower(upper(S_ADDRESS)) as a, count(*) from supplier group by a\";\nString plan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" : DictExpr(11: S_ADDRESS,[lower(upper())])\"));\nAssert.assertTrue(plan.contains(\"group by: [12: lower, INT, true]\"));\nsql = \"select lower(substr(S_ADDRESS, 0, 1)) as a, count(*) from supplier group by a\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(\nplan.contains(\" : DictExpr(11: S_ADDRESS,[lower(substr(, 0, 1))])\"));\nsql = \"select lower(upper(S_ADDRESS)) as a, upper(S_ADDRESS) as b, count(*) from supplier group by a,b\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[lower(upper())])\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[upper()])\"));\nsql = \"select lower(upper(S_ADDRESS)) as a, upper(S_ADDRESS) as b, count(*) from supplier group by S_ADDRESS\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[lower(upper())])\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[upper()])\"));\n}\n@Test\npublic void testDecodeNodeRewrite12() throws Exception {\nString sql;\nString plan;\nsql = \"select max(S_ADDRESS) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"Decode\"));\nsql = \"select min(S_ADDRESS) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"Decode\"));\nsql = \"select max(upper(S_ADDRESS)) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(11: S_ADDRESS,[upper()])\"));\nsql = \"select max(\\\"CONST\\\") from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeNodeRewrite13() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql;\nString plan;\nsql = \"select coalesce(l.S_ADDRESS,l.S_NATIONKEY) from supplier l join supplier r on l.s_suppkey = r.s_suppkey\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 4:Project\\n\" +\n\" | : coalesce(3, CAST(4: S_NATIONKEY AS VARCHAR))\"));\nAssert.assertTrue(plan.contains(\" 3:Decode\\n\" +\n\" | : \"));\nsql = \"select coalesce(l.S_ADDRESS,l.S_NATIONKEY),l.S_ADDRESS,r.S_ADDRESS \" +\n\"from supplier l join supplier r on l.s_suppkey = r.s_suppkey\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 4:Project\\n\" +\n\" | : 3\\n\" +\n\" | : 11\\n\" +\n\" | : coalesce(3, CAST(4: S_NATIONKEY AS VARCHAR))\"));\nAssert.assertTrue(plan.contains(\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | : \"));\nsql = \"select coalesce(l.S_ADDRESS,l.S_NATIONKEY), upper(l.S_ADDRESS), l.S_ADDRESS \" +\n\"from supplier l join supplier r on l.s_suppkey = r.s_suppkey\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select coalesce(l.S_ADDRESS,l.S_NATIONKEY), upper(r.P_MFGR),r.P_MFGR \" +\n\"from supplier l join 
part_v2 r on l.s_suppkey = r.P_PARTKEY\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\" 5:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(21: P_MFGR,[upper()])\"));\nAssert.assertTrue(plan.contains(\" 4:Project\\n\" +\n\" | : coalesce(3: S_ADDRESS, CAST(4: S_NATIONKEY AS VARCHAR))\\n\" +\n\" | : 21: P_MFGR\\n\" +\n\" | : DictExpr(21: P_MFGR,[upper()])\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\npublic void testDecodeNodeRewrite14() throws Exception {\nString sql;\nString plan;\nsql = \"select count(*), approx_count_distinct(S_ADDRESS) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select max(S_ADDRESS), approx_count_distinct(S_ADDRESS) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testWithCaseWhen() throws Exception {\nString sql;\nString plan;\nsql = \"select case when S_ADDRESS = 'key' then 1 else 0 end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"9 <-> DictExpr(10: S_ADDRESS,[if( = 'key', 1, 0)])\"));\nAssert.assertTrue(plan.contains(\"dict_col=S_ADDRESS\"));\nsql = \"select case when S_ADDRESS = 'key' then 1 when S_ADDRESS = '2' then 2 else 0 end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nsql =\n\"select case when S_ADDRESS = 'key' then 1 when S_ADDRESS = '2' then 2 else S_NATIONKEY end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nAssert.assertTrue(plan.contains(\n\" | 9 <-> CASE WHEN DictExpr(10: S_ADDRESS,[ = 'key']) \" +\n\"THEN 1 WHEN DictExpr(10: S_ADDRESS,[ = '2']) THEN 2 ELSE 4: S_NATIONKEY END\"));\nsql = \"select S_ADDRESS = 'key' , \" +\n\"case when S_ADDRESS = 'key' then 1 when S_ADDRESS = '2' then 2 else 3 end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 1:Project\\n\" +\n\" | output columns:\\n\" +\n\" | 9 <-> DictExpr(11: S_ADDRESS,[ = 'key'])\\n\" +\n\" | 10 <-> DictExpr(11: S_ADDRESS,[CASE WHEN = 'key' \" +\n\"THEN 1 WHEN = '2' THEN 2 ELSE 3 END])\\n\" +\n\" | cardinality: 1\"));\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nsql = \"select case when S_ADDRESS = 'key' then 'key1' when S_ADDRESS = '2' \" +\n\"then 'key2' else 'key3' end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 2:Decode\\n\" +\n\" | : \"));\nsql = \"select case when S_ADDRESS = 'key' then rand() when S_ADDRESS = '2' \" +\n\"then 'key2' else 'key3' end from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" | 9 <-> CASE WHEN DictExpr(10: S_ADDRESS,[ = 'key']) \" +\n\"THEN CAST(rand() AS VARCHAR) \" +\n\"WHEN DictExpr(10: S_ADDRESS,[ = '2']) \" +\n\"THEN 'key2' ELSE 'key3' END\"));\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select if(S_ADDRESS = 'key', S_COMMENT, 'y') from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" | 9 <-> if[(DictExpr(10: S_ADDRESS,[ = 'key']), \" +\n\"DictExpr(11: S_COMMENT,[]), 'y'); \" +\n\"args: BOOLEAN,VARCHAR,VARCHAR; result: VARCHAR; args nullable: true; result nullable: true]\"));\n}\n@Test\npublic void testLeftJoinWithUnion() throws Exception {\nString sql;\nString plan;\nsql = \"SELECT subt1.S_ADDRESS\\n\" +\n\"FROM (\\n\" +\n\" SELECT S_ADDRESS, S_NATIONKEY\\n\" +\n\" FROM supplier\\n\" +\n\" ) subt1 LEFT ANTI\\n\" +\n\" JOIN 
(\\n\" +\n\" SELECT S_ADDRESS, S_NATIONKEY\\n\" +\n\" FROM supplier\\n\" +\n\" ) subt0 ON subt1.S_NATIONKEY = subt0.S_NATIONKEY \\n\" +\n\"WHERE true\\n\" +\n\"UNION ALL\\n\" +\n\"SELECT subt1.S_ADDRESS\\n\" +\n\"FROM (\\n\" +\n\" SELECT S_ADDRESS, S_NATIONKEY\\n\" +\n\" FROM supplier\\n\" +\n\" ) subt1 LEFT ANTI\\n\" +\n\" JOIN (\\n\" +\n\" SELECT S_ADDRESS, S_NATIONKEY\\n\" +\n\" FROM supplier\\n\" +\n\" ) subt0 ON subt1.S_NATIONKEY = subt0.S_NATIONKEY\\n\" +\n\"WHERE (NOT (true));\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 5:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 4:Project\\n\" +\n\" | : 34: S_ADDRESS\"));\n}\n@Test\npublic void testProject() throws Exception {\nString sql;\nString plan;\nsql = \"select cast (S_ADDRESS as datetime) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nsql = \"select substring(S_ADDRESS,1,2) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 11 <-> DictExpr(10: S_ADDRESS,[substring(, 1, 2)])\"));\nsql = \"select substring(S_ADDRESS, S_SUPPKEY, 2) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\"9 <-> substring[([3: S_ADDRESS, VARCHAR, false], [1: S_SUPPKEY, INT, false], 2); \" +\n\"args: VARCHAR,INT,INT; result: VARCHAR; args nullable: false; result nullable: true]\"));\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select substring(S_ADDRESS, S_ADDRESS, 1) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\"11 <-> DictExpr(10: S_ADDRESS,[substring(, CAST( AS INT), 1)])\"));\nsql = \"select substring(upper(S_ADDRESS), S_SUPPKEY, 2) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\"9 <-> substring[(DictExpr(10: S_ADDRESS,[upper()]), [1: S_SUPPKEY, INT, false], 2); \" +\n\"args: VARCHAR,INT,INT; result: VARCHAR; args nullable: true; result nullable: true]\"));\nsql = \"select concat(S_ADDRESS, S_COMMENT) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\" | 9 <-> concat[([3: S_ADDRESS, VARCHAR, false], [7: S_COMMENT, VARCHAR, false]); \" +\n\"args: VARCHAR; result: VARCHAR; args nullable: false; result nullable: true]\"));\nsql = \"select if(S_SUPPKEY='kks', upper(S_ADDRESS), S_COMMENT), upper(S_ADDRESS) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\" | 9 <-> if[(cast([1: S_SUPPKEY, INT, false] as VARCHAR(1048576)) = 'kks', \" +\n\"DictExpr(11: S_ADDRESS,[upper()]), DictExpr(12: S_COMMENT,[])); \" +\n\"args: BOOLEAN,VARCHAR,VARCHAR; result: VARCHAR; args nullable: true; result nullable: true]\\n\" +\n\" | 13 <-> DictExpr(11: S_ADDRESS,[upper()])\"));\nAssert.assertTrue(plan.contains(\"Decode\"));\nsql = \"select if(S_ADDRESS='kks', S_COMMENT, S_COMMENT) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\n\" | 9 <-> if[(DictExpr(10: S_ADDRESS,[ = 'kks']), [12: expr, VARCHAR(101), true], \" +\n\"[12: expr, VARCHAR(101), true]); args: BOOLEAN,VARCHAR,VARCHAR; result: VARCHAR; \" +\n\"args nullable: true; result nullable: true]\\n\" +\n\" | common expressions:\\n\" +\n\" | 12 <-> DictExpr(11: S_COMMENT,[])\"));\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql =\n\"select if(S_ADDRESS='kks', upper(S_COMMENT), S_COMMENT), concat(upper(S_COMMENT), S_ADDRESS) from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" | output columns:\\n\" +\n\" | 9 <-> if[(DictExpr(11: S_ADDRESS,[ = 'kks']), 
[13: expr, VARCHAR, true], \" +\n\"DictExpr(12: S_COMMENT,[])); args: BOOLEAN,VARCHAR,VARCHAR; result: VARCHAR; \" +\n\"args nullable: true; result nullable: true]\\n\" +\n\" | 10 <-> concat[([13: expr, VARCHAR, true], DictExpr(11: S_ADDRESS,[])); \" +\n\"args: VARCHAR; result: VARCHAR; args nullable: true; result nullable: true]\"));\nAssert.assertTrue(plan.contains(\" | common expressions:\\n\" +\n\" | 13 <-> DictExpr(12: S_COMMENT,[upper()])\"));\nsql = \"select REVERSE(SUBSTR(LEFT(REVERSE(S_ADDRESS),INSTR(REVERSE(S_ADDRESS),'/')-1),5)) FROM supplier\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 1:Project\\n\" +\n\" | : reverse(substr(left(11: expr, CAST(CAST(instr(11: expr, '/') AS BIGINT)\" +\n\" - 1 AS INT)), 5))\\n\" +\n\" | common expressions:\\n\" +\n\" | : DictExpr(10: S_ADDRESS,[reverse()])\");\n}\n@Test\npublic void testScanPredicate() throws Exception {\nString sql;\nString plan;\nsql = \"select count(*) from \" +\n\"supplier where S_ADDRESS like '%A%' and S_ADDRESS not like '%B%'\";\nplan = getCostExplain(sql);\nAssert.assertFalse(plan.contains(\" dict_col=S_ADDRESS \"));\nsql = \"select * from supplier l join supplier r on \" +\n\"l.S_NAME = r.S_NAME where upper(l.S_ADDRESS) like '%A%' and upper(l.S_ADDRESS) not like '%B%'\";\nplan = getCostExplain(sql);\nassertContains(plan, \"0:OlapScanNode\\n\" +\n\" table: supplier, rollup: supplier\\n\" +\n\" preAggregation: on\\n\" +\n\" Predicates: upper(3: S_ADDRESS) LIKE '%A%', NOT (upper(3: S_ADDRESS) LIKE '%B%')\\n\" +\n\" dict_col=S_COMMENT\");\nsql = \"select count(*) from supplier where S_ADDRESS = 'kks' group by S_ADDRESS \";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"DictExpr(10: S_ADDRESS,[ = 'kks'])\"));\nAssert.assertTrue(plan.contains(\"group by: 10: S_ADDRESS\"));\nsql = \"select count(*) from supplier where S_ADDRESS + 2 > 'kks' group by S_ADDRESS\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"group by: 3: S_ADDRESS\"));\nsql = \"select count(*) from supplier where if(S_ADDRESS = 'kks', true, false)\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan,\nplan.contains(\"PREDICATES: DictExpr(12: S_ADDRESS,[if( = 'kks', TRUE, FALSE)])\"));\nsql = \"select count(*) from supplier where if(S_ADDRESS = 'kks', cast(S_ADDRESS as boolean), false)\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\n\"PREDICATES: DictExpr(12: S_ADDRESS,[if( = 'kks', CAST( AS BOOLEAN), FALSE)])\"));\nsql = \"select count(*) from supplier where if(S_ADDRESS = 'kks',cast(S_COMMENT as boolean), false)\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\n\"PREDICATES: if(DictExpr(12: S_ADDRESS,[ = 'kks']), \" +\n\"DictExpr(13: S_COMMENT,[CAST( AS BOOLEAN)]), FALSE)\"));\nsql = \"select count(*) from supplier where if(S_ADDRESS = 'kks',cast(S_NAME as boolean), false)\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\n\"PREDICATES: if(DictExpr(12: S_ADDRESS,[ = 'kks']), CAST(2: S_NAME AS BOOLEAN), FALSE)\"));\nsql = \"select count(*) from supplier where S_ADDRESS = 'kks' and S_COMMENT not like '%kks%'\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\n\"PREDICATES: DictExpr(12: S_ADDRESS,[ = 'kks']), NOT (7: S_COMMENT LIKE '%kks%')\"));\nsql = \"select count(*) from supplier where if(S_ADDRESS = 'kks',cast(S_COMMENT as boolean), false) \" +\n\"and S_COMMENT not like '%kks%'\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\n\"PREDICATES: if(DictExpr(12: S_ADDRESS,[ = 'kks']), \" +\n\"CAST(7: S_COMMENT AS 
BOOLEAN), FALSE), NOT (7: S_COMMENT LIKE '%kks%')\"));\n}\n@Test\npublic void testAggHaving() throws Exception {\nString sql = \"select count(*) from supplier group by S_ADDRESS having S_ADDRESS = 'kks' \";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"DictExpr(10: S_ADDRESS,[ = 'kks'])\"));\nAssert.assertTrue(plan.contains(\"group by: 10: S_ADDRESS\"));\nsql = \"select count(*) as b from supplier group by S_ADDRESS having b > 3\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\" | group by: 10: S_ADDRESS\\n\" +\n\" | having: 9: count > 3\"));\nsql = \"select sum(S_NATIONKEY) a, sum(S_ACCTBAL) as b, S_ADDRESS as c from supplier group by S_ADDRESS \" +\n\"having a < b*1.2 or c not like '%open%'\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"SELECT count(*) a FROM supplier having max(S_ADDRESS)='123'\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testJoin() throws Exception {\nString sql;\nString plan;\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql =\n\"select count(*) from supplier l \" +\n\"join [shuffle] (select max(S_ADDRESS) as S_ADDRESS from supplier) r \" +\n\"on l.S_ADDRESS = r.S_ADDRESS;\";\nplan = getVerboseExplain(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql =\n\"select count(*) from supplier l \" +\n\"join [broadcast] (select max(S_ADDRESS) as S_ADDRESS from supplier) r \" +\n\"on l.S_ADDRESS = r.S_ADDRESS;\";\nplan = getVerboseExplain(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select count(*) from supplier l \" +\n\"join [broadcast] (select max(id_int) as id_int from table_int) r \" +\n\"on l.S_ADDRESS = r.id_int where l.S_ADDRESS not like '%key%'\";\nplan = getVerboseExplain(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select *\\n\" +\n\"from(\\n\" +\n\" select S_SUPPKEY,\\n\" +\n\" S_NATIONKEY\\n\" +\n\" from supplier\\n\" +\n\" ) l\\n\" +\n\" right outer join [shuffle] (\\n\" +\n\" select S_SUPPKEY,\\n\" +\n\" max(S_ADDRESS) as MS\\n\" +\n\" from supplier_nullable\\n\" +\n\" group by S_SUPPKEY\\n\" +\n\" ) r on l.S_SUPPKEY = r.S_SUPPKEY\\n\" +\n\" and l.S_NATIONKEY = r.MS;\";\nplan = getVerboseExplain(sql);\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\nAssert.assertTrue(plan.contains(\"OutPut Partition: HASH_PARTITIONED: 9: S_SUPPKEY, 17\"));\nsql = \"select * from test.join1 right join test.join2 on join1.id = join2.id where round(2.0, 0) > 3.0\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 5:Decode\\n\" +\n\" | : \\n\" +\n\" | : \"));\nsql = \"SELECT * \\n\" +\n\"FROM emp \\n\" +\n\"WHERE EXISTS (SELECT dept.dept_id \\n\" +\n\" FROM dept \\n\" +\n\" WHERE emp.dept_id = dept.dept_id \\n\" +\n\" ORDER BY state) \\n\" +\n\"ORDER BY hiredate\";\nString planFragment = getFragmentPlan(sql);\nAssert.assertTrue(planFragment.contains(\" 5:Decode\\n\" +\n\" | : \"));\nsql = \"select * from join1 join pushdown_test on join1.id = pushdown_test.k1;\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 6:Decode\\n\" +\n\" | : \\n\" +\n\" | : \"));\nAssert.assertTrue(plan.contains(\"INNER JOIN (BROADCAST)\"));\nsql = \"select part_v2.p_partkey from lineitem join part_v2 on L_COMMENT = hex(P_NAME);\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select * from supplier l join supplier_nullable r where l.S_SUPPKEY = r.S_SUPPKEY \" +\n\"order by l.S_ADDRESS limit 10\";\nplan = 
getFragmentPlan(sql);\nassertContains(plan, \" 4:TOP-N\\n\" +\n\" | order by: 17: S_ADDRESS ASC\\n\" +\n\" | offset: 0\\n\" +\n\" | limit: 10\\n\" +\n\" | \\n\" +\n\" 3:HASH JOIN\\n\" +\n\" | join op: INNER JOIN (BROADCAST)\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | equal join conjunct: 1: S_SUPPKEY = 9: S_SUPPKEY\");\nsql = \"select max(S_ADDRESS), max(S_COMMENT) from \" +\n\"( select l.S_ADDRESS as S_ADDRESS,r.S_COMMENT as S_COMMENT,l.S_SUPPKEY from supplier l \" +\n\"join supplier_nullable r \" +\n\" on l.S_SUPPKEY = r.S_SUPPKEY ) tb group by S_SUPPKEY\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 8:Decode\\n\" +\n\" | : \\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 7:Project\\n\" +\n\" | : 21: S_ADDRESS\\n\" +\n\" | : 22: S_COMMENT\\n\" +\n\" | \\n\" +\n\" 6:AGGREGATE (update finalize)\\n\" +\n\" | output: max(19: S_ADDRESS), max(20: S_COMMENT)\\n\" +\n\" | group by: 1: S_SUPPKEY\");\nplan = getThriftPlan(sql);\nAssert.assertEquals(plan.split(\"\\n\").length, 3);\nassertContains(plan.split(\"\\n\")[0], \"query_global_dicts:\" +\n\"[TGlobalDict(columnId:19, strings:[6D 6F 63 6B], ids:[1]), \" +\n\"TGlobalDict(columnId:20, strings:[6D 6F 63 6B], ids:[1]), \" +\n\"TGlobalDict(columnId:21, strings:[6D 6F 63 6B], ids:[1]), \" +\n\"TGlobalDict(columnId:22, strings:[6D 6F 63 6B], ids:[1])])\");\nsql = \"select upper(ST_S_ADDRESS),\\n\" +\n\" upper(ST_S_COMMENT)\\n\" +\n\"from (\\n\" +\n\" select ST_S_ADDRESS, ST_S_COMMENT\\n\" +\n\" from (\\n\" +\n\" select l.S_ADDRESS as ST_S_ADDRESS,\\n\" +\n\" l.S_COMMENT ST_S_COMMENT,\\n\" +\n\" l.S_SUPPKEY S_SUPPKEY,\\n\" +\n\" l.S_NATIONKEY S_NATIONKEY\\n\" +\n\" from supplier l\\n\" +\n\" join [shuffle] supplier m on l.S_SUPPKEY = m.S_SUPPKEY\\n\" +\n\" order by l.S_ADDRESS\\n\" +\n\" limit 10\\n\" +\n\" ) star join [shuffle] supplier r on star.S_NATIONKEY = r.S_NATIONKEY\\n\" +\n\" union select 1,2\\n\" +\n\" ) sys\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 20:AGGREGATE (update serialize)\\n\" +\n\" | STREAMING\\n\" +\n\" | group by: 30: S_ADDRESS, 31: S_COMMENT\\n\" +\n\" | \\n\" +\n\" 0:UNION\\n\" +\n\" | \\n\" +\n\" |----19:EXCHANGE\\n\" +\n\" | \\n\" +\n\" 16:EXCHANGE\");\nassertContains(plan, \"Decode\");\nplan = getThriftPlan(sql);\nassertNotContains(plan.split(\"\\n\")[1], \"query_global_dicts\");\n}\n@Test\npublic void testJoinGlobalDict() throws Exception {\nString sql =\n\"select part_v2.P_COMMENT from lineitem join part_v2 \" +\n\"on L_PARTKEY = p_partkey where p_mfgr = 'MFGR\nString plan = getThriftPlan(sql);\nAssert.assertTrue(plan.contains(\"dict_string_id_to_int_ids:{}\"));\nAssert.assertTrue(plan.contains(\"DictExpr(28: P_MFGR,[ IN ('MFGR\nAssert.assertTrue(plan.contains(\"RESULT_SINK, result_sink:TResultSink(type:MYSQL_PROTOCAL)), \" +\n\"partition:TDataPartition(type:RANDOM, partition_exprs:[]), \" +\n\"query_global_dicts:[TGlobalDict(columnId:28\"));\nAssert.assertTrue(\nplan.contains(\"TDataPartition(type:UNPARTITIONED, partition_exprs:[]), is_merge:false, dest_dop:0)), \" +\n\"partition:TDataPartition(type:RANDOM, partition_exprs:[]), \" +\n\"query_global_dicts:[TGlobalDict(columnId:28\"));\n}\n@Test\npublic void testCountDistinctMultiColumns() throws Exception {\nFeConstants.runningUnitTest = true;\nString sql = \"select count(distinct S_SUPPKEY, S_COMMENT) from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"2:Decode\\n\" +\n\" | : \"));\nAssert.assertTrue(plan.contains(\":AGGREGATE (update serialize)\\n\" +\n\" | output: count(if(1: S_SUPPKEY IS NULL, NULL, 
7))\"));\nsql = \"select count(distinct S_ADDRESS, S_COMMENT) from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"4:Decode\\n\" +\n\" | : \\n\" +\n\" | : \"));\nAssert.assertTrue(plan.contains(\" 5:AGGREGATE (update serialize)\\n\" +\n\" | output: count(if(3 IS NULL, NULL, 7))\"));\nFeConstants.runningUnitTest = false;\n}\n@Test\npublic void testGroupByWithOrderBy() throws Exception {\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nString sql;\nString plan;\nsql = \"select max(S_NAME) as b from supplier group by S_ADDRESS order by b\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\"group by: 10: S_ADDRESS\"));\nsql = \"select S_ADDRESS from supplier order by S_ADDRESS\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 1:SORT\\n\" +\n\" | order by: [9, INT, false] ASC\"));\nsql = \"select S_NAME from supplier_nullable order by upper(S_ADDRESS), S_NAME\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 2:SORT\\n\" +\n\" | order by: [11, INT, true] ASC, [2, VARCHAR, false] ASC\"));\nsql = \"select substr(S_ADDRESS, 0, 1) from supplier group by substr(S_ADDRESS, 0, 1) \" +\n\"order by substr(S_ADDRESS, 0, 1)\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" 7:Decode\\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(10: S_ADDRESS,[substr(, 0, 1)])\"));\nAssert.assertTrue(plan.contains(\" 5:SORT\\n\" +\n\" | order by: [11, INT, true] ASC\"));\nsql = \"select approx_count_distinct(S_ADDRESS), upper(S_ADDRESS) from supplier \" +\n\" group by upper(S_ADDRESS)\" +\n\"order by 2\";\nplan = getVerboseExplain(sql);\nassertContains(plan, \" 3:AGGREGATE (update serialize)\\n\" +\n\" | STREAMING\\n\" +\n\" | aggregate: approx_count_distinct[([3, VARCHAR, false]);\");\nassertContains(plan, \"2:Decode\\n\" +\n\" | : \\n\" +\n\" | : \");\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testAnalytic() throws Exception {\nString sql;\nString plan;\nsql = \"select sum(rm) from (\" +\n\"select row_number() over( partition by L_COMMENT order by L_PARTKEY) as rm from lineitem\" +\n\") t where rm < 10\";\nplan = getCostExplain(sql);\nAssert.assertTrue(plan.contains(\" 2:SORT\\n\" +\n\" | order by: [20, INT, false] ASC, [2, INT, false] ASC\"));\nAssert.assertTrue(plan.contains(\" 1:PARTITION-TOP-N\\n\" +\n\" | partition by: [20: L_COMMENT, INT, false] \"));\nAssert.assertTrue(plan.contains(\" | order by: [20, INT, false] ASC, [2, INT, false] ASC\"));\nsql = \"select * from (select L_COMMENT,l_quantity, row_number() over \" +\n\"(partition by L_COMMENT order by l_quantity desc) rn from lineitem )t where rn <= 10;\";\nplan = getCostExplain(sql);\nassertContains(plan, \" 1:PARTITION-TOP-N\\n\" +\n\" | partition by: [19: L_COMMENT, INT, false] \\n\" +\n\" | partition limit: 10\\n\" +\n\" | order by: [19, INT, false] ASC, [5, DOUBLE, false] DESC\\n\" +\n\" | offset: 0\");\nsql = \"select * from (select L_COMMENT,l_quantity, rank() over \" +\n\"(partition by L_COMMENT order by l_quantity desc) rn from lineitem )t where rn <= 10;\";\nplan = getCostExplain(sql);\nassertContains(plan, \" 1:PARTITION-TOP-N\\n\" +\n\" | type: RANK\\n\" +\n\" | partition by: [19: L_COMMENT, INT, false] \\n\" +\n\" | partition limit: 10\\n\" +\n\" | order by: [19, INT, false] ASC, [5, DOUBLE, false] DESC\");\nsql = \"select * from (select L_COMMENT,l_quantity, rank() over \" +\n\"(partition by L_COMMENT, l_shipmode order by l_quantity desc) rn from lineitem )t where 
rn <= 10;\";\nplan = getCostExplain(sql);\nassertContains(plan, \" 1:PARTITION-TOP-N\\n\" +\n\" | type: RANK\\n\" +\n\" | partition by: [19: L_COMMENT, INT, false] , [15: L_SHIPMODE, CHAR, false] \\n\" +\n\" | partition limit: 10\\n\" +\n\" | order by: [19, INT, false] ASC, [15, VARCHAR, false] ASC, [5, DOUBLE, false] DESC\\n\" +\n\" | offset: 0\");\n}\n@Test\npublic void testProjectionPredicate() throws Exception {\nString sql = \"select count(t.a) from(select S_ADDRESS in ('kks', 'kks2') as a from supplier) as t\";\nString plan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nAssert.assertTrue(plan.contains(\"9 <-> DictExpr(11: S_ADDRESS,[ IN ('kks', 'kks2')])\"));\nsql = \"select count(t.a) from(select S_ADDRESS = 'kks' as a from supplier) as t\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nAssert.assertTrue(plan.contains(\"9 <-> DictExpr(11: S_ADDRESS,[ = 'kks'])\"));\nsql = \"select count(t.a) from(select S_ADDRESS is null as a from supplier) as t\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nAssert.assertTrue(plan.contains(\"9 <-> DictExpr(11: S_ADDRESS,[ IS NULL])\"));\nsql = \"select count(t.a) from(select S_ADDRESS is not null as a from supplier) as t\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\" dict_col=S_ADDRESS\"));\nAssert.assertTrue(plan.contains(\"9 <-> DictExpr(11: S_ADDRESS,[ IS NOT NULL])\"));\nsql = \"select count(t.a) from(select S_ADDRESS <=> 'kks' as a from supplier) as t\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"[3: S_ADDRESS, VARCHAR, false] <=> 'kks'\"));\nsql = \"select S_ADDRESS not like '%key%' from supplier\";\nplan = getVerboseExplain(sql);\nAssert.assertFalse(plan, plan.contains(\" dict_col=S_ADDRESS\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql = \"select count(distinct S_ADDRESS), count(distinct S_NAME) as a from supplier_nullable\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"multi_distinct_count[([9: count, VARBINARY, false]);\"));\nAssert.assertTrue(plan.contains(\"multi_distinct_count[([11: S_ADDRESS, INT, true]);\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testNestedExpressions() throws Exception {\nString sql;\nString plan;\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql = \"select upper(lower(S_ADDRESS)) from supplier group by lower(S_ADDRESS);\";\nplan = getVerboseExplain(sql);\nassertContains(plan, \"6:Project\\n\" +\n\" | output columns:\\n\" +\n\" | 10 <-> upper[([9, VARCHAR, true]); args: VARCHAR; result: VARCHAR; \" +\n\"args nullable: true; result nullable: true]\\n\" +\n\" | cardinality: 1\\n\" +\n\" | \\n\" +\n\" 5:Decode\\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(11: S_ADDRESS,[lower()])\");\nAssert.assertTrue(plan.contains(\" 4:AGGREGATE (merge finalize)\\n\" +\n\" | group by: [12: lower, INT, true]\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testMultiMaxMin() throws Exception {\nString sql;\nString plan;\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql = \"select count(distinct S_ADDRESS), max(S_ADDRESS), count(distinct S_SUPPKEY) as a from supplier_nullable\";\nplan = getVerboseExplain(sql);\nAssert.assertTrue(plan.contains(\"1:AGGREGATE (update serialize)\\n\" +\n\" | aggregate: multi_distinct_count[([12: S_ADDRESS, INT, 
true]);\"));\nAssert.assertTrue(plan.contains(\"3:AGGREGATE (merge finalize)\\n\" +\n\" | aggregate: multi_distinct_count[([9: count, VARBINARY, false]);\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\nconnectContext.getSessionVariable().setNewPlanerAggStage(2);\nsql = \"select min(distinct S_ADDRESS), max(S_ADDRESS) from supplier_nullable\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 1:AGGREGATE (update serialize)\\n\" +\n\" | output: min(11: S_ADDRESS), max(11: S_ADDRESS)\"));\nAssert.assertTrue(plan.contains(\" 3:AGGREGATE (merge finalize)\\n\" +\n\" | output: min(12: S_ADDRESS), max(13: S_ADDRESS)\"));\nAssert.assertTrue(plan.contains(\" 4:Decode\\n\" +\n\" | : \\n\" +\n\" | : \"));\nsql = \"select max(upper(S_ADDRESS)) from supplier_nullable\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 5:Decode\\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(11: S_ADDRESS,[upper()])\\n\" +\n\" | \"));\nsql = \"select max(if(S_ADDRESS='kks', upper(S_COMMENT), S_COMMENT)), \" +\n\"min(upper(S_COMMENT)) from supplier_nullable \" +\n\"group by upper(S_COMMENT)\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan, plan.contains(\"6:Decode\\n\" +\n\" | : \\n\" +\n\" | string functions:\\n\" +\n\" | : DictExpr(14: S_COMMENT,[upper()])\\n\" +\n\" | \\n\" +\n\" 5:Project\\n\" +\n\" | : 11: max\\n\" +\n\" | : 17: upper\"));\nconnectContext.getSessionVariable().setNewPlanerAggStage(0);\n}\n@Test\npublic void testSubqueryWithLimit() throws Exception {\nString sql = \"select t0.S_ADDRESS from (select S_ADDRESS, S_NATIONKEY from supplier_nullable limit 10) t0\" +\n\" inner join supplier on t0.S_NATIONKEY = supplier.S_NATIONKEY;\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \" 2:Decode\\n\" +\n\" | : \\n\");\n}\n@Test\npublic void testDecodeWithCast() throws Exception {\nString sql = \"select reverse(conv(cast(S_ADDRESS as bigint), NULL, NULL)) from supplier\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nAssert.assertTrue(plan.contains(\"reverse(conv(CAST(3: S_ADDRESS AS BIGINT), NULL, NULL))\"));\n}\n@Test\npublic void testAssignWrongNullableProperty() throws Exception {\nString sql;\nString plan;\nsql = \"SELECT S_ADDRESS, Dense_rank() OVER ( ORDER BY S_SUPPKEY) \" +\n\"FROM supplier UNION SELECT S_ADDRESS, Dense_rank() OVER ( ORDER BY S_SUPPKEY) FROM supplier;\";\nplan = getCostExplain(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select rank() over (order by S_ADDRESS) as rk from supplier_nullable\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 4:ANALYTIC\\n\" +\n\" | functions: [, rank(), ]\\n\" +\n\" | order by: 3 ASC\\n\" +\n\" | window: RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW\\n\" +\n\" | \\n\" +\n\" 3:Decode\\n\" +\n\" | : \");\nsql = \"select S_ADDRESS, S_COMMENT from (select S_ADDRESS, \" +\n\"S_COMMENT from supplier_nullable order by S_COMMENT limit 10) tb where S_ADDRESS = 'SS' order by S_ADDRESS \";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 5:SORT\\n\" +\n\" | order by: 3 ASC\\n\" +\n\" | offset: 0\\n\" +\n\" | \\n\" +\n\" 4:SELECT\\n\" +\n\" | predicates: 3 = 'SS'\\n\" +\n\" | \\n\" +\n\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | : \");\n}\n@Test\npublic void testHavingAggFunctionOnConstant() throws Exception {\nString sql = \"select S_ADDRESS from supplier GROUP BY S_ADDRESS HAVING (cast(count(null) as string)) IN (\\\"\\\")\";\nString plan = getCostExplain(sql);\nassertContains(plan, \"1:AGGREGATE (update 
finalize)\\n\" +\n\" | aggregate: count[(NULL); args: BOOLEAN; result: BIGINT; args nullable: true; result nullable: false]\\n\" +\n\" | group by: [10: S_ADDRESS, INT, false]\");\nassertContains(plan, \" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | cardinality: 1\");\n}\n@Test\npublic void testDecodeWithLimit() throws Exception {\nString sql = \"select count(*), S_ADDRESS from supplier group by S_ADDRESS limit 10\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \" 3:Decode\\n\" +\n\" | : \\n\");\n}\n@Test\npublic void testNoDecode() throws Exception {\nString sql = \"select *, to_bitmap(S_SUPPKEY) from supplier limit 1\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"select hex(10), s_address from supplier\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql = \"SELECT SUM(count) FROM (SELECT CAST((CAST((((\\\"C\\\")||(CAST(s_address AS STRING ) ))) \" +\n\"BETWEEN (((\\\"T\\\")||(\\\"\\\"))) AND (\\\"\\\") AS BOOLEAN) = true) \" +\n\"AND (CAST((((\\\"C\\\")||(CAST(s_address AS STRING ) ))) BETWEEN (((\\\"T\\\")||(\\\"\\\"))) \" +\n\"AND (\\\"\\\") AS BOOLEAN) IS NOT NULL) AS INT) as count FROM supplier ) t;\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\nsql =\n\"SELECT SUM(count) FROM (SELECT CAST((CAST((s_address) BETWEEN (((CAST(s_address AS STRING ) )||(\\\"\\\"))) \" +\n\"AND (s_address) AS BOOLEAN) = true) AND (CAST((s_address) \" +\n\"BETWEEN (((CAST(s_address AS STRING ) )||(\\\"\\\"))) AND (s_address) AS BOOLEAN) IS NOT NULL) AS INT) \" +\n\"as count FROM supplier ) t;\";\nplan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testDecodeOnExchange() throws Exception {\nString sql = \" SELECT \\n\" +\n\" DISTINCT * \\n\" +\n\"FROM \\n\" +\n\" (\\n\" +\n\" SELECT \\n\" +\n\" DISTINCT t1.v4 \\n\" +\n\" FROM \\n\" +\n\" t1, \\n\" +\n\" test_all_type as t2, \\n\" +\n\" test_all_type as t0 \\n\" +\n\" WHERE \\n\" +\n\" NOT (\\n\" +\n\" (t2.t1a) != (\\n\" +\n\" concat(t0.t1a, \\\"ji\\\")\\n\" +\n\" )\\n\" +\n\" ) \\n\" +\n\" ) t;\";\nString plan = getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testProjectWithUnionEmptySet() throws Exception {\nString sql;\nString plan;\nsql = \"select t1a from test_all_type group by t1a union all select v4 from t1 where false\";\nplan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 3:Decode\\n\" +\n\" | : \"));\nAssert.assertTrue(plan.contains(\" 2:Project\\n\" +\n\" | : 16: t1a\"));\nsql = \"SELECT 'all', 'allx' where 1 = 2 union all select distinct S_ADDRESS, S_ADDRESS from supplier;\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 3:Project\\n\" +\n\" | : 8\\n\" +\n\" | : clone(8)\\n\" +\n\" | \\n\" +\n\" 2:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 1:AGGREGATE (update finalize)\\n\" +\n\" | group by: 16: S_ADDRESS\");\nsql = \"SELECT 'all', 'all', 'all', 'all' where 1 = 2 union all \" +\n\"select distinct S_ADDRESS, S_SUPPKEY + 1, S_SUPPKEY + 1, S_ADDRESS + 1 from supplier;\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 4:Project\\n\" +\n\" | : 9\\n\" +\n\" | : 25: cast\\n\" +\n\" | : CAST(15: expr AS VARCHAR)\\n\" +\n\" | : CAST(CAST(9 AS DOUBLE) + 1.0 AS VARCHAR)\\n\" +\n\" | common expressions:\\n\" +\n\" | : CAST(15: expr AS VARCHAR)\\n\" +\n\" | \\n\" +\n\" 3:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 2:AGGREGATE (update finalize)\\n\" +\n\" | group by: 24: S_ADDRESS, 15: expr\\n\" +\n\" | \\n\" 
+\n\" 1:Project\\n\" +\n\" | : CAST(7: S_SUPPKEY AS BIGINT) + 1\\n\" +\n\" | : 24: S_ADDRESS\");\n}\n@Test\npublic void testCTEWithDecode() throws Exception {\nconnectContext.getSessionVariable().setCboCteReuse(true);\nconnectContext.getSessionVariable().setEnablePipelineEngine(true);\nconnectContext.getSessionVariable().setCboCTERuseRatio(0);\nString sql = \"with v1 as( select S_ADDRESS a, count(*) b from supplier group by S_ADDRESS) \" +\n\"select x1.a, x1.b from v1 x1 join v1 x2 on x1.a=x2.a\";\nString plan = getThriftPlan(sql);\nAssert.assertTrue(\nplan.contains(\"query_global_dicts:[TGlobalDict(columnId:28, strings:[6D 6F 63 6B], ids:[1])\"));\nconnectContext.getSessionVariable().setCboCteReuse(false);\nconnectContext.getSessionVariable().setEnablePipelineEngine(false);\n}\n@Test\npublic void testMetaScan() throws Exception {\nString sql = \"select max(v1), min(v1) from t0 [_META_]\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 0:MetaScan\\n\" +\n\" Table: t0\\n\" +\n\" : max_v1\\n\" +\n\" : min_v1\"));\nString thrift = getThriftPlan(sql);\nAssert.assertTrue(thrift.contains(\"id_to_names:{6=max_v1, 7=min_v1}\"));\n}\n@Test\npublic void testMetaScan2() throws Exception {\nString sql = \"select max(t1c), min(t1d), dict_merge(t1a) from test_all_type [_META_]\";\nString plan = getFragmentPlan(sql);\nAssert.assertTrue(plan.contains(\" 0:MetaScan\\n\" +\n\" Table: test_all_type\\n\" +\n\" : dict_merge_t1a\\n\" +\n\" : max_t1c\\n\" +\n\" : min_t1d\"));\nString thrift = getThriftPlan(sql);\nAssert.assertTrue(thrift.contains(\"TFunctionName(function_name:dict_merge), \" +\n\"binary_type:BUILTIN, arg_types:[TTypeDesc(types:[TTypeNode(type:ARRAY), \" +\n\"TTypeNode(type:SCALAR, scalar_type:TScalarType(type:VARCHAR, len:-1))])]\"));\n}\n@Test\npublic void testMetaScan3() throws Exception {\nString sql = \"select max(t1c), min(t1d), dict_merge(t1a) from test_all_type [_META_]\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"1:AGGREGATE (update serialize)\\n\" +\n\" | output: max(max_t1c), min(min_t1d), dict_merge(dict_merge_t1a)\\n\" +\n\" | group by: \\n\" +\n\" | \\n\" +\n\" 0:MetaScan\\n\" +\n\" Table: test_all_type\\n\" +\n\" : dict_merge_t1a\\n\" +\n\" : max_t1c\\n\" +\n\" : min_t1d\");\n}\n@Test\npublic void testMetaScan4() throws Exception {\nString sql = \"select sum(t1c), min(t1d), t1a from test_all_type [_META_]\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"2:AGGREGATE (update serialize)\\n\" +\n\" | output: sum(3: t1c), min(4: t1d), any_value(1: t1a)\\n\" +\n\" | group by: \\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | : t1a\\n\" +\n\" | : t1c\\n\" +\n\" | : t1d\\n\" +\n\" | \\n\" +\n\" 0:MetaScan\\n\" +\n\" Table: test_all_type\");\nsql = \"select sum(t1c) from test_all_type [_META_] group by t1a\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \"2:AGGREGATE (update serialize)\\n\" +\n\" | STREAMING\\n\" +\n\" | output: sum(3: t1c)\\n\" +\n\" | group by: 1: t1a\\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | : t1a\\n\" +\n\" | : t1c\\n\" +\n\" | \\n\" +\n\" 0:MetaScan\\n\" +\n\" Table: test_all_type\");\n}\n@Test\npublic void testHasGlobalDictButNotFound() throws Exception {\nIDictManager dictManager = IDictManager.getInstance();\nnew Expectations(dictManager) {\n{\ndictManager.hasGlobalDict(anyLong, \"S_ADDRESS\", anyLong);\nresult = true;\ndictManager.getGlobalDict(anyLong, \"S_ADDRESS\");\nresult = Optional.empty();\n}\n};\nString sql = \"select S_ADDRESS from supplier group by S_ADDRESS\";\nString plan = 
getFragmentPlan(sql);\nAssert.assertFalse(plan.contains(\"Decode\"));\n}\n@Test\npublic void testExtractProject() throws Exception {\nString sql;\nString plan;\nsql = \"select max(upper(S_ADDRESS)), min(upper(S_ADDRESS)), max(S_ADDRESS), sum(S_SUPPKEY + 1) from supplier\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \" 2:AGGREGATE (update finalize)\\n\" +\n\" | output: max(18: upper), min(18: upper), max(17: S_ADDRESS), sum(1: S_SUPPKEY), count(*)\\n\" +\n\" | group by: \\n\" +\n\" | \\n\" +\n\" 1:Project\\n\" +\n\" | : 1: S_SUPPKEY\\n\" +\n\" | : 17: S_ADDRESS\\n\" +\n\" | : DictExpr(17: S_ADDRESS,[upper()])\");\n}\n@Test\npublic void testCompoundPredicate() throws Exception {\nString sql = \"select count(*) from supplier group by S_ADDRESS having \" +\n\"if(S_ADDRESS > 'a' and S_ADDRESS < 'b', true, false)\";\nString plan = getVerboseExplain(sql);\nassertContains(plan,\n\"DictExpr(10: S_ADDRESS,[if(( > 'a') \" +\n\"AND ( < 'b'), TRUE, FALSE)])\");\nsql = \"select count(*) from supplier group by S_ADDRESS having \" +\n\"if(not S_ADDRESS like '%a%' and S_ADDRESS < 'b', true, false)\";\nplan = getVerboseExplain(sql);\nassertContains(plan,\n\"DictExpr(10: S_ADDRESS,[if((NOT ( LIKE '%a%')) \" +\n\"AND ( < 'b'), TRUE, FALSE)])\");\n}\n@Test\npublic void testComplexScalarOperator_1() throws Exception {\nString sql = \"select case when s_address = 'test' then 'a' \" +\n\"when s_phone = 'b' then 'b' \" +\n\"when coalesce(s_address, 'c') = 'c' then 'c' \" +\n\"else 'a' end from supplier; \";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"1:Project\\n\" +\n\" | : CASE WHEN DictExpr(10: S_ADDRESS,[ = 'test']) THEN 'a' \" +\n\"WHEN 5: S_PHONE = 'b' THEN 'b' \" +\n\"WHEN coalesce(DictExpr(10: S_ADDRESS,[]), 'c') = 'c' THEN 'c' \" +\n\"ELSE 'a' END\\n\" +\n\" |\");\nsql = \"select case when s_address = 'test' then 'a' \" +\n\"when s_phone = 'b' then 'b' \" +\n\"when upper(s_address) = 'c' then 'c' \" +\n\"else 'a' end from supplier; \";\nplan = getFragmentPlan(sql);\nassertContains(plan, \"1:Project\\n\" +\n\" | : CASE WHEN DictExpr(10: S_ADDRESS,[ = 'test']) THEN 'a' \" +\n\"WHEN 5: S_PHONE = 'b' THEN 'b' \" +\n\"WHEN DictExpr(10: S_ADDRESS,[upper()]) = 'c' THEN 'c' \" +\n\"ELSE 'a' END\\n\" +\n\" |\");\n}\n@Test\npublic void testComplexScalarOperator_2() throws Exception {\nString sql = \"select count(*) from supplier where s_phone = 'a' or coalesce(s_address, 'c') = 'c' \" +\n\"or s_address = 'address'\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"0:OlapScanNode\\n\" +\n\" TABLE: supplier\\n\" +\n\" PREAGGREGATION: ON\\n\" +\n\" PREDICATES: ((5: S_PHONE = 'a') OR (coalesce(DictExpr(12: S_ADDRESS,[]), 'c') = 'c')) \" +\n\"OR (DictExpr(12: S_ADDRESS,[ = 'address']))\");\nsql = \"select count(*) from supplier where s_phone = 'a' or upper(s_address) = 'c' \" +\n\"or s_address = 'address'\";\nplan = getFragmentPlan(sql);\nassertContains(plan, \"0:OlapScanNode\\n\" +\n\" TABLE: supplier\\n\" +\n\" PREAGGREGATION: ON\\n\" +\n\" PREDICATES: ((5: S_PHONE = 'a') OR (DictExpr(12: S_ADDRESS,[upper()]) = 'c')) \" +\n\"OR (DictExpr(12: S_ADDRESS,[ = 'address']))\");\n}\n@Test\npublic void testAggWithProjection() throws Exception {\nString sql = \"select cast(max(s_address) as date) from supplier\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"2:Project\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[CAST( AS DATE)])\\n\" +\n\" | \\n\" +\n\" 1:AGGREGATE (update finalize)\\n\" +\n\" | output: max(11: S_ADDRESS)\\n\" +\n\" | group by: \");\n}\n@Test\npublic void 
testJoinWithProjection() throws Exception {\nString sql = \"select s_address, cast(t1.s_address as date), cast(t1.s_phone as date), upper(t1.s_address),\" +\n\" cast(t2.a as date), 123 from supplier t1 join (select max(s_address) a from supplier) t2 \";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"5:Project\\n\" +\n\" | : DictExpr(23: S_ADDRESS,[CAST( AS DATE)])\\n\" +\n\" | : CAST(5: S_PHONE AS DATE)\\n\" +\n\" | : DictExpr(25: S_ADDRESS,[CAST( AS DATE)])\\n\" +\n\" | : 123\\n\" +\n\" | : 23: S_ADDRESS\\n\" +\n\" | : DictExpr(23: S_ADDRESS,[upper()])\\n\" +\n\" | \\n\" +\n\" 4:NESTLOOP JOIN\\n\" +\n\" | join op: CROSS JOIN\\n\" +\n\" | colocate: false, reason: \\n\" +\n\" | \\n\" +\n\" |----3:EXCHANGE\");\n}\n@Test\npublic void testTopNWithProjection() throws Exception {\nString sql =\n\"select t2.s_address, cast(t1.a as date), concat(t1.b, '') from (select max(s_address) a, min(s_phone) b \" +\n\"from supplier group by s_address) t1 join (select s_address from supplier) t2 order by t1.a\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"10:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 9:Project\\n\" +\n\" | : 19: cast\\n\" +\n\" | : 20: concat\\n\" +\n\" | : 23: S_ADDRESS\\n\" +\n\" | \\n\" +\n\" 8:MERGING-EXCHANGE\");\n}\n@Test\npublic void testLogicalProperty() throws Exception {\nString sql = \"select cast(max(s_address) as date) from supplier where s_suppkey = 1 group by S_PHONE\";\nExecPlan execPlan = getExecPlan(sql);\nOlapScanNode olapScanNode = (OlapScanNode) execPlan.getScanNodes().get(0);\nAssert.assertEquals(0, olapScanNode.getBucketExprs().size());\nString plan = execPlan.getExplainString(TExplainLevel.NORMAL);\nassertContains(plan, \"3:Project\\n\" +\n\" | : DictExpr(12: S_ADDRESS,[CAST( AS DATE)])\\n\" +\n\" | \\n\" +\n\" 2:AGGREGATE (update finalize)\\n\" +\n\" | output: max(11: S_ADDRESS)\\n\" +\n\" | group by: 5: S_PHONE\");\n}\n@Test\npublic void testLowCardForLimit() throws Exception {\nString sql = \"SELECT * from (SELECT t_a_0.`S_ADDRESS` AS f_ax_0, t_a_0.`S_ADDRESS` AS f_ax_1 FROM \" +\n\"(select * from (select * from supplier limit 20000) b) t_a_0) t_a_1 ORDER BY t_a_1.f_ax_0 desc LIMIT 0,20;\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"3:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 2:TOP-N\\n\" +\n\" | order by: 9: S_ADDRESS DESC\\n\" +\n\" | offset: 0\\n\" +\n\" | limit: 20\");\n}\n@Test\npublic void testNeedDecode_1() throws Exception {\nString sql = \"with cte_1 as (\\n\" +\n\" select\\n\" +\n\" t0.P_NAME as a,\\n\" +\n\" t0.P_BRAND as b,\\n\" +\n\" t1.s_name as c,\\n\" +\n\" t1.s_address as d,\\n\" +\n\" t1.s_address as e,\\n\" +\n\" t1.s_nationkey as f\\n\" +\n\" from\\n\" +\n\" part_v2 t0\\n\" +\n\" left join supplier_nullable t1 on t0.P_SIZE > t1.s_suppkey\\n\" +\n\")\\n\" +\n\"select\\n\" +\n\" cte_1.b,\\n\" +\n\" if(\\n\" +\n\" cte_1.d in ('hz', 'bj'),\\n\" +\n\" cte_1.b,\\n\" +\n\" if (cte_1.e in ('hz'), 1035, cte_1.f)\\n\" +\n\" ),\\n\" +\n\" count(distinct if(cte_1.c = '', cte_1.e, null))\\n\" +\n\"from\\n\" +\n\" cte_1\\n\" +\n\"group by\\n\" +\n\" cte_1.b,\\n\" +\n\" if(\\n\" +\n\" cte_1.d in ('hz', 'bj'),\\n\" +\n\" cte_1.b,\\n\" +\n\" if (cte_1.e in ('hz'), 1035, cte_1.f)\\n\" +\n\" );\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"6:Project\\n\" +\n\" | : 22\\n\" +\n\" | : if(31: S_ADDRESS IN ('hz', 'bj'), 22, CAST(if(31: S_ADDRESS = 'hz', \" +\n\"1035, 32: S_NATIONKEY) AS VARCHAR))\\n\" +\n\" | : if(30: S_NAME = '', 31: S_ADDRESS, NULL)\\n\" +\n\" | \\n\" +\n\" 5:Decode\\n\" 
+\n\" | : \");\n}\n@Test\npublic void testNeedDecode_2() throws Exception {\nString sql = \"with cte_1 as (\\n\" +\n\" select\\n\" +\n\" t0.P_NAME as a,\\n\" +\n\" t0.P_BRAND as b,\\n\" +\n\" t1.s_name as c,\\n\" +\n\" t1.s_address as d,\\n\" +\n\" t1.s_address as e,\\n\" +\n\" t1.s_nationkey as f\\n\" +\n\" from\\n\" +\n\" part_v2 t0\\n\" +\n\" left join supplier_nullable t1 on t0.P_SIZE > t1.s_suppkey\\n\" +\n\")\\n\" +\n\"select\\n\" +\n\" if(\\n\" +\n\" cte_1.d in ('hz', 'bj'),\\n\" +\n\" cte_1.b,\\n\" +\n\" if (cte_1.e in ('hz'), 1035, cte_1.f)\\n\" +\n\" ),\\n\" +\n\" count(distinct if(cte_1.c = '', cte_1.e, null))\\n\" +\n\"from\\n\" +\n\" cte_1\\n\" +\n\"group by\\n\" +\n\" if(\\n\" +\n\" cte_1.d in ('hz', 'bj'),\\n\" +\n\" cte_1.b,\\n\" +\n\" if (cte_1.e in ('hz'), 1035, cte_1.f)\\n\" +\n\" );\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"6:Project\\n\" +\n\" | : if(31: S_ADDRESS IN ('hz', 'bj'), 22, CAST(if(31: S_ADDRESS = 'hz', 1035, \" +\n\"32: S_NATIONKEY) AS VARCHAR))\\n\" +\n\" | : if(30: S_NAME = '', 31: S_ADDRESS, NULL)\\n\" +\n\" | \\n\" +\n\" 5:Decode\\n\" +\n\" | : \");\n}\n@Test\npublic void testProjectionRewrite() throws Exception {\nString sql = \"SELECT '2023-03-26' D_DATE, c_user, concat(C_NODEVALUE, '-', C_BROKERNAME) AS C_NAME, c_dept, \" +\n\"c_par, '\u4eba', '1', '\u89c4', '1', 'KPI1' C_KPICODE, round(sum(if(c_par='01', F_ASSET_zb, F_ASSET))/100000000, 5) \" +\n\"F_CURRENTDATA FROM low_card_t1 WHERE c_par IN ( '02', '01' ) AND D_DATE='2023-03-26' \" +\n\"GROUP BY C_BROKERNAME, c_dept, c_par, c_user, C_NODEVALUE \" +\n\"union all \" +\n\"SELECT '2023-03-26' D_DATE, c_mr AS C_CODE, CASE WHEN c_mr = '01' THEN '\u90e8' ELSE '\u6237\u90e8' END C_NAME, \" +\n\"c_mr c_dept, c_mr c_par, '\u95e8' AS C_ROLE, '3' AS F_ROLERANK, '\u5165' AS C_KPITYPE, '2' AS F_KPIRANK, \" +\n\"'KPI2' C_KPICODE, ifnull(ROUND(SUM(fee_zb)/100000000, 5), 0) AS F_CURRENTDATA FROM low_card_t2 \" +\n\"WHERE c_mr IN ('02', '03') AND D_DATE>concat(year(str_to_date('2023-03-26', '%Y-%m-%d'))-1, '1231') \" +\n\"AND d_date<='2023-03-26' GROUP BY c_mr;\";\nString plan = getFragmentPlan(sql);\nassertContains(plan, \"10:Decode\\n\" +\n\" | : \\n\" +\n\" | \\n\" +\n\" 9:AGGREGATE (update finalize)\\n\" +\n\" | output: sum(24: fee_zb)\\n\" +\n\" | group by: 55: c_mr\");\n}\n}" + }, + { + "comment": "For now, mentioned that this method will disregard non-record type members as a comment.", + "method_body": "public static String getModifiedSignature(DocumentServiceContext context, String signature) {\nMatcher matcher = TYPE_NAME_DECOMPOSE_PATTERN.matcher(signature);\nwhile (matcher.find()) {\nString orgName = matcher.group(1);\nString moduleName = matcher.group(2);\nString matchedString = matcher.group();\nString modulePrefix = getModulePrefix(context, orgName, moduleName);\nString replaceText = modulePrefix.isEmpty() ? matchedString + Names.VERSION_SEPARATOR : matchedString;\nsignature = signature.replace(replaceText, modulePrefix);\n}\nreturn signature;\n}", + "target_code": "}", + "method_body_after": "public static String getModifiedSignature(DocumentServiceContext context, String signature) {\nMatcher matcher = TYPE_NAME_DECOMPOSE_PATTERN.matcher(signature);\nwhile (matcher.find()) {\nString orgName = matcher.group(1);\nString moduleName = matcher.group(2);\nString matchedString = matcher.group();\nString modulePrefix = getModulePrefix(context, orgName, moduleName);\nString replaceText = modulePrefix.isEmpty() ? 
matchedString + Names.VERSION_SEPARATOR : matchedString;\nsignature = signature.replace(replaceText, modulePrefix);\n}\nreturn signature;\n}", + "context_before": "class CommonUtil {\npublic static final String MD_LINE_SEPARATOR = \" \" + System.lineSeparator();\npublic static final String LINE_SEPARATOR = System.lineSeparator();\npublic static final String FILE_SEPARATOR = File.separator;\npublic static final Pattern MD_NEW_LINE_PATTERN = Pattern.compile(\"\\\\s\\\\s\\\\r\\\\n?|\\\\s\\\\s\\\\n|\\\\r\\\\n?|\\\\n\");\npublic static final String BALLERINA_HOME;\npublic static final boolean COMPILE_OFFLINE;\npublic static final String BALLERINA_CMD;\npublic static final String URI_SCHEME_BALA = \"bala\";\npublic static final String URI_SCHEME_EXPR = \"expr\";\npublic static final String URI_SCHEME_FILE = \"file\";\npublic static final String LANGUAGE_ID_BALLERINA = \"ballerina\";\npublic static final String LANGUAGE_ID_TOML = \"toml\";\npublic static final String MARKDOWN_MARKUP_KIND = \"markdown\";\npublic static final String BALLERINA_ORG_NAME = \"ballerina\";\npublic static final String SDK_VERSION = System.getProperty(\"ballerina.version\");\npublic static final String EXPR_SCHEME = \"expr\";\npublic static final List PRE_DECLARED_LANG_LIBS = Arrays.asList(\"lang.boolean\", \"lang.decimal\",\n\"lang.error\", \"lang.float\", \"lang.future\", \"lang.int\", \"lang.map\", \"lang.object\", \"lang.stream\",\n\"lang.string\", \"lang.table\", \"lang.transaction\", \"lang.typedesc\", \"lang.xml\");\npublic static final List BALLERINA_KEYWORDS = SyntaxInfo.keywords();\npublic static final Set QUALIFIER_KINDS = Set.of(SyntaxKind.SERVICE_KEYWORD,\nSyntaxKind.CLIENT_KEYWORD, SyntaxKind.ISOLATED_KEYWORD, SyntaxKind.TRANSACTIONAL_KEYWORD,\nSyntaxKind.PUBLIC_KEYWORD, SyntaxKind.PRIVATE_KEYWORD);\npublic static final String SELF_KW = \"self\";\nprivate static final Pattern TYPE_NAME_DECOMPOSE_PATTERN = Pattern.compile(\"([\\\\w_.]*)/([\\\\w._]*):([\\\\w.-]*)\");\nstatic {\nBALLERINA_HOME = System.getProperty(\"ballerina.home\");\nString onlineCompilation = System.getProperty(\"ls.compilation.online\");\nCOMPILE_OFFLINE = !Boolean.parseBoolean(onlineCompilation);\nBALLERINA_CMD = BALLERINA_HOME + File.separator + \"bin\" + File.separator + \"bal\" +\n(SystemUtils.IS_OS_WINDOWS ? 
\".bat\" : \"\");\n}\nprivate CommonUtil() {\n}\n/**\n* Convert the syntax-node line range into a lsp4j range.\n*\n* @param lineRange - line range\n* @return {@link Range} converted range\n*/\npublic static Range toRange(LineRange lineRange) {\nreturn new Range(toPosition(lineRange.startLine()), toPosition(lineRange.endLine()));\n}\n/**\n* Converts syntax-node line position into a lsp4j position.\n*\n* @param linePosition - line position\n* @return {@link Position} converted position\n*/\npublic static Position toPosition(LinePosition linePosition) {\nreturn new Position(linePosition.line(), linePosition.offset());\n}\n/**\n* Get the text edit for an auto import statement.\n* Here we do not check whether the package is not already imported or a predeclared lang-lib, Particular\n* check should be done before usage\n*\n* @param orgName package org name\n* @param pkgName package name\n* @param context Language server context\n* @return {@link List} List of Text Edits to apply\n*/\npublic static List getAutoImportTextEdits(@Nonnull String orgName, String pkgName,\nDocumentServiceContext context) {\nMap currentDocImports = context.currentDocImportsMap();\nOptional last = CommonUtil.getLastItem(new ArrayList<>(currentDocImports.keySet()));\nint endLine = last.map(node -> node.lineRange().endLine().line()).orElse(0);\nPosition start = new Position(endLine, 0);\nString importStatement = ItemResolverConstants.IMPORT + \" \"\n+ (!orgName.isEmpty() ? orgName + SLASH_KEYWORD_KEY : orgName)\n+ pkgName + SEMI_COLON_SYMBOL_KEY\n+ CommonUtil.LINE_SEPARATOR;\nreturn Collections.singletonList(new TextEdit(new Range(start, start), importStatement));\n}\n/**\n* Get the text edit for an auto import statement.\n* Here we do not check whether the package is not already imported or a predeclared lang-lib, Particular\n* check should be done before usage\n*\n* @param orgName package org name\n* @param pkgName package name\n* @param alias import alias\n* @param context Language server context\n* @return {@link List} List of Text Edits to apply\n*/\npublic static List getAutoImportTextEdits(@Nonnull String orgName, String pkgName, String alias,\nDocumentServiceContext context) {\nMap currentDocImports = context.currentDocImportsMap();\nOptional last = CommonUtil.getLastItem(new ArrayList<>(currentDocImports.keySet()));\nint endLine = last.map(node -> node.lineRange().endLine().line()).orElse(0);\nPosition start = new Position(endLine, 0);\nStringBuilder builder = new StringBuilder(ItemResolverConstants.IMPORT + \" \"\n+ (!orgName.isEmpty() ? 
orgName + SLASH_KEYWORD_KEY : orgName)\n+ pkgName);\nif (!alias.isEmpty()) {\nbuilder.append(\" as \").append(alias);\n}\nbuilder.append(SEMI_COLON_SYMBOL_KEY).append(CommonUtil.LINE_SEPARATOR);\nreturn Collections.singletonList(new TextEdit(new Range(start, start), builder.toString()));\n}\n/**\n* Get the default value for the given BType.\n*\n* @param bType Type descriptor to get the default value\n* @param offset snippet offset.\n* @return {@link String} Default value as a String\n*/\npublic static Optional getDefaultValueForType(TypeSymbol bType, int offset) {\nreturn getDefaultValueForType(bType, true, offset);\n}\npublic static Optional getDefaultPlaceholderForType(TypeSymbol bType) {\nreturn getDefaultValueForType(bType)\n.map(defaultValue -> defaultValue.replace(\"}\", \"\\\\}", + "context_after": "class CommonUtil {\npublic static final String MD_LINE_SEPARATOR = \" \" + System.lineSeparator();\npublic static final String LINE_SEPARATOR = System.lineSeparator();\npublic static final String FILE_SEPARATOR = File.separator;\npublic static final Pattern MD_NEW_LINE_PATTERN = Pattern.compile(\"\\\\s\\\\s\\\\r\\\\n?|\\\\s\\\\s\\\\n|\\\\r\\\\n?|\\\\n\");\npublic static final String BALLERINA_HOME;\npublic static final boolean COMPILE_OFFLINE;\npublic static final String BALLERINA_CMD;\npublic static final String URI_SCHEME_BALA = \"bala\";\npublic static final String URI_SCHEME_EXPR = \"expr\";\npublic static final String URI_SCHEME_FILE = \"file\";\npublic static final String LANGUAGE_ID_BALLERINA = \"ballerina\";\npublic static final String LANGUAGE_ID_TOML = \"toml\";\npublic static final String MARKDOWN_MARKUP_KIND = \"markdown\";\npublic static final String BALLERINA_ORG_NAME = \"ballerina\";\npublic static final String SDK_VERSION = System.getProperty(\"ballerina.version\");\npublic static final String EXPR_SCHEME = \"expr\";\npublic static final List PRE_DECLARED_LANG_LIBS = Arrays.asList(\"lang.boolean\", \"lang.decimal\",\n\"lang.error\", \"lang.float\", \"lang.future\", \"lang.int\", \"lang.map\", \"lang.object\", \"lang.stream\",\n\"lang.string\", \"lang.table\", \"lang.transaction\", \"lang.typedesc\", \"lang.xml\");\npublic static final List BALLERINA_KEYWORDS = SyntaxInfo.keywords();\npublic static final Set QUALIFIER_KINDS = Set.of(SyntaxKind.SERVICE_KEYWORD,\nSyntaxKind.CLIENT_KEYWORD, SyntaxKind.ISOLATED_KEYWORD, SyntaxKind.TRANSACTIONAL_KEYWORD,\nSyntaxKind.PUBLIC_KEYWORD, SyntaxKind.PRIVATE_KEYWORD);\npublic static final String SELF_KW = \"self\";\nprivate static final Pattern TYPE_NAME_DECOMPOSE_PATTERN = Pattern.compile(\"([\\\\w_.]*)/([\\\\w._]*):([\\\\w.-]*)\");\nstatic {\nBALLERINA_HOME = System.getProperty(\"ballerina.home\");\nString onlineCompilation = System.getProperty(\"ls.compilation.online\");\nCOMPILE_OFFLINE = !Boolean.parseBoolean(onlineCompilation);\nBALLERINA_CMD = BALLERINA_HOME + File.separator + \"bin\" + File.separator + \"bal\" +\n(SystemUtils.IS_OS_WINDOWS ? 
\".bat\" : \"\");\n}\nprivate CommonUtil() {\n}\n/**\n* Convert the syntax-node line range into a lsp4j range.\n*\n* @param lineRange - line range\n* @return {@link Range} converted range\n*/\npublic static Range toRange(LineRange lineRange) {\nreturn new Range(toPosition(lineRange.startLine()), toPosition(lineRange.endLine()));\n}\n/**\n* Convert the syntax-node line position into a lsp4j range.\n*\n* @param linePosition - line position.\n* @return {@link Range} converted range\n*/\npublic static Range toRange(LinePosition linePosition) {\nreturn new Range(toPosition(linePosition), toPosition(linePosition));\n}\n/**\n* Converts syntax-node line position into a lsp4j position.\n*\n* @param linePosition - line position\n* @return {@link Position} converted position\n*/\npublic static Position toPosition(LinePosition linePosition) {\nreturn new Position(linePosition.line(), linePosition.offset());\n}\n/**\n* Get the text edit for an auto import statement.\n* Here we do not check whether the package is not already imported or a predeclared lang-lib, Particular\n* check should be done before usage\n*\n* @param orgName package org name\n* @param pkgName package name\n* @param context Language server context\n* @return {@link List} List of Text Edits to apply\n*/\npublic static List getAutoImportTextEdits(@Nonnull String orgName, String pkgName,\nDocumentServiceContext context) {\nMap currentDocImports = context.currentDocImportsMap();\nOptional last = CommonUtil.getLastItem(new ArrayList<>(currentDocImports.keySet()));\nint endLine = last.map(node -> node.lineRange().endLine().line()).orElse(0);\nPosition start = new Position(endLine, 0);\nString importStatement = ItemResolverConstants.IMPORT + \" \"\n+ (!orgName.isEmpty() ? orgName + SLASH_KEYWORD_KEY : orgName)\n+ pkgName + SEMI_COLON_SYMBOL_KEY\n+ CommonUtil.LINE_SEPARATOR;\nreturn Collections.singletonList(new TextEdit(new Range(start, start), importStatement));\n}\n/**\n* Get the text edit for an auto import statement.\n* Here we do not check whether the package is not already imported or a predeclared lang-lib, Particular\n* check should be done before usage\n*\n* @param orgName package org name\n* @param pkgName package name\n* @param alias import alias\n* @param context Language server context\n* @return {@link List} List of Text Edits to apply\n*/\npublic static List getAutoImportTextEdits(@Nonnull String orgName, String pkgName, String alias,\nDocumentServiceContext context) {\nMap currentDocImports = context.currentDocImportsMap();\nOptional last = CommonUtil.getLastItem(new ArrayList<>(currentDocImports.keySet()));\nint endLine = last.map(node -> node.lineRange().endLine().line()).orElse(0);\nPosition start = new Position(endLine, 0);\nStringBuilder builder = new StringBuilder(ItemResolverConstants.IMPORT + \" \"\n+ (!orgName.isEmpty() ? 
orgName + SLASH_KEYWORD_KEY : orgName)\n+ pkgName);\nif (!alias.isEmpty()) {\nbuilder.append(\" as \").append(alias);\n}\nbuilder.append(SEMI_COLON_SYMBOL_KEY).append(CommonUtil.LINE_SEPARATOR);\nreturn Collections.singletonList(new TextEdit(new Range(start, start), builder.toString()));\n}\n/**\n* Get the default value for the given BType.\n*\n* @param bType Type descriptor to get the default value\n* @param offset snippet offset.\n* @return {@link String} Default value as a String\n*/\npublic static Optional getDefaultValueForType(TypeSymbol bType, int offset) {\nreturn getDefaultValueForType(bType, true, offset);\n}\npublic static Optional getDefaultPlaceholderForType(TypeSymbol bType) {\nreturn getDefaultValueForType(bType)\n.map(defaultValue -> defaultValue.replace(\"}\", \"\\\\}" + }, + { + "comment": "Does this get the value for an environment variable by the name `AZURE_EVENT_HUBS_CONNECTION_STRING`? I don't believe any of the other languages support reading from env variables at the moment. Was this feature ported over from Track1?", + "method_body": "public EventHubClient build() {\nconfiguration = configuration == null ? ConfigurationManager.getConfiguration().clone() : configuration;\nif (credentials == null) {\nfinal String connectionString = configuration.get(AZURE_EVENT_HUBS_CONNECTION_STRING);\nif (ImplUtils.isNullOrEmpty(connectionString)) {\nthrow new IllegalArgumentException(\"Credentials have not been set using 'EventHubClientBuilder.credentials(String)'\"\n+ \"EventHubClientBuilder.credentials(String, String, TokenCredential). And the connection string is\"\n+ \"not set in the '\" + AZURE_EVENT_HUBS_CONNECTION_STRING + \"' environment variable.\");\n}\ncredential(connectionString);\n}\nif (timeout == null) {\ntimeout = Duration.ofSeconds(60);\n}\nfinal ReactorProvider provider = new ReactorProvider();\nfinal ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider);\nif (retry == null) {\nretry = Retry.getDefaultRetry();\n}\nproxyConfiguration = constructDefaultProxyConfiguration(configuration);\nif (scheduler == null) {\nscheduler = Schedulers.elastic();\n}\nfinal CBSAuthorizationType authorizationType = credentials instanceof EventHubSharedAccessKeyCredential\n? CBSAuthorizationType.SHARED_ACCESS_SIGNATURE\n: CBSAuthorizationType.JSON_WEB_TOKEN;\nfinal ConnectionOptions parameters = new ConnectionOptions(host, eventHubPath, credentials,\nauthorizationType, timeout, transport, retry, proxyConfiguration, scheduler);\nreturn new EventHubClient(parameters, provider, handlerProvider);\n}", + "target_code": "final String connectionString = configuration.get(AZURE_EVENT_HUBS_CONNECTION_STRING);", + "method_body_after": "public EventHubClient build() {\nconfiguration = configuration == null ? ConfigurationManager.getConfiguration().clone() : configuration;\nif (credentials == null) {\nfinal String connectionString = configuration.get(AZURE_EVENT_HUBS_CONNECTION_STRING);\nif (ImplUtils.isNullOrEmpty(connectionString)) {\nthrow new IllegalArgumentException(\"Credentials have not been set using 'EventHubClientBuilder.credentials(String)'\"\n+ \"EventHubClientBuilder.credentials(String, String, TokenCredential). 
And the connection string is\"\n+ \"not set in the '\" + AZURE_EVENT_HUBS_CONNECTION_STRING + \"' environment variable.\");\n}\ncredential(connectionString);\n}\nif (timeout == null) {\ntimeout = Duration.ofSeconds(60);\n}\nfinal ReactorProvider provider = new ReactorProvider();\nfinal ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider);\nif (retry == null) {\nretry = Retry.getDefaultRetry();\n}\nproxyConfiguration = constructDefaultProxyConfiguration(configuration);\nif (scheduler == null) {\nscheduler = Schedulers.elastic();\n}\nfinal CBSAuthorizationType authorizationType = credentials instanceof EventHubSharedAccessKeyCredential\n? CBSAuthorizationType.SHARED_ACCESS_SIGNATURE\n: CBSAuthorizationType.JSON_WEB_TOKEN;\nfinal ConnectionOptions parameters = new ConnectionOptions(host, eventHubPath, credentials,\nauthorizationType, timeout, transport, retry, proxyConfiguration, scheduler);\nreturn new EventHubClient(parameters, provider, handlerProvider);\n}", + "context_before": "class EventHubClientBuilder {\nprivate static final String AZURE_EVENT_HUBS_CONNECTION_STRING = \"AZURE_EVENT_HUBS_CONNECTION_STRING\";\nprivate TokenCredential credentials;\nprivate Configuration configuration;\nprivate Duration timeout;\nprivate ProxyConfiguration proxyConfiguration;\nprivate Retry retry;\nprivate Scheduler scheduler;\nprivate TransportType transport;\nprivate String host;\nprivate String eventHubPath;\n/**\n* Creates a new instance with the default transport {@link TransportType\n*/\npublic EventHubClientBuilder() {\ntransport = TransportType.AMQP;\n}\n/**\n* Sets the credential information given a connection string to the Event Hub instance.\n*\n* @param connectionString The connection string to the Event Hub this client wishes to connect to. It is expected\n* that the Event Hub path and the shared key properties are contained in this connection string.\n* @return The updated EventHubClientBuilder object.\n* @throws IllegalArgumentException if {@code connectionString} is null or empty. Or, the {@code connectionString}\n* does not contain the \"EntityPath\" key, which is the name of the Event Hub instance.\n* @throws AzureException If the shared access signature token credential could not be created using the connection\n* string.\n*/\npublic EventHubClientBuilder credential(String connectionString) {\nfinal ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);\nfinal TokenCredential tokenCredential;\ntry {\ntokenCredential = new EventHubSharedAccessKeyCredential(properties.sharedAccessKeyName(),\nproperties.sharedAccessKey(), ClientConstants.TOKEN_VALIDITY);\n} catch (InvalidKeyException | NoSuchAlgorithmException e) {\nthrow new AzureException(\"Could not create the EventHubSharedAccessKeyCredential.\", e);\n}\nreturn credential(properties.endpoint().getHost(), properties.eventHubPath(), tokenCredential);\n}\n/**\n* Sets the credential information given a connection string to the Event Hubs namespace and a path to a specific\n* Event Hub instance.\n*\n* @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is expected\n* that the shared key properties are contained in this connection string, but not the Event Hub path.\n* @param eventHubPath The path of the specific Event Hub to connect the client to.\n* @return The updated EventHubClientBuilder object.\n* @throws IllegalArgumentException if {@code connectionString} or {@code eventHubPath} is null or empty. 
Or, if the\n* {@code connectionString} contains the Event Hub path.\n* @throws AzureException If the shared access signature token credential could not be created using the connection\n* string.\n*/\npublic EventHubClientBuilder credential(String connectionString, String eventHubPath) {\nif (ImplUtils.isNullOrEmpty(eventHubPath)) {\nthrow new IllegalArgumentException(\"'eventHubPath' cannot be null or empty\");\n}\nfinal ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);\nfinal TokenCredential tokenCredential;\ntry {\ntokenCredential = new EventHubSharedAccessKeyCredential(properties.sharedAccessKeyName(),\nproperties.sharedAccessKey(), ClientConstants.TOKEN_VALIDITY);\n} catch (InvalidKeyException | NoSuchAlgorithmException e) {\nthrow new AzureException(\"Could not create the EventHubSharedAccessKeyCredential.\", e);\n}\nif (!ImplUtils.isNullOrEmpty(properties.eventHubPath())) {\nthrow new IllegalArgumentException(String.format(Locale.US,\n\"'connectionString' contains an Event Hub path [%s]. Please use the\"\n+ \" credentials(String connectionString) overload. Or supply a 'connectionString' without\"\n+ \" 'EntityPath' in it.\", properties.eventHubPath()));\n}\nreturn credential(properties.endpoint().getHost(), eventHubPath, tokenCredential);\n}\n/**\n* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.\n*\n* @param host The fully qualified host name for the Event Hubs namespace. This is likely to be similar to\n* {@literal {your-namespace}.servicebus.windows.net}.\n* @param eventHubPath The path of the specific Event Hub to connect the client to.\n* @param credential The token credential to use for authorization. Access controls may be specified by the Event\n* Hubs namespace or the requested Event Hub, depending on Azure configuration.\n* @return The updated EventHubClientBuilder object.\n* @throws IllegalArgumentException if {@code host} or {@code eventHubPath} is null or empty.\n* @throws NullPointerException if {@code credentials} is null.\n*/\npublic EventHubClientBuilder credential(String host, String eventHubPath, TokenCredential credential) {\nif (ImplUtils.isNullOrEmpty(host)) {\nthrow new IllegalArgumentException(\"'host' cannot be null or empty\");\n}\nif (ImplUtils.isNullOrEmpty(eventHubPath)) {\nthrow new IllegalArgumentException(\"'eventHubPath' cannot be null or empty.\");\n}\nObjects.requireNonNull(credential);\nthis.host = host;\nthis.credentials = credential;\nthis.eventHubPath = eventHubPath;\nreturn this;\n}\n/**\n* Sets the transport type by which all the communication with Azure Event Hubs occurs.\n* Default value is {@link TransportType\n*\n* @param transport The transport type to use.\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder transportType(TransportType transport) {\nthis.transport = transport;\nreturn this;\n}\n/**\n* Sets the timeout for each connection, link, and session.\n*\n* @param timeout Duration for timeout.\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder timeout(Duration timeout) {\nthis.timeout = timeout;\nreturn this;\n}\n/**\n* Sets the scheduler for operations such as connecting to and receiving or sending data to Event Hubs. 
If none is\n* specified, an elastic pool is used.\n*\n* @param scheduler The scheduler for operations such as connecting to and receiving or sending data to Event Hubs.\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder scheduler(Scheduler scheduler) {\nthis.scheduler = scheduler;\nreturn this;\n}\n/**\n* Sets the proxy configuration for EventHubClient.\n*\n* @param proxyConfiguration The proxy configuration to use.\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder proxyConfiguration(ProxyConfiguration proxyConfiguration) {\nthis.proxyConfiguration = proxyConfiguration;\nreturn this;\n}\n/**\n* Sets the retry policy for EventHubClient.\n*\n* @param retry The retry policy to use.\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder retry(Retry retry) {\nthis.retry = retry;\nreturn this;\n}\n/**\n* Sets the configuration store that is used during construction of the service client.\n*\n* The default configuration store is a clone of the {@link ConfigurationManager\n* configuration store}, use {@link Configuration\n*\n* @param configuration The configuration store used to\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder configuration(Configuration configuration) {\nthis.configuration = configuration;\nreturn this;\n}\n/**\n* Creates a new {@link EventHubClient} based on the configuration set in this builder.\n* Use the default not null values if the Connection parameters are not provided.\n*\n* @return A new {@link EventHubClient} instance.\n* @throws IllegalArgumentException if the credentials have not been set using either {@link\n* or {@link\n*/\nprivate ProxyConfiguration constructDefaultProxyConfiguration(Configuration configuration) {\nProxyAuthenticationType authentication = ProxyAuthenticationType.NONE;\nif (proxyConfiguration != null) {\nauthentication = proxyConfiguration.authentication();\n}\nString proxyAddress = configuration.get(BaseConfigurations.HTTP_PROXY);\nProxy proxy = null;\nif (proxyAddress != null) {\nfinal String[] hostPort = proxyAddress.split(\":\");\nif (hostPort.length < 2) {\nthrow new IllegalArgumentException(\"HTTP_PROXY cannot be parsed into a proxy\");\n}\nfinal String host = hostPort[0];\nfinal int port = Integer.parseInt(hostPort[1]);\nproxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));\n}\nfinal String username = configuration.get(ProxyConfiguration.PROXY_USERNAME);\nfinal String password = configuration.get(ProxyConfiguration.PROXY_PASSWORD);\nreturn new ProxyConfiguration(authentication, proxy, username, password);\n}\n}", + "context_after": "class EventHubClientBuilder {\nprivate static final String AZURE_EVENT_HUBS_CONNECTION_STRING = \"AZURE_EVENT_HUBS_CONNECTION_STRING\";\nprivate TokenCredential credentials;\nprivate Configuration configuration;\nprivate Duration timeout;\nprivate ProxyConfiguration proxyConfiguration;\nprivate Retry retry;\nprivate Scheduler scheduler;\nprivate TransportType transport;\nprivate String host;\nprivate String eventHubPath;\n/**\n* Creates a new instance with the default transport {@link TransportType\n*/\npublic EventHubClientBuilder() {\ntransport = TransportType.AMQP;\n}\n/**\n* Sets the credential information given a connection string to the Event Hub instance.\n*\n* @param connectionString The connection string to the Event Hub this client wishes to connect to. 
It is expected\n* that the Event Hub path and the shared key properties are contained in this connection string.\n* @return The updated EventHubClientBuilder object.\n* @throws IllegalArgumentException if {@code connectionString} is null or empty. Or, the {@code connectionString}\n* does not contain the \"EntityPath\" key, which is the name of the Event Hub instance.\n* @throws AzureException If the shared access signature token credential could not be created using the connection\n* string.\n*/\npublic EventHubClientBuilder credential(String connectionString) {\nfinal ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);\nfinal TokenCredential tokenCredential;\ntry {\ntokenCredential = new EventHubSharedAccessKeyCredential(properties.sharedAccessKeyName(),\nproperties.sharedAccessKey(), ClientConstants.TOKEN_VALIDITY);\n} catch (InvalidKeyException | NoSuchAlgorithmException e) {\nthrow new AzureException(\"Could not create the EventHubSharedAccessKeyCredential.\", e);\n}\nreturn credential(properties.endpoint().getHost(), properties.eventHubPath(), tokenCredential);\n}\n/**\n* Sets the credential information given a connection string to the Event Hubs namespace and a path to a specific\n* Event Hub instance.\n*\n* @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is expected\n* that the shared key properties are contained in this connection string, but not the Event Hub path.\n* @param eventHubPath The path of the specific Event Hub to connect the client to.\n* @return The updated EventHubClientBuilder object.\n* @throws IllegalArgumentException if {@code connectionString} or {@code eventHubPath} is null or empty. Or, if the\n* {@code connectionString} contains the Event Hub path.\n* @throws AzureException If the shared access signature token credential could not be created using the connection\n* string.\n*/\npublic EventHubClientBuilder credential(String connectionString, String eventHubPath) {\nif (ImplUtils.isNullOrEmpty(eventHubPath)) {\nthrow new IllegalArgumentException(\"'eventHubPath' cannot be null or empty\");\n}\nfinal ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);\nfinal TokenCredential tokenCredential;\ntry {\ntokenCredential = new EventHubSharedAccessKeyCredential(properties.sharedAccessKeyName(),\nproperties.sharedAccessKey(), ClientConstants.TOKEN_VALIDITY);\n} catch (InvalidKeyException | NoSuchAlgorithmException e) {\nthrow new AzureException(\"Could not create the EventHubSharedAccessKeyCredential.\", e);\n}\nif (!ImplUtils.isNullOrEmpty(properties.eventHubPath())) {\nthrow new IllegalArgumentException(String.format(Locale.US,\n\"'connectionString' contains an Event Hub path [%s]. Please use the\"\n+ \" credentials(String connectionString) overload. Or supply a 'connectionString' without\"\n+ \" 'EntityPath' in it.\", properties.eventHubPath()));\n}\nreturn credential(properties.endpoint().getHost(), eventHubPath, tokenCredential);\n}\n/**\n* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.\n*\n* @param host The fully qualified host name for the Event Hubs namespace. This is likely to be similar to\n* {@literal {your-namespace}.servicebus.windows.net}.\n* @param eventHubPath The path of the specific Event Hub to connect the client to.\n* @param credential The token credential to use for authorization. 
Access controls may be specified by the Event\n* Hubs namespace or the requested Event Hub, depending on Azure configuration.\n* @return The updated EventHubClientBuilder object.\n* @throws IllegalArgumentException if {@code host} or {@code eventHubPath} is null or empty.\n* @throws NullPointerException if {@code credentials} is null.\n*/\npublic EventHubClientBuilder credential(String host, String eventHubPath, TokenCredential credential) {\nif (ImplUtils.isNullOrEmpty(host)) {\nthrow new IllegalArgumentException(\"'host' cannot be null or empty\");\n}\nif (ImplUtils.isNullOrEmpty(eventHubPath)) {\nthrow new IllegalArgumentException(\"'eventHubPath' cannot be null or empty.\");\n}\nObjects.requireNonNull(credential);\nthis.host = host;\nthis.credentials = credential;\nthis.eventHubPath = eventHubPath;\nreturn this;\n}\n/**\n* Sets the transport type by which all the communication with Azure Event Hubs occurs.\n* Default value is {@link TransportType\n*\n* @param transport The transport type to use.\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder transportType(TransportType transport) {\nthis.transport = transport;\nreturn this;\n}\n/**\n* Sets the timeout for each connection, link, and session.\n*\n* @param timeout Duration for timeout.\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder timeout(Duration timeout) {\nthis.timeout = timeout;\nreturn this;\n}\n/**\n* Sets the scheduler for operations such as connecting to and receiving or sending data to Event Hubs. If none is\n* specified, an elastic pool is used.\n*\n* @param scheduler The scheduler for operations such as connecting to and receiving or sending data to Event Hubs.\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder scheduler(Scheduler scheduler) {\nthis.scheduler = scheduler;\nreturn this;\n}\n/**\n* Sets the proxy configuration for EventHubClient.\n*\n* @param proxyConfiguration The proxy configuration to use.\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder proxyConfiguration(ProxyConfiguration proxyConfiguration) {\nthis.proxyConfiguration = proxyConfiguration;\nreturn this;\n}\n/**\n* Sets the retry policy for EventHubClient.\n*\n* @param retry The retry policy to use.\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder retry(Retry retry) {\nthis.retry = retry;\nreturn this;\n}\n/**\n* Sets the configuration store that is used during construction of the service client.\n*\n* The default configuration store is a clone of the {@link ConfigurationManager\n* configuration store}, use {@link Configuration\n*\n* @param configuration The configuration store used to\n* @return The updated EventHubClientBuilder object.\n*/\npublic EventHubClientBuilder configuration(Configuration configuration) {\nthis.configuration = configuration;\nreturn this;\n}\n/**\n* Creates a new {@link EventHubClient} based on the configuration set in this builder.\n* Use the default not null values if the Connection parameters are not provided.\n*\n* @return A new {@link EventHubClient} instance.\n* @throws IllegalArgumentException if the credentials have not been set using either {@link\n* or {@link\n*/\nprivate ProxyConfiguration constructDefaultProxyConfiguration(Configuration configuration) {\nProxyAuthenticationType authentication = ProxyAuthenticationType.NONE;\nif (proxyConfiguration != null) {\nauthentication = proxyConfiguration.authentication();\n}\nString 
proxyAddress = configuration.get(BaseConfigurations.HTTP_PROXY);\nProxy proxy = null;\nif (proxyAddress != null) {\nfinal String[] hostPort = proxyAddress.split(\":\");\nif (hostPort.length < 2) {\nthrow new IllegalArgumentException(\"HTTP_PROXY cannot be parsed into a proxy\");\n}\nfinal String host = hostPort[0];\nfinal int port = Integer.parseInt(hostPort[1]);\nproxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));\n}\nfinal String username = configuration.get(ProxyConfiguration.PROXY_USERNAME);\nfinal String password = configuration.get(ProxyConfiguration.PROXY_PASSWORD);\nreturn new ProxyConfiguration(authentication, proxy, username, password);\n}\n}" + }, + { + "comment": "It looks redundant to me. [SO answers](https://stackoverflow.com/questions/14932697/java-what-is-considered-more-readable-this-or-no-this) confirm the same. And there's no performance gain either (the Java compiler produces the same bytecode for both). We can keep `this` for ambiguous cases such as the one below (or we can simply rename the variable): ``` public MyClass(String data) { this.data = data; } ``` WDYT?", + "method_body": "public List resolveItems(LSServiceOperationContext ctx) {\nArrayList completionItems = new ArrayList<>();\nList poppedTokens = CommonUtil.popNFromStack(ctx.get(CompletionKeys.FORCE_CONSUMED_TOKENS_KEY), 3)\n.stream()\n.map(Token::getText)\n.collect(Collectors.toList());\nList symbolInfoList = ctx.get(CompletionKeys.VISIBLE_SYMBOLS_KEY);\nif (isInvocationOrInteractionOrFieldAccess(ctx)) {\nString delimiter = \"\";\nString variableName = \"\";\nfor (int i = 0; i < poppedTokens.size(); i++) {\nif (poppedTokens.get(i).equals(UtilSymbolKeys.DOT_SYMBOL_KEY)\n|| poppedTokens.get(i).equals(UtilSymbolKeys.PKG_DELIMITER_KEYWORD)\n|| poppedTokens.get(i).equals(UtilSymbolKeys.RIGHT_ARROW_SYMBOL_KEY)) {\ndelimiter = poppedTokens.get(i);\nvariableName = poppedTokens.get(i - 1);\nbreak;\n}\n}\nList filteredList = FilterUtils.getInvocationAndFieldSymbolsOnVar(ctx,\nvariableName,\ndelimiter,\nctx.get(CompletionKeys.VISIBLE_SYMBOLS_KEY));\nfilteredList.removeIf(CommonUtil.invalidSymbolsPredicate());\nfilteredList.forEach(symbolInfo -> {\nif (CommonUtil.isValidInvokableSymbol(symbolInfo.getScopeEntry().symbol)) {\nBSymbol scopeEntrySymbol = symbolInfo.getScopeEntry().symbol;\ncompletionItems.add(this.fillInvokableSymbolMatchSnippet((BInvokableSymbol) scopeEntrySymbol, ctx));\n}\n});\n} else {\nsymbolInfoList.removeIf(CommonUtil.invalidSymbolsPredicate());\nsymbolInfoList.forEach(symbolInfo -> {\nBSymbol bSymbol = symbolInfo.getScopeEntry().symbol;\nif (CommonUtil.isValidInvokableSymbol(symbolInfo.getScopeEntry().symbol)\n&& ((bSymbol.flags & Flags.ATTACHED) != Flags.ATTACHED)) {\ncompletionItems.add(this.fillInvokableSymbolMatchSnippet((BInvokableSymbol) bSymbol, ctx));\n} else if (!(symbolInfo.getScopeEntry().symbol instanceof BInvokableSymbol)\n&& bSymbol instanceof BVarSymbol) {\nfillVarSymbolMatchSnippet((BVarSymbol) bSymbol, completionItems, ctx);\nString typeName = symbolInfo.getScopeEntry().symbol.type.toString();\ncompletionItems.add(BVariableCompletionItemBuilder.build((BVarSymbol) bSymbol,\nsymbolInfo.getSymbolName(), typeName));\n} else if (bSymbol instanceof BPackageSymbol) {\ncompletionItems.add(\nBTypeCompletionItemBuilder.build((BPackageSymbol) bSymbol, symbolInfo.getSymbolName()));\n}\n});\n}\nItemSorters.get(MatchContextItemSorter.class).sortItems(ctx, completionItems);\nreturn completionItems;\n}", + "target_code": "fillVarSymbolMatchSnippet((BVarSymbol) bSymbol, completionItems, ctx);", + 
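Editor's note: the review comment above argues that an explicit `this.` qualifier is only useful when a name is ambiguous. A minimal self-contained sketch of both situations (the `Example` class and its `data` field are hypothetical, chosen only for illustration):

```java
public class Example {
    private String data;

    // `this` is required here: the constructor parameter shadows the field,
    // so a bare `data = data` would just assign the parameter to itself.
    public Example(String data) {
        this.data = data;
    }

    // `this` would be redundant here: nothing shadows the field, and javac
    // emits the same bytecode for `data.length()` and `this.data.length()`.
    public int length() {
        return data.length();
    }
}
```

Renaming the constructor parameter (say, to `newData`) removes the shadowing entirely, which is the alternative the reviewer mentions.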
"method_body_after": "public List resolveItems(LSServiceOperationContext ctx) {\nArrayList completionItems = new ArrayList<>();\nList poppedTokens = CommonUtil.popNFromStack(ctx.get(CompletionKeys.FORCE_CONSUMED_TOKENS_KEY), 3)\n.stream()\n.map(Token::getText)\n.collect(Collectors.toList());\nList symbolInfoList = ctx.get(CompletionKeys.VISIBLE_SYMBOLS_KEY);\nif (isInvocationOrInteractionOrFieldAccess(ctx)) {\nString delimiter = \"\";\nString variableName = \"\";\nfor (int i = 0; i < poppedTokens.size(); i++) {\nif (poppedTokens.get(i).equals(UtilSymbolKeys.DOT_SYMBOL_KEY)\n|| poppedTokens.get(i).equals(UtilSymbolKeys.PKG_DELIMITER_KEYWORD)\n|| poppedTokens.get(i).equals(UtilSymbolKeys.RIGHT_ARROW_SYMBOL_KEY)) {\ndelimiter = poppedTokens.get(i);\nvariableName = poppedTokens.get(i - 1);\nbreak;\n}\n}\nList filteredList = FilterUtils.getInvocationAndFieldSymbolsOnVar(ctx,\nvariableName,\ndelimiter,\nctx.get(CompletionKeys.VISIBLE_SYMBOLS_KEY));\nfilteredList.removeIf(CommonUtil.invalidSymbolsPredicate());\nfilteredList.forEach(symbolInfo -> {\nif (CommonUtil.isValidInvokableSymbol(symbolInfo.getScopeEntry().symbol)) {\nBSymbol scopeEntrySymbol = symbolInfo.getScopeEntry().symbol;\ncompletionItems.add(this.fillInvokableSymbolMatchSnippet((BInvokableSymbol) scopeEntrySymbol, ctx));\n}\n});\n} else {\nsymbolInfoList.removeIf(CommonUtil.invalidSymbolsPredicate());\nsymbolInfoList.forEach(symbolInfo -> {\nBSymbol bSymbol = symbolInfo.getScopeEntry().symbol;\nif (CommonUtil.isValidInvokableSymbol(symbolInfo.getScopeEntry().symbol)\n&& ((bSymbol.flags & Flags.ATTACHED) != Flags.ATTACHED)) {\ncompletionItems.add(this.fillInvokableSymbolMatchSnippet((BInvokableSymbol) bSymbol, ctx));\n} else if (!(symbolInfo.getScopeEntry().symbol instanceof BInvokableSymbol)\n&& bSymbol instanceof BVarSymbol) {\nfillVarSymbolMatchSnippet((BVarSymbol) bSymbol, completionItems, ctx);\nString typeName = symbolInfo.getScopeEntry().symbol.type.toString();\ncompletionItems.add(BVariableCompletionItemBuilder.build((BVarSymbol) bSymbol,\nsymbolInfo.getSymbolName(), typeName));\n} else if (bSymbol instanceof BPackageSymbol) {\ncompletionItems.add(\nBTypeCompletionItemBuilder.build((BPackageSymbol) bSymbol, symbolInfo.getSymbolName()));\n}\n});\n}\nItemSorters.get(MatchContextItemSorter.class).sortItems(ctx, completionItems);\nreturn completionItems;\n}", + "context_before": "class ParserRuleMatchStatementContextResolver extends AbstractItemResolver {\nprivate static final String LINE_SEPARATOR = System.lineSeparator();\n@Override\nprivate String getMatchFieldsSnippet(BType bType, LSContext ctx) {\nfinal Set memberTypes = bType instanceof BUnionType ? 
((BUnionType) bType).getMemberTypes() :\nnew LinkedHashSet<>(Collections.singletonList(bType));\nStringBuilder fieldsSnippet = new StringBuilder(\"{\");\nfieldsSnippet.append(LINE_SEPARATOR);\nmemberTypes.forEach(type -> fieldsSnippet\n.append(\"\\t\").append(CommonUtil.getBTypeName(type, ctx)).append(\" => {\")\n.append(LINE_SEPARATOR)\n.append(\"\\t\\t\")\n.append(LINE_SEPARATOR)\n.append(\"\\t\").append(\"}\")\n.append(LINE_SEPARATOR));\nfieldsSnippet.append(\"}\");\nreturn fieldsSnippet.toString();\n}\nprivate CompletionItem getVariableCompletionItem(BVarSymbol varSymbol, String matchFieldSnippet) {\nCompletionItem completionItem = BVariableCompletionItemBuilder.build(varSymbol,\nvarSymbol.getName().getValue(),\nvarSymbol.type.toString());\ncompletionItem.setInsertText(varSymbol.getName().getValue() + \" \" + matchFieldSnippet);\ncompletionItem.setInsertTextFormat(InsertTextFormat.Snippet);\nreturn completionItem;\n}\nprivate String getFunctionSignature(BInvokableSymbol func) {\nString[] nameComps = func.getName().getValue().split(\"\\\\.\");\nStringBuilder signature = new StringBuilder(nameComps[nameComps.length - 1]);\nList params = new ArrayList<>();\nsignature.append(UtilSymbolKeys.OPEN_BRACKET_KEY);\nfunc.getParameters().forEach(bVarSymbol -> params.add(bVarSymbol.getName().getValue()));\nfunc.getDefaultableParameters().forEach(bVarSymbol -> params.add(bVarSymbol.getName().getValue()));\nsignature.append(String.join(\",\", params)).append(\")\");\nreturn signature.toString();\n}\nprivate CompletionItem fillInvokableSymbolMatchSnippet(BInvokableSymbol func, LSContext ctx) {\nBType returnType = func.getType().getReturnType();\nString functionSignature = getFunctionSignature(func);\nString matchFieldSnippet = getMatchFieldsSnippet(returnType, ctx);\nreturn BFunctionCompletionItemBuilder.build(func, functionSignature,\nfunctionSignature + \" \" + matchFieldSnippet);\n}\nprivate void fillVarSymbolMatchSnippet(BVarSymbol varSymbol, List completionItems, LSContext ctx) {\nBType symbolType = varSymbol.getType();\ncompletionItems.add(getVariableCompletionItem(varSymbol, this.getMatchFieldsSnippet(symbolType, ctx)));\n}\n}", + "context_after": "class ParserRuleMatchStatementContextResolver extends AbstractItemResolver {\nprivate static final String LINE_SEPARATOR = System.lineSeparator();\n@Override\nprivate String getMatchFieldsSnippet(BType bType, LSContext ctx) {\nfinal Set memberTypes = bType instanceof BUnionType ? 
((BUnionType) bType).getMemberTypes() :\nnew LinkedHashSet<>(Collections.singletonList(bType));\nStringBuilder fieldsSnippet = new StringBuilder(\"{\");\nfieldsSnippet.append(LINE_SEPARATOR);\nmemberTypes.forEach(type -> fieldsSnippet\n.append(\"\\t\").append(CommonUtil.getBTypeName(type, ctx)).append(\" => {\")\n.append(LINE_SEPARATOR)\n.append(\"\\t\\t\")\n.append(LINE_SEPARATOR)\n.append(\"\\t\").append(\"}\")\n.append(LINE_SEPARATOR));\nfieldsSnippet.append(\"}\");\nreturn fieldsSnippet.toString();\n}\nprivate CompletionItem getVariableCompletionItem(BVarSymbol varSymbol, String matchFieldSnippet) {\nCompletionItem completionItem = BVariableCompletionItemBuilder.build(varSymbol,\nvarSymbol.getName().getValue(),\nvarSymbol.type.toString());\ncompletionItem.setInsertText(varSymbol.getName().getValue() + \" \" + matchFieldSnippet);\ncompletionItem.setInsertTextFormat(InsertTextFormat.Snippet);\nreturn completionItem;\n}\nprivate String getFunctionSignature(BInvokableSymbol func) {\nString[] nameComps = func.getName().getValue().split(\"\\\\.\");\nStringBuilder signature = new StringBuilder(nameComps[nameComps.length - 1]);\nList params = new ArrayList<>();\nsignature.append(UtilSymbolKeys.OPEN_BRACKET_KEY);\nfunc.getParameters().forEach(bVarSymbol -> params.add(bVarSymbol.getName().getValue()));\nfunc.getDefaultableParameters().forEach(bVarSymbol -> params.add(bVarSymbol.getName().getValue()));\nsignature.append(String.join(\",\", params)).append(\")\");\nreturn signature.toString();\n}\nprivate CompletionItem fillInvokableSymbolMatchSnippet(BInvokableSymbol func, LSContext ctx) {\nBType returnType = func.getType().getReturnType();\nString functionSignature = getFunctionSignature(func);\nString matchFieldSnippet = getMatchFieldsSnippet(returnType, ctx);\nreturn BFunctionCompletionItemBuilder.build(func, functionSignature,\nfunctionSignature + \" \" + matchFieldSnippet);\n}\nprivate void fillVarSymbolMatchSnippet(BVarSymbol varSymbol, List completionItems, LSContext ctx) {\nBType symbolType = varSymbol.getType();\ncompletionItems.add(getVariableCompletionItem(varSymbol, this.getMatchFieldsSnippet(symbolType, ctx)));\n}\n}" + }, + { + "comment": "Yeah, I think there are a lot of cases where we end up wrapping an exception type in the type itself. 
I agree, we should look into offering a convenience method for this.", + "method_body": "private int getJavaVersion() {\nString version = System.getProperty(\"java.version\");\nif (CoreUtils.isNullOrEmpty(version)) {\nthrow LOGGER.logExceptionAsError(new RuntimeException(\"Can't find 'java.version' system property.\"));\n}\nif (version.startsWith(\"1.\")) {\nif (version.length() < 3) {\nthrow LOGGER.logExceptionAsError(new RuntimeException(\"Can't parse 'java.version':\" + version));\n}\ntry {\nreturn Integer.parseInt(version.substring(2, 3));\n} catch (Exception t) {\nthrow LOGGER.logExceptionAsError(new RuntimeException(\"Can't parse 'java.version':\" + version, t));\n}\n} else {\nint idx = version.indexOf(\".\");\nif (idx == -1) {\nreturn Integer.parseInt(version);\n}\ntry {\nreturn Integer.parseInt(version.substring(0, idx));\n} catch (Exception t) {\nthrow LOGGER.logExceptionAsError(new RuntimeException(\"Can't parse 'java.version':\" + version, t));\n}\n}\n}", + "target_code": "throw LOGGER.logExceptionAsError(new RuntimeException(\"Can't parse 'java.version':\" + version, t));", + "method_body_after": "private int getJavaVersion() {\nString version = System.getProperty(\"java.version\");\nif (CoreUtils.isNullOrEmpty(version)) {\nthrow LOGGER.logExceptionAsError(new RuntimeException(\"Can't find 'java.version' system property.\"));\n}\nif (version.startsWith(\"1.\")) {\nif (version.length() < 3) {\nthrow LOGGER.logExceptionAsError(new RuntimeException(\"Can't parse 'java.version':\" + version));\n}\ntry {\nreturn Integer.parseInt(version.substring(2, 3));\n} catch (Exception t) {\nthrow LOGGER.logExceptionAsError(new RuntimeException(\"Can't parse 'java.version':\" + version, t));\n}\n} else {\nint idx = version.indexOf(\".\");\nif (idx == -1) {\nreturn Integer.parseInt(version);\n}\ntry {\nreturn Integer.parseInt(version.substring(0, idx));\n} catch (Exception t) {\nthrow LOGGER.logExceptionAsError(new RuntimeException(\"Can't parse 'java.version':\" + version, t));\n}\n}\n}", + "context_before": "class JdkAsyncHttpClient implements HttpClient {\nprivate static final ClientLogger LOGGER = new ClientLogger(JdkAsyncHttpClient.class);\nprivate final java.net.http.HttpClient jdkHttpClient;\nprivate final Set restrictedHeaders;\nJdkAsyncHttpClient(java.net.http.HttpClient httpClient, Set restrictedHeaders) {\nthis.jdkHttpClient = httpClient;\nint javaVersion = getJavaVersion();\nif (javaVersion <= 11) {\nthrow LOGGER.logExceptionAsError(\nnew UnsupportedOperationException(\"JdkAsyncHttpClient is not supported in Java version 11 and below.\"));\n}\nthis.restrictedHeaders = restrictedHeaders;\nLOGGER.verbose(\"Effective restricted headers: {}\", restrictedHeaders);\n}\n@Override\npublic Mono send(HttpRequest request) {\nreturn send(request, Context.NONE);\n}\n@Override\npublic Mono send(HttpRequest request, Context context) {\nboolean eagerlyReadResponse = (boolean) context.getData(\"azure-eagerly-read-response\").orElse(false);\nreturn toJdkHttpRequest(request)\n.flatMap(jdkRequest -> Mono.fromCompletionStage(jdkHttpClient.sendAsync(jdkRequest, ofPublisher()))\n.flatMap(innerResponse -> {\nif (eagerlyReadResponse) {\nint statusCode = innerResponse.statusCode();\nHttpHeaders headers = fromJdkHttpHeaders(innerResponse.headers());\nreturn FluxUtil.collectBytesFromNetworkResponse(JdkFlowAdapter\n.flowPublisherToFlux(innerResponse.body())\n.flatMapSequential(Flux::fromIterable), headers)\n.map(bytes -> new BufferedJdkHttpResponse(request, statusCode, headers, bytes));\n} else {\nreturn 
Mono.just(new JdkHttpResponse(request, innerResponse));\n}\n}));\n}\n/**\n* Converts the given azure-core request to the JDK HttpRequest type.\n*\n* @param request the azure-core request\n* @return the Mono emitting HttpRequest\n*/\nprivate Mono toJdkHttpRequest(HttpRequest request) {\nreturn Mono.fromCallable(() -> {\nfinal java.net.http.HttpRequest.Builder builder = java.net.http.HttpRequest.newBuilder();\ntry {\nbuilder.uri(request.getUrl().toURI());\n} catch (URISyntaxException e) {\nthrow LOGGER.logExceptionAsError(Exceptions.propagate(e));\n}\nfinal HttpHeaders headers = request.getHeaders();\nif (headers != null) {\nfor (HttpHeader header : headers) {\nfinal String headerName = header.getName();\nif (!restrictedHeaders.contains(headerName)) {\nheader.getValuesList().forEach(headerValue -> builder.header(headerName, headerValue));\n} else {\nLOGGER.warning(\"The header '\" + headerName + \"' is restricted by default in JDK HttpClient 12 \"\n+ \"and above. This header can be added to allow list in JAVA_HOME/conf/net.properties \"\n+ \"or in System.setProperty() or in Configuration. Use the key 'jdk.httpclient\"\n+ \".allowRestrictedHeaders' and a comma separated list of header names.\");\n}\n}\n}\nswitch (request.getHttpMethod()) {\ncase GET:\nreturn builder.GET().build();\ncase HEAD:\nreturn builder.method(\"HEAD\", noBody()).build();\ndefault:\nfinal String contentLength = request.getHeaders().getValue(\"content-length\");\nfinal BodyPublisher bodyPublisher = toBodyPublisher(request.getBody(), contentLength);\nreturn builder.method(request.getHttpMethod().toString(), bodyPublisher).build();\n}\n});\n}\n/**\n* Create BodyPublisher from the given java.nio.ByteBuffer publisher.\n*\n* @param bbPublisher stream of java.nio.ByteBuffer representing request content\n* @return the request BodyPublisher\n*/\nprivate static BodyPublisher toBodyPublisher(Flux bbPublisher, String contentLength) {\nif (bbPublisher == null) {\nreturn noBody();\n}\nfinal Flow.Publisher bbFlowPublisher = JdkFlowAdapter.publisherToFlowPublisher(bbPublisher);\nif (CoreUtils.isNullOrEmpty(contentLength)) {\nreturn fromPublisher(bbFlowPublisher);\n} else {\nlong contentLengthLong = Long.parseLong(contentLength);\nif (contentLengthLong < 1) {\nreturn noBody();\n} else {\nreturn fromPublisher(bbFlowPublisher, contentLengthLong);\n}\n}\n}\n/**\n* Get the java runtime major version.\n*\n* @return the java major version\n*/\n/**\n* Converts the given JDK Http headers to azure-core Http header.\n*\n* @param headers the JDK Http headers\n* @return the azure-core Http headers\n*/\nstatic HttpHeaders fromJdkHttpHeaders(java.net.http.HttpHeaders headers) {\nfinal HttpHeaders httpHeaders = new HttpHeaders();\nfor (Map.Entry> kvp : headers.map().entrySet()) {\nif (CoreUtils.isNullOrEmpty(kvp.getValue())) {\ncontinue;\n}\nhttpHeaders.set(kvp.getKey(), kvp.getValue());\n}\nreturn httpHeaders;\n}\n}", + "context_after": "class JdkAsyncHttpClient implements HttpClient {\nprivate static final ClientLogger LOGGER = new ClientLogger(JdkAsyncHttpClient.class);\nprivate final java.net.http.HttpClient jdkHttpClient;\nprivate final Set restrictedHeaders;\nJdkAsyncHttpClient(java.net.http.HttpClient httpClient, Set restrictedHeaders) {\nthis.jdkHttpClient = httpClient;\nint javaVersion = getJavaVersion();\nif (javaVersion <= 11) {\nthrow LOGGER.logExceptionAsError(\nnew UnsupportedOperationException(\"JdkAsyncHttpClient is not supported in Java version 11 and below.\"));\n}\nthis.restrictedHeaders = 
restrictedHeaders;\nLOGGER.verbose(\"Effective restricted headers: {}\", restrictedHeaders);\n}\n@Override\npublic Mono send(HttpRequest request) {\nreturn send(request, Context.NONE);\n}\n@Override\npublic Mono send(HttpRequest request, Context context) {\nboolean eagerlyReadResponse = (boolean) context.getData(\"azure-eagerly-read-response\").orElse(false);\nreturn toJdkHttpRequest(request)\n.flatMap(jdkRequest -> Mono.fromCompletionStage(jdkHttpClient.sendAsync(jdkRequest, ofPublisher()))\n.flatMap(innerResponse -> {\nif (eagerlyReadResponse) {\nint statusCode = innerResponse.statusCode();\nHttpHeaders headers = fromJdkHttpHeaders(innerResponse.headers());\nreturn FluxUtil.collectBytesFromNetworkResponse(JdkFlowAdapter\n.flowPublisherToFlux(innerResponse.body())\n.flatMapSequential(Flux::fromIterable), headers)\n.map(bytes -> new BufferedJdkHttpResponse(request, statusCode, headers, bytes));\n} else {\nreturn Mono.just(new JdkHttpResponse(request, innerResponse));\n}\n}));\n}\n/**\n* Converts the given azure-core request to the JDK HttpRequest type.\n*\n* @param request the azure-core request\n* @return the Mono emitting HttpRequest\n*/\nprivate Mono toJdkHttpRequest(HttpRequest request) {\nreturn Mono.fromCallable(() -> {\nfinal java.net.http.HttpRequest.Builder builder = java.net.http.HttpRequest.newBuilder();\ntry {\nbuilder.uri(request.getUrl().toURI());\n} catch (URISyntaxException e) {\nthrow LOGGER.logExceptionAsError(Exceptions.propagate(e));\n}\nfinal HttpHeaders headers = request.getHeaders();\nif (headers != null) {\nfor (HttpHeader header : headers) {\nfinal String headerName = header.getName();\nif (!restrictedHeaders.contains(headerName)) {\nheader.getValuesList().forEach(headerValue -> builder.header(headerName, headerValue));\n} else {\nLOGGER.warning(\"The header '\" + headerName + \"' is restricted by default in JDK HttpClient 12 \"\n+ \"and above. This header can be added to allow list in JAVA_HOME/conf/net.properties \"\n+ \"or in System.setProperty() or in Configuration. 
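The warning text here names the `jdk.httpclient.allowRestrictedHeaders` property. A hedged sketch of using it, assuming the property is set before the JDK http client classes initialize (passing `-Djdk.httpclient.allowRestrictedHeaders=host` at launch is the safer route):

```java
import java.net.URI;
import java.net.http.HttpRequest;

public class AllowRestrictedHeaderDemo {
    public static void main(String[] args) {
        // Read when the JDK http client classes first initialize, so it must be
        // set before any builder is created.
        System.setProperty("jdk.httpclient.allowRestrictedHeaders", "host");
        HttpRequest request = HttpRequest.newBuilder(URI.create("https://example.com"))
                .header("host", "example.com") // rejected with IllegalArgumentException without the property
                .GET()
                .build();
        System.out.println("Built request with headers: " + request.headers().map());
    }
}
```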
Use the key 'jdk.httpclient\"\n+ \".allowRestrictedHeaders' and a comma separated list of header names.\");\n}\n}\n}\nswitch (request.getHttpMethod()) {\ncase GET:\nreturn builder.GET().build();\ncase HEAD:\nreturn builder.method(\"HEAD\", noBody()).build();\ndefault:\nfinal String contentLength = request.getHeaders().getValue(\"content-length\");\nfinal BodyPublisher bodyPublisher = toBodyPublisher(request.getBody(), contentLength);\nreturn builder.method(request.getHttpMethod().toString(), bodyPublisher).build();\n}\n});\n}\n/**\n* Create BodyPublisher from the given java.nio.ByteBuffer publisher.\n*\n* @param bbPublisher stream of java.nio.ByteBuffer representing request content\n* @return the request BodyPublisher\n*/\nprivate static BodyPublisher toBodyPublisher(Flux bbPublisher, String contentLength) {\nif (bbPublisher == null) {\nreturn noBody();\n}\nfinal Flow.Publisher bbFlowPublisher = JdkFlowAdapter.publisherToFlowPublisher(bbPublisher);\nif (CoreUtils.isNullOrEmpty(contentLength)) {\nreturn fromPublisher(bbFlowPublisher);\n} else {\nlong contentLengthLong = Long.parseLong(contentLength);\nif (contentLengthLong < 1) {\nreturn noBody();\n} else {\nreturn fromPublisher(bbFlowPublisher, contentLengthLong);\n}\n}\n}\n/**\n* Get the java runtime major version.\n*\n* @return the java major version\n*/\n/**\n* Converts the given JDK Http headers to azure-core Http header.\n*\n* @param headers the JDK Http headers\n* @return the azure-core Http headers\n*/\nstatic HttpHeaders fromJdkHttpHeaders(java.net.http.HttpHeaders headers) {\nfinal HttpHeaders httpHeaders = new HttpHeaders();\nfor (Map.Entry> kvp : headers.map().entrySet()) {\nif (CoreUtils.isNullOrEmpty(kvp.getValue())) {\ncontinue;\n}\nhttpHeaders.set(kvp.getKey(), kvp.getValue());\n}\nreturn httpHeaders;\n}\n}" + }, + { + "comment": "What is usually a varying string? The project path ? Can you please share an example? 
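A hedged illustration of what a `replaceVaryingString`-style helper could do, inferred only from its usage in `testAnnotationAccess` below: blank out run-dependent text between two stable markers so output can be compared against a golden file. The real `CommonUtils` implementation may differ, and the `<VARYING>` placeholder is an assumption:

```java
public class ReplaceVaryingStringSketch {
    static String replaceVaryingString(String firstMarker, String secondMarker, String output) {
        int start = output.indexOf(firstMarker);
        if (start < 0) {
            return output; // marker absent: nothing to normalize
        }
        int afterFirst = start + firstMarker.length();
        int end = output.indexOf(secondMarker, afterFirst);
        if (end < 0) {
            return output;
        }
        // Replace the run-dependent text (timestamps, project paths, stack traces)
        // between the two stable markers with a fixed placeholder.
        return output.substring(0, afterFirst) + "<VARYING>" + output.substring(end);
    }

    public static void main(String[] args) {
        String out = "prefix FIRST 2021-03-01 /tmp/build-1234 SECOND suffix";
        System.out.println(replaceVaryingString("FIRST", "SECOND", out));
        // -> prefix FIRST<VARYING>SECOND suffix
    }
}
```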
", + "method_body": "public void testAnnotationAccess() throws BallerinaTestException, IOException {\nString endString = \" SEVERE {b7a.log.crash} - \";\nString firstString = \"We thank you for helping make us better.\";\nString[] args = mergeCoverageArgs(new String[]{\"annotation-access\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\noutput = CommonUtils.replaceVaryingString(firstString, endString, output);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAnnotationAccess.txt\", output);\n}", + "target_code": "output = CommonUtils.replaceVaryingString(firstString, endString, output);", + "method_body_after": "public void testAnnotationAccess() throws BallerinaTestException, IOException {\nString endString = \" SEVERE {b7a.log.crash} - \";\nString firstString = \"We thank you for helping make us better.\";\nString[] args = mergeCoverageArgs(new String[]{\"annotation-access\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\noutput = CommonUtils.replaceVaryingString(firstString, endString, output);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAnnotationAccess.txt\", output);\n}", + "context_before": "class BasicCasesTest extends BaseTestCase {\nprivate BMainInstance balClient;\nprivate String projectPath;\n@BeforeClass()\npublic void setup() throws BallerinaTestException, IOException {\nbalClient = new BMainInstance(balServer);\nprojectPath = projectBasedTestsPath.toString();\nFileUtils.copyFolder(Paths.get(\"build/libs\"),\nPaths.get(projectPath, \"runtime-api-tests\", \"libs\"));\n}\n@Test\npublic void testAssertions() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertions.txt\", output);\n}\n@Test\npublic void testAssertDiffError() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions-diff-error\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertDiffError.txt\", output);\n}\n@Test\npublic void testAssertionErrorMessage() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions-error-messages\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertionErrorMessage.txt\", output);\n}\n@Test\npublic void testAssertBehavioralTypes() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions-behavioral-types\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertBehavioralTypes.txt\", output);\n}\n@Test\npublic void testAssertStructuralTypes() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions-structural-types\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertStructuralTypes.txt\", output);\n}\n@Test\npublic void testAssertSequenceTypes() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new 
String[]{\"assertions-sequence-types\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertSequenceTypes.txt\", output);\n}\n@Test\n@Test\npublic void testJavaInterops() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"interops\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testJavaInterops.txt\", output);\n}\n@Test\npublic void testRuntimeApi() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"runtime-api-tests\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testRuntimeApi.txt\", output);\n}\n@Test\npublic void testBeforeAfter() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"before-after\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testBeforeAfter.txt\", output);\n}\n@Test\npublic void testBeforeEachAfterEach() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"before-each-after-each\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testBeforeEachAfterEach.txt\", output);\n}\n@Test(dependsOnMethods = \"testBeforeAfter\")\npublic void testDependsOn() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"depends-on\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testDependsOn.txt\", output);\n}\n@Test(dependsOnMethods = \"testDependsOn\")\npublic void testAnnotations() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"annotations\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAnnotations.txt\", output);\n}\n@Test\npublic void testIsolatedFunctions() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"isolated-functions\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testIsolatedFunctions.txt\", output);\n}\n@Test\npublic void testIntersectionTypes() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"intersection-type-test\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testIntersectionTypes.txt\", output);\n}\n@Test\npublic void testAnydataType() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"anydata-type-test\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAnydataType.txt\", output);\n}\n@Test\npublic void testAsyncInvocation() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"async\"});\nString output = 
balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAsyncInvocation.txt\", output);\n}\n}", + "context_after": "class BasicCasesTest extends BaseTestCase {\nprivate BMainInstance balClient;\nprivate String projectPath;\n@BeforeClass()\npublic void setup() throws BallerinaTestException, IOException {\nbalClient = new BMainInstance(balServer);\nprojectPath = projectBasedTestsPath.toString();\nFileUtils.copyFolder(Paths.get(\"build/libs\"),\nPaths.get(projectPath, \"runtime-api-tests\", \"libs\"));\n}\n@Test\npublic void testAssertions() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertions.txt\", output);\n}\n@Test\npublic void testAssertDiffError() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions-diff-error\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertDiffError.txt\", output);\n}\n@Test\npublic void testAssertionErrorMessage() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions-error-messages\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertionErrorMessage.txt\", output);\n}\n@Test\npublic void testAssertBehavioralTypes() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions-behavioral-types\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertBehavioralTypes.txt\", output);\n}\n@Test\npublic void testAssertStructuralTypes() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions-structural-types\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertStructuralTypes.txt\", output);\n}\n@Test\npublic void testAssertSequenceTypes() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"assertions-sequence-types\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAssertSequenceTypes.txt\", output);\n}\n@Test\n@Test\npublic void testJavaInterops() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"interops\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testJavaInterops.txt\", output);\n}\n@Test\npublic void testRuntimeApi() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"runtime-api-tests\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testRuntimeApi.txt\", output);\n}\n@Test\npublic void testBeforeAfter() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new 
String[]{\"before-after\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testBeforeAfter.txt\", output);\n}\n@Test\npublic void testBeforeEachAfterEach() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"before-each-after-each\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testBeforeEachAfterEach.txt\", output);\n}\n@Test(dependsOnMethods = \"testBeforeAfter\")\npublic void testDependsOn() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"depends-on\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testDependsOn.txt\", output);\n}\n@Test(dependsOnMethods = \"testDependsOn\")\npublic void testAnnotations() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"annotations\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAnnotations.txt\", output);\n}\n@Test\npublic void testIsolatedFunctions() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"isolated-functions\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testIsolatedFunctions.txt\", output);\n}\n@Test\npublic void testIntersectionTypes() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"intersection-type-test\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testIntersectionTypes.txt\", output);\n}\n@Test\npublic void testAnydataType() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"anydata-type-test\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAnydataType.txt\", output);\n}\n@Test\npublic void testAsyncInvocation() throws BallerinaTestException, IOException {\nString[] args = mergeCoverageArgs(new String[]{\"async\"});\nString output = balClient.runMainAndReadStdOut(\"test\", args,\nnew HashMap<>(), projectPath, true);\nAssertionUtils.assertOutput(\"BasicCasesTest-testAsyncInvocation.txt\", output);\n}\n}" + }, + { + "comment": "If we are only talking about programming errors, then I believe we should call `System.exit` (== fail hard) because programming errors usually leave the system in a corrupted state. Why do you think that it hides error details? Not doing resource clean up in a failure case is acceptable. 
Failing other jobs if the process has been corrupted is fine as well.", + "method_body": "private void startTriggeringCheckpoint(CheckpointTriggerRequest request) {\ntry {\nsynchronized (lock) {\npreCheckGlobalState(request.isPeriodic);\n}\nfinal Execution[] executions = getTriggerExecutions();\nfinal Map ackTasks = getAckTasks();\nPreconditions.checkState(!isTriggering);\nisTriggering = true;\nfinal long timestamp = System.currentTimeMillis();\nfinal CompletableFuture pendingCheckpointCompletableFuture =\ninitializeCheckpoint(request.props, request.externalSavepointLocation)\n.thenApplyAsync(\n(checkpointIdAndStorageLocation) -> createPendingCheckpoint(\ntimestamp,\nrequest.props,\nackTasks,\nrequest.isPeriodic,\ncheckpointIdAndStorageLocation.checkpointId,\ncheckpointIdAndStorageLocation.checkpointStorageLocation,\nrequest.getOnCompletionFuture()),\ntimer);\nfinal CompletableFuture masterStatesComplete = pendingCheckpointCompletableFuture\n.thenCompose(this::snapshotMasterState);\nfinal CompletableFuture coordinatorCheckpointsComplete = pendingCheckpointCompletableFuture\n.thenComposeAsync((pendingCheckpoint) ->\nOperatorCoordinatorCheckpoints.triggerAndAcknowledgeAllCoordinatorCheckpointsWithCompletion(\ncoordinatorsToCheckpoint, pendingCheckpoint, timer),\ntimer);\nFutureUtils.waitForAll(asList(masterStatesComplete, coordinatorCheckpointsComplete))\n.handleAsync(\n(ignored, throwable) -> {\nfinal PendingCheckpoint checkpoint =\nFutureUtils.getWithoutException(pendingCheckpointCompletableFuture);\nPreconditions.checkState(\ncheckpoint != null || throwable != null,\n\"Either the pending checkpoint needs to be created or an error must have been occurred.\");\nif (throwable != null) {\nif (checkpoint == null) {\nonTriggerFailure(request, throwable);\n} else {\nonTriggerFailure(checkpoint, throwable);\n}\n} else {\nif (checkpoint.isDiscarded()) {\nonTriggerFailure(\ncheckpoint,\nnew CheckpointException(\nCheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE,\ncheckpoint.getFailureCause()));\n} else {\nfinal long checkpointId = checkpoint.getCheckpointId();\nsnapshotTaskState(\ntimestamp,\ncheckpointId,\ncheckpoint.getCheckpointStorageLocation(),\nrequest.props,\nexecutions,\nrequest.advanceToEndOfTime);\ncoordinatorsToCheckpoint.forEach((ctx) -> ctx.afterSourceBarrierInjection(checkpointId));\nonTriggerSuccess();\n}\n}\nreturn null;\n},\ntimer)\n.whenComplete((unused, error) -> {\nif (error != null) {\nif (!isShutdown()) {\nfailureManager.handleJobLevelCheckpointException(new CheckpointException(EXCEPTION, error), Optional.empty());\n} else if (error instanceof RejectedExecutionException) {\nLOG.debug(\"Execution rejected during shutdown\");\n} else {\nLOG.warn(\"Error encountered during shutdown\", error);\n}\n}\n});\n} catch (Throwable throwable) {\nonTriggerFailure(request, throwable);\n}\n}", + "target_code": "failureManager.handleJobLevelCheckpointException(new CheckpointException(EXCEPTION, error), Optional.empty());", + "method_body_after": "private void startTriggeringCheckpoint(CheckpointTriggerRequest request) {\ntry {\nsynchronized (lock) {\npreCheckGlobalState(request.isPeriodic);\n}\nfinal Execution[] executions = getTriggerExecutions();\nfinal Map ackTasks = getAckTasks();\nPreconditions.checkState(!isTriggering);\nisTriggering = true;\nfinal long timestamp = System.currentTimeMillis();\nfinal CompletableFuture pendingCheckpointCompletableFuture =\ninitializeCheckpoint(request.props, request.externalSavepointLocation)\n.thenApplyAsync(\n(checkpointIdAndStorageLocation) -> 
createPendingCheckpoint(\ntimestamp,\nrequest.props,\nackTasks,\nrequest.isPeriodic,\ncheckpointIdAndStorageLocation.checkpointId,\ncheckpointIdAndStorageLocation.checkpointStorageLocation,\nrequest.getOnCompletionFuture()),\ntimer);\nfinal CompletableFuture masterStatesComplete = pendingCheckpointCompletableFuture\n.thenCompose(this::snapshotMasterState);\nfinal CompletableFuture coordinatorCheckpointsComplete = pendingCheckpointCompletableFuture\n.thenComposeAsync((pendingCheckpoint) ->\nOperatorCoordinatorCheckpoints.triggerAndAcknowledgeAllCoordinatorCheckpointsWithCompletion(\ncoordinatorsToCheckpoint, pendingCheckpoint, timer),\ntimer);\nFutureUtils.assertNoException(\nCompletableFuture.allOf(masterStatesComplete, coordinatorCheckpointsComplete)\n.handleAsync(\n(ignored, throwable) -> {\nfinal PendingCheckpoint checkpoint =\nFutureUtils.getWithoutException(pendingCheckpointCompletableFuture);\nPreconditions.checkState(\ncheckpoint != null || throwable != null,\n\"Either the pending checkpoint needs to be created or an error must have been occurred.\");\nif (throwable != null) {\nif (checkpoint == null) {\nonTriggerFailure(request, throwable);\n} else {\nonTriggerFailure(checkpoint, throwable);\n}\n} else {\nif (checkpoint.isDiscarded()) {\nonTriggerFailure(\ncheckpoint,\nnew CheckpointException(\nCheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE,\ncheckpoint.getFailureCause()));\n} else {\nfinal long checkpointId = checkpoint.getCheckpointId();\nsnapshotTaskState(\ntimestamp,\ncheckpointId,\ncheckpoint.getCheckpointStorageLocation(),\nrequest.props,\nexecutions,\nrequest.advanceToEndOfTime);\ncoordinatorsToCheckpoint.forEach((ctx) -> ctx.afterSourceBarrierInjection(checkpointId));\nonTriggerSuccess();\n}\n}\nreturn null;\n},\ntimer)\n.exceptionally(error -> {\nif (!isShutdown()) {\nthrow new CompletionException(error);\n} else if (error instanceof RejectedExecutionException) {\nLOG.debug(\"Execution rejected during shutdown\");\n} else {\nLOG.warn(\"Error encountered during shutdown\", error);\n}\nreturn null;\n}));\n} catch (Throwable throwable) {\nonTriggerFailure(request, throwable);\n}\n}", + "context_before": "class CheckpointCoordinator {\nprivate static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class);\n/** The number of recent checkpoints whose IDs are remembered. */\nprivate static final int NUM_GHOST_CHECKPOINT_IDS = 16;\n/** Coordinator-wide lock to safeguard the checkpoint updates. */\nprivate final Object lock = new Object();\n/** The job whose checkpoint this coordinator coordinates. */\nprivate final JobID job;\n/** Default checkpoint properties. **/\nprivate final CheckpointProperties checkpointProperties;\n/** The executor used for asynchronous calls, like potentially blocking I/O. */\nprivate final Executor executor;\n/** Tasks who need to be sent a message when a checkpoint is started. */\nprivate final ExecutionVertex[] tasksToTrigger;\n/** Tasks who need to acknowledge a checkpoint before it succeeds. */\nprivate final ExecutionVertex[] tasksToWaitFor;\n/** Tasks who need to be sent a message when a checkpoint is confirmed. */\nprivate final ExecutionVertex[] tasksToCommitTo;\n/** The operator coordinators that need to be checkpointed. */\nprivate final Collection coordinatorsToCheckpoint;\n/** Map from checkpoint ID to the pending checkpoint. */\n@GuardedBy(\"lock\")\nprivate final Map pendingCheckpoints;\n/** Completed checkpoints. Implementations can be blocking. 
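The `method_body_after` above derives two dependent stages from one pending-checkpoint future, joins them with `CompletableFuture.allOf`, and funnels success and failure through a single `handleAsync` stage on the timer thread. A self-contained sketch of that composition, with plain strings standing in for Flink's checkpoint types:

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class TriggerPipelineSketch {
    public static void main(String[] args) {
        ExecutorService timer = Executors.newSingleThreadExecutor();
        // One upstream future feeding two dependent stages, as in startTriggeringCheckpoint
        CompletableFuture<String> pending =
                CompletableFuture.supplyAsync(() -> "pending-checkpoint", timer);
        CompletableFuture<Void> masterStates =
                pending.thenAcceptAsync(p -> System.out.println("master state for " + p), timer);
        CompletableFuture<Void> coordinators =
                pending.thenAcceptAsync(p -> System.out.println("coordinator state for " + p), timer);
        // Join both stages; success and failure funnel through one handler
        CompletableFuture.allOf(masterStates, coordinators)
                .handleAsync((ignored, error) -> {
                    System.out.println(error == null ? "trigger success" : "trigger failure: " + error);
                    return null;
                }, timer)
                .join();
        timer.shutdown();
    }
}
```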
Make sure calls to methods\n* accessing this don't block the job manager actor and run asynchronously. */\nprivate final CompletedCheckpointStore completedCheckpointStore;\n/** The root checkpoint state backend, which is responsible for initializing the\n* checkpoint, storing the metadata, and cleaning up the checkpoint. */\nprivate final CheckpointStorageCoordinatorView checkpointStorage;\n/** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */\nprivate final ArrayDeque recentPendingCheckpoints;\n/** Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these\n* need to be ascending across job managers. */\nprivate final CheckpointIDCounter checkpointIdCounter;\n/** The base checkpoint interval. Actual trigger time may be affected by the\n* max concurrent checkpoints and minimum-pause values */\nprivate final long baseInterval;\n/** The max time (in ms) that a checkpoint may take. */\nprivate final long checkpointTimeout;\n/** The min time(in ms) to delay after a checkpoint could be triggered. Allows to\n* enforce minimum processing time between checkpoint attempts */\nprivate final long minPauseBetweenCheckpoints;\n/** The timer that handles the checkpoint timeouts and triggers periodic checkpoints.\n* It must be single-threaded. Eventually it will be replaced by main thread executor. */\nprivate final ScheduledExecutor timer;\n/** The master checkpoint hooks executed by this checkpoint coordinator. */\nprivate final HashMap> masterHooks;\nprivate final boolean unalignedCheckpointsEnabled;\n/** Actor that receives status updates from the execution graph this coordinator works for. */\nprivate JobStatusListener jobStatusListener;\n/** The number of consecutive failed trigger attempts. */\nprivate final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0);\n/** A handle to the current periodic trigger, to cancel it when necessary. */\nprivate ScheduledFuture currentPeriodicTrigger;\n/** The timestamp (via {@link Clock\n* completed. */\nprivate long lastCheckpointCompletionRelativeTime;\n/** Flag whether a triggered checkpoint should immediately schedule the next checkpoint.\n* Non-volatile, because only accessed in synchronized scope */\nprivate boolean periodicScheduling;\n/** Flag marking the coordinator as shut down (not accepting any messages any more). */\nprivate volatile boolean shutdown;\n/** Optional tracker for checkpoint statistics. */\n@Nullable\nprivate CheckpointStatsTracker statsTracker;\n/** A factory for SharedStateRegistry objects. */\nprivate final SharedStateRegistryFactory sharedStateRegistryFactory;\n/** Registry that tracks state which is shared across (incremental) checkpoints. */\nprivate SharedStateRegistry sharedStateRegistry;\nprivate boolean isPreferCheckpointForRecovery;\nprivate final CheckpointFailureManager failureManager;\nprivate final Clock clock;\nprivate final boolean isExactlyOnceMode;\n/** Flag represents there is an in-flight trigger request. 
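A reduced sketch of the single-in-flight gating that the `isTriggering` flag provides, with a plain queue standing in for the `CheckpointRequestDecider`; names and structure are illustrative only:

```java
import java.util.ArrayDeque;
import java.util.Queue;

class TriggerGate {
    private final Queue<Runnable> queued = new ArrayDeque<>();
    private boolean isTriggering;

    synchronized void request(Runnable trigger) {
        if (isTriggering) {
            queued.add(trigger); // the decider would normally pick/merge requests
        } else {
            isTriggering = true;
            trigger.run();
        }
    }

    // Mirrors onTriggerSuccess/onTriggerFailure: reset the flag, pull the next request
    synchronized void onTriggerFinished() {
        isTriggering = false;
        Runnable next = queued.poll();
        if (next != null) {
            isTriggering = true;
            next.run();
        }
    }
}
```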
*/\nprivate boolean isTriggering = false;\nprivate final CheckpointRequestDecider requestDecider;\npublic CheckpointCoordinator(\nJobID job,\nCheckpointCoordinatorConfiguration chkConfig,\nExecutionVertex[] tasksToTrigger,\nExecutionVertex[] tasksToWaitFor,\nExecutionVertex[] tasksToCommitTo,\nCollection coordinatorsToCheckpoint,\nCheckpointIDCounter checkpointIDCounter,\nCompletedCheckpointStore completedCheckpointStore,\nStateBackend checkpointStateBackend,\nExecutor executor,\nScheduledExecutor timer,\nSharedStateRegistryFactory sharedStateRegistryFactory,\nCheckpointFailureManager failureManager) {\nthis(\njob,\nchkConfig,\ntasksToTrigger,\ntasksToWaitFor,\ntasksToCommitTo,\ncoordinatorsToCheckpoint,\ncheckpointIDCounter,\ncompletedCheckpointStore,\ncheckpointStateBackend,\nexecutor,\ntimer,\nsharedStateRegistryFactory,\nfailureManager,\nSystemClock.getInstance());\n}\n@VisibleForTesting\npublic CheckpointCoordinator(\nJobID job,\nCheckpointCoordinatorConfiguration chkConfig,\nExecutionVertex[] tasksToTrigger,\nExecutionVertex[] tasksToWaitFor,\nExecutionVertex[] tasksToCommitTo,\nCollection coordinatorsToCheckpoint,\nCheckpointIDCounter checkpointIDCounter,\nCompletedCheckpointStore completedCheckpointStore,\nStateBackend checkpointStateBackend,\nExecutor executor,\nScheduledExecutor timer,\nSharedStateRegistryFactory sharedStateRegistryFactory,\nCheckpointFailureManager failureManager,\nClock clock) {\ncheckNotNull(checkpointStateBackend);\nlong minPauseBetweenCheckpoints = chkConfig.getMinPauseBetweenCheckpoints();\nif (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) {\nminPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000;\n}\nlong baseInterval = chkConfig.getCheckpointInterval();\nif (baseInterval < minPauseBetweenCheckpoints) {\nbaseInterval = minPauseBetweenCheckpoints;\n}\nthis.job = checkNotNull(job);\nthis.baseInterval = baseInterval;\nthis.checkpointTimeout = chkConfig.getCheckpointTimeout();\nthis.minPauseBetweenCheckpoints = minPauseBetweenCheckpoints;\nthis.tasksToTrigger = checkNotNull(tasksToTrigger);\nthis.tasksToWaitFor = checkNotNull(tasksToWaitFor);\nthis.tasksToCommitTo = checkNotNull(tasksToCommitTo);\nthis.coordinatorsToCheckpoint = Collections.unmodifiableCollection(coordinatorsToCheckpoint);\nthis.pendingCheckpoints = new LinkedHashMap<>();\nthis.checkpointIdCounter = checkNotNull(checkpointIDCounter);\nthis.completedCheckpointStore = checkNotNull(completedCheckpointStore);\nthis.executor = checkNotNull(executor);\nthis.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory);\nthis.sharedStateRegistry = sharedStateRegistryFactory.create(executor);\nthis.isPreferCheckpointForRecovery = chkConfig.isPreferCheckpointForRecovery();\nthis.failureManager = checkNotNull(failureManager);\nthis.clock = checkNotNull(clock);\nthis.isExactlyOnceMode = chkConfig.isExactlyOnce();\nthis.unalignedCheckpointsEnabled = chkConfig.isUnalignedCheckpointsEnabled();\nthis.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS);\nthis.masterHooks = new HashMap<>();\nthis.timer = timer;\nthis.checkpointProperties = CheckpointProperties.forCheckpoint(chkConfig.getCheckpointRetentionPolicy());\ntry {\nthis.checkpointStorage = checkpointStateBackend.createCheckpointStorage(job);\ncheckpointStorage.initializeBaseLocations();\n} catch (IOException e) {\nthrow new FlinkRuntimeException(\"Failed to create checkpoint storage at checkpoint coordinator side.\", e);\n}\ntry {\ncheckpointIDCounter.start();\n} catch (Throwable t) {\nthrow new 
RuntimeException(\"Failed to start checkpoint ID counter: \" + t.getMessage(), t);\n}\nthis.requestDecider = new CheckpointRequestDecider(\nchkConfig.getMaxConcurrentCheckpoints(),\nthis::rescheduleTrigger,\nthis.clock,\nthis.minPauseBetweenCheckpoints,\nthis.pendingCheckpoints::size,\nthis.lock);\n}\n/**\n* Adds the given master hook to the checkpoint coordinator. This method does nothing, if\n* the checkpoint coordinator already contained a hook with the same ID (as defined via\n* {@link MasterTriggerRestoreHook\n*\n* @param hook The hook to add.\n* @return True, if the hook was added, false if the checkpoint coordinator already\n* contained a hook with the same ID.\n*/\npublic boolean addMasterHook(MasterTriggerRestoreHook hook) {\ncheckNotNull(hook);\nfinal String id = hook.getIdentifier();\ncheckArgument(!StringUtils.isNullOrWhitespaceOnly(id), \"The hook has a null or empty id\");\nsynchronized (lock) {\nif (!masterHooks.containsKey(id)) {\nmasterHooks.put(id, hook);\nreturn true;\n}\nelse {\nreturn false;\n}\n}\n}\n/**\n* Gets the number of currently register master hooks.\n*/\npublic int getNumberOfRegisteredMasterHooks() {\nsynchronized (lock) {\nreturn masterHooks.size();\n}\n}\n/**\n* Sets the checkpoint stats tracker.\n*\n* @param statsTracker The checkpoint stats tracker.\n*/\npublic void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) {\nthis.statsTracker = statsTracker;\n}\n/**\n* Shuts down the checkpoint coordinator.\n*\n*
After this method has been called, the coordinator does not accept\n* any further messages and cannot trigger any further checkpoints.\n*/\npublic void shutdown(JobStatus jobStatus) throws Exception {\nsynchronized (lock) {\nif (!shutdown) {\nshutdown = true;\nLOG.info(\"Stopping checkpoint coordinator for job {}.\", job);\nperiodicScheduling = false;\nMasterHooks.close(masterHooks.values(), LOG);\nmasterHooks.clear();\nfinal CheckpointException reason = new CheckpointException(\nCheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);\nabortPendingAndQueuedCheckpoints(reason);\ncompletedCheckpointStore.shutdown(jobStatus);\ncheckpointIdCounter.shutdown(jobStatus);\n}\n}\n}\npublic boolean isShutdown() {\nreturn shutdown;\n}\n/**\n* Triggers a savepoint with the given savepoint directory as a target.\n*\n* @param targetLocation Target location for the savepoint, optional. If null, the\n* state backend's configured default will be used.\n* @return A future to the completed checkpoint\n* @throws IllegalStateException If no savepoint directory has been\n* specified and no default savepoint directory has been\n* configured\n*/\npublic CompletableFuture triggerSavepoint(@Nullable final String targetLocation) {\nfinal CheckpointProperties properties = CheckpointProperties.forSavepoint(!unalignedCheckpointsEnabled);\nreturn triggerSavepointInternal(properties, false, targetLocation);\n}\n/**\n* Triggers a synchronous savepoint with the given savepoint directory as a target.\n*\n* @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline\n* to fire any registered event-time timers.\n* @param targetLocation Target location for the savepoint, optional. If null, the\n* state backend's configured default will be used.\n* @return A future to the completed checkpoint\n* @throws IllegalStateException If no savepoint directory has been\n* specified and no default savepoint directory has been\n* configured\n*/\npublic CompletableFuture triggerSynchronousSavepoint(\nfinal boolean advanceToEndOfEventTime,\n@Nullable final String targetLocation) {\nfinal CheckpointProperties properties = CheckpointProperties.forSyncSavepoint(!unalignedCheckpointsEnabled);\nreturn triggerSavepointInternal(properties, advanceToEndOfEventTime, targetLocation);\n}\nprivate CompletableFuture triggerSavepointInternal(\nfinal CheckpointProperties checkpointProperties,\nfinal boolean advanceToEndOfEventTime,\n@Nullable final String targetLocation) {\ncheckNotNull(checkpointProperties);\nfinal CompletableFuture resultFuture = new CompletableFuture<>();\ntimer.execute(() -> triggerCheckpoint(\ncheckpointProperties,\ntargetLocation,\nfalse,\nadvanceToEndOfEventTime)\n.whenComplete((completedCheckpoint, throwable) -> {\nif (throwable == null) {\nresultFuture.complete(completedCheckpoint);\n} else {\nresultFuture.completeExceptionally(throwable);\n}\n}));\nreturn resultFuture;\n}\n/**\n* Triggers a new standard checkpoint and uses the given timestamp as the checkpoint\n* timestamp. The return value is a future. It completes when the checkpoint triggered finishes\n* or an error occurred.\n*\n* @param isPeriodic Flag indicating whether this triggered checkpoint is\n* periodic. 
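`triggerSavepointInternal` above hops onto the timer thread and then relays the inner future's outcome into a promise the caller already holds. A stand-alone sketch of that bridging pattern, with `String` standing in for `CompletedCheckpoint`:

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

class PromiseBridgeSketch {
    private final ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor();

    // Schedule the trigger on the timer thread, then relay the inner future's
    // result or failure into a future owned by the caller.
    CompletableFuture<String> trigger() {
        CompletableFuture<String> resultFuture = new CompletableFuture<>();
        timer.execute(() -> doTrigger().whenComplete((result, throwable) -> {
            if (throwable == null) {
                resultFuture.complete(result);
            } else {
                resultFuture.completeExceptionally(throwable);
            }
        }));
        return resultFuture;
    }

    private CompletableFuture<String> doTrigger() {
        return CompletableFuture.completedFuture("completed-savepoint");
    }
}
```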
If this flag is true, but the periodic scheduler is disabled,\n* the checkpoint will be declined.\n* @return a future to the completed checkpoint.\n*/\npublic CompletableFuture triggerCheckpoint(boolean isPeriodic) {\nreturn triggerCheckpoint(checkpointProperties, null, isPeriodic, false);\n}\n@VisibleForTesting\npublic CompletableFuture triggerCheckpoint(\nCheckpointProperties props,\n@Nullable String externalSavepointLocation,\nboolean isPeriodic,\nboolean advanceToEndOfTime) {\nif (advanceToEndOfTime && !(props.isSynchronous() && props.isSavepoint())) {\nreturn FutureUtils.completedExceptionally(new IllegalArgumentException(\n\"Only synchronous savepoints are allowed to advance the watermark to MAX.\"));\n}\nCheckpointTriggerRequest request = new CheckpointTriggerRequest(props, externalSavepointLocation, isPeriodic, advanceToEndOfTime);\nrequestDecider\n.chooseRequestToExecute(request, isTriggering, lastCheckpointCompletionRelativeTime)\n.ifPresent(this::startTriggeringCheckpoint);\nreturn request.onCompletionPromise;\n}\n/**\n* Initialize the checkpoint trigger asynchronously. It will be executed in io thread due to\n* it might be time-consuming.\n*\n* @param props checkpoint properties\n* @param externalSavepointLocation the external savepoint location, it might be null\n* @return the future of initialized result, checkpoint id and checkpoint location\n*/\nprivate CompletableFuture initializeCheckpoint(\nCheckpointProperties props,\n@Nullable String externalSavepointLocation) {\nreturn CompletableFuture.supplyAsync(() -> {\ntry {\nlong checkpointID = checkpointIdCounter.getAndIncrement();\nCheckpointStorageLocation checkpointStorageLocation = props.isSavepoint() ?\ncheckpointStorage\n.initializeLocationForSavepoint(checkpointID, externalSavepointLocation) :\ncheckpointStorage.initializeLocationForCheckpoint(checkpointID);\nreturn new CheckpointIdAndStorageLocation(checkpointID, checkpointStorageLocation);\n} catch (Throwable throwable) {\nthrow new CompletionException(throwable);\n}\n}, executor);\n}\nprivate PendingCheckpoint createPendingCheckpoint(\nlong timestamp,\nCheckpointProperties props,\nMap ackTasks,\nboolean isPeriodic,\nlong checkpointID,\nCheckpointStorageLocation checkpointStorageLocation,\nCompletableFuture onCompletionPromise) {\nsynchronized (lock) {\ntry {\npreCheckGlobalState(isPeriodic);\n} catch (Throwable t) {\nthrow new CompletionException(t);\n}\n}\nfinal PendingCheckpoint checkpoint = new PendingCheckpoint(\njob,\ncheckpointID,\ntimestamp,\nackTasks,\nOperatorInfo.getIds(coordinatorsToCheckpoint),\nmasterHooks.keySet(),\nprops,\ncheckpointStorageLocation,\nexecutor,\nonCompletionPromise);\nif (statsTracker != null) {\nPendingCheckpointStats callback = statsTracker.reportPendingCheckpoint(\ncheckpointID,\ntimestamp,\nprops);\ncheckpoint.setStatsCallback(callback);\n}\nsynchronized (lock) {\npendingCheckpoints.put(checkpointID, checkpoint);\nScheduledFuture cancellerHandle = timer.schedule(\nnew CheckpointCanceller(checkpoint),\ncheckpointTimeout, TimeUnit.MILLISECONDS);\nif (!checkpoint.setCancellerHandle(cancellerHandle)) {\ncancellerHandle.cancel(false);\n}\n}\nLOG.info(\"Triggering checkpoint {} (type={}) @ {} for job {}.\", checkpointID, checkpoint.getProps().getCheckpointType(), timestamp, job);\nreturn checkpoint;\n}\n/**\n* Snapshot master hook states asynchronously.\n*\n* @param checkpoint the pending checkpoint\n* @return the future represents master hook states are finished or not\n*/\nprivate CompletableFuture snapshotMasterState(PendingCheckpoint 
checkpoint) {\nif (masterHooks.isEmpty()) {\nreturn CompletableFuture.completedFuture(null);\n}\nfinal long checkpointID = checkpoint.getCheckpointId();\nfinal long timestamp = checkpoint.getCheckpointTimestamp();\nfinal CompletableFuture masterStateCompletableFuture = new CompletableFuture<>();\nfor (MasterTriggerRestoreHook masterHook : masterHooks.values()) {\nMasterHooks\n.triggerHook(masterHook, checkpointID, timestamp, executor)\n.whenCompleteAsync(\n(masterState, throwable) -> {\ntry {\nsynchronized (lock) {\nif (masterStateCompletableFuture.isDone()) {\nreturn;\n}\nif (checkpoint.isDiscarded()) {\nthrow new IllegalStateException(\n\"Checkpoint \" + checkpointID + \" has been discarded\");\n}\nif (throwable == null) {\ncheckpoint.acknowledgeMasterState(\nmasterHook.getIdentifier(), masterState);\nif (checkpoint.areMasterStatesFullyAcknowledged()) {\nmasterStateCompletableFuture.complete(null);\n}\n} else {\nmasterStateCompletableFuture.completeExceptionally(throwable);\n}\n}\n} catch (Throwable t) {\nmasterStateCompletableFuture.completeExceptionally(t);\n}\n},\ntimer);\n}\nreturn masterStateCompletableFuture;\n}\n/**\n* Snapshot task state.\n*\n* @param timestamp the timestamp of this checkpoint request\n* @param checkpointID the checkpoint id\n* @param checkpointStorageLocation the checkpoint location\n* @param props the checkpoint properties\n* @param executions the executions which should be triggered\n* @param advanceToEndOfTime Flag indicating if the source should inject a {@code MAX_WATERMARK}\n* in the pipeline to fire any registered event-time timers.\n*/\nprivate void snapshotTaskState(\nlong timestamp,\nlong checkpointID,\nCheckpointStorageLocation checkpointStorageLocation,\nCheckpointProperties props,\nExecution[] executions,\nboolean advanceToEndOfTime) {\nfinal CheckpointOptions checkpointOptions = new CheckpointOptions(\nprops.getCheckpointType(),\ncheckpointStorageLocation.getLocationReference(),\nisExactlyOnceMode,\nprops.getCheckpointType() == CheckpointType.CHECKPOINT && unalignedCheckpointsEnabled);\nfor (Execution execution: executions) {\nif (props.isSynchronous()) {\nexecution.triggerSynchronousSavepoint(checkpointID, timestamp, checkpointOptions, advanceToEndOfTime);\n} else {\nexecution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions);\n}\n}\n}\n/**\n* Trigger request is successful.\n* NOTE, it must be invoked if trigger request is successful.\n*/\nprivate void onTriggerSuccess() {\nisTriggering = false;\nnumUnsuccessfulCheckpointsTriggers.set(0);\nexecuteQueuedRequest();\n}\n/**\n* The trigger request is failed prematurely without a proper initialization.\n* There is no resource to release, but the completion promise needs to fail manually here.\n*\n* @param onCompletionPromise the completion promise of the checkpoint/savepoint\n* @param throwable the reason of trigger failure\n*/\nprivate void onTriggerFailure(\nCheckpointTriggerRequest onCompletionPromise, Throwable throwable) {\nfinal CheckpointException checkpointException =\ngetCheckpointException(CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable);\nonCompletionPromise.completeExceptionally(checkpointException);\nonTriggerFailure((PendingCheckpoint) null, checkpointException);\n}\n/**\n* The trigger request is failed.\n* NOTE, it must be invoked if trigger request is failed.\n*\n* @param checkpoint the pending checkpoint which is failed. 
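`snapshotMasterState` above completes one shared future only after the last hook has acknowledged, with every hook callback guarded by the coordinator lock. A reduced sketch of that acknowledge-all pattern, with a plain counter standing in for the pending checkpoint's bookkeeping:

```java
import java.util.concurrent.CompletableFuture;

class AckAllSketch {
    private final Object lock = new Object();
    private final CompletableFuture<Void> done = new CompletableFuture<>();
    private int outstanding;

    AckAllSketch(int hookCount) {
        this.outstanding = hookCount;
    }

    // Called once per hook; the first error fails the shared future,
    // the last successful acknowledgment completes it.
    void onHookResult(Throwable error) {
        synchronized (lock) {
            if (done.isDone()) {
                return; // a previous hook already failed the future
            }
            if (error != null) {
                done.completeExceptionally(error);
            } else if (--outstanding == 0) {
                done.complete(null);
            }
        }
    }

    CompletableFuture<Void> future() {
        return done;
    }
}
```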
It could be null if it's failed\n* prematurely without a proper initialization.\n* @param throwable the reason of trigger failure\n*/\nprivate void onTriggerFailure(@Nullable PendingCheckpoint checkpoint, Throwable throwable) {\nthrowable = ExceptionUtils.stripCompletionException(throwable);\ntry {\ncoordinatorsToCheckpoint.forEach(OperatorCoordinatorCheckpointContext::abortCurrentTriggering);\nif (checkpoint != null && !checkpoint.isDiscarded()) {\nint numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet();\nLOG.warn(\n\"Failed to trigger checkpoint {} for job {}. ({} consecutive failed attempts so far)\",\ncheckpoint.getCheckpointId(),\njob,\nnumUnsuccessful,\nthrowable);\nfinal CheckpointException cause =\ngetCheckpointException(\nCheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable);\nsynchronized (lock) {\nabortPendingCheckpoint(checkpoint, cause);\n}\n}\n} finally {\nisTriggering = false;\nexecuteQueuedRequest();\n}\n}\nprivate void executeQueuedRequest() {\nrequestDecider.chooseQueuedRequestToExecute(isTriggering, lastCheckpointCompletionRelativeTime).ifPresent(this::startTriggeringCheckpoint);\n}\n/**\n* Receives a {@link DeclineCheckpoint} message for a pending checkpoint.\n*\n* @param message Checkpoint decline from the task manager\n* @param taskManagerLocationInfo The location info of the decline checkpoint message's sender\n*/\npublic void receiveDeclineMessage(DeclineCheckpoint message, String taskManagerLocationInfo) {\nif (shutdown || message == null) {\nreturn;\n}\nif (!job.equals(message.getJob())) {\nthrow new IllegalArgumentException(\"Received DeclineCheckpoint message for job \" +\nmessage.getJob() + \" from \" + taskManagerLocationInfo + \" while this coordinator handles job \" + job);\n}\nfinal long checkpointId = message.getCheckpointId();\nfinal String reason = (message.getReason() != null ? message.getReason().getMessage() : \"\");\nPendingCheckpoint checkpoint;\nsynchronized (lock) {\nif (shutdown) {\nreturn;\n}\ncheckpoint = pendingCheckpoints.get(checkpointId);\nif (checkpoint != null) {\nPreconditions.checkState(\n!checkpoint.isDiscarded(),\n\"Received message for discarded but non-removed checkpoint \" + checkpointId);\nLOG.info(\"Decline checkpoint {} by task {} of job {} at {}.\",\ncheckpointId,\nmessage.getTaskExecutionId(),\njob,\ntaskManagerLocationInfo);\nfinal CheckpointException checkpointException;\nif (message.getReason() == null) {\ncheckpointException =\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_DECLINED);\n} else {\ncheckpointException = getCheckpointException(\nCheckpointFailureReason.JOB_FAILURE, message.getReason());\n}\nabortPendingCheckpoint(\ncheckpoint,\ncheckpointException,\nmessage.getTaskExecutionId());\n} else if (LOG.isDebugEnabled()) {\nif (recentPendingCheckpoints.contains(checkpointId)) {\nLOG.debug(\"Received another decline message for now expired checkpoint attempt {} from task {} of job {} at {} : {}\",\ncheckpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason);\n} else {\nLOG.debug(\"Received decline message for unknown (too old?) 
checkpoint attempt {} from task {} of job {} at {} : {}\",\ncheckpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason);\n}\n}\n}\n}\n/**\n* Receives an AcknowledgeCheckpoint message and returns whether the\n* message was associated with a pending checkpoint.\n*\n* @param message Checkpoint ack from the task manager\n*\n* @param taskManagerLocationInfo The location of the acknowledge checkpoint message's sender\n* @return Flag indicating whether the ack'd checkpoint was associated\n* with a pending checkpoint.\n*\n* @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store.\n*/\npublic boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message, String taskManagerLocationInfo) throws CheckpointException {\nif (shutdown || message == null) {\nreturn false;\n}\nif (!job.equals(message.getJob())) {\nLOG.error(\"Received wrong AcknowledgeCheckpoint message for job {} from {} : {}\", job, taskManagerLocationInfo, message);\nreturn false;\n}\nfinal long checkpointId = message.getCheckpointId();\nsynchronized (lock) {\nif (shutdown) {\nreturn false;\n}\nfinal PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId);\nif (checkpoint != null && !checkpoint.isDiscarded()) {\nswitch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) {\ncase SUCCESS:\nLOG.debug(\"Received acknowledge message for checkpoint {} from task {} of job {} at {}.\",\ncheckpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\nif (checkpoint.areTasksFullyAcknowledged()) {\ncompletePendingCheckpoint(checkpoint);\n}\nbreak;\ncase DUPLICATE:\nLOG.debug(\"Received a duplicate acknowledge message for checkpoint {}, task {}, job {}, location {}.\",\nmessage.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\nbreak;\ncase UNKNOWN:\nLOG.warn(\"Could not acknowledge the checkpoint {} for task {} of job {} at {}, \" +\n\"because the task's execution attempt id was unknown. Discarding \" +\n\"the state handle to avoid lingering state.\", message.getCheckpointId(),\nmessage.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\ndiscardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());\nbreak;\ncase DISCARDED:\nLOG.warn(\"Could not acknowledge the checkpoint {} for task {} of job {} at {}, \" +\n\"because the pending checkpoint had been discarded. 
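The late/unknown-message branches here consult `recentPendingCheckpoints`, the bounded "ghost id" window maintained by `rememberRecentCheckpointId` further below, to tell late or duplicate messages apart from messages for checkpoints that never existed. A sketch of that bookkeeping in isolation:

```java
import java.util.ArrayDeque;

class RecentCheckpointIds {
    private static final int NUM_GHOST_CHECKPOINT_IDS = 16;
    private final ArrayDeque<Long> recent = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS);

    void remember(long id) {
        if (recent.size() >= NUM_GHOST_CHECKPOINT_IDS) {
            recent.removeFirst(); // forget the oldest id once the window is full
        }
        recent.addLast(id);
    }

    // True -> the message is late/duplicate; false -> unknown (too old?) checkpoint
    boolean wasRecentlyPending(long id) {
        return recent.contains(id);
    }
}
```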
Discarding the \" +\n\"state handle to avoid lingering state.\",\nmessage.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\ndiscardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());\n}\nreturn true;\n}\nelse if (checkpoint != null) {\nthrow new IllegalStateException(\n\"Received message for discarded but non-removed checkpoint \" + checkpointId);\n}\nelse {\nboolean wasPendingCheckpoint;\nif (recentPendingCheckpoints.contains(checkpointId)) {\nwasPendingCheckpoint = true;\nLOG.warn(\"Received late message for now expired checkpoint attempt {} from task \" +\n\"{} of job {} at {}.\", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\n}\nelse {\nLOG.debug(\"Received message for an unknown checkpoint {} from task {} of job {} at {}.\",\ncheckpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\nwasPendingCheckpoint = false;\n}\ndiscardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());\nreturn wasPendingCheckpoint;\n}\n}\n}\n/**\n* Try to complete the given pending checkpoint.\n*\n*
Important: This method should only be called in the checkpoint lock scope.\n*\n* @param pendingCheckpoint to complete\n* @throws CheckpointException if the completion failed\n*/\nprivate void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException {\nfinal long checkpointId = pendingCheckpoint.getCheckpointId();\nfinal CompletedCheckpoint completedCheckpoint;\nMap operatorStates = pendingCheckpoint.getOperatorStates();\nsharedStateRegistry.registerAll(operatorStates.values());\ntry {\ntry {\ncompletedCheckpoint = pendingCheckpoint.finalizeCheckpoint();\nfailureManager.handleCheckpointSuccess(pendingCheckpoint.getCheckpointId());\n}\ncatch (Exception e1) {\nif (!pendingCheckpoint.isDiscarded()) {\nabortPendingCheckpoint(\npendingCheckpoint,\nnew CheckpointException(\nCheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1));\n}\nthrow new CheckpointException(\"Could not finalize the pending checkpoint \" + checkpointId + '.',\nCheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1);\n}\nPreconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null);\ntry {\ncompletedCheckpointStore.addCheckpoint(completedCheckpoint);\n} catch (Exception exception) {\nexecutor.execute(new Runnable() {\n@Override\npublic void run() {\ntry {\ncompletedCheckpoint.discardOnFailedStoring();\n} catch (Throwable t) {\nLOG.warn(\"Could not properly discard completed checkpoint {}.\", completedCheckpoint.getCheckpointID(), t);\n}\n}\n});\nsendAbortedMessages(checkpointId, pendingCheckpoint.getCheckpointTimestamp());\nthrow new CheckpointException(\"Could not complete the pending checkpoint \" + checkpointId + '.',\nCheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, exception);\n}\n} finally {\npendingCheckpoints.remove(checkpointId);\ntimer.execute(this::executeQueuedRequest);\n}\nrememberRecentCheckpointId(checkpointId);\ndropSubsumedCheckpoints(checkpointId);\nlastCheckpointCompletionRelativeTime = clock.relativeTimeMillis();\nLOG.info(\"Completed checkpoint {} for job {} ({} bytes in {} ms).\", checkpointId, job,\ncompletedCheckpoint.getStateSize(), completedCheckpoint.getDuration());\nif (LOG.isDebugEnabled()) {\nStringBuilder builder = new StringBuilder();\nbuilder.append(\"Checkpoint state: \");\nfor (OperatorState state : completedCheckpoint.getOperatorStates().values()) {\nbuilder.append(state);\nbuilder.append(\", \");\n}\nbuilder.setLength(builder.length() - 2);\nLOG.debug(builder.toString());\n}\nsendAcknowledgeMessages(checkpointId, completedCheckpoint.getTimestamp());\n}\nprivate void sendAcknowledgeMessages(long checkpointId, long timestamp) {\nfor (ExecutionVertex ev : tasksToCommitTo) {\nExecution ee = ev.getCurrentExecutionAttempt();\nif (ee != null) {\nee.notifyCheckpointComplete(checkpointId, timestamp);\n}\n}\nfor (OperatorCoordinatorCheckpointContext coordinatorContext : coordinatorsToCheckpoint) {\ncoordinatorContext.checkpointComplete(checkpointId);\n}\n}\nprivate void sendAbortedMessages(long checkpointId, long timeStamp) {\nexecutor.execute(() -> {\nfor (ExecutionVertex ev : tasksToCommitTo) {\nExecution ee = ev.getCurrentExecutionAttempt();\nif (ee != null) {\nee.notifyCheckpointAborted(checkpointId, timeStamp);\n}\n}\n});\n}\n/**\n* Fails all pending checkpoints which have not been acknowledged by the given execution\n* attempt id.\n*\n* @param executionAttemptId for which to discard unacknowledged pending checkpoints\n* @param cause of the failure\n*/\npublic void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID 
executionAttemptId, Throwable cause) {\nsynchronized (lock) {\nabortPendingCheckpoints(\ncheckpoint -> !checkpoint.isAcknowledgedBy(executionAttemptId),\nnew CheckpointException(CheckpointFailureReason.TASK_FAILURE, cause));\n}\n}\nprivate void rememberRecentCheckpointId(long id) {\nif (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) {\nrecentPendingCheckpoints.removeFirst();\n}\nrecentPendingCheckpoints.addLast(id);\n}\nprivate void dropSubsumedCheckpoints(long checkpointId) {\nabortPendingCheckpoints(\ncheckpoint -> checkpoint.getCheckpointId() < checkpointId && checkpoint.canBeSubsumed(),\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_SUBSUMED));\n}\n/**\n* Restores the latest checkpointed state.\n*\n* @param tasks Map of job vertices to restore. State for these vertices is\n* restored via {@link Execution\n* @param errorIfNoCheckpoint Fail if no completed checkpoint is available to\n* restore from.\n* @param allowNonRestoredState Allow checkpoint state that cannot be mapped\n* to any job vertex in tasks.\n* @return true if state was restored, false otherwise.\n* @throws IllegalStateException If the CheckpointCoordinator is shut down.\n* @throws IllegalStateException If no completed checkpoint is available and\n* the failIfNoCheckpoint flag has been set.\n* @throws IllegalStateException If the checkpoint contains state that cannot be\n* mapped to any job vertex in tasks and the\n* allowNonRestoredState flag has not been set.\n* @throws IllegalStateException If the max parallelism changed for an operator\n* that restores state from this checkpoint.\n* @throws IllegalStateException If the parallelism changed for an operator\n* that restores non-partitioned state from this\n* checkpoint.\n*/\n@Deprecated\npublic boolean restoreLatestCheckpointedState(\nMap tasks,\nboolean errorIfNoCheckpoint,\nboolean allowNonRestoredState) throws Exception {\nreturn restoreLatestCheckpointedStateInternal(new HashSet<>(tasks.values()), true, errorIfNoCheckpoint, allowNonRestoredState);\n}\n/**\n* Restores the latest checkpointed state to a set of subtasks. This method represents a \"local\"\n* or \"regional\" failover and does restore states to coordinators. Note that a regional failover\n* might still include all tasks.\n*\n* @param tasks Set of job vertices to restore. 
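`dropSubsumedCheckpoints` above aborts older pending attempts once a newer checkpoint completes. A simplified sketch of the subsumption rule with stand-in types; treating savepoints as never-subsumable is an assumption made for illustration:

```java
import java.util.Iterator;
import java.util.Map;

class SubsumptionSketch {
    static final class Pending {
        final long id;
        final boolean savepoint;

        Pending(long id, boolean savepoint) {
            this.id = id;
            this.savepoint = savepoint;
        }

        // Assumption for the sketch: savepoints are never subsumed
        boolean canBeSubsumed() {
            return !savepoint;
        }
    }

    // Drop every older pending attempt that the newly completed checkpoint supersedes
    static void dropSubsumed(Map<Long, Pending> pending, long completedId) {
        Iterator<Pending> it = pending.values().iterator();
        while (it.hasNext()) {
            Pending p = it.next();
            if (p.id < completedId && p.canBeSubsumed()) {
                it.remove();
            }
        }
    }
}
```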
State for these vertices is\n* restored via {@link Execution\n* @return true if state was restored, false otherwise.\n* @throws IllegalStateException If the CheckpointCoordinator is shut down.\n* @throws IllegalStateException If no completed checkpoint is available and\n* the failIfNoCheckpoint flag has been set.\n* @throws IllegalStateException If the checkpoint contains state that cannot be\n* mapped to any job vertex in tasks and the\n* allowNonRestoredState flag has not been set.\n* @throws IllegalStateException If the max parallelism changed for an operator\n* that restores state from this checkpoint.\n* @throws IllegalStateException If the parallelism changed for an operator\n* that restores non-partitioned state from this\n* checkpoint.\n*/\npublic boolean restoreLatestCheckpointedStateToSubtasks(final Set tasks) throws Exception {\nreturn restoreLatestCheckpointedStateInternal(tasks, false, false, true);\n}\n/**\n* Restores the latest checkpointed state to all tasks and all coordinators.\n* This method represents a \"global restore\"-style operation where all stateful tasks\n* and coordinators from the given set of Job Vertices\n* are restored to their latest checkpointed state.\n*\n* @param tasks Set of job vertices to restore. State for these vertices is\n* restored via {@link Execution\n* @param allowNonRestoredState Allow checkpoint state that cannot be mapped\n* to any job vertex in tasks.\n* @return true if state was restored, false otherwise.\n* @throws IllegalStateException If the CheckpointCoordinator is shut down.\n* @throws IllegalStateException If no completed checkpoint is available and\n* the failIfNoCheckpoint flag has been set.\n* @throws IllegalStateException If the checkpoint contains state that cannot be\n* mapped to any job vertex in tasks and the\n* allowNonRestoredState flag has not been set.\n* @throws IllegalStateException If the max parallelism changed for an operator\n* that restores state from this checkpoint.\n* @throws IllegalStateException If the parallelism changed for an operator\n* that restores non-partitioned state from this\n* checkpoint.\n*/\npublic boolean restoreLatestCheckpointedStateToAll(\nfinal Set tasks,\nfinal boolean allowNonRestoredState) throws Exception {\nreturn restoreLatestCheckpointedStateInternal(tasks, true, false, allowNonRestoredState);\n}\nprivate boolean restoreLatestCheckpointedStateInternal(\nfinal Set tasks,\nfinal boolean restoreCoordinators,\nfinal boolean errorIfNoCheckpoint,\nfinal boolean allowNonRestoredState) throws Exception {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalStateException(\"CheckpointCoordinator is shut down\");\n}\nsharedStateRegistry.close();\nsharedStateRegistry = sharedStateRegistryFactory.create(executor);\ncompletedCheckpointStore.recover();\nfor (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) {\ncompletedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry);\n}\nLOG.debug(\"Status of the shared state registry of job {} after restore: {}.\", job, sharedStateRegistry);\nCompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint(isPreferCheckpointForRecovery);\nif (latest == null) {\nif (errorIfNoCheckpoint) {\nthrow new IllegalStateException(\"No completed checkpoint available\");\n} else {\nLOG.debug(\"Resetting the master hooks.\");\nMasterHooks.reset(masterHooks.values(), LOG);\nreturn false;\n}\n}\nLOG.info(\"Restoring job {} from latest valid checkpoint: {}.\", job, latest);\nfinal Map 
operatorStates = latest.getOperatorStates();\nStateAssignmentOperation stateAssignmentOperation =\nnew StateAssignmentOperation(latest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState);\nstateAssignmentOperation.assignStates();\nMasterHooks.restoreMasterHooks(\nmasterHooks,\nlatest.getMasterHookStates(),\nlatest.getCheckpointID(),\nallowNonRestoredState,\nLOG);\nif (restoreCoordinators) {\nrestoreStateToCoordinators(operatorStates);\n}\nif (statsTracker != null) {\nlong restoreTimestamp = System.currentTimeMillis();\nRestoredCheckpointStats restored = new RestoredCheckpointStats(\nlatest.getCheckpointID(),\nlatest.getProperties(),\nrestoreTimestamp,\nlatest.getExternalPointer());\nstatsTracker.reportRestoredCheckpoint(restored);\n}\nreturn true;\n}\n}\n/**\n* Restore the state with given savepoint.\n*\n* @param savepointPointer The pointer to the savepoint.\n* @param allowNonRestored True if allowing checkpoint state that cannot be\n* mapped to any job vertex in tasks.\n* @param tasks Map of job vertices to restore. State for these\n* vertices is restored via\n* {@link Execution\n* @param userClassLoader The class loader to resolve serialized classes in\n* legacy savepoint versions.\n*/\npublic boolean restoreSavepoint(\nString savepointPointer,\nboolean allowNonRestored,\nMap tasks,\nClassLoader userClassLoader) throws Exception {\nPreconditions.checkNotNull(savepointPointer, \"The savepoint path cannot be null.\");\nLOG.info(\"Starting job {} from savepoint {} ({})\",\njob, savepointPointer, (allowNonRestored ? \"allowing non restored state\" : \"\"));\nfinal CompletedCheckpointStorageLocation checkpointLocation = checkpointStorage.resolveCheckpoint(savepointPointer);\nCompletedCheckpoint savepoint = Checkpoints.loadAndValidateCheckpoint(\njob, tasks, checkpointLocation, userClassLoader, allowNonRestored);\ncompletedCheckpointStore.addCheckpoint(savepoint);\nlong nextCheckpointId = savepoint.getCheckpointID() + 1;\ncheckpointIdCounter.setCount(nextCheckpointId);\nLOG.info(\"Reset the checkpoint ID of job {} to {}.\", job, nextCheckpointId);\nreturn restoreLatestCheckpointedStateInternal(new HashSet<>(tasks.values()), true, true, allowNonRestored);\n}\npublic int getNumberOfPendingCheckpoints() {\nsynchronized (lock) {\nreturn this.pendingCheckpoints.size();\n}\n}\npublic int getNumberOfRetainedSuccessfulCheckpoints() {\nsynchronized (lock) {\nreturn completedCheckpointStore.getNumberOfRetainedCheckpoints();\n}\n}\npublic Map getPendingCheckpoints() {\nsynchronized (lock) {\nreturn new HashMap<>(this.pendingCheckpoints);\n}\n}\npublic List getSuccessfulCheckpoints() throws Exception {\nsynchronized (lock) {\nreturn completedCheckpointStore.getAllCheckpoints();\n}\n}\npublic CheckpointStorageCoordinatorView getCheckpointStorage() {\nreturn checkpointStorage;\n}\npublic CompletedCheckpointStore getCheckpointStore() {\nreturn completedCheckpointStore;\n}\npublic long getCheckpointTimeout() {\nreturn checkpointTimeout;\n}\n/**\n* @deprecated use {@link\n*/\n@Deprecated\n@VisibleForTesting\nPriorityQueue getTriggerRequestQueue() {\nreturn requestDecider.getTriggerRequestQueue();\n}\npublic boolean isTriggering() {\nreturn isTriggering;\n}\n@VisibleForTesting\nboolean isCurrentPeriodicTriggerAvailable() {\nreturn currentPeriodicTrigger != null;\n}\n/**\n* Returns whether periodic checkpointing has been configured.\n*\n* @return true if periodic checkpoints have been configured.\n*/\npublic boolean isPeriodicCheckpointingConfigured() {\nreturn baseInterval != 
Long.MAX_VALUE;\n}\npublic void startCheckpointScheduler() {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalArgumentException(\"Checkpoint coordinator is shut down\");\n}\nstopCheckpointScheduler();\nperiodicScheduling = true;\ncurrentPeriodicTrigger = scheduleTriggerWithDelay(getRandomInitDelay());\n}\n}\npublic void stopCheckpointScheduler() {\nsynchronized (lock) {\nperiodicScheduling = false;\ncancelPeriodicTrigger();\nfinal CheckpointException reason =\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND);\nabortPendingAndQueuedCheckpoints(reason);\nnumUnsuccessfulCheckpointsTriggers.set(0);\n}\n}\n/**\n* Aborts all the pending checkpoints due to an exception.\n* @param exception The exception.\n*/\npublic void abortPendingCheckpoints(CheckpointException exception) {\nsynchronized (lock) {\nabortPendingCheckpoints(ignored -> true, exception);\n}\n}\nprivate void abortPendingCheckpoints(\nPredicate checkpointToFailPredicate,\nCheckpointException exception) {\nassert Thread.holdsLock(lock);\nfinal PendingCheckpoint[] pendingCheckpointsToFail = pendingCheckpoints\n.values()\n.stream()\n.filter(checkpointToFailPredicate)\n.toArray(PendingCheckpoint[]::new);\nfor (PendingCheckpoint pendingCheckpoint : pendingCheckpointsToFail) {\nabortPendingCheckpoint(pendingCheckpoint, exception);\n}\n}\nprivate void rescheduleTrigger(long tillNextMillis) {\ncancelPeriodicTrigger();\ncurrentPeriodicTrigger = scheduleTriggerWithDelay(tillNextMillis);\n}\nprivate void cancelPeriodicTrigger() {\nif (currentPeriodicTrigger != null) {\ncurrentPeriodicTrigger.cancel(false);\ncurrentPeriodicTrigger = null;\n}\n}\nprivate long getRandomInitDelay() {\nreturn ThreadLocalRandom.current().nextLong(minPauseBetweenCheckpoints, baseInterval + 1L);\n}\nprivate ScheduledFuture scheduleTriggerWithDelay(long initDelay) {\nreturn timer.scheduleAtFixedRate(\nnew ScheduledTrigger(),\ninitDelay, baseInterval, TimeUnit.MILLISECONDS);\n}\nprivate void restoreStateToCoordinators(final Map operatorStates) throws Exception {\nfor (OperatorCoordinatorCheckpointContext coordContext : coordinatorsToCheckpoint) {\nfinal OperatorState state = operatorStates.get(coordContext.operatorId());\nif (state == null) {\ncontinue;\n}\nfinal ByteStreamStateHandle coordinatorState = state.getCoordinatorState();\nif (coordinatorState != null) {\ncoordContext.resetToCheckpoint(coordinatorState.getData());\n}\n}\n}\npublic JobStatusListener createActivatorDeactivator() {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalArgumentException(\"Checkpoint coordinator is shut down\");\n}\nif (jobStatusListener == null) {\njobStatusListener = new CheckpointCoordinatorDeActivator(this);\n}\nreturn jobStatusListener;\n}\n}\nint getNumQueuedRequests() {\nreturn requestDecider.getNumQueuedRequests();\n}\nprivate final class ScheduledTrigger implements Runnable {\n@Override\npublic void run() {\ntry {\ntriggerCheckpoint(true);\n}\ncatch (Exception e) {\nLOG.error(\"Exception while triggering checkpoint for job {}.\", job, e);\n}\n}\n}\n/**\n* Discards the given state object asynchronously belonging to the given job, execution attempt\n* id and checkpoint id.\n*\n* @param jobId identifying the job to which the state object belongs\n* @param executionAttemptID identifying the task to which the state object belongs\n* @param checkpointId of the state object\n* @param subtaskState to discard asynchronously\n*/\nprivate void discardSubtaskState(\nfinal JobID jobId,\nfinal ExecutionAttemptID executionAttemptID,\nfinal long 
checkpointId,\nfinal TaskStateSnapshot subtaskState) {\nif (subtaskState != null) {\nexecutor.execute(new Runnable() {\n@Override\npublic void run() {\ntry {\nsubtaskState.discardState();\n} catch (Throwable t2) {\nLOG.warn(\"Could not properly discard state object of checkpoint {} \" +\n\"belonging to task {} of job {}.\", checkpointId, executionAttemptID, jobId, t2);\n}\n}\n});\n}\n}\nprivate void abortPendingCheckpoint(\nPendingCheckpoint pendingCheckpoint,\nCheckpointException exception) {\nabortPendingCheckpoint(pendingCheckpoint, exception, null);\n}\nprivate void abortPendingCheckpoint(\nPendingCheckpoint pendingCheckpoint,\nCheckpointException exception,\n@Nullable final ExecutionAttemptID executionAttemptID) {\nassert(Thread.holdsLock(lock));\nif (!pendingCheckpoint.isDiscarded()) {\ntry {\npendingCheckpoint.abort(\nexception.getCheckpointFailureReason(), exception.getCause());\nif (pendingCheckpoint.getProps().isSavepoint() &&\npendingCheckpoint.getProps().isSynchronous()) {\nfailureManager.handleSynchronousSavepointFailure(exception);\n} else if (executionAttemptID != null) {\nfailureManager.handleTaskLevelCheckpointException(\nexception, pendingCheckpoint.getCheckpointId(), executionAttemptID);\n} else {\nfailureManager.handleJobLevelCheckpointException(\nexception, of(pendingCheckpoint.getCheckpointId()));\n}\n} finally {\nsendAbortedMessages(pendingCheckpoint.getCheckpointId(), pendingCheckpoint.getCheckpointTimestamp());\npendingCheckpoints.remove(pendingCheckpoint.getCheckpointId());\nrememberRecentCheckpointId(pendingCheckpoint.getCheckpointId());\ntimer.execute(this::executeQueuedRequest);\n}\n}\n}\nprivate void preCheckGlobalState(boolean isPeriodic) throws CheckpointException {\nif (shutdown) {\nthrow new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);\n}\nif (isPeriodic && !periodicScheduling) {\nthrow new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN);\n}\n}\n/**\n* Check if all tasks that we need to trigger are running. If not, abort the checkpoint.\n*\n* @return the executions that need to be triggered.\n* @throws CheckpointException if the check fails\n*/\nprivate Execution[] getTriggerExecutions() throws CheckpointException {\nExecution[] executions = new Execution[tasksToTrigger.length];\nfor (int i = 0; i < tasksToTrigger.length; i++) {\nExecution ee = tasksToTrigger[i].getCurrentExecutionAttempt();\nif (ee == null) {\nLOG.info(\n\"Checkpoint triggering task {} of job {} is not being executed at the moment. Aborting checkpoint.\",\ntasksToTrigger[i].getTaskNameWithSubtaskIndex(),\njob);\nthrow new CheckpointException(\nCheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);\n} else if (ee.getState() == ExecutionState.RUNNING) {\nexecutions[i] = ee;\n} else {\nLOG.info(\n\"Checkpoint triggering task {} of job {} is not in state {} but {} instead. 
Aborting checkpoint.\",\ntasksToTrigger[i].getTaskNameWithSubtaskIndex(),\njob,\nExecutionState.RUNNING,\nee.getState());\nthrow new CheckpointException(\nCheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);\n}\n}\nreturn executions;\n}\n/**\n* Check if all tasks that need to acknowledge the checkpoint are running.\n* If not, abort the checkpoint.\n*\n* @return the execution vertices which should give an ack response\n* @throws CheckpointException if the check fails\n*/\nprivate Map getAckTasks() throws CheckpointException {\nMap ackTasks = new HashMap<>(tasksToWaitFor.length);\nfor (ExecutionVertex ev : tasksToWaitFor) {\nExecution ee = ev.getCurrentExecutionAttempt();\nif (ee != null) {\nackTasks.put(ee.getAttemptId(), ev);\n} else {\nLOG.info(\n\"Checkpoint acknowledging task {} of job {} is not being executed at the moment. Aborting checkpoint.\",\nev.getTaskNameWithSubtaskIndex(),\njob);\nthrow new CheckpointException(\nCheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);\n}\n}\nreturn ackTasks;\n}\nprivate void abortPendingAndQueuedCheckpoints(CheckpointException exception) {\nassert(Thread.holdsLock(lock));\nrequestDecider.abortAll(exception);\nabortPendingCheckpoints(exception);\n}\n/**\n* The canceller of a checkpoint. The checkpoint may be cancelled if it does not finish within the\n* configured period.\n*/\nprivate class CheckpointCanceller implements Runnable {\nprivate final PendingCheckpoint pendingCheckpoint;\nprivate CheckpointCanceller(PendingCheckpoint pendingCheckpoint) {\nthis.pendingCheckpoint = checkNotNull(pendingCheckpoint);\n}\n@Override\npublic void run() {\nsynchronized (lock) {\nif (!pendingCheckpoint.isDiscarded()) {\nLOG.info(\"Checkpoint {} of job {} expired before completing.\",\npendingCheckpoint.getCheckpointId(), job);\nabortPendingCheckpoint(\npendingCheckpoint,\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_EXPIRED));\n}\n}\n}\n}\nprivate static CheckpointException getCheckpointException(\nCheckpointFailureReason defaultReason, Throwable throwable) {\nfinal Optional checkpointExceptionOptional =\nExceptionUtils.findThrowable(throwable, CheckpointException.class);\nreturn checkpointExceptionOptional\n.orElseGet(() -> new CheckpointException(defaultReason, throwable));\n}\nprivate static class CheckpointIdAndStorageLocation {\nprivate final long checkpointId;\nprivate final CheckpointStorageLocation checkpointStorageLocation;\nCheckpointIdAndStorageLocation(\nlong checkpointId,\nCheckpointStorageLocation checkpointStorageLocation) {\nthis.checkpointId = checkpointId;\nthis.checkpointStorageLocation = checkNotNull(checkpointStorageLocation);\n}\n}\nstatic class CheckpointTriggerRequest {\nfinal long timestamp;\nfinal CheckpointProperties props;\nfinal @Nullable String externalSavepointLocation;\nfinal boolean isPeriodic;\nfinal boolean advanceToEndOfTime;\nprivate final CompletableFuture onCompletionPromise = new CompletableFuture<>();\nCheckpointTriggerRequest(\nCheckpointProperties props,\n@Nullable String externalSavepointLocation,\nboolean isPeriodic,\nboolean advanceToEndOfTime) {\nthis.timestamp = System.currentTimeMillis();\nthis.props = checkNotNull(props);\nthis.externalSavepointLocation = externalSavepointLocation;\nthis.isPeriodic = isPeriodic;\nthis.advanceToEndOfTime = advanceToEndOfTime;\n}\nCompletableFuture getOnCompletionFuture() {\nreturn onCompletionPromise;\n}\npublic void completeExceptionally(CheckpointException exception) {\nonCompletionPromise.completeExceptionally(exception);\n}\npublic boolean isForce() 
{\nreturn props.forceCheckpoint();\n}\n}\n}", + "context_after": "class CheckpointCoordinator {\nprivate static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class);\n/** The number of recent checkpoints whose IDs are remembered. */\nprivate static final int NUM_GHOST_CHECKPOINT_IDS = 16;\n/** Coordinator-wide lock to safeguard the checkpoint updates. */\nprivate final Object lock = new Object();\n/** The job whose checkpoint this coordinator coordinates. */\nprivate final JobID job;\n/** Default checkpoint properties. **/\nprivate final CheckpointProperties checkpointProperties;\n/** The executor used for asynchronous calls, like potentially blocking I/O. */\nprivate final Executor executor;\n/** Tasks who need to be sent a message when a checkpoint is started. */\nprivate final ExecutionVertex[] tasksToTrigger;\n/** Tasks who need to acknowledge a checkpoint before it succeeds. */\nprivate final ExecutionVertex[] tasksToWaitFor;\n/** Tasks who need to be sent a message when a checkpoint is confirmed. */\nprivate final ExecutionVertex[] tasksToCommitTo;\n/** The operator coordinators that need to be checkpointed. */\nprivate final Collection coordinatorsToCheckpoint;\n/** Map from checkpoint ID to the pending checkpoint. */\n@GuardedBy(\"lock\")\nprivate final Map pendingCheckpoints;\n/** Completed checkpoints. Implementations can be blocking. Make sure calls to methods\n* accessing this don't block the job manager actor and run asynchronously. */\nprivate final CompletedCheckpointStore completedCheckpointStore;\n/** The root checkpoint state backend, which is responsible for initializing the\n* checkpoint, storing the metadata, and cleaning up the checkpoint. */\nprivate final CheckpointStorageCoordinatorView checkpointStorage;\n/** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */\nprivate final ArrayDeque recentPendingCheckpoints;\n/** Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these\n* need to be ascending across job managers. */\nprivate final CheckpointIDCounter checkpointIdCounter;\n/** The base checkpoint interval. Actual trigger time may be affected by the\n* max concurrent checkpoints and minimum-pause values */\nprivate final long baseInterval;\n/** The max time (in ms) that a checkpoint may take. */\nprivate final long checkpointTimeout;\n/** The min time(in ms) to delay after a checkpoint could be triggered. Allows to\n* enforce minimum processing time between checkpoint attempts */\nprivate final long minPauseBetweenCheckpoints;\n/** The timer that handles the checkpoint timeouts and triggers periodic checkpoints.\n* It must be single-threaded. Eventually it will be replaced by main thread executor. */\nprivate final ScheduledExecutor timer;\n/** The master checkpoint hooks executed by this checkpoint coordinator. */\nprivate final HashMap> masterHooks;\nprivate final boolean unalignedCheckpointsEnabled;\n/** Actor that receives status updates from the execution graph this coordinator works for. */\nprivate JobStatusListener jobStatusListener;\n/** The number of consecutive failed trigger attempts. */\nprivate final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0);\n/** A handle to the current periodic trigger, to cancel it when necessary. */\nprivate ScheduledFuture currentPeriodicTrigger;\n/** The timestamp (via {@link Clock\n* completed. 
*/\nprivate long lastCheckpointCompletionRelativeTime;\n/** Flag whether a triggered checkpoint should immediately schedule the next checkpoint.\n* Non-volatile, because only accessed in synchronized scope */\nprivate boolean periodicScheduling;\n/** Flag marking the coordinator as shut down (not accepting any messages any more). */\nprivate volatile boolean shutdown;\n/** Optional tracker for checkpoint statistics. */\n@Nullable\nprivate CheckpointStatsTracker statsTracker;\n/** A factory for SharedStateRegistry objects. */\nprivate final SharedStateRegistryFactory sharedStateRegistryFactory;\n/** Registry that tracks state which is shared across (incremental) checkpoints. */\nprivate SharedStateRegistry sharedStateRegistry;\nprivate boolean isPreferCheckpointForRecovery;\nprivate final CheckpointFailureManager failureManager;\nprivate final Clock clock;\nprivate final boolean isExactlyOnceMode;\n/** Flag represents there is an in-flight trigger request. */\nprivate boolean isTriggering = false;\nprivate final CheckpointRequestDecider requestDecider;\npublic CheckpointCoordinator(\nJobID job,\nCheckpointCoordinatorConfiguration chkConfig,\nExecutionVertex[] tasksToTrigger,\nExecutionVertex[] tasksToWaitFor,\nExecutionVertex[] tasksToCommitTo,\nCollection coordinatorsToCheckpoint,\nCheckpointIDCounter checkpointIDCounter,\nCompletedCheckpointStore completedCheckpointStore,\nStateBackend checkpointStateBackend,\nExecutor executor,\nScheduledExecutor timer,\nSharedStateRegistryFactory sharedStateRegistryFactory,\nCheckpointFailureManager failureManager) {\nthis(\njob,\nchkConfig,\ntasksToTrigger,\ntasksToWaitFor,\ntasksToCommitTo,\ncoordinatorsToCheckpoint,\ncheckpointIDCounter,\ncompletedCheckpointStore,\ncheckpointStateBackend,\nexecutor,\ntimer,\nsharedStateRegistryFactory,\nfailureManager,\nSystemClock.getInstance());\n}\n@VisibleForTesting\npublic CheckpointCoordinator(\nJobID job,\nCheckpointCoordinatorConfiguration chkConfig,\nExecutionVertex[] tasksToTrigger,\nExecutionVertex[] tasksToWaitFor,\nExecutionVertex[] tasksToCommitTo,\nCollection coordinatorsToCheckpoint,\nCheckpointIDCounter checkpointIDCounter,\nCompletedCheckpointStore completedCheckpointStore,\nStateBackend checkpointStateBackend,\nExecutor executor,\nScheduledExecutor timer,\nSharedStateRegistryFactory sharedStateRegistryFactory,\nCheckpointFailureManager failureManager,\nClock clock) {\ncheckNotNull(checkpointStateBackend);\nlong minPauseBetweenCheckpoints = chkConfig.getMinPauseBetweenCheckpoints();\nif (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) {\nminPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000;\n}\nlong baseInterval = chkConfig.getCheckpointInterval();\nif (baseInterval < minPauseBetweenCheckpoints) {\nbaseInterval = minPauseBetweenCheckpoints;\n}\nthis.job = checkNotNull(job);\nthis.baseInterval = baseInterval;\nthis.checkpointTimeout = chkConfig.getCheckpointTimeout();\nthis.minPauseBetweenCheckpoints = minPauseBetweenCheckpoints;\nthis.tasksToTrigger = checkNotNull(tasksToTrigger);\nthis.tasksToWaitFor = checkNotNull(tasksToWaitFor);\nthis.tasksToCommitTo = checkNotNull(tasksToCommitTo);\nthis.coordinatorsToCheckpoint = Collections.unmodifiableCollection(coordinatorsToCheckpoint);\nthis.pendingCheckpoints = new LinkedHashMap<>();\nthis.checkpointIdCounter = checkNotNull(checkpointIDCounter);\nthis.completedCheckpointStore = checkNotNull(completedCheckpointStore);\nthis.executor = checkNotNull(executor);\nthis.sharedStateRegistryFactory = 
checkNotNull(sharedStateRegistryFactory);\nthis.sharedStateRegistry = sharedStateRegistryFactory.create(executor);\nthis.isPreferCheckpointForRecovery = chkConfig.isPreferCheckpointForRecovery();\nthis.failureManager = checkNotNull(failureManager);\nthis.clock = checkNotNull(clock);\nthis.isExactlyOnceMode = chkConfig.isExactlyOnce();\nthis.unalignedCheckpointsEnabled = chkConfig.isUnalignedCheckpointsEnabled();\nthis.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS);\nthis.masterHooks = new HashMap<>();\nthis.timer = timer;\nthis.checkpointProperties = CheckpointProperties.forCheckpoint(chkConfig.getCheckpointRetentionPolicy());\ntry {\nthis.checkpointStorage = checkpointStateBackend.createCheckpointStorage(job);\ncheckpointStorage.initializeBaseLocations();\n} catch (IOException e) {\nthrow new FlinkRuntimeException(\"Failed to create checkpoint storage at checkpoint coordinator side.\", e);\n}\ntry {\ncheckpointIDCounter.start();\n} catch (Throwable t) {\nthrow new RuntimeException(\"Failed to start checkpoint ID counter: \" + t.getMessage(), t);\n}\nthis.requestDecider = new CheckpointRequestDecider(\nchkConfig.getMaxConcurrentCheckpoints(),\nthis::rescheduleTrigger,\nthis.clock,\nthis.minPauseBetweenCheckpoints,\nthis.pendingCheckpoints::size,\nthis.lock);\n}\n/**\n* Adds the given master hook to the checkpoint coordinator. This method does nothing if\n* the checkpoint coordinator already contained a hook with the same ID (as defined via\n* {@link MasterTriggerRestoreHook\n*\n* @param hook The hook to add.\n* @return True, if the hook was added, false if the checkpoint coordinator already\n* contained a hook with the same ID.\n*/\npublic boolean addMasterHook(MasterTriggerRestoreHook hook) {\ncheckNotNull(hook);\nfinal String id = hook.getIdentifier();\ncheckArgument(!StringUtils.isNullOrWhitespaceOnly(id), \"The hook has a null or empty id\");\nsynchronized (lock) {\nif (!masterHooks.containsKey(id)) {\nmasterHooks.put(id, hook);\nreturn true;\n}\nelse {\nreturn false;\n}\n}\n}\n/**\n* Gets the number of currently registered master hooks.\n*/\npublic int getNumberOfRegisteredMasterHooks() {\nsynchronized (lock) {\nreturn masterHooks.size();\n}\n}\n/**\n* Sets the checkpoint stats tracker.\n*\n* @param statsTracker The checkpoint stats tracker.\n*/\npublic void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) {\nthis.statsTracker = statsTracker;\n}\n/**\n* Shuts down the checkpoint coordinator.\n*\n*

After this method has been called, the coordinator does not accept\n* any further messages and cannot trigger any further checkpoints.\n*/\npublic void shutdown(JobStatus jobStatus) throws Exception {\nsynchronized (lock) {\nif (!shutdown) {\nshutdown = true;\nLOG.info(\"Stopping checkpoint coordinator for job {}.\", job);\nperiodicScheduling = false;\nMasterHooks.close(masterHooks.values(), LOG);\nmasterHooks.clear();\nfinal CheckpointException reason = new CheckpointException(\nCheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);\nabortPendingAndQueuedCheckpoints(reason);\ncompletedCheckpointStore.shutdown(jobStatus);\ncheckpointIdCounter.shutdown(jobStatus);\n}\n}\n}\npublic boolean isShutdown() {\nreturn shutdown;\n}\n/**\n* Triggers a savepoint with the given savepoint directory as a target.\n*\n* @param targetLocation Target location for the savepoint, optional. If null, the\n* state backend's configured default will be used.\n* @return A future to the completed checkpoint\n* @throws IllegalStateException If no savepoint directory has been\n* specified and no default savepoint directory has been\n* configured\n*/\npublic CompletableFuture triggerSavepoint(@Nullable final String targetLocation) {\nfinal CheckpointProperties properties = CheckpointProperties.forSavepoint(!unalignedCheckpointsEnabled);\nreturn triggerSavepointInternal(properties, false, targetLocation);\n}\n/**\n* Triggers a synchronous savepoint with the given savepoint directory as a target.\n*\n* @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline\n* to fire any registered event-time timers.\n* @param targetLocation Target location for the savepoint, optional. If null, the\n* state backend's configured default will be used.\n* @return A future to the completed checkpoint\n* @throws IllegalStateException If no savepoint directory has been\n* specified and no default savepoint directory has been\n* configured\n*/\npublic CompletableFuture triggerSynchronousSavepoint(\nfinal boolean advanceToEndOfEventTime,\n@Nullable final String targetLocation) {\nfinal CheckpointProperties properties = CheckpointProperties.forSyncSavepoint(!unalignedCheckpointsEnabled);\nreturn triggerSavepointInternal(properties, advanceToEndOfEventTime, targetLocation);\n}\nprivate CompletableFuture triggerSavepointInternal(\nfinal CheckpointProperties checkpointProperties,\nfinal boolean advanceToEndOfEventTime,\n@Nullable final String targetLocation) {\ncheckNotNull(checkpointProperties);\nfinal CompletableFuture resultFuture = new CompletableFuture<>();\ntimer.execute(() -> triggerCheckpoint(\ncheckpointProperties,\ntargetLocation,\nfalse,\nadvanceToEndOfEventTime)\n.whenComplete((completedCheckpoint, throwable) -> {\nif (throwable == null) {\nresultFuture.complete(completedCheckpoint);\n} else {\nresultFuture.completeExceptionally(throwable);\n}\n}));\nreturn resultFuture;\n}\n/**\n* Triggers a new standard checkpoint. The return value is a future that completes when the\n* triggered checkpoint finishes or an error occurs.\n*\n* @param isPeriodic Flag indicating whether this triggered checkpoint is\n* periodic. 
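`triggerSavepointInternal` above hops onto the single-threaded timer and relays the eventual outcome to the caller through a separate `CompletableFuture`. A small self-contained sketch of that relay pattern, assuming a plain `ScheduledExecutorService` in place of Flink's `ScheduledExecutor` (all names here are illustrative):

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

// Sketch: run the trigger on a dedicated single-threaded executor and
// forward success/failure to a promise handed back to the caller.
public class TriggerRelay {
    private final ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor();

    CompletableFuture<String> trigger() {
        CompletableFuture<String> resultFuture = new CompletableFuture<>();
        timer.execute(() ->
                doTrigger().whenComplete((result, throwable) -> {
                    if (throwable == null) {
                        resultFuture.complete(result);
                    } else {
                        resultFuture.completeExceptionally(throwable);
                    }
                }));
        return resultFuture;
    }

    // Stand-in for the actual checkpoint/savepoint work.
    private CompletableFuture<String> doTrigger() {
        return CompletableFuture.completedFuture("completed-checkpoint");
    }

    public static void main(String[] args) throws Exception {
        TriggerRelay relay = new TriggerRelay();
        System.out.println(relay.trigger().get()); // prints completed-checkpoint
        relay.timer.shutdown();
    }
}
```

The indirection keeps all trigger bookkeeping on the timer thread while still giving callers a future they can wait on from any thread.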
If this flag is true, but the periodic scheduler is disabled,\n* the checkpoint will be declined.\n* @return a future to the completed checkpoint.\n*/\npublic CompletableFuture triggerCheckpoint(boolean isPeriodic) {\nreturn triggerCheckpoint(checkpointProperties, null, isPeriodic, false);\n}\n@VisibleForTesting\npublic CompletableFuture triggerCheckpoint(\nCheckpointProperties props,\n@Nullable String externalSavepointLocation,\nboolean isPeriodic,\nboolean advanceToEndOfTime) {\nif (advanceToEndOfTime && !(props.isSynchronous() && props.isSavepoint())) {\nreturn FutureUtils.completedExceptionally(new IllegalArgumentException(\n\"Only synchronous savepoints are allowed to advance the watermark to MAX.\"));\n}\nCheckpointTriggerRequest request = new CheckpointTriggerRequest(props, externalSavepointLocation, isPeriodic, advanceToEndOfTime);\nrequestDecider\n.chooseRequestToExecute(request, isTriggering, lastCheckpointCompletionRelativeTime)\n.ifPresent(this::startTriggeringCheckpoint);\nreturn request.onCompletionPromise;\n}\n/**\n* Initialize the checkpoint trigger asynchronously. It will be executed on the I/O thread because\n* it might be time-consuming.\n*\n* @param props checkpoint properties\n* @param externalSavepointLocation the external savepoint location; it might be null\n* @return a future of the initialization result: checkpoint id and checkpoint storage location\n*/\nprivate CompletableFuture initializeCheckpoint(\nCheckpointProperties props,\n@Nullable String externalSavepointLocation) {\nreturn CompletableFuture.supplyAsync(() -> {\ntry {\nlong checkpointID = checkpointIdCounter.getAndIncrement();\nCheckpointStorageLocation checkpointStorageLocation = props.isSavepoint() ?\ncheckpointStorage\n.initializeLocationForSavepoint(checkpointID, externalSavepointLocation) :\ncheckpointStorage.initializeLocationForCheckpoint(checkpointID);\nreturn new CheckpointIdAndStorageLocation(checkpointID, checkpointStorageLocation);\n} catch (Throwable throwable) {\nthrow new CompletionException(throwable);\n}\n}, executor);\n}\nprivate PendingCheckpoint createPendingCheckpoint(\nlong timestamp,\nCheckpointProperties props,\nMap ackTasks,\nboolean isPeriodic,\nlong checkpointID,\nCheckpointStorageLocation checkpointStorageLocation,\nCompletableFuture onCompletionPromise) {\nsynchronized (lock) {\ntry {\npreCheckGlobalState(isPeriodic);\n} catch (Throwable t) {\nthrow new CompletionException(t);\n}\n}\nfinal PendingCheckpoint checkpoint = new PendingCheckpoint(\njob,\ncheckpointID,\ntimestamp,\nackTasks,\nOperatorInfo.getIds(coordinatorsToCheckpoint),\nmasterHooks.keySet(),\nprops,\ncheckpointStorageLocation,\nexecutor,\nonCompletionPromise);\nif (statsTracker != null) {\nPendingCheckpointStats callback = statsTracker.reportPendingCheckpoint(\ncheckpointID,\ntimestamp,\nprops);\ncheckpoint.setStatsCallback(callback);\n}\nsynchronized (lock) {\npendingCheckpoints.put(checkpointID, checkpoint);\nScheduledFuture cancellerHandle = timer.schedule(\nnew CheckpointCanceller(checkpoint),\ncheckpointTimeout, TimeUnit.MILLISECONDS);\nif (!checkpoint.setCancellerHandle(cancellerHandle)) {\ncancellerHandle.cancel(false);\n}\n}\nLOG.info(\"Triggering checkpoint {} (type={}) @ {} for job {}.\", checkpointID, checkpoint.getProps().getCheckpointType(), timestamp, job);\nreturn checkpoint;\n}\n/**\n* Snapshot master hook states asynchronously.\n*\n* @param checkpoint the pending checkpoint\n* @return a future that completes once all master hook states are finished\n*/\nprivate CompletableFuture snapshotMasterState(PendingCheckpoint 
checkpoint) {\nif (masterHooks.isEmpty()) {\nreturn CompletableFuture.completedFuture(null);\n}\nfinal long checkpointID = checkpoint.getCheckpointId();\nfinal long timestamp = checkpoint.getCheckpointTimestamp();\nfinal CompletableFuture masterStateCompletableFuture = new CompletableFuture<>();\nfor (MasterTriggerRestoreHook masterHook : masterHooks.values()) {\nMasterHooks\n.triggerHook(masterHook, checkpointID, timestamp, executor)\n.whenCompleteAsync(\n(masterState, throwable) -> {\ntry {\nsynchronized (lock) {\nif (masterStateCompletableFuture.isDone()) {\nreturn;\n}\nif (checkpoint.isDiscarded()) {\nthrow new IllegalStateException(\n\"Checkpoint \" + checkpointID + \" has been discarded\");\n}\nif (throwable == null) {\ncheckpoint.acknowledgeMasterState(\nmasterHook.getIdentifier(), masterState);\nif (checkpoint.areMasterStatesFullyAcknowledged()) {\nmasterStateCompletableFuture.complete(null);\n}\n} else {\nmasterStateCompletableFuture.completeExceptionally(throwable);\n}\n}\n} catch (Throwable t) {\nmasterStateCompletableFuture.completeExceptionally(t);\n}\n},\ntimer);\n}\nreturn masterStateCompletableFuture;\n}\n/**\n* Snapshot task state.\n*\n* @param timestamp the timestamp of this checkpoint request\n* @param checkpointID the checkpoint id\n* @param checkpointStorageLocation the checkpoint location\n* @param props the checkpoint properties\n* @param executions the executions which should be triggered\n* @param advanceToEndOfTime Flag indicating if the source should inject a {@code MAX_WATERMARK}\n* in the pipeline to fire any registered event-time timers.\n*/\nprivate void snapshotTaskState(\nlong timestamp,\nlong checkpointID,\nCheckpointStorageLocation checkpointStorageLocation,\nCheckpointProperties props,\nExecution[] executions,\nboolean advanceToEndOfTime) {\nfinal CheckpointOptions checkpointOptions = new CheckpointOptions(\nprops.getCheckpointType(),\ncheckpointStorageLocation.getLocationReference(),\nisExactlyOnceMode,\nprops.getCheckpointType() == CheckpointType.CHECKPOINT && unalignedCheckpointsEnabled);\nfor (Execution execution: executions) {\nif (props.isSynchronous()) {\nexecution.triggerSynchronousSavepoint(checkpointID, timestamp, checkpointOptions, advanceToEndOfTime);\n} else {\nexecution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions);\n}\n}\n}\n/**\n* The trigger request was successful.\n* NOTE: it must be invoked whenever a trigger request succeeds.\n*/\nprivate void onTriggerSuccess() {\nisTriggering = false;\nnumUnsuccessfulCheckpointsTriggers.set(0);\nexecuteQueuedRequest();\n}\n/**\n* The trigger request failed prematurely, without proper initialization.\n* There is no resource to release, but the completion promise needs to be failed manually here.\n*\n* @param onCompletionPromise the completion promise of the checkpoint/savepoint\n* @param throwable the reason of trigger failure\n*/\nprivate void onTriggerFailure(\nCheckpointTriggerRequest onCompletionPromise, Throwable throwable) {\nfinal CheckpointException checkpointException =\ngetCheckpointException(CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable);\nonCompletionPromise.completeExceptionally(checkpointException);\nonTriggerFailure((PendingCheckpoint) null, checkpointException);\n}\n/**\n* The trigger request failed.\n* NOTE: it must be invoked whenever a trigger request fails.\n*\n* @param checkpoint the pending checkpoint that failed. 
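`snapshotMasterState` above completes a single future only once every master hook has acknowledged, and fails it on the first error. A compact sketch of that fan-in pattern using a plain counter instead of the pending checkpoint's acknowledgement bookkeeping (class and method names are illustrative):

```java
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;

// Sketch: complete `allDone` once every per-hook future has succeeded;
// the first failure completes it exceptionally and wins. Assumes a
// non-empty hook list, as the coordinator short-circuits the empty case.
public class FanIn {
    static CompletableFuture<Void> whenAllAcknowledged(List<CompletableFuture<Void>> hookFutures) {
        CompletableFuture<Void> allDone = new CompletableFuture<>();
        AtomicInteger remaining = new AtomicInteger(hookFutures.size());
        for (CompletableFuture<Void> f : hookFutures) {
            f.whenComplete((ignored, throwable) -> {
                if (throwable != null) {
                    allDone.completeExceptionally(throwable);
                } else if (remaining.decrementAndGet() == 0) {
                    allDone.complete(null);
                }
            });
        }
        return allDone;
    }

    public static void main(String[] args) throws Exception {
        List<CompletableFuture<Void>> hooks = List.of(
                CompletableFuture.<Void>completedFuture(null),
                CompletableFuture.<Void>completedFuture(null));
        whenAllAcknowledged(hooks).get();
        System.out.println("all master hooks acknowledged");
    }
}
```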
It can be null if the trigger failed\n* prematurely, before proper initialization.\n* @param throwable the reason of trigger failure\n*/\nprivate void onTriggerFailure(@Nullable PendingCheckpoint checkpoint, Throwable throwable) {\nthrowable = ExceptionUtils.stripCompletionException(throwable);\ntry {\ncoordinatorsToCheckpoint.forEach(OperatorCoordinatorCheckpointContext::abortCurrentTriggering);\nif (checkpoint != null && !checkpoint.isDiscarded()) {\nint numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet();\nLOG.warn(\n\"Failed to trigger checkpoint {} for job {}. ({} consecutive failed attempts so far)\",\ncheckpoint.getCheckpointId(),\njob,\nnumUnsuccessful,\nthrowable);\nfinal CheckpointException cause =\ngetCheckpointException(\nCheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable);\nsynchronized (lock) {\nabortPendingCheckpoint(checkpoint, cause);\n}\n}\n} finally {\nisTriggering = false;\nexecuteQueuedRequest();\n}\n}\nprivate void executeQueuedRequest() {\nrequestDecider.chooseQueuedRequestToExecute(isTriggering, lastCheckpointCompletionRelativeTime).ifPresent(this::startTriggeringCheckpoint);\n}\n/**\n* Receives a {@link DeclineCheckpoint} message for a pending checkpoint.\n*\n* @param message Checkpoint decline from the task manager\n* @param taskManagerLocationInfo The location info of the decline checkpoint message's sender\n*/\npublic void receiveDeclineMessage(DeclineCheckpoint message, String taskManagerLocationInfo) {\nif (shutdown || message == null) {\nreturn;\n}\nif (!job.equals(message.getJob())) {\nthrow new IllegalArgumentException(\"Received DeclineCheckpoint message for job \" +\nmessage.getJob() + \" from \" + taskManagerLocationInfo + \" while this coordinator handles job \" + job);\n}\nfinal long checkpointId = message.getCheckpointId();\nfinal String reason = (message.getReason() != null ? message.getReason().getMessage() : \"\");\nPendingCheckpoint checkpoint;\nsynchronized (lock) {\nif (shutdown) {\nreturn;\n}\ncheckpoint = pendingCheckpoints.get(checkpointId);\nif (checkpoint != null) {\nPreconditions.checkState(\n!checkpoint.isDiscarded(),\n\"Received message for discarded but non-removed checkpoint \" + checkpointId);\nLOG.info(\"Decline checkpoint {} by task {} of job {} at {}.\",\ncheckpointId,\nmessage.getTaskExecutionId(),\njob,\ntaskManagerLocationInfo);\nfinal CheckpointException checkpointException;\nif (message.getReason() == null) {\ncheckpointException =\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_DECLINED);\n} else {\ncheckpointException = getCheckpointException(\nCheckpointFailureReason.JOB_FAILURE, message.getReason());\n}\nabortPendingCheckpoint(\ncheckpoint,\ncheckpointException,\nmessage.getTaskExecutionId());\n} else if (LOG.isDebugEnabled()) {\nif (recentPendingCheckpoints.contains(checkpointId)) {\nLOG.debug(\"Received another decline message for now expired checkpoint attempt {} from task {} of job {} at {} : {}\",\ncheckpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason);\n} else {\nLOG.debug(\"Received decline message for unknown (too old?) 
checkpoint attempt {} from task {} of job {} at {} : {}\",\ncheckpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason);\n}\n}\n}\n}\n/**\n* Receives an AcknowledgeCheckpoint message and returns whether the\n* message was associated with a pending checkpoint.\n*\n* @param message Checkpoint ack from the task manager\n*\n* @param taskManagerLocationInfo The location of the acknowledge checkpoint message's sender\n* @return Flag indicating whether the ack'd checkpoint was associated\n* with a pending checkpoint.\n*\n* @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store.\n*/\npublic boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message, String taskManagerLocationInfo) throws CheckpointException {\nif (shutdown || message == null) {\nreturn false;\n}\nif (!job.equals(message.getJob())) {\nLOG.error(\"Received wrong AcknowledgeCheckpoint message for job {} from {} : {}\", job, taskManagerLocationInfo, message);\nreturn false;\n}\nfinal long checkpointId = message.getCheckpointId();\nsynchronized (lock) {\nif (shutdown) {\nreturn false;\n}\nfinal PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId);\nif (checkpoint != null && !checkpoint.isDiscarded()) {\nswitch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) {\ncase SUCCESS:\nLOG.debug(\"Received acknowledge message for checkpoint {} from task {} of job {} at {}.\",\ncheckpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\nif (checkpoint.areTasksFullyAcknowledged()) {\ncompletePendingCheckpoint(checkpoint);\n}\nbreak;\ncase DUPLICATE:\nLOG.debug(\"Received a duplicate acknowledge message for checkpoint {}, task {}, job {}, location {}.\",\nmessage.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\nbreak;\ncase UNKNOWN:\nLOG.warn(\"Could not acknowledge the checkpoint {} for task {} of job {} at {}, \" +\n\"because the task's execution attempt id was unknown. Discarding \" +\n\"the state handle to avoid lingering state.\", message.getCheckpointId(),\nmessage.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\ndiscardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());\nbreak;\ncase DISCARDED:\nLOG.warn(\"Could not acknowledge the checkpoint {} for task {} of job {} at {}, \" +\n\"because the pending checkpoint had been discarded. 
Discarding the \" +\n\"state handle to avoid lingering state.\",\nmessage.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\ndiscardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());\n}\nreturn true;\n}\nelse if (checkpoint != null) {\nthrow new IllegalStateException(\n\"Received message for discarded but non-removed checkpoint \" + checkpointId);\n}\nelse {\nboolean wasPendingCheckpoint;\nif (recentPendingCheckpoints.contains(checkpointId)) {\nwasPendingCheckpoint = true;\nLOG.warn(\"Received late message for now expired checkpoint attempt {} from task \" +\n\"{} of job {} at {}.\", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\n}\nelse {\nLOG.debug(\"Received message for an unknown checkpoint {} from task {} of job {} at {}.\",\ncheckpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);\nwasPendingCheckpoint = false;\n}\ndiscardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());\nreturn wasPendingCheckpoint;\n}\n}\n}\n/**\n* Try to complete the given pending checkpoint.\n*\n*

Important: This method should only be called in the checkpoint lock scope.\n*\n* @param pendingCheckpoint to complete\n* @throws CheckpointException if the completion failed\n*/\nprivate void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException {\nfinal long checkpointId = pendingCheckpoint.getCheckpointId();\nfinal CompletedCheckpoint completedCheckpoint;\nMap operatorStates = pendingCheckpoint.getOperatorStates();\nsharedStateRegistry.registerAll(operatorStates.values());\ntry {\ntry {\ncompletedCheckpoint = pendingCheckpoint.finalizeCheckpoint();\nfailureManager.handleCheckpointSuccess(pendingCheckpoint.getCheckpointId());\n}\ncatch (Exception e1) {\nif (!pendingCheckpoint.isDiscarded()) {\nabortPendingCheckpoint(\npendingCheckpoint,\nnew CheckpointException(\nCheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1));\n}\nthrow new CheckpointException(\"Could not finalize the pending checkpoint \" + checkpointId + '.',\nCheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1);\n}\nPreconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null);\ntry {\ncompletedCheckpointStore.addCheckpoint(completedCheckpoint);\n} catch (Exception exception) {\nexecutor.execute(new Runnable() {\n@Override\npublic void run() {\ntry {\ncompletedCheckpoint.discardOnFailedStoring();\n} catch (Throwable t) {\nLOG.warn(\"Could not properly discard completed checkpoint {}.\", completedCheckpoint.getCheckpointID(), t);\n}\n}\n});\nsendAbortedMessages(checkpointId, pendingCheckpoint.getCheckpointTimestamp());\nthrow new CheckpointException(\"Could not complete the pending checkpoint \" + checkpointId + '.',\nCheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, exception);\n}\n} finally {\npendingCheckpoints.remove(checkpointId);\ntimer.execute(this::executeQueuedRequest);\n}\nrememberRecentCheckpointId(checkpointId);\ndropSubsumedCheckpoints(checkpointId);\nlastCheckpointCompletionRelativeTime = clock.relativeTimeMillis();\nLOG.info(\"Completed checkpoint {} for job {} ({} bytes in {} ms).\", checkpointId, job,\ncompletedCheckpoint.getStateSize(), completedCheckpoint.getDuration());\nif (LOG.isDebugEnabled()) {\nStringBuilder builder = new StringBuilder();\nbuilder.append(\"Checkpoint state: \");\nfor (OperatorState state : completedCheckpoint.getOperatorStates().values()) {\nbuilder.append(state);\nbuilder.append(\", \");\n}\nbuilder.setLength(builder.length() - 2);\nLOG.debug(builder.toString());\n}\nsendAcknowledgeMessages(checkpointId, completedCheckpoint.getTimestamp());\n}\nprivate void sendAcknowledgeMessages(long checkpointId, long timestamp) {\nfor (ExecutionVertex ev : tasksToCommitTo) {\nExecution ee = ev.getCurrentExecutionAttempt();\nif (ee != null) {\nee.notifyCheckpointComplete(checkpointId, timestamp);\n}\n}\nfor (OperatorCoordinatorCheckpointContext coordinatorContext : coordinatorsToCheckpoint) {\ncoordinatorContext.checkpointComplete(checkpointId);\n}\n}\nprivate void sendAbortedMessages(long checkpointId, long timeStamp) {\nexecutor.execute(() -> {\nfor (ExecutionVertex ev : tasksToCommitTo) {\nExecution ee = ev.getCurrentExecutionAttempt();\nif (ee != null) {\nee.notifyCheckpointAborted(checkpointId, timeStamp);\n}\n}\n});\n}\n/**\n* Fails all pending checkpoints which have not been acknowledged by the given execution\n* attempt id.\n*\n* @param executionAttemptId for which to discard unacknowledged pending checkpoints\n* @param cause of the failure\n*/\npublic void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID 
executionAttemptId, Throwable cause) {\nsynchronized (lock) {\nabortPendingCheckpoints(\ncheckpoint -> !checkpoint.isAcknowledgedBy(executionAttemptId),\nnew CheckpointException(CheckpointFailureReason.TASK_FAILURE, cause));\n}\n}\nprivate void rememberRecentCheckpointId(long id) {\nif (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) {\nrecentPendingCheckpoints.removeFirst();\n}\nrecentPendingCheckpoints.addLast(id);\n}\nprivate void dropSubsumedCheckpoints(long checkpointId) {\nabortPendingCheckpoints(\ncheckpoint -> checkpoint.getCheckpointId() < checkpointId && checkpoint.canBeSubsumed(),\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_SUBSUMED));\n}\n/**\n* Restores the latest checkpointed state.\n*\n* @param tasks Map of job vertices to restore. State for these vertices is\n* restored via {@link Execution\n* @param errorIfNoCheckpoint Fail if no completed checkpoint is available to\n* restore from.\n* @param allowNonRestoredState Allow checkpoint state that cannot be mapped\n* to any job vertex in tasks.\n* @return true if state was restored, false otherwise.\n* @throws IllegalStateException If the CheckpointCoordinator is shut down.\n* @throws IllegalStateException If no completed checkpoint is available and\n* the failIfNoCheckpoint flag has been set.\n* @throws IllegalStateException If the checkpoint contains state that cannot be\n* mapped to any job vertex in tasks and the\n* allowNonRestoredState flag has not been set.\n* @throws IllegalStateException If the max parallelism changed for an operator\n* that restores state from this checkpoint.\n* @throws IllegalStateException If the parallelism changed for an operator\n* that restores non-partitioned state from this\n* checkpoint.\n*/\n@Deprecated\npublic boolean restoreLatestCheckpointedState(\nMap tasks,\nboolean errorIfNoCheckpoint,\nboolean allowNonRestoredState) throws Exception {\nreturn restoreLatestCheckpointedStateInternal(new HashSet<>(tasks.values()), true, errorIfNoCheckpoint, allowNonRestoredState);\n}\n/**\n* Restores the latest checkpointed state to a set of subtasks. This method represents a \"local\"\nor \"regional\" failover and does not restore states to coordinators. Note that a regional failover\nmight still include all tasks.\n*\n* @param tasks Set of job vertices to restore. 
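As above, the coordinator aborts pending checkpoints by first materializing the matches of a predicate (e.g. "not acknowledged by this execution attempt", "subsumed by a newer checkpoint") and only then failing them, since aborting removes entries from the map being iterated. A minimal sketch of that snapshot-then-abort pattern over a simplified map (types and names are stand-ins for the coordinator's own):

```java
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Predicate;

// Sketch: snapshot matching entries into an array first, because aborting
// removes them from the map and would otherwise invalidate the iteration.
public class PredicateAbort {
    private final Map<Long, String> pendingCheckpoints = new LinkedHashMap<>();

    void abortMatching(Predicate<Long> shouldFail, String reason) {
        Long[] toFail = pendingCheckpoints.keySet().stream()
                .filter(shouldFail)
                .toArray(Long[]::new);
        for (Long id : toFail) {
            pendingCheckpoints.remove(id);
            System.out.println("aborted checkpoint " + id + ": " + reason);
        }
    }

    public static void main(String[] args) {
        PredicateAbort coordinator = new PredicateAbort();
        coordinator.pendingCheckpoints.put(7L, "pending");
        coordinator.pendingCheckpoints.put(8L, "pending");
        coordinator.pendingCheckpoints.put(9L, "pending");
        // Subsumption-style predicate: everything older than checkpoint 9.
        coordinator.abortMatching(id -> id < 9L, "subsumed by checkpoint 9");
    }
}
```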
State for these vertices is\n* restored via {@link Execution\n* @return true if state was restored, false otherwise.\n* @throws IllegalStateException If the CheckpointCoordinator is shut down.\n* @throws IllegalStateException If no completed checkpoint is available and\n* the failIfNoCheckpoint flag has been set.\n* @throws IllegalStateException If the checkpoint contains state that cannot be\n* mapped to any job vertex in tasks and the\n* allowNonRestoredState flag has not been set.\n* @throws IllegalStateException If the max parallelism changed for an operator\n* that restores state from this checkpoint.\n* @throws IllegalStateException If the parallelism changed for an operator\n* that restores non-partitioned state from this\n* checkpoint.\n*/\npublic boolean restoreLatestCheckpointedStateToSubtasks(final Set tasks) throws Exception {\nreturn restoreLatestCheckpointedStateInternal(tasks, false, false, true);\n}\n/**\n* Restores the latest checkpointed state to all tasks and all coordinators.\n* This method represents a \"global restore\"-style operation where all stateful tasks\n* and coordinators from the given set of Job Vertices\n* are restored to their latest checkpointed state.\n*\n* @param tasks Set of job vertices to restore. State for these vertices is\n* restored via {@link Execution\n* @param allowNonRestoredState Allow checkpoint state that cannot be mapped\n* to any job vertex in tasks.\n* @return true if state was restored, false otherwise.\n* @throws IllegalStateException If the CheckpointCoordinator is shut down.\n* @throws IllegalStateException If no completed checkpoint is available and\n* the failIfNoCheckpoint flag has been set.\n* @throws IllegalStateException If the checkpoint contains state that cannot be\n* mapped to any job vertex in tasks and the\n* allowNonRestoredState flag has not been set.\n* @throws IllegalStateException If the max parallelism changed for an operator\n* that restores state from this checkpoint.\n* @throws IllegalStateException If the parallelism changed for an operator\n* that restores non-partitioned state from this\n* checkpoint.\n*/\npublic boolean restoreLatestCheckpointedStateToAll(\nfinal Set tasks,\nfinal boolean allowNonRestoredState) throws Exception {\nreturn restoreLatestCheckpointedStateInternal(tasks, true, false, allowNonRestoredState);\n}\nprivate boolean restoreLatestCheckpointedStateInternal(\nfinal Set tasks,\nfinal boolean restoreCoordinators,\nfinal boolean errorIfNoCheckpoint,\nfinal boolean allowNonRestoredState) throws Exception {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalStateException(\"CheckpointCoordinator is shut down\");\n}\nsharedStateRegistry.close();\nsharedStateRegistry = sharedStateRegistryFactory.create(executor);\ncompletedCheckpointStore.recover();\nfor (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) {\ncompletedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry);\n}\nLOG.debug(\"Status of the shared state registry of job {} after restore: {}.\", job, sharedStateRegistry);\nCompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint(isPreferCheckpointForRecovery);\nif (latest == null) {\nif (errorIfNoCheckpoint) {\nthrow new IllegalStateException(\"No completed checkpoint available\");\n} else {\nLOG.debug(\"Resetting the master hooks.\");\nMasterHooks.reset(masterHooks.values(), LOG);\nreturn false;\n}\n}\nLOG.info(\"Restoring job {} from latest valid checkpoint: {}.\", job, latest);\nfinal Map 
operatorStates = latest.getOperatorStates();\nStateAssignmentOperation stateAssignmentOperation =\nnew StateAssignmentOperation(latest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState);\nstateAssignmentOperation.assignStates();\nMasterHooks.restoreMasterHooks(\nmasterHooks,\nlatest.getMasterHookStates(),\nlatest.getCheckpointID(),\nallowNonRestoredState,\nLOG);\nif (restoreCoordinators) {\nrestoreStateToCoordinators(operatorStates);\n}\nif (statsTracker != null) {\nlong restoreTimestamp = System.currentTimeMillis();\nRestoredCheckpointStats restored = new RestoredCheckpointStats(\nlatest.getCheckpointID(),\nlatest.getProperties(),\nrestoreTimestamp,\nlatest.getExternalPointer());\nstatsTracker.reportRestoredCheckpoint(restored);\n}\nreturn true;\n}\n}\n/**\n* Restore the state with given savepoint.\n*\n* @param savepointPointer The pointer to the savepoint.\n* @param allowNonRestored True if allowing checkpoint state that cannot be\n* mapped to any job vertex in tasks.\n* @param tasks Map of job vertices to restore. State for these\n* vertices is restored via\n* {@link Execution\n* @param userClassLoader The class loader to resolve serialized classes in\n* legacy savepoint versions.\n*/\npublic boolean restoreSavepoint(\nString savepointPointer,\nboolean allowNonRestored,\nMap tasks,\nClassLoader userClassLoader) throws Exception {\nPreconditions.checkNotNull(savepointPointer, \"The savepoint path cannot be null.\");\nLOG.info(\"Starting job {} from savepoint {} ({})\",\njob, savepointPointer, (allowNonRestored ? \"allowing non restored state\" : \"\"));\nfinal CompletedCheckpointStorageLocation checkpointLocation = checkpointStorage.resolveCheckpoint(savepointPointer);\nCompletedCheckpoint savepoint = Checkpoints.loadAndValidateCheckpoint(\njob, tasks, checkpointLocation, userClassLoader, allowNonRestored);\ncompletedCheckpointStore.addCheckpoint(savepoint);\nlong nextCheckpointId = savepoint.getCheckpointID() + 1;\ncheckpointIdCounter.setCount(nextCheckpointId);\nLOG.info(\"Reset the checkpoint ID of job {} to {}.\", job, nextCheckpointId);\nreturn restoreLatestCheckpointedStateInternal(new HashSet<>(tasks.values()), true, true, allowNonRestored);\n}\npublic int getNumberOfPendingCheckpoints() {\nsynchronized (lock) {\nreturn this.pendingCheckpoints.size();\n}\n}\npublic int getNumberOfRetainedSuccessfulCheckpoints() {\nsynchronized (lock) {\nreturn completedCheckpointStore.getNumberOfRetainedCheckpoints();\n}\n}\npublic Map getPendingCheckpoints() {\nsynchronized (lock) {\nreturn new HashMap<>(this.pendingCheckpoints);\n}\n}\npublic List getSuccessfulCheckpoints() throws Exception {\nsynchronized (lock) {\nreturn completedCheckpointStore.getAllCheckpoints();\n}\n}\npublic CheckpointStorageCoordinatorView getCheckpointStorage() {\nreturn checkpointStorage;\n}\npublic CompletedCheckpointStore getCheckpointStore() {\nreturn completedCheckpointStore;\n}\npublic long getCheckpointTimeout() {\nreturn checkpointTimeout;\n}\n/**\n* @deprecated use {@link\n*/\n@Deprecated\n@VisibleForTesting\nPriorityQueue getTriggerRequestQueue() {\nreturn requestDecider.getTriggerRequestQueue();\n}\npublic boolean isTriggering() {\nreturn isTriggering;\n}\n@VisibleForTesting\nboolean isCurrentPeriodicTriggerAvailable() {\nreturn currentPeriodicTrigger != null;\n}\n/**\n* Returns whether periodic checkpointing has been configured.\n*\n* @return true if periodic checkpoints have been configured.\n*/\npublic boolean isPeriodicCheckpointingConfigured() {\nreturn baseInterval != 
Long.MAX_VALUE;\n}\npublic void startCheckpointScheduler() {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalArgumentException(\"Checkpoint coordinator is shut down\");\n}\nstopCheckpointScheduler();\nperiodicScheduling = true;\ncurrentPeriodicTrigger = scheduleTriggerWithDelay(getRandomInitDelay());\n}\n}\npublic void stopCheckpointScheduler() {\nsynchronized (lock) {\nperiodicScheduling = false;\ncancelPeriodicTrigger();\nfinal CheckpointException reason =\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND);\nabortPendingAndQueuedCheckpoints(reason);\nnumUnsuccessfulCheckpointsTriggers.set(0);\n}\n}\n/**\n* Aborts all the pending checkpoints due to an exception.\n* @param exception The exception.\n*/\npublic void abortPendingCheckpoints(CheckpointException exception) {\nsynchronized (lock) {\nabortPendingCheckpoints(ignored -> true, exception);\n}\n}\nprivate void abortPendingCheckpoints(\nPredicate checkpointToFailPredicate,\nCheckpointException exception) {\nassert Thread.holdsLock(lock);\nfinal PendingCheckpoint[] pendingCheckpointsToFail = pendingCheckpoints\n.values()\n.stream()\n.filter(checkpointToFailPredicate)\n.toArray(PendingCheckpoint[]::new);\nfor (PendingCheckpoint pendingCheckpoint : pendingCheckpointsToFail) {\nabortPendingCheckpoint(pendingCheckpoint, exception);\n}\n}\nprivate void rescheduleTrigger(long tillNextMillis) {\ncancelPeriodicTrigger();\ncurrentPeriodicTrigger = scheduleTriggerWithDelay(tillNextMillis);\n}\nprivate void cancelPeriodicTrigger() {\nif (currentPeriodicTrigger != null) {\ncurrentPeriodicTrigger.cancel(false);\ncurrentPeriodicTrigger = null;\n}\n}\nprivate long getRandomInitDelay() {\nreturn ThreadLocalRandom.current().nextLong(minPauseBetweenCheckpoints, baseInterval + 1L);\n}\nprivate ScheduledFuture scheduleTriggerWithDelay(long initDelay) {\nreturn timer.scheduleAtFixedRate(\nnew ScheduledTrigger(),\ninitDelay, baseInterval, TimeUnit.MILLISECONDS);\n}\nprivate void restoreStateToCoordinators(final Map operatorStates) throws Exception {\nfor (OperatorCoordinatorCheckpointContext coordContext : coordinatorsToCheckpoint) {\nfinal OperatorState state = operatorStates.get(coordContext.operatorId());\nif (state == null) {\ncontinue;\n}\nfinal ByteStreamStateHandle coordinatorState = state.getCoordinatorState();\nif (coordinatorState != null) {\ncoordContext.resetToCheckpoint(coordinatorState.getData());\n}\n}\n}\npublic JobStatusListener createActivatorDeactivator() {\nsynchronized (lock) {\nif (shutdown) {\nthrow new IllegalArgumentException(\"Checkpoint coordinator is shut down\");\n}\nif (jobStatusListener == null) {\njobStatusListener = new CheckpointCoordinatorDeActivator(this);\n}\nreturn jobStatusListener;\n}\n}\nint getNumQueuedRequests() {\nreturn requestDecider.getNumQueuedRequests();\n}\nprivate final class ScheduledTrigger implements Runnable {\n@Override\npublic void run() {\ntry {\ntriggerCheckpoint(true);\n}\ncatch (Exception e) {\nLOG.error(\"Exception while triggering checkpoint for job {}.\", job, e);\n}\n}\n}\n/**\n* Discards the given state object asynchronously belonging to the given job, execution attempt\n* id and checkpoint id.\n*\n* @param jobId identifying the job to which the state object belongs\n* @param executionAttemptID identifying the task to which the state object belongs\n* @param checkpointId of the state object\n* @param subtaskState to discard asynchronously\n*/\nprivate void discardSubtaskState(\nfinal JobID jobId,\nfinal ExecutionAttemptID executionAttemptID,\nfinal long 
checkpointId,\nfinal TaskStateSnapshot subtaskState) {\nif (subtaskState != null) {\nexecutor.execute(new Runnable() {\n@Override\npublic void run() {\ntry {\nsubtaskState.discardState();\n} catch (Throwable t2) {\nLOG.warn(\"Could not properly discard state object of checkpoint {} \" +\n\"belonging to task {} of job {}.\", checkpointId, executionAttemptID, jobId, t2);\n}\n}\n});\n}\n}\nprivate void abortPendingCheckpoint(\nPendingCheckpoint pendingCheckpoint,\nCheckpointException exception) {\nabortPendingCheckpoint(pendingCheckpoint, exception, null);\n}\nprivate void abortPendingCheckpoint(\nPendingCheckpoint pendingCheckpoint,\nCheckpointException exception,\n@Nullable final ExecutionAttemptID executionAttemptID) {\nassert(Thread.holdsLock(lock));\nif (!pendingCheckpoint.isDiscarded()) {\ntry {\npendingCheckpoint.abort(\nexception.getCheckpointFailureReason(), exception.getCause());\nif (pendingCheckpoint.getProps().isSavepoint() &&\npendingCheckpoint.getProps().isSynchronous()) {\nfailureManager.handleSynchronousSavepointFailure(exception);\n} else if (executionAttemptID != null) {\nfailureManager.handleTaskLevelCheckpointException(\nexception, pendingCheckpoint.getCheckpointId(), executionAttemptID);\n} else {\nfailureManager.handleJobLevelCheckpointException(\nexception, pendingCheckpoint.getCheckpointId());\n}\n} finally {\nsendAbortedMessages(pendingCheckpoint.getCheckpointId(), pendingCheckpoint.getCheckpointTimestamp());\npendingCheckpoints.remove(pendingCheckpoint.getCheckpointId());\nrememberRecentCheckpointId(pendingCheckpoint.getCheckpointId());\ntimer.execute(this::executeQueuedRequest);\n}\n}\n}\nprivate void preCheckGlobalState(boolean isPeriodic) throws CheckpointException {\nif (shutdown) {\nthrow new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);\n}\nif (isPeriodic && !periodicScheduling) {\nthrow new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN);\n}\n}\n/**\n* Check if all tasks that we need to trigger are running. If not, abort the checkpoint.\n*\n* @return the executions need to be triggered.\n* @throws CheckpointException the exception fails checking\n*/\nprivate Execution[] getTriggerExecutions() throws CheckpointException {\nExecution[] executions = new Execution[tasksToTrigger.length];\nfor (int i = 0; i < tasksToTrigger.length; i++) {\nExecution ee = tasksToTrigger[i].getCurrentExecutionAttempt();\nif (ee == null) {\nLOG.info(\n\"Checkpoint triggering task {} of job {} is not being executed at the moment. Aborting checkpoint.\",\ntasksToTrigger[i].getTaskNameWithSubtaskIndex(),\njob);\nthrow new CheckpointException(\nCheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);\n} else if (ee.getState() == ExecutionState.RUNNING) {\nexecutions[i] = ee;\n} else {\nLOG.info(\n\"Checkpoint triggering task {} of job {} is not in state {} but {} instead. 
Aborting checkpoint.\",\ntasksToTrigger[i].getTaskNameWithSubtaskIndex(),\njob,\nExecutionState.RUNNING,\nee.getState());\nthrow new CheckpointException(\nCheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);\n}\n}\nreturn executions;\n}\n/**\n* Check if all tasks that need to acknowledge the checkpoint are running.\n* If not, abort the checkpoint\n*\n* @return the execution vertices which should give an ack response\n* @throws CheckpointException the exception fails checking\n*/\nprivate Map getAckTasks() throws CheckpointException {\nMap ackTasks = new HashMap<>(tasksToWaitFor.length);\nfor (ExecutionVertex ev : tasksToWaitFor) {\nExecution ee = ev.getCurrentExecutionAttempt();\nif (ee != null) {\nackTasks.put(ee.getAttemptId(), ev);\n} else {\nLOG.info(\n\"Checkpoint acknowledging task {} of job {} is not being executed at the moment. Aborting checkpoint.\",\nev.getTaskNameWithSubtaskIndex(),\njob);\nthrow new CheckpointException(\nCheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);\n}\n}\nreturn ackTasks;\n}\nprivate void abortPendingAndQueuedCheckpoints(CheckpointException exception) {\nassert(Thread.holdsLock(lock));\nrequestDecider.abortAll(exception);\nabortPendingCheckpoints(exception);\n}\n/**\n* The canceller of checkpoint. The checkpoint might be cancelled if it doesn't finish in a\n* configured period.\n*/\nprivate class CheckpointCanceller implements Runnable {\nprivate final PendingCheckpoint pendingCheckpoint;\nprivate CheckpointCanceller(PendingCheckpoint pendingCheckpoint) {\nthis.pendingCheckpoint = checkNotNull(pendingCheckpoint);\n}\n@Override\npublic void run() {\nsynchronized (lock) {\nif (!pendingCheckpoint.isDiscarded()) {\nLOG.info(\"Checkpoint {} of job {} expired before completing.\",\npendingCheckpoint.getCheckpointId(), job);\nabortPendingCheckpoint(\npendingCheckpoint,\nnew CheckpointException(CheckpointFailureReason.CHECKPOINT_EXPIRED));\n}\n}\n}\n}\nprivate static CheckpointException getCheckpointException(\nCheckpointFailureReason defaultReason, Throwable throwable) {\nfinal Optional checkpointExceptionOptional =\nExceptionUtils.findThrowable(throwable, CheckpointException.class);\nreturn checkpointExceptionOptional\n.orElseGet(() -> new CheckpointException(defaultReason, throwable));\n}\nprivate static class CheckpointIdAndStorageLocation {\nprivate final long checkpointId;\nprivate final CheckpointStorageLocation checkpointStorageLocation;\nCheckpointIdAndStorageLocation(\nlong checkpointId,\nCheckpointStorageLocation checkpointStorageLocation) {\nthis.checkpointId = checkpointId;\nthis.checkpointStorageLocation = checkNotNull(checkpointStorageLocation);\n}\n}\nstatic class CheckpointTriggerRequest {\nfinal long timestamp;\nfinal CheckpointProperties props;\nfinal @Nullable String externalSavepointLocation;\nfinal boolean isPeriodic;\nfinal boolean advanceToEndOfTime;\nprivate final CompletableFuture onCompletionPromise = new CompletableFuture<>();\nCheckpointTriggerRequest(\nCheckpointProperties props,\n@Nullable String externalSavepointLocation,\nboolean isPeriodic,\nboolean advanceToEndOfTime) {\nthis.timestamp = System.currentTimeMillis();\nthis.props = checkNotNull(props);\nthis.externalSavepointLocation = externalSavepointLocation;\nthis.isPeriodic = isPeriodic;\nthis.advanceToEndOfTime = advanceToEndOfTime;\n}\nCompletableFuture getOnCompletionFuture() {\nreturn onCompletionPromise;\n}\npublic void completeExceptionally(CheckpointException exception) {\nonCompletionPromise.completeExceptionally(exception);\n}\npublic boolean isForce() 
{\nreturn props.forceCheckpoint();\n}\n}\n}" + }, + { + "comment": "Can't put it after `INSTANCE `, because must after `private Constructor`", + "method_body": "public static PipelineJobExecutor getInstance() {\nreturn INSTANCE;\n}", + "target_code": "}", + "method_body_after": "public static PipelineJobExecutor getInstance() {\nreturn INSTANCE;\n}", + "context_before": "class PipelineJobExecutor {\nprivate static final PipelineJobExecutor INSTANCE = new PipelineJobExecutor();\nprivate final Map listenerMap = new ConcurrentHashMap<>();\nprivate PipelineJobExecutor() {\nCollection instances = PipelineMetaDataChangedHandlerFactory.findAllInstance();\nfor (PipelineMetaDataChangedHandler each : instances) {\nlistenerMap.put(each.getKeyPattern(), each);\n}\nPipelineAPIFactory.getGovernanceRepositoryAPI().watch(DataPipelineConstants.DATA_PIPELINE_ROOT, this::dispatchEvent);\n}\nprivate void dispatchEvent(final DataChangedEvent event) {\nfor (Entry entry : listenerMap.entrySet()) {\nif (entry.getKey().matcher(event.getKey()).matches()) {\nlog.info(\"{} job config: {}\", event.getType(), event.getKey());\nJobConfigurationPOJO jobConfigPOJO;\ntry {\njobConfigPOJO = YamlEngine.unmarshal(event.getValue(), JobConfigurationPOJO.class, true);\n} catch (final Exception ex) {\nlog.error(\"analyze job config pojo failed.\", ex);\nreturn;\n}\nentry.getValue().handle(event, jobConfigPOJO);\nreturn;\n}\n}\n}\n/**\n* Get pipeline job executor instance.\n*\n* @return pipeline job executor\n*/\n}", + "context_after": "class PipelineJobExecutor {\nprivate static final PipelineJobExecutor INSTANCE = new PipelineJobExecutor();\nprivate final Map listenerMap = new ConcurrentHashMap<>();\nprivate PipelineJobExecutor() {\nCollection instances = PipelineMetaDataChangedHandlerFactory.findAllInstances();\nfor (PipelineMetaDataChangedHandler each : instances) {\nlistenerMap.put(each.getKeyPattern(), each);\n}\nPipelineAPIFactory.getGovernanceRepositoryAPI().watch(DataPipelineConstants.DATA_PIPELINE_ROOT, this::dispatchEvent);\n}\nprivate void dispatchEvent(final DataChangedEvent event) {\nfor (Entry entry : listenerMap.entrySet()) {\nif (entry.getKey().matcher(event.getKey()).matches()) {\nentry.getValue().handle(event);\nreturn;\n}\n}\n}\n/**\n* Get pipeline job executor instance.\n*\n* @return pipeline job executor\n*/\n}" + }, + { + "comment": "```suggestion boolean isEnableMemtableOnSinkNode = ((OlapTable) table).getTableProperty().getUseSchemaLightChange() ? this.enableMemTableOnSinkNode: false; ```", + "method_body": "private void createLoadingTask(Database db, BrokerPendingTaskAttachment attachment) throws UserException {\nList
<Table>
tableList = db.getTablesOnIdOrderOrThrowException(\nLists.newArrayList(fileGroupAggInfo.getAllTableIds()));\nList newLoadingTasks = Lists.newArrayList();\nthis.jobProfile = new Profile(\"BrokerLoadJob \" + id + \". \" + label, true);\nProgressManager progressManager = Env.getCurrentProgressManager();\nprogressManager.registerProgressSimple(String.valueOf(id));\nMetaLockUtils.readLockTables(tableList);\ntry {\nfor (Map.Entry> entry\n: fileGroupAggInfo.getAggKeyToFileGroups().entrySet()) {\nFileGroupAggKey aggKey = entry.getKey();\nList brokerFileGroups = entry.getValue();\nlong tableId = aggKey.getTableId();\nOlapTable table = (OlapTable) db.getTableNullable(tableId);\nboolean isEnableMemtableOnSinkNode = !((OlapTable) table).getTableProperty().getUseSchemaLightChange()\n? false : this.enableMemTableOnSinkNode;\nLoadLoadingTask task = new LoadLoadingTask(db, table, brokerDesc,\nbrokerFileGroups, getDeadlineMs(), getExecMemLimit(),\nisStrictMode(), isPartialUpdate(), transactionId, this, getTimeZone(), getTimeout(),\ngetLoadParallelism(), getSendBatchParallelism(),\ngetMaxFilterRatio() <= 0, enableProfile ? jobProfile : null, isSingleTabletLoadPerSink(),\nuseNewLoadScanNode(), getPriority(), isEnableMemtableOnSinkNode);\nUUID uuid = UUID.randomUUID();\nTUniqueId loadId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());\ntask.init(loadId, attachment.getFileStatusByTable(aggKey),\nattachment.getFileNumByTable(aggKey), getUserInfo());\nidToTasks.put(task.getSignature(), task);\nnewLoadingTasks.add(task);\nTransactionState txnState = Env.getCurrentGlobalTransactionMgr()\n.getTransactionState(dbId, transactionId);\nif (txnState == null) {\nthrow new UserException(\"txn does not exist: \" + transactionId);\n}\ntxnState.addTableIndexes(table);\nif (isPartialUpdate()) {\ntxnState.setSchemaForPartialUpdate(table);\n}\n}\n} finally {\nMetaLockUtils.readUnlockTables(tableList);\n}\nfor (LoadTask loadTask : newLoadingTasks) {\nEnv.getCurrentEnv().getLoadingLoadTaskScheduler().submit(loadTask);\n}\n}", + "target_code": "? false : this.enableMemTableOnSinkNode;", + "method_body_after": "private void createLoadingTask(Database db, BrokerPendingTaskAttachment attachment) throws UserException {\nList
<Table>
tableList = db.getTablesOnIdOrderOrThrowException(\nLists.newArrayList(fileGroupAggInfo.getAllTableIds()));\nList newLoadingTasks = Lists.newArrayList();\nthis.jobProfile = new Profile(\"BrokerLoadJob \" + id + \". \" + label, true);\nProgressManager progressManager = Env.getCurrentProgressManager();\nprogressManager.registerProgressSimple(String.valueOf(id));\nMetaLockUtils.readLockTables(tableList);\ntry {\nfor (Map.Entry> entry\n: fileGroupAggInfo.getAggKeyToFileGroups().entrySet()) {\nFileGroupAggKey aggKey = entry.getKey();\nList brokerFileGroups = entry.getValue();\nlong tableId = aggKey.getTableId();\nOlapTable table = (OlapTable) db.getTableNullable(tableId);\nboolean isEnableMemtableOnSinkNode = ((OlapTable) table).getTableProperty().getUseSchemaLightChange()\n? this.enableMemTableOnSinkNode : false;\nLoadLoadingTask task = new LoadLoadingTask(db, table, brokerDesc,\nbrokerFileGroups, getDeadlineMs(), getExecMemLimit(),\nisStrictMode(), isPartialUpdate(), transactionId, this, getTimeZone(), getTimeout(),\ngetLoadParallelism(), getSendBatchParallelism(),\ngetMaxFilterRatio() <= 0, enableProfile ? jobProfile : null, isSingleTabletLoadPerSink(),\nuseNewLoadScanNode(), getPriority(), isEnableMemtableOnSinkNode);\nUUID uuid = UUID.randomUUID();\nTUniqueId loadId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());\ntask.init(loadId, attachment.getFileStatusByTable(aggKey),\nattachment.getFileNumByTable(aggKey), getUserInfo());\nidToTasks.put(task.getSignature(), task);\nnewLoadingTasks.add(task);\nTransactionState txnState = Env.getCurrentGlobalTransactionMgr()\n.getTransactionState(dbId, transactionId);\nif (txnState == null) {\nthrow new UserException(\"txn does not exist: \" + transactionId);\n}\ntxnState.addTableIndexes(table);\nif (isPartialUpdate()) {\ntxnState.setSchemaForPartialUpdate(table);\n}\n}\n} finally {\nMetaLockUtils.readUnlockTables(tableList);\n}\nfor (LoadTask loadTask : newLoadingTasks) {\nEnv.getCurrentEnv().getLoadingLoadTaskScheduler().submit(loadTask);\n}\n}", + "context_before": "class BrokerLoadJob extends BulkLoadJob {\nprivate static final Logger LOG = LogManager.getLogger(BrokerLoadJob.class);\nprivate Profile jobProfile;\nprivate boolean enableProfile = false;\nprivate boolean enableMemTableOnSinkNode = false;\npublic BrokerLoadJob() {\nsuper(EtlJobType.BROKER);\n}\npublic BrokerLoadJob(long dbId, String label, BrokerDesc brokerDesc,\nOriginStatement originStmt, UserIdentity userInfo)\nthrows MetaNotFoundException {\nsuper(EtlJobType.BROKER, dbId, label, originStmt, userInfo);\nthis.brokerDesc = brokerDesc;\nif (ConnectContext.get() != null) {\nenableProfile = ConnectContext.get().getSessionVariable().enableProfile();\nenableMemTableOnSinkNode = ConnectContext.get().getSessionVariable().enableMemtableOnSinkNode;\n}\n}\n@Override\npublic void beginTxn()\nthrows LabelAlreadyUsedException, BeginTransactionException, AnalysisException, DuplicatedRequestException,\nQuotaExceedException, MetaNotFoundException {\ntransactionId = Env.getCurrentGlobalTransactionMgr()\n.beginTransaction(dbId, Lists.newArrayList(fileGroupAggInfo.getAllTableIds()), label, null,\nnew TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),\nTransactionState.LoadJobSourceType.BATCH_LOAD_JOB, id,\ngetTimeout());\n}\n@Override\nprotected void unprotectedExecuteJob() {\nLoadTask task = new BrokerLoadPendingTask(this, fileGroupAggInfo.getAggKeyToFileGroups(),\nbrokerDesc, getPriority());\nidToTasks.put(task.getSignature(), 
task);\nEnv.getCurrentEnv().getPendingLoadTaskScheduler().submit(task);\n}\n/**\n* Situation1: When attachment is instance of BrokerPendingTaskAttachment,\n* this method is called by broker pending task.\n* LoadLoadingTask will be created after BrokerPendingTask is finished.\n* Situation2: When attachment is instance of BrokerLoadingTaskAttachment, this method is called by LoadLoadingTask.\n* CommitTxn will be called after all of LoadingTasks are finished.\n*\n* @param attachment\n*/\n@Override\npublic void onTaskFinished(TaskAttachment attachment) {\nif (attachment instanceof BrokerPendingTaskAttachment) {\nonPendingTaskFinished((BrokerPendingTaskAttachment) attachment);\n} else if (attachment instanceof BrokerLoadingTaskAttachment) {\nonLoadingTaskFinished((BrokerLoadingTaskAttachment) attachment);\n}\n}\n/**\n* step1: divide job into loading task\n* step2: init the plan of task\n* step3: submit tasks into loadingTaskExecutor\n* @param attachment BrokerPendingTaskAttachment\n*/\nprivate void onPendingTaskFinished(BrokerPendingTaskAttachment attachment) {\nwriteLock();\ntry {\nif (isTxnDone()) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"state\", state)\n.add(\"error_msg\", \"this task will be ignored when job is: \" + state)\n.build());\nreturn;\n}\nif (finishedTaskIds.contains(attachment.getTaskId())) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"task_id\", attachment.getTaskId())\n.add(\"error_msg\", \"this is a duplicated callback of pending task \"\n+ \"when broker already has loading task\")\n.build());\nreturn;\n}\nfinishedTaskIds.add(attachment.getTaskId());\n} finally {\nwriteUnlock();\n}\ntry {\nDatabase db = getDb();\ncreateLoadingTask(db, attachment);\n} catch (UserException e) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"database_id\", dbId)\n.add(\"error_msg\", \"Failed to divide job into loading task.\")\n.build(), e);\ncancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_RUN_FAIL, e.getMessage()), true, true);\nreturn;\n} catch (RejectedExecutionException e) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"database_id\", dbId)\n.add(\"error_msg\", \"the task queque is full.\")\n.build(), e);\ncancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_RUN_FAIL, e.getMessage()), true, true);\nreturn;\n}\nloadStartTimestamp = System.currentTimeMillis();\n}\nprivate void onLoadingTaskFinished(BrokerLoadingTaskAttachment attachment) {\nwriteLock();\ntry {\nif (isTxnDone()) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"state\", state)\n.add(\"error_msg\", \"this task will be ignored when job is: \" + state)\n.build());\nreturn;\n}\nif (finishedTaskIds.contains(attachment.getTaskId())) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"task_id\", attachment.getTaskId())\n.add(\"error_msg\", \"this is a duplicated callback of loading task\").build());\nreturn;\n}\nfinishedTaskIds.add(attachment.getTaskId());\nupdateLoadingStatus(attachment);\nif (finishedTaskIds.size() != idToTasks.size()) {\nreturn;\n}\n} finally {\nwriteUnlock();\n}\nif (LOG.isDebugEnabled()) {\nLOG.debug(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"commit_infos\", Joiner.on(\",\").join(commitInfos))\n.build());\n}\nif (!checkDataQuality()) {\ncancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_QUALITY_UNSATISFIED,\nDataQualityException.QUALITY_FAIL_MSG), true, true);\nreturn;\n}\nDatabase db = null;\nList
<Table>
tableList = null;\ntry {\ndb = getDb();\ntableList = db.getTablesOnIdOrderOrThrowException(Lists.newArrayList(fileGroupAggInfo.getAllTableIds()));\nMetaLockUtils.writeLockTablesOrMetaException(tableList);\n} catch (MetaNotFoundException e) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"database_id\", dbId)\n.add(\"error_msg\", \"db has been deleted when job is loading\")\n.build(), e);\ncancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.LOAD_RUN_FAIL, e.getMessage()), true, true);\nreturn;\n}\ntry {\nLOG.info(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"txn_id\", transactionId)\n.add(\"msg\", \"Load job try to commit txn\")\n.build());\nEnv.getCurrentGlobalTransactionMgr().commitTransaction(\ndbId, tableList, transactionId, commitInfos,\nnew LoadJobFinalOperation(id, loadingStatus, progress, loadStartTimestamp,\nfinishTimestamp, state, failMsg));\n} catch (UserException e) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"database_id\", dbId)\n.add(\"error_msg\", \"Failed to commit txn with error:\" + e.getMessage())\n.build(), e);\ncancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.LOAD_RUN_FAIL, e.getMessage()), true, true);\n} finally {\nMetaLockUtils.writeUnlockTables(tableList);\n}\n}\nprivate void writeProfile() {\nif (!enableProfile) {\nreturn;\n}\njobProfile.update(createTimestamp, getSummaryInfo(true), true,\nInteger.valueOf(sessionVariables.getOrDefault(SessionVariable.PROFILE_LEVEL, \"3\")), null, false);\n}\nprivate Map getSummaryInfo(boolean isFinished) {\nlong currentTimestamp = System.currentTimeMillis();\nSummaryBuilder builder = new SummaryBuilder();\nbuilder.profileId(String.valueOf(id));\nif (Version.DORIS_BUILD_VERSION_MAJOR == 0) {\nbuilder.dorisVersion(Version.DORIS_BUILD_SHORT_HASH);\n} else {\nbuilder.dorisVersion(Version.DORIS_BUILD_VERSION + \"-\" + Version.DORIS_BUILD_SHORT_HASH);\n}\nbuilder.taskType(ProfileType.LOAD.name());\nbuilder.startTime(TimeUtils.longToTimeString(createTimestamp));\nif (isFinished) {\nbuilder.endTime(TimeUtils.longToTimeString(currentTimestamp));\nbuilder.totalTime(DebugUtil.getPrettyStringMs(currentTimestamp - createTimestamp));\n}\nbuilder.taskState(\"FINISHED\");\nbuilder.user(getUserInfo() != null ? getUserInfo().getQualifiedUser() : \"N/A\");\nbuilder.defaultDb(getDefaultDb());\nbuilder.sqlStatement(getOriginStmt().originStmt);\nreturn builder.build();\n}\nprivate String getDefaultDb() {\nDatabase database = Env.getCurrentEnv().getInternalCatalog().getDb(this.dbId).orElse(null);\nreturn database == null ? 
\"N/A\" : database.getFullName();\n}\nprivate void updateLoadingStatus(BrokerLoadingTaskAttachment attachment) {\nloadingStatus.replaceCounter(DPP_ABNORMAL_ALL,\nincreaseCounter(DPP_ABNORMAL_ALL, attachment.getCounter(DPP_ABNORMAL_ALL)));\nloadingStatus.replaceCounter(DPP_NORMAL_ALL,\nincreaseCounter(DPP_NORMAL_ALL, attachment.getCounter(DPP_NORMAL_ALL)));\nloadingStatus.replaceCounter(UNSELECTED_ROWS,\nincreaseCounter(UNSELECTED_ROWS, attachment.getCounter(UNSELECTED_ROWS)));\nif (attachment.getTrackingUrl() != null) {\nloadingStatus.setTrackingUrl(attachment.getTrackingUrl());\n}\ncommitInfos.addAll(attachment.getCommitInfoList());\nerrorTabletInfos.addAll(attachment.getErrorTabletInfos().stream().limit(Config.max_error_tablet_of_broker_load)\n.collect(Collectors.toList()));\nprogress = (int) ((double) finishedTaskIds.size() / idToTasks.size() * 100);\nif (progress == 100) {\nprogress = 99;\n}\n}\n@Override\npublic void updateProgress(Long beId, TUniqueId loadId, TUniqueId fragmentId, long scannedRows,\nlong scannedBytes, boolean isDone) {\nsuper.updateProgress(beId, loadId, fragmentId, scannedRows, scannedBytes, isDone);\nprogress = (int) ((double) loadStatistic.getLoadBytes() / loadStatistic.totalFileSizeB * 100);\nif (progress >= 100) {\nprogress = 99;\n}\n}\nprivate String increaseCounter(String key, String deltaValue) {\nlong value = 0;\nif (loadingStatus.getCounters().containsKey(key)) {\nvalue = Long.valueOf(loadingStatus.getCounters().get(key));\n}\nif (deltaValue != null) {\nvalue += Long.valueOf(deltaValue);\n}\nreturn String.valueOf(value);\n}\n@Override\npublic void afterVisible(TransactionState txnState, boolean txnOperated) {\nsuper.afterVisible(txnState, txnOperated);\nwriteProfile();\n}\n@Override\npublic String getResourceName() {\nStorageBackend.StorageType storageType = brokerDesc.getStorageType();\nif (storageType == StorageBackend.StorageType.BROKER) {\nreturn brokerDesc.getName();\n} else if (storageType == StorageBackend.StorageType.S3) {\nreturn Optional.ofNullable(brokerDesc.getProperties())\n.map(o -> o.get(S3Properties.Env.ENDPOINT))\n.orElse(\"s3_cluster\");\n} else {\nreturn storageType.name().toLowerCase().concat(\"_cluster\");\n}\n}\n}", + "context_after": "class BrokerLoadJob extends BulkLoadJob {\nprivate static final Logger LOG = LogManager.getLogger(BrokerLoadJob.class);\nprivate Profile jobProfile;\nprivate boolean enableProfile = false;\nprivate boolean enableMemTableOnSinkNode = false;\npublic BrokerLoadJob() {\nsuper(EtlJobType.BROKER);\n}\npublic BrokerLoadJob(long dbId, String label, BrokerDesc brokerDesc,\nOriginStatement originStmt, UserIdentity userInfo)\nthrows MetaNotFoundException {\nsuper(EtlJobType.BROKER, dbId, label, originStmt, userInfo);\nthis.brokerDesc = brokerDesc;\nif (ConnectContext.get() != null) {\nenableProfile = ConnectContext.get().getSessionVariable().enableProfile();\nenableMemTableOnSinkNode = ConnectContext.get().getSessionVariable().enableMemtableOnSinkNode;\n}\n}\n@Override\npublic void beginTxn()\nthrows LabelAlreadyUsedException, BeginTransactionException, AnalysisException, DuplicatedRequestException,\nQuotaExceedException, MetaNotFoundException {\ntransactionId = Env.getCurrentGlobalTransactionMgr()\n.beginTransaction(dbId, Lists.newArrayList(fileGroupAggInfo.getAllTableIds()), label, null,\nnew TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),\nTransactionState.LoadJobSourceType.BATCH_LOAD_JOB, id,\ngetTimeout());\n}\n@Override\nprotected void unprotectedExecuteJob() {\nLoadTask task = new 
BrokerLoadPendingTask(this, fileGroupAggInfo.getAggKeyToFileGroups(),\nbrokerDesc, getPriority());\nidToTasks.put(task.getSignature(), task);\nEnv.getCurrentEnv().getPendingLoadTaskScheduler().submit(task);\n}\n/**\n* Situation1: When attachment is instance of BrokerPendingTaskAttachment,\n* this method is called by broker pending task.\n* LoadLoadingTask will be created after BrokerPendingTask is finished.\n* Situation2: When attachment is instance of BrokerLoadingTaskAttachment, this method is called by LoadLoadingTask.\n* CommitTxn will be called after all of LoadingTasks are finished.\n*\n* @param attachment\n*/\n@Override\npublic void onTaskFinished(TaskAttachment attachment) {\nif (attachment instanceof BrokerPendingTaskAttachment) {\nonPendingTaskFinished((BrokerPendingTaskAttachment) attachment);\n} else if (attachment instanceof BrokerLoadingTaskAttachment) {\nonLoadingTaskFinished((BrokerLoadingTaskAttachment) attachment);\n}\n}\n/**\n* step1: divide job into loading task\n* step2: init the plan of task\n* step3: submit tasks into loadingTaskExecutor\n* @param attachment BrokerPendingTaskAttachment\n*/\nprivate void onPendingTaskFinished(BrokerPendingTaskAttachment attachment) {\nwriteLock();\ntry {\nif (isTxnDone()) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"state\", state)\n.add(\"error_msg\", \"this task will be ignored when job is: \" + state)\n.build());\nreturn;\n}\nif (finishedTaskIds.contains(attachment.getTaskId())) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"task_id\", attachment.getTaskId())\n.add(\"error_msg\", \"this is a duplicated callback of pending task \"\n+ \"when broker already has loading task\")\n.build());\nreturn;\n}\nfinishedTaskIds.add(attachment.getTaskId());\n} finally {\nwriteUnlock();\n}\ntry {\nDatabase db = getDb();\ncreateLoadingTask(db, attachment);\n} catch (UserException e) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"database_id\", dbId)\n.add(\"error_msg\", \"Failed to divide job into loading task.\")\n.build(), e);\ncancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_RUN_FAIL, e.getMessage()), true, true);\nreturn;\n} catch (RejectedExecutionException e) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"database_id\", dbId)\n.add(\"error_msg\", \"the task queque is full.\")\n.build(), e);\ncancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_RUN_FAIL, e.getMessage()), true, true);\nreturn;\n}\nloadStartTimestamp = System.currentTimeMillis();\n}\nprivate void onLoadingTaskFinished(BrokerLoadingTaskAttachment attachment) {\nwriteLock();\ntry {\nif (isTxnDone()) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"state\", state)\n.add(\"error_msg\", \"this task will be ignored when job is: \" + state)\n.build());\nreturn;\n}\nif (finishedTaskIds.contains(attachment.getTaskId())) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"task_id\", attachment.getTaskId())\n.add(\"error_msg\", \"this is a duplicated callback of loading task\").build());\nreturn;\n}\nfinishedTaskIds.add(attachment.getTaskId());\nupdateLoadingStatus(attachment);\nif (finishedTaskIds.size() != idToTasks.size()) {\nreturn;\n}\n} finally {\nwriteUnlock();\n}\nif (LOG.isDebugEnabled()) {\nLOG.debug(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"commit_infos\", Joiner.on(\",\").join(commitInfos))\n.build());\n}\nif (!checkDataQuality()) {\ncancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.ETL_QUALITY_UNSATISFIED,\nDataQualityException.QUALITY_FAIL_MSG), true, true);\nreturn;\n}\nDatabase db = null;\nList
<Table>
tableList = null;\ntry {\ndb = getDb();\ntableList = db.getTablesOnIdOrderOrThrowException(Lists.newArrayList(fileGroupAggInfo.getAllTableIds()));\nMetaLockUtils.writeLockTablesOrMetaException(tableList);\n} catch (MetaNotFoundException e) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"database_id\", dbId)\n.add(\"error_msg\", \"db has been deleted when job is loading\")\n.build(), e);\ncancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.LOAD_RUN_FAIL, e.getMessage()), true, true);\nreturn;\n}\ntry {\nLOG.info(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"txn_id\", transactionId)\n.add(\"msg\", \"Load job try to commit txn\")\n.build());\nEnv.getCurrentGlobalTransactionMgr().commitTransaction(\ndbId, tableList, transactionId, commitInfos,\nnew LoadJobFinalOperation(id, loadingStatus, progress, loadStartTimestamp,\nfinishTimestamp, state, failMsg));\n} catch (UserException e) {\nLOG.warn(new LogBuilder(LogKey.LOAD_JOB, id)\n.add(\"database_id\", dbId)\n.add(\"error_msg\", \"Failed to commit txn with error:\" + e.getMessage())\n.build(), e);\ncancelJobWithoutCheck(new FailMsg(FailMsg.CancelType.LOAD_RUN_FAIL, e.getMessage()), true, true);\n} finally {\nMetaLockUtils.writeUnlockTables(tableList);\n}\n}\nprivate void writeProfile() {\nif (!enableProfile) {\nreturn;\n}\njobProfile.update(createTimestamp, getSummaryInfo(true), true,\nInteger.valueOf(sessionVariables.getOrDefault(SessionVariable.PROFILE_LEVEL, \"3\")), null, false);\n}\nprivate Map getSummaryInfo(boolean isFinished) {\nlong currentTimestamp = System.currentTimeMillis();\nSummaryBuilder builder = new SummaryBuilder();\nbuilder.profileId(String.valueOf(id));\nif (Version.DORIS_BUILD_VERSION_MAJOR == 0) {\nbuilder.dorisVersion(Version.DORIS_BUILD_SHORT_HASH);\n} else {\nbuilder.dorisVersion(Version.DORIS_BUILD_VERSION + \"-\" + Version.DORIS_BUILD_SHORT_HASH);\n}\nbuilder.taskType(ProfileType.LOAD.name());\nbuilder.startTime(TimeUtils.longToTimeString(createTimestamp));\nif (isFinished) {\nbuilder.endTime(TimeUtils.longToTimeString(currentTimestamp));\nbuilder.totalTime(DebugUtil.getPrettyStringMs(currentTimestamp - createTimestamp));\n}\nbuilder.taskState(\"FINISHED\");\nbuilder.user(getUserInfo() != null ? getUserInfo().getQualifiedUser() : \"N/A\");\nbuilder.defaultDb(getDefaultDb());\nbuilder.sqlStatement(getOriginStmt().originStmt);\nreturn builder.build();\n}\nprivate String getDefaultDb() {\nDatabase database = Env.getCurrentEnv().getInternalCatalog().getDb(this.dbId).orElse(null);\nreturn database == null ? 
\"N/A\" : database.getFullName();\n}\nprivate void updateLoadingStatus(BrokerLoadingTaskAttachment attachment) {\nloadingStatus.replaceCounter(DPP_ABNORMAL_ALL,\nincreaseCounter(DPP_ABNORMAL_ALL, attachment.getCounter(DPP_ABNORMAL_ALL)));\nloadingStatus.replaceCounter(DPP_NORMAL_ALL,\nincreaseCounter(DPP_NORMAL_ALL, attachment.getCounter(DPP_NORMAL_ALL)));\nloadingStatus.replaceCounter(UNSELECTED_ROWS,\nincreaseCounter(UNSELECTED_ROWS, attachment.getCounter(UNSELECTED_ROWS)));\nif (attachment.getTrackingUrl() != null) {\nloadingStatus.setTrackingUrl(attachment.getTrackingUrl());\n}\ncommitInfos.addAll(attachment.getCommitInfoList());\nerrorTabletInfos.addAll(attachment.getErrorTabletInfos().stream().limit(Config.max_error_tablet_of_broker_load)\n.collect(Collectors.toList()));\nprogress = (int) ((double) finishedTaskIds.size() / idToTasks.size() * 100);\nif (progress == 100) {\nprogress = 99;\n}\n}\n@Override\npublic void updateProgress(Long beId, TUniqueId loadId, TUniqueId fragmentId, long scannedRows,\nlong scannedBytes, boolean isDone) {\nsuper.updateProgress(beId, loadId, fragmentId, scannedRows, scannedBytes, isDone);\nprogress = (int) ((double) loadStatistic.getLoadBytes() / loadStatistic.totalFileSizeB * 100);\nif (progress >= 100) {\nprogress = 99;\n}\n}\nprivate String increaseCounter(String key, String deltaValue) {\nlong value = 0;\nif (loadingStatus.getCounters().containsKey(key)) {\nvalue = Long.valueOf(loadingStatus.getCounters().get(key));\n}\nif (deltaValue != null) {\nvalue += Long.valueOf(deltaValue);\n}\nreturn String.valueOf(value);\n}\n@Override\npublic void afterVisible(TransactionState txnState, boolean txnOperated) {\nsuper.afterVisible(txnState, txnOperated);\nwriteProfile();\n}\n@Override\npublic String getResourceName() {\nStorageBackend.StorageType storageType = brokerDesc.getStorageType();\nif (storageType == StorageBackend.StorageType.BROKER) {\nreturn brokerDesc.getName();\n} else if (storageType == StorageBackend.StorageType.S3) {\nreturn Optional.ofNullable(brokerDesc.getProperties())\n.map(o -> o.get(S3Properties.Env.ENDPOINT))\n.orElse(\"s3_cluster\");\n} else {\nreturn storageType.name().toLowerCase().concat(\"_cluster\");\n}\n}\n}" + }, + { + "comment": "```suggestion byte[] byteArray = rawString.getBytes(StandardCharsets.US_ASCII); ```", + "method_body": "public void testNestedContext() throws Exception {\nDataSampler sampler = new DataSampler();\nString rawString = \"hello\";\nbyte[] byteArray = rawString.getBytes(Charset.forName(\"ASCII\"));\nByteArrayCoder coder = ByteArrayCoder.of();\nsampler.sampleOutput(\"pcollection-id\", coder).sample(byteArray);\nBeamFnApi.InstructionResponse samples = getAllSamples(sampler);\nassertHasSamples(samples, \"pcollection-id\", Collections.singleton(encodeByteArray(byteArray)));\n}", + "target_code": "byte[] byteArray = rawString.getBytes(Charset.forName(\"ASCII\"));", + "method_body_after": "public void testNestedContext() throws Exception {\nDataSampler sampler = new DataSampler();\nString rawString = \"hello\";\nbyte[] byteArray = rawString.getBytes(StandardCharsets.US_ASCII);\nByteArrayCoder coder = ByteArrayCoder.of();\nsampler.sampleOutput(\"pcollection-id\", coder).sample(byteArray);\nBeamFnApi.InstructionResponse samples = getAllSamples(sampler);\nassertHasSamples(samples, \"pcollection-id\", Collections.singleton(encodeByteArray(byteArray)));\n}", + "context_before": "class DataSamplerTest {\nbyte[] encodeInt(Integer i) throws IOException {\nVarIntCoder coder = VarIntCoder.of();\nByteArrayOutputStream 
stream = new ByteArrayOutputStream();\ncoder.encode(i, stream, Coder.Context.NESTED);\nreturn stream.toByteArray();\n}\nbyte[] encodeString(String s) throws IOException {\nStringUtf8Coder coder = StringUtf8Coder.of();\nByteArrayOutputStream stream = new ByteArrayOutputStream();\ncoder.encode(s, stream, Coder.Context.NESTED);\nreturn stream.toByteArray();\n}\nbyte[] encodeByteArray(byte[] b) throws IOException {\nByteArrayCoder coder = ByteArrayCoder.of();\nByteArrayOutputStream stream = new ByteArrayOutputStream();\ncoder.encode(b, stream, Coder.Context.NESTED);\nreturn stream.toByteArray();\n}\nBeamFnApi.InstructionResponse getAllSamples(DataSampler dataSampler) {\nBeamFnApi.InstructionRequest request =\nBeamFnApi.InstructionRequest.newBuilder()\n.setSampleData(BeamFnApi.SampleDataRequest.newBuilder().build())\n.build();\nreturn dataSampler.handleDataSampleRequest(request).build();\n}\nBeamFnApi.InstructionResponse getSamplesForPCollection(\nDataSampler dataSampler, String pcollection) {\nBeamFnApi.InstructionRequest request =\nBeamFnApi.InstructionRequest.newBuilder()\n.setSampleData(\nBeamFnApi.SampleDataRequest.newBuilder().addPcollectionIds(pcollection).build())\n.build();\nreturn dataSampler.handleDataSampleRequest(request).build();\n}\nBeamFnApi.InstructionResponse getSamplesForPCollections(\nDataSampler dataSampler, Iterable pcollections) {\nBeamFnApi.InstructionRequest request =\nBeamFnApi.InstructionRequest.newBuilder()\n.setSampleData(\nBeamFnApi.SampleDataRequest.newBuilder().addAllPcollectionIds(pcollections).build())\n.build();\nreturn dataSampler.handleDataSampleRequest(request).build();\n}\nvoid assertHasSamples(\nBeamFnApi.InstructionResponse response, String pcollection, Iterable elements) {\nMap elementSamplesMap =\nresponse.getSampleData().getElementSamplesMap();\nassertFalse(elementSamplesMap.isEmpty());\nBeamFnApi.SampleDataResponse.ElementList elementList = elementSamplesMap.get(pcollection);\nassertNotNull(elementList);\nList expectedSamples = new ArrayList<>();\nfor (byte[] el : elements) {\nexpectedSamples.add(\nBeamFnApi.SampledElement.newBuilder().setElement(ByteString.copyFrom(el)).build());\n}\nassertTrue(elementList.getElementsList().containsAll(expectedSamples));\n}\n/**\n* Smoke test that a samples show in the output map.\n*\n* @throws Exception\n*/\n@Test\npublic void testSingleOutput() throws Exception {\nDataSampler sampler = new DataSampler();\nVarIntCoder coder = VarIntCoder.of();\nsampler.sampleOutput(\"pcollection-id\", coder).sample(1);\nBeamFnApi.InstructionResponse samples = getAllSamples(sampler);\nassertHasSamples(samples, \"pcollection-id\", Collections.singleton(encodeInt(1)));\n}\n/**\n* Smoke test that a samples show in the output map.\n*\n* @throws Exception\n*/\n@Test\n/**\n* Test that sampling multiple PCollections under the same descriptor is OK.\n*\n* @throws Exception\n*/\n@Test\npublic void testMultipleOutputs() throws Exception {\nDataSampler sampler = new DataSampler();\nVarIntCoder coder = VarIntCoder.of();\nsampler.sampleOutput(\"pcollection-id-1\", coder).sample(1);\nsampler.sampleOutput(\"pcollection-id-2\", coder).sample(2);\nBeamFnApi.InstructionResponse samples = getAllSamples(sampler);\nassertHasSamples(samples, \"pcollection-id-1\", Collections.singleton(encodeInt(1)));\nassertHasSamples(samples, \"pcollection-id-2\", Collections.singleton(encodeInt(2)));\n}\n/**\n* Test that the response contains samples from the same PCollection across descriptors.\n*\n* @throws Exception\n*/\n@Test\npublic void 
testMultipleSamePCollections() throws Exception {\nDataSampler sampler = new DataSampler();\nVarIntCoder coder = VarIntCoder.of();\nsampler.sampleOutput(\"pcollection-id\", coder).sample(1);\nsampler.sampleOutput(\"pcollection-id\", coder).sample(2);\nBeamFnApi.InstructionResponse samples = getAllSamples(sampler);\nassertHasSamples(samples, \"pcollection-id\", ImmutableList.of(encodeInt(1), encodeInt(2)));\n}\nvoid generateStringSamples(DataSampler sampler) {\nStringUtf8Coder coder = StringUtf8Coder.of();\nsampler.sampleOutput(\"a\", coder).sample(\"a1\");\nsampler.sampleOutput(\"a\", coder).sample(\"a2\");\nsampler.sampleOutput(\"b\", coder).sample(\"b1\");\nsampler.sampleOutput(\"b\", coder).sample(\"b2\");\nsampler.sampleOutput(\"c\", coder).sample(\"c1\");\nsampler.sampleOutput(\"c\", coder).sample(\"c2\");\n}\n/**\n* Test that samples can be filtered based on PCollection id.\n*\n* @throws Exception\n*/\n@Test\npublic void testFiltersSinglePCollectionId() throws Exception {\nDataSampler sampler = new DataSampler(10, 10);\ngenerateStringSamples(sampler);\nBeamFnApi.InstructionResponse samples = getSamplesForPCollection(sampler, \"a\");\nassertHasSamples(samples, \"a\", ImmutableList.of(encodeString(\"a1\"), encodeString(\"a2\")));\n}\n/**\n* Test that samples can be filtered both on PCollection and ProcessBundleDescriptor id.\n*\n* @throws Exception\n*/\n@Test\npublic void testFiltersMultiplePCollectionIds() throws Exception {\nList pcollectionIds = ImmutableList.of(\"a\", \"c\");\nDataSampler sampler = new DataSampler(10, 10);\ngenerateStringSamples(sampler);\nBeamFnApi.InstructionResponse samples = getSamplesForPCollections(sampler, pcollectionIds);\nassertThat(samples.getSampleData().getElementSamplesMap().size(), equalTo(2));\nassertHasSamples(samples, \"a\", ImmutableList.of(encodeString(\"a1\"), encodeString(\"a2\")));\nassertHasSamples(samples, \"c\", ImmutableList.of(encodeString(\"c1\"), encodeString(\"c2\")));\n}\n/**\n* Test that samples can be taken from the DataSampler while adding new OutputSamplers. 
This fails\n* with a ConcurrentModificationException if there is a bug.\n*\n* @throws Exception\n*/\n@Test\npublic void testConcurrentNewSampler() throws Exception {\nDataSampler sampler = new DataSampler();\nVarIntCoder coder = VarIntCoder.of();\nThread sampleThread =\nnew Thread(\n() -> {\nfor (int i = 0; i < 1000000; i++) {\nsampler.sampleOutput(\"pcollection-\" + i, coder).sample(0);\ntry {\nThread.sleep(0);\n} catch (InterruptedException e) {\nreturn;\n}\n}\n});\nsampleThread.start();\nfor (int i = 0; i < 20; i++) {\nsampler.handleDataSampleRequest(\nBeamFnApi.InstructionRequest.newBuilder()\n.setSampleData(BeamFnApi.SampleDataRequest.newBuilder())\n.build());\n}\nsampleThread.interrupt();\nsampleThread.join();\n}\n}", + "context_after": "class DataSamplerTest {\nbyte[] encodeInt(Integer i) throws IOException {\nVarIntCoder coder = VarIntCoder.of();\nByteArrayOutputStream stream = new ByteArrayOutputStream();\ncoder.encode(i, stream, Coder.Context.NESTED);\nreturn stream.toByteArray();\n}\nbyte[] encodeString(String s) throws IOException {\nStringUtf8Coder coder = StringUtf8Coder.of();\nByteArrayOutputStream stream = new ByteArrayOutputStream();\ncoder.encode(s, stream, Coder.Context.NESTED);\nreturn stream.toByteArray();\n}\nbyte[] encodeByteArray(byte[] b) throws IOException {\nByteArrayCoder coder = ByteArrayCoder.of();\nByteArrayOutputStream stream = new ByteArrayOutputStream();\ncoder.encode(b, stream, Coder.Context.NESTED);\nreturn stream.toByteArray();\n}\nBeamFnApi.InstructionResponse getAllSamples(DataSampler dataSampler) {\nBeamFnApi.InstructionRequest request =\nBeamFnApi.InstructionRequest.newBuilder()\n.setSampleData(BeamFnApi.SampleDataRequest.newBuilder().build())\n.build();\nreturn dataSampler.handleDataSampleRequest(request).build();\n}\nBeamFnApi.InstructionResponse getSamplesForPCollection(\nDataSampler dataSampler, String pcollection) {\nBeamFnApi.InstructionRequest request =\nBeamFnApi.InstructionRequest.newBuilder()\n.setSampleData(\nBeamFnApi.SampleDataRequest.newBuilder().addPcollectionIds(pcollection).build())\n.build();\nreturn dataSampler.handleDataSampleRequest(request).build();\n}\nBeamFnApi.InstructionResponse getSamplesForPCollections(\nDataSampler dataSampler, Iterable pcollections) {\nBeamFnApi.InstructionRequest request =\nBeamFnApi.InstructionRequest.newBuilder()\n.setSampleData(\nBeamFnApi.SampleDataRequest.newBuilder().addAllPcollectionIds(pcollections).build())\n.build();\nreturn dataSampler.handleDataSampleRequest(request).build();\n}\nvoid assertHasSamples(\nBeamFnApi.InstructionResponse response, String pcollection, Iterable elements) {\nMap elementSamplesMap =\nresponse.getSampleData().getElementSamplesMap();\nassertFalse(elementSamplesMap.isEmpty());\nBeamFnApi.SampleDataResponse.ElementList elementList = elementSamplesMap.get(pcollection);\nassertNotNull(elementList);\nList expectedSamples = new ArrayList<>();\nfor (byte[] el : elements) {\nexpectedSamples.add(\nBeamFnApi.SampledElement.newBuilder().setElement(ByteString.copyFrom(el)).build());\n}\nassertTrue(elementList.getElementsList().containsAll(expectedSamples));\n}\n/**\n* Smoke test that a samples show in the output map.\n*\n* @throws Exception\n*/\n@Test\npublic void testSingleOutput() throws Exception {\nDataSampler sampler = new DataSampler();\nVarIntCoder coder = VarIntCoder.of();\nsampler.sampleOutput(\"pcollection-id\", coder).sample(1);\nBeamFnApi.InstructionResponse samples = getAllSamples(sampler);\nassertHasSamples(samples, \"pcollection-id\", 
Collections.singleton(encodeInt(1)));\n}\n/**\n* Smoke test that a sample shows in the output map.\n*\n* @throws Exception\n*/\n@Test\n/**\n* Test that sampling multiple PCollections under the same descriptor is OK.\n*\n* @throws Exception\n*/\n@Test\npublic void testMultipleOutputs() throws Exception {\nDataSampler sampler = new DataSampler();\nVarIntCoder coder = VarIntCoder.of();\nsampler.sampleOutput(\"pcollection-id-1\", coder).sample(1);\nsampler.sampleOutput(\"pcollection-id-2\", coder).sample(2);\nBeamFnApi.InstructionResponse samples = getAllSamples(sampler);\nassertHasSamples(samples, \"pcollection-id-1\", Collections.singleton(encodeInt(1)));\nassertHasSamples(samples, \"pcollection-id-2\", Collections.singleton(encodeInt(2)));\n}\n/**\n* Test that the response contains samples from the same PCollection across descriptors.\n*\n* @throws Exception\n*/\n@Test\npublic void testMultipleSamePCollections() throws Exception {\nDataSampler sampler = new DataSampler();\nVarIntCoder coder = VarIntCoder.of();\nsampler.sampleOutput(\"pcollection-id\", coder).sample(1);\nsampler.sampleOutput(\"pcollection-id\", coder).sample(2);\nBeamFnApi.InstructionResponse samples = getAllSamples(sampler);\nassertHasSamples(samples, \"pcollection-id\", ImmutableList.of(encodeInt(1), encodeInt(2)));\n}\nvoid generateStringSamples(DataSampler sampler) {\nStringUtf8Coder coder = StringUtf8Coder.of();\nsampler.sampleOutput(\"a\", coder).sample(\"a1\");\nsampler.sampleOutput(\"a\", coder).sample(\"a2\");\nsampler.sampleOutput(\"b\", coder).sample(\"b1\");\nsampler.sampleOutput(\"b\", coder).sample(\"b2\");\nsampler.sampleOutput(\"c\", coder).sample(\"c1\");\nsampler.sampleOutput(\"c\", coder).sample(\"c2\");\n}\n/**\n* Test that samples can be filtered based on PCollection id.\n*\n* @throws Exception\n*/\n@Test\npublic void testFiltersSinglePCollectionId() throws Exception {\nDataSampler sampler = new DataSampler(10, 10);\ngenerateStringSamples(sampler);\nBeamFnApi.InstructionResponse samples = getSamplesForPCollection(sampler, \"a\");\nassertHasSamples(samples, \"a\", ImmutableList.of(encodeString(\"a1\"), encodeString(\"a2\")));\n}\n/**\n* Test that samples can be filtered both on PCollection and ProcessBundleDescriptor id.\n*\n* @throws Exception\n*/\n@Test\npublic void testFiltersMultiplePCollectionIds() throws Exception {\nList pcollectionIds = ImmutableList.of(\"a\", \"c\");\nDataSampler sampler = new DataSampler(10, 10);\ngenerateStringSamples(sampler);\nBeamFnApi.InstructionResponse samples = getSamplesForPCollections(sampler, pcollectionIds);\nassertThat(samples.getSampleData().getElementSamplesMap().size(), equalTo(2));\nassertHasSamples(samples, \"a\", ImmutableList.of(encodeString(\"a1\"), encodeString(\"a2\")));\nassertHasSamples(samples, \"c\", ImmutableList.of(encodeString(\"c1\"), encodeString(\"c2\")));\n}\n/**\n* Test that samples can be taken from the DataSampler while adding new OutputSamplers. 
This fails\n* with a ConcurrentModificationException if there is a bug.\n*\n* @throws Exception\n*/\n@Test\npublic void testConcurrentNewSampler() throws Exception {\nDataSampler sampler = new DataSampler();\nVarIntCoder coder = VarIntCoder.of();\nThread[] sampleThreads = new Thread[100];\nCountDownLatch startSignal = new CountDownLatch(1);\nCountDownLatch doneSignal = new CountDownLatch(sampleThreads.length);\nfor (int i = 0; i < sampleThreads.length; i++) {\nsampleThreads[i] =\nnew Thread(\n() -> {\ntry {\nstartSignal.await();\n} catch (InterruptedException e) {\nreturn;\n}\nfor (int j = 0; j < 100; j++) {\nsampler.sampleOutput(\"pcollection-\" + j, coder).sample(0);\n}\ndoneSignal.countDown();\n});\nsampleThreads[i].start();\n}\nstartSignal.countDown();\nwhile (doneSignal.getCount() > 0) {\nsampler.handleDataSampleRequest(\nBeamFnApi.InstructionRequest.newBuilder()\n.setSampleData(BeamFnApi.SampleDataRequest.newBuilder())\n.build());\n}\nfor (Thread sampleThread : sampleThreads) {\nsampleThread.join();\n}\n}\n}" + }, + { + "comment": "Why is `0 < minGroupsUpRatio < 0.01` disallowed?", + "method_body": "private static Optional maxGroupsAllowedDown(ModelElement tuning, boolean allowMoreThanOneContentGroupDown, int numberOfLeafGroups) {\nvar minGroupsUpRatio = tuning.childAsDouble(\"min-group-up-ratio\");\nif (minGroupsUpRatio != null) {\nif (minGroupsUpRatio < 0.01 || minGroupsUpRatio > 1)\nthrow new IllegalArgumentException(\"min-groups-up-ratio must be between 0.01 and 1, got \" + minGroupsUpRatio);\ndouble minGroupsUp = minGroupsUpRatio * numberOfLeafGroups;\nvar maxGroupsAllowedDown = Math.max(1, numberOfLeafGroups - (int) Math.ceil(minGroupsUp));\nreturn allowMoreThanOneContentGroupDown ? Optional.of(maxGroupsAllowedDown) : Optional.empty();\n}\nreturn Optional.empty();\n}", + "target_code": "if (minGroupsUpRatio < 0.01 || minGroupsUpRatio > 1)", + "method_body_after": "private static Optional maxGroupsAllowedDown(ModelElement tuning, boolean allowMoreThanOneContentGroupDown, int numberOfLeafGroups) {\nvar groupsAllowedDownRatio = tuning.childAsDouble(\"groups-allowed-down-ratio\");\nif (groupsAllowedDownRatio != null) {\nif (groupsAllowedDownRatio < 0 || groupsAllowedDownRatio > 1)\nthrow new IllegalArgumentException(\"groups-allowed-down-ratio must be between 0 and 1, got \" + groupsAllowedDownRatio);\nvar maxGroupsAllowedDown = Math.max(1, (int) Math.floor(groupsAllowedDownRatio * numberOfLeafGroups));\nreturn allowMoreThanOneContentGroupDown ? 
Optional.of(maxGroupsAllowedDown) : Optional.empty();\n}\nreturn Optional.empty();\n}", + "context_before": "class ClusterControllerTuningBuilder {\nprivate final Optional minNodeRatioPerGroup;\nprivate final Optional initProgressTime;\nprivate final Optional transitionTime;\nprivate final Optional maxPrematureCrashes;\nprivate final Optional stableStateTimePeriod;\nprivate final Optional minDistributorUpRatio;\nprivate final Optional minStorageUpRatio;\nprivate final Optional minSplitBits;\nprivate final Optional maxGroupsAllowedDown;\nClusterControllerTuningBuilder(ModelElement tuning,\nOptional minNodeRatioPerGroup,\nOptional bucketSplittingMinimumBits,\nboolean maxGroupsAllowedDown,\nint numberOfLeafGroups) {\nthis.minSplitBits = bucketSplittingMinimumBits;\nthis.minNodeRatioPerGroup = minNodeRatioPerGroup;\nif (tuning == null) {\nthis.initProgressTime = Optional.empty();\nthis.transitionTime = Optional.empty();\nthis.maxPrematureCrashes = Optional.empty();\nthis.stableStateTimePeriod = Optional.empty();\nthis.minDistributorUpRatio = Optional.empty();\nthis.minStorageUpRatio = Optional.empty();\nthis.maxGroupsAllowedDown = Optional.empty();\n}\nelse {\nthis.initProgressTime = Optional.ofNullable(tuning.childAsDuration(\"init-progress-time\"));\nthis.transitionTime = Optional.ofNullable(tuning.childAsDuration(\"transition-time\"));\nthis.maxPrematureCrashes = Optional.ofNullable(tuning.childAsLong(\"max-premature-crashes\"));\nthis.stableStateTimePeriod = Optional.ofNullable(tuning.childAsDuration(\"stable-state-period\"));\nthis.minDistributorUpRatio = Optional.ofNullable(tuning.childAsDouble(\"min-distributor-up-ratio\"));\nthis.minStorageUpRatio = Optional.ofNullable(tuning.childAsDouble(\"min-storage-up-ratio\"));\nthis.maxGroupsAllowedDown = maxGroupsAllowedDown(tuning, maxGroupsAllowedDown, numberOfLeafGroups);\n}\n}\nprivate ClusterControllerTuning build() {\nreturn new ClusterControllerTuning(initProgressTime,\ntransitionTime,\nmaxPrematureCrashes,\nstableStateTimePeriod,\nminDistributorUpRatio,\nminStorageUpRatio,\nmaxGroupsAllowedDown,\nminNodeRatioPerGroup,\nminSplitBits);\n}\n}", + "context_after": "class ClusterControllerTuningBuilder {\nprivate final Optional minNodeRatioPerGroup;\nprivate final Optional initProgressTime;\nprivate final Optional transitionTime;\nprivate final Optional maxPrematureCrashes;\nprivate final Optional stableStateTimePeriod;\nprivate final Optional minDistributorUpRatio;\nprivate final Optional minStorageUpRatio;\nprivate final Optional minSplitBits;\nprivate final Optional maxGroupsAllowedDown;\nClusterControllerTuningBuilder(ModelElement tuning,\nOptional minNodeRatioPerGroup,\nOptional bucketSplittingMinimumBits,\nboolean maxGroupsAllowedDown,\nint numberOfLeafGroups) {\nthis.minSplitBits = bucketSplittingMinimumBits;\nthis.minNodeRatioPerGroup = minNodeRatioPerGroup;\nif (tuning == null) {\nthis.initProgressTime = Optional.empty();\nthis.transitionTime = Optional.empty();\nthis.maxPrematureCrashes = Optional.empty();\nthis.stableStateTimePeriod = Optional.empty();\nthis.minDistributorUpRatio = Optional.empty();\nthis.minStorageUpRatio = Optional.empty();\nthis.maxGroupsAllowedDown = Optional.empty();\n}\nelse {\nthis.initProgressTime = Optional.ofNullable(tuning.childAsDuration(\"init-progress-time\"));\nthis.transitionTime = Optional.ofNullable(tuning.childAsDuration(\"transition-time\"));\nthis.maxPrematureCrashes = Optional.ofNullable(tuning.childAsLong(\"max-premature-crashes\"));\nthis.stableStateTimePeriod = 
Optional.ofNullable(tuning.childAsDuration(\"stable-state-period\"));\nthis.minDistributorUpRatio = Optional.ofNullable(tuning.childAsDouble(\"min-distributor-up-ratio\"));\nthis.minStorageUpRatio = Optional.ofNullable(tuning.childAsDouble(\"min-storage-up-ratio\"));\nthis.maxGroupsAllowedDown = maxGroupsAllowedDown(tuning, maxGroupsAllowedDown, numberOfLeafGroups);\n}\n}\nprivate ClusterControllerTuning build() {\nreturn new ClusterControllerTuning(initProgressTime,\ntransitionTime,\nmaxPrematureCrashes,\nstableStateTimePeriod,\nminDistributorUpRatio,\nminStorageUpRatio,\nmaxGroupsAllowedDown,\nminNodeRatioPerGroup,\nminSplitBits);\n}\n}" + }, + { + "comment": "You forget to add 'IndexName=xxx' example", + "method_body": "private void analyzeSubPredicate(Expr subExpr) throws AnalysisException {\nif (subExpr == null) {\nreturn;\n}\nif (subExpr instanceof CompoundPredicate) {\nCompoundPredicate cp = (CompoundPredicate) subExpr;\nif (cp.getOp() != org.apache.doris.analysis.CompoundPredicate.Operator.AND) {\nthrow new AnalysisException(\"Only allow compound predicate with operator AND\");\n}\nanalyzeSubPredicate(cp.getChild(0));\nanalyzeSubPredicate(cp.getChild(1));\nreturn;\n}\nboolean valid = true;\ndo {\nif (!(subExpr instanceof BinaryPredicate)) {\nvalid = false;\nbreak;\n}\nBinaryPredicate binaryPredicate = (BinaryPredicate) subExpr;\nif (binaryPredicate.getOp() != BinaryPredicate.Operator.EQ) {\nvalid = false;\nbreak;\n}\nif (!(subExpr.getChild(0) instanceof SlotRef)) {\nvalid = false;\nbreak;\n}\nString leftKey = ((SlotRef) subExpr.getChild(0)).getColumnName();\nif (leftKey.equalsIgnoreCase(\"version\")) {\nif (!(subExpr.getChild(1) instanceof IntLiteral) || version > -1) {\nvalid = false;\nbreak;\n}\nversion = ((IntLiteral) subExpr.getChild(1)).getValue();\n} else if (leftKey.equalsIgnoreCase(\"backendid\")) {\nif (!(subExpr.getChild(1) instanceof IntLiteral) || backendId > -1) {\nvalid = false;\nbreak;\n}\nbackendId = ((IntLiteral) subExpr.getChild(1)).getValue();\n} else if (leftKey.equalsIgnoreCase(\"indexname\")) {\nif (!(subExpr.getChild(1) instanceof StringLiteral) || indexName != null) {\nvalid = false;\nbreak;\n}\nindexName = ((StringLiteral) subExpr.getChild(1)).getValue();\n} else if (leftKey.equalsIgnoreCase(\"state\")) {\nif (!(subExpr.getChild(1) instanceof StringLiteral) || replicaState != null) {\nvalid = false;\nbreak;\n}\nString state = ((StringLiteral) subExpr.getChild(1)).getValue().toUpperCase();\ntry {\nreplicaState = Replica.ReplicaState.valueOf(state);\n} catch (Exception e) {\nreplicaState = null;\nvalid = false;\nbreak;\n}\n} else {\nvalid = false;\nbreak;\n}\n} while(false);\nif (!valid) {\nthrow new AnalysisException(\"Where clause should looks like: Version = \\\"version\\\",\"\n+ \" State = \\\"NORMAL|ROLLUP|CLONE|DECOMMISSION\\\", or BackendId = 10000\"\n+ \" or compound predicate with operator AND\");\n}\n}", + "target_code": "+ \" or compound predicate with operator AND\");", + "method_body_after": "private void analyzeSubPredicate(Expr subExpr) throws AnalysisException {\nif (subExpr == null) {\nreturn;\n}\nif (subExpr instanceof CompoundPredicate) {\nCompoundPredicate cp = (CompoundPredicate) subExpr;\nif (cp.getOp() != org.apache.doris.analysis.CompoundPredicate.Operator.AND) {\nthrow new AnalysisException(\"Only allow compound predicate with operator AND\");\n}\nanalyzeSubPredicate(cp.getChild(0));\nanalyzeSubPredicate(cp.getChild(1));\nreturn;\n}\nboolean valid = true;\ndo {\nif (!(subExpr instanceof BinaryPredicate)) {\nvalid = 
false;\nbreak;\n}\nBinaryPredicate binaryPredicate = (BinaryPredicate) subExpr;\nif (binaryPredicate.getOp() != BinaryPredicate.Operator.EQ) {\nvalid = false;\nbreak;\n}\nif (!(subExpr.getChild(0) instanceof SlotRef)) {\nvalid = false;\nbreak;\n}\nString leftKey = ((SlotRef) subExpr.getChild(0)).getColumnName();\nif (leftKey.equalsIgnoreCase(\"version\")) {\nif (!(subExpr.getChild(1) instanceof IntLiteral) || version > -1) {\nvalid = false;\nbreak;\n}\nversion = ((IntLiteral) subExpr.getChild(1)).getValue();\n} else if (leftKey.equalsIgnoreCase(\"backendid\")) {\nif (!(subExpr.getChild(1) instanceof IntLiteral) || backendId > -1) {\nvalid = false;\nbreak;\n}\nbackendId = ((IntLiteral) subExpr.getChild(1)).getValue();\n} else if (leftKey.equalsIgnoreCase(\"indexname\")) {\nif (!(subExpr.getChild(1) instanceof StringLiteral) || indexName != null) {\nvalid = false;\nbreak;\n}\nindexName = ((StringLiteral) subExpr.getChild(1)).getValue();\n} else if (leftKey.equalsIgnoreCase(\"state\")) {\nif (!(subExpr.getChild(1) instanceof StringLiteral) || replicaState != null) {\nvalid = false;\nbreak;\n}\nString state = ((StringLiteral) subExpr.getChild(1)).getValue().toUpperCase();\ntry {\nreplicaState = Replica.ReplicaState.valueOf(state);\n} catch (Exception e) {\nreplicaState = null;\nvalid = false;\nbreak;\n}\n} else {\nvalid = false;\nbreak;\n}\n} while(false);\nif (!valid) {\nthrow new AnalysisException(\"Where clause should looks like: Version = \\\"version\\\",\"\n+ \" or state = \\\"NORMAL|ROLLUP|CLONE|DECOMMISSION\\\", or BackendId = 10000,\"\n+ \" indexname=\\\"rollup_name\\\" or compound predicate with operator AND\");\n}\n}", + "context_before": "class ShowTabletStmt extends ShowStmt {\nprivate String dbName;\nprivate String tableName;\nprivate long tabletId;\nprivate List partitionNames;\nprivate Expr whereClause;\nprivate List orderByElements;\nprivate LimitElement limitElement;\nprivate long version;\nprivate long backendId;\nprivate String indexName;\nprivate Replica.ReplicaState replicaState;\nprivate ArrayList orderByPairs;\nprivate boolean isShowSingleTablet;\npublic ShowTabletStmt(TableName dbTableName, long tabletId) {\nthis(dbTableName, tabletId, null, null, null,null);\n}\npublic ShowTabletStmt(TableName dbTableName, long tabletId, List partitionNames,\nExpr whereClause, List orderByElements, LimitElement limitElement) {\nif (dbTableName == null) {\nthis.dbName = null;\nthis.tableName = null;\nthis.isShowSingleTablet = true;\n} else {\nthis.dbName = dbTableName.getDb();\nthis.tableName = dbTableName.getTbl();\nthis.isShowSingleTablet = false;\n}\nthis.tabletId = tabletId;\nthis.partitionNames = partitionNames;\nthis.whereClause = whereClause;\nthis.orderByElements = orderByElements;\nthis.limitElement = limitElement;\nthis.version = -1;\nthis.backendId = -1;\nthis.indexName = null;\nthis.replicaState = null;\nthis.orderByPairs = null;\n}\npublic String getDbName() {\nreturn dbName;\n}\npublic String getTableName() {\nreturn tableName;\n}\npublic long getTabletId() {\nreturn tabletId;\n}\npublic boolean isShowSingleTablet() {\nreturn isShowSingleTablet;\n}\npublic boolean hasOffset() { return limitElement != null && limitElement.hasOffset(); }\npublic long getOffset() { return limitElement.getOffset(); }\npublic boolean hasPartition() { return partitionNames != null; }\npublic List getPartitionNames() { return partitionNames; }\npublic boolean hasLimit() { return limitElement != null && limitElement.hasLimit(); }\npublic long getLimit() { return limitElement.getLimit(); }\npublic 
long getVersion() { return version; }\npublic long getBackendId() { return backendId; }\npublic String getIndexName() { return indexName; }\npublic List getOrderByPairs() { return orderByPairs; }\npublic Replica.ReplicaState getReplicaState() { return replicaState; }\n@Override\npublic void analyze(Analyzer analyzer) throws UserException {\nif (!Catalog.getCurrentCatalog().getAuth().checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, \"SHOW TABLET\");\n}\nsuper.analyze(analyzer);\nif (!isShowSingleTablet && Strings.isNullOrEmpty(dbName)) {\ndbName = analyzer.getDefaultDb();\nif (Strings.isNullOrEmpty(dbName)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_NO_DB_ERROR);\n}\n} else {\ndbName = ClusterNamespace.getFullName(getClusterName(), dbName);\n}\nif (limitElement != null) {\nlimitElement.analyze(analyzer);\n}\nif (whereClause != null) {\nif (whereClause instanceof CompoundPredicate) {\nCompoundPredicate cp = (CompoundPredicate) whereClause;\nif (cp.getOp() != org.apache.doris.analysis.CompoundPredicate.Operator.AND) {\nthrow new AnalysisException(\"Only allow compound predicate with operator AND\");\n}\nanalyzeSubPredicate(cp.getChild(0));\nanalyzeSubPredicate(cp.getChild(1));\n} else {\nanalyzeSubPredicate(whereClause);\n}\n}\nif (orderByElements != null && !orderByElements.isEmpty()) {\norderByPairs = new ArrayList();\nfor (OrderByElement orderByElement : orderByElements) {\nif (!(orderByElement.getExpr() instanceof SlotRef)) {\nthrow new AnalysisException(\"Should order by column\");\n}\nSlotRef slotRef = (SlotRef) orderByElement.getExpr();\nint index = TabletsProcDir.analyzeColumn(slotRef.getColumnName());\nOrderByPair orderByPair = new OrderByPair(index, !orderByElement.getIsAsc());\norderByPairs.add(orderByPair);\n}\n}\n}\n@Override\npublic String toSql() {\nStringBuilder sb = new StringBuilder();\nsb.append(\"SHOW TABLET \");\nif (isShowSingleTablet) {\nsb.append(tabletId);\n} else {\nsb.append(\" from \").append(\"`\").append(dbName).append(\"`.`\").append(tableName).append(\"`\");\n}\nif (limitElement != null) {\nif (limitElement.hasOffset() && limitElement.hasLimit()) {\nsb.append(\" \").append(limitElement.getOffset()).append(\",\").append(limitElement.getLimit());\n} else if (limitElement.hasLimit()){\nsb.append(\" \").append(limitElement.getLimit());\n}\n}\nreturn sb.toString();\n}\n@Override\npublic ShowResultSetMetaData getMetaData() {\nShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder();\nif (isShowSingleTablet) {\nbuilder.addColumn(new Column(\"DbName\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"TableName\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"PartitionName\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"IndexName\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"DbId\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"TableId\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"PartitionId\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"IndexId\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"IsSync\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"DetailCmd\", ScalarType.createVarchar(30)));\n} else {\nfor (String title : TabletsProcDir.TITLE_NAMES) {\nbuilder.addColumn(new Column(title, ScalarType.createVarchar(30)));\n}\n}\nreturn builder.build();\n}\n@Override\npublic RedirectStatus 
getRedirectStatus() {\nif (ConnectContext.get().getSessionVariable().getForwardToMaster()) {\nreturn RedirectStatus.FORWARD_NO_SYNC;\n} else {\nreturn RedirectStatus.NO_FORWARD;\n}\n}\n}", + "context_after": "class ShowTabletStmt extends ShowStmt {\nprivate String dbName;\nprivate String tableName;\nprivate long tabletId;\nprivate List partitionNames;\nprivate Expr whereClause;\nprivate List orderByElements;\nprivate LimitElement limitElement;\nprivate long version;\nprivate long backendId;\nprivate String indexName;\nprivate Replica.ReplicaState replicaState;\nprivate ArrayList orderByPairs;\nprivate boolean isShowSingleTablet;\npublic ShowTabletStmt(TableName dbTableName, long tabletId) {\nthis(dbTableName, tabletId, null, null, null,null);\n}\npublic ShowTabletStmt(TableName dbTableName, long tabletId, List partitionNames,\nExpr whereClause, List orderByElements, LimitElement limitElement) {\nif (dbTableName == null) {\nthis.dbName = null;\nthis.tableName = null;\nthis.isShowSingleTablet = true;\n} else {\nthis.dbName = dbTableName.getDb();\nthis.tableName = dbTableName.getTbl();\nthis.isShowSingleTablet = false;\n}\nthis.tabletId = tabletId;\nthis.partitionNames = partitionNames;\nthis.whereClause = whereClause;\nthis.orderByElements = orderByElements;\nthis.limitElement = limitElement;\nthis.version = -1;\nthis.backendId = -1;\nthis.indexName = null;\nthis.replicaState = null;\nthis.orderByPairs = null;\n}\npublic String getDbName() {\nreturn dbName;\n}\npublic String getTableName() {\nreturn tableName;\n}\npublic long getTabletId() {\nreturn tabletId;\n}\npublic boolean isShowSingleTablet() {\nreturn isShowSingleTablet;\n}\npublic boolean hasOffset() { return limitElement != null && limitElement.hasOffset(); }\npublic long getOffset() { return limitElement.getOffset(); }\npublic boolean hasPartition() { return partitionNames != null; }\npublic List getPartitionNames() { return partitionNames; }\npublic boolean hasLimit() { return limitElement != null && limitElement.hasLimit(); }\npublic long getLimit() { return limitElement.getLimit(); }\npublic long getVersion() { return version; }\npublic long getBackendId() { return backendId; }\npublic String getIndexName() { return indexName; }\npublic List getOrderByPairs() { return orderByPairs; }\npublic Replica.ReplicaState getReplicaState() { return replicaState; }\n@Override\npublic void analyze(Analyzer analyzer) throws UserException {\nif (!Catalog.getCurrentCatalog().getAuth().checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, \"SHOW TABLET\");\n}\nsuper.analyze(analyzer);\nif (!isShowSingleTablet && Strings.isNullOrEmpty(dbName)) {\ndbName = analyzer.getDefaultDb();\nif (Strings.isNullOrEmpty(dbName)) {\nErrorReport.reportAnalysisException(ErrorCode.ERR_NO_DB_ERROR);\n}\n} else {\ndbName = ClusterNamespace.getFullName(getClusterName(), dbName);\n}\nif (limitElement != null) {\nlimitElement.analyze(analyzer);\n}\nif (whereClause != null) {\nif (whereClause instanceof CompoundPredicate) {\nCompoundPredicate cp = (CompoundPredicate) whereClause;\nif (cp.getOp() != org.apache.doris.analysis.CompoundPredicate.Operator.AND) {\nthrow new AnalysisException(\"Only allow compound predicate with operator AND\");\n}\nanalyzeSubPredicate(cp.getChild(0));\nanalyzeSubPredicate(cp.getChild(1));\n} else {\nanalyzeSubPredicate(whereClause);\n}\n}\nif (orderByElements != null && !orderByElements.isEmpty()) {\norderByPairs = new ArrayList();\nfor (OrderByElement 
orderByElement : orderByElements) {\nif (!(orderByElement.getExpr() instanceof SlotRef)) {\nthrow new AnalysisException(\"Should order by column\");\n}\nSlotRef slotRef = (SlotRef) orderByElement.getExpr();\nint index = TabletsProcDir.analyzeColumn(slotRef.getColumnName());\nOrderByPair orderByPair = new OrderByPair(index, !orderByElement.getIsAsc());\norderByPairs.add(orderByPair);\n}\n}\n}\n@Override\npublic String toSql() {\nStringBuilder sb = new StringBuilder();\nsb.append(\"SHOW TABLET \");\nif (isShowSingleTablet) {\nsb.append(tabletId);\n} else {\nsb.append(\" from \").append(\"`\").append(dbName).append(\"`.`\").append(tableName).append(\"`\");\n}\nif (limitElement != null) {\nif (limitElement.hasOffset() && limitElement.hasLimit()) {\nsb.append(\" \").append(limitElement.getOffset()).append(\",\").append(limitElement.getLimit());\n} else if (limitElement.hasLimit()){\nsb.append(\" \").append(limitElement.getLimit());\n}\n}\nreturn sb.toString();\n}\n@Override\npublic ShowResultSetMetaData getMetaData() {\nShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder();\nif (isShowSingleTablet) {\nbuilder.addColumn(new Column(\"DbName\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"TableName\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"PartitionName\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"IndexName\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"DbId\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"TableId\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"PartitionId\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"IndexId\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"IsSync\", ScalarType.createVarchar(30)));\nbuilder.addColumn(new Column(\"DetailCmd\", ScalarType.createVarchar(30)));\n} else {\nfor (String title : TabletsProcDir.TITLE_NAMES) {\nbuilder.addColumn(new Column(title, ScalarType.createVarchar(30)));\n}\n}\nreturn builder.build();\n}\n@Override\npublic RedirectStatus getRedirectStatus() {\nif (ConnectContext.get().getSessionVariable().getForwardToMaster()) {\nreturn RedirectStatus.FORWARD_NO_SYNC;\n} else {\nreturn RedirectStatus.NO_FORWARD;\n}\n}\n}" + }, + { + "comment": "tbh, I feel like the `TypeSerializer` interface could be cleaned up in this regard. Some `reuse` should be marked as `@Nullable` and each implementation should be able to handle that specific case (i.e. instantiating a new instance) rather than having two separate implementations. There would be a default implementation of `copy(T)` and `deserialize(T)` in that case. 
But that's out-of-scope for this PR.", + "method_body": "public T deserialize(T reuse, DataInputView source) throws IOException {\nint flags = source.readByte();\nif ((flags & IS_NULL) != 0) {\nreturn null;\n}\nClass subclass = null;\nTypeSerializer subclassSerializer = null;\nif ((flags & IS_SUBCLASS) != 0) {\nString subclassName = source.readUTF();\ntry {\nsubclass = Class.forName(subclassName, true, cl);\n} catch (ClassNotFoundException e) {\nthrow new RuntimeException(\"Cannot instantiate class.\", e);\n}\nsubclassSerializer = getSubclassSerializer(subclass);\nif (reuse == null || subclass != reuse.getClass()) {\nreuse = (T) subclassSerializer.createInstance();\ninitializeFields(reuse);\n}\n} else if ((flags & IS_TAGGED_SUBCLASS) != 0) {\nint subclassTag = source.readByte();\nsubclassSerializer = registeredSerializers[subclassTag];\nif (reuse == null || ((PojoSerializer) subclassSerializer).clazz != reuse.getClass()) {\nreuse = (T) subclassSerializer.createInstance();\ninitializeFields(reuse);\n}\n} else {\nif (reuse == null || clazz != reuse.getClass()) {\nreuse = createInstance();\n}\n}\nif (isRecord) {\ntry {\nJavaRecordBuilderFactory.JavaRecordBuilder builder = recordHelper.newBuilder();\nfor (int i = 0; i < numFields; i++) {\nboolean isNull = source.readBoolean();\nif (fields[i] != null) {\nif (isNull) {\nbuilder.setField(i, null);\n} else {\nObject reuseField = reuse == null ? null : fields[i].get(reuse);\nbuilder.setField(i, deserializeField(reuseField, i, source));\n}\n} else if (!isNull) {\nfieldSerializers[i].deserialize(source);\n}\n}\nreuse = builder.build();\n} catch (IllegalAccessException e) {\nthrow new RuntimeException(\n\"Error during POJO copy, this should not happen since we check the fields before.\",\ne);\n}\n} else if ((flags & NO_SUBCLASS) != 0) {\ntry {\nfor (int i = 0; i < numFields; i++) {\nboolean isNull = source.readBoolean();\nif (fields[i] != null) {\nif (isNull) {\nfields[i].set(reuse, null);\n} else {\nfields[i].set(reuse, deserializeField(fields[i].get(reuse), i, source));\n}\n} else if (!isNull) {\nfieldSerializers[i].deserialize(source);\n}\n}\n} catch (IllegalAccessException e) {\nthrow new RuntimeException(\n\"Error during POJO copy, this should not happen since we check the fields before.\",\ne);\n}\n} else {\nif (subclassSerializer != null) {\nreuse = (T) subclassSerializer.deserialize(reuse, source);\n}\n}\nreturn reuse;\n}", + "target_code": "if (isRecord) {", + "method_body_after": "public T deserialize(T reuse, DataInputView source) throws IOException {\nint flags = source.readByte();\nif ((flags & IS_NULL) != 0) {\nreturn null;\n}\nClass subclass = null;\nTypeSerializer subclassSerializer = null;\nif ((flags & IS_SUBCLASS) != 0) {\nString subclassName = source.readUTF();\ntry {\nsubclass = Class.forName(subclassName, true, cl);\n} catch (ClassNotFoundException e) {\nthrow new RuntimeException(\"Cannot instantiate class.\", e);\n}\nsubclassSerializer = getSubclassSerializer(subclass);\nif (reuse == null || subclass != reuse.getClass()) {\nreuse = (T) subclassSerializer.createInstance();\ninitializeFields(reuse);\n}\n} else if ((flags & IS_TAGGED_SUBCLASS) != 0) {\nint subclassTag = source.readByte();\nsubclassSerializer = registeredSerializers[subclassTag];\nif (reuse == null || ((PojoSerializer) subclassSerializer).clazz != reuse.getClass()) {\nreuse = (T) subclassSerializer.createInstance();\ninitializeFields(reuse);\n}\n} else {\nif (reuse == null || clazz != reuse.getClass()) {\nreuse = createInstance();\n}\n}\nif (isRecord()) {\ntry 
{\nJavaRecordBuilderFactory.JavaRecordBuilder builder = recordFactory.newBuilder();\nfor (int i = 0; i < numFields; i++) {\nboolean isNull = source.readBoolean();\nif (fields[i] != null) {\nif (isNull) {\nbuilder.setField(i, null);\n} else {\nObject reuseField = reuse == null ? null : fields[i].get(reuse);\nbuilder.setField(i, deserializeField(reuseField, i, source));\n}\n} else if (!isNull) {\nfieldSerializers[i].deserialize(source);\n}\n}\nreuse = builder.build();\n} catch (IllegalAccessException e) {\nthrow new RuntimeException(\n\"Error during POJO copy, this should not happen since we check the fields before.\",\ne);\n}\n} else if ((flags & NO_SUBCLASS) != 0) {\ntry {\nfor (int i = 0; i < numFields; i++) {\nboolean isNull = source.readBoolean();\nif (fields[i] != null) {\nif (isNull) {\nfields[i].set(reuse, null);\n} else {\nfields[i].set(reuse, deserializeField(fields[i].get(reuse), i, source));\n}\n} else if (!isNull) {\nfieldSerializers[i].deserialize(source);\n}\n}\n} catch (IllegalAccessException e) {\nthrow new RuntimeException(\n\"Error during POJO copy, this should not happen since we check the fields before.\",\ne);\n}\n} else {\nif (subclassSerializer != null) {\nreuse = (T) subclassSerializer.deserialize(reuse, source);\n}\n}\nreturn reuse;\n}", + "context_before": "class serializer is not responsible\ninitializeFields(target);\n} else {\ntarget = createInstance();\n}", + "context_after": "class serializer is not responsible\ninitializeFields(target);\n} else {\ntarget = createInstance();\n}" + }, + { + "comment": "```suggestion assertThat(optionalCause.get()) .hasMessage(exceptionMessage); ```", + "method_body": "public void testRunAsyncCausesFatalError() throws Exception {\nnew Context() {\n{\nfinal String exceptionMessage = \"runAsyncCausesFatalError\";\naddContainerRequestFutures.add(CompletableFuture.completedFuture(null));\ntestingYarnAMRMClientAsyncBuilder.setGetMatchingRequestsFunction(\nignored -> {\nthrow new RuntimeException(exceptionMessage);\n});\nfinal CompletableFuture throwableCompletableFuture =\nnew CompletableFuture<>();\nresourceEventHandlerBuilder.setOnErrorConsumer(\nthrowableCompletableFuture::complete);\nrunTest(\n() -> {\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec));\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nThrowable t =\nthrowableCompletableFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);\nfinal Optional optionalCause =\nExceptionUtils.findThrowable(t, RuntimeException.class);\nassertThat(optionalCause).isPresent();\nassertThat(optionalCause.get().getMessage())\n.isEqualTo(exceptionMessage);\n});\n}\n};\n}", + "target_code": ".isEqualTo(exceptionMessage);", + "method_body_after": "public void testRunAsyncCausesFatalError() throws Exception {\nnew Context() {\n{\nfinal String exceptionMessage = \"runAsyncCausesFatalError\";\naddContainerRequestFutures.add(CompletableFuture.completedFuture(null));\ntestingYarnAMRMClientAsyncBuilder.setGetMatchingRequestsFunction(\nignored -> {\nthrow new RuntimeException(exceptionMessage);\n});\nfinal CompletableFuture throwableCompletableFuture =\nnew CompletableFuture<>();\nresourceEventHandlerBuilder.setOnErrorConsumer(\nthrowableCompletableFuture::complete);\nrunTest(\n() -> {\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec));\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nThrowable t 
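
> Editor's note: the review comment on the `PojoSerializer` entry above suggests collapsing the reusing and non-reusing `TypeSerializer` methods into a single nullable-`reuse` contract. A minimal sketch of that idea follows, under stated assumptions: the class name is hypothetical and Flink's actual `TypeSerializer` does **not** provide these defaults — it keeps both variants abstract.

```java
import java.io.IOException;
import javax.annotation.Nullable;
import org.apache.flink.core.memory.DataInputView;

// Hypothetical sketch of the review comment's idea, NOT Flink's real API:
// mark `reuse` as @Nullable, require implementations to handle the null case
// by instantiating, and derive the non-reusing methods as defaults.
abstract class NullableReuseSerializer<T> {

    /** Implementations must create a fresh instance when {@code reuse} is null. */
    abstract T copy(T from, @Nullable T reuse);

    /** Implementations must create a fresh instance when {@code reuse} is null. */
    abstract T deserialize(@Nullable T reuse, DataInputView source) throws IOException;

    /** Default implementation of copy(T), derived from the reusing variant. */
    T copy(T from) {
        return copy(from, null);
    }

    /** Default implementation of deserialize(source), derived from the reusing variant. */
    T deserialize(DataInputView source) throws IOException {
        return deserialize(null, source);
    }
}
```

With this shape there is one code path per serializer instead of two parallel implementations, which is exactly the cleanup the comment calls out as out-of-scope for that PR.
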
=\nthrowableCompletableFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);\nfinal Optional optionalCause =\nExceptionUtils.findThrowable(t, RuntimeException.class);\nassertThat(optionalCause).isPresent();\nassertThat(optionalCause.get()).hasMessage(exceptionMessage);\n});\n}\n};\n}", + "context_before": "class YarnResourceManagerDriverTest extends ResourceManagerDriverTestBase {\nprivate static final Logger log = LoggerFactory.getLogger(YarnResourceManagerDriverTest.class);\nprivate static final Resource testingResource =\nResource.newInstance(\nYarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,\nYarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);\nprivate static final Priority testingPriority = Priority.newInstance(1);\nprivate static final Container testingContainer =\ncreateTestingContainerWithResource(testingResource, testingPriority, 1);\nprivate static final TaskExecutorProcessSpec testingTaskExecutorProcessSpec =\nnew TaskExecutorProcessSpec(\nnew CPUResource(1),\nMemorySize.ZERO,\nMemorySize.ZERO,\nMemorySize.ofMebiBytes(256),\nMemorySize.ofMebiBytes(256),\nMemorySize.ofMebiBytes(256),\nMemorySize.ofMebiBytes(256),\nMemorySize.ZERO,\nMemorySize.ZERO,\nCollections.emptyList());\n@TempDir private File tmpFolder;\n@Override\nprotected Context createContext() {\nreturn new Context();\n}\n@Test\n@Test\npublic void testShutdownRequestCausesFatalError() throws Exception {\nnew Context() {\n{\nfinal CompletableFuture throwableCompletableFuture =\nnew CompletableFuture<>();\nresourceEventHandlerBuilder.setOnErrorConsumer(\nthrowableCompletableFuture::complete);\nrunTest(\n() -> {\nresourceManagerClientCallbackHandler.onShutdownRequest();\nThrowable throwable =\nthrowableCompletableFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);\nassertThat(\nExceptionUtils.findThrowable(\nthrowable, ResourceManagerException.class))\n.isPresent();\nassertThat(\nExceptionUtils.findThrowableWithMessage(\nthrowable, ERROR_MESSAGE_ON_SHUTDOWN_REQUEST))\n.isPresent();\n});\n}\n};\n}\n@Test\npublic void testTerminationDoesNotBlock() throws Exception {\nnew Context() {\n{\nrunTest(\n() -> {\ntry {\nrunInMainThread(() -> getDriver().terminate());\n} catch (Exception ex) {\nlog.error(\"cannot terminate driver\", ex);\nfail(\"termination of driver failed\");\n}\n});\n}\n};\n}\n@Test\npublic void testTerminationWaitsOnContainerStopSuccess() throws Exception {\nnew Context() {\n{\nfinal CompletableFuture containerIdFuture = new CompletableFuture<>();\ntestingYarnNMClientAsyncBuilder.setStopContainerAsyncConsumer(\n(containerId, ignored, callbackHandler) ->\ncontainerIdFuture.complete(containerId));\nresetYarnNodeManagerClientFactory();\nrunTest(\n() -> {\nfinal CompletableFuture yarnWorkerFuture =\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec))\n.thenCompose(Function.identity());\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nfinal YarnWorkerNode worker = yarnWorkerFuture.get();\nfinal CompletableFuture driverHasTerminatedFuture =\nrunInMainThread(\n() -> {\ngetDriver().releaseResource(worker);\ngetDriver().terminate();\n});\nassertThat(driverHasTerminatedFuture).isNotCompleted();\nnodeManagerClientCallbackHandler.onContainerStopped(\ncontainerIdFuture.get());\ndriverHasTerminatedFuture.get();\n});\n}\n};\n}\n@Test\npublic void testTerminationWaitsOnContainerStopError() throws Exception {\nnew Context() {\n{\nfinal CompletableFuture containerIdFuture = new 
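
> Editor's note: the `hasMessage` suggestion applied in the entry above trades a string-equality check on `getMessage()` for AssertJ's throwable-specific assertion, which reports failures in terms of the exception itself. A minimal standalone illustration (class name and message are placeholders, not taken from the test):

```java
import static org.assertj.core.api.Assertions.assertThat;

class HasMessageExample {
    void example() {
        RuntimeException cause = new RuntimeException("runAsyncCausesFatalError");

        // Before: compares strings extracted from the throwable.
        assertThat(cause.getMessage()).isEqualTo("runAsyncCausesFatalError");

        // After (as in the suggestion): asserts on the throwable directly.
        assertThat(cause).hasMessage("runAsyncCausesFatalError");
    }
}
```
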
CompletableFuture<>();\ntestingYarnNMClientAsyncBuilder.setStopContainerAsyncConsumer(\n(containerId, ignored, callbackHandler) ->\ncontainerIdFuture.complete(containerId));\nresetYarnNodeManagerClientFactory();\nrunTest(\n() -> {\nfinal CompletableFuture yarnWorkerFuture =\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec))\n.thenCompose(Function.identity());\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nfinal YarnWorkerNode worker = yarnWorkerFuture.get();\nfinal CompletableFuture driverHasTerminatedFuture =\nrunInMainThread(\n() -> {\ngetDriver().releaseResource(worker);\ngetDriver().terminate();\n});\nassertThat(driverHasTerminatedFuture).isNotCompleted();\nnodeManagerClientCallbackHandler.onStopContainerError(\ncontainerIdFuture.get(), null);\ndriverHasTerminatedFuture.get();\n});\n}\n};\n}\n/**\n* Tests that application files are deleted when the YARN application master is de-registered.\n*/\n@Test\npublic void testDeleteApplicationFiles() throws Exception {\nnew Context() {\n{\nfinal File applicationDir = newFolderIn(tmpFolder, \".flink\");\nenv.put(FLINK_YARN_FILES, applicationDir.getCanonicalPath());\nrunTest(\n() -> {\ngetDriver().deregisterApplication(ApplicationStatus.SUCCEEDED, null);\nassertFalse(\nFiles.exists(applicationDir.toPath()),\n\"YARN application directory was not removed\");\n});\n}\n};\n}\n@Test\npublic void testOnContainerAllocated() throws Exception {\nnew Context() {\n{\naddContainerRequestFutures.add(new CompletableFuture<>());\ntestingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(\n(ignored1, ignored2) ->\naddContainerRequestFutures\n.get(\naddContainerRequestFuturesNumCompleted\n.getAndIncrement())\n.complete(null));\nrunTest(\n() -> {\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec));\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nverifyFutureCompleted(addContainerRequestFutures.get(0));\nverifyFutureCompleted(removeContainerRequestFuture);\nverifyFutureCompleted(startContainerAsyncFuture);\n});\n}\n};\n}\n@Test\npublic void testOnSuccessfulContainerCompleted() throws Exception {\nrunTestOnContainerCompleted(createSuccessfulCompletedContainerStatus());\n}\n@Test\npublic void testOnContainerCompletedBecauseDisksFailed() throws Exception {\nrunTestOnContainerCompleted(createCompletedContainerStatusBecauseDisksFailed());\n}\n@Test\npublic void testOnContainerCompletedBecauseItWasAborted() throws Exception {\nrunTestOnContainerCompleted(createCompletedContainerStatusBecauseItWasAborted());\n}\n@Test\npublic void testOnContainerCompletedBecauseItWasInvalid() throws Exception {\nrunTestOnContainerCompleted(createCompletedContainerStatusBecauseItWasInvalid());\n}\n@Test\npublic void testOnContainerCompletedForUnknownCause() throws Exception {\nrunTestOnContainerCompleted(createCompletedContainerStatusForUnknownCause());\n}\n@Test\npublic void testOnContainerCompletedBecauseItWasPreempted() throws Exception {\nrunTestOnContainerCompleted(createCompletedContainerStatusBecauseItWasPreempted());\n}\npublic void runTestOnContainerCompleted(ContainerStatus completedContainerStatus)\nthrows Exception {\nnew Context() {\n{\naddContainerRequestFutures.add(new CompletableFuture<>());\naddContainerRequestFutures.add(new CompletableFuture<>());\ntestingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(\n(ignored1, ignored2) 
->\naddContainerRequestFutures\n.get(\naddContainerRequestFuturesNumCompleted\n.getAndIncrement())\n.complete(null));\nresourceEventHandlerBuilder.setOnWorkerTerminatedConsumer(\n(ignore1, ignore2) ->\ngetDriver().requestResource(testingTaskExecutorProcessSpec));\nrunTest(\n() -> {\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec));\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nresourceManagerClientCallbackHandler.onContainersCompleted(\nImmutableList.of(completedContainerStatus));\nverifyFutureCompleted(addContainerRequestFutures.get(1));\n});\n}\n};\n}\n@Test\npublic void testOnStartContainerError() throws Exception {\nnew Context() {\n{\naddContainerRequestFutures.add(new CompletableFuture<>());\naddContainerRequestFutures.add(new CompletableFuture<>());\ntestingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(\n(ignored1, ignored2) ->\naddContainerRequestFutures\n.get(\naddContainerRequestFuturesNumCompleted\n.getAndIncrement())\n.complete(null));\nresourceEventHandlerBuilder.setOnWorkerTerminatedConsumer(\n(ignore1, ignore2) ->\ngetDriver().requestResource(testingTaskExecutorProcessSpec));\nrunTest(\n() -> {\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec));\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nnodeManagerClientCallbackHandler.onStartContainerError(\ntestingContainer.getId(), new Exception(\"start error\"));\nverifyFutureCompleted(releaseAssignedContainerFuture);\nverifyFutureCompleted(addContainerRequestFutures.get(1));\n});\n}\n};\n}\n@Test\npublic void testStartWorkerVariousSpec() throws Exception {\nfinal TaskExecutorProcessSpec taskExecutorProcessSpec1 =\nnew TaskExecutorProcessSpec(\nnew CPUResource(1),\nMemorySize.ZERO,\nMemorySize.ZERO,\nMemorySize.ofMebiBytes(50),\nMemorySize.ofMebiBytes(50),\nMemorySize.ofMebiBytes(50),\nMemorySize.ofMebiBytes(50),\nMemorySize.ZERO,\nMemorySize.ZERO,\nCollections.emptyList());\nfinal TaskExecutorProcessSpec taskExecutorProcessSpec2 =\nnew TaskExecutorProcessSpec(\nnew CPUResource(2),\nMemorySize.ZERO,\nMemorySize.ZERO,\nMemorySize.ofMebiBytes(500),\nMemorySize.ofMebiBytes(500),\nMemorySize.ofMebiBytes(500),\nMemorySize.ofMebiBytes(500),\nMemorySize.ZERO,\nMemorySize.ZERO,\nCollections.emptyList());\nnew Context() {\n{\nfinal String startCommand1 =\nTaskManagerOptions.TASK_HEAP_MEMORY.key() + \"=\" + (50L << 20);\nfinal String startCommand2 =\nTaskManagerOptions.TASK_HEAP_MEMORY.key() + \"=\" + (100L << 20);\nfinal CompletableFuture startContainerAsyncCommandFuture1 =\nnew CompletableFuture<>();\nfinal CompletableFuture startContainerAsyncCommandFuture2 =\nnew CompletableFuture<>();\nprepareForTestStartTaskExecutorProcessVariousSpec(\nstartCommand1,\nstartCommand2,\nstartContainerAsyncCommandFuture1,\nstartContainerAsyncCommandFuture2,\ntaskExecutorProcessSpec1);\ntestingYarnAMRMClientAsyncBuilder.setGetMatchingRequestsFunction(\ntuple -> {\nfinal Priority priority = tuple.f0;\nfinal List matchingRequests =\nnew ArrayList<>();\nfor (CompletableFuture\naddContainerRequestFuture : addContainerRequestFutures) {\nfinal AMRMClient.ContainerRequest request =\naddContainerRequestFuture.getNow(null);\nif (request != null && priority.equals(request.getPriority())) {\nassertThat(tuple.f2).isEqualTo(request.getCapability());\nmatchingRequests.add(request);\n}\n}\nreturn Collections.singletonList(matchingRequests);\n});\nrunTest(\n() -> {\nfinal 
Resource containerResource1 =\n((YarnResourceManagerDriver) getDriver())\n.getContainerResource(taskExecutorProcessSpec1)\n.get();\nfinal Resource containerResource2 =\n((YarnResourceManagerDriver) getDriver())\n.getContainerResource(taskExecutorProcessSpec2)\n.get();\nassertThat(containerResource2).isNotEqualTo(containerResource1);\nrunInMainThread(\n() -> getDriver().requestResource(taskExecutorProcessSpec1));\nrunInMainThread(\n() -> getDriver().requestResource(taskExecutorProcessSpec2));\nverifyFutureCompleted(addContainerRequestFutures.get(0));\nverifyFutureCompleted(addContainerRequestFutures.get(1));\nContainer container1 =\ncreateTestingContainerWithResource(containerResource1);\nresourceManagerClientCallbackHandler.onContainersAllocated(\nCollections.singletonList(container1));\nverifyFutureCompleted(startContainerAsyncCommandFuture1);\nassertFalse(startContainerAsyncCommandFuture2.isDone());\nContainerStatus testingContainerStatus =\ncreateTestingContainerCompletedStatus(container1.getId());\nresourceManagerClientCallbackHandler.onContainersCompleted(\nCollections.singletonList(testingContainerStatus));\nverifyFutureCompleted(addContainerRequestFutures.get(2));\nassertThat(addContainerRequestFutures.get(2).get().getCapability())\n.isEqualTo(containerResource1);\nassertFalse(addContainerRequestFutures.get(3).isDone());\n});\n}\n};\n}\nprivate boolean containsStartCommand(\nContainerLaunchContext containerLaunchContext, String command) {\nreturn containerLaunchContext.getCommands().stream().anyMatch(str -> str.contains(command));\n}\nprivate static Container createTestingContainerWithResource(\nResource resource, Priority priority, int containerIdx) {\nfinal ContainerId containerId =\nContainerId.newInstance(\nApplicationAttemptId.newInstance(\nApplicationId.newInstance(System.currentTimeMillis(), 1), 1),\ncontainerIdx);\nfinal NodeId nodeId = NodeId.newInstance(\"container\", 1234);\nreturn new TestingContainer(containerId, nodeId, resource, priority);\n}\nprivate class Context extends ResourceManagerDriverTestBase.Context {\nprivate final CompletableFuture stopAndCleanupClusterFuture =\nnew CompletableFuture<>();\nprivate final CompletableFuture createTaskManagerContainerFuture =\nnew CompletableFuture<>();\nprotected final CompletableFuture stopContainerAsyncFuture =\nnew CompletableFuture<>();\nfinal List> addContainerRequestFutures =\nnew ArrayList<>();\nfinal AtomicInteger addContainerRequestFuturesNumCompleted = new AtomicInteger(0);\nfinal CompletableFuture removeContainerRequestFuture = new CompletableFuture<>();\nfinal CompletableFuture releaseAssignedContainerFuture = new CompletableFuture<>();\nfinal CompletableFuture startContainerAsyncFuture = new CompletableFuture<>();\nfinal CompletableFuture resourceManagerClientInitFuture = new CompletableFuture<>();\nfinal CompletableFuture resourceManagerClientStartFuture = new CompletableFuture<>();\nfinal CompletableFuture resourceManagerClientStopFuture = new CompletableFuture<>();\nfinal CompletableFuture nodeManagerClientInitFuture = new CompletableFuture<>();\nfinal CompletableFuture nodeManagerClientStartFuture = new CompletableFuture<>();\nfinal CompletableFuture nodeManagerClientStopFuture = new CompletableFuture<>();\nAMRMClientAsync.CallbackHandler resourceManagerClientCallbackHandler;\nNMClientAsync.CallbackHandler nodeManagerClientCallbackHandler;\nTestingYarnNMClientAsync testingYarnNMClientAsync;\nTestingYarnAMRMClientAsync testingYarnAMRMClientAsync;\nfinal TestingYarnNMClientAsync.Builder 
testingYarnNMClientAsyncBuilder =\nTestingYarnNMClientAsync.builder()\n.setStartContainerAsyncConsumer(\n(ignored1, ignored2, ignored3) ->\nstartContainerAsyncFuture.complete(null))\n.setStopContainerAsyncConsumer(\n(ignored1, ignored2, ignored3) ->\nstopContainerAsyncFuture.complete(null))\n.setClientInitRunnable(() -> nodeManagerClientInitFuture.complete(null))\n.setClientStartRunnable(() -> nodeManagerClientStartFuture.complete(null))\n.setClientStopRunnable(() -> nodeManagerClientStopFuture.complete(null));\nfinal TestingYarnAMRMClientAsync.Builder testingYarnAMRMClientAsyncBuilder =\nTestingYarnAMRMClientAsync.builder()\n.setAddContainerRequestConsumer(\n(request, handler) -> {\ncreateTaskManagerContainerFuture.complete(\nrequest.getCapability());\nresourceManagerClientCallbackHandler.onContainersAllocated(\nCollections.singletonList(testingContainer));\n})\n.setGetMatchingRequestsFunction(\nignored ->\nCollections.singletonList(\nCollections.singletonList(\nContainerRequestReflector.INSTANCE\n.getContainerRequest(\ntestingResource,\nPriority.UNDEFINED,\nnull))))\n.setRemoveContainerRequestConsumer(\n(request, handler) -> removeContainerRequestFuture.complete(null))\n.setReleaseAssignedContainerConsumer(\n(ignored1, ignored2) ->\nreleaseAssignedContainerFuture.complete(null))\n.setUnregisterApplicationMasterConsumer(\n(ignore1, ignore2, ignore3) ->\nstopAndCleanupClusterFuture.complete(null))\n.setClientInitRunnable(() -> resourceManagerClientInitFuture.complete(null))\n.setClientStartRunnable(\n() -> resourceManagerClientStartFuture.complete(null))\n.setClientStopRunnable(\n() -> resourceManagerClientStopFuture.complete(null));\nfinal TestingYarnResourceManagerClientFactory testingYarnResourceManagerClientFactory =\nnew TestingYarnResourceManagerClientFactory(\n((integer, handler) -> {\nresourceManagerClientCallbackHandler = handler;\ntestingYarnAMRMClientAsync =\ntestingYarnAMRMClientAsyncBuilder.build(handler);\nreturn testingYarnAMRMClientAsync;\n}));\nprivate TestingYarnNodeManagerClientFactory testingYarnNodeManagerClientFactory =\nnew TestingYarnNodeManagerClientFactory(\n(handler -> {\nnodeManagerClientCallbackHandler = handler;\ntestingYarnNMClientAsync =\ntestingYarnNMClientAsyncBuilder.build(handler);\nreturn testingYarnNMClientAsync;\n}));\nfinal Map env = new HashMap<>();\nprivate int containerIdx = 0;\nprotected void resetYarnNodeManagerClientFactory() {\ntestingYarnNodeManagerClientFactory =\nnew TestingYarnNodeManagerClientFactory(\n(handler -> {\nnodeManagerClientCallbackHandler = handler;\ntestingYarnNMClientAsync =\ntestingYarnNMClientAsyncBuilder.build(handler);\nreturn testingYarnNMClientAsync;\n}));\n}\n@Override\nprotected void prepareRunTest() throws Exception {\nFile home = new File(tmpFolder, \"home\");\nboolean created = home.mkdir();\nassertThat(created).isTrue();\nenv.put(ENV_APP_ID, \"foo\");\nenv.put(ENV_CLIENT_HOME_DIR, home.getAbsolutePath());\nenv.put(ENV_CLIENT_SHIP_FILES, \"\");\nenv.put(ENV_FLINK_CLASSPATH, \"\");\nenv.put(ENV_HADOOP_USER_NAME, \"foo\");\nenv.putIfAbsent(FLINK_YARN_FILES, \"\");\nenv.put(\nFLINK_DIST_JAR,\nnew YarnLocalResourceDescriptor(\n\"flink.jar\",\nnew Path(\"/tmp/flink.jar\"),\n0,\nSystem.currentTimeMillis(),\nLocalResourceVisibility.APPLICATION,\nLocalResourceType.FILE)\n.toString());\nenv.put(ApplicationConstants.Environment.PWD.key(), home.getAbsolutePath());\nBootstrapTools.writeConfiguration(\nflinkConfig, new File(home.getAbsolutePath(), FLINK_CONF_FILENAME));\n}\n@Override\nprotected void 
preparePreviousAttemptWorkers() {\ntestingYarnAMRMClientAsyncBuilder.setRegisterApplicationMasterFunction(\n(ignored1, ignored2, ignored3) ->\nnew TestingRegisterApplicationMasterResponse(\n() -> Collections.singletonList(testingContainer)));\n}\n@Override\nprotected ResourceManagerDriver createResourceManagerDriver() {\nreturn new YarnResourceManagerDriver(\nflinkConfig,\nnew YarnResourceManagerDriverConfiguration(env, \"localhost:9000\", null),\ntestingYarnResourceManagerClientFactory,\ntestingYarnNodeManagerClientFactory);\n}\n@Override\nprotected void validateInitialization() throws Exception {\nassertThat(testingYarnAMRMClientAsync).isNotNull();\nassertThat(testingYarnNMClientAsync).isNotNull();\nverifyFutureCompleted(nodeManagerClientInitFuture);\nverifyFutureCompleted(nodeManagerClientStartFuture);\nverifyFutureCompleted(resourceManagerClientInitFuture);\nverifyFutureCompleted(resourceManagerClientStartFuture);\n}\n@Override\nprotected void validateWorkersRecoveredFromPreviousAttempt(\nCollection workers) {\nassumeTrue(HadoopUtils.isMinHadoopVersion(2, 2));\nassertThat(workers.size()).isEqualTo(1);\nfinal ResourceID resourceId = workers.iterator().next().getResourceID();\nassertThat(resourceId.toString()).isEqualTo(testingContainer.getId().toString());\n}\n@Override\nprotected void validateTermination() throws Exception {\nverifyFutureCompleted(nodeManagerClientStopFuture);\nverifyFutureCompleted(resourceManagerClientStopFuture);\n}\n@Override\nprotected void validateDeregisterApplication() throws Exception {\nverifyFutureCompleted(stopAndCleanupClusterFuture);\n}\n@Override\nprotected void validateRequestedResources(\nCollection taskExecutorProcessSpecs) throws Exception {\nassertThat(taskExecutorProcessSpecs.size()).isEqualTo(1);\nfinal TaskExecutorProcessSpec taskExecutorProcessSpec =\ntaskExecutorProcessSpecs.iterator().next();\nfinal Resource resource =\ncreateTaskManagerContainerFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);\nassertThat(resource.getMemorySize())\n.isEqualTo(taskExecutorProcessSpec.getTotalProcessMemorySize().getMebiBytes());\nassertThat(resource.getVirtualCores())\n.isEqualTo(taskExecutorProcessSpec.getCpuCores().getValue().intValue());\nverifyFutureCompleted(removeContainerRequestFuture);\n}\n@Override\nprotected void validateReleaseResources(Collection workerNodes)\nthrows Exception {\nassertThat(workerNodes.size()).isEqualTo(1);\nverifyFutureCompleted(stopContainerAsyncFuture);\nverifyFutureCompleted(releaseAssignedContainerFuture);\n}\nContainerStatus createTestingContainerCompletedStatus(final ContainerId containerId) {\nreturn new TestingContainerStatus(\ncontainerId, ContainerState.COMPLETE, \"Test exit\", -1);\n}\nContainer createTestingContainerWithResource(Resource resource) {\nreturn YarnResourceManagerDriverTest.createTestingContainerWithResource(\nresource, testingPriority, containerIdx++);\n}\n void verifyFutureCompleted(CompletableFuture future) throws Exception {\nfuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);\n}\nvoid prepareForTestStartTaskExecutorProcessVariousSpec(\nString startCommand1,\nString startCommand2,\nCompletableFuture startContainerAsyncCommandFuture1,\nCompletableFuture startContainerAsyncCommandFuture2,\nTaskExecutorProcessSpec taskExecutorProcessSpec) {\naddContainerRequestFutures.add(new CompletableFuture<>());\naddContainerRequestFutures.add(new CompletableFuture<>());\naddContainerRequestFutures.add(new CompletableFuture<>());\naddContainerRequestFutures.add(new 
CompletableFuture<>());\ntestingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(\n(request, ignored) ->\naddContainerRequestFutures\n.get(addContainerRequestFuturesNumCompleted.getAndIncrement())\n.complete(request));\ntestingYarnNMClientAsyncBuilder.setStartContainerAsyncConsumer(\n(ignored1, context, ignored3) -> {\nif (containsStartCommand(context, startCommand1)) {\nstartContainerAsyncCommandFuture1.complete(null);\n} else if (containsStartCommand(context, startCommand2)) {\nstartContainerAsyncCommandFuture2.complete(null);\n}\n});\nresourceEventHandlerBuilder.setOnWorkerTerminatedConsumer(\n(ignore1, ignore2) -> getDriver().requestResource(taskExecutorProcessSpec));\n}\n}\n@Test\npublic void testGetContainerCompletedCauseForSuccess() {\nContainerStatus containerStatus = createSuccessfulCompletedContainerStatus();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\"Container %s exited normally.\", containerStatus.getContainerId()));\n}\nprivate ContainerStatus createSuccessfulCompletedContainerStatus() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(),\nContainerState.COMPLETE,\n\"success exit code\",\nContainerExitStatus.SUCCESS);\n}\n@Test\npublic void testGetContainerCompletedCauseForPreempted() {\nContainerStatus containerStatus = createCompletedContainerStatusBecauseItWasPreempted();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\n\"Container %s was preempted by yarn.\", containerStatus.getContainerId()));\n}\nprivate ContainerStatus createCompletedContainerStatusBecauseItWasPreempted() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(),\nContainerState.COMPLETE,\n\"preempted exit code\",\nContainerExitStatus.PREEMPTED);\n}\n@Test\npublic void testGetContainerCompletedCauseForInvalid() {\nContainerStatus containerStatus = createCompletedContainerStatusBecauseItWasInvalid();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\"Container %s was invalid.\", containerStatus.getContainerId()));\n}\nprivate ContainerStatus createCompletedContainerStatusBecauseItWasInvalid() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(),\nContainerState.COMPLETE,\n\"invalid exit code\",\nContainerExitStatus.INVALID);\n}\n@Test\npublic void testGetContainerCompletedCauseForAborted() {\nContainerStatus containerStatus = createCompletedContainerStatusBecauseItWasAborted();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\n\"Container %s killed by YARN, either due to being released by the application or being 'lost' due to node failures etc.\",\ncontainerStatus.getContainerId()));\n}\nprivate ContainerStatus createCompletedContainerStatusBecauseItWasAborted() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(),\nContainerState.COMPLETE,\n\"aborted exit code\",\nContainerExitStatus.ABORTED);\n}\n@Test\npublic void testGetContainerCompletedCauseForDiskFailed() {\nContainerStatus containerStatus = createCompletedContainerStatusBecauseDisksFailed();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\n\"Container %s is failed because threshold number of the nodemanager-local-directories or\"\n+ \" threshold number of the nodemanager-log-directories have become bad.\",\ncontainerStatus.getContainerId()));\n}\nprivate ContainerStatus createCompletedContainerStatusBecauseDisksFailed() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(),\nContainerState.COMPLETE,\n\"disk failed exit 
code\",\nContainerExitStatus.DISKS_FAILED);\n}\n@Test\npublic void testGetContainerCompletedCauseForUnknown() {\nContainerStatus containerStatus = createCompletedContainerStatusForUnknownCause();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\n\"Container %s marked as failed.\\n Exit code:%s.\",\ncontainerStatus.getContainerId(), containerStatus.getExitStatus()));\n}\nprivate ContainerStatus createCompletedContainerStatusForUnknownCause() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(), ContainerState.COMPLETE, \"unknown exit code\", -1);\n}\npublic void testingGetContainerCompletedCause(\nContainerStatus containerStatus, String expectedCompletedCause) {\nfinal String containerCompletedCause =\nYarnResourceManagerDriver.getContainerCompletedCause(containerStatus);\nassertThat(containerCompletedCause).containsSequence(expectedCompletedCause);\nassertThat(containerCompletedCause).containsSequence(containerStatus.getDiagnostics());\n}\n}", + "context_after": "class YarnResourceManagerDriverTest extends ResourceManagerDriverTestBase {\nprivate static final Logger log = LoggerFactory.getLogger(YarnResourceManagerDriverTest.class);\nprivate static final Resource testingResource =\nResource.newInstance(\nYarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,\nYarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);\nprivate static final Priority testingPriority = Priority.newInstance(1);\nprivate static final Container testingContainer =\ncreateTestingContainerWithResource(testingResource, testingPriority, 1);\nprivate static final TaskExecutorProcessSpec testingTaskExecutorProcessSpec =\nnew TaskExecutorProcessSpec(\nnew CPUResource(1),\nMemorySize.ZERO,\nMemorySize.ZERO,\nMemorySize.ofMebiBytes(256),\nMemorySize.ofMebiBytes(256),\nMemorySize.ofMebiBytes(256),\nMemorySize.ofMebiBytes(256),\nMemorySize.ZERO,\nMemorySize.ZERO,\nCollections.emptyList());\n@TempDir private java.nio.file.Path tmpFolder;\n@Override\nprotected Context createContext() {\nreturn new Context();\n}\n@Test\n@Test\npublic void testShutdownRequestCausesFatalError() throws Exception {\nnew Context() {\n{\nfinal CompletableFuture throwableCompletableFuture =\nnew CompletableFuture<>();\nresourceEventHandlerBuilder.setOnErrorConsumer(\nthrowableCompletableFuture::complete);\nrunTest(\n() -> {\nresourceManagerClientCallbackHandler.onShutdownRequest();\nThrowable throwable =\nthrowableCompletableFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);\nassertThat(throwable)\n.satisfies(anyCauseMatches(ResourceManagerException.class))\n.satisfies(anyCauseMatches(ERROR_MESSAGE_ON_SHUTDOWN_REQUEST));\n});\n}\n};\n}\n@Test\npublic void testTerminationDoesNotBlock() throws Exception {\nnew Context() {\n{\nrunTest(\n() -> {\ntry {\nrunInMainThread(() -> getDriver().terminate());\n} catch (Exception ex) {\nlog.error(\"cannot terminate driver\", ex);\nfail(\"termination of driver failed\");\n}\n});\n}\n};\n}\n@Test\npublic void testTerminationWaitsOnContainerStopSuccess() throws Exception {\nnew Context() {\n{\nfinal CompletableFuture containerIdFuture = new CompletableFuture<>();\ntestingYarnNMClientAsyncBuilder.setStopContainerAsyncConsumer(\n(containerId, ignored, callbackHandler) ->\ncontainerIdFuture.complete(containerId));\nresetYarnNodeManagerClientFactory();\nrunTest(\n() -> {\nfinal CompletableFuture yarnWorkerFuture =\nrunInMainThread(\n() 
->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec))\n.thenCompose(Function.identity());\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nfinal YarnWorkerNode worker = yarnWorkerFuture.get();\nfinal CompletableFuture driverHasTerminatedFuture =\nrunInMainThread(\n() -> {\ngetDriver().releaseResource(worker);\ngetDriver().terminate();\n});\nassertThatThrownBy(\n() ->\ndriverHasTerminatedFuture.get(\n20, TimeUnit.MILLISECONDS))\n.isInstanceOf(TimeoutException.class);\nnodeManagerClientCallbackHandler.onContainerStopped(\ncontainerIdFuture.get());\ndriverHasTerminatedFuture.get();\n});\n}\n};\n}\n@Test\npublic void testTerminationWaitsOnContainerStopError() throws Exception {\nnew Context() {\n{\nfinal CompletableFuture containerIdFuture = new CompletableFuture<>();\ntestingYarnNMClientAsyncBuilder.setStopContainerAsyncConsumer(\n(containerId, ignored, callbackHandler) ->\ncontainerIdFuture.complete(containerId));\nresetYarnNodeManagerClientFactory();\nrunTest(\n() -> {\nfinal CompletableFuture yarnWorkerFuture =\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec))\n.thenCompose(Function.identity());\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nfinal YarnWorkerNode worker = yarnWorkerFuture.get();\nfinal CompletableFuture driverHasTerminatedFuture =\nrunInMainThread(\n() -> {\ngetDriver().releaseResource(worker);\ngetDriver().terminate();\n});\nassertThatThrownBy(\n() ->\ndriverHasTerminatedFuture.get(\n20, TimeUnit.MILLISECONDS))\n.isInstanceOf(TimeoutException.class);\nnodeManagerClientCallbackHandler.onStopContainerError(\ncontainerIdFuture.get(), null);\ndriverHasTerminatedFuture.get();\n});\n}\n};\n}\n/**\n* Tests that application files are deleted when the YARN application master is de-registered.\n*/\n@Test\npublic void testDeleteApplicationFiles() throws Exception {\nnew Context() {\n{\nfinal File applicationDir = Files.createTempDirectory(tmpFolder, \".flink\").toFile();\nenv.put(FLINK_YARN_FILES, applicationDir.getCanonicalPath());\nrunTest(\n() -> {\ngetDriver().deregisterApplication(ApplicationStatus.SUCCEEDED, null);\nassertThat(applicationDir.toPath()).doesNotExist();\n});\n}\n};\n}\n@Test\npublic void testOnContainerAllocated() throws Exception {\nnew Context() {\n{\naddContainerRequestFutures.add(new CompletableFuture<>());\ntestingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(\n(ignored1, ignored2) ->\naddContainerRequestFutures\n.get(\naddContainerRequestFuturesNumCompleted\n.getAndIncrement())\n.complete(null));\nrunTest(\n() -> {\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec));\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nverifyFutureCompleted(addContainerRequestFutures.get(0));\nverifyFutureCompleted(removeContainerRequestFuture);\nverifyFutureCompleted(startContainerAsyncFuture);\n});\n}\n};\n}\n@Test\npublic void testOnSuccessfulContainerCompleted() throws Exception {\nrunTestOnContainerCompleted(createSuccessfulCompletedContainerStatus());\n}\n@Test\npublic void testOnContainerCompletedBecauseDisksFailed() throws Exception {\nrunTestOnContainerCompleted(createCompletedContainerStatusBecauseDisksFailed());\n}\n@Test\npublic void testOnContainerCompletedBecauseItWasAborted() throws Exception {\nrunTestOnContainerCompleted(createCompletedContainerStatusBecauseItWasAborted());\n}\n@Test\npublic void 
testOnContainerCompletedBecauseItWasInvalid() throws Exception {\nrunTestOnContainerCompleted(createCompletedContainerStatusBecauseItWasInvalid());\n}\n@Test\npublic void testOnContainerCompletedForUnknownCause() throws Exception {\nrunTestOnContainerCompleted(createCompletedContainerStatusForUnknownCause());\n}\n@Test\npublic void testOnContainerCompletedBecauseItWasPreempted() throws Exception {\nrunTestOnContainerCompleted(createCompletedContainerStatusBecauseItWasPreempted());\n}\npublic void runTestOnContainerCompleted(ContainerStatus completedContainerStatus)\nthrows Exception {\nnew Context() {\n{\naddContainerRequestFutures.add(new CompletableFuture<>());\naddContainerRequestFutures.add(new CompletableFuture<>());\ntestingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(\n(ignored1, ignored2) ->\naddContainerRequestFutures\n.get(\naddContainerRequestFuturesNumCompleted\n.getAndIncrement())\n.complete(null));\nresourceEventHandlerBuilder.setOnWorkerTerminatedConsumer(\n(ignore1, ignore2) ->\ngetDriver().requestResource(testingTaskExecutorProcessSpec));\nrunTest(\n() -> {\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec));\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nresourceManagerClientCallbackHandler.onContainersCompleted(\nImmutableList.of(completedContainerStatus));\nverifyFutureCompleted(addContainerRequestFutures.get(1));\n});\n}\n};\n}\n@Test\npublic void testOnStartContainerError() throws Exception {\nnew Context() {\n{\naddContainerRequestFutures.add(new CompletableFuture<>());\naddContainerRequestFutures.add(new CompletableFuture<>());\ntestingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(\n(ignored1, ignored2) ->\naddContainerRequestFutures\n.get(\naddContainerRequestFuturesNumCompleted\n.getAndIncrement())\n.complete(null));\nresourceEventHandlerBuilder.setOnWorkerTerminatedConsumer(\n(ignore1, ignore2) ->\ngetDriver().requestResource(testingTaskExecutorProcessSpec));\nrunTest(\n() -> {\nrunInMainThread(\n() ->\ngetDriver()\n.requestResource(\ntestingTaskExecutorProcessSpec));\nresourceManagerClientCallbackHandler.onContainersAllocated(\nImmutableList.of(testingContainer));\nnodeManagerClientCallbackHandler.onStartContainerError(\ntestingContainer.getId(), new Exception(\"start error\"));\nverifyFutureCompleted(releaseAssignedContainerFuture);\nverifyFutureCompleted(addContainerRequestFutures.get(1));\n});\n}\n};\n}\n@Test\npublic void testStartWorkerVariousSpec() throws Exception {\nfinal TaskExecutorProcessSpec taskExecutorProcessSpec1 =\nnew TaskExecutorProcessSpec(\nnew CPUResource(1),\nMemorySize.ZERO,\nMemorySize.ZERO,\nMemorySize.ofMebiBytes(50),\nMemorySize.ofMebiBytes(50),\nMemorySize.ofMebiBytes(50),\nMemorySize.ofMebiBytes(50),\nMemorySize.ZERO,\nMemorySize.ZERO,\nCollections.emptyList());\nfinal TaskExecutorProcessSpec taskExecutorProcessSpec2 =\nnew TaskExecutorProcessSpec(\nnew CPUResource(2),\nMemorySize.ZERO,\nMemorySize.ZERO,\nMemorySize.ofMebiBytes(500),\nMemorySize.ofMebiBytes(500),\nMemorySize.ofMebiBytes(500),\nMemorySize.ofMebiBytes(500),\nMemorySize.ZERO,\nMemorySize.ZERO,\nCollections.emptyList());\nnew Context() {\n{\nfinal String startCommand1 =\nTaskManagerOptions.TASK_HEAP_MEMORY.key() + \"=\" + (50L << 20);\nfinal String startCommand2 =\nTaskManagerOptions.TASK_HEAP_MEMORY.key() + \"=\" + (100L << 20);\nfinal CompletableFuture startContainerAsyncCommandFuture1 =\nnew CompletableFuture<>();\nfinal CompletableFuture 
startContainerAsyncCommandFuture2 =\nnew CompletableFuture<>();\nprepareForTestStartTaskExecutorProcessVariousSpec(\nstartCommand1,\nstartCommand2,\nstartContainerAsyncCommandFuture1,\nstartContainerAsyncCommandFuture2,\ntaskExecutorProcessSpec1);\ntestingYarnAMRMClientAsyncBuilder.setGetMatchingRequestsFunction(\ntuple -> {\nfinal Priority priority = tuple.f0;\nfinal List matchingRequests =\nnew ArrayList<>();\nfor (CompletableFuture\naddContainerRequestFuture : addContainerRequestFutures) {\nfinal AMRMClient.ContainerRequest request =\naddContainerRequestFuture.getNow(null);\nif (request != null && priority.equals(request.getPriority())) {\nassertThat(tuple.f2).isEqualTo(request.getCapability());\nmatchingRequests.add(request);\n}\n}\nreturn Collections.singletonList(matchingRequests);\n});\nrunTest(\n() -> {\nfinal Resource containerResource1 =\n((YarnResourceManagerDriver) getDriver())\n.getContainerResource(taskExecutorProcessSpec1)\n.get();\nfinal Resource containerResource2 =\n((YarnResourceManagerDriver) getDriver())\n.getContainerResource(taskExecutorProcessSpec2)\n.get();\nassertThat(containerResource2).isNotEqualTo(containerResource1);\nrunInMainThread(\n() -> getDriver().requestResource(taskExecutorProcessSpec1));\nrunInMainThread(\n() -> getDriver().requestResource(taskExecutorProcessSpec2));\nverifyFutureCompleted(addContainerRequestFutures.get(0));\nverifyFutureCompleted(addContainerRequestFutures.get(1));\nContainer container1 =\ncreateTestingContainerWithResource(containerResource1);\nresourceManagerClientCallbackHandler.onContainersAllocated(\nCollections.singletonList(container1));\nverifyFutureCompleted(startContainerAsyncCommandFuture1);\nassertThat(startContainerAsyncCommandFuture2.isDone()).isFalse();\nContainerStatus testingContainerStatus =\ncreateTestingContainerCompletedStatus(container1.getId());\nresourceManagerClientCallbackHandler.onContainersCompleted(\nCollections.singletonList(testingContainerStatus));\nverifyFutureCompleted(addContainerRequestFutures.get(2));\nassertThat(addContainerRequestFutures.get(2).get().getCapability())\n.isEqualTo(containerResource1);\nassertThat(addContainerRequestFutures.get(3).isDone()).isFalse();\n});\n}\n};\n}\nprivate boolean containsStartCommand(\nContainerLaunchContext containerLaunchContext, String command) {\nreturn containerLaunchContext.getCommands().stream().anyMatch(str -> str.contains(command));\n}\nprivate static Container createTestingContainerWithResource(\nResource resource, Priority priority, int containerIdx) {\nfinal ContainerId containerId =\nContainerId.newInstance(\nApplicationAttemptId.newInstance(\nApplicationId.newInstance(System.currentTimeMillis(), 1), 1),\ncontainerIdx);\nfinal NodeId nodeId = NodeId.newInstance(\"container\", 1234);\nreturn new TestingContainer(containerId, nodeId, resource, priority);\n}\nprivate class Context extends ResourceManagerDriverTestBase.Context {\nprivate final CompletableFuture stopAndCleanupClusterFuture =\nnew CompletableFuture<>();\nprivate final CompletableFuture createTaskManagerContainerFuture =\nnew CompletableFuture<>();\nprotected final CompletableFuture stopContainerAsyncFuture =\nnew CompletableFuture<>();\nfinal List> addContainerRequestFutures =\nnew ArrayList<>();\nfinal AtomicInteger addContainerRequestFuturesNumCompleted = new AtomicInteger(0);\nfinal CompletableFuture removeContainerRequestFuture = new CompletableFuture<>();\nfinal CompletableFuture releaseAssignedContainerFuture = new CompletableFuture<>();\nfinal CompletableFuture 
startContainerAsyncFuture = new CompletableFuture<>();\nfinal CompletableFuture resourceManagerClientInitFuture = new CompletableFuture<>();\nfinal CompletableFuture resourceManagerClientStartFuture = new CompletableFuture<>();\nfinal CompletableFuture resourceManagerClientStopFuture = new CompletableFuture<>();\nfinal CompletableFuture nodeManagerClientInitFuture = new CompletableFuture<>();\nfinal CompletableFuture nodeManagerClientStartFuture = new CompletableFuture<>();\nfinal CompletableFuture nodeManagerClientStopFuture = new CompletableFuture<>();\nAMRMClientAsync.CallbackHandler resourceManagerClientCallbackHandler;\nNMClientAsync.CallbackHandler nodeManagerClientCallbackHandler;\nTestingYarnNMClientAsync testingYarnNMClientAsync;\nTestingYarnAMRMClientAsync testingYarnAMRMClientAsync;\nfinal TestingYarnNMClientAsync.Builder testingYarnNMClientAsyncBuilder =\nTestingYarnNMClientAsync.builder()\n.setStartContainerAsyncConsumer(\n(ignored1, ignored2, ignored3) ->\nstartContainerAsyncFuture.complete(null))\n.setStopContainerAsyncConsumer(\n(ignored1, ignored2, ignored3) ->\nstopContainerAsyncFuture.complete(null))\n.setClientInitRunnable(() -> nodeManagerClientInitFuture.complete(null))\n.setClientStartRunnable(() -> nodeManagerClientStartFuture.complete(null))\n.setClientStopRunnable(() -> nodeManagerClientStopFuture.complete(null));\nfinal TestingYarnAMRMClientAsync.Builder testingYarnAMRMClientAsyncBuilder =\nTestingYarnAMRMClientAsync.builder()\n.setAddContainerRequestConsumer(\n(request, handler) -> {\ncreateTaskManagerContainerFuture.complete(\nrequest.getCapability());\nresourceManagerClientCallbackHandler.onContainersAllocated(\nCollections.singletonList(testingContainer));\n})\n.setGetMatchingRequestsFunction(\nignored ->\nCollections.singletonList(\nCollections.singletonList(\nContainerRequestReflector.INSTANCE\n.getContainerRequest(\ntestingResource,\nPriority.UNDEFINED,\nnull))))\n.setRemoveContainerRequestConsumer(\n(request, handler) -> removeContainerRequestFuture.complete(null))\n.setReleaseAssignedContainerConsumer(\n(ignored1, ignored2) ->\nreleaseAssignedContainerFuture.complete(null))\n.setUnregisterApplicationMasterConsumer(\n(ignore1, ignore2, ignore3) ->\nstopAndCleanupClusterFuture.complete(null))\n.setClientInitRunnable(() -> resourceManagerClientInitFuture.complete(null))\n.setClientStartRunnable(\n() -> resourceManagerClientStartFuture.complete(null))\n.setClientStopRunnable(\n() -> resourceManagerClientStopFuture.complete(null));\nfinal TestingYarnResourceManagerClientFactory testingYarnResourceManagerClientFactory =\nnew TestingYarnResourceManagerClientFactory(\n((integer, handler) -> {\nresourceManagerClientCallbackHandler = handler;\ntestingYarnAMRMClientAsync =\ntestingYarnAMRMClientAsyncBuilder.build(handler);\nreturn testingYarnAMRMClientAsync;\n}));\nprivate TestingYarnNodeManagerClientFactory testingYarnNodeManagerClientFactory =\nnew TestingYarnNodeManagerClientFactory(\n(handler -> {\nnodeManagerClientCallbackHandler = handler;\ntestingYarnNMClientAsync =\ntestingYarnNMClientAsyncBuilder.build(handler);\nreturn testingYarnNMClientAsync;\n}));\nfinal Map env = new HashMap<>();\nprivate int containerIdx = 0;\nprotected void resetYarnNodeManagerClientFactory() {\ntestingYarnNodeManagerClientFactory =\nnew TestingYarnNodeManagerClientFactory(\n(handler -> {\nnodeManagerClientCallbackHandler = handler;\ntestingYarnNMClientAsync =\ntestingYarnNMClientAsyncBuilder.build(handler);\nreturn testingYarnNMClientAsync;\n}));\n}\n@Override\nprotected void 
prepareRunTest() throws Exception {\nFile home = Files.createTempDirectory(tmpFolder, \"home\").toFile();\nenv.put(ENV_APP_ID, \"foo\");\nenv.put(ENV_CLIENT_HOME_DIR, home.getAbsolutePath());\nenv.put(ENV_CLIENT_SHIP_FILES, \"\");\nenv.put(ENV_FLINK_CLASSPATH, \"\");\nenv.put(ENV_HADOOP_USER_NAME, \"foo\");\nenv.putIfAbsent(FLINK_YARN_FILES, \"\");\nenv.put(\nFLINK_DIST_JAR,\nnew YarnLocalResourceDescriptor(\n\"flink.jar\",\nnew Path(\"/tmp/flink.jar\"),\n0,\nSystem.currentTimeMillis(),\nLocalResourceVisibility.APPLICATION,\nLocalResourceType.FILE)\n.toString());\nenv.put(ApplicationConstants.Environment.PWD.key(), home.getAbsolutePath());\nBootstrapTools.writeConfiguration(\nflinkConfig, new File(home.getAbsolutePath(), FLINK_CONF_FILENAME));\n}\n@Override\nprotected void preparePreviousAttemptWorkers() {\ntestingYarnAMRMClientAsyncBuilder.setRegisterApplicationMasterFunction(\n(ignored1, ignored2, ignored3) ->\nnew TestingRegisterApplicationMasterResponse(\n() -> Collections.singletonList(testingContainer)));\n}\n@Override\nprotected ResourceManagerDriver createResourceManagerDriver() {\nreturn new YarnResourceManagerDriver(\nflinkConfig,\nnew YarnResourceManagerDriverConfiguration(env, \"localhost:9000\", null),\ntestingYarnResourceManagerClientFactory,\ntestingYarnNodeManagerClientFactory);\n}\n@Override\nprotected void validateInitialization() throws Exception {\nassertThat(testingYarnAMRMClientAsync).isNotNull();\nassertThat(testingYarnNMClientAsync).isNotNull();\nverifyFutureCompleted(nodeManagerClientInitFuture);\nverifyFutureCompleted(nodeManagerClientStartFuture);\nverifyFutureCompleted(resourceManagerClientInitFuture);\nverifyFutureCompleted(resourceManagerClientStartFuture);\n}\n@Override\nprotected void validateWorkersRecoveredFromPreviousAttempt(\nCollection workers) {\nassumeThat(HadoopUtils.isMinHadoopVersion(2, 2)).isTrue();\nassertThat(workers).hasSize(1);\nfinal ResourceID resourceId = workers.iterator().next().getResourceID();\nassertThat(resourceId).hasToString(testingContainer.getId().toString());\n}\n@Override\nprotected void validateTermination() throws Exception {\nverifyFutureCompleted(nodeManagerClientStopFuture);\nverifyFutureCompleted(resourceManagerClientStopFuture);\n}\n@Override\nprotected void validateDeregisterApplication() throws Exception {\nverifyFutureCompleted(stopAndCleanupClusterFuture);\n}\n@Override\nprotected void validateRequestedResources(\nCollection taskExecutorProcessSpecs) throws Exception {\nassertThat(taskExecutorProcessSpecs).hasSize(1);\nfinal TaskExecutorProcessSpec taskExecutorProcessSpec =\ntaskExecutorProcessSpecs.iterator().next();\nfinal Resource resource =\ncreateTaskManagerContainerFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);\nassertThat(resource.getMemorySize())\n.isEqualTo(taskExecutorProcessSpec.getTotalProcessMemorySize().getMebiBytes());\nassertThat(resource.getVirtualCores())\n.isEqualTo(taskExecutorProcessSpec.getCpuCores().getValue().intValue());\nverifyFutureCompleted(removeContainerRequestFuture);\n}\n@Override\nprotected void validateReleaseResources(Collection workerNodes)\nthrows Exception {\nassertThat(workerNodes).hasSize(1);\nverifyFutureCompleted(stopContainerAsyncFuture);\nverifyFutureCompleted(releaseAssignedContainerFuture);\n}\nContainerStatus createTestingContainerCompletedStatus(final ContainerId containerId) {\nreturn new TestingContainerStatus(\ncontainerId, ContainerState.COMPLETE, \"Test exit\", -1);\n}\nContainer createTestingContainerWithResource(Resource resource) {\nreturn 
YarnResourceManagerDriverTest.createTestingContainerWithResource(\nresource, testingPriority, containerIdx++);\n}\n void verifyFutureCompleted(CompletableFuture future) throws Exception {\nfuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);\n}\nvoid prepareForTestStartTaskExecutorProcessVariousSpec(\nString startCommand1,\nString startCommand2,\nCompletableFuture startContainerAsyncCommandFuture1,\nCompletableFuture startContainerAsyncCommandFuture2,\nTaskExecutorProcessSpec taskExecutorProcessSpec) {\naddContainerRequestFutures.add(new CompletableFuture<>());\naddContainerRequestFutures.add(new CompletableFuture<>());\naddContainerRequestFutures.add(new CompletableFuture<>());\naddContainerRequestFutures.add(new CompletableFuture<>());\ntestingYarnAMRMClientAsyncBuilder.setAddContainerRequestConsumer(\n(request, ignored) ->\naddContainerRequestFutures\n.get(addContainerRequestFuturesNumCompleted.getAndIncrement())\n.complete(request));\ntestingYarnNMClientAsyncBuilder.setStartContainerAsyncConsumer(\n(ignored1, context, ignored3) -> {\nif (containsStartCommand(context, startCommand1)) {\nstartContainerAsyncCommandFuture1.complete(null);\n} else if (containsStartCommand(context, startCommand2)) {\nstartContainerAsyncCommandFuture2.complete(null);\n}\n});\nresourceEventHandlerBuilder.setOnWorkerTerminatedConsumer(\n(ignore1, ignore2) -> getDriver().requestResource(taskExecutorProcessSpec));\n}\n}\n@Test\npublic void testGetContainerCompletedCauseForSuccess() {\nContainerStatus containerStatus = createSuccessfulCompletedContainerStatus();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\"Container %s exited normally.\", containerStatus.getContainerId()));\n}\nprivate ContainerStatus createSuccessfulCompletedContainerStatus() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(),\nContainerState.COMPLETE,\n\"success exit code\",\nContainerExitStatus.SUCCESS);\n}\n@Test\npublic void testGetContainerCompletedCauseForPreempted() {\nContainerStatus containerStatus = createCompletedContainerStatusBecauseItWasPreempted();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\n\"Container %s was preempted by yarn.\", containerStatus.getContainerId()));\n}\nprivate ContainerStatus createCompletedContainerStatusBecauseItWasPreempted() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(),\nContainerState.COMPLETE,\n\"preempted exit code\",\nContainerExitStatus.PREEMPTED);\n}\n@Test\npublic void testGetContainerCompletedCauseForInvalid() {\nContainerStatus containerStatus = createCompletedContainerStatusBecauseItWasInvalid();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\"Container %s was invalid.\", containerStatus.getContainerId()));\n}\nprivate ContainerStatus createCompletedContainerStatusBecauseItWasInvalid() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(),\nContainerState.COMPLETE,\n\"invalid exit code\",\nContainerExitStatus.INVALID);\n}\n@Test\npublic void testGetContainerCompletedCauseForAborted() {\nContainerStatus containerStatus = createCompletedContainerStatusBecauseItWasAborted();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\n\"Container %s killed by YARN, either due to being released by the application or being 'lost' due to node failures etc.\",\ncontainerStatus.getContainerId()));\n}\nprivate ContainerStatus createCompletedContainerStatusBecauseItWasAborted() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(),\nContainerState.COMPLETE,\n\"aborted exit 
code\",\nContainerExitStatus.ABORTED);\n}\n@Test\npublic void testGetContainerCompletedCauseForDiskFailed() {\nContainerStatus containerStatus = createCompletedContainerStatusBecauseDisksFailed();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\n\"Container %s is failed because threshold number of the nodemanager-local-directories or\"\n+ \" threshold number of the nodemanager-log-directories have become bad.\",\ncontainerStatus.getContainerId()));\n}\nprivate ContainerStatus createCompletedContainerStatusBecauseDisksFailed() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(),\nContainerState.COMPLETE,\n\"disk failed exit code\",\nContainerExitStatus.DISKS_FAILED);\n}\n@Test\npublic void testGetContainerCompletedCauseForUnknown() {\nContainerStatus containerStatus = createCompletedContainerStatusForUnknownCause();\ntestingGetContainerCompletedCause(\ncontainerStatus,\nString.format(\n\"Container %s marked as failed.\\n Exit code:%s.\",\ncontainerStatus.getContainerId(), containerStatus.getExitStatus()));\n}\nprivate ContainerStatus createCompletedContainerStatusForUnknownCause() {\nreturn new TestingContainerStatus(\ntestingContainer.getId(), ContainerState.COMPLETE, \"unknown exit code\", -1);\n}\npublic void testingGetContainerCompletedCause(\nContainerStatus containerStatus, String expectedCompletedCause) {\nfinal String containerCompletedCause =\nYarnResourceManagerDriver.getContainerCompletedCause(containerStatus);\nassertThat(containerCompletedCause)\n.contains(expectedCompletedCause, containerStatus.getDiagnostics());\n}\n}" + }, + { + "comment": "Thanks for the suggestion @becketqin ! Implemented in f7c873d", + "method_body": "private boolean offsetCommitEnabledManually() {\nboolean autoCommit =\nprops.containsKey(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG)\n&& Boolean.parseBoolean(\nprops.getProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG));\nboolean commitOnCheckpoint =\nprops.containsKey(KafkaSourceOptions.COMMIT_OFFSETS_ON_CHECKPOINT.key())\n&& Boolean.parseBoolean(\nprops.getProperty(\nKafkaSourceOptions.COMMIT_OFFSETS_ON_CHECKPOINT.key()));\nreturn autoCommit || commitOnCheckpoint;\n}", + "target_code": "props.containsKey(KafkaSourceOptions.COMMIT_OFFSETS_ON_CHECKPOINT.key())", + "method_body_after": "private boolean offsetCommitEnabledManually() {\nboolean autoCommit =\nprops.containsKey(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG)\n&& Boolean.parseBoolean(\nprops.getProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG));\nboolean commitOnCheckpoint =\nprops.containsKey(KafkaSourceOptions.COMMIT_OFFSETS_ON_CHECKPOINT.key())\n&& Boolean.parseBoolean(\nprops.getProperty(\nKafkaSourceOptions.COMMIT_OFFSETS_ON_CHECKPOINT.key()));\nreturn autoCommit || commitOnCheckpoint;\n}", + "context_before": "class KafkaSourceBuilder {\nprivate static final Logger LOG = LoggerFactory.getLogger(KafkaSourceBuilder.class);\nprivate static final String[] REQUIRED_CONFIGS = {ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG};\nprivate KafkaSubscriber subscriber;\nprivate OffsetsInitializer startingOffsetsInitializer;\nprivate OffsetsInitializer stoppingOffsetsInitializer;\nprivate Boundedness boundedness;\nprivate KafkaRecordDeserializationSchema deserializationSchema;\nprotected Properties props;\nKafkaSourceBuilder() {\nthis.subscriber = null;\nthis.startingOffsetsInitializer = OffsetsInitializer.earliest();\nthis.stoppingOffsetsInitializer = new NoStoppingOffsetsInitializer();\nthis.boundedness = Boundedness.CONTINUOUS_UNBOUNDED;\nthis.deserializationSchema = 
null;\nthis.props = new Properties();\n}\n/**\n* Sets the bootstrap servers for the KafkaConsumer of the KafkaSource.\n*\n* @param bootstrapServers the bootstrap servers of the Kafka cluster.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setBootstrapServers(String bootstrapServers) {\nreturn setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);\n}\n/**\n* Sets the consumer group id of the KafkaSource.\n*\n* @param groupId the group id of the KafkaSource.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setGroupId(String groupId) {\nreturn setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);\n}\n/**\n* Set a list of topics the KafkaSource should consume from. All the topics in the list should\n* have existed in the Kafka cluster. Otherwise an exception will be thrown. To allow some of\n* the topics to be created lazily, please use {@link\n*\n* @param topics the list of topics to consume from.\n* @return this KafkaSourceBuilder.\n* @see org.apache.kafka.clients.consumer.KafkaConsumer\n*/\npublic KafkaSourceBuilder setTopics(List topics) {\nensureSubscriberIsNull(\"topics\");\nsubscriber = KafkaSubscriber.getTopicListSubscriber(topics);\nreturn this;\n}\n/**\n* Set a list of topics the KafkaSource should consume from. All the topics in the list should\n* have existed in the Kafka cluster. Otherwise an exception will be thrown. To allow some of\n* the topics to be created lazily, please use {@link\n*\n* @param topics the list of topics to consume from.\n* @return this KafkaSourceBuilder.\n* @see org.apache.kafka.clients.consumer.KafkaConsumer\n*/\npublic KafkaSourceBuilder setTopics(String... topics) {\nreturn setTopics(Arrays.asList(topics));\n}\n/**\n* Set a topic pattern to consume from use the java {@link Pattern}.\n*\n* @param topicPattern the pattern of the topic name to consume from.\n* @return this KafkaSourceBuilder.\n* @see org.apache.kafka.clients.consumer.KafkaConsumer\n*/\npublic KafkaSourceBuilder setTopicPattern(Pattern topicPattern) {\nensureSubscriberIsNull(\"topic pattern\");\nsubscriber = KafkaSubscriber.getTopicPatternSubscriber(topicPattern);\nreturn this;\n}\n/**\n* Set a set of partitions to consume from.\n*\n* @param partitions the set of partitions to consume from.\n* @return this KafkaSourceBuilder.\n* @see org.apache.kafka.clients.consumer.KafkaConsumer\n*/\npublic KafkaSourceBuilder setPartitions(Set partitions) {\nensureSubscriberIsNull(\"partitions\");\nsubscriber = KafkaSubscriber.getPartitionSetSubscriber(partitions);\nreturn this;\n}\n/**\n* Specify from which offsets the KafkaSource should start consume from by providing an {@link\n* OffsetsInitializer}.\n*\n*

The following {@link OffsetsInitializer}s are commonly used and provided out of the box.\n* Users can also implement their own {@link OffsetsInitializer} for custom behaviors.\n*\n*

    \n*
  • {@link OffsetsInitializer\n* also the default {@link OffsetsInitializer} of the KafkaSource for starting offsets.\n*
  • {@link OffsetsInitializer\n*
  • {@link OffsetsInitializer\n* the consumer group.\n*
  • {@link\n* OffsetsInitializer\n* - starting from the committed offsets of the consumer group. If there is no committed\n* offsets, starting from the offsets specified by the {@link\n* org.apache.kafka.clients.consumer.OffsetResetStrategy OffsetResetStrategy}.\n*
  • {@link OffsetsInitializer\n* partition.\n*
  • {@link OffsetsInitializer\n* each partition. Note that the guarantee here is that all the records in Kafka whose\n* {@link org.apache.kafka.clients.consumer.ConsumerRecord\n* the given starting timestamp will be consumed. However, it is possible that some\n* consumer records whose timestamp is smaller than the given starting timestamp are also\n* consumed.\n*
\n*\n* @param startingOffsetsInitializer the {@link OffsetsInitializer} setting the starting offsets\n* for the Source.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setStartingOffsets(\nOffsetsInitializer startingOffsetsInitializer) {\nthis.startingOffsetsInitializer = startingOffsetsInitializer;\nreturn this;\n}\n/**\n* By default the KafkaSource is set to run in {@link Boundedness\n* and thus never stops until the Flink job fails or is canceled. To let the KafkaSource run as\n* a streaming source but still stops at some point, one can set an {@link OffsetsInitializer}\n* to specify the stopping offsets for each partition. When all the partitions have reached\n* their stopping offsets, the KafkaSource will then exit.\n*\n*

This method is different from {@link\n* the stopping offsets with this method, {@link KafkaSource\n* {@link Boundedness\n* specified by the stopping offsets {@link OffsetsInitializer}.\n*\n*

The following {@link OffsetsInitializer} are commonly used and provided out of the box.\n* Users can also implement their own {@link OffsetsInitializer} for custom behaviors.\n*\n*

    \n*
  • {@link OffsetsInitializer\n* the KafkaSource starts to run.\n*
  • {@link OffsetsInitializer\n* consumer group.\n*
  • {@link OffsetsInitializer\n* partition.\n*
  • {@link OffsetsInitializer\n* partition. The guarantee of setting the stopping timestamp is that no Kafka records\n* whose {@link org.apache.kafka.clients.consumer.ConsumerRecord\n* than the given stopping timestamp will be consumed. However, it is possible that some\n* records whose timestamp is smaller than the specified stopping timestamp are not\n* consumed.\n*
\n*\n* @param stoppingOffsetsInitializer The {@link OffsetsInitializer} to specify the stopping\n* offset.\n* @return this KafkaSourceBuilder.\n* @see\n*/\npublic KafkaSourceBuilder setUnbounded(OffsetsInitializer stoppingOffsetsInitializer) {\nthis.boundedness = Boundedness.CONTINUOUS_UNBOUNDED;\nthis.stoppingOffsetsInitializer = stoppingOffsetsInitializer;\nreturn this;\n}\n/**\n* By default the KafkaSource is set to run in {@link Boundedness\n* and thus never stops until the Flink job fails or is canceled. To let the KafkaSource run in\n* {@link Boundedness\n* OffsetsInitializer} to specify the stopping offsets for each partition. When all the\n* partitions have reached their stopping offsets, the KafkaSource will then exit.\n*\n*

This method is different from {@link\n* the stopping offsets with this method, {@link KafkaSource\n* {@link Boundedness\n*\n*

The following {@link OffsetsInitializer} are commonly used and provided out of the box.\n* Users can also implement their own {@link OffsetsInitializer} for custom behaviors.\n*\n*

    \n*
  • {@link OffsetsInitializer\n* the KafkaSource starts to run.\n*
  • {@link OffsetsInitializer\n* consumer group.\n*
  • {@link OffsetsInitializer\n* partition.\n*
  • {@link OffsetsInitializer\n* partition. The guarantee of setting the stopping timestamp is that no Kafka records\n* whose {@link org.apache.kafka.clients.consumer.ConsumerRecord\n* than the given stopping timestamp will be consumed. However, it is possible that some\n* records whose timestamp is smaller than the specified stopping timestamp are not\n* consumed.\n*
\n*\n* @param stoppingOffsetsInitializer the {@link OffsetsInitializer} to specify the stopping\n* offsets.\n* @return this KafkaSourceBuilder.\n* @see\n*/\npublic KafkaSourceBuilder setBounded(OffsetsInitializer stoppingOffsetsInitializer) {\nthis.boundedness = Boundedness.BOUNDED;\nthis.stoppingOffsetsInitializer = stoppingOffsetsInitializer;\nreturn this;\n}\n/**\n* Sets the {@link KafkaRecordDeserializationSchema deserializer} of the {@link\n* org.apache.kafka.clients.consumer.ConsumerRecord ConsumerRecord} for KafkaSource.\n*\n* @param recordDeserializer the deserializer for Kafka {@link\n* org.apache.kafka.clients.consumer.ConsumerRecord ConsumerRecord}.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setDeserializer(\nKafkaRecordDeserializationSchema recordDeserializer) {\nthis.deserializationSchema = recordDeserializer;\nreturn this;\n}\n/**\n* Sets the {@link KafkaRecordDeserializationSchema deserializer} of the {@link\n* org.apache.kafka.clients.consumer.ConsumerRecord ConsumerRecord} for KafkaSource. The given\n* {@link DeserializationSchema} will be used to deserialize the value of ConsumerRecord. The\n* other information (e.g. key) in a ConsumerRecord will be ignored.\n*\n* @param deserializationSchema the {@link DeserializationSchema} to use for deserialization.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setValueOnlyDeserializer(\nDeserializationSchema deserializationSchema) {\nthis.deserializationSchema =\nKafkaRecordDeserializationSchema.valueOnly(deserializationSchema);\nreturn this;\n}\n/**\n* Sets the client id prefix of this KafkaSource.\n*\n* @param prefix the client id prefix to use for this KafkaSource.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setClientIdPrefix(String prefix) {\nreturn setProperty(KafkaSourceOptions.CLIENT_ID_PREFIX.key(), prefix);\n}\n/**\n* Set an arbitrary property for the KafkaSource and KafkaConsumer. The valid keys can be found\n* in {@link ConsumerConfig} and {@link KafkaSourceOptions}.\n*\n*

Note that the following keys will be overridden by the builder when the KafkaSource is\n* created.\n*\n*

    \n*
  • key.deserializer is always set to {@link ByteArrayDeserializer}.\n*
  • value.deserializer is always set to {@link ByteArrayDeserializer}.\n*
  • auto.offset.reset.strategy is overridden by {@link\n* OffsetsInitializer\n* default {@link OffsetsInitializer\n*
  • partition.discovery.interval.ms is overridden to -1 when {@link\n*\n*
\n*\n* @param key the key of the property.\n* @param value the value of the property.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setProperty(String key, String value) {\nprops.setProperty(key, value);\nreturn this;\n}\n/**\n* Set arbitrary properties for the KafkaSource and KafkaConsumer. The valid keys can be found\n* in {@link ConsumerConfig} and {@link KafkaSourceOptions}.\n*\n*

Note that the following keys will be overridden by the builder when the KafkaSource is\n* created.\n*\n*

    \n*
  • key.deserializer is always set to {@link ByteArrayDeserializer}.\n*
  • value.deserializer is always set to {@link ByteArrayDeserializer}.\n*
  • auto.offset.reset.strategy is overridden by {@link\n* OffsetsInitializer\n* default {@link OffsetsInitializer\n*
  • partition.discovery.interval.ms is overridden to -1 when {@link\n*\n*
  • client.id is overridden to the \"client.id.prefix-RANDOM_LONG\", or\n* \"group.id-RANDOM_LONG\" if the client id prefix is not set.\n*
\n*\n* @param props the properties to set for the KafkaSource.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setProperties(Properties props) {\nthis.props.putAll(props);\nreturn this;\n}\n/**\n* Build the {@link KafkaSource}.\n*\n* @return a KafkaSource with the settings made for this builder.\n*/\npublic KafkaSource build() {\nsanityCheck();\nparseAndSetRequiredProperties();\nreturn new KafkaSource<>(\nsubscriber,\nstartingOffsetsInitializer,\nstoppingOffsetsInitializer,\nboundedness,\ndeserializationSchema,\nprops);\n}\nprivate void ensureSubscriberIsNull(String attemptingSubscribeMode) {\nif (subscriber != null) {\nthrow new IllegalStateException(\nString.format(\n\"Cannot use %s for consumption because a %s is already set for consumption.\",\nattemptingSubscribeMode, subscriber.getClass().getSimpleName()));\n}\n}\nprivate void parseAndSetRequiredProperties() {\nmaybeOverride(\nConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,\nByteArrayDeserializer.class.getName(),\ntrue);\nmaybeOverride(\nConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,\nByteArrayDeserializer.class.getName(),\ntrue);\nif (!props.containsKey(ConsumerConfig.GROUP_ID_CONFIG)) {\nLOG.warn(\n\"Offset commit on checkpoint is disabled because {} is not specified\",\nConsumerConfig.GROUP_ID_CONFIG);\nmaybeOverride(KafkaSourceOptions.COMMIT_OFFSETS_ON_CHECKPOINT.key(), \"false\", false);\n}\nmaybeOverride(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, \"false\", false);\nmaybeOverride(\nConsumerConfig.AUTO_OFFSET_RESET_CONFIG,\nstartingOffsetsInitializer.getAutoOffsetResetStrategy().name().toLowerCase(),\ntrue);\nmaybeOverride(\nKafkaSourceOptions.PARTITION_DISCOVERY_INTERVAL_MS.key(),\n\"-1\",\nboundedness == Boundedness.BOUNDED);\nmaybeOverride(\nKafkaSourceOptions.CLIENT_ID_PREFIX.key(),\nprops.containsKey(ConsumerConfig.GROUP_ID_CONFIG)\n? 
props.getProperty(ConsumerConfig.GROUP_ID_CONFIG)\n: \"KafkaSource-\" + new Random().nextLong(),\nfalse);\n}\nprivate boolean maybeOverride(String key, String value, boolean override) {\nboolean overridden = false;\nString userValue = props.getProperty(key);\nif (userValue != null) {\nif (override) {\nLOG.warn(\nString.format(\n\"Property %s is provided but will be overridden from %s to %s\",\nkey, userValue, value));\nprops.setProperty(key, value);\noverridden = true;\n}\n} else {\nprops.setProperty(key, value);\n}\nreturn overridden;\n}\nprivate void sanityCheck() {\nfor (String requiredConfig : REQUIRED_CONFIGS) {\ncheckNotNull(\nprops.getProperty(requiredConfig),\nString.format(\"Property %s is required but not provided\", requiredConfig));\n}\ncheckNotNull(\nsubscriber,\n\"No subscribe mode is specified, \"\n+ \"should be one of topics, topic pattern and partition set.\");\ncheckNotNull(deserializationSchema, \"Deserialization schema is required but not provided.\");\ncheckState(\nprops.containsKey(ConsumerConfig.GROUP_ID_CONFIG) || !offsetCommitEnabledManually(),\nString.format(\n\"Property %s is required when offset commit is enabled\",\nConsumerConfig.GROUP_ID_CONFIG));\n}\n}", + "context_after": "class KafkaSourceBuilder {\nprivate static final Logger LOG = LoggerFactory.getLogger(KafkaSourceBuilder.class);\nprivate static final String[] REQUIRED_CONFIGS = {ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG};\nprivate KafkaSubscriber subscriber;\nprivate OffsetsInitializer startingOffsetsInitializer;\nprivate OffsetsInitializer stoppingOffsetsInitializer;\nprivate Boundedness boundedness;\nprivate KafkaRecordDeserializationSchema deserializationSchema;\nprotected Properties props;\nKafkaSourceBuilder() {\nthis.subscriber = null;\nthis.startingOffsetsInitializer = OffsetsInitializer.earliest();\nthis.stoppingOffsetsInitializer = new NoStoppingOffsetsInitializer();\nthis.boundedness = Boundedness.CONTINUOUS_UNBOUNDED;\nthis.deserializationSchema = null;\nthis.props = new Properties();\n}\n/**\n* Sets the bootstrap servers for the KafkaConsumer of the KafkaSource.\n*\n* @param bootstrapServers the bootstrap servers of the Kafka cluster.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setBootstrapServers(String bootstrapServers) {\nreturn setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);\n}\n/**\n* Sets the consumer group id of the KafkaSource.\n*\n* @param groupId the group id of the KafkaSource.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setGroupId(String groupId) {\nreturn setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);\n}\n/**\n* Set a list of topics the KafkaSource should consume from. All the topics in the list should\n* have existed in the Kafka cluster. Otherwise an exception will be thrown. To allow some of\n* the topics to be created lazily, please use {@link\n*\n* @param topics the list of topics to consume from.\n* @return this KafkaSourceBuilder.\n* @see org.apache.kafka.clients.consumer.KafkaConsumer\n*/\npublic KafkaSourceBuilder setTopics(List topics) {\nensureSubscriberIsNull(\"topics\");\nsubscriber = KafkaSubscriber.getTopicListSubscriber(topics);\nreturn this;\n}\n/**\n* Set a list of topics the KafkaSource should consume from. All the topics in the list should\n* have existed in the Kafka cluster. Otherwise an exception will be thrown. 
To allow some of\n* the topics to be created lazily, please use {@link\n*\n* @param topics the list of topics to consume from.\n* @return this KafkaSourceBuilder.\n* @see org.apache.kafka.clients.consumer.KafkaConsumer\n*/\npublic KafkaSourceBuilder setTopics(String... topics) {\nreturn setTopics(Arrays.asList(topics));\n}\n/**\n* Set a topic pattern to consume from use the java {@link Pattern}.\n*\n* @param topicPattern the pattern of the topic name to consume from.\n* @return this KafkaSourceBuilder.\n* @see org.apache.kafka.clients.consumer.KafkaConsumer\n*/\npublic KafkaSourceBuilder setTopicPattern(Pattern topicPattern) {\nensureSubscriberIsNull(\"topic pattern\");\nsubscriber = KafkaSubscriber.getTopicPatternSubscriber(topicPattern);\nreturn this;\n}\n/**\n* Set a set of partitions to consume from.\n*\n* @param partitions the set of partitions to consume from.\n* @return this KafkaSourceBuilder.\n* @see org.apache.kafka.clients.consumer.KafkaConsumer\n*/\npublic KafkaSourceBuilder setPartitions(Set partitions) {\nensureSubscriberIsNull(\"partitions\");\nsubscriber = KafkaSubscriber.getPartitionSetSubscriber(partitions);\nreturn this;\n}\n/**\n* Specify from which offsets the KafkaSource should start consume from by providing an {@link\n* OffsetsInitializer}.\n*\n*

The following {@link OffsetsInitializer}s are commonly used and provided out of the box.\n* Users can also implement their own {@link OffsetsInitializer} for custom behaviors.\n*\n*

    \n*
  • {@link OffsetsInitializer\n* also the default {@link OffsetsInitializer} of the KafkaSource for starting offsets.\n*
  • {@link OffsetsInitializer\n*
  • {@link OffsetsInitializer\n* the consumer group.\n*
  • {@link\n* OffsetsInitializer\n* - starting from the committed offsets of the consumer group. If there is no committed\n* offsets, starting from the offsets specified by the {@link\n* org.apache.kafka.clients.consumer.OffsetResetStrategy OffsetResetStrategy}.\n*
  • {@link OffsetsInitializer\n* partition.\n*
  • {@link OffsetsInitializer\n* each partition. Note that the guarantee here is that all the records in Kafka whose\n* {@link org.apache.kafka.clients.consumer.ConsumerRecord\n* the given starting timestamp will be consumed. However, it is possible that some\n* consumer records whose timestamp is smaller than the given starting timestamp are also\n* consumed.\n*
\n*\n* @param startingOffsetsInitializer the {@link OffsetsInitializer} setting the starting offsets\n* for the Source.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setStartingOffsets(\nOffsetsInitializer startingOffsetsInitializer) {\nthis.startingOffsetsInitializer = startingOffsetsInitializer;\nreturn this;\n}\n/**\n* By default the KafkaSource is set to run in {@link Boundedness\n* and thus never stops until the Flink job fails or is canceled. To let the KafkaSource run as\n* a streaming source but still stops at some point, one can set an {@link OffsetsInitializer}\n* to specify the stopping offsets for each partition. When all the partitions have reached\n* their stopping offsets, the KafkaSource will then exit.\n*\n*

This method is different from {@link\n* the stopping offsets with this method, {@link KafkaSource\n* {@link Boundedness\n* specified by the stopping offsets {@link OffsetsInitializer}.\n*\n*

The following {@link OffsetsInitializer} are commonly used and provided out of the box.\n* Users can also implement their own {@link OffsetsInitializer} for custom behaviors.\n*\n*

    \n*
  • {@link OffsetsInitializer\n* the KafkaSource starts to run.\n*
  • {@link OffsetsInitializer\n* consumer group.\n*
  • {@link OffsetsInitializer\n* partition.\n*
  • {@link OffsetsInitializer\n* partition. The guarantee of setting the stopping timestamp is that no Kafka records\n* whose {@link org.apache.kafka.clients.consumer.ConsumerRecord\n* than the given stopping timestamp will be consumed. However, it is possible that some\n* records whose timestamp is smaller than the specified stopping timestamp are not\n* consumed.\n*
\n*\n* @param stoppingOffsetsInitializer The {@link OffsetsInitializer} to specify the stopping\n* offset.\n* @return this KafkaSourceBuilder.\n* @see\n*/\npublic KafkaSourceBuilder setUnbounded(OffsetsInitializer stoppingOffsetsInitializer) {\nthis.boundedness = Boundedness.CONTINUOUS_UNBOUNDED;\nthis.stoppingOffsetsInitializer = stoppingOffsetsInitializer;\nreturn this;\n}\n/**\n* By default the KafkaSource is set to run in {@link Boundedness\n* and thus never stops until the Flink job fails or is canceled. To let the KafkaSource run in\n* {@link Boundedness\n* OffsetsInitializer} to specify the stopping offsets for each partition. When all the\n* partitions have reached their stopping offsets, the KafkaSource will then exit.\n*\n*

This method is different from {@link\n* the stopping offsets with this method, {@link KafkaSource\n* {@link Boundedness\n*\n*

The following {@link OffsetsInitializer} are commonly used and provided out of the box.\n* Users can also implement their own {@link OffsetsInitializer} for custom behaviors.\n*\n*

    \n*
  • {@link OffsetsInitializer\n* the KafkaSource starts to run.\n*
  • {@link OffsetsInitializer\n* consumer group.\n*
  • {@link OffsetsInitializer\n* partition.\n*
  • {@link OffsetsInitializer\n* partition. The guarantee of setting the stopping timestamp is that no Kafka records\n* whose {@link org.apache.kafka.clients.consumer.ConsumerRecord\n* than the given stopping timestamp will be consumed. However, it is possible that some\n* records whose timestamp is smaller than the specified stopping timestamp are not\n* consumed.\n*
\n*\n* @param stoppingOffsetsInitializer the {@link OffsetsInitializer} to specify the stopping\n* offsets.\n* @return this KafkaSourceBuilder.\n* @see\n*/\npublic KafkaSourceBuilder setBounded(OffsetsInitializer stoppingOffsetsInitializer) {\nthis.boundedness = Boundedness.BOUNDED;\nthis.stoppingOffsetsInitializer = stoppingOffsetsInitializer;\nreturn this;\n}\n/**\n* Sets the {@link KafkaRecordDeserializationSchema deserializer} of the {@link\n* org.apache.kafka.clients.consumer.ConsumerRecord ConsumerRecord} for KafkaSource.\n*\n* @param recordDeserializer the deserializer for Kafka {@link\n* org.apache.kafka.clients.consumer.ConsumerRecord ConsumerRecord}.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setDeserializer(\nKafkaRecordDeserializationSchema recordDeserializer) {\nthis.deserializationSchema = recordDeserializer;\nreturn this;\n}\n/**\n* Sets the {@link KafkaRecordDeserializationSchema deserializer} of the {@link\n* org.apache.kafka.clients.consumer.ConsumerRecord ConsumerRecord} for KafkaSource. The given\n* {@link DeserializationSchema} will be used to deserialize the value of ConsumerRecord. The\n* other information (e.g. key) in a ConsumerRecord will be ignored.\n*\n* @param deserializationSchema the {@link DeserializationSchema} to use for deserialization.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setValueOnlyDeserializer(\nDeserializationSchema deserializationSchema) {\nthis.deserializationSchema =\nKafkaRecordDeserializationSchema.valueOnly(deserializationSchema);\nreturn this;\n}\n/**\n* Sets the client id prefix of this KafkaSource.\n*\n* @param prefix the client id prefix to use for this KafkaSource.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setClientIdPrefix(String prefix) {\nreturn setProperty(KafkaSourceOptions.CLIENT_ID_PREFIX.key(), prefix);\n}\n/**\n* Set an arbitrary property for the KafkaSource and KafkaConsumer. The valid keys can be found\n* in {@link ConsumerConfig} and {@link KafkaSourceOptions}.\n*\n*

Note that the following keys will be overridden by the builder when the KafkaSource is\n* created.\n*\n*

    \n*
  • key.deserializer is always set to {@link ByteArrayDeserializer}.\n*
  • value.deserializer is always set to {@link ByteArrayDeserializer}.\n*
  • auto.offset.reset.strategy is overridden by {@link\n* OffsetsInitializer\n* default {@link OffsetsInitializer\n*
  • partition.discovery.interval.ms is overridden to -1 when {@link\n*\n*
\n*\n* @param key the key of the property.\n* @param value the value of the property.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setProperty(String key, String value) {\nprops.setProperty(key, value);\nreturn this;\n}\n/**\n* Set arbitrary properties for the KafkaSource and KafkaConsumer. The valid keys can be found\n* in {@link ConsumerConfig} and {@link KafkaSourceOptions}.\n*\n*

Note that the following keys will be overridden by the builder when the KafkaSource is\n* created.\n*\n*

    \n*
  • key.deserializer is always set to {@link ByteArrayDeserializer}.\n*
  • value.deserializer is always set to {@link ByteArrayDeserializer}.\n*
  • auto.offset.reset.strategy is overridden by {@link\n* OffsetsInitializer\n* default {@link OffsetsInitializer\n*
  • partition.discovery.interval.ms is overridden to -1 when {@link\n*\n*
  • client.id is overridden to the \"client.id.prefix-RANDOM_LONG\", or\n* \"group.id-RANDOM_LONG\" if the client id prefix is not set.\n*
\n*\n* @param props the properties to set for the KafkaSource.\n* @return this KafkaSourceBuilder.\n*/\npublic KafkaSourceBuilder setProperties(Properties props) {\nthis.props.putAll(props);\nreturn this;\n}\n/**\n* Build the {@link KafkaSource}.\n*\n* @return a KafkaSource with the settings made for this builder.\n*/\npublic KafkaSource build() {\nsanityCheck();\nparseAndSetRequiredProperties();\nreturn new KafkaSource<>(\nsubscriber,\nstartingOffsetsInitializer,\nstoppingOffsetsInitializer,\nboundedness,\ndeserializationSchema,\nprops);\n}\nprivate void ensureSubscriberIsNull(String attemptingSubscribeMode) {\nif (subscriber != null) {\nthrow new IllegalStateException(\nString.format(\n\"Cannot use %s for consumption because a %s is already set for consumption.\",\nattemptingSubscribeMode, subscriber.getClass().getSimpleName()));\n}\n}\nprivate void parseAndSetRequiredProperties() {\nmaybeOverride(\nConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,\nByteArrayDeserializer.class.getName(),\ntrue);\nmaybeOverride(\nConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,\nByteArrayDeserializer.class.getName(),\ntrue);\nif (!props.containsKey(ConsumerConfig.GROUP_ID_CONFIG)) {\nLOG.warn(\n\"Offset commit on checkpoint is disabled because {} is not specified\",\nConsumerConfig.GROUP_ID_CONFIG);\nmaybeOverride(KafkaSourceOptions.COMMIT_OFFSETS_ON_CHECKPOINT.key(), \"false\", false);\n}\nmaybeOverride(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, \"false\", false);\nmaybeOverride(\nConsumerConfig.AUTO_OFFSET_RESET_CONFIG,\nstartingOffsetsInitializer.getAutoOffsetResetStrategy().name().toLowerCase(),\ntrue);\nmaybeOverride(\nKafkaSourceOptions.PARTITION_DISCOVERY_INTERVAL_MS.key(),\n\"-1\",\nboundedness == Boundedness.BOUNDED);\nmaybeOverride(\nKafkaSourceOptions.CLIENT_ID_PREFIX.key(),\nprops.containsKey(ConsumerConfig.GROUP_ID_CONFIG)\n? 
props.getProperty(ConsumerConfig.GROUP_ID_CONFIG)\n: \"KafkaSource-\" + new Random().nextLong(),\nfalse);\n}\nprivate boolean maybeOverride(String key, String value, boolean override) {\nboolean overridden = false;\nString userValue = props.getProperty(key);\nif (userValue != null) {\nif (override) {\nLOG.warn(\nString.format(\n\"Property %s is provided but will be overridden from %s to %s\",\nkey, userValue, value));\nprops.setProperty(key, value);\noverridden = true;\n}\n} else {\nprops.setProperty(key, value);\n}\nreturn overridden;\n}\nprivate void sanityCheck() {\nfor (String requiredConfig : REQUIRED_CONFIGS) {\ncheckNotNull(\nprops.getProperty(requiredConfig),\nString.format(\"Property %s is required but not provided\", requiredConfig));\n}\ncheckNotNull(\nsubscriber,\n\"No subscribe mode is specified, \"\n+ \"should be one of topics, topic pattern and partition set.\");\ncheckNotNull(deserializationSchema, \"Deserialization schema is required but not provided.\");\ncheckState(\nprops.containsKey(ConsumerConfig.GROUP_ID_CONFIG) || !offsetCommitEnabledManually(),\nString.format(\n\"Property %s is required when offset commit is enabled\",\nConsumerConfig.GROUP_ID_CONFIG));\nif (startingOffsetsInitializer instanceof OffsetsInitializerValidator) {\n((OffsetsInitializerValidator) startingOffsetsInitializer).validate(props);\n}\nif (stoppingOffsetsInitializer instanceof OffsetsInitializerValidator) {\n((OffsetsInitializerValidator) stoppingOffsetsInitializer).validate(props);\n}\n}\n}" + }, + { + "comment": "Updated to multiplying total cost by `getRowType().getFieldCount()` stead of just `getRowCount`.", + "method_body": "public BeamSqlTable getBeamSqlTable() {\nreturn beamTable;\n}", + "target_code": "}", + "method_body_after": "public BeamSqlTable getBeamSqlTable() {\nreturn beamTable;\n}", + "context_before": "class Transform extends PTransform, PCollection> {\n@Override\npublic PCollection expand(PCollectionList input) {\ncheckArgument(\ninput.size() == 0,\n\"Should not have received input for %s: %s\",\nBeamIOSourceRel.class.getSimpleName(),\ninput);\nPBegin begin = input.getPipeline().begin();\nBeamSqlTableFilter filters = beamTable.constructFilter(ImmutableList.of());\nif (usedFields.isEmpty() && filters instanceof DefaultTableFilter) {\nreturn beamTable.buildIOReader(begin);\n}\nreturn beamTable.buildIOReader(input.getPipeline().begin(), filters, usedFields);\n}\n}", + "context_after": "class Transform extends PTransform, PCollection> {\n@Override\npublic PCollection expand(PCollectionList input) {\ncheckArgument(\ninput.size() == 0,\n\"Should not have received input for %s: %s\",\nBeamIOSourceRel.class.getSimpleName(),\ninput);\nfinal PBegin begin = input.getPipeline().begin();\nfinal BeamSqlTableFilter filters = beamTable.constructFilter(ImmutableList.of());\nif (usedFields.isEmpty() && filters instanceof DefaultTableFilter) {\nreturn beamTable.buildIOReader(begin);\n}\nfinal Schema newBeamSchema = CalciteUtils.toSchema(getRowType());\nreturn beamTable.buildIOReader(begin, filters, usedFields).setRowSchema(newBeamSchema);\n}\n}" + }, + { + "comment": "there are easier ways to go about this, like `JobMasterTriggerSavepointITCase.NoOpBlockingInvokable`.", + "method_body": "public void testPutSuspendedJobOnClusterShutdown() throws Exception {\nfinal Duration timeout = Duration.ofSeconds(5);\ntry (final MiniCluster miniCluster =\nnew PersistingMiniCluster(new MiniClusterConfiguration.Builder().build())) {\nminiCluster.start();\nfinal JobVertex vertex = new 
JobVertex(\"blockingVertex\");\nvertex.setInvokableClass(BlockingNoOpInvokable.class);\nfinal JobGraph jobGraph = JobGraphTestUtils.streamingJobGraph(vertex);\nfinal JobID jobId = jobGraph.getJobID();\nminiCluster.submitJob(jobGraph);\nCommonTestUtils.waitUntilCondition(\n() -> {\ntry {\nif (miniCluster.getJobStatus(jobId).get() != JobStatus.RUNNING) {\nreturn false;\n}\n} catch (Exception e) {\nif (ExceptionUtils.findThrowable(e, FlinkJobNotFoundException.class)\n.isPresent()) {\nreturn false;\n}\nthrow e;\n}\nfor (AccessExecutionVertex executionVertex :\nminiCluster\n.getExecutionGraph(jobId)\n.get()\n.getAllExecutionVertices()) {\nif (executionVertex.getExecutionState() == ExecutionState.RUNNING) {\ncontinue;\n}\nreturn false;\n}\nreturn true;\n},\nDeadline.fromNow(timeout));\n}\n}", + "target_code": "if (miniCluster.getJobStatus(jobId).get() != JobStatus.RUNNING) {", + "method_body_after": "public void testPutSuspendedJobOnClusterShutdown() throws Exception {\ntry (final MiniCluster miniCluster =\nnew PersistingMiniCluster(new MiniClusterConfiguration.Builder().build())) {\nminiCluster.start();\nfinal JobVertex vertex = new JobVertex(\"blockingVertex\");\nvertex.setInvokableClass(SignallingBlockingNoOpInvokable.class);\nfinal JobGraph jobGraph = JobGraphTestUtils.streamingJobGraph(vertex);\nminiCluster.submitJob(jobGraph);\nSignallingBlockingNoOpInvokable.LATCH.await();\n}\n}", + "context_before": "class FileExecutionGraphInfoStoreTest extends TestLogger {\nprivate static final List GLOBALLY_TERMINAL_JOB_STATUS =\nArrays.stream(JobStatus.values())\n.filter(JobStatus::isGloballyTerminalState)\n.collect(Collectors.toList());\n@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();\n/**\n* Tests that we can put {@link ExecutionGraphInfo} into the {@link FileExecutionGraphInfoStore}\n* and that the graph is persisted.\n*/\n@Test\npublic void testPut() throws IOException {\nassertPutJobGraphWithStatus(JobStatus.FINISHED);\n}\n/** Tests that a SUSPENDED job can be persisted. */\n@Test\npublic void testPutSuspendedJob() throws IOException {\nassertPutJobGraphWithStatus(JobStatus.SUSPENDED);\n}\n/** Tests that null is returned if we request an unknown JobID. */\n@Test\npublic void testUnknownGet() throws IOException {\nfinal File rootDir = temporaryFolder.newFolder();\ntry (final FileExecutionGraphInfoStore executionGraphStore =\ncreateDefaultExecutionGraphInfoStore(rootDir)) {\nassertThat(executionGraphStore.get(new JobID()), Matchers.nullValue());\n}\n}\n/** Tests that we obtain the correct jobs overview. */\n@Test\npublic void testStoredJobsOverview() throws IOException {\nfinal int numberExecutionGraphs = 10;\nfinal Collection executionGraphInfos =\ngenerateTerminalExecutionGraphInfos(numberExecutionGraphs);\nfinal List jobStatuses =\nexecutionGraphInfos.stream()\n.map(ExecutionGraphInfo::getArchivedExecutionGraph)\n.map(ArchivedExecutionGraph::getState)\n.collect(Collectors.toList());\nfinal JobsOverview expectedJobsOverview = JobsOverview.create(jobStatuses);\nfinal File rootDir = temporaryFolder.newFolder();\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\ncreateDefaultExecutionGraphInfoStore(rootDir)) {\nfor (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {\nexecutionGraphInfoStore.put(executionGraphInfo);\n}\nassertThat(\nexecutionGraphInfoStore.getStoredJobsOverview(),\nMatchers.equalTo(expectedJobsOverview));\n}\n}\n/** Tests that we obtain the correct collection of available job details. 
*/\n@Test\npublic void testAvailableJobDetails() throws IOException {\nfinal int numberExecutionGraphs = 10;\nfinal Collection executionGraphInfos =\ngenerateTerminalExecutionGraphInfos(numberExecutionGraphs);\nfinal Collection jobDetails = generateJobDetails(executionGraphInfos);\nfinal File rootDir = temporaryFolder.newFolder();\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\ncreateDefaultExecutionGraphInfoStore(rootDir)) {\nfor (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {\nexecutionGraphInfoStore.put(executionGraphInfo);\n}\nassertThat(\nexecutionGraphInfoStore.getAvailableJobDetails(),\nMatchers.containsInAnyOrder(jobDetails.toArray()));\n}\n}\n/** Tests that an expired execution graph is removed from the execution graph store. */\n@Test\npublic void testExecutionGraphExpiration() throws Exception {\nfinal File rootDir = temporaryFolder.newFolder();\nfinal Time expirationTime = Time.milliseconds(1L);\nfinal ManuallyTriggeredScheduledExecutor scheduledExecutor =\nnew ManuallyTriggeredScheduledExecutor();\nfinal ManualTicker manualTicker = new ManualTicker();\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\nnew FileExecutionGraphInfoStore(\nrootDir,\nexpirationTime,\nInteger.MAX_VALUE,\n10000L,\nscheduledExecutor,\nmanualTicker)) {\nfinal ExecutionGraphInfo executionGraphInfo =\nnew ExecutionGraphInfo(\nnew ArchivedExecutionGraphBuilder()\n.setState(JobStatus.FINISHED)\n.build());\nexecutionGraphInfoStore.put(executionGraphInfo);\nassertThat(executionGraphInfoStore.size(), Matchers.equalTo(1));\nmanualTicker.advanceTime(expirationTime.toMilliseconds(), TimeUnit.MILLISECONDS);\nscheduledExecutor.triggerScheduledTasks();\nassertThat(executionGraphInfoStore.size(), Matchers.equalTo(0));\nassertThat(\nexecutionGraphInfoStore.get(executionGraphInfo.getJobId()),\nMatchers.nullValue());\nfinal File storageDirectory = executionGraphInfoStore.getStorageDir();\nassertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));\n}\n}\n/** Tests that all persisted files are cleaned up after closing the store. */\n@Test\npublic void testCloseCleansUp() throws IOException {\nfinal File rootDir = temporaryFolder.newFolder();\nassertThat(rootDir.listFiles().length, Matchers.equalTo(0));\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\ncreateDefaultExecutionGraphInfoStore(rootDir)) {\nassertThat(rootDir.listFiles().length, Matchers.equalTo(1));\nfinal File storageDirectory = executionGraphInfoStore.getStorageDir();\nassertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));\nexecutionGraphInfoStore.put(\nnew ExecutionGraphInfo(\nnew ArchivedExecutionGraphBuilder()\n.setState(JobStatus.FINISHED)\n.build()));\nassertThat(storageDirectory.listFiles().length, Matchers.equalTo(1));\n}\nassertThat(rootDir.listFiles().length, Matchers.equalTo(0));\n}\n/** Tests that evicted {@link ExecutionGraphInfo} are loaded from disk again. 
*/\n@Test\npublic void testCacheLoading() throws IOException {\nfinal File rootDir = temporaryFolder.newFolder();\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\nnew FileExecutionGraphInfoStore(\nrootDir,\nTime.hours(1L),\nInteger.MAX_VALUE,\n100L << 10,\nTestingUtils.defaultScheduledExecutor(),\nTicker.systemTicker())) {\nfinal LoadingCache executionGraphInfoCache =\nexecutionGraphInfoStore.getExecutionGraphInfoCache();\nCollection executionGraphInfos = new ArrayList<>(64);\nboolean continueInserting = true;\nwhile (continueInserting) {\nfinal ExecutionGraphInfo executionGraphInfo =\nnew ExecutionGraphInfo(\nnew ArchivedExecutionGraphBuilder()\n.setState(JobStatus.FINISHED)\n.build());\nexecutionGraphInfoStore.put(executionGraphInfo);\nexecutionGraphInfos.add(executionGraphInfo);\ncontinueInserting = executionGraphInfoCache.size() == executionGraphInfos.size();\n}\nfinal File storageDirectory = executionGraphInfoStore.getStorageDir();\nassertThat(\nstorageDirectory.listFiles().length,\nMatchers.equalTo(executionGraphInfos.size()));\nfor (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {\nassertThat(\nexecutionGraphInfoStore.get(executionGraphInfo.getJobId()),\nmatchesPartiallyWith(executionGraphInfo));\n}\n}\n}\n/**\n* Tests that the size of {@link FileExecutionGraphInfoStore} is no more than the configured max\n* capacity and the old execution graphs will be purged if the total added number exceeds the\n* max capacity.\n*/\n@Test\npublic void testMaximumCapacity() throws IOException {\nfinal File rootDir = temporaryFolder.newFolder();\nfinal int maxCapacity = 10;\nfinal int numberExecutionGraphs = 10;\nfinal Collection oldExecutionGraphInfos =\ngenerateTerminalExecutionGraphInfos(numberExecutionGraphs);\nfinal Collection newExecutionGraphInfos =\ngenerateTerminalExecutionGraphInfos(numberExecutionGraphs);\nfinal Collection jobDetails = generateJobDetails(newExecutionGraphInfos);\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\nnew FileExecutionGraphInfoStore(\nrootDir,\nTime.hours(1L),\nmaxCapacity,\n10000L,\nTestingUtils.defaultScheduledExecutor(),\nTicker.systemTicker())) {\nfor (ExecutionGraphInfo executionGraphInfo : oldExecutionGraphInfos) {\nexecutionGraphInfoStore.put(executionGraphInfo);\nassertTrue(executionGraphInfoStore.size() <= maxCapacity);\n}\nfor (ExecutionGraphInfo executionGraphInfo : newExecutionGraphInfos) {\nexecutionGraphInfoStore.put(executionGraphInfo);\nassertEquals(maxCapacity, executionGraphInfoStore.size());\n}\nassertThat(\nexecutionGraphInfoStore.getAvailableJobDetails(),\nMatchers.containsInAnyOrder(jobDetails.toArray()));\n}\n}\n/** Tests that a session cluster can terminate gracefully when jobs are still running. 
*/\n@Test\nprivate class PersistingMiniCluster extends MiniCluster {\nPersistingMiniCluster(MiniClusterConfiguration miniClusterConfiguration) {\nsuper(miniClusterConfiguration);\n}\n@Override\nprotected Collection\ncreateDispatcherResourceManagerComponents(\nConfiguration configuration,\nRpcServiceFactory rpcServiceFactory,\nHighAvailabilityServices haServices,\nBlobServer blobServer,\nHeartbeatServices heartbeatServices,\nMetricRegistry metricRegistry,\nMetricQueryServiceRetriever metricQueryServiceRetriever,\nFatalErrorHandler fatalErrorHandler)\nthrows Exception {\nfinal DispatcherResourceManagerComponentFactory\ndispatcherResourceManagerComponentFactory =\nDefaultDispatcherResourceManagerComponentFactory\n.createSessionComponentFactory(\nStandaloneResourceManagerFactory.getInstance());\nfinal File rootDir = temporaryFolder.newFolder();\nfinal ExecutionGraphInfoStore executionGraphInfoStore =\ncreateDefaultExecutionGraphInfoStore(rootDir);\nreturn Collections.singleton(\ndispatcherResourceManagerComponentFactory.create(\nconfiguration,\ngetIOExecutor(),\nrpcServiceFactory.createRpcService(),\nhaServices,\nblobServer,\nheartbeatServices,\nmetricRegistry,\nexecutionGraphInfoStore,\nmetricQueryServiceRetriever,\nfatalErrorHandler));\n}\n}\nprivate Collection generateTerminalExecutionGraphInfos(int number) {\nfinal Collection executionGraphInfos = new ArrayList<>(number);\nfor (int i = 0; i < number; i++) {\nfinal JobStatus state =\nGLOBALLY_TERMINAL_JOB_STATUS.get(\nThreadLocalRandom.current()\n.nextInt(GLOBALLY_TERMINAL_JOB_STATUS.size()));\nexecutionGraphInfos.add(\nnew ExecutionGraphInfo(\nnew ArchivedExecutionGraphBuilder().setState(state).build()));\n}\nreturn executionGraphInfos;\n}\nprivate FileExecutionGraphInfoStore createDefaultExecutionGraphInfoStore(File storageDirectory)\nthrows IOException {\nreturn new FileExecutionGraphInfoStore(\nstorageDirectory,\nTime.hours(1L),\nInteger.MAX_VALUE,\n10000L,\nTestingUtils.defaultScheduledExecutor(),\nTicker.systemTicker());\n}\nprivate static final class PartialExecutionGraphInfoMatcher\nextends BaseMatcher {\nprivate final ExecutionGraphInfo expectedExecutionGraphInfo;\nprivate PartialExecutionGraphInfoMatcher(ExecutionGraphInfo expectedExecutionGraphInfo) {\nthis.expectedExecutionGraphInfo =\nPreconditions.checkNotNull(expectedExecutionGraphInfo);\n}\n@Override\npublic boolean matches(Object o) {\nif (expectedExecutionGraphInfo == o) {\nreturn true;\n}\nif (o == null || expectedExecutionGraphInfo.getClass() != o.getClass()) {\nreturn false;\n}\nExecutionGraphInfo that = (ExecutionGraphInfo) o;\nArchivedExecutionGraph thisExecutionGraph =\nexpectedExecutionGraphInfo.getArchivedExecutionGraph();\nArchivedExecutionGraph thatExecutionGraph = that.getArchivedExecutionGraph();\nreturn thisExecutionGraph.isStoppable() == thatExecutionGraph.isStoppable()\n&& Objects.equals(thisExecutionGraph.getJobID(), thatExecutionGraph.getJobID())\n&& Objects.equals(\nthisExecutionGraph.getJobName(), thatExecutionGraph.getJobName())\n&& thisExecutionGraph.getState() == thatExecutionGraph.getState()\n&& Objects.equals(\nthisExecutionGraph.getJsonPlan(), thatExecutionGraph.getJsonPlan())\n&& Objects.equals(\nthisExecutionGraph.getAccumulatorsSerialized(),\nthatExecutionGraph.getAccumulatorsSerialized())\n&& Objects.equals(\nthisExecutionGraph.getCheckpointCoordinatorConfiguration(),\nthatExecutionGraph.getCheckpointCoordinatorConfiguration())\n&& thisExecutionGraph.getAllVertices().size()\n== thatExecutionGraph.getAllVertices().size()\n&& 
Objects.equals(\nexpectedExecutionGraphInfo.getExceptionHistory(),\nthat.getExceptionHistory());\n}\n@Override\npublic void describeTo(Description description) {\ndescription.appendText(\n\"Matches against \" + ExecutionGraphInfo.class.getSimpleName() + '.');\n}\n}\nprivate void assertPutJobGraphWithStatus(JobStatus jobStatus) throws IOException {\nfinal ExecutionGraphInfo dummyExecutionGraphInfo =\nnew ExecutionGraphInfo(\nnew ArchivedExecutionGraphBuilder().setState(jobStatus).build());\nfinal File rootDir = temporaryFolder.newFolder();\ntry (final FileExecutionGraphInfoStore executionGraphStore =\ncreateDefaultExecutionGraphInfoStore(rootDir)) {\nfinal File storageDirectory = executionGraphStore.getStorageDir();\nassertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));\nexecutionGraphStore.put(dummyExecutionGraphInfo);\nassertThat(storageDirectory.listFiles().length, Matchers.equalTo(1));\nassertThat(\nexecutionGraphStore.get(dummyExecutionGraphInfo.getJobId()),\nnew PartialExecutionGraphInfoMatcher(dummyExecutionGraphInfo));\n}\n}\nprivate static Matcher matchesPartiallyWith(\nExecutionGraphInfo executionGraphInfo) {\nreturn new PartialExecutionGraphInfoMatcher(executionGraphInfo);\n}\nprivate static Collection generateJobDetails(\nCollection executionGraphInfos) {\nreturn executionGraphInfos.stream()\n.map(ExecutionGraphInfo::getArchivedExecutionGraph)\n.map(JobDetails::createDetailsForJob)\n.collect(Collectors.toList());\n}\n}", + "context_after": "class FileExecutionGraphInfoStoreTest extends TestLogger {\nprivate static final List GLOBALLY_TERMINAL_JOB_STATUS =\nArrays.stream(JobStatus.values())\n.filter(JobStatus::isGloballyTerminalState)\n.collect(Collectors.toList());\n@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();\n/**\n* Tests that we can put {@link ExecutionGraphInfo} into the {@link FileExecutionGraphInfoStore}\n* and that the graph is persisted.\n*/\n@Test\npublic void testPut() throws IOException {\nassertPutJobGraphWithStatus(JobStatus.FINISHED);\n}\n/** Tests that a SUSPENDED job can be persisted. */\n@Test\npublic void testPutSuspendedJob() throws IOException {\nassertPutJobGraphWithStatus(JobStatus.SUSPENDED);\n}\n/** Tests that null is returned if we request an unknown JobID. */\n@Test\npublic void testUnknownGet() throws IOException {\nfinal File rootDir = temporaryFolder.newFolder();\ntry (final FileExecutionGraphInfoStore executionGraphStore =\ncreateDefaultExecutionGraphInfoStore(rootDir)) {\nassertThat(executionGraphStore.get(new JobID()), Matchers.nullValue());\n}\n}\n/** Tests that we obtain the correct jobs overview. 
*/\n@Test\npublic void testStoredJobsOverview() throws IOException {\nfinal int numberExecutionGraphs = 10;\nfinal Collection executionGraphInfos =\ngenerateTerminalExecutionGraphInfos(numberExecutionGraphs);\nfinal List jobStatuses =\nexecutionGraphInfos.stream()\n.map(ExecutionGraphInfo::getArchivedExecutionGraph)\n.map(ArchivedExecutionGraph::getState)\n.collect(Collectors.toList());\nfinal JobsOverview expectedJobsOverview = JobsOverview.create(jobStatuses);\nfinal File rootDir = temporaryFolder.newFolder();\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\ncreateDefaultExecutionGraphInfoStore(rootDir)) {\nfor (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {\nexecutionGraphInfoStore.put(executionGraphInfo);\n}\nassertThat(\nexecutionGraphInfoStore.getStoredJobsOverview(),\nMatchers.equalTo(expectedJobsOverview));\n}\n}\n/** Tests that we obtain the correct collection of available job details. */\n@Test\npublic void testAvailableJobDetails() throws IOException {\nfinal int numberExecutionGraphs = 10;\nfinal Collection executionGraphInfos =\ngenerateTerminalExecutionGraphInfos(numberExecutionGraphs);\nfinal Collection jobDetails = generateJobDetails(executionGraphInfos);\nfinal File rootDir = temporaryFolder.newFolder();\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\ncreateDefaultExecutionGraphInfoStore(rootDir)) {\nfor (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {\nexecutionGraphInfoStore.put(executionGraphInfo);\n}\nassertThat(\nexecutionGraphInfoStore.getAvailableJobDetails(),\nMatchers.containsInAnyOrder(jobDetails.toArray()));\n}\n}\n/** Tests that an expired execution graph is removed from the execution graph store. */\n@Test\npublic void testExecutionGraphExpiration() throws Exception {\nfinal File rootDir = temporaryFolder.newFolder();\nfinal Time expirationTime = Time.milliseconds(1L);\nfinal ManuallyTriggeredScheduledExecutor scheduledExecutor =\nnew ManuallyTriggeredScheduledExecutor();\nfinal ManualTicker manualTicker = new ManualTicker();\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\nnew FileExecutionGraphInfoStore(\nrootDir,\nexpirationTime,\nInteger.MAX_VALUE,\n10000L,\nscheduledExecutor,\nmanualTicker)) {\nfinal ExecutionGraphInfo executionGraphInfo =\nnew ExecutionGraphInfo(\nnew ArchivedExecutionGraphBuilder()\n.setState(JobStatus.FINISHED)\n.build());\nexecutionGraphInfoStore.put(executionGraphInfo);\nassertThat(executionGraphInfoStore.size(), Matchers.equalTo(1));\nmanualTicker.advanceTime(expirationTime.toMilliseconds(), TimeUnit.MILLISECONDS);\nscheduledExecutor.triggerScheduledTasks();\nassertThat(executionGraphInfoStore.size(), Matchers.equalTo(0));\nassertThat(\nexecutionGraphInfoStore.get(executionGraphInfo.getJobId()),\nMatchers.nullValue());\nfinal File storageDirectory = executionGraphInfoStore.getStorageDir();\nassertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));\n}\n}\n/** Tests that all persisted files are cleaned up after closing the store. 
*/\n@Test\npublic void testCloseCleansUp() throws IOException {\nfinal File rootDir = temporaryFolder.newFolder();\nassertThat(rootDir.listFiles().length, Matchers.equalTo(0));\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\ncreateDefaultExecutionGraphInfoStore(rootDir)) {\nassertThat(rootDir.listFiles().length, Matchers.equalTo(1));\nfinal File storageDirectory = executionGraphInfoStore.getStorageDir();\nassertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));\nexecutionGraphInfoStore.put(\nnew ExecutionGraphInfo(\nnew ArchivedExecutionGraphBuilder()\n.setState(JobStatus.FINISHED)\n.build()));\nassertThat(storageDirectory.listFiles().length, Matchers.equalTo(1));\n}\nassertThat(rootDir.listFiles().length, Matchers.equalTo(0));\n}\n/** Tests that evicted {@link ExecutionGraphInfo} are loaded from disk again. */\n@Test\npublic void testCacheLoading() throws IOException {\nfinal File rootDir = temporaryFolder.newFolder();\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\nnew FileExecutionGraphInfoStore(\nrootDir,\nTime.hours(1L),\nInteger.MAX_VALUE,\n100L << 10,\nTestingUtils.defaultScheduledExecutor(),\nTicker.systemTicker())) {\nfinal LoadingCache executionGraphInfoCache =\nexecutionGraphInfoStore.getExecutionGraphInfoCache();\nCollection executionGraphInfos = new ArrayList<>(64);\nboolean continueInserting = true;\nwhile (continueInserting) {\nfinal ExecutionGraphInfo executionGraphInfo =\nnew ExecutionGraphInfo(\nnew ArchivedExecutionGraphBuilder()\n.setState(JobStatus.FINISHED)\n.build());\nexecutionGraphInfoStore.put(executionGraphInfo);\nexecutionGraphInfos.add(executionGraphInfo);\ncontinueInserting = executionGraphInfoCache.size() == executionGraphInfos.size();\n}\nfinal File storageDirectory = executionGraphInfoStore.getStorageDir();\nassertThat(\nstorageDirectory.listFiles().length,\nMatchers.equalTo(executionGraphInfos.size()));\nfor (ExecutionGraphInfo executionGraphInfo : executionGraphInfos) {\nassertThat(\nexecutionGraphInfoStore.get(executionGraphInfo.getJobId()),\nmatchesPartiallyWith(executionGraphInfo));\n}\n}\n}\n/**\n* Tests that the size of {@link FileExecutionGraphInfoStore} is no more than the configured max\n* capacity and the old execution graphs will be purged if the total added number exceeds the\n* max capacity.\n*/\n@Test\npublic void testMaximumCapacity() throws IOException {\nfinal File rootDir = temporaryFolder.newFolder();\nfinal int maxCapacity = 10;\nfinal int numberExecutionGraphs = 10;\nfinal Collection oldExecutionGraphInfos =\ngenerateTerminalExecutionGraphInfos(numberExecutionGraphs);\nfinal Collection newExecutionGraphInfos =\ngenerateTerminalExecutionGraphInfos(numberExecutionGraphs);\nfinal Collection jobDetails = generateJobDetails(newExecutionGraphInfos);\ntry (final FileExecutionGraphInfoStore executionGraphInfoStore =\nnew FileExecutionGraphInfoStore(\nrootDir,\nTime.hours(1L),\nmaxCapacity,\n10000L,\nTestingUtils.defaultScheduledExecutor(),\nTicker.systemTicker())) {\nfor (ExecutionGraphInfo executionGraphInfo : oldExecutionGraphInfos) {\nexecutionGraphInfoStore.put(executionGraphInfo);\nassertTrue(executionGraphInfoStore.size() <= maxCapacity);\n}\nfor (ExecutionGraphInfo executionGraphInfo : newExecutionGraphInfos) {\nexecutionGraphInfoStore.put(executionGraphInfo);\nassertEquals(maxCapacity, executionGraphInfoStore.size());\n}\nassertThat(\nexecutionGraphInfoStore.getAvailableJobDetails(),\nMatchers.containsInAnyOrder(jobDetails.toArray()));\n}\n}\n/** Tests that a session cluster can 
terminate gracefully when jobs are still running. */\n@Test\n/**\n* Invokable which signals with {@link SignallingBlockingNoOpInvokable\n* and blocks forever afterwards.\n*/\npublic static class SignallingBlockingNoOpInvokable extends AbstractInvokable {\n/** Latch used to signal an initial invocation. */\npublic static final OneShotLatch LATCH = new OneShotLatch();\npublic SignallingBlockingNoOpInvokable(Environment environment) {\nsuper(environment);\n}\n@Override\npublic void invoke() throws Exception {\nLATCH.trigger();\nThread.sleep(Long.MAX_VALUE);\n}\n}\nprivate class PersistingMiniCluster extends MiniCluster {\nPersistingMiniCluster(MiniClusterConfiguration miniClusterConfiguration) {\nsuper(miniClusterConfiguration);\n}\n@Override\nprotected Collection\ncreateDispatcherResourceManagerComponents(\nConfiguration configuration,\nRpcServiceFactory rpcServiceFactory,\nHighAvailabilityServices haServices,\nBlobServer blobServer,\nHeartbeatServices heartbeatServices,\nMetricRegistry metricRegistry,\nMetricQueryServiceRetriever metricQueryServiceRetriever,\nFatalErrorHandler fatalErrorHandler)\nthrows Exception {\nfinal DispatcherResourceManagerComponentFactory\ndispatcherResourceManagerComponentFactory =\nDefaultDispatcherResourceManagerComponentFactory\n.createSessionComponentFactory(\nStandaloneResourceManagerFactory.getInstance());\nfinal File rootDir = temporaryFolder.newFolder();\nfinal ExecutionGraphInfoStore executionGraphInfoStore =\ncreateDefaultExecutionGraphInfoStore(rootDir);\nreturn Collections.singleton(\ndispatcherResourceManagerComponentFactory.create(\nconfiguration,\ngetIOExecutor(),\nrpcServiceFactory.createRpcService(),\nhaServices,\nblobServer,\nheartbeatServices,\nmetricRegistry,\nexecutionGraphInfoStore,\nmetricQueryServiceRetriever,\nfatalErrorHandler));\n}\n}\nprivate Collection generateTerminalExecutionGraphInfos(int number) {\nfinal Collection executionGraphInfos = new ArrayList<>(number);\nfor (int i = 0; i < number; i++) {\nfinal JobStatus state =\nGLOBALLY_TERMINAL_JOB_STATUS.get(\nThreadLocalRandom.current()\n.nextInt(GLOBALLY_TERMINAL_JOB_STATUS.size()));\nexecutionGraphInfos.add(\nnew ExecutionGraphInfo(\nnew ArchivedExecutionGraphBuilder().setState(state).build()));\n}\nreturn executionGraphInfos;\n}\nprivate FileExecutionGraphInfoStore createDefaultExecutionGraphInfoStore(File storageDirectory)\nthrows IOException {\nreturn new FileExecutionGraphInfoStore(\nstorageDirectory,\nTime.hours(1L),\nInteger.MAX_VALUE,\n10000L,\nTestingUtils.defaultScheduledExecutor(),\nTicker.systemTicker());\n}\nprivate static final class PartialExecutionGraphInfoMatcher\nextends BaseMatcher {\nprivate final ExecutionGraphInfo expectedExecutionGraphInfo;\nprivate PartialExecutionGraphInfoMatcher(ExecutionGraphInfo expectedExecutionGraphInfo) {\nthis.expectedExecutionGraphInfo =\nPreconditions.checkNotNull(expectedExecutionGraphInfo);\n}\n@Override\npublic boolean matches(Object o) {\nif (expectedExecutionGraphInfo == o) {\nreturn true;\n}\nif (o == null || expectedExecutionGraphInfo.getClass() != o.getClass()) {\nreturn false;\n}\nExecutionGraphInfo that = (ExecutionGraphInfo) o;\nArchivedExecutionGraph thisExecutionGraph =\nexpectedExecutionGraphInfo.getArchivedExecutionGraph();\nArchivedExecutionGraph thatExecutionGraph = that.getArchivedExecutionGraph();\nreturn thisExecutionGraph.isStoppable() == thatExecutionGraph.isStoppable()\n&& Objects.equals(thisExecutionGraph.getJobID(), thatExecutionGraph.getJobID())\n&& Objects.equals(\nthisExecutionGraph.getJobName(), 
thatExecutionGraph.getJobName())\n&& thisExecutionGraph.getState() == thatExecutionGraph.getState()\n&& Objects.equals(\nthisExecutionGraph.getJsonPlan(), thatExecutionGraph.getJsonPlan())\n&& Objects.equals(\nthisExecutionGraph.getAccumulatorsSerialized(),\nthatExecutionGraph.getAccumulatorsSerialized())\n&& Objects.equals(\nthisExecutionGraph.getCheckpointCoordinatorConfiguration(),\nthatExecutionGraph.getCheckpointCoordinatorConfiguration())\n&& thisExecutionGraph.getAllVertices().size()\n== thatExecutionGraph.getAllVertices().size()\n&& Objects.equals(\nexpectedExecutionGraphInfo.getExceptionHistory(),\nthat.getExceptionHistory());\n}\n@Override\npublic void describeTo(Description description) {\ndescription.appendText(\n\"Matches against \" + ExecutionGraphInfo.class.getSimpleName() + '.');\n}\n}\nprivate void assertPutJobGraphWithStatus(JobStatus jobStatus) throws IOException {\nfinal ExecutionGraphInfo dummyExecutionGraphInfo =\nnew ExecutionGraphInfo(\nnew ArchivedExecutionGraphBuilder().setState(jobStatus).build());\nfinal File rootDir = temporaryFolder.newFolder();\ntry (final FileExecutionGraphInfoStore executionGraphStore =\ncreateDefaultExecutionGraphInfoStore(rootDir)) {\nfinal File storageDirectory = executionGraphStore.getStorageDir();\nassertThat(storageDirectory.listFiles().length, Matchers.equalTo(0));\nexecutionGraphStore.put(dummyExecutionGraphInfo);\nassertThat(storageDirectory.listFiles().length, Matchers.equalTo(1));\nassertThat(\nexecutionGraphStore.get(dummyExecutionGraphInfo.getJobId()),\nnew PartialExecutionGraphInfoMatcher(dummyExecutionGraphInfo));\n}\n}\nprivate static Matcher matchesPartiallyWith(\nExecutionGraphInfo executionGraphInfo) {\nreturn new PartialExecutionGraphInfoMatcher(executionGraphInfo);\n}\nprivate static Collection generateJobDetails(\nCollection executionGraphInfos) {\nreturn executionGraphInfos.stream()\n.map(ExecutionGraphInfo::getArchivedExecutionGraph)\n.map(JobDetails::createDetailsForJob)\n.collect(Collectors.toList());\n}\n}" + }, + { + "comment": "Hi @zzzwjZhang, you can read ShardingSphere code conduct first - https://shardingsphere.apache.org/community/en/involved/conduct/code/. 
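A minimal sketch of the assertion style it asks for (assuming the usual Hamcrest static imports, org.hamcrest.MatcherAssert.assertThat and org.hamcrest.CoreMatchers.is, which this module's tests already use): ```java String actual = keepFirstNLastMMaskAlgorithm.mask(\"abc\"); assertThat(actual, is(\"abc\")); ```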
We recommend using assertThat over assertEquals.", + "method_body": "public void testMaskIfPlainValueIsLess() {\nString result = keepFirstNLastMMaskAlgorithm.mask(\"abc\");\nassertEquals(\"abc\", result);\n}", + "target_code": "assertEquals(\"abc\", result);", + "method_body_after": "public void testMaskIfPlainValueIsLess() {\nString actual = keepFirstNLastMMaskAlgorithm.mask(\"abc\");\nassertThat(actual, is(\"abc\"));\n}", + "context_before": "class KeepFirstNLastMMaskAlgorithmTest {\nprivate KeepFirstNLastMMaskAlgorithm keepFirstNLastMMaskAlgorithm;\n@Before\npublic void setUp() {\nkeepFirstNLastMMaskAlgorithm = new KeepFirstNLastMMaskAlgorithm();\nkeepFirstNLastMMaskAlgorithm.init(initProperties());\n}\nprivate Properties initProperties() {\nProperties properties = new Properties();\nproperties.setProperty(\"start-index\", \"2\");\nproperties.setProperty(\"stop-index\", \"5\");\nproperties.setProperty(\"replace-char\", \"*\");\nreturn properties;\n}\n@Test\npublic void testMask() {\nString result = keepFirstNLastMMaskAlgorithm.mask(\"abc123456\");\nassertEquals(\"ab*23456\", result);\n}\n@Test\n@Test\npublic void testNotSetStartIndex() {\nKeepFirstNLastMMaskAlgorithm keepFirstNLastMMaskAlgorithm1 = new KeepFirstNLastMMaskAlgorithm();\nProperties wrongProperties = new Properties();\nwrongProperties.setProperty(\"stop-index\", \"5\");\nwrongProperties.setProperty(\"replace-char\", \"*\");\nassertThrows(IllegalArgumentException.class, () -> {\nkeepFirstNLastMMaskAlgorithm1.init(wrongProperties);\n});\n}\n}", + "context_after": "class KeepFirstNLastMMaskAlgorithmTest {\nprivate KeepFirstNLastMMaskAlgorithm keepFirstNLastMMaskAlgorithm;\n@Before\npublic void setUp() {\nkeepFirstNLastMMaskAlgorithm = new KeepFirstNLastMMaskAlgorithm();\nkeepFirstNLastMMaskAlgorithm.init(initProperties());\n}\nprivate Properties initProperties() {\nProperties properties = new Properties();\nproperties.setProperty(\"n\", \"2\");\nproperties.setProperty(\"m\", \"5\");\nproperties.setProperty(\"replace-char\", \"*\");\nreturn properties;\n}\n@Test\npublic void testMask() {\nString actual = keepFirstNLastMMaskAlgorithm.mask(\"abc123456\");\nassertThat(actual, is(\"ab**23456\"));\n}\n@Test\n@Test\npublic void testNotSetStartIndex() {\nKeepFirstNLastMMaskAlgorithm keepFirstNLastMMaskAlgorithm1 = new KeepFirstNLastMMaskAlgorithm();\nProperties wrongProperties = new Properties();\nwrongProperties.setProperty(\"m\", \"5\");\nwrongProperties.setProperty(\"replace-char\", \"*\");\nassertThrows(IllegalArgumentException.class, () -> {\nkeepFirstNLastMMaskAlgorithm1.init(wrongProperties);\n});\n}\n}" + }, + { + "comment": "Could you please explain why this `sleep` is needed?", + "method_body": "public void testSwitchFromDisablingToEnablingInClaimMode() throws Exception {\nFile firstCheckpointFolder = TEMPORARY_FOLDER.newFolder();\nMiniCluster miniCluster = cluster.getMiniCluster();\nStreamExecutionEnvironment env1 =\ngetEnv(delegatedStateBackend, firstCheckpointFolder, false, 100, 600000);\nJobGraph firstJobGraph = buildJobGraph(env1);\nminiCluster.submitJob(firstJobGraph).get();\nwaitForAllTaskRunning(miniCluster, firstJobGraph.getJobID(), true);\nminiCluster.triggerCheckpoint(firstJobGraph.getJobID()).get();\nminiCluster.cancelJob(firstJobGraph.getJobID()).get();\nString firstRestorePath =\ngetLatestCompletedCheckpointPath(firstJobGraph.getJobID(), miniCluster).get();\nFile secondCheckpointFolder = TEMPORARY_FOLDER.newFolder();\nStreamExecutionEnvironment env2 =\ngetEnv(delegatedStateBackend, secondCheckpointFolder, true, 
100, 60000);\nJobGraph secondJobGraph = buildJobGraph(env2);\nsetSavepointRestoreSettings(secondJobGraph, firstRestorePath);\nminiCluster.submitJob(secondJobGraph).get();\nwaitForAllTaskRunning(miniCluster, secondJobGraph.getJobID(), true);\nminiCluster.triggerCheckpoint(secondJobGraph.getJobID()).get();\nminiCluster.cancelJob(secondJobGraph.getJobID()).get();\nString secondRestorePath =\ngetLatestCompletedCheckpointPath(secondJobGraph.getJobID(), miniCluster).get();\nFile thirdCheckpointFolder = TEMPORARY_FOLDER.newFolder();\nStreamExecutionEnvironment env3 =\ngetEnv(delegatedStateBackend, thirdCheckpointFolder, true, 100, 100);\nJobGraph thirdJobGraph = buildJobGraph(env3);\nsetSavepointRestoreSettings(thirdJobGraph, secondRestorePath);\nminiCluster.submitJob(thirdJobGraph).get();\nwaitForAllTaskRunning(miniCluster, thirdJobGraph.getJobID(), true);\nThread.sleep(500);\nminiCluster.triggerCheckpoint(thirdJobGraph.getJobID()).get();\nminiCluster.cancelJob(thirdJobGraph.getJobID()).get();\n}", + "target_code": "Thread.sleep(500);", + "method_body_after": "public void testSwitchFromDisablingToEnablingInClaimMode() throws Exception {\nFile firstCheckpointFolder = TEMPORARY_FOLDER.newFolder();\nMiniCluster miniCluster = cluster.getMiniCluster();\nStreamExecutionEnvironment env1 =\ngetEnv(delegatedStateBackend, firstCheckpointFolder, false, 100, 600000);\nJobGraph firstJobGraph = buildJobGraph(env1);\nminiCluster.submitJob(firstJobGraph).get();\nwaitForAllTaskRunning(miniCluster, firstJobGraph.getJobID(), true);\nminiCluster.triggerCheckpoint(firstJobGraph.getJobID()).get();\nminiCluster.cancelJob(firstJobGraph.getJobID()).get();\nString firstRestorePath =\ngetLatestCompletedCheckpointPath(firstJobGraph.getJobID(), miniCluster).get();\nFile secondCheckpointFolder = TEMPORARY_FOLDER.newFolder();\nStreamExecutionEnvironment env2 =\ngetEnv(delegatedStateBackend, secondCheckpointFolder, true, 100, 600000);\nJobGraph secondJobGraph = buildJobGraph(env2);\nsetSavepointRestoreSettings(secondJobGraph, firstRestorePath);\nminiCluster.submitJob(secondJobGraph).get();\nwaitForAllTaskRunning(miniCluster, secondJobGraph.getJobID(), true);\nminiCluster.triggerCheckpoint(secondJobGraph.getJobID()).get();\nminiCluster.cancelJob(secondJobGraph.getJobID()).get();\nString secondRestorePath =\ngetLatestCompletedCheckpointPath(secondJobGraph.getJobID(), miniCluster).get();\nFile thirdCheckpointFolder = TEMPORARY_FOLDER.newFolder();\nStreamExecutionEnvironment env3 =\ngetEnv(delegatedStateBackend, thirdCheckpointFolder, true, 100, 100);\nJobGraph thirdJobGraph = buildJobGraph(env3);\nsetSavepointRestoreSettings(thirdJobGraph, secondRestorePath);\nminiCluster.submitJob(thirdJobGraph).get();\nwaitForAllTaskRunning(miniCluster, thirdJobGraph.getJobID(), true);\nminiCluster.triggerCheckpoint(thirdJobGraph.getJobID()).get();\nminiCluster.cancelJob(thirdJobGraph.getJobID()).get();\n}", + "context_before": "class ChangelogPeriodicMaterializationSwitchStateBackendITCase\nextends ChangelogPeriodicMaterializationSwitchEnvTestBase {\npublic ChangelogPeriodicMaterializationSwitchStateBackendITCase(\nAbstractStateBackend delegatedStateBackend) {\nsuper(delegatedStateBackend);\n}\n@Before\n@Override\npublic void setup() throws Exception {\nConfiguration configuration = new Configuration();\nconfiguration.setInteger(CheckpointingOptions.MAX_RETAINED_CHECKPOINTS, 1);\nFsStateChangelogStorageFactory.configure(\nconfiguration, TEMPORARY_FOLDER.newFolder(), Duration.ofMinutes(1), 10);\ncluster =\nnew 
MiniClusterWithClientResource(\nnew MiniClusterResourceConfiguration.Builder()\n.setConfiguration(configuration)\n.setNumberTaskManagers(1)\n.setNumberSlotsPerTaskManager(4)\n.build());\ncluster.before();\ncluster.getMiniCluster().overrideRestoreModeForChangelogStateBackend();\n}\n@Test\npublic void testSwitchFromEnablingToDisabling() throws Exception {\ntestSwitchEnv(getEnv(true), getEnv(false));\n}\n@Test\npublic void testSwitchFromEnablingToDisablingWithRescalingOut() throws Exception {\ntestSwitchEnv(getEnv(true, NUM_SLOTS / 2), getEnv(false, NUM_SLOTS));\n}\n@Test\npublic void testSwitchFromEnablingToDisablingWithRescalingIn() throws Exception {\ntestSwitchEnv(getEnv(true, NUM_SLOTS), getEnv(false, NUM_SLOTS / 2));\n}\n@Test\n@Test\npublic void testCheckpointFolderDeletion() throws Exception {\nFile firstCheckpointFolder = TEMPORARY_FOLDER.newFolder();\nMiniCluster miniCluster = cluster.getMiniCluster();\nStreamExecutionEnvironment env1 =\ngetEnv(delegatedStateBackend, firstCheckpointFolder, false, 100, 600000);\nJobGraph firstJobGraph = buildJobGraph(env1);\nminiCluster.submitJob(firstJobGraph).get();\nwaitForAllTaskRunning(miniCluster, firstJobGraph.getJobID(), true);\nminiCluster.triggerCheckpoint(firstJobGraph.getJobID()).get();\nminiCluster.cancelJob(firstJobGraph.getJobID()).get();\nString firstRestorePath =\ngetLatestCompletedCheckpointPath(firstJobGraph.getJobID(), miniCluster).get();\nFile secondCheckpointFolder = TEMPORARY_FOLDER.newFolder();\nStreamExecutionEnvironment env2 =\ngetEnv(delegatedStateBackend, secondCheckpointFolder, true, 100, 100);\nJobGraph secondJobGraph = buildJobGraph(env2);\nsetSavepointRestoreSettings(secondJobGraph, firstRestorePath);\nminiCluster.submitJob(secondJobGraph).get();\nwaitForAllTaskRunning(miniCluster, secondJobGraph.getJobID(), true);\nThread.sleep(1000);\nminiCluster.triggerCheckpoint(secondJobGraph.getJobID()).get();\nminiCluster.cancelJob(secondJobGraph.getJobID()).get();\nassertFalse(checkpointFolderExists(firstRestorePath.substring(5)));\n}\nprivate StreamExecutionEnvironment getEnv(boolean enableChangelog) {\nreturn getEnv(enableChangelog, NUM_SLOTS);\n}\nprivate StreamExecutionEnvironment getEnv(boolean enableChangelog, int parallelism) {\nStreamExecutionEnvironment env = getEnv(delegatedStateBackend, 100, 0, 500, 0);\nenv.enableChangelogStateBackend(enableChangelog);\nenv.setParallelism(parallelism);\nenv.getCheckpointConfig()\n.setExternalizedCheckpointCleanup(\nCheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);\nreturn env;\n}\nprivate StreamExecutionEnvironment getEnv(\nStateBackend stateBackend,\nFile checkpointFile,\nboolean changelogEnabled,\nlong checkpointInterval,\nlong materializationInterval) {\nStreamExecutionEnvironment env =\ngetEnv(\nstateBackend,\ncheckpointFile,\ncheckpointInterval,\n0,\nmaterializationInterval,\n0);\nenv.enableChangelogStateBackend(changelogEnabled);\nenv.getCheckpointConfig()\n.setExternalizedCheckpointCleanup(\nCheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);\nConfiguration configuration = new Configuration();\nconfiguration.setInteger(CheckpointingOptions.MAX_RETAINED_CHECKPOINTS, 1);\nenv.configure(configuration);\nreturn env;\n}\nprivate void setSavepointRestoreSettings(JobGraph jobGraph, String restorePath) {\njobGraph.setSavepointRestoreSettings(\nSavepointRestoreSettings.forPath(restorePath, false, RestoreMode.CLAIM));\n}\nprivate boolean checkpointFolderExists(String checkpointPath) {\nFile chk = new File(checkpointPath);\nreturn 
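/* callers pass restorePath.substring(5), presumably dropping a leading file: scheme, so this is a plain local path */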
chk.exists();\n}\n}", + "context_after": "class ChangelogPeriodicMaterializationSwitchStateBackendITCase\nextends ChangelogPeriodicMaterializationSwitchEnvTestBase {\npublic ChangelogPeriodicMaterializationSwitchStateBackendITCase(\nAbstractStateBackend delegatedStateBackend) {\nsuper(delegatedStateBackend);\n}\n@Before\n@Override\npublic void setup() throws Exception {\nConfiguration configuration = new Configuration();\nconfiguration.setInteger(CheckpointingOptions.MAX_RETAINED_CHECKPOINTS, 1);\nFsStateChangelogStorageFactory.configure(\nconfiguration, TEMPORARY_FOLDER.newFolder(), Duration.ofMinutes(1), 10);\ncluster =\nnew MiniClusterWithClientResource(\nnew MiniClusterResourceConfiguration.Builder()\n.setConfiguration(configuration)\n.setNumberTaskManagers(1)\n.setNumberSlotsPerTaskManager(4)\n.build());\ncluster.before();\ncluster.getMiniCluster().overrideRestoreModeForChangelogStateBackend();\n}\n@Test\npublic void testSwitchFromEnablingToDisabling() throws Exception {\ntestSwitchEnv(getEnv(true), getEnv(false));\n}\n@Test\npublic void testSwitchFromEnablingToDisablingWithRescalingOut() throws Exception {\ntestSwitchEnv(getEnv(true, NUM_SLOTS / 2), getEnv(false, NUM_SLOTS));\n}\n@Test\npublic void testSwitchFromEnablingToDisablingWithRescalingIn() throws Exception {\ntestSwitchEnv(getEnv(true, NUM_SLOTS), getEnv(false, NUM_SLOTS / 2));\n}\n@Test\nprivate StreamExecutionEnvironment getEnv(boolean enableChangelog) {\nreturn getEnv(enableChangelog, NUM_SLOTS);\n}\nprivate StreamExecutionEnvironment getEnv(boolean enableChangelog, int parallelism) {\nStreamExecutionEnvironment env = getEnv(delegatedStateBackend, 100, 0, 500, 0);\nenv.enableChangelogStateBackend(enableChangelog);\nenv.setParallelism(parallelism);\nenv.getCheckpointConfig()\n.setExternalizedCheckpointCleanup(\nCheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);\nreturn env;\n}\nprivate StreamExecutionEnvironment getEnv(\nStateBackend stateBackend,\nFile checkpointFile,\nboolean changelogEnabled,\nlong checkpointInterval,\nlong materializationInterval) {\nStreamExecutionEnvironment env =\ngetEnv(\nstateBackend,\ncheckpointFile,\ncheckpointInterval,\n0,\nmaterializationInterval,\n0);\nenv.enableChangelogStateBackend(changelogEnabled);\nenv.getCheckpointConfig()\n.setExternalizedCheckpointCleanup(\nCheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);\nConfiguration configuration = new Configuration();\nconfiguration.setInteger(CheckpointingOptions.MAX_RETAINED_CHECKPOINTS, 1);\nenv.configure(configuration);\nreturn env;\n}\nprivate void setSavepointRestoreSettings(JobGraph jobGraph, String restorePath) {\njobGraph.setSavepointRestoreSettings(\nSavepointRestoreSettings.forPath(restorePath, false, RestoreMode.CLAIM));\n}\n}" + }, + { + "comment": "The most risky bug in this code is: Potential loss of TaskRuns when `replayCreateTaskRun` rejects a task due to `arrangeTaskRun` returning false. You can modify the code like this: ```java if (!taskRunManager.arrangeTaskRun(taskRun)) { LOG.warn(\"Submit task run to pending queue failed, reject the submit:{}\", taskRun); // Handle failure case properly, e.g., requeue, notify, or log as needed. } ``` Explanation: When `taskRunManager.arrangeTaskRun(taskRun)` returns false and the task submission is rejected, it's simply logged but there's no action taken to either re-attempt the arrangement or handle the failure case adequately (e.g., by adding it to a retry queue, notifying an administrator, or otherwise managing the failed submission). 
Left unhandled, this silently drops task runs: they are neither executed nor accounted for, which in a scheduling system can mean data inconsistencies or processing delays. Handling the rejection explicitly avoids losing track of runs that cannot be arranged immediately.", "method_body": "public void replayUpdateTaskRun(TaskRunStatusChange statusChange) {\nConstants.TaskRunState fromStatus = statusChange.getFromStatus();\nConstants.TaskRunState toStatus = statusChange.getToStatus();\nLong taskId = statusChange.getTaskId();\nLOG.info(\"replayUpdateTaskRun:\" + statusChange);\nif (fromStatus == Constants.TaskRunState.PENDING) {\nQueue taskRunQueue = taskRunManager.getPendingTaskRunMap().get(taskId);\nif (taskRunQueue == null) {\nreturn;\n}\nif (taskRunQueue.size() == 0) {\ntaskRunManager.getPendingTaskRunMap().remove(taskId);\nreturn;\n}\nTaskRun pendingTaskRun = null;\nList tempQueue = Lists.newArrayList();\nwhile (!taskRunQueue.isEmpty()) {\nTaskRun taskRun = taskRunQueue.poll();\nif (taskRun.getStatus().getQueryId().equals(statusChange.getQueryId())) {\npendingTaskRun = taskRun;\nbreak;\n} else {\ntempQueue.add(taskRun);\n}\n}\ntaskRunQueue.addAll(tempQueue);\nif (pendingTaskRun == null) {\nLOG.warn(\"could not find query_id:{}, taskId:{}, when replay update pendingTaskRun\",\nstatusChange.getQueryId(), taskId);\nreturn;\n}\nTaskRunStatus status = pendingTaskRun.getStatus();\nif (toStatus == Constants.TaskRunState.RUNNING) {\nif (status.getQueryId().equals(statusChange.getQueryId())) {\nstatus.setState(Constants.TaskRunState.RUNNING);\ntaskRunManager.getRunningTaskRunMap().put(taskId, pendingTaskRun);\n}\n} else if (toStatus == Constants.TaskRunState.FAILED) {\nstatus.setErrorMessage(statusChange.getErrorMessage());\nstatus.setErrorCode(statusChange.getErrorCode());\nstatus.setState(Constants.TaskRunState.FAILED);\ntaskRunManager.getTaskRunHistory().addHistory(status);\n} else if (toStatus == Constants.TaskRunState.SUCCESS) {\nLOG.info(\"Replay update pendingTaskRun which is merged by others, query_id:{}, taskId:{}\",\nstatusChange.getQueryId(), taskId);\nstatus.setErrorMessage(statusChange.getErrorMessage());\nstatus.setErrorCode(statusChange.getErrorCode());\nstatus.setState(Constants.TaskRunState.SUCCESS);\nstatus.setProgress(100);\nstatus.setFinishTime(statusChange.getFinishTime());\ntaskRunManager.getTaskRunHistory().addHistory(status);\n}\nif (taskRunQueue.size() == 0) {\ntaskRunManager.getPendingTaskRunMap().remove(taskId);\n}\n} else if (fromStatus == Constants.TaskRunState.RUNNING &&\n(toStatus == Constants.TaskRunState.SUCCESS || toStatus == Constants.TaskRunState.FAILED)) {\nTaskRun runningTaskRun = taskRunManager.getRunningTaskRunMap().remove(taskId);\nif (runningTaskRun != null) {\nTaskRunStatus status = runningTaskRun.getStatus();\nif (status.getQueryId().equals(statusChange.getQueryId())) {\nif (toStatus == Constants.TaskRunState.FAILED) {\nstatus.setErrorMessage(statusChange.getErrorMessage());\nstatus.setErrorCode(statusChange.getErrorCode());\n}\nstatus.setState(toStatus);\nstatus.setProgress(100);\nstatus.setFinishTime(statusChange.getFinishTime());\nstatus.setExtraMessage(statusChange.getExtraMessage());\ntaskRunManager.getTaskRunHistory().addHistory(status);\n}\n} else {\nString queryId = statusChange.getQueryId();\nTaskRunStatus status = taskRunManager.getTaskRunHistory().getTask(queryId);\nif (status == null) 
{\nreturn;\n}\nstatus.setExtraMessage(statusChange.getExtraMessage());\n}\n} else {\nLOG.warn(\"Illegal TaskRun queryId:{} status transform from {} to {}\",\nstatusChange.getQueryId(), fromStatus, toStatus);\n}\n}", + "target_code": "taskRunManager.getPendingTaskRunMap().remove(taskId);", + "method_body_after": "public void replayUpdateTaskRun(TaskRunStatusChange statusChange) {\nConstants.TaskRunState fromStatus = statusChange.getFromStatus();\nConstants.TaskRunState toStatus = statusChange.getToStatus();\nLong taskId = statusChange.getTaskId();\nLOG.info(\"replayUpdateTaskRun:\" + statusChange);\nif (fromStatus == Constants.TaskRunState.PENDING) {\nQueue taskRunQueue = taskRunManager.getPendingTaskRunMap().get(taskId);\nif (taskRunQueue == null) {\nreturn;\n}\nif (taskRunQueue.size() == 0) {\ntaskRunManager.getPendingTaskRunMap().remove(taskId);\nreturn;\n}\nTaskRun pendingTaskRun = null;\nList tempQueue = Lists.newArrayList();\nwhile (!taskRunQueue.isEmpty()) {\nTaskRun taskRun = taskRunQueue.poll();\nif (taskRun.getStatus().getQueryId().equals(statusChange.getQueryId())) {\npendingTaskRun = taskRun;\nbreak;\n} else {\ntempQueue.add(taskRun);\n}\n}\ntaskRunQueue.addAll(tempQueue);\nif (pendingTaskRun == null) {\nLOG.warn(\"could not find query_id:{}, taskId:{}, when replay update pendingTaskRun\",\nstatusChange.getQueryId(), taskId);\nreturn;\n}\nTaskRunStatus status = pendingTaskRun.getStatus();\nif (toStatus == Constants.TaskRunState.RUNNING) {\nif (status.getQueryId().equals(statusChange.getQueryId())) {\nstatus.setState(Constants.TaskRunState.RUNNING);\ntaskRunManager.getRunningTaskRunMap().put(taskId, pendingTaskRun);\n}\n} else if (toStatus == Constants.TaskRunState.FAILED) {\nstatus.setErrorMessage(statusChange.getErrorMessage());\nstatus.setErrorCode(statusChange.getErrorCode());\nstatus.setState(Constants.TaskRunState.FAILED);\ntaskRunManager.getTaskRunHistory().addHistory(status);\n} else if (toStatus == Constants.TaskRunState.SUCCESS) {\nLOG.info(\"Replay update pendingTaskRun which is merged by others, query_id:{}, taskId:{}\",\nstatusChange.getQueryId(), taskId);\nstatus.setErrorMessage(statusChange.getErrorMessage());\nstatus.setErrorCode(statusChange.getErrorCode());\nstatus.setState(Constants.TaskRunState.SUCCESS);\nstatus.setProgress(100);\nstatus.setFinishTime(statusChange.getFinishTime());\ntaskRunManager.getTaskRunHistory().addHistory(status);\n}\nif (taskRunQueue.size() == 0) {\ntaskRunManager.getPendingTaskRunMap().remove(taskId);\n}\n} else if (fromStatus == Constants.TaskRunState.RUNNING &&\n(toStatus == Constants.TaskRunState.SUCCESS || toStatus == Constants.TaskRunState.FAILED)) {\nTaskRun runningTaskRun = taskRunManager.getRunningTaskRunMap().remove(taskId);\nif (runningTaskRun != null) {\nTaskRunStatus status = runningTaskRun.getStatus();\nif (status.getQueryId().equals(statusChange.getQueryId())) {\nif (toStatus == Constants.TaskRunState.FAILED) {\nstatus.setErrorMessage(statusChange.getErrorMessage());\nstatus.setErrorCode(statusChange.getErrorCode());\n}\nstatus.setState(toStatus);\nstatus.setProgress(100);\nstatus.setFinishTime(statusChange.getFinishTime());\nstatus.setExtraMessage(statusChange.getExtraMessage());\ntaskRunManager.getTaskRunHistory().addHistory(status);\n}\n} else {\nString queryId = statusChange.getQueryId();\nTaskRunStatus status = taskRunManager.getTaskRunHistory().getTask(queryId);\nif (status == null) {\nreturn;\n}\nstatus.setExtraMessage(statusChange.getExtraMessage());\n}\n} else {\nLOG.warn(\"Illegal TaskRun queryId:{} status 
transform from {} to {}\",\nstatusChange.getQueryId(), fromStatus, toStatus);\n}\n}", + "context_before": "class TaskManager implements MemoryTrackable {\nprivate static final Logger LOG = LogManager.getLogger(TaskManager.class);\nprivate final Map idToTaskMap;\nprivate final Map nameToTaskMap;\nprivate final Map> periodFutureMap;\nprivate final TaskRunManager taskRunManager;\nprivate final ScheduledExecutorService periodScheduler = Executors.newScheduledThreadPool(1);\nprivate final ScheduledExecutorService dispatchScheduler = Executors.newScheduledThreadPool(1);\nprivate final QueryableReentrantLock taskLock;\nprivate final AtomicBoolean isStart = new AtomicBoolean(false);\npublic TaskManager() {\nidToTaskMap = Maps.newConcurrentMap();\nnameToTaskMap = Maps.newConcurrentMap();\nperiodFutureMap = Maps.newConcurrentMap();\ntaskRunManager = new TaskRunManager();\ntaskLock = new QueryableReentrantLock(true);\n}\npublic void start() {\nif (isStart.compareAndSet(false, true)) {\nclearUnfinishedTaskRun();\nregisterPeriodicalTask();\ndispatchScheduler.scheduleAtFixedRate(() -> {\nif (!taskRunManager.tryTaskRunLock()) {\nLOG.warn(\"TaskRun scheduler cannot acquire the lock\");\nreturn;\n}\ntry {\ntaskRunManager.checkRunningTaskRun();\ntaskRunManager.scheduledPendingTaskRun();\n} catch (Exception ex) {\nLOG.warn(\"failed to dispatch task.\", ex);\n} finally {\ntaskRunManager.taskRunUnlock();\n}\n}, 0, 1, TimeUnit.SECONDS);\n}\n}\nprivate void registerPeriodicalTask() {\nfor (Task task : nameToTaskMap.values()) {\nif (task.getType() != Constants.TaskType.PERIODICAL) {\ncontinue;\n}\nTaskSchedule taskSchedule = task.getSchedule();\nif (task.getState() != Constants.TaskState.ACTIVE) {\ncontinue;\n}\nif (taskSchedule == null) {\ncontinue;\n}\nregisterScheduler(task);\n}\n}\n@VisibleForTesting\nstatic long getInitialDelayTime(long periodSeconds, LocalDateTime startTime, LocalDateTime scheduleTime) {\nDuration duration = Duration.between(scheduleTime, startTime);\nlong initialDelay = duration.getSeconds();\nif (initialDelay < 0) {\nint extra = scheduleTime.getNano() > 0 ? 
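/* Duration.getSeconds() floors the negative delay to whole seconds; a sub-second part of scheduleTime would make the first run fire early, so round the delay up by one second */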
1 : 0;\nreturn ((initialDelay % periodSeconds) + periodSeconds + extra) % periodSeconds;\n} else {\nreturn initialDelay;\n}\n}\nprivate void clearUnfinishedTaskRun() {\nif (!taskRunManager.tryTaskRunLock()) {\nreturn;\n}\ntry {\nIterator pendingIter = taskRunManager.getPendingTaskRunMap().keySet().iterator();\nwhile (pendingIter.hasNext()) {\nQueue taskRuns = taskRunManager.getPendingTaskRunMap().get(pendingIter.next());\nwhile (!taskRuns.isEmpty()) {\nTaskRun taskRun = taskRuns.poll();\ntaskRun.getStatus().setErrorMessage(\"Fe abort the task\");\ntaskRun.getStatus().setErrorCode(-1);\ntaskRun.getStatus().setState(Constants.TaskRunState.FAILED);\ntaskRunManager.getTaskRunHistory().addHistory(taskRun.getStatus());\nTaskRunStatusChange statusChange = new TaskRunStatusChange(taskRun.getTaskId(), taskRun.getStatus(),\nConstants.TaskRunState.PENDING, Constants.TaskRunState.FAILED);\nGlobalStateMgr.getCurrentState().getEditLog().logUpdateTaskRun(statusChange);\n}\npendingIter.remove();\n}\nIterator runningIter = taskRunManager.getRunningTaskRunMap().keySet().iterator();\nwhile (runningIter.hasNext()) {\nTaskRun taskRun = taskRunManager.getRunningTaskRunMap().get(runningIter.next());\ntaskRun.getStatus().setErrorMessage(\"Fe abort the task\");\ntaskRun.getStatus().setErrorCode(-1);\ntaskRun.getStatus().setState(Constants.TaskRunState.FAILED);\ntaskRun.getStatus().setFinishTime(System.currentTimeMillis());\nrunningIter.remove();\ntaskRunManager.getTaskRunHistory().addHistory(taskRun.getStatus());\nTaskRunStatusChange statusChange = new TaskRunStatusChange(taskRun.getTaskId(), taskRun.getStatus(),\nConstants.TaskRunState.RUNNING, Constants.TaskRunState.FAILED);\nGlobalStateMgr.getCurrentState().getEditLog().logUpdateTaskRun(statusChange);\n}\n} finally {\ntaskRunManager.taskRunUnlock();\n}\n}\npublic void createTask(Task task, boolean isReplay) throws DdlException {\ntakeTaskLock();\ntry {\nif (nameToTaskMap.containsKey(task.getName())) {\nthrow new DdlException(\"Task [\" + task.getName() + \"] already exists\");\n}\nif (!isReplay) {\nPreconditions.checkArgument(task.getId() == 0);\ntask.setId(GlobalStateMgr.getCurrentState().getNextId());\n}\nif (task.getType() == Constants.TaskType.PERIODICAL) {\ntask.setState(Constants.TaskState.ACTIVE);\nif (!isReplay) {\nTaskSchedule schedule = task.getSchedule();\nif (schedule == null) {\nthrow new DdlException(\"Task [\" + task.getName() + \"] has no scheduling information\");\n}\nregisterScheduler(task);\n}\n}\nnameToTaskMap.put(task.getName(), task);\nidToTaskMap.put(task.getId(), task);\nif (!isReplay) {\nGlobalStateMgr.getCurrentState().getEditLog().logCreateTask(task);\n}\n} finally {\ntaskUnlock();\n}\n}\nprivate boolean stopScheduler(String taskName) {\nTask task = nameToTaskMap.get(taskName);\nif (task.getType() != Constants.TaskType.PERIODICAL) {\nreturn false;\n}\nif (task.getState() == Constants.TaskState.PAUSE) {\nreturn true;\n}\nTaskSchedule taskSchedule = task.getSchedule();\nif (taskSchedule == null) {\nLOG.warn(\"fail to obtain scheduled info for task [{}]\", task.getName());\nreturn true;\n}\nScheduledFuture future = periodFutureMap.get(task.getId());\nif (future == null) {\nLOG.warn(\"fail to obtain scheduled info for task [{}]\", task.getName());\nreturn true;\n}\nboolean isCancel = future.cancel(true);\nif (!isCancel) {\nLOG.warn(\"fail to cancel scheduler for task [{}]\", task.getName());\n}\nreturn isCancel;\n}\npublic boolean killTask(String taskName, boolean clearPending) {\nTask task = nameToTaskMap.get(taskName);\nif (task == 
null) {\nreturn false;\n}\nif (clearPending) {\nif (!taskRunManager.tryTaskRunLock()) {\nreturn false;\n}\ntry {\ntaskRunManager.getPendingTaskRunMap().remove(task.getId());\n} catch (Exception ex) {\nLOG.warn(\"failed to kill task.\", ex);\n} finally {\ntaskRunManager.taskRunUnlock();\n}\n}\nreturn taskRunManager.killTaskRun(task.getId());\n}\npublic SubmitResult executeTask(String taskName) {\nreturn executeTask(taskName, new ExecuteOption());\n}\npublic SubmitResult executeTask(String taskName, ExecuteOption option) {\nTask task = getTask(taskName);\nif (task == null) {\nreturn new SubmitResult(null, SubmitResult.SubmitStatus.FAILED);\n}\nif (option.getIsSync()) {\nreturn executeTaskSync(task, option);\n} else {\nreturn executeTaskAsync(task, option);\n}\n}\npublic SubmitResult executeTaskSync(Task task) {\nreturn executeTaskSync(task, new ExecuteOption());\n}\npublic SubmitResult executeTaskSync(Task task, ExecuteOption option) {\nTaskRun taskRun;\nSubmitResult submitResult;\nif (!tryTaskLock()) {\nthrow new DmlException(\"Failed to get task lock when execute Task sync[\" + task.getName() + \"]\");\n}\ntry {\ntaskRun = TaskRunBuilder.newBuilder(task)\n.properties(option.getTaskRunProperties())\n.setExecuteOption(option)\n.setConnectContext(ConnectContext.get()).build();\nsubmitResult = taskRunManager.submitTaskRun(taskRun, option);\nif (submitResult.getStatus() != SUBMITTED) {\nthrow new DmlException(\"execute task:\" + task.getName() + \" failed\");\n}\n} finally {\ntaskUnlock();\n}\ntry {\nConstants.TaskRunState taskRunState = taskRun.getFuture().get();\nif (taskRunState != Constants.TaskRunState.SUCCESS) {\nString msg = taskRun.getStatus().getErrorMessage();\nthrow new DmlException(\"execute task %s failed: %s\", task.getName(), msg);\n}\nreturn submitResult;\n} catch (InterruptedException | ExecutionException e) {\nThrowable rootCause = e.getCause();\nthrow new DmlException(\"execute task %s failed: %s\", rootCause, task.getName(), rootCause.getMessage());\n} catch (Exception e) {\nthrow new DmlException(\"execute task %s failed: %s\", e, task.getName(), e.getMessage());\n}\n}\npublic SubmitResult executeTaskAsync(Task task, ExecuteOption option) {\nTaskRun taskRun = TaskRunBuilder\n.newBuilder(task)\n.properties(option.getTaskRunProperties())\n.setExecuteOption(option)\n.build();\nreturn taskRunManager.submitTaskRun(taskRun, option);\n}\npublic void dropTasks(List taskIdList, boolean isReplay) {\ntakeTaskLock();\ntry {\nfor (long taskId : taskIdList) {\nTask task = idToTaskMap.get(taskId);\nif (task == null) {\nLOG.warn(\"drop taskId {} failed because task is null\", taskId);\ncontinue;\n}\nif (task.getType() == Constants.TaskType.PERIODICAL && !isReplay) {\nboolean isCancel = stopScheduler(task.getName());\nif (!isCancel) {\ncontinue;\n}\nperiodFutureMap.remove(task.getId());\n}\nif (!killTask(task.getName(), true)) {\nLOG.error(\"kill task failed: \" + task.getName());\n}\nidToTaskMap.remove(task.getId());\nnameToTaskMap.remove(task.getName());\n}\nif (!isReplay) {\nGlobalStateMgr.getCurrentState().getEditLog().logDropTasks(taskIdList);\n}\n} finally {\ntaskUnlock();\n}\nLOG.info(\"drop tasks:{}\", taskIdList);\n}\npublic List showTasks(String dbName) {\nList taskList = Lists.newArrayList();\nif (dbName == null) {\ntaskList.addAll(nameToTaskMap.values());\n} else {\nfor (Map.Entry entry : nameToTaskMap.entrySet()) {\nTask task = entry.getValue();\nif (task.getDbName() != null && task.getDbName().equals(dbName)) {\ntaskList.add(task);\n}\n}\n}\nreturn taskList;\n}\npublic void 
alterTask(Task currentTask, Task changedTask, boolean isReplay) {\nConstants.TaskType currentType = currentTask.getType();\nConstants.TaskType changedType = changedTask.getType();\nboolean hasChanged = false;\nif (currentType == Constants.TaskType.MANUAL) {\nif (changedType == Constants.TaskType.EVENT_TRIGGERED) {\nhasChanged = true;\n}\n} else if (currentTask.getType() == Constants.TaskType.EVENT_TRIGGERED) {\nif (changedType == Constants.TaskType.MANUAL) {\nhasChanged = true;\n}\n} else if (currentTask.getType() == Constants.TaskType.PERIODICAL) {\nif (!isReplay) {\nboolean isCancel = stopScheduler(currentTask.getName());\nif (!isCancel) {\nthrow new RuntimeException(\"stop scheduler failed\");\n}\n}\nperiodFutureMap.remove(currentTask.getId());\ncurrentTask.setState(Constants.TaskState.UNKNOWN);\ncurrentTask.setSchedule(null);\nhasChanged = true;\n}\nif (changedType == Constants.TaskType.PERIODICAL) {\ncurrentTask.setState(Constants.TaskState.ACTIVE);\nTaskSchedule schedule = changedTask.getSchedule();\ncurrentTask.setSchedule(schedule);\nif (!isReplay) {\nregisterScheduler(currentTask);\n}\nhasChanged = true;\n}\nif (hasChanged) {\ncurrentTask.setType(changedTask.getType());\nif (!isReplay) {\nGlobalStateMgr.getCurrentState().getEditLog().logAlterTask(changedTask);\n}\n}\n}\nprivate void registerScheduler(Task task) {\nLocalDateTime scheduleTime = LocalDateTime.now();\nTaskSchedule schedule = task.getSchedule();\nLocalDateTime startTime = Utils.getDatetimeFromLong(schedule.getStartTime());\nlong periodSeconds = TimeUtils.convertTimeUnitValueToSecond(schedule.getPeriod(), schedule.getTimeUnit());\nlong initialDelay = getInitialDelayTime(periodSeconds, startTime, scheduleTime);\nLOG.info(\"Register scheduler, task:{}, initialDelay:{}, periodSeconds:{}, startTime:{}, scheduleTime:{}\",\ntask.getName(), initialDelay, periodSeconds, startTime, scheduleTime);\nExecuteOption option = new ExecuteOption(Constants.TaskRunPriority.LOWEST.value(), true, task.getProperties());\nScheduledFuture future = periodScheduler.scheduleAtFixedRate(() ->\nexecuteTask(task.getName(), option), initialDelay, periodSeconds, TimeUnit.SECONDS);\nperiodFutureMap.put(task.getId(), future);\n}\npublic void replayAlterTask(Task task) {\nTask currentTask = getTask(task.getName());\nalterTask(currentTask, task, true);\n}\nprivate boolean tryTaskLock() {\ntry {\nif (!taskLock.tryLock(5, TimeUnit.SECONDS)) {\nThread owner = taskLock.getOwner();\nif (owner != null) {\nLOG.warn(\"task lock is held by: {}\", Util.dumpThread(owner, 50));\n} else {\nLOG.warn(\"task lock owner is null\");\n}\nreturn false;\n}\nreturn true;\n} catch (InterruptedException e) {\nLOG.warn(\"got exception while getting task lock\", e);\n}\nreturn false;\n}\n/**\n* Keep trying to get the lock until succeed\n*/\nprivate void takeTaskLock() {\nint i = 1;\nwhile (!tryTaskLock()) {\nLOG.warn(\"fail to get TaskManager lock after retry {} times\", i);\ni++;\n}\n}\npublic void taskUnlock() {\nthis.taskLock.unlock();\n}\npublic void replayCreateTask(Task task) {\nif (task.getType() == Constants.TaskType.PERIODICAL) {\nTaskSchedule taskSchedule = task.getSchedule();\nif (taskSchedule == null) {\nLOG.warn(\"replay a null schedule period Task [{}]\", task.getName());\nreturn;\n}\n}\nif (task.getExpireTime() > 0 && System.currentTimeMillis() > task.getExpireTime()) {\nreturn;\n}\ntry {\ncreateTask(task, true);\n} catch (DdlException e) {\nLOG.warn(\"failed to replay create task [{}]\", task.getName(), e);\n}\n}\npublic void replayDropTasks(List taskIdList) 
{\ndropTasks(taskIdList, true);\n}\npublic TaskRunManager getTaskRunManager() {\nreturn taskRunManager;\n}\npublic TaskRunHistory getTaskRunHistory() {\nreturn taskRunManager.getTaskRunHistory();\n}\npublic ShowResultSet handleSubmitTaskStmt(SubmitTaskStmt submitTaskStmt) throws DdlException {\nTask task = TaskBuilder.buildTask(submitTaskStmt, ConnectContext.get());\nString taskName = task.getName();\nSubmitResult submitResult;\ntry {\ncreateTask(task, false);\nif (task.getType() == Constants.TaskType.MANUAL) {\nsubmitResult = executeTask(taskName);\n} else {\nsubmitResult = new SubmitResult(null, SUBMITTED);\n}\n} catch (DdlException ex) {\nif (ex.getMessage().contains(\"Failed to get task lock\")) {\nsubmitResult = new SubmitResult(null, SubmitResult.SubmitStatus.REJECTED);\n} else {\nLOG.warn(\"Failed to create Task [{}]\", taskName, ex);\nthrow ex;\n}\n}\nShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder();\nbuilder.addColumn(new Column(\"TaskName\", ScalarType.createVarchar(40)));\nbuilder.addColumn(new Column(\"Status\", ScalarType.createVarchar(10)));\nList item = ImmutableList.of(taskName, submitResult.getStatus().toString());\nList> result = ImmutableList.of(item);\nreturn new ShowResultSet(builder.build(), result);\n}\npublic long loadTasks(DataInputStream dis, long checksum) throws IOException {\nint taskCount = 0;\ntry {\nString s = Text.readString(dis);\nSerializeData data = GsonUtils.GSON.fromJson(s, SerializeData.class);\nif (data != null) {\nif (data.tasks != null) {\nfor (Task task : data.tasks) {\nreplayCreateTask(task);\n}\ntaskCount = data.tasks.size();\n}\nif (data.runStatus != null) {\nfor (TaskRunStatus runStatus : data.runStatus) {\nreplayCreateTaskRun(runStatus);\n}\n}\n}\nchecksum ^= taskCount;\nLOG.info(\"finished replaying TaskManager from image\");\n} catch (EOFException e) {\nLOG.info(\"no TaskManager to replay.\");\n}\nreturn checksum;\n}\npublic void loadTasksV2(SRMetaBlockReader reader)\nthrows IOException, SRMetaBlockException, SRMetaBlockEOFException {\nint size = reader.readInt();\nwhile (size-- > 0) {\nTask task = reader.readJson(Task.class);\nreplayCreateTask(task);\n}\nsize = reader.readInt();\nwhile (size-- > 0) {\nTaskRunStatus status = reader.readJson(TaskRunStatus.class);\nreplayCreateTaskRun(status);\n}\n}\npublic long saveTasks(DataOutputStream dos, long checksum) throws IOException {\nSerializeData data = new SerializeData();\ndata.tasks = new ArrayList<>(nameToTaskMap.values());\nchecksum ^= data.tasks.size();\ndata.runStatus = showTaskRunStatus(null);\nint beforeSize = data.runStatus.size();\nif (beforeSize >= Config.task_runs_max_history_number) {\ntaskRunManager.getTaskRunHistory().forceGC();\ndata.runStatus = showTaskRunStatus(null);\nString s = GsonUtils.GSON.toJson(data);\nLOG.warn(\"Too much task metadata triggers forced task_run GC, \" +\n\"size before GC:{}, size after GC:{}.\", beforeSize, data.runStatus.size());\nText.writeString(dos, s);\n} else {\nString s = GsonUtils.GSON.toJson(data);\nText.writeString(dos, s);\n}\nreturn checksum;\n}\npublic void saveTasksV2(DataOutputStream dos) throws IOException, SRMetaBlockException {\ntaskRunManager.getTaskRunHistory().forceGC();\nList runStatusList = showTaskRunStatus(null);\nSRMetaBlockWriter writer = new SRMetaBlockWriter(dos, SRMetaBlockID.TASK_MGR,\n2 + nameToTaskMap.size() + runStatusList.size());\nwriter.writeJson(nameToTaskMap.size());\nfor (Task task : nameToTaskMap.values()) {\nwriter.writeJson(task);\n}\nwriter.writeJson(runStatusList.size());\nfor 
(TaskRunStatus status : runStatusList) {\nwriter.writeJson(status);\n}\nwriter.close();\n}\npublic List showTaskRunStatus(String dbName) {\nList taskRunList = Lists.newArrayList();\nif (dbName == null) {\nfor (Queue pTaskRunQueue : taskRunManager.getPendingTaskRunMap().values()) {\ntaskRunList.addAll(pTaskRunQueue.stream().map(TaskRun::getStatus).collect(Collectors.toList()));\n}\ntaskRunList.addAll(taskRunManager.getRunningTaskRunMap().values().stream().map(TaskRun::getStatus)\n.collect(Collectors.toList()));\ntaskRunList.addAll(taskRunManager.getTaskRunHistory().getAllHistory());\n} else {\nfor (Queue pTaskRunQueue : taskRunManager.getPendingTaskRunMap().values()) {\ntaskRunList.addAll(pTaskRunQueue.stream().map(TaskRun::getStatus)\n.filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList()));\n}\ntaskRunList.addAll(taskRunManager.getRunningTaskRunMap().values().stream().map(TaskRun::getStatus)\n.filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList()));\ntaskRunList.addAll(taskRunManager.getTaskRunHistory().getAllHistory().stream()\n.filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList()));\n}\nreturn taskRunList;\n}\n/**\n* Return the last refresh TaskRunStatus for the task which the source type is MV.\n* The iteration order is by the task refresh time:\n* PendingTaskRunMap > RunningTaskRunMap > TaskRunHistory\n* TODO: Maybe only return needed MVs rather than all MVs.\n*/\npublic Map> listMVRefreshedTaskRunStatus(String dbName,\nSet taskNames) {\nMap> mvNameRunStatusMap = Maps.newHashMap();\nfor (Queue pTaskRunQueue : taskRunManager.getPendingTaskRunMap().values()) {\npTaskRunQueue.stream()\n.filter(task -> task.getTask().getSource() == Constants.TaskSource.MV)\n.map(TaskRun::getStatus)\n.filter(Objects::nonNull)\n.filter(u -> dbName == null || u.getDbName().equals(dbName))\n.filter(task -> taskNames == null || taskNames.contains(task.getTaskName()))\n.forEach(task -> mvNameRunStatusMap.computeIfAbsent(task.getTaskName(), x -> Lists.newArrayList()).add(task));\n}\ntaskRunManager.getTaskRunHistory().getAllHistory().stream()\n.filter(u -> dbName == null || u.getDbName().equals(dbName))\n.filter(task -> taskNames == null || taskNames.contains(task.getTaskName()))\n.filter(task -> isSameTaskRunJob(task, mvNameRunStatusMap))\n.forEach(task -> mvNameRunStatusMap\n.computeIfAbsent(task.getTaskName(), x -> Lists.newArrayList())\n.add(task));\ntaskRunManager.getRunningTaskRunMap().values().stream()\n.filter(task -> task.getTask().getSource() == Constants.TaskSource.MV)\n.map(TaskRun::getStatus)\n.filter(u -> dbName == null || u != null && u.getDbName().equals(dbName))\n.filter(task -> taskNames == null || taskNames.contains(task.getTaskName()))\n.filter(task -> isSameTaskRunJob(task, mvNameRunStatusMap))\n.forEach(task -> mvNameRunStatusMap.computeIfAbsent(task.getTaskName(), x -> Lists.newArrayList()).add(task));\nreturn mvNameRunStatusMap;\n}\nprivate boolean isSameTaskRunJob(TaskRunStatus taskRunStatus,\nMap> mvNameRunStatusMap) {\nif (!mvNameRunStatusMap.containsKey(taskRunStatus.getTaskName())) {\nreturn true;\n}\nList existedTaskRuns = mvNameRunStatusMap.get(taskRunStatus.getTaskName());\nif (existedTaskRuns == null || existedTaskRuns.isEmpty()) {\nreturn true;\n}\nif (!Config.enable_show_materialized_views_include_all_task_runs) {\nreturn false;\n}\nString jobId = taskRunStatus.getStartTaskRunId();\nreturn !Strings.isNullOrEmpty(jobId) && jobId.equals(existedTaskRuns.get(0).getStartTaskRunId());\n}\npublic void replayCreateTaskRun(TaskRunStatus 
status) {\nif (status.getState() == Constants.TaskRunState.SUCCESS ||\nstatus.getState() == Constants.TaskRunState.FAILED) {\nif (System.currentTimeMillis() > status.getExpireTime()) {\nreturn;\n}\n}\nLOG.info(\"replayCreateTaskRun:\" + status);\nswitch (status.getState()) {\ncase PENDING:\nString taskName = status.getTaskName();\nTask task = nameToTaskMap.get(taskName);\nif (task == null) {\nLOG.warn(\"fail to obtain task name {} because task is null\", taskName);\nreturn;\n}\nExecuteOption executeOption = new ExecuteOption();\nexecuteOption.setReplay(true);\nTaskRun taskRun = TaskRunBuilder\n.newBuilder(task)\n.setExecuteOption(executeOption)\n.build();\ntaskRun.initStatus(status.getQueryId(), status.getCreateTime());\nif (!taskRunManager.arrangeTaskRun(taskRun)) {\nLOG.warn(\"Submit task run to pending queue failed, reject the submit:{}\", taskRun);\n}\nbreak;\ncase RUNNING:\nstatus.setState(Constants.TaskRunState.FAILED);\ntaskRunManager.getTaskRunHistory().addHistory(status);\nbreak;\ncase FAILED:\ntaskRunManager.getTaskRunHistory().addHistory(status);\nbreak;\ncase SUCCESS:\nstatus.setProgress(100);\ntaskRunManager.getTaskRunHistory().addHistory(status);\nbreak;\n}\n}\npublic void replayDropTaskRuns(List queryIdList) {\nMap index = Maps.newHashMapWithExpectedSize(queryIdList.size());\nfor (String queryId : queryIdList) {\nindex.put(queryId, null);\n}\ntaskRunManager.getTaskRunHistory().getAllHistory()\n.removeIf(runStatus -> index.containsKey(runStatus.getQueryId()));\n}\npublic void replayAlterRunningTaskRunProgress(Map taskRunProgresMap) {\nMap runningTaskRunMap = taskRunManager.getRunningTaskRunMap();\nfor (Map.Entry entry : taskRunProgresMap.entrySet()) {\nif (runningTaskRunMap.containsKey(entry.getKey())) {\nrunningTaskRunMap.get(entry.getKey()).getStatus().setProgress(entry.getValue());\n}\n}\n}\npublic void removeExpiredTasks() {\nlong currentTimeMs = System.currentTimeMillis();\nList taskIdToDelete = Lists.newArrayList();\nif (!tryTaskLock()) {\nreturn;\n}\ntry {\nList currentTask = showTasks(null);\nfor (Task task : currentTask) {\nif (task.getType() == Constants.TaskType.PERIODICAL) {\nTaskSchedule taskSchedule = task.getSchedule();\nif (taskSchedule == null) {\ntaskIdToDelete.add(task.getId());\nLOG.warn(\"clean up a null schedule periodical Task [{}]\", task.getName());\ncontinue;\n}\nif (task.getState() == Constants.TaskState.ACTIVE) {\ncontinue;\n}\n}\nLong expireTime = task.getExpireTime();\nif (expireTime > 0 && currentTimeMs > expireTime) {\ntaskIdToDelete.add(task.getId());\n}\n}\n} finally {\ntaskUnlock();\n}\ndropTasks(taskIdToDelete, true);\n}\npublic void removeExpiredTaskRuns() {\nlong currentTimeMs = System.currentTimeMillis();\nList historyToDelete = Lists.newArrayList();\nif (!taskRunManager.tryTaskRunLock()) {\nreturn;\n}\ntry {\nList taskRunHistory = taskRunManager.getTaskRunHistory().getAllHistory();\nIterator iterator = taskRunHistory.iterator();\nwhile (iterator.hasNext()) {\nTaskRunStatus taskRunStatus = iterator.next();\nlong expireTime = taskRunStatus.getExpireTime();\nif (currentTimeMs > expireTime) {\nhistoryToDelete.add(taskRunStatus.getQueryId());\ntaskRunManager.getTaskRunHistory().removeTask(taskRunStatus.getQueryId());\niterator.remove();\n}\n}\n} finally {\ntaskRunManager.taskRunUnlock();\n}\nLOG.info(\"remove run history:{}\", historyToDelete);\n}\n@Override\npublic Map estimateCount() {\nreturn ImmutableMap.of(\"Task\", (long) idToTaskMap.size());\n}\n@Override\npublic long estimateSize() {\nreturn 
SizeEstimator.estimate(idToTaskMap.values());\n}\nprivate static class SerializeData {\n@SerializedName(\"tasks\")\npublic List tasks;\n@SerializedName(\"runStatus\")\npublic List runStatus;\n}\npublic boolean containTask(String taskName) {\ntakeTaskLock();\ntry {\nreturn nameToTaskMap.containsKey(taskName);\n} finally {\ntaskUnlock();\n}\n}\npublic Task getTask(String taskName) {\ntakeTaskLock();\ntry {\nreturn nameToTaskMap.get(taskName);\n} finally {\ntaskUnlock();\n}\n}\npublic Task getTaskWithoutLock(String taskName) {\nreturn nameToTaskMap.get(taskName);\n}\npublic long getTaskCount() {\nreturn this.idToTaskMap.size();\n}\n}", + "context_after": "class TaskManager implements MemoryTrackable {\nprivate static final Logger LOG = LogManager.getLogger(TaskManager.class);\nprivate final Map idToTaskMap;\nprivate final Map nameToTaskMap;\nprivate final Map> periodFutureMap;\nprivate final TaskRunManager taskRunManager;\nprivate final ScheduledExecutorService periodScheduler = Executors.newScheduledThreadPool(1);\nprivate final ScheduledExecutorService dispatchScheduler = Executors.newScheduledThreadPool(1);\nprivate final QueryableReentrantLock taskLock;\nprivate final AtomicBoolean isStart = new AtomicBoolean(false);\npublic TaskManager() {\nidToTaskMap = Maps.newConcurrentMap();\nnameToTaskMap = Maps.newConcurrentMap();\nperiodFutureMap = Maps.newConcurrentMap();\ntaskRunManager = new TaskRunManager();\ntaskLock = new QueryableReentrantLock(true);\n}\npublic void start() {\nif (isStart.compareAndSet(false, true)) {\nclearUnfinishedTaskRun();\nregisterPeriodicalTask();\ndispatchScheduler.scheduleAtFixedRate(() -> {\nif (!taskRunManager.tryTaskRunLock()) {\nLOG.warn(\"TaskRun scheduler cannot acquire the lock\");\nreturn;\n}\ntry {\ntaskRunManager.checkRunningTaskRun();\ntaskRunManager.scheduledPendingTaskRun();\n} catch (Exception ex) {\nLOG.warn(\"failed to dispatch task.\", ex);\n} finally {\ntaskRunManager.taskRunUnlock();\n}\n}, 0, 1, TimeUnit.SECONDS);\n}\n}\nprivate void registerPeriodicalTask() {\nfor (Task task : nameToTaskMap.values()) {\nif (task.getType() != Constants.TaskType.PERIODICAL) {\ncontinue;\n}\nTaskSchedule taskSchedule = task.getSchedule();\nif (task.getState() != Constants.TaskState.ACTIVE) {\ncontinue;\n}\nif (taskSchedule == null) {\ncontinue;\n}\nregisterScheduler(task);\n}\n}\n@VisibleForTesting\nstatic long getInitialDelayTime(long periodSeconds, LocalDateTime startTime, LocalDateTime scheduleTime) {\nDuration duration = Duration.between(scheduleTime, startTime);\nlong initialDelay = duration.getSeconds();\nif (initialDelay < 0) {\nint extra = scheduleTime.getNano() > 0 ? 
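/* Duration.getSeconds() floors the negative delay to whole seconds; a sub-second part of scheduleTime would make the first run fire early, so round the delay up by one second */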
1 : 0;\nreturn ((initialDelay % periodSeconds) + periodSeconds + extra) % periodSeconds;\n} else {\nreturn initialDelay;\n}\n}\nprivate void clearUnfinishedTaskRun() {\nif (!taskRunManager.tryTaskRunLock()) {\nreturn;\n}\ntry {\nIterator pendingIter = taskRunManager.getPendingTaskRunMap().keySet().iterator();\nwhile (pendingIter.hasNext()) {\nQueue taskRuns = taskRunManager.getPendingTaskRunMap().get(pendingIter.next());\nwhile (!taskRuns.isEmpty()) {\nTaskRun taskRun = taskRuns.poll();\ntaskRun.getStatus().setErrorMessage(\"Fe abort the task\");\ntaskRun.getStatus().setErrorCode(-1);\ntaskRun.getStatus().setState(Constants.TaskRunState.FAILED);\ntaskRunManager.getTaskRunHistory().addHistory(taskRun.getStatus());\nTaskRunStatusChange statusChange = new TaskRunStatusChange(taskRun.getTaskId(), taskRun.getStatus(),\nConstants.TaskRunState.PENDING, Constants.TaskRunState.FAILED);\nGlobalStateMgr.getCurrentState().getEditLog().logUpdateTaskRun(statusChange);\n}\npendingIter.remove();\n}\nIterator runningIter = taskRunManager.getRunningTaskRunMap().keySet().iterator();\nwhile (runningIter.hasNext()) {\nTaskRun taskRun = taskRunManager.getRunningTaskRunMap().get(runningIter.next());\ntaskRun.getStatus().setErrorMessage(\"Fe abort the task\");\ntaskRun.getStatus().setErrorCode(-1);\ntaskRun.getStatus().setState(Constants.TaskRunState.FAILED);\ntaskRun.getStatus().setFinishTime(System.currentTimeMillis());\nrunningIter.remove();\ntaskRunManager.getTaskRunHistory().addHistory(taskRun.getStatus());\nTaskRunStatusChange statusChange = new TaskRunStatusChange(taskRun.getTaskId(), taskRun.getStatus(),\nConstants.TaskRunState.RUNNING, Constants.TaskRunState.FAILED);\nGlobalStateMgr.getCurrentState().getEditLog().logUpdateTaskRun(statusChange);\n}\n} finally {\ntaskRunManager.taskRunUnlock();\n}\n}\npublic void createTask(Task task, boolean isReplay) throws DdlException {\ntakeTaskLock();\ntry {\nif (nameToTaskMap.containsKey(task.getName())) {\nthrow new DdlException(\"Task [\" + task.getName() + \"] already exists\");\n}\nif (!isReplay) {\nPreconditions.checkArgument(task.getId() == 0);\ntask.setId(GlobalStateMgr.getCurrentState().getNextId());\n}\nif (task.getType() == Constants.TaskType.PERIODICAL) {\ntask.setState(Constants.TaskState.ACTIVE);\nif (!isReplay) {\nTaskSchedule schedule = task.getSchedule();\nif (schedule == null) {\nthrow new DdlException(\"Task [\" + task.getName() + \"] has no scheduling information\");\n}\nregisterScheduler(task);\n}\n}\nnameToTaskMap.put(task.getName(), task);\nidToTaskMap.put(task.getId(), task);\nif (!isReplay) {\nGlobalStateMgr.getCurrentState().getEditLog().logCreateTask(task);\n}\n} finally {\ntaskUnlock();\n}\n}\nprivate boolean stopScheduler(String taskName) {\nTask task = nameToTaskMap.get(taskName);\nif (task.getType() != Constants.TaskType.PERIODICAL) {\nreturn false;\n}\nif (task.getState() == Constants.TaskState.PAUSE) {\nreturn true;\n}\nTaskSchedule taskSchedule = task.getSchedule();\nif (taskSchedule == null) {\nLOG.warn(\"fail to obtain scheduled info for task [{}]\", task.getName());\nreturn true;\n}\nScheduledFuture future = periodFutureMap.get(task.getId());\nif (future == null) {\nLOG.warn(\"fail to obtain scheduled info for task [{}]\", task.getName());\nreturn true;\n}\nboolean isCancel = future.cancel(true);\nif (!isCancel) {\nLOG.warn(\"fail to cancel scheduler for task [{}]\", task.getName());\n}\nreturn isCancel;\n}\npublic boolean killTask(String taskName, boolean clearPending) {\nTask task = nameToTaskMap.get(taskName);\nif (task == 
null) {\nreturn false;\n}\nif (clearPending) {\nif (!taskRunManager.tryTaskRunLock()) {\nreturn false;\n}\ntry {\ntaskRunManager.getPendingTaskRunMap().remove(task.getId());\n} catch (Exception ex) {\nLOG.warn(\"failed to kill task.\", ex);\n} finally {\ntaskRunManager.taskRunUnlock();\n}\n}\nreturn taskRunManager.killTaskRun(task.getId());\n}\npublic SubmitResult executeTask(String taskName) {\nreturn executeTask(taskName, new ExecuteOption());\n}\npublic SubmitResult executeTask(String taskName, ExecuteOption option) {\nTask task = getTask(taskName);\nif (task == null) {\nreturn new SubmitResult(null, SubmitResult.SubmitStatus.FAILED);\n}\nif (option.getIsSync()) {\nreturn executeTaskSync(task, option);\n} else {\nreturn executeTaskAsync(task, option);\n}\n}\npublic SubmitResult executeTaskSync(Task task) {\nreturn executeTaskSync(task, new ExecuteOption());\n}\npublic SubmitResult executeTaskSync(Task task, ExecuteOption option) {\nTaskRun taskRun;\nSubmitResult submitResult;\nif (!tryTaskLock()) {\nthrow new DmlException(\"Failed to get task lock when execute Task sync[\" + task.getName() + \"]\");\n}\ntry {\ntaskRun = TaskRunBuilder.newBuilder(task)\n.properties(option.getTaskRunProperties())\n.setExecuteOption(option)\n.setConnectContext(ConnectContext.get()).build();\nsubmitResult = taskRunManager.submitTaskRun(taskRun, option);\nif (submitResult.getStatus() != SUBMITTED) {\nthrow new DmlException(\"execute task:\" + task.getName() + \" failed\");\n}\n} finally {\ntaskUnlock();\n}\ntry {\nConstants.TaskRunState taskRunState = taskRun.getFuture().get();\nif (taskRunState != Constants.TaskRunState.SUCCESS) {\nString msg = taskRun.getStatus().getErrorMessage();\nthrow new DmlException(\"execute task %s failed: %s\", task.getName(), msg);\n}\nreturn submitResult;\n} catch (InterruptedException | ExecutionException e) {\nThrowable rootCause = e.getCause();\nthrow new DmlException(\"execute task %s failed: %s\", rootCause, task.getName(), rootCause.getMessage());\n} catch (Exception e) {\nthrow new DmlException(\"execute task %s failed: %s\", e, task.getName(), e.getMessage());\n}\n}\npublic SubmitResult executeTaskAsync(Task task, ExecuteOption option) {\nTaskRun taskRun = TaskRunBuilder\n.newBuilder(task)\n.properties(option.getTaskRunProperties())\n.setExecuteOption(option)\n.build();\nreturn taskRunManager.submitTaskRun(taskRun, option);\n}\npublic void dropTasks(List taskIdList, boolean isReplay) {\ntakeTaskLock();\ntry {\nfor (long taskId : taskIdList) {\nTask task = idToTaskMap.get(taskId);\nif (task == null) {\nLOG.warn(\"drop taskId {} failed because task is null\", taskId);\ncontinue;\n}\nif (task.getType() == Constants.TaskType.PERIODICAL && !isReplay) {\nboolean isCancel = stopScheduler(task.getName());\nif (!isCancel) {\ncontinue;\n}\nperiodFutureMap.remove(task.getId());\n}\nif (!killTask(task.getName(), true)) {\nLOG.error(\"kill task failed: \" + task.getName());\n}\nidToTaskMap.remove(task.getId());\nnameToTaskMap.remove(task.getName());\n}\nif (!isReplay) {\nGlobalStateMgr.getCurrentState().getEditLog().logDropTasks(taskIdList);\n}\n} finally {\ntaskUnlock();\n}\nLOG.info(\"drop tasks:{}\", taskIdList);\n}\npublic List showTasks(String dbName) {\nList taskList = Lists.newArrayList();\nif (dbName == null) {\ntaskList.addAll(nameToTaskMap.values());\n} else {\nfor (Map.Entry entry : nameToTaskMap.entrySet()) {\nTask task = entry.getValue();\nif (task.getDbName() != null && task.getDbName().equals(dbName)) {\ntaskList.add(task);\n}\n}\n}\nreturn taskList;\n}\npublic void 
alterTask(Task currentTask, Task changedTask, boolean isReplay) {\nConstants.TaskType currentType = currentTask.getType();\nConstants.TaskType changedType = changedTask.getType();\nboolean hasChanged = false;\nif (currentType == Constants.TaskType.MANUAL) {\nif (changedType == Constants.TaskType.EVENT_TRIGGERED) {\nhasChanged = true;\n}\n} else if (currentTask.getType() == Constants.TaskType.EVENT_TRIGGERED) {\nif (changedType == Constants.TaskType.MANUAL) {\nhasChanged = true;\n}\n} else if (currentTask.getType() == Constants.TaskType.PERIODICAL) {\nif (!isReplay) {\nboolean isCancel = stopScheduler(currentTask.getName());\nif (!isCancel) {\nthrow new RuntimeException(\"stop scheduler failed\");\n}\n}\nperiodFutureMap.remove(currentTask.getId());\ncurrentTask.setState(Constants.TaskState.UNKNOWN);\ncurrentTask.setSchedule(null);\nhasChanged = true;\n}\nif (changedType == Constants.TaskType.PERIODICAL) {\ncurrentTask.setState(Constants.TaskState.ACTIVE);\nTaskSchedule schedule = changedTask.getSchedule();\ncurrentTask.setSchedule(schedule);\nif (!isReplay) {\nregisterScheduler(currentTask);\n}\nhasChanged = true;\n}\nif (hasChanged) {\ncurrentTask.setType(changedTask.getType());\nif (!isReplay) {\nGlobalStateMgr.getCurrentState().getEditLog().logAlterTask(changedTask);\n}\n}\n}\nprivate void registerScheduler(Task task) {\nLocalDateTime scheduleTime = LocalDateTime.now();\nTaskSchedule schedule = task.getSchedule();\nLocalDateTime startTime = Utils.getDatetimeFromLong(schedule.getStartTime());\nlong periodSeconds = TimeUtils.convertTimeUnitValueToSecond(schedule.getPeriod(), schedule.getTimeUnit());\nlong initialDelay = getInitialDelayTime(periodSeconds, startTime, scheduleTime);\nLOG.info(\"Register scheduler, task:{}, initialDelay:{}, periodSeconds:{}, startTime:{}, scheduleTime:{}\",\ntask.getName(), initialDelay, periodSeconds, startTime, scheduleTime);\nExecuteOption option = new ExecuteOption(Constants.TaskRunPriority.LOWEST.value(), true, task.getProperties());\nScheduledFuture future = periodScheduler.scheduleAtFixedRate(() ->\nexecuteTask(task.getName(), option), initialDelay, periodSeconds, TimeUnit.SECONDS);\nperiodFutureMap.put(task.getId(), future);\n}\npublic void replayAlterTask(Task task) {\nTask currentTask = getTask(task.getName());\nalterTask(currentTask, task, true);\n}\nprivate boolean tryTaskLock() {\ntry {\nif (!taskLock.tryLock(5, TimeUnit.SECONDS)) {\nThread owner = taskLock.getOwner();\nif (owner != null) {\nLOG.warn(\"task lock is held by: {}\", Util.dumpThread(owner, 50));\n} else {\nLOG.warn(\"task lock owner is null\");\n}\nreturn false;\n}\nreturn true;\n} catch (InterruptedException e) {\nLOG.warn(\"got exception while getting task lock\", e);\n}\nreturn false;\n}\n/**\n* Keep trying to get the lock until succeed\n*/\nprivate void takeTaskLock() {\nint i = 1;\nwhile (!tryTaskLock()) {\nLOG.warn(\"fail to get TaskManager lock after retry {} times\", i);\ni++;\n}\n}\npublic void taskUnlock() {\nthis.taskLock.unlock();\n}\npublic void replayCreateTask(Task task) {\nif (task.getType() == Constants.TaskType.PERIODICAL) {\nTaskSchedule taskSchedule = task.getSchedule();\nif (taskSchedule == null) {\nLOG.warn(\"replay a null schedule period Task [{}]\", task.getName());\nreturn;\n}\n}\nif (task.getExpireTime() > 0 && System.currentTimeMillis() > task.getExpireTime()) {\nreturn;\n}\ntry {\ncreateTask(task, true);\n} catch (DdlException e) {\nLOG.warn(\"failed to replay create task [{}]\", task.getName(), e);\n}\n}\npublic void replayDropTasks(List taskIdList) 
{\ndropTasks(taskIdList, true);\n}\npublic TaskRunManager getTaskRunManager() {\nreturn taskRunManager;\n}\npublic TaskRunHistory getTaskRunHistory() {\nreturn taskRunManager.getTaskRunHistory();\n}\npublic ShowResultSet handleSubmitTaskStmt(SubmitTaskStmt submitTaskStmt) throws DdlException {\nTask task = TaskBuilder.buildTask(submitTaskStmt, ConnectContext.get());\nString taskName = task.getName();\nSubmitResult submitResult;\ntry {\ncreateTask(task, false);\nif (task.getType() == Constants.TaskType.MANUAL) {\nsubmitResult = executeTask(taskName);\n} else {\nsubmitResult = new SubmitResult(null, SUBMITTED);\n}\n} catch (DdlException ex) {\nif (ex.getMessage().contains(\"Failed to get task lock\")) {\nsubmitResult = new SubmitResult(null, SubmitResult.SubmitStatus.REJECTED);\n} else {\nLOG.warn(\"Failed to create Task [{}]\", taskName, ex);\nthrow ex;\n}\n}\nShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder();\nbuilder.addColumn(new Column(\"TaskName\", ScalarType.createVarchar(40)));\nbuilder.addColumn(new Column(\"Status\", ScalarType.createVarchar(10)));\nList item = ImmutableList.of(taskName, submitResult.getStatus().toString());\nList> result = ImmutableList.of(item);\nreturn new ShowResultSet(builder.build(), result);\n}\npublic long loadTasks(DataInputStream dis, long checksum) throws IOException {\nint taskCount = 0;\ntry {\nString s = Text.readString(dis);\nSerializeData data = GsonUtils.GSON.fromJson(s, SerializeData.class);\nif (data != null) {\nif (data.tasks != null) {\nfor (Task task : data.tasks) {\nreplayCreateTask(task);\n}\ntaskCount = data.tasks.size();\n}\nif (data.runStatus != null) {\nfor (TaskRunStatus runStatus : data.runStatus) {\nreplayCreateTaskRun(runStatus);\n}\n}\n}\nchecksum ^= taskCount;\nLOG.info(\"finished replaying TaskManager from image\");\n} catch (EOFException e) {\nLOG.info(\"no TaskManager to replay.\");\n}\nreturn checksum;\n}\npublic void loadTasksV2(SRMetaBlockReader reader)\nthrows IOException, SRMetaBlockException, SRMetaBlockEOFException {\nint size = reader.readInt();\nwhile (size-- > 0) {\nTask task = reader.readJson(Task.class);\nreplayCreateTask(task);\n}\nsize = reader.readInt();\nwhile (size-- > 0) {\nTaskRunStatus status = reader.readJson(TaskRunStatus.class);\nreplayCreateTaskRun(status);\n}\n}\npublic long saveTasks(DataOutputStream dos, long checksum) throws IOException {\nSerializeData data = new SerializeData();\ndata.tasks = new ArrayList<>(nameToTaskMap.values());\nchecksum ^= data.tasks.size();\ndata.runStatus = showTaskRunStatus(null);\nint beforeSize = data.runStatus.size();\nif (beforeSize >= Config.task_runs_max_history_number) {\ntaskRunManager.getTaskRunHistory().forceGC();\ndata.runStatus = showTaskRunStatus(null);\nString s = GsonUtils.GSON.toJson(data);\nLOG.warn(\"Too much task metadata triggers forced task_run GC, \" +\n\"size before GC:{}, size after GC:{}.\", beforeSize, data.runStatus.size());\nText.writeString(dos, s);\n} else {\nString s = GsonUtils.GSON.toJson(data);\nText.writeString(dos, s);\n}\nreturn checksum;\n}\npublic void saveTasksV2(DataOutputStream dos) throws IOException, SRMetaBlockException {\ntaskRunManager.getTaskRunHistory().forceGC();\nList runStatusList = showTaskRunStatus(null);\nSRMetaBlockWriter writer = new SRMetaBlockWriter(dos, SRMetaBlockID.TASK_MGR,\n2 + nameToTaskMap.size() + runStatusList.size());\nwriter.writeJson(nameToTaskMap.size());\nfor (Task task : nameToTaskMap.values()) {\nwriter.writeJson(task);\n}\nwriter.writeJson(runStatusList.size());\nfor 
(TaskRunStatus status : runStatusList) {\nwriter.writeJson(status);\n}\nwriter.close();\n}\npublic List showTaskRunStatus(String dbName) {\nList taskRunList = Lists.newArrayList();\nif (dbName == null) {\nfor (Queue pTaskRunQueue : taskRunManager.getPendingTaskRunMap().values()) {\ntaskRunList.addAll(pTaskRunQueue.stream().map(TaskRun::getStatus).collect(Collectors.toList()));\n}\ntaskRunList.addAll(taskRunManager.getRunningTaskRunMap().values().stream().map(TaskRun::getStatus)\n.collect(Collectors.toList()));\ntaskRunList.addAll(taskRunManager.getTaskRunHistory().getAllHistory());\n} else {\nfor (Queue pTaskRunQueue : taskRunManager.getPendingTaskRunMap().values()) {\ntaskRunList.addAll(pTaskRunQueue.stream().map(TaskRun::getStatus)\n.filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList()));\n}\ntaskRunList.addAll(taskRunManager.getRunningTaskRunMap().values().stream().map(TaskRun::getStatus)\n.filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList()));\ntaskRunList.addAll(taskRunManager.getTaskRunHistory().getAllHistory().stream()\n.filter(u -> u.getDbName().equals(dbName)).collect(Collectors.toList()));\n}\nreturn taskRunList;\n}\n/**\n* Return the last refresh TaskRunStatus for the task which the source type is MV.\n* The iteration order is by the task refresh time:\n* PendingTaskRunMap > RunningTaskRunMap > TaskRunHistory\n* TODO: Maybe only return needed MVs rather than all MVs.\n*/\npublic Map> listMVRefreshedTaskRunStatus(String dbName,\nSet taskNames) {\nMap> mvNameRunStatusMap = Maps.newHashMap();\nfor (Queue pTaskRunQueue : taskRunManager.getPendingTaskRunMap().values()) {\npTaskRunQueue.stream()\n.filter(task -> task.getTask().getSource() == Constants.TaskSource.MV)\n.map(TaskRun::getStatus)\n.filter(Objects::nonNull)\n.filter(u -> dbName == null || u.getDbName().equals(dbName))\n.filter(task -> taskNames == null || taskNames.contains(task.getTaskName()))\n.forEach(task -> mvNameRunStatusMap.computeIfAbsent(task.getTaskName(), x -> Lists.newArrayList()).add(task));\n}\ntaskRunManager.getTaskRunHistory().getAllHistory().stream()\n.filter(u -> dbName == null || u.getDbName().equals(dbName))\n.filter(task -> taskNames == null || taskNames.contains(task.getTaskName()))\n.filter(task -> isSameTaskRunJob(task, mvNameRunStatusMap))\n.forEach(task -> mvNameRunStatusMap\n.computeIfAbsent(task.getTaskName(), x -> Lists.newArrayList())\n.add(task));\ntaskRunManager.getRunningTaskRunMap().values().stream()\n.filter(task -> task.getTask().getSource() == Constants.TaskSource.MV)\n.map(TaskRun::getStatus)\n.filter(u -> dbName == null || u != null && u.getDbName().equals(dbName))\n.filter(task -> taskNames == null || taskNames.contains(task.getTaskName()))\n.filter(task -> isSameTaskRunJob(task, mvNameRunStatusMap))\n.forEach(task -> mvNameRunStatusMap.computeIfAbsent(task.getTaskName(), x -> Lists.newArrayList()).add(task));\nreturn mvNameRunStatusMap;\n}\nprivate boolean isSameTaskRunJob(TaskRunStatus taskRunStatus,\nMap> mvNameRunStatusMap) {\nif (!mvNameRunStatusMap.containsKey(taskRunStatus.getTaskName())) {\nreturn true;\n}\nList existedTaskRuns = mvNameRunStatusMap.get(taskRunStatus.getTaskName());\nif (existedTaskRuns == null || existedTaskRuns.isEmpty()) {\nreturn true;\n}\nif (!Config.enable_show_materialized_views_include_all_task_runs) {\nreturn false;\n}\nString jobId = taskRunStatus.getStartTaskRunId();\nreturn !Strings.isNullOrEmpty(jobId) && jobId.equals(existedTaskRuns.get(0).getStartTaskRunId());\n}\npublic void replayCreateTaskRun(TaskRunStatus 
status) {\nif (status.getState() == Constants.TaskRunState.SUCCESS ||\nstatus.getState() == Constants.TaskRunState.FAILED) {\nif (System.currentTimeMillis() > status.getExpireTime()) {\nreturn;\n}\n}\nLOG.info(\"replayCreateTaskRun:\" + status);\nswitch (status.getState()) {\ncase PENDING:\nString taskName = status.getTaskName();\nTask task = nameToTaskMap.get(taskName);\nif (task == null) {\nLOG.warn(\"fail to obtain task name {} because task is null\", taskName);\nreturn;\n}\nExecuteOption executeOption = new ExecuteOption();\nexecuteOption.setReplay(true);\nTaskRun taskRun = TaskRunBuilder\n.newBuilder(task)\n.setExecuteOption(executeOption)\n.build();\ntaskRun.initStatus(status.getQueryId(), status.getCreateTime());\nif (!taskRunManager.arrangeTaskRun(taskRun)) {\nLOG.warn(\"Submit task run to pending queue failed, reject the submit:{}\", taskRun);\n}\nbreak;\ncase RUNNING:\nstatus.setState(Constants.TaskRunState.FAILED);\ntaskRunManager.getTaskRunHistory().addHistory(status);\nbreak;\ncase FAILED:\ntaskRunManager.getTaskRunHistory().addHistory(status);\nbreak;\ncase SUCCESS:\nstatus.setProgress(100);\ntaskRunManager.getTaskRunHistory().addHistory(status);\nbreak;\n}\n}\npublic void replayDropTaskRuns(List queryIdList) {\nMap index = Maps.newHashMapWithExpectedSize(queryIdList.size());\nfor (String queryId : queryIdList) {\nindex.put(queryId, null);\n}\ntaskRunManager.getTaskRunHistory().getAllHistory()\n.removeIf(runStatus -> index.containsKey(runStatus.getQueryId()));\n}\npublic void replayAlterRunningTaskRunProgress(Map taskRunProgresMap) {\nMap runningTaskRunMap = taskRunManager.getRunningTaskRunMap();\nfor (Map.Entry entry : taskRunProgresMap.entrySet()) {\nif (runningTaskRunMap.containsKey(entry.getKey())) {\nrunningTaskRunMap.get(entry.getKey()).getStatus().setProgress(entry.getValue());\n}\n}\n}\npublic void removeExpiredTasks() {\nlong currentTimeMs = System.currentTimeMillis();\nList taskIdToDelete = Lists.newArrayList();\nif (!tryTaskLock()) {\nreturn;\n}\ntry {\nList currentTask = showTasks(null);\nfor (Task task : currentTask) {\nif (task.getType() == Constants.TaskType.PERIODICAL) {\nTaskSchedule taskSchedule = task.getSchedule();\nif (taskSchedule == null) {\ntaskIdToDelete.add(task.getId());\nLOG.warn(\"clean up a null schedule periodical Task [{}]\", task.getName());\ncontinue;\n}\nif (task.getState() == Constants.TaskState.ACTIVE) {\ncontinue;\n}\n}\nLong expireTime = task.getExpireTime();\nif (expireTime > 0 && currentTimeMs > expireTime) {\ntaskIdToDelete.add(task.getId());\n}\n}\n} finally {\ntaskUnlock();\n}\ndropTasks(taskIdToDelete, true);\n}\npublic void removeExpiredTaskRuns() {\nlong currentTimeMs = System.currentTimeMillis();\nList historyToDelete = Lists.newArrayList();\nif (!taskRunManager.tryTaskRunLock()) {\nreturn;\n}\ntry {\nList taskRunHistory = taskRunManager.getTaskRunHistory().getAllHistory();\nIterator iterator = taskRunHistory.iterator();\nwhile (iterator.hasNext()) {\nTaskRunStatus taskRunStatus = iterator.next();\nlong expireTime = taskRunStatus.getExpireTime();\nif (currentTimeMs > expireTime) {\nhistoryToDelete.add(taskRunStatus.getQueryId());\ntaskRunManager.getTaskRunHistory().removeTask(taskRunStatus.getQueryId());\niterator.remove();\n}\n}\n} finally {\ntaskRunManager.taskRunUnlock();\n}\nLOG.info(\"remove run history:{}\", historyToDelete);\n}\n@Override\npublic Map estimateCount() {\nreturn ImmutableMap.of(\"Task\", (long) idToTaskMap.size());\n}\n@Override\npublic long estimateSize() {\nreturn 
SizeEstimator.estimate(idToTaskMap.values());\n}\nprivate static class SerializeData {\n@SerializedName(\"tasks\")\npublic List tasks;\n@SerializedName(\"runStatus\")\npublic List runStatus;\n}\npublic boolean containTask(String taskName) {\ntakeTaskLock();\ntry {\nreturn nameToTaskMap.containsKey(taskName);\n} finally {\ntaskUnlock();\n}\n}\npublic Task getTask(String taskName) {\ntakeTaskLock();\ntry {\nreturn nameToTaskMap.get(taskName);\n} finally {\ntaskUnlock();\n}\n}\npublic Task getTaskWithoutLock(String taskName) {\nreturn nameToTaskMap.get(taskName);\n}\npublic long getTaskCount() {\nreturn this.idToTaskMap.size();\n}\n}" + }, + { + "comment": "Is this because too many tables will cause a lot of optimizer time? Could this be made configurable to avoid some bad cases?", "method_body": "void memoOptimize(ConnectContext connectContext, Memo memo, TaskContext rootTaskContext) {\nOptExpression tree = memo.getRootGroup().extractLogicalTree();\nSessionVariable sessionVariable = connectContext.getSessionVariable();\nif (Utils.countJoinNodeSize(tree, CboTablePruneRule.JOIN_TYPES) < 10 &&\nsessionVariable.isEnableCboTablePrune()) {\ncontext.getRuleSet().addCboTablePruneRule();\n}\nint innerCrossJoinNode = Utils.countJoinNodeSize(tree, JoinOperator.innerCrossJoinSet());\nif (!sessionVariable.isDisableJoinReorder() && innerCrossJoinNode < sessionVariable.getCboMaxReorderNode()) {\nif (innerCrossJoinNode > sessionVariable.getCboMaxReorderNodeUseExhaustive()) {\nCTEUtils.collectForceCteStatistics(memo, context);\nOptimizerTraceUtil.logOptExpression(connectContext, \"before ReorderJoinRule:\\n%s\", tree);\nnew ReorderJoinRule().transform(tree, context);\nOptimizerTraceUtil.logOptExpression(connectContext, \"after ReorderJoinRule:\\n%s\", tree);\ncontext.getRuleSet().addJoinCommutativityWithOutInnerRule();\n} else {\nif (Utils.countJoinNodeSize(tree, JoinOperator.semiAntiJoinSet()) <\nsessionVariable.getCboMaxReorderNodeUseExhaustive()) {\ncontext.getRuleSet().getTransformRules().add(new SemiReorderRule());\n}\ncontext.getRuleSet().addJoinTransformationRules();\n}\n}\nif (!sessionVariable.isDisableJoinReorder() && sessionVariable.isEnableOuterJoinReorder()\n&& Utils.capableOuterReorder(tree, sessionVariable.getCboReorderThresholdUseExhaustive())) {\ncontext.getRuleSet().addOuterJoinTransformationRules();\n}\nif (!sessionVariable.isMVPlanner()) {\nString joinImplementationMode = ConnectContext.get().getSessionVariable().getJoinImplementationMode();\nif (\"merge\".equalsIgnoreCase(joinImplementationMode)) {\ncontext.getRuleSet().addMergeJoinImplementationRule();\n} else if (\"hash\".equalsIgnoreCase(joinImplementationMode)) {\ncontext.getRuleSet().addHashJoinImplementationRule();\n} else if (\"nestloop\".equalsIgnoreCase(joinImplementationMode)) {\ncontext.getRuleSet().addNestLoopJoinImplementationRule();\n} else {\ncontext.getRuleSet().addAutoJoinImplementationRule();\n}\n} else {\ncontext.getRuleSet().addRealtimeMVRules();\n}\nif (isEnableMultiTableRewrite(connectContext, tree)) {\nif (sessionVariable.isEnableMaterializedViewViewDeltaRewrite() &&\nrootTaskContext.getOptimizerContext().getCandidateMvs()\n.stream().anyMatch(MaterializationContext::hasMultiTables)) {\ncontext.getRuleSet().addSingleTableMvRewriteRule();\n}\ncontext.getRuleSet().addMultiTableMvRewriteRule();\n}\ncontext.getTaskScheduler().pushTask(new OptimizeGroupTask(rootTaskContext, memo.getRootGroup()));\ncontext.getTaskScheduler().executeTasks(rootTaskContext);\n}", "target_code": "if (Utils.countJoinNodeSize(tree, 
CboTablePruneRule.JOIN_TYPES) < 10 &&", + "method_body_after": "void memoOptimize(ConnectContext connectContext, Memo memo, TaskContext rootTaskContext) {\nOptExpression tree = memo.getRootGroup().extractLogicalTree();\nSessionVariable sessionVariable = connectContext.getSessionVariable();\nif (Utils.countJoinNodeSize(tree, CboTablePruneRule.JOIN_TYPES) < 10 &&\nsessionVariable.isEnableCboTablePrune()) {\ncontext.getRuleSet().addCboTablePruneRule();\n}\nint innerCrossJoinNode = Utils.countJoinNodeSize(tree, JoinOperator.innerCrossJoinSet());\nif (!sessionVariable.isDisableJoinReorder() && innerCrossJoinNode < sessionVariable.getCboMaxReorderNode()) {\nif (innerCrossJoinNode > sessionVariable.getCboMaxReorderNodeUseExhaustive()) {\nCTEUtils.collectForceCteStatistics(memo, context);\nOptimizerTraceUtil.logOptExpression(connectContext, \"before ReorderJoinRule:\\n%s\", tree);\nnew ReorderJoinRule().transform(tree, context);\nOptimizerTraceUtil.logOptExpression(connectContext, \"after ReorderJoinRule:\\n%s\", tree);\ncontext.getRuleSet().addJoinCommutativityWithOutInnerRule();\n} else {\nif (Utils.countJoinNodeSize(tree, JoinOperator.semiAntiJoinSet()) <\nsessionVariable.getCboMaxReorderNodeUseExhaustive()) {\ncontext.getRuleSet().getTransformRules().add(new SemiReorderRule());\n}\ncontext.getRuleSet().addJoinTransformationRules();\n}\n}\nif (!sessionVariable.isDisableJoinReorder() && sessionVariable.isEnableOuterJoinReorder()\n&& Utils.capableOuterReorder(tree, sessionVariable.getCboReorderThresholdUseExhaustive())) {\ncontext.getRuleSet().addOuterJoinTransformationRules();\n}\nif (!sessionVariable.isMVPlanner()) {\nString joinImplementationMode = ConnectContext.get().getSessionVariable().getJoinImplementationMode();\nif (\"merge\".equalsIgnoreCase(joinImplementationMode)) {\ncontext.getRuleSet().addMergeJoinImplementationRule();\n} else if (\"hash\".equalsIgnoreCase(joinImplementationMode)) {\ncontext.getRuleSet().addHashJoinImplementationRule();\n} else if (\"nestloop\".equalsIgnoreCase(joinImplementationMode)) {\ncontext.getRuleSet().addNestLoopJoinImplementationRule();\n} else {\ncontext.getRuleSet().addAutoJoinImplementationRule();\n}\n} else {\ncontext.getRuleSet().addRealtimeMVRules();\n}\nif (isEnableMultiTableRewrite(connectContext, tree)) {\nif (sessionVariable.isEnableMaterializedViewViewDeltaRewrite() &&\nrootTaskContext.getOptimizerContext().getCandidateMvs()\n.stream().anyMatch(MaterializationContext::hasMultiTables)) {\ncontext.getRuleSet().addSingleTableMvRewriteRule();\n}\ncontext.getRuleSet().addMultiTableMvRewriteRule();\n}\ncontext.getTaskScheduler().pushTask(new OptimizeGroupTask(rootTaskContext, memo.getRootGroup()));\ncontext.getTaskScheduler().executeTasks(rootTaskContext);\n}", + "context_before": "class Optimizer {\nprivate static final Logger LOG = LogManager.getLogger(Optimizer.class);\nprivate OptimizerContext context;\nprivate final OptimizerConfig optimizerConfig;\nprivate long updateTableId = -1;\npublic Optimizer() {\nthis(OptimizerConfig.defaultConfig());\n}\npublic Optimizer(OptimizerConfig config) {\nthis.optimizerConfig = config;\n}\n@VisibleForTesting\npublic OptimizerConfig getOptimizerConfig() {\nreturn optimizerConfig;\n}\npublic OptimizerContext getContext() {\nreturn context;\n}\npublic OptExpression optimize(ConnectContext connectContext,\nOptExpression logicOperatorTree,\nPhysicalPropertySet requiredProperty,\nColumnRefSet requiredColumns,\nColumnRefFactory columnRefFactory) {\nprepare(connectContext, logicOperatorTree, 
columnRefFactory);\ncontext.setUpdateTableId(updateTableId);\nif (optimizerConfig.isRuleBased()) {\nreturn optimizeByRule(connectContext, logicOperatorTree, requiredProperty, requiredColumns);\n} else {\nreturn optimizeByCost(connectContext, logicOperatorTree, requiredProperty, requiredColumns);\n}\n}\npublic void setUpdateTableId(long updateTableId) {\nthis.updateTableId = updateTableId;\n}\nprivate OptExpression optimizeByRule(ConnectContext connectContext,\nOptExpression logicOperatorTree,\nPhysicalPropertySet requiredProperty,\nColumnRefSet requiredColumns) {\nOptimizerTraceUtil.logOptExpression(connectContext, \"origin logicOperatorTree:\\n%s\", logicOperatorTree);\nTaskContext rootTaskContext =\nnew TaskContext(context, requiredProperty, requiredColumns.clone(), Double.MAX_VALUE);\nlogicOperatorTree = rewriteAndValidatePlan(connectContext, logicOperatorTree, rootTaskContext);\nOptimizerTraceUtil.log(connectContext, \"after logical rewrite, new logicOperatorTree:\\n%s\", logicOperatorTree);\nreturn logicOperatorTree;\n}\n/**\n* Optimizer will transform and implement the logical operator based on\n* the {@see Rule}, then cost the physical operator, and finally find the\n* lowest cost physical operator tree\n*\n* @param logicOperatorTree the input for query Optimizer\n* @param requiredProperty the required physical property from sql or groupExpression\n* @param requiredColumns the required output columns from sql or groupExpression\n* @return the lowest cost physical operator for this query\n*/\nprivate OptExpression optimizeByCost(ConnectContext connectContext,\nOptExpression logicOperatorTree,\nPhysicalPropertySet requiredProperty,\nColumnRefSet requiredColumns) {\nOptimizerTraceUtil.logOptExpression(connectContext, \"origin logicOperatorTree:\\n%s\", logicOperatorTree);\nMemo memo = context.getMemo();\nTaskContext rootTaskContext =\nnew TaskContext(context, requiredProperty, requiredColumns.clone(), Double.MAX_VALUE);\ntry (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.RuleBaseOptimize\")) {\nlogicOperatorTree = rewriteAndValidatePlan(connectContext, logicOperatorTree, rootTaskContext);\n}\nmemo.init(logicOperatorTree);\nOptimizerTraceUtil.log(connectContext, \"after logical rewrite, root group:\\n%s\", memo.getRootGroup());\ncollectAllScanOperators(memo, rootTaskContext);\nmemo.deriveAllGroupLogicalProperty();\ntry (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.CostBaseOptimize\")) {\nmemoOptimize(connectContext, memo, rootTaskContext);\n}\nOptExpression result;\nif (!connectContext.getSessionVariable().isSetUseNthExecPlan()) {\nresult = extractBestPlan(requiredProperty, memo.getRootGroup());\n} else {\nint nthExecPlan = connectContext.getSessionVariable().getUseNthExecPlan();\nresult = EnumeratePlan.extractNthPlan(requiredProperty, memo.getRootGroup(), nthExecPlan);\n}\nOptimizerTraceUtil.logOptExpression(connectContext, \"after extract best plan:\\n%s\", result);\nfinal CostEstimate costs = Explain.buildCost(result);\nconnectContext.getAuditEventBuilder().setPlanCpuCosts(costs.getCpuCost())\n.setPlanMemCosts(costs.getMemoryCost());\nOptExpression finalPlan;\ntry (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.PhysicalRewrite\")) {\nfinalPlan = physicalRuleRewrite(rootTaskContext, result);\nOptimizerTraceUtil.logOptExpression(connectContext, \"final plan after physical rewrite:\\n%s\", finalPlan);\nOptimizerTraceUtil.log(connectContext, context.getTraceInfo());\n}\ntry 
(PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.PlanValidate\")) {\nPlanValidator.getInstance().validatePlan(finalPlan, rootTaskContext);\nreturn finalPlan;\n}\n}\nprivate void prepare(ConnectContext connectContext, OptExpression logicOperatorTree,\nColumnRefFactory columnRefFactory) {\nMemo memo = null;\nif (!optimizerConfig.isRuleBased()) {\nmemo = new Memo();\n}\ncontext = new OptimizerContext(memo, columnRefFactory, connectContext, optimizerConfig);\nOptimizerTraceInfo traceInfo;\nif (connectContext.getExecutor() == null) {\ntraceInfo = new OptimizerTraceInfo(connectContext.getQueryId(), null);\n} else {\ntraceInfo =\nnew OptimizerTraceInfo(connectContext.getQueryId(), connectContext.getExecutor().getParsedStmt());\n}\ncontext.setTraceInfo(traceInfo);\nif (Config.enable_experimental_mv\n&& connectContext.getSessionVariable().isEnableMaterializedViewRewrite()\n&& !optimizerConfig.isRuleBased()) {\nMvRewritePreprocessor preprocessor =\nnew MvRewritePreprocessor(connectContext, columnRefFactory, context, logicOperatorTree);\ntry (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.preprocessMvs\")) {\npreprocessor.prepareMvCandidatesForPlan();\n}\nif (connectContext.getSessionVariable().isEnableSyncMaterializedViewRewrite()) {\ntry (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.preprocessSyncMvs\")) {\npreprocessor.prepareSyncMvCandidatesForPlan();\n}\n}\nOptimizerTraceUtil.logMVPrepare(connectContext, \"There are %d candidate MVs after prepare phase\",\ncontext.getCandidateMvs().size());\n}\n}\nprivate void pruneTables(OptExpression tree, TaskContext rootTaskContext, ColumnRefSet requiredColumns) {\nif (rootTaskContext.getOptimizerContext().getSessionVariable().isEnableRboTablePrune()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PARTITION_PRUNE);\nruleRewriteIterative(tree, rootTaskContext, new MergeProjectWithChildRule());\ntree = new UniquenessBasedTablePruneRule().rewrite(tree, rootTaskContext);\ntree = new ReorderJoinRule().rewrite(tree, context);\ntree = new SeparateProjectRule().rewrite(tree, rootTaskContext);\nderiveLogicalProperty(tree);\ntree = new PrimaryKeyUpdateTableRule().rewrite(tree, rootTaskContext);\nderiveLogicalProperty(tree);\ntree = new RboTablePruneRule().rewrite(tree, rootTaskContext);\nruleRewriteIterative(tree, rootTaskContext, new MergeTwoProjectRule());\nrootTaskContext.setRequiredColumns(requiredColumns.clone());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PRUNE_COLUMNS);\ncontext.setEnableLeftRightJoinEquivalenceDerive(true);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PUSH_DOWN_PREDICATE);\n}\n}\nprivate OptExpression logicalRuleRewrite(ConnectContext connectContext,\nOptExpression tree,\nTaskContext rootTaskContext) {\ntree = OptExpression.create(new LogicalTreeAnchorOperator(), tree);\nColumnRefSet requiredColumns = rootTaskContext.getRequiredColumns().clone();\nderiveLogicalProperty(tree);\nSessionVariable sessionVariable = rootTaskContext.getOptimizerContext().getSessionVariable();\nCTEContext cteContext = context.getCteContext();\nCTEUtils.collectCteOperators(tree, context);\nif (sessionVariable.isEnableRboTablePrune()) {\ncontext.setEnableLeftRightJoinEquivalenceDerive(false);\n}\nwhile (cteContext.hasInlineCTE()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.INLINE_CTE);\nCTEUtils.collectCteOperators(tree, context);\n}\nruleRewriteIterative(tree, rootTaskContext, 
RuleSetType.AGGREGATE_REWRITE);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PUSH_DOWN_SUBQUERY);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.SUBQUERY_REWRITE_COMMON);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.SUBQUERY_REWRITE_TO_WINDOW);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.SUBQUERY_REWRITE_TO_JOIN);\nruleRewriteOnlyOnce(tree, rootTaskContext, new ApplyExceptionRule());\nCTEUtils.collectCteOperators(tree, context);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PUSH_DOWN_PREDICATE);\nruleRewriteIterative(tree, rootTaskContext, new MergeTwoProjectRule());\nruleRewriteOnlyOnce(tree, rootTaskContext, new PushDownAggToMetaScanRule());\nruleRewriteOnlyOnce(tree, rootTaskContext, new PushDownPredicateRankingWindowRule());\nruleRewriteOnlyOnce(tree, rootTaskContext, new PushDownJoinOnExpressionToChildProject());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PRUNE_COLUMNS);\nderiveLogicalProperty(tree);\nruleRewriteIterative(tree, rootTaskContext, new PruneEmptyWindowRule());\nruleRewriteIterative(tree, rootTaskContext, new MergeTwoAggRule());\nrootTaskContext.setRequiredColumns(requiredColumns.clone());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PRUNE_COLUMNS);\npruneTables(tree, rootTaskContext, requiredColumns);\nruleRewriteIterative(tree, rootTaskContext, new PruneEmptyWindowRule());\nruleRewriteIterative(tree, rootTaskContext, new MergeTwoProjectRule());\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.MERGE_LIMIT);\nruleRewriteIterative(tree, rootTaskContext, new PushDownProjectLimitRule());\nruleRewriteOnlyOnce(tree, rootTaskContext, new PushDownLimitRankingWindowRule());\nif (sessionVariable.isEnableRewriteGroupingsetsToUnionAll()) {\nruleRewriteIterative(tree, rootTaskContext, new RewriteGroupingSetsByCTERule());\n}\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PRUNE_ASSERT_ROW);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PRUNE_PROJECT);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PRUNE_SET_OPERATOR);\nCTEUtils.collectCteOperators(tree, context);\nif (cteContext.needOptimizeCTE()) {\ncteContext.reset();\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.COLLECT_CTE);\nrootTaskContext.setRequiredColumns(requiredColumns.clone());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PRUNE_COLUMNS);\nif (cteContext.needPushLimit() || cteContext.needPushPredicate()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, new PushLimitAndFilterToCTEProduceRule());\n}\nif (cteContext.needPushPredicate()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PUSH_DOWN_PREDICATE);\n}\nif (cteContext.needPushLimit()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.MERGE_LIMIT);\n}\n}\nif (!optimizerConfig.isRuleDisable(TF_MATERIALIZED_VIEW)\n&& sessionVariable.isEnableSyncMaterializedViewRewrite()) {\nOptimizerTraceUtil.logOptExpression(connectContext, \"before MaterializedViewRule:\\n%s\", tree);\ntree = new MaterializedViewRule().transform(tree, context).get(0);\nOptimizerTraceUtil.logOptExpression(connectContext, \"after MaterializedViewRule:\\n%s\", tree);\nderiveLogicalProperty(tree);\n}\ntree = pruneSubfield(tree, rootTaskContext);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.MULTI_DISTINCT_REWRITE);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PUSH_DOWN_PREDICATE);\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PARTITION_PRUNE);\nruleRewriteOnlyOnce(tree, rootTaskContext, 
LimitPruneTabletsRule.getInstance());\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PRUNE_PROJECT);\ntree = pushDownAggregation(tree, rootTaskContext, requiredColumns);\nCTEUtils.collectCteOperators(tree, context);\nwhile (cteContext.hasInlineCTE()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.INLINE_CTE);\nCTEUtils.collectCteOperators(tree, context);\n}\nruleRewriteIterative(tree, rootTaskContext, new PruneEmptyWindowRule());\nruleRewriteIterative(tree, rootTaskContext, new MergeTwoProjectRule());\nruleRewriteIterative(tree, rootTaskContext, new RewriteSimpleAggToMetaScanRule());\nruleRewriteIterative(tree, rootTaskContext, new MergeProjectWithChildRule());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.INTERSECT_REWRITE);\nruleRewriteIterative(tree, rootTaskContext, new RemoveAggregationFromAggTable());\nruleRewriteOnlyOnce(tree, rootTaskContext, SplitScanORToUnionRule.getInstance());\nif (isEnableSingleTableMVRewrite(rootTaskContext, sessionVariable, tree)) {\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.SINGLE_TABLE_MV_REWRITE);\n}\nruleRewriteOnlyOnce(tree, rootTaskContext, new GroupByCountDistinctRewriteRule());\nreturn tree.getInputs().get(0);\n}\nprivate boolean isEnableSingleTableMVRewrite(TaskContext rootTaskContext,\nSessionVariable sessionVariable,\nOptExpression queryPlan) {\nif (optimizerConfig.isRuleSetTypeDisable(RuleSetType.SINGLE_TABLE_MV_REWRITE)) {\nreturn false;\n}\nif (!sessionVariable.isEnableMaterializedViewRewrite()\n|| !sessionVariable.isEnableRuleBasedMaterializedViewRewrite()) {\nreturn false;\n}\nif (rootTaskContext.getOptimizerContext().getCandidateMvs().isEmpty()) {\nreturn false;\n}\nif (!sessionVariable.isEnableMaterializedViewSingleTableViewDeltaRewrite() &&\nMvUtils.getAllTables(queryPlan).size() <= 1) {\nreturn true;\n}\nif (sessionVariable.isEnableMaterializedViewViewDeltaRewrite() &&\nrootTaskContext.getOptimizerContext().getCandidateMvs()\n.stream().anyMatch(MaterializationContext::hasMultiTables)) {\nreturn false;\n}\nreturn true;\n}\nprivate OptExpression rewriteAndValidatePlan(ConnectContext connectContext,\nOptExpression tree,\nTaskContext rootTaskContext) {\nOptExpression result = logicalRuleRewrite(connectContext, tree, rootTaskContext);\nOptExpressionValidator validator = new OptExpressionValidator();\nvalidator.validate(result);\nreturn result;\n}\nprivate OptExpression pushDownAggregation(OptExpression tree, TaskContext rootTaskContext,\nColumnRefSet requiredColumns) {\nif (context.getSessionVariable().isCboPushDownDistinctBelowWindow()) {\ntree = new PushDownDistinctAggregateRule().rewrite(tree, rootTaskContext);\n}\nif (context.getSessionVariable().getCboPushDownAggregateMode() == -1) {\nreturn tree;\n}\ntree = new PushDownAggregateRule().rewrite(tree, rootTaskContext);\nderiveLogicalProperty(tree);\nrootTaskContext.setRequiredColumns(requiredColumns.clone());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PRUNE_COLUMNS);\nreturn tree;\n}\nprivate OptExpression pruneSubfield(OptExpression tree, TaskContext rootTaskContext) {\nif (!context.getSessionVariable().isCboPruneSubfield()) {\nreturn tree;\n}\ntree = new PruneSubfieldRule().rewrite(tree, rootTaskContext);\nreturn tree;\n}\nprivate void deriveLogicalProperty(OptExpression root) {\nfor (OptExpression child : root.getInputs()) {\nderiveLogicalProperty(child);\n}\nExpressionContext context = new ExpressionContext(root);\ncontext.deriveLogicalProperty();\nroot.setLogicalProperty(context.getRootProperty());\n}\nprivate boolean 
isEnableMultiTableRewrite(ConnectContext connectContext, OptExpression queryPlan) {\nif (context.getCandidateMvs().isEmpty()) {\nreturn false;\n}\nif (!connectContext.getSessionVariable().isEnableMaterializedViewRewrite()) {\nreturn false;\n}\nif (!connectContext.getSessionVariable().isEnableMaterializedViewSingleTableViewDeltaRewrite() &&\nMvUtils.getAllTables(queryPlan).size() <= 1) {\nreturn false;\n}\nreturn true;\n}\nprivate OptExpression physicalRuleRewrite(TaskContext rootTaskContext, OptExpression result) {\nPreconditions.checkState(result.getOp().isPhysical());\nint planCount = result.getPlanCount();\nresult = new PreAggregateTurnOnRule().rewrite(result, rootTaskContext);\nresult = new ExchangeSortToMergeRule().rewrite(result, rootTaskContext);\nresult = new PruneAggregateNodeRule().rewrite(result, rootTaskContext);\nresult = new PruneShuffleColumnRule().rewrite(result, rootTaskContext);\nresult = new UseSortAggregateRule().rewrite(result, rootTaskContext);\nresult = new AddDecodeNodeForDictStringRule().rewrite(result, rootTaskContext);\nresult = new ScalarOperatorsReuseRule().rewrite(result, rootTaskContext);\nresult = new PredicateReorderRule(rootTaskContext.getOptimizerContext().getSessionVariable()).rewrite(result,\nrootTaskContext);\nresult = new ExtractAggregateColumn().rewrite(result, rootTaskContext);\nresult = new PruneSubfieldsForComplexType().rewrite(result, rootTaskContext);\nSessionVariable sessionVariable = rootTaskContext.getOptimizerContext().getSessionVariable();\nif (sessionVariable.isEnableCboTablePrune() || sessionVariable.isEnableRboTablePrune()) {\nresult = new CloneDuplicateColRefRule().rewrite(result, rootTaskContext);\n}\nresult.setPlanCount(planCount);\nreturn result;\n}\n/**\n* Extract the lowest cost physical operator tree from memo\n*\n* @param requiredProperty the required physical property from sql or groupExpression\n* @param rootGroup the current group to find the lowest cost physical operator\n* @return the lowest cost physical operator for this query\n*/\nprivate OptExpression extractBestPlan(PhysicalPropertySet requiredProperty,\nGroup rootGroup) {\nGroupExpression groupExpression = rootGroup.getBestExpression(requiredProperty);\nif (groupExpression == null) {\nString msg = \"no executable plan for this sql. group: %s. 
required property: %s\";\nthrow new IllegalArgumentException(String.format(msg, rootGroup, requiredProperty));\n}\nList inputProperties = groupExpression.getInputProperties(requiredProperty);\nList childPlans = Lists.newArrayList();\nfor (int i = 0; i < groupExpression.arity(); ++i) {\nOptExpression childPlan = extractBestPlan(inputProperties.get(i), groupExpression.inputAt(i));\nchildPlans.add(childPlan);\n}\nOptExpression expression = OptExpression.create(groupExpression.getOp(),\nchildPlans);\nexpression.setRequiredProperties(inputProperties);\nexpression.setStatistics(groupExpression.getGroup().getStatistics());\nexpression.setCost(groupExpression.getCost(requiredProperty));\nexpression.setLogicalProperty(rootGroup.getLogicalProperty());\nreturn expression;\n}\nprivate void collectAllScanOperators(Memo memo, TaskContext rootTaskContext) {\nOptExpression tree = memo.getRootGroup().extractLogicalTree();\nList list = Lists.newArrayList();\nUtils.extractOlapScanOperator(tree.getGroupExpression(), list);\nrootTaskContext.setAllScanOperators(Collections.unmodifiableList(list));\n}\nprivate void ruleRewriteIterative(OptExpression tree, TaskContext rootTaskContext, RuleSetType ruleSetType) {\nif (optimizerConfig.isRuleSetTypeDisable(ruleSetType)) {\nreturn;\n}\nList rules = rootTaskContext.getOptimizerContext().getRuleSet().getRewriteRulesByType(ruleSetType);\ncontext.getTaskScheduler().pushTask(new RewriteTreeTask(rootTaskContext, tree, rules, false));\ncontext.getTaskScheduler().executeTasks(rootTaskContext);\n}\nprivate void ruleRewriteIterative(OptExpression tree, TaskContext rootTaskContext, Rule rule) {\nif (optimizerConfig.isRuleDisable(rule.type())) {\nreturn;\n}\nList rules = Collections.singletonList(rule);\ncontext.getTaskScheduler().pushTask(new RewriteTreeTask(rootTaskContext, tree, rules, false));\ncontext.getTaskScheduler().executeTasks(rootTaskContext);\n}\nprivate void ruleRewriteOnlyOnce(OptExpression tree, TaskContext rootTaskContext, RuleSetType ruleSetType) {\nif (optimizerConfig.isRuleSetTypeDisable(ruleSetType)) {\nreturn;\n}\nList rules = rootTaskContext.getOptimizerContext().getRuleSet().getRewriteRulesByType(ruleSetType);\ncontext.getTaskScheduler().pushTask(new RewriteTreeTask(rootTaskContext, tree, rules, true));\ncontext.getTaskScheduler().executeTasks(rootTaskContext);\n}\nprivate void ruleRewriteOnlyOnce(OptExpression tree, TaskContext rootTaskContext, Rule rule) {\nif (optimizerConfig.isRuleDisable(rule.type())) {\nreturn;\n}\nList rules = Collections.singletonList(rule);\ncontext.getTaskScheduler().pushTask(new RewriteTreeTask(rootTaskContext, tree, rules, true));\ncontext.getTaskScheduler().executeTasks(rootTaskContext);\n}\n}", + "context_after": "class Optimizer {\nprivate static final Logger LOG = LogManager.getLogger(Optimizer.class);\nprivate OptimizerContext context;\nprivate final OptimizerConfig optimizerConfig;\nprivate long updateTableId = -1;\npublic Optimizer() {\nthis(OptimizerConfig.defaultConfig());\n}\npublic Optimizer(OptimizerConfig config) {\nthis.optimizerConfig = config;\n}\n@VisibleForTesting\npublic OptimizerConfig getOptimizerConfig() {\nreturn optimizerConfig;\n}\npublic OptimizerContext getContext() {\nreturn context;\n}\npublic OptExpression optimize(ConnectContext connectContext,\nOptExpression logicOperatorTree,\nPhysicalPropertySet requiredProperty,\nColumnRefSet requiredColumns,\nColumnRefFactory columnRefFactory) {\nprepare(connectContext, logicOperatorTree, columnRefFactory);\ncontext.setUpdateTableId(updateTableId);\nif 
(optimizerConfig.isRuleBased()) {\nreturn optimizeByRule(connectContext, logicOperatorTree, requiredProperty, requiredColumns);\n} else {\nreturn optimizeByCost(connectContext, logicOperatorTree, requiredProperty, requiredColumns);\n}\n}\npublic void setUpdateTableId(long updateTableId) {\nthis.updateTableId = updateTableId;\n}\nprivate OptExpression optimizeByRule(ConnectContext connectContext,\nOptExpression logicOperatorTree,\nPhysicalPropertySet requiredProperty,\nColumnRefSet requiredColumns) {\nOptimizerTraceUtil.logOptExpression(connectContext, \"origin logicOperatorTree:\\n%s\", logicOperatorTree);\nTaskContext rootTaskContext =\nnew TaskContext(context, requiredProperty, requiredColumns.clone(), Double.MAX_VALUE);\nlogicOperatorTree = rewriteAndValidatePlan(connectContext, logicOperatorTree, rootTaskContext);\nOptimizerTraceUtil.log(connectContext, \"after logical rewrite, new logicOperatorTree:\\n%s\", logicOperatorTree);\nreturn logicOperatorTree;\n}\n/**\n* Optimizer will transform and implement the logical operator based on\n* the {@see Rule}, then cost the physical operator, and finally find the\n* lowest cost physical operator tree\n*\n* @param logicOperatorTree the input for query Optimizer\n* @param requiredProperty the required physical property from sql or groupExpression\n* @param requiredColumns the required output columns from sql or groupExpression\n* @return the lowest cost physical operator for this query\n*/\nprivate OptExpression optimizeByCost(ConnectContext connectContext,\nOptExpression logicOperatorTree,\nPhysicalPropertySet requiredProperty,\nColumnRefSet requiredColumns) {\nOptimizerTraceUtil.logOptExpression(connectContext, \"origin logicOperatorTree:\\n%s\", logicOperatorTree);\nMemo memo = context.getMemo();\nTaskContext rootTaskContext =\nnew TaskContext(context, requiredProperty, requiredColumns.clone(), Double.MAX_VALUE);\ntry (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.RuleBaseOptimize\")) {\nlogicOperatorTree = rewriteAndValidatePlan(connectContext, logicOperatorTree, rootTaskContext);\n}\nmemo.init(logicOperatorTree);\nOptimizerTraceUtil.log(connectContext, \"after logical rewrite, root group:\\n%s\", memo.getRootGroup());\ncollectAllScanOperators(memo, rootTaskContext);\nmemo.deriveAllGroupLogicalProperty();\ntry (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.CostBaseOptimize\")) {\nmemoOptimize(connectContext, memo, rootTaskContext);\n}\nOptExpression result;\nif (!connectContext.getSessionVariable().isSetUseNthExecPlan()) {\nresult = extractBestPlan(requiredProperty, memo.getRootGroup());\n} else {\nint nthExecPlan = connectContext.getSessionVariable().getUseNthExecPlan();\nresult = EnumeratePlan.extractNthPlan(requiredProperty, memo.getRootGroup(), nthExecPlan);\n}\nOptimizerTraceUtil.logOptExpression(connectContext, \"after extract best plan:\\n%s\", result);\nfinal CostEstimate costs = Explain.buildCost(result);\nconnectContext.getAuditEventBuilder().setPlanCpuCosts(costs.getCpuCost())\n.setPlanMemCosts(costs.getMemoryCost());\nOptExpression finalPlan;\ntry (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.PhysicalRewrite\")) {\nfinalPlan = physicalRuleRewrite(rootTaskContext, result);\nOptimizerTraceUtil.logOptExpression(connectContext, \"final plan after physical rewrite:\\n%s\", finalPlan);\nOptimizerTraceUtil.log(connectContext, context.getTraceInfo());\n}\ntry (PlannerProfile.ScopedTimer ignored = 
PlannerProfile.getScopedTimer(\"Optimizer.PlanValidate\")) {\nPlanValidator.getInstance().validatePlan(finalPlan, rootTaskContext);\nreturn finalPlan;\n}\n}\nprivate void prepare(ConnectContext connectContext, OptExpression logicOperatorTree,\nColumnRefFactory columnRefFactory) {\nMemo memo = null;\nif (!optimizerConfig.isRuleBased()) {\nmemo = new Memo();\n}\ncontext = new OptimizerContext(memo, columnRefFactory, connectContext, optimizerConfig);\nOptimizerTraceInfo traceInfo;\nif (connectContext.getExecutor() == null) {\ntraceInfo = new OptimizerTraceInfo(connectContext.getQueryId(), null);\n} else {\ntraceInfo =\nnew OptimizerTraceInfo(connectContext.getQueryId(), connectContext.getExecutor().getParsedStmt());\n}\ncontext.setTraceInfo(traceInfo);\nif (Config.enable_experimental_mv\n&& connectContext.getSessionVariable().isEnableMaterializedViewRewrite()\n&& !optimizerConfig.isRuleBased()) {\nMvRewritePreprocessor preprocessor =\nnew MvRewritePreprocessor(connectContext, columnRefFactory, context, logicOperatorTree);\ntry (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.preprocessMvs\")) {\npreprocessor.prepareMvCandidatesForPlan();\n}\nif (connectContext.getSessionVariable().isEnableSyncMaterializedViewRewrite()) {\ntry (PlannerProfile.ScopedTimer ignored = PlannerProfile.getScopedTimer(\"Optimizer.preprocessSyncMvs\")) {\npreprocessor.prepareSyncMvCandidatesForPlan();\n}\n}\nOptimizerTraceUtil.logMVPrepare(connectContext, \"There are %d candidate MVs after prepare phase\",\ncontext.getCandidateMvs().size());\n}\n}\nprivate void pruneTables(OptExpression tree, TaskContext rootTaskContext, ColumnRefSet requiredColumns) {\nif (rootTaskContext.getOptimizerContext().getSessionVariable().isEnableRboTablePrune()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PARTITION_PRUNE);\nruleRewriteIterative(tree, rootTaskContext, new MergeProjectWithChildRule());\ntree = new UniquenessBasedTablePruneRule().rewrite(tree, rootTaskContext);\ntree = new ReorderJoinRule().rewrite(tree, context);\ntree = new SeparateProjectRule().rewrite(tree, rootTaskContext);\nderiveLogicalProperty(tree);\ntree = new PrimaryKeyUpdateTableRule().rewrite(tree, rootTaskContext);\nderiveLogicalProperty(tree);\ntree = new RboTablePruneRule().rewrite(tree, rootTaskContext);\nruleRewriteIterative(tree, rootTaskContext, new MergeTwoProjectRule());\nrootTaskContext.setRequiredColumns(requiredColumns.clone());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PRUNE_COLUMNS);\ncontext.setEnableLeftRightJoinEquivalenceDerive(true);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PUSH_DOWN_PREDICATE);\n}\n}\nprivate OptExpression logicalRuleRewrite(ConnectContext connectContext,\nOptExpression tree,\nTaskContext rootTaskContext) {\ntree = OptExpression.create(new LogicalTreeAnchorOperator(), tree);\nColumnRefSet requiredColumns = rootTaskContext.getRequiredColumns().clone();\nderiveLogicalProperty(tree);\nSessionVariable sessionVariable = rootTaskContext.getOptimizerContext().getSessionVariable();\nCTEContext cteContext = context.getCteContext();\nCTEUtils.collectCteOperators(tree, context);\nif (sessionVariable.isEnableRboTablePrune()) {\ncontext.setEnableLeftRightJoinEquivalenceDerive(false);\n}\nwhile (cteContext.hasInlineCTE()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.INLINE_CTE);\nCTEUtils.collectCteOperators(tree, context);\n}\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.AGGREGATE_REWRITE);\nruleRewriteIterative(tree, rootTaskContext, 
RuleSetType.PUSH_DOWN_SUBQUERY);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.SUBQUERY_REWRITE_COMMON);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.SUBQUERY_REWRITE_TO_WINDOW);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.SUBQUERY_REWRITE_TO_JOIN);\nruleRewriteOnlyOnce(tree, rootTaskContext, new ApplyExceptionRule());\nCTEUtils.collectCteOperators(tree, context);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PUSH_DOWN_PREDICATE);\nruleRewriteIterative(tree, rootTaskContext, new MergeTwoProjectRule());\nruleRewriteOnlyOnce(tree, rootTaskContext, new PushDownAggToMetaScanRule());\nruleRewriteOnlyOnce(tree, rootTaskContext, new PushDownPredicateRankingWindowRule());\nruleRewriteOnlyOnce(tree, rootTaskContext, new PushDownJoinOnExpressionToChildProject());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PRUNE_COLUMNS);\nderiveLogicalProperty(tree);\nruleRewriteIterative(tree, rootTaskContext, new PruneEmptyWindowRule());\nruleRewriteIterative(tree, rootTaskContext, new MergeTwoAggRule());\nrootTaskContext.setRequiredColumns(requiredColumns.clone());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PRUNE_COLUMNS);\npruneTables(tree, rootTaskContext, requiredColumns);\nruleRewriteIterative(tree, rootTaskContext, new PruneEmptyWindowRule());\nruleRewriteIterative(tree, rootTaskContext, new MergeTwoProjectRule());\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.MERGE_LIMIT);\nruleRewriteIterative(tree, rootTaskContext, new PushDownProjectLimitRule());\nruleRewriteOnlyOnce(tree, rootTaskContext, new PushDownLimitRankingWindowRule());\nif (sessionVariable.isEnableRewriteGroupingsetsToUnionAll()) {\nruleRewriteIterative(tree, rootTaskContext, new RewriteGroupingSetsByCTERule());\n}\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PRUNE_ASSERT_ROW);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PRUNE_PROJECT);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PRUNE_SET_OPERATOR);\nCTEUtils.collectCteOperators(tree, context);\nif (cteContext.needOptimizeCTE()) {\ncteContext.reset();\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.COLLECT_CTE);\nrootTaskContext.setRequiredColumns(requiredColumns.clone());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PRUNE_COLUMNS);\nif (cteContext.needPushLimit() || cteContext.needPushPredicate()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, new PushLimitAndFilterToCTEProduceRule());\n}\nif (cteContext.needPushPredicate()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PUSH_DOWN_PREDICATE);\n}\nif (cteContext.needPushLimit()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.MERGE_LIMIT);\n}\n}\nif (!optimizerConfig.isRuleDisable(TF_MATERIALIZED_VIEW)\n&& sessionVariable.isEnableSyncMaterializedViewRewrite()) {\nOptimizerTraceUtil.logOptExpression(connectContext, \"before MaterializedViewRule:\\n%s\", tree);\ntree = new MaterializedViewRule().transform(tree, context).get(0);\nOptimizerTraceUtil.logOptExpression(connectContext, \"after MaterializedViewRule:\\n%s\", tree);\nderiveLogicalProperty(tree);\n}\ntree = pruneSubfield(tree, rootTaskContext);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.MULTI_DISTINCT_REWRITE);\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.PUSH_DOWN_PREDICATE);\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PARTITION_PRUNE);\nruleRewriteOnlyOnce(tree, rootTaskContext, LimitPruneTabletsRule.getInstance());\nruleRewriteIterative(tree, rootTaskContext, 
RuleSetType.PRUNE_PROJECT);\ntree = pushDownAggregation(tree, rootTaskContext, requiredColumns);\nCTEUtils.collectCteOperators(tree, context);\nwhile (cteContext.hasInlineCTE()) {\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.INLINE_CTE);\nCTEUtils.collectCteOperators(tree, context);\n}\nruleRewriteIterative(tree, rootTaskContext, new PruneEmptyWindowRule());\nruleRewriteIterative(tree, rootTaskContext, new MergeTwoProjectRule());\nruleRewriteIterative(tree, rootTaskContext, new RewriteSimpleAggToMetaScanRule());\nruleRewriteIterative(tree, rootTaskContext, new MergeProjectWithChildRule());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.INTERSECT_REWRITE);\nruleRewriteIterative(tree, rootTaskContext, new RemoveAggregationFromAggTable());\nruleRewriteOnlyOnce(tree, rootTaskContext, SplitScanORToUnionRule.getInstance());\nif (isEnableSingleTableMVRewrite(rootTaskContext, sessionVariable, tree)) {\nruleRewriteIterative(tree, rootTaskContext, RuleSetType.SINGLE_TABLE_MV_REWRITE);\n}\nruleRewriteOnlyOnce(tree, rootTaskContext, new GroupByCountDistinctRewriteRule());\nreturn tree.getInputs().get(0);\n}\nprivate boolean isEnableSingleTableMVRewrite(TaskContext rootTaskContext,\nSessionVariable sessionVariable,\nOptExpression queryPlan) {\nif (optimizerConfig.isRuleSetTypeDisable(RuleSetType.SINGLE_TABLE_MV_REWRITE)) {\nreturn false;\n}\nif (!sessionVariable.isEnableMaterializedViewRewrite()\n|| !sessionVariable.isEnableRuleBasedMaterializedViewRewrite()) {\nreturn false;\n}\nif (rootTaskContext.getOptimizerContext().getCandidateMvs().isEmpty()) {\nreturn false;\n}\nif (!sessionVariable.isEnableMaterializedViewSingleTableViewDeltaRewrite() &&\nMvUtils.getAllTables(queryPlan).size() <= 1) {\nreturn true;\n}\nif (sessionVariable.isEnableMaterializedViewViewDeltaRewrite() &&\nrootTaskContext.getOptimizerContext().getCandidateMvs()\n.stream().anyMatch(MaterializationContext::hasMultiTables)) {\nreturn false;\n}\nreturn true;\n}\nprivate OptExpression rewriteAndValidatePlan(ConnectContext connectContext,\nOptExpression tree,\nTaskContext rootTaskContext) {\nOptExpression result = logicalRuleRewrite(connectContext, tree, rootTaskContext);\nOptExpressionValidator validator = new OptExpressionValidator();\nvalidator.validate(result);\nreturn result;\n}\nprivate OptExpression pushDownAggregation(OptExpression tree, TaskContext rootTaskContext,\nColumnRefSet requiredColumns) {\nif (context.getSessionVariable().isCboPushDownDistinctBelowWindow()) {\ntree = new PushDownDistinctAggregateRule().rewrite(tree, rootTaskContext);\n}\nif (context.getSessionVariable().getCboPushDownAggregateMode() == -1) {\nreturn tree;\n}\ntree = new PushDownAggregateRule().rewrite(tree, rootTaskContext);\nderiveLogicalProperty(tree);\nrootTaskContext.setRequiredColumns(requiredColumns.clone());\nruleRewriteOnlyOnce(tree, rootTaskContext, RuleSetType.PRUNE_COLUMNS);\nreturn tree;\n}\nprivate OptExpression pruneSubfield(OptExpression tree, TaskContext rootTaskContext) {\nif (!context.getSessionVariable().isCboPruneSubfield()) {\nreturn tree;\n}\ntree = new PruneSubfieldRule().rewrite(tree, rootTaskContext);\nreturn tree;\n}\nprivate void deriveLogicalProperty(OptExpression root) {\nfor (OptExpression child : root.getInputs()) {\nderiveLogicalProperty(child);\n}\nExpressionContext context = new ExpressionContext(root);\ncontext.deriveLogicalProperty();\nroot.setLogicalProperty(context.getRootProperty());\n}\nprivate boolean isEnableMultiTableRewrite(ConnectContext connectContext, OptExpression queryPlan) {\nif 
(context.getCandidateMvs().isEmpty()) {\nreturn false;\n}\nif (!connectContext.getSessionVariable().isEnableMaterializedViewRewrite()) {\nreturn false;\n}\nif (!connectContext.getSessionVariable().isEnableMaterializedViewSingleTableViewDeltaRewrite() &&\nMvUtils.getAllTables(queryPlan).size() <= 1) {\nreturn false;\n}\nreturn true;\n}\nprivate OptExpression physicalRuleRewrite(TaskContext rootTaskContext, OptExpression result) {\nPreconditions.checkState(result.getOp().isPhysical());\nint planCount = result.getPlanCount();\nresult = new PreAggregateTurnOnRule().rewrite(result, rootTaskContext);\nresult = new ExchangeSortToMergeRule().rewrite(result, rootTaskContext);\nresult = new PruneAggregateNodeRule().rewrite(result, rootTaskContext);\nresult = new PruneShuffleColumnRule().rewrite(result, rootTaskContext);\nresult = new UseSortAggregateRule().rewrite(result, rootTaskContext);\nresult = new AddDecodeNodeForDictStringRule().rewrite(result, rootTaskContext);\nresult = new ScalarOperatorsReuseRule().rewrite(result, rootTaskContext);\nresult = new PredicateReorderRule(rootTaskContext.getOptimizerContext().getSessionVariable()).rewrite(result,\nrootTaskContext);\nresult = new ExtractAggregateColumn().rewrite(result, rootTaskContext);\nresult = new PruneSubfieldsForComplexType().rewrite(result, rootTaskContext);\nSessionVariable sessionVariable = rootTaskContext.getOptimizerContext().getSessionVariable();\nif (sessionVariable.isEnableCboTablePrune() || sessionVariable.isEnableRboTablePrune()) {\nresult = new CloneDuplicateColRefRule().rewrite(result, rootTaskContext);\n}\nresult.setPlanCount(planCount);\nreturn result;\n}\n/**\n* Extract the lowest cost physical operator tree from memo\n*\n* @param requiredProperty the required physical property from sql or groupExpression\n* @param rootGroup the current group to find the lowest cost physical operator\n* @return the lowest cost physical operator for this query\n*/\nprivate OptExpression extractBestPlan(PhysicalPropertySet requiredProperty,\nGroup rootGroup) {\nGroupExpression groupExpression = rootGroup.getBestExpression(requiredProperty);\nif (groupExpression == null) {\nString msg = \"no executable plan for this sql. group: %s. 
required property: %s\";\nthrow new IllegalArgumentException(String.format(msg, rootGroup, requiredProperty));\n}\nList inputProperties = groupExpression.getInputProperties(requiredProperty);\nList childPlans = Lists.newArrayList();\nfor (int i = 0; i < groupExpression.arity(); ++i) {\nOptExpression childPlan = extractBestPlan(inputProperties.get(i), groupExpression.inputAt(i));\nchildPlans.add(childPlan);\n}\nOptExpression expression = OptExpression.create(groupExpression.getOp(),\nchildPlans);\nexpression.setRequiredProperties(inputProperties);\nexpression.setStatistics(groupExpression.getGroup().getStatistics());\nexpression.setCost(groupExpression.getCost(requiredProperty));\nexpression.setLogicalProperty(rootGroup.getLogicalProperty());\nreturn expression;\n}\nprivate void collectAllScanOperators(Memo memo, TaskContext rootTaskContext) {\nOptExpression tree = memo.getRootGroup().extractLogicalTree();\nList list = Lists.newArrayList();\nUtils.extractOlapScanOperator(tree.getGroupExpression(), list);\nrootTaskContext.setAllScanOperators(Collections.unmodifiableList(list));\n}\nprivate void ruleRewriteIterative(OptExpression tree, TaskContext rootTaskContext, RuleSetType ruleSetType) {\nif (optimizerConfig.isRuleSetTypeDisable(ruleSetType)) {\nreturn;\n}\nList rules = rootTaskContext.getOptimizerContext().getRuleSet().getRewriteRulesByType(ruleSetType);\ncontext.getTaskScheduler().pushTask(new RewriteTreeTask(rootTaskContext, tree, rules, false));\ncontext.getTaskScheduler().executeTasks(rootTaskContext);\n}\nprivate void ruleRewriteIterative(OptExpression tree, TaskContext rootTaskContext, Rule rule) {\nif (optimizerConfig.isRuleDisable(rule.type())) {\nreturn;\n}\nList rules = Collections.singletonList(rule);\ncontext.getTaskScheduler().pushTask(new RewriteTreeTask(rootTaskContext, tree, rules, false));\ncontext.getTaskScheduler().executeTasks(rootTaskContext);\n}\nprivate void ruleRewriteOnlyOnce(OptExpression tree, TaskContext rootTaskContext, RuleSetType ruleSetType) {\nif (optimizerConfig.isRuleSetTypeDisable(ruleSetType)) {\nreturn;\n}\nList rules = rootTaskContext.getOptimizerContext().getRuleSet().getRewriteRulesByType(ruleSetType);\ncontext.getTaskScheduler().pushTask(new RewriteTreeTask(rootTaskContext, tree, rules, true));\ncontext.getTaskScheduler().executeTasks(rootTaskContext);\n}\nprivate void ruleRewriteOnlyOnce(OptExpression tree, TaskContext rootTaskContext, Rule rule) {\nif (optimizerConfig.isRuleDisable(rule.type())) {\nreturn;\n}\nList rules = Collections.singletonList(rule);\ncontext.getTaskScheduler().pushTask(new RewriteTreeTask(rootTaskContext, tree, rules, true));\ncontext.getTaskScheduler().executeTasks(rootTaskContext);\n}\n}" + }, + { + "comment": "```suggestion // default format is \"yyyy-MM-dd HH:mm:ss\" ```", + "method_body": "public Expr apply(Expr expr, Analyzer analyzer) throws AnalysisException {\nif (!(expr instanceof BinaryPredicate)) {\nreturn expr;\n}\nBinaryPredicate bp = (BinaryPredicate) expr;\nExpr left = bp.getChild(0);\nif (!(left instanceof FunctionCallExpr)) {\nreturn expr;\n}\nFunctionCallExpr fce = (FunctionCallExpr) left;\nif (!fce.getFnName().getFunction().equalsIgnoreCase(\"from_unixtime\")) {\nreturn expr;\n}\nFunctionParams params = fce.getParams();\nif (params == null) {\nreturn expr;\n}\nif (params.exprs().size() != 1 && params.exprs().size() != 2) {\nreturn expr;\n}\nExpr paramSlot = params.exprs().get(0);\nif (!(paramSlot instanceof SlotRef)) {\nreturn expr;\n}\nSlotRef sr = (SlotRef) paramSlot;\nif 
(!sr.getColumn().getType().isIntegerType()) {\nreturn new BoolLiteral(false);\n}\nExpr right = bp.getChild(1);\nif (!(right instanceof LiteralExpr)) {\nreturn expr;\n}\nLiteralExpr le = (LiteralExpr) right;\nSimpleDateFormat format = new SimpleDateFormat( \"yyyy-MM-dd\" );\nif (params.exprs().size() == 1) {\nformat = new SimpleDateFormat( \"yyyy-MM-dd HH:mm:ss\" );\n} else {\nLiteralExpr fm = (LiteralExpr) params.exprs().get(1);\nformat = new SimpleDateFormat(fm.getStringValue());\n}\ntry {\nDate date = format.parse(le.getStringValue());\nif (bp.getOp() == BinaryPredicate.Operator.LT || bp.getOp() == BinaryPredicate.Operator.LE) {\nBinaryPredicate r = new BinaryPredicate(bp.getOp(), sr, LiteralExpr.create(String.valueOf(date.getTime() / 1000), Type.BIGINT));\nBinaryPredicate l = new BinaryPredicate(BinaryPredicate.Operator.GE, sr, LiteralExpr.create(\"0\", Type.BIGINT));\nreturn new CompoundPredicate(CompoundPredicate.Operator.AND, r, l);\n} else if (bp.getOp() == BinaryPredicate.Operator.GT || bp.getOp() == BinaryPredicate.Operator.GE) {\nBinaryPredicate l = new BinaryPredicate(bp.getOp(), sr, LiteralExpr.create(String.valueOf(date.getTime() / 1000), Type.BIGINT));\nBinaryPredicate r = new BinaryPredicate(BinaryPredicate.Operator.LE, sr, LiteralExpr.create(\"253402271999\", Type.BIGINT));\nreturn new CompoundPredicate(CompoundPredicate.Operator.AND, r, l);\n} else {\nreturn new BinaryPredicate(bp.getOp(), sr, LiteralExpr.create(String.valueOf(date.getTime() / 1000), Type.BIGINT));\n}\n} catch (ParseException e) {\nreturn expr;\n}\n}", + "target_code": "", + "method_body_after": "public Expr apply(Expr expr, Analyzer analyzer) throws AnalysisException {\nif (!(expr instanceof BinaryPredicate)) {\nreturn expr;\n}\nBinaryPredicate bp = (BinaryPredicate) expr;\nExpr left = bp.getChild(0);\nif (!(left instanceof FunctionCallExpr)) {\nreturn expr;\n}\nFunctionCallExpr fce = (FunctionCallExpr) left;\nif (!fce.getFnName().getFunction().equalsIgnoreCase(\"from_unixtime\")) {\nreturn expr;\n}\nFunctionParams params = fce.getParams();\nif (params == null) {\nreturn expr;\n}\nif (params.exprs().size() != 1 && params.exprs().size() != 2) {\nreturn expr;\n}\nExpr paramSlot = params.exprs().get(0);\nif (!(paramSlot instanceof SlotRef)) {\nreturn expr;\n}\nSlotRef sr = (SlotRef) paramSlot;\nif (!sr.getColumn().getType().isIntegerType()) {\nreturn new BoolLiteral(false);\n}\nExpr right = bp.getChild(1);\nif (!(right instanceof LiteralExpr)) {\nreturn expr;\n}\nLiteralExpr le = (LiteralExpr) right;\nSimpleDateFormat format = new SimpleDateFormat(\"yyyy-MM-dd\");\nif (params.exprs().size() == 1) {\nformat = new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss\");\n} else {\nLiteralExpr fm = (LiteralExpr) params.exprs().get(1);\nformat = new SimpleDateFormat(fm.getStringValue());\n}\ntry {\nDate date = format.parse(le.getStringValue());\nif (bp.getOp() == BinaryPredicate.Operator.LT || bp.getOp() == BinaryPredicate.Operator.LE) {\nBinaryPredicate r = new BinaryPredicate(bp.getOp(), sr, LiteralExpr.create(String.valueOf(date.getTime() / 1000), Type.BIGINT));\nBinaryPredicate l = new BinaryPredicate(BinaryPredicate.Operator.GE, sr, LiteralExpr.create(\"0\", Type.BIGINT));\nreturn new CompoundPredicate(CompoundPredicate.Operator.AND, r, l);\n} else if (bp.getOp() == BinaryPredicate.Operator.GT || bp.getOp() == BinaryPredicate.Operator.GE) {\nBinaryPredicate l = new BinaryPredicate(bp.getOp(), sr, LiteralExpr.create(String.valueOf(date.getTime() / 1000), Type.BIGINT));\nBinaryPredicate r = new 
BinaryPredicate(BinaryPredicate.Operator.LE, sr, LiteralExpr.create(\"253402271999\", Type.BIGINT));\nreturn new CompoundPredicate(CompoundPredicate.Operator.AND, r, l);\n} else {\nreturn new BinaryPredicate(bp.getOp(), sr, LiteralExpr.create(String.valueOf(date.getTime() / 1000), Type.BIGINT));\n}\n} catch (ParseException e) {\nreturn expr;\n}\n}", + "context_before": "class RewriteFromUnixTimeRule implements ExprRewriteRule {\npublic static RewriteFromUnixTimeRule INSTANCE = new RewriteFromUnixTimeRule();\n@Override\n}", + "context_after": "class RewriteFromUnixTimeRule implements ExprRewriteRule {\npublic static RewriteFromUnixTimeRule INSTANCE = new RewriteFromUnixTimeRule();\n@Override\n}" + }, + { + "comment": "The most risky bug in this code is: Zero division error when initializing failureRatio if collectSQLList.size() is zero. You can modify the code like this: ```java double failureRatio = collectSQLList.isEmpty() ? 0 : 1.0 * failedNum / collectSQLList.size(); ```", + "method_body": "public void collect(ConnectContext context, AnalyzeStatus analyzeStatus) throws Exception {\nint parallelism = Math.max(1, context.getSessionVariable().getStatisticCollectParallelism());\nList> collectSQLList = buildCollectSQLList(parallelism);\nlong totalCollectSQL = collectSQLList.size();\nif (table.isTemporaryTable()) {\ncontext.setSessionId(((OlapTable) table).getSessionId());\n}\ncontext.getSessionVariable().setEnableAnalyzePhasePruneColumns(true);\nlong finishedSQLNum = 0;\nlong failedNum = 0;\nfor (List sqlUnion : collectSQLList) {\nif (sqlUnion.size() < parallelism) {\ncontext.getSessionVariable().setPipelineDop(parallelism / sqlUnion.size());\n} else {\ncontext.getSessionVariable().setPipelineDop(1);\n}\nString sql = Joiner.on(\" UNION ALL \").join(sqlUnion);\ntry {\ncollectStatisticSync(sql, context);\n} catch (Exception e) {\nfailedNum++;\nLOG.warn(\"collect statistics task failed in job: {}, {}\", this, sql, e);\ndouble failureRatio = 1.0 * failedNum / collectSQLList.size();\nif (collectSQLList.size() < 100) {\nthrow e;\n} else if (failureRatio > Config.statistic_full_statistics_failure_tolerance_ratio) {\nString message = String.format(\"collect statistic job failed due to \" +\n\"too many failed tasks: %d/%d, the last failure is %s\",\nfailedNum, collectSQLList.size(), e);\nLOG.warn(message, e);\nthrow new RuntimeException(message, e);\n} else {\ncontinue;\n}\n}\nfinishedSQLNum++;\nanalyzeStatus.setProgress(finishedSQLNum * 100 / totalCollectSQL);\nGlobalStateMgr.getCurrentState().getAnalyzeMgr().addAnalyzeStatus(analyzeStatus);\n}\nflushInsertStatisticsData(context, true);\n}", + "target_code": "GlobalStateMgr.getCurrentState().getAnalyzeMgr().addAnalyzeStatus(analyzeStatus);", + "method_body_after": "public void collect(ConnectContext context, AnalyzeStatus analyzeStatus) throws Exception {\nint parallelism = Math.max(1, context.getSessionVariable().getStatisticCollectParallelism());\nList> collectSQLList = buildCollectSQLList(parallelism);\nlong totalCollectSQL = collectSQLList.size();\nif (table.isTemporaryTable()) {\ncontext.setSessionId(((OlapTable) table).getSessionId());\n}\ncontext.getSessionVariable().setEnableAnalyzePhasePruneColumns(true);\nlong finishedSQLNum = 0;\nlong failedNum = 0;\nException lastFailure = null;\nfor (List sqlUnion : collectSQLList) {\nif (sqlUnion.size() < parallelism) {\ncontext.getSessionVariable().setPipelineDop(parallelism / sqlUnion.size());\n} else {\ncontext.getSessionVariable().setPipelineDop(1);\n}\nString sql = Joiner.on(\" UNION ALL 
\").join(sqlUnion);\ntry {\ncollectStatisticSync(sql, context);\n} catch (Exception e) {\nfailedNum++;\nLOG.warn(\"collect statistics task failed in job: {}, {}\", this, sql, e);\ndouble failureRatio = 1.0 * failedNum / collectSQLList.size();\nif (collectSQLList.size() < 100) {\nthrow e;\n} else if (failureRatio > Config.statistic_full_statistics_failure_tolerance_ratio) {\nString message = String.format(\"collect statistic job failed due to \" +\n\"too many failed tasks: %d/%d, the last failure is %s\",\nfailedNum, collectSQLList.size(), e);\nLOG.warn(message, e);\nthrow new RuntimeException(message, e);\n} else {\nlastFailure = e;\ncontinue;\n}\n}\nfinishedSQLNum++;\nanalyzeStatus.setProgress(finishedSQLNum * 100 / totalCollectSQL);\nGlobalStateMgr.getCurrentState().getAnalyzeMgr().addAnalyzeStatus(analyzeStatus);\n}\nif (lastFailure != null) {\nString message = String.format(\"collect statistic job partially failed but tolerated %d/%d, \" +\n\"last error is %s\", failedNum, collectSQLList.size(), lastFailure);\nanalyzeStatus.setReason(message);\nLOG.warn(message);\n}\nflushInsertStatisticsData(context, true);\n}", + "context_before": "class FullStatisticsCollectJob extends StatisticsCollectJob {\nprivate static final Logger LOG = LogManager.getLogger(FullStatisticsCollectJob.class);\nprivate static final String BATCH_FULL_STATISTIC_TEMPLATE = \"SELECT cast($version as INT)\" +\n\", cast($partitionId as BIGINT)\" +\n\", '$columnNameStr'\" +\n\", cast(COUNT(1) as BIGINT)\" +\n\", cast($dataSize as BIGINT)\" +\n\", $hllFunction\" +\n\", cast($countNullFunction as BIGINT)\" +\n\", $maxFunction\" +\n\", $minFunction \" +\n\" FROM (select $quoteColumnName as column_key from `$dbName`.`$tableName` partition `$partitionName`) tt\";\nprivate final List partitionIdList;\nprivate final List sqlBuffer = Lists.newArrayList();\nprivate final List> rowsBuffer = Lists.newArrayList();\npublic FullStatisticsCollectJob(Database db, Table table, List partitionIdList, List columns,\nStatsConstants.AnalyzeType type, StatsConstants.ScheduleType scheduleType,\nMap properties) {\nsuper(db, table, columns, type, scheduleType, properties);\nthis.partitionIdList = partitionIdList;\n}\npublic FullStatisticsCollectJob(Database db, Table table, List partitionIdList, List columnNames,\nList columnTypes, StatsConstants.AnalyzeType type,\nStatsConstants.ScheduleType scheduleType, Map properties) {\nsuper(db, table, columnNames, columnTypes, type, scheduleType, properties);\nthis.partitionIdList = partitionIdList;\n}\n@Override\n@Override\npublic void collectStatisticSync(String sql, ConnectContext context) throws Exception {\nLOG.debug(\"statistics collect sql : \" + sql);\nStatisticExecutor executor = new StatisticExecutor();\nsetDefaultSessionVariable(context);\nList dataList = executor.executeStatisticDQL(context, sql);\nString tableName = StringEscapeUtils.escapeSql(db.getOriginName() + \".\" + table.getName());\nfor (TStatisticData data : dataList) {\nList params = Lists.newArrayList();\nList row = Lists.newArrayList();\nPartition partition = table.getPartition(data.getPartitionId());\nif (partition == null) {\ncontinue;\n}\nString partitionName = StringEscapeUtils.escapeSql(partition.getName());\nparams.add(String.valueOf(table.getId()));\nparams.add(String.valueOf(data.getPartitionId()));\nparams.add(\"'\" + StringEscapeUtils.escapeSql(data.getColumnName()) + \"'\");\nparams.add(String.valueOf(db.getId()));\nparams.add(\"'\" + tableName + \"'\");\nparams.add(\"'\" + partitionName + 
\"'\");\nparams.add(String.valueOf(data.getRowCount()));\nparams.add(String.valueOf(data.getDataSize()));\nparams.add(\"hll_deserialize(unhex('mockData'))\");\nparams.add(String.valueOf(data.getNullCount()));\nparams.add(\"'\" + data.getMax() + \"'\");\nparams.add(\"'\" + data.getMin() + \"'\");\nparams.add(\"now()\");\nrow.add(new IntLiteral(table.getId(), Type.BIGINT));\nrow.add(new IntLiteral(data.getPartitionId(), Type.BIGINT));\nrow.add(new StringLiteral(data.getColumnName()));\nrow.add(new IntLiteral(db.getId(), Type.BIGINT));\nrow.add(new StringLiteral(tableName));\nrow.add(new StringLiteral(partitionName));\nrow.add(new IntLiteral(data.getRowCount(), Type.BIGINT));\nrow.add(new IntLiteral((long) data.getDataSize(), Type.BIGINT));\nrow.add(hllDeserialize(data.getHll()));\nrow.add(new IntLiteral(data.getNullCount(), Type.BIGINT));\nrow.add(new StringLiteral(data.getMax()));\nrow.add(new StringLiteral(data.getMin()));\nrow.add(nowFn());\nrowsBuffer.add(row);\nsqlBuffer.add(\"(\" + String.join(\", \", params) + \")\");\n}\nflushInsertStatisticsData(context, false);\n}\nprivate void flushInsertStatisticsData(ConnectContext context, boolean force) throws Exception {\nlong bufferSize = 33L * 1024 * rowsBuffer.size();\nif (bufferSize < Config.statistic_full_collect_buffer && !force) {\nreturn;\n}\nif (rowsBuffer.isEmpty()) {\nreturn;\n}\nint count = 0;\nint maxRetryTimes = 5;\nStatementBase insertStmt = createInsertStmt();\ndo {\nLOG.debug(\"statistics insert sql size:\" + rowsBuffer.size());\nStmtExecutor executor = new StmtExecutor(context, insertStmt);\ncontext.setExecutor(executor);\ncontext.setQueryId(UUIDUtil.genUUID());\ncontext.setStartTime();\nexecutor.execute();\nif (context.getState().getStateType() == QueryState.MysqlStateType.ERR) {\nLOG.warn(\"Statistics collect fail | {} | Error Message [{}]\", DebugUtil.printId(context.getQueryId()),\ncontext.getState().getErrorMessage());\nif (StringUtils.contains(context.getState().getErrorMessage(), \"Too many versions\")) {\nThread.sleep(Config.statistic_collect_too_many_version_sleep);\ncount++;\n} else {\nthrow new DdlException(context.getState().getErrorMessage());\n}\n} else {\nsqlBuffer.clear();\nrowsBuffer.clear();\nreturn;\n}\n} while (count < maxRetryTimes);\nthrow new DdlException(context.getState().getErrorMessage());\n}\nprivate StatementBase createInsertStmt() {\nString sql = \"INSERT INTO column_statistics values \" + String.join(\", \", sqlBuffer) + \";\";\nList names = Lists.newArrayList(\"column_0\", \"column_1\", \"column_2\", \"column_3\",\n\"column_4\", \"column_5\", \"column_6\", \"column_7\", \"column_8\", \"column_9\",\n\"column_10\", \"column_11\", \"column_12\");\nQueryStatement qs = new QueryStatement(new ValuesRelation(rowsBuffer, names));\nInsertStmt insert = new InsertStmt(new TableName(\"_statistics_\", \"column_statistics\"), qs);\ninsert.setOrigStmt(new OriginStatement(sql, 0));\nreturn insert;\n}\n/*\n* Split tasks at the partition and column levels,\n* and the number of rows to scan is the number of rows in the partition\n* where the column is located.\n* The number of rows is accumulated in turn until the maximum number of rows is accumulated.\n* Use UNION ALL connection between multiple tasks and collect them in one query\n*/\nprotected List> buildCollectSQLList(int parallelism) {\nList totalQuerySQL = new ArrayList<>();\nfor (Long partitionId : partitionIdList) {\nPartition partition = table.getPartition(partitionId);\nif (partition == null) {\ncontinue;\n}\nfor (int i = 0; i < columnNames.size(); 
i++) {\ntotalQuerySQL.add(buildBatchCollectFullStatisticSQL(table, partition, columnNames.get(i),\ncolumnTypes.get(i)));\n}\n}\nreturn Lists.partition(totalQuerySQL, parallelism);\n}\nprivate String buildBatchCollectFullStatisticSQL(Table table, Partition partition, String columnName,\nType columnType) {\nStringBuilder builder = new StringBuilder();\nVelocityContext context = new VelocityContext();\nString columnNameStr = StringEscapeUtils.escapeSql(columnName);\nString quoteColumnName = StatisticUtils.quoting(table, columnName);\nString quoteColumnKey = \"`column_key`\";\ncontext.put(\"version\", StatsConstants.STATISTIC_BATCH_VERSION);\ncontext.put(\"partitionId\", partition.getId());\ncontext.put(\"columnNameStr\", columnNameStr);\ncontext.put(\"dataSize\", fullAnalyzeGetDataSize(quoteColumnKey, columnType));\ncontext.put(\"partitionName\", partition.getName());\ncontext.put(\"dbName\", db.getOriginName());\ncontext.put(\"tableName\", table.getName());\ncontext.put(\"quoteColumnName\", quoteColumnName);\nif (!columnType.canStatistic()) {\ncontext.put(\"hllFunction\", \"hex(hll_serialize(hll_empty()))\");\ncontext.put(\"countNullFunction\", \"0\");\ncontext.put(\"maxFunction\", \"''\");\ncontext.put(\"minFunction\", \"''\");\n} else {\ncontext.put(\"hllFunction\", \"hex(hll_serialize(IFNULL(hll_raw(\" + quoteColumnKey + \"), hll_empty())))\");\ncontext.put(\"countNullFunction\", \"COUNT(1) - COUNT(\" + quoteColumnKey + \")\");\ncontext.put(\"maxFunction\", getMinMaxFunction(columnType, quoteColumnKey, true));\ncontext.put(\"minFunction\", getMinMaxFunction(columnType, quoteColumnKey, false));\n}\nbuilder.append(build(context, BATCH_FULL_STATISTIC_TEMPLATE));\nreturn builder.toString();\n}\n@Override\npublic String toString() {\nfinal StringBuilder sb = new StringBuilder(\"FullStatisticsCollectJob{\");\nsb.append(\"type=\").append(type);\nsb.append(\", scheduleType=\").append(scheduleType);\nsb.append(\", db=\").append(db);\nsb.append(\", table=\").append(table);\nsb.append(\", partitionIdList=\").append(partitionIdList);\nsb.append(\", columnNames=\").append(columnNames);\nsb.append(\", properties=\").append(properties);\nsb.append('}');\nreturn sb.toString();\n}\n}", + "context_after": "class FullStatisticsCollectJob extends StatisticsCollectJob {\nprivate static final Logger LOG = LogManager.getLogger(FullStatisticsCollectJob.class);\nprivate static final String BATCH_FULL_STATISTIC_TEMPLATE = \"SELECT cast($version as INT)\" +\n\", cast($partitionId as BIGINT)\" +\n\", '$columnNameStr'\" +\n\", cast(COUNT(1) as BIGINT)\" +\n\", cast($dataSize as BIGINT)\" +\n\", $hllFunction\" +\n\", cast($countNullFunction as BIGINT)\" +\n\", $maxFunction\" +\n\", $minFunction \" +\n\" FROM (select $quoteColumnName as column_key from `$dbName`.`$tableName` partition `$partitionName`) tt\";\nprivate final List partitionIdList;\nprivate final List sqlBuffer = Lists.newArrayList();\nprivate final List> rowsBuffer = Lists.newArrayList();\npublic FullStatisticsCollectJob(Database db, Table table, List partitionIdList, List columns,\nStatsConstants.AnalyzeType type, StatsConstants.ScheduleType scheduleType,\nMap properties) {\nsuper(db, table, columns, type, scheduleType, properties);\nthis.partitionIdList = partitionIdList;\n}\npublic FullStatisticsCollectJob(Database db, Table table, List partitionIdList, List columnNames,\nList columnTypes, StatsConstants.AnalyzeType type,\nStatsConstants.ScheduleType scheduleType, Map properties) {\nsuper(db, table, columnNames, columnTypes, type, scheduleType, 
properties);\nthis.partitionIdList = partitionIdList;\n}\n@Override\n@Override\npublic void collectStatisticSync(String sql, ConnectContext context) throws Exception {\nLOG.debug(\"statistics collect sql : \" + sql);\nStatisticExecutor executor = new StatisticExecutor();\nsetDefaultSessionVariable(context);\nList dataList = executor.executeStatisticDQL(context, sql);\nString tableName = StringEscapeUtils.escapeSql(db.getOriginName() + \".\" + table.getName());\nfor (TStatisticData data : dataList) {\nList params = Lists.newArrayList();\nList row = Lists.newArrayList();\nPartition partition = table.getPartition(data.getPartitionId());\nif (partition == null) {\ncontinue;\n}\nString partitionName = StringEscapeUtils.escapeSql(partition.getName());\nparams.add(String.valueOf(table.getId()));\nparams.add(String.valueOf(data.getPartitionId()));\nparams.add(\"'\" + StringEscapeUtils.escapeSql(data.getColumnName()) + \"'\");\nparams.add(String.valueOf(db.getId()));\nparams.add(\"'\" + tableName + \"'\");\nparams.add(\"'\" + partitionName + \"'\");\nparams.add(String.valueOf(data.getRowCount()));\nparams.add(String.valueOf(data.getDataSize()));\nparams.add(\"hll_deserialize(unhex('mockData'))\");\nparams.add(String.valueOf(data.getNullCount()));\nparams.add(\"'\" + data.getMax() + \"'\");\nparams.add(\"'\" + data.getMin() + \"'\");\nparams.add(\"now()\");\nrow.add(new IntLiteral(table.getId(), Type.BIGINT));\nrow.add(new IntLiteral(data.getPartitionId(), Type.BIGINT));\nrow.add(new StringLiteral(data.getColumnName()));\nrow.add(new IntLiteral(db.getId(), Type.BIGINT));\nrow.add(new StringLiteral(tableName));\nrow.add(new StringLiteral(partitionName));\nrow.add(new IntLiteral(data.getRowCount(), Type.BIGINT));\nrow.add(new IntLiteral((long) data.getDataSize(), Type.BIGINT));\nrow.add(hllDeserialize(data.getHll()));\nrow.add(new IntLiteral(data.getNullCount(), Type.BIGINT));\nrow.add(new StringLiteral(data.getMax()));\nrow.add(new StringLiteral(data.getMin()));\nrow.add(nowFn());\nrowsBuffer.add(row);\nsqlBuffer.add(\"(\" + String.join(\", \", params) + \")\");\n}\nflushInsertStatisticsData(context, false);\n}\nprivate void flushInsertStatisticsData(ConnectContext context, boolean force) throws Exception {\nlong bufferSize = 33L * 1024 * rowsBuffer.size();\nif (bufferSize < Config.statistic_full_collect_buffer && !force) {\nreturn;\n}\nif (rowsBuffer.isEmpty()) {\nreturn;\n}\nint count = 0;\nint maxRetryTimes = 5;\nStatementBase insertStmt = createInsertStmt();\ndo {\nLOG.debug(\"statistics insert sql size:\" + rowsBuffer.size());\nStmtExecutor executor = new StmtExecutor(context, insertStmt);\ncontext.setExecutor(executor);\ncontext.setQueryId(UUIDUtil.genUUID());\ncontext.setStartTime();\nexecutor.execute();\nif (context.getState().getStateType() == QueryState.MysqlStateType.ERR) {\nLOG.warn(\"Statistics collect fail | {} | Error Message [{}]\", DebugUtil.printId(context.getQueryId()),\ncontext.getState().getErrorMessage());\nif (StringUtils.contains(context.getState().getErrorMessage(), \"Too many versions\")) {\nThread.sleep(Config.statistic_collect_too_many_version_sleep);\ncount++;\n} else {\nthrow new DdlException(context.getState().getErrorMessage());\n}\n} else {\nsqlBuffer.clear();\nrowsBuffer.clear();\nreturn;\n}\n} while (count < maxRetryTimes);\nthrow new DdlException(context.getState().getErrorMessage());\n}\nprivate StatementBase createInsertStmt() {\nString sql = \"INSERT INTO column_statistics values \" + String.join(\", \", sqlBuffer) + \";\";\nList names = 
Lists.newArrayList(\"column_0\", \"column_1\", \"column_2\", \"column_3\",\n\"column_4\", \"column_5\", \"column_6\", \"column_7\", \"column_8\", \"column_9\",\n\"column_10\", \"column_11\", \"column_12\");\nQueryStatement qs = new QueryStatement(new ValuesRelation(rowsBuffer, names));\nInsertStmt insert = new InsertStmt(new TableName(\"_statistics_\", \"column_statistics\"), qs);\ninsert.setOrigStmt(new OriginStatement(sql, 0));\nreturn insert;\n}\n/*\n* Split tasks at the partition and column levels,\n* and the number of rows to scan is the number of rows in the partition\n* where the column is located.\n* The number of rows is accumulated in turn until the maximum number of rows is accumulated.\n* Use UNION ALL connection between multiple tasks and collect them in one query\n*/\nprotected List> buildCollectSQLList(int parallelism) {\nList totalQuerySQL = new ArrayList<>();\nfor (Long partitionId : partitionIdList) {\nPartition partition = table.getPartition(partitionId);\nif (partition == null) {\ncontinue;\n}\nfor (int i = 0; i < columnNames.size(); i++) {\ntotalQuerySQL.add(buildBatchCollectFullStatisticSQL(table, partition, columnNames.get(i),\ncolumnTypes.get(i)));\n}\n}\nreturn Lists.partition(totalQuerySQL, parallelism);\n}\nprivate String buildBatchCollectFullStatisticSQL(Table table, Partition partition, String columnName,\nType columnType) {\nStringBuilder builder = new StringBuilder();\nVelocityContext context = new VelocityContext();\nString columnNameStr = StringEscapeUtils.escapeSql(columnName);\nString quoteColumnName = StatisticUtils.quoting(table, columnName);\nString quoteColumnKey = \"`column_key`\";\ncontext.put(\"version\", StatsConstants.STATISTIC_BATCH_VERSION);\ncontext.put(\"partitionId\", partition.getId());\ncontext.put(\"columnNameStr\", columnNameStr);\ncontext.put(\"dataSize\", fullAnalyzeGetDataSize(quoteColumnKey, columnType));\ncontext.put(\"partitionName\", partition.getName());\ncontext.put(\"dbName\", db.getOriginName());\ncontext.put(\"tableName\", table.getName());\ncontext.put(\"quoteColumnName\", quoteColumnName);\nif (!columnType.canStatistic()) {\ncontext.put(\"hllFunction\", \"hex(hll_serialize(hll_empty()))\");\ncontext.put(\"countNullFunction\", \"0\");\ncontext.put(\"maxFunction\", \"''\");\ncontext.put(\"minFunction\", \"''\");\n} else {\ncontext.put(\"hllFunction\", \"hex(hll_serialize(IFNULL(hll_raw(\" + quoteColumnKey + \"), hll_empty())))\");\ncontext.put(\"countNullFunction\", \"COUNT(1) - COUNT(\" + quoteColumnKey + \")\");\ncontext.put(\"maxFunction\", getMinMaxFunction(columnType, quoteColumnKey, true));\ncontext.put(\"minFunction\", getMinMaxFunction(columnType, quoteColumnKey, false));\n}\nbuilder.append(build(context, BATCH_FULL_STATISTIC_TEMPLATE));\nreturn builder.toString();\n}\n@Override\npublic String toString() {\nfinal StringBuilder sb = new StringBuilder(\"FullStatisticsCollectJob{\");\nsb.append(\"type=\").append(type);\nsb.append(\", scheduleType=\").append(scheduleType);\nsb.append(\", db=\").append(db);\nsb.append(\", table=\").append(table);\nsb.append(\", partitionIdList=\").append(partitionIdList);\nsb.append(\", columnNames=\").append(columnNames);\nsb.append(\", properties=\").append(properties);\nsb.append('}');\nreturn sb.toString();\n}\n}" + }, + { + "comment": "Yea, the default value should be null. 
Also the property name should be something like `quarkus.test.integration-jvm-arg-line` to fit in with our naming convention, and also to make it clear where this applies so you have something like: ``` /** * JVM parameters that are used to launch jar based integration tests. */ @ConfigItem String integrationJvmArgLine; ```", "method_body": "private JarLauncher(Path jarPath, Config config) {\nthis(jarPath,\nconfig.getValue(\"quarkus.http.test-port\", OptionalInt.class).orElse(DEFAULT_PORT),\nconfig.getValue(\"quarkus.http.test-ssl-port\", OptionalInt.class).orElse(DEFAULT_HTTPS_PORT),\nconfig.getValue(\"quarkus.test.jar-wait-time\", OptionalLong.class).orElse(DEFAULT_JAR_WAIT_TIME),\nconfig.getOptionalValue(\"quarkus.test.argLine\", String.class).orElse(null),\nconfig.getOptionalValue(\"quarkus.test.native-image-profile\", String.class)\n.orElse(null));\n}", "target_code": "config.getOptionalValue(\"quarkus.test.argLine\", String.class).orElse(null),", "method_body_after": "private JarLauncher(Path jarPath, Config config) {\nthis(jarPath,\nconfig.getValue(\"quarkus.http.test-port\", OptionalInt.class).orElse(DEFAULT_PORT),\nconfig.getValue(\"quarkus.http.test-ssl-port\", OptionalInt.class).orElse(DEFAULT_HTTPS_PORT),\nconfig.getValue(\"quarkus.test.jar-wait-time\", OptionalLong.class).orElse(DEFAULT_JAR_WAIT_TIME),\nconfig.getOptionalValue(\"quarkus.test.argLine\", String.class).orElse(null),\nconfig.getOptionalValue(\"quarkus.test.native-image-profile\", String.class)\n.orElse(null));\n}", "context_before": "class JarLauncher implements ArtifactLauncher {\nprivate static final int DEFAULT_PORT = 8081;\nprivate static final int DEFAULT_HTTPS_PORT = 8444;\nprivate static final long DEFAULT_JAR_WAIT_TIME = 60;\nprivate final Path jarPath;\nprivate final String profile;\nprivate final String argLine;\nprivate Process quarkusProcess;\nprivate final int httpPort;\nprivate final int httpsPort;\nprivate final long jarWaitTime;\nprivate final Map systemProps = new HashMap<>();\nprivate boolean isSsl;\npublic JarLauncher(Path jarPath) {\nthis(jarPath, installAndGetSomeConfig());\n}\npublic JarLauncher(Path jarPath, int httpPort, int httpsPort, long jarWaitTime, String argLine, String profile) {\nthis.jarPath = jarPath;\nthis.httpPort = httpPort;\nthis.httpsPort = httpsPort;\nthis.jarWaitTime = jarWaitTime;\nthis.argLine = argLine;\nthis.profile = profile;\n}\npublic void start() throws IOException {\nSystem.setProperty(\"test.url\", TestHTTPResourceManager.getUri());\nList args = new ArrayList<>();\nargs.add(\"java\");\nif (argLine != null) {\nargs.add(argLine);\n}\nargs.add(\"-Dquarkus.http.port=\" + httpPort);\nargs.add(\"-Dquarkus.http.ssl-port=\" + httpsPort);\nargs.add(\"-Dtest.url=\" + TestHTTPResourceManager.getUri());\nPath logFile = PropertyTestUtil.getLogFilePath();\nargs.add(\"-Dquarkus.log.file.path=\" + logFile.toAbsolutePath().toString());\nargs.add(\"-Dquarkus.log.file.enable=true\");\nif (profile != null) {\nargs.add(\"-Dquarkus.profile=\" + profile);\n}\nfor (Map.Entry e : systemProps.entrySet()) {\nargs.add(\"-D\" + e.getKey() + \"=\" + e.getValue());\n}\nargs.add(\"-jar\");\nargs.add(jarPath.toAbsolutePath().toString());\nSystem.out.println(\"Executing \" + args);\nFiles.deleteIfExists(logFile);\nFiles.createDirectories(logFile.getParent());\nquarkusProcess = LauncherUtil.launchProcess(args);\nListeningAddress result = waitForCapturedListeningData(quarkusProcess, logFile, jarWaitTime);\nupdateConfigForPort(result.getPort());\nisSsl = result.isSsl();\n}\npublic boolean 
listensOnSsl() {\nreturn isSsl;\n}\npublic void addSystemProperties(Map systemProps) {\nthis.systemProps.putAll(systemProps);\n}\n@Override\npublic void close() {\nquarkusProcess.destroy();\n}\n}", "context_after": "class JarLauncher implements ArtifactLauncher {\nprivate static final int DEFAULT_PORT = 8081;\nprivate static final int DEFAULT_HTTPS_PORT = 8444;\nprivate static final long DEFAULT_JAR_WAIT_TIME = 60;\nprivate final Path jarPath;\nprivate final String profile;\nprivate final String argLine;\nprivate Process quarkusProcess;\nprivate final int httpPort;\nprivate final int httpsPort;\nprivate final long jarWaitTime;\nprivate final Map systemProps = new HashMap<>();\nprivate boolean isSsl;\npublic JarLauncher(Path jarPath) {\nthis(jarPath, installAndGetSomeConfig());\n}\npublic JarLauncher(Path jarPath, int httpPort, int httpsPort, long jarWaitTime, String argLine, String profile) {\nthis.jarPath = jarPath;\nthis.httpPort = httpPort;\nthis.httpsPort = httpsPort;\nthis.jarWaitTime = jarWaitTime;\nthis.argLine = argLine;\nthis.profile = profile;\n}\npublic void start() throws IOException {\nSystem.setProperty(\"test.url\", TestHTTPResourceManager.getUri());\nList args = new ArrayList<>();\nargs.add(\"java\");\nif (argLine != null) {\nargs.add(argLine);\n}\nargs.add(\"-Dquarkus.http.port=\" + httpPort);\nargs.add(\"-Dquarkus.http.ssl-port=\" + httpsPort);\nargs.add(\"-Dtest.url=\" + TestHTTPResourceManager.getUri());\nPath logFile = PropertyTestUtil.getLogFilePath();\nargs.add(\"-Dquarkus.log.file.path=\" + logFile.toAbsolutePath().toString());\nargs.add(\"-Dquarkus.log.file.enable=true\");\nif (profile != null) {\nargs.add(\"-Dquarkus.profile=\" + profile);\n}\nfor (Map.Entry e : systemProps.entrySet()) {\nargs.add(\"-D\" + e.getKey() + \"=\" + e.getValue());\n}\nargs.add(\"-jar\");\nargs.add(jarPath.toAbsolutePath().toString());\nSystem.out.println(\"Executing \" + args);\nFiles.deleteIfExists(logFile);\nFiles.createDirectories(logFile.getParent());\nquarkusProcess = LauncherUtil.launchProcess(args);\nListeningAddress result = waitForCapturedListeningData(quarkusProcess, logFile, jarWaitTime);\nupdateConfigForPort(result.getPort());\nisSsl = result.isSsl();\n}\npublic boolean listensOnSsl() {\nreturn isSsl;\n}\npublic void addSystemProperties(Map systemProps) {\nthis.systemProps.putAll(systemProps);\n}\n@Override\npublic void close() {\nquarkusProcess.destroy();\n}\n}" + }, + { + "comment": "Thanks @fapaul for the explanation! I think perhaps `CommittableSummary` is also a subclass of the `CommittableMessage` and it should be able to be mixed in the returned list? And passing `committer` is indeed required, but it seems fine from my side~ Also may I have a double confirmation that with the current implementation we'll emit `CommittableSummary` whenever we commit some committables instead of only when checkpointing? It seems to me it is to deal with the case that for unaligned checkpoint, the barrier might jump over all the summary and committable messages~ It should be also the reason we support receiving multiple summaries from one subtask for the same checkpoint, do I understand right~? 
", + "method_body": "private void commitAndEmitCheckpoints() throws IOException, InterruptedException {\nfor (CheckpointCommittableManager manager :\ncommittableCollector.getCheckpointCommittablesUpTo(lastCompletedCheckpointId)) {\nboolean fullyReceived = manager.getCheckpointId() == lastCompletedCheckpointId;\ncommitAndEmit(manager, fullyReceived);\n}\nif (!committableCollector.isFinished()) {\nretryWithDelay();\n}\n}", + "target_code": "", + "method_body_after": "private void commitAndEmitCheckpoints() throws IOException, InterruptedException {\nfor (CheckpointCommittableManager manager :\ncommittableCollector.getCheckpointCommittablesUpTo(lastCompletedCheckpointId)) {\nboolean fullyReceived = manager.getCheckpointId() == lastCompletedCheckpointId;\ncommitAndEmit(manager, fullyReceived);\n}\nif (!committableCollector.isFinished()) {\nretryWithDelay();\n}\n}", + "context_before": "class CommitterOperator extends AbstractStreamOperator>\nimplements OneInputStreamOperator, CommittableMessage>,\nBoundedOneInput {\nprivate static final long RETRY_DELAY = 1000;\nprivate final SimpleVersionedSerializer committableSerializer;\nprivate final Committer committer;\nprivate final boolean emitDownstream;\nprivate CommittableCollector committableCollector;\nprivate long lastCompletedCheckpointId = -1;\n/** The operator's state descriptor. */\nprivate static final ListStateDescriptor STREAMING_COMMITTER_RAW_STATES_DESC =\nnew ListStateDescriptor<>(\n\"streaming_committer_raw_states\", BytePrimitiveArraySerializer.INSTANCE);\n/** The operator's state. */\nprivate ListState> committableCollectorState;\npublic CommitterOperator(\nProcessingTimeService processingTimeService,\nSimpleVersionedSerializer committableSerializer,\nCommitter committer,\nboolean emitDownstream) {\nthis.emitDownstream = emitDownstream;\nthis.processingTimeService = checkNotNull(processingTimeService);\nthis.committableSerializer = checkNotNull(committableSerializer);\nthis.committer = checkNotNull(committer);\n}\n@Override\npublic void setup(\nStreamTask containingTask,\nStreamConfig config,\nOutput>> output) {\nsuper.setup(containingTask, config, output);\ncommittableCollector = CommittableCollector.of(getRuntimeContext());\n}\n@Override\npublic void initializeState(StateInitializationContext context) throws Exception {\nsuper.initializeState(context);\ncommittableCollectorState =\nnew SimpleVersionedListState<>(\ncontext.getOperatorStateStore()\n.getListState(STREAMING_COMMITTER_RAW_STATES_DESC),\nnew CommittableCollectorSerializer<>(committableSerializer));\nif (context.isRestored()) {\ncommittableCollectorState.get().forEach(cc -> committableCollector.merge(cc));\nlastCompletedCheckpointId = context.getRestoredCheckpointId().getAsLong();\ncommitAndEmitCheckpoints();\n}\n}\n@Override\npublic void snapshotState(StateSnapshotContext context) throws Exception {\nsuper.snapshotState(context);\ncommittableCollectorState.update(Collections.singletonList(committableCollector.copy()));\n}\n@Override\npublic void endInput() throws Exception {\nCollection> endOfInputCommittables =\ncommittableCollector.getEndOfInputCommittables();\nif (endOfInputCommittables != null) {\ndo {\nfor (CommittableManager endOfInputCommittable : endOfInputCommittables) {\ncommitAndEmit(endOfInputCommittable, false);\n}\n} while (!committableCollector.isFinished());\n}\n}\n@Override\npublic void notifyCheckpointComplete(long checkpointId) throws Exception {\nsuper.notifyCheckpointComplete(checkpointId);\nlastCompletedCheckpointId = 
Math.max(lastCompletedCheckpointId, checkpointId);\ncommitAndEmitCheckpoints();\n}\nprivate void commitAndEmit(CommittableManager committableManager, boolean fullyReceived)\nthrows IOException, InterruptedException {\nCollection> committed =\ncommittableManager.commit(fullyReceived, committer);\nif (emitDownstream && !committed.isEmpty()) {\noutput.collect(new StreamRecord<>(committableManager.getSummary()));\nfor (CommittableWithLineage committable : committed) {\noutput.collect(new StreamRecord<>(committable));\n}\n}\n}\nprivate void retryWithDelay() {\nprocessingTimeService.registerTimer(\nprocessingTimeService.getCurrentProcessingTime() + RETRY_DELAY,\nts -> commitAndEmitCheckpoints());\n}\n@Override\npublic void processElement(StreamRecord> element) throws Exception {\ncommittableCollector.addMessage(element.getValue());\nOptionalLong checkpointId = element.getValue().getCheckpointId();\nif (checkpointId.isPresent() && checkpointId.getAsLong() <= lastCompletedCheckpointId) {\ncommitAndEmitCheckpoints();\n}\n}\n@Override\npublic void close() throws Exception {\ncloseAll(committer, super::close);\n}\n}", + "context_after": "class CommitterOperator extends AbstractStreamOperator>\nimplements OneInputStreamOperator, CommittableMessage>,\nBoundedOneInput {\nprivate static final long RETRY_DELAY = 1000;\nprivate final SimpleVersionedSerializer committableSerializer;\nprivate final Committer committer;\nprivate final boolean emitDownstream;\nprivate CommittableCollector committableCollector;\nprivate long lastCompletedCheckpointId = -1;\n/** The operator's state descriptor. */\nprivate static final ListStateDescriptor STREAMING_COMMITTER_RAW_STATES_DESC =\nnew ListStateDescriptor<>(\n\"streaming_committer_raw_states\", BytePrimitiveArraySerializer.INSTANCE);\n/** The operator's state. 
*/\nprivate ListState> committableCollectorState;\npublic CommitterOperator(\nProcessingTimeService processingTimeService,\nSimpleVersionedSerializer committableSerializer,\nCommitter committer,\nboolean emitDownstream) {\nthis.emitDownstream = emitDownstream;\nthis.processingTimeService = checkNotNull(processingTimeService);\nthis.committableSerializer = checkNotNull(committableSerializer);\nthis.committer = checkNotNull(committer);\n}\n@Override\npublic void setup(\nStreamTask containingTask,\nStreamConfig config,\nOutput>> output) {\nsuper.setup(containingTask, config, output);\ncommittableCollector = CommittableCollector.of(getRuntimeContext());\n}\n@Override\npublic void initializeState(StateInitializationContext context) throws Exception {\nsuper.initializeState(context);\ncommittableCollectorState =\nnew SimpleVersionedListState<>(\ncontext.getOperatorStateStore()\n.getListState(STREAMING_COMMITTER_RAW_STATES_DESC),\nnew CommittableCollectorSerializer<>(\ncommittableSerializer,\ngetRuntimeContext().getIndexOfThisSubtask(),\ngetRuntimeContext().getNumberOfParallelSubtasks()));\nif (context.isRestored()) {\ncommittableCollectorState.get().forEach(cc -> committableCollector.merge(cc));\nlastCompletedCheckpointId = context.getRestoredCheckpointId().getAsLong();\ncommitAndEmitCheckpoints();\n}\n}\n@Override\npublic void snapshotState(StateSnapshotContext context) throws Exception {\nsuper.snapshotState(context);\ncommittableCollectorState.update(Collections.singletonList(committableCollector.copy()));\n}\n@Override\npublic void endInput() throws Exception {\nCollection> endOfInputCommittables =\ncommittableCollector.getEndOfInputCommittables();\nif (endOfInputCommittables != null) {\ndo {\nfor (CommittableManager endOfInputCommittable : endOfInputCommittables) {\ncommitAndEmit(endOfInputCommittable, false);\n}\n} while (!committableCollector.isFinished());\n}\n}\n@Override\npublic void notifyCheckpointComplete(long checkpointId) throws Exception {\nsuper.notifyCheckpointComplete(checkpointId);\nlastCompletedCheckpointId = Math.max(lastCompletedCheckpointId, checkpointId);\ncommitAndEmitCheckpoints();\n}\nprivate void commitAndEmit(CommittableManager committableManager, boolean fullyReceived)\nthrows IOException, InterruptedException {\nCollection> committed =\ncommittableManager.commit(fullyReceived, committer);\nif (emitDownstream && !committed.isEmpty()) {\noutput.collect(new StreamRecord<>(committableManager.getSummary()));\nfor (CommittableWithLineage committable : committed) {\noutput.collect(new StreamRecord<>(committable));\n}\n}\n}\nprivate void retryWithDelay() {\nprocessingTimeService.registerTimer(\nprocessingTimeService.getCurrentProcessingTime() + RETRY_DELAY,\nts -> commitAndEmitCheckpoints());\n}\n@Override\npublic void processElement(StreamRecord> element) throws Exception {\ncommittableCollector.addMessage(element.getValue());\nOptionalLong checkpointId = element.getValue().getCheckpointId();\nif (checkpointId.isPresent() && checkpointId.getAsLong() <= lastCompletedCheckpointId) {\ncommitAndEmitCheckpoints();\n}\n}\n@Override\npublic void close() throws Exception {\ncloseAll(committer, super::close);\n}\n}" + }, + { + "comment": "argh, formatting isn't set up properly yet...", "method_body": "public static Object[] parameters() {\nreturn new Object[][]{new Object[]{true}, new Object[]{false}};\n}", "target_code": "return new Object[][]{new Object[]{true}, new Object[]{false}};", "method_body_after": "public static Object[] parameters() {\nreturn new Object[][] {new 
Object[] {true}, new Object[] {false}};\n}", + "context_before": "class NettyPartitionRequestClientTest {\n@Parameterized.Parameter\npublic boolean connectionReuseEnabled;\n@Parameterized.Parameters(name = \"connection reuse enabled = {0}\")\n@Test\npublic void testPartitionRequestClientReuse() throws Exception {\nfinal CreditBasedPartitionRequestClientHandler handler =\nnew CreditBasedPartitionRequestClientHandler();\nfinal EmbeddedChannel channel = new EmbeddedChannel(handler);\nfinal NettyPartitionRequestClient client =\ncreatePartitionRequestClient(channel, handler, true);\nfinal NetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);\nfinal SingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);\nfinal RemoteInputChannel inputChannel = createRemoteInputChannel(inputGate, client);\ntry {\nclient.close(inputChannel);\nassertFalse(client.canBeDisposed());\nhandler.notifyAllChannelsOfErrorAndClose(new RuntimeException());\nassertTrue(client.canBeDisposed());\n} finally {\ninputGate.close();\nnetworkBufferPool.destroyAllBufferPools();\nnetworkBufferPool.destroy();\n}\n}\n@Test\npublic void testRetriggerPartitionRequest() throws Exception {\nfinal long deadline = System.currentTimeMillis() + 30_000L;\nfinal CreditBasedPartitionRequestClientHandler handler =\nnew CreditBasedPartitionRequestClientHandler();\nfinal EmbeddedChannel channel = new EmbeddedChannel(handler);\nfinal PartitionRequestClient client =\ncreatePartitionRequestClient(channel, handler, connectionReuseEnabled);\nfinal int numExclusiveBuffers = 2;\nfinal NetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);\nfinal SingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);\nfinal RemoteInputChannel inputChannel =\nInputChannelBuilder.newBuilder()\n.setConnectionManager(\nmockConnectionManagerWithPartitionRequestClient(client))\n.setInitialBackoff(1)\n.setMaxBackoff(2)\n.buildRemoteChannel(inputGate);\ntry {\ninputGate.setInputChannels(inputChannel);\nfinal BufferPool bufferPool = networkBufferPool.createBufferPool(6, 6);\ninputGate.setBufferPool(bufferPool);\ninputGate.setupChannels();\ninputChannel.requestSubpartition();\nassertTrue(channel.isWritable());\nObject readFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((PartitionRequest) readFromOutbound).receiverId);\nassertEquals(numExclusiveBuffers, ((PartitionRequest) readFromOutbound).credit);\ninputGate.retriggerPartitionRequest(\ninputChannel.getPartitionId().getPartitionId(),\ninputChannel.getConsumedSubpartitionIndex());\nrunAllScheduledPendingTasks(channel, deadline);\nreadFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((PartitionRequest) readFromOutbound).receiverId);\nassertEquals(numExclusiveBuffers, ((PartitionRequest) readFromOutbound).credit);\ninputGate.retriggerPartitionRequest(\ninputChannel.getPartitionId().getPartitionId(),\ninputChannel.getConsumedSubpartitionIndex());\nrunAllScheduledPendingTasks(channel, deadline);\nreadFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((PartitionRequest) readFromOutbound).receiverId);\nassertEquals(numExclusiveBuffers, ((PartitionRequest) readFromOutbound).credit);\nassertNull(channel.readOutbound());\n} finally 
{\ninputGate.close();\nnetworkBufferPool.destroyAllBufferPools();\nnetworkBufferPool.destroy();\n}\n}\n@Test\npublic void testDoublePartitionRequest() throws Exception {\nfinal CreditBasedPartitionRequestClientHandler handler =\nnew CreditBasedPartitionRequestClientHandler();\nfinal EmbeddedChannel channel = new EmbeddedChannel(handler);\nfinal PartitionRequestClient client =\ncreatePartitionRequestClient(channel, handler, connectionReuseEnabled);\nfinal int numExclusiveBuffers = 2;\nfinal NetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);\nfinal SingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);\nfinal RemoteInputChannel inputChannel = createRemoteInputChannel(inputGate, client);\ntry {\ninputGate.setInputChannels(inputChannel);\nfinal BufferPool bufferPool = networkBufferPool.createBufferPool(6, 6);\ninputGate.setBufferPool(bufferPool);\ninputGate.setupChannels();\ninputChannel.requestSubpartition();\nassertTrue(channel.isWritable());\nObject readFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((PartitionRequest) readFromOutbound).receiverId);\nassertEquals(numExclusiveBuffers, ((PartitionRequest) readFromOutbound).credit);\nassertNull(channel.readOutbound());\n} finally {\ninputGate.close();\nnetworkBufferPool.destroyAllBufferPools();\nnetworkBufferPool.destroy();\n}\n}\n@Test\npublic void testResumeConsumption() throws Exception {\nfinal CreditBasedPartitionRequestClientHandler handler =\nnew CreditBasedPartitionRequestClientHandler();\nfinal EmbeddedChannel channel = new EmbeddedChannel(handler);\nfinal PartitionRequestClient client =\ncreatePartitionRequestClient(channel, handler, connectionReuseEnabled);\nfinal NetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);\nfinal SingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);\nfinal RemoteInputChannel inputChannel = createRemoteInputChannel(inputGate, client);\ntry {\nfinal BufferPool bufferPool = networkBufferPool.createBufferPool(6, 6);\ninputGate.setBufferPool(bufferPool);\ninputGate.setupChannels();\ninputChannel.requestSubpartition();\ninputChannel.resumeConsumption();\nchannel.runPendingTasks();\nObject readFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nreadFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(ResumeConsumption.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((ResumeConsumption) readFromOutbound).receiverId);\nassertNull(channel.readOutbound());\n} finally {\ninputGate.close();\nnetworkBufferPool.destroyAllBufferPools();\nnetworkBufferPool.destroy();\n}\n}\n@Test\npublic void testAcknowledgeAllRecordsProcessed() throws Exception {\nCreditBasedPartitionRequestClientHandler handler =\nnew CreditBasedPartitionRequestClientHandler();\nEmbeddedChannel channel = new EmbeddedChannel(handler);\nPartitionRequestClient client =\ncreatePartitionRequestClient(channel, handler, connectionReuseEnabled);\nNetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);\nSingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);\nRemoteInputChannel inputChannel = createRemoteInputChannel(inputGate, client);\ntry {\nBufferPool bufferPool = networkBufferPool.createBufferPool(6, 
6);\ninputGate.setBufferPool(bufferPool);\ninputGate.setupChannels();\ninputChannel.requestSubpartition();\ninputChannel.acknowledgeAllRecordsProcessed();\nchannel.runPendingTasks();\nObject readFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nreadFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(NettyMessage.AckAllUserRecordsProcessed.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((NettyMessage.AckAllUserRecordsProcessed) readFromOutbound).receiverId);\nassertNull(channel.readOutbound());\n} finally {\ninputGate.close();\nnetworkBufferPool.destroyAllBufferPools();\nnetworkBufferPool.destroy();\n}\n}\nprivate NettyPartitionRequestClient createPartitionRequestClient(\nChannel tcpChannel, NetworkClientHandler clientHandler, boolean connectionReuseEnabled)\nthrows Exception {\nConnectionID connectionID =\nnew ConnectionID(\nResourceID.generate(), new InetSocketAddress(\"localhost\", 0), 0);\nNettyConfig config =\nnew NettyConfig(InetAddress.getLocalHost(), 0, 1024, 1, new Configuration());\nNettyClient nettyClient = new NettyClient(config);\nPartitionRequestClientFactory partitionRequestClientFactory =\nnew PartitionRequestClientFactory(nettyClient, connectionReuseEnabled);\nreturn new NettyPartitionRequestClient(\ntcpChannel, clientHandler, connectionID, partitionRequestClientFactory);\n}\n/**\n* Run all pending scheduled tasks (waits until all tasks have been run or the deadline has\n* passed).\n*\n* @param channel the channel to execute tasks for\n* @param deadline maximum timestamp in ms to stop waiting further\n*\n* @throws InterruptedException\n*/\nvoid runAllScheduledPendingTasks(EmbeddedChannel channel, long deadline)\nthrows InterruptedException {\nwhile (channel.runScheduledPendingTasks() != -1 && System.currentTimeMillis() < deadline) {\nThread.sleep(1);\n}\n}\n}", "context_after": "class NettyPartitionRequestClientTest {\n@Parameterized.Parameter public boolean connectionReuseEnabled;\n@Parameterized.Parameters(name = \"connection reuse enabled = {0}\")\n@Test\npublic void testPartitionRequestClientReuse() throws Exception {\nfinal CreditBasedPartitionRequestClientHandler handler =\nnew CreditBasedPartitionRequestClientHandler();\nfinal EmbeddedChannel channel = new EmbeddedChannel(handler);\nfinal NettyPartitionRequestClient client =\ncreatePartitionRequestClient(channel, handler, true);\nfinal NetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);\nfinal SingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);\nfinal RemoteInputChannel inputChannel = createRemoteInputChannel(inputGate, client);\ntry {\nclient.close(inputChannel);\nassertFalse(client.canBeDisposed());\nhandler.notifyAllChannelsOfErrorAndClose(new RuntimeException());\nassertTrue(client.canBeDisposed());\n} finally {\ninputGate.close();\nnetworkBufferPool.destroyAllBufferPools();\nnetworkBufferPool.destroy();\n}\n}\n@Test\npublic void testRetriggerPartitionRequest() throws Exception {\nfinal long deadline = System.currentTimeMillis() + 30_000L;\nfinal CreditBasedPartitionRequestClientHandler handler =\nnew CreditBasedPartitionRequestClientHandler();\nfinal EmbeddedChannel channel = new EmbeddedChannel(handler);\nfinal PartitionRequestClient client =\ncreatePartitionRequestClient(channel, handler, connectionReuseEnabled);\nfinal int numExclusiveBuffers = 2;\nfinal NetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);\nfinal SingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);\nfinal RemoteInputChannel inputChannel =\nInputChannelBuilder.newBuilder()\n.setConnectionManager(\nmockConnectionManagerWithPartitionRequestClient(client))\n.setInitialBackoff(1)\n.setMaxBackoff(2)\n.buildRemoteChannel(inputGate);\ntry {\ninputGate.setInputChannels(inputChannel);\nfinal BufferPool bufferPool = networkBufferPool.createBufferPool(6, 6);\ninputGate.setBufferPool(bufferPool);\ninputGate.setupChannels();\ninputChannel.requestSubpartition();\nassertTrue(channel.isWritable());\nObject readFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((PartitionRequest) readFromOutbound).receiverId);\nassertEquals(numExclusiveBuffers, ((PartitionRequest) readFromOutbound).credit);\ninputGate.retriggerPartitionRequest(\ninputChannel.getPartitionId().getPartitionId(),\ninputChannel.getConsumedSubpartitionIndex());\nrunAllScheduledPendingTasks(channel, deadline);\nreadFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((PartitionRequest) readFromOutbound).receiverId);\nassertEquals(numExclusiveBuffers, ((PartitionRequest) readFromOutbound).credit);\ninputGate.retriggerPartitionRequest(\ninputChannel.getPartitionId().getPartitionId(),\ninputChannel.getConsumedSubpartitionIndex());\nrunAllScheduledPendingTasks(channel, deadline);\nreadFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((PartitionRequest) readFromOutbound).receiverId);\nassertEquals(numExclusiveBuffers, ((PartitionRequest) readFromOutbound).credit);\nassertNull(channel.readOutbound());\n} finally {\ninputGate.close();\nnetworkBufferPool.destroyAllBufferPools();\nnetworkBufferPool.destroy();\n}\n}\n@Test\npublic void testDoublePartitionRequest() throws Exception {\nfinal CreditBasedPartitionRequestClientHandler handler =\nnew CreditBasedPartitionRequestClientHandler();\nfinal EmbeddedChannel channel = new EmbeddedChannel(handler);\nfinal PartitionRequestClient client =\ncreatePartitionRequestClient(channel, handler, connectionReuseEnabled);\nfinal int numExclusiveBuffers = 2;\nfinal NetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);\nfinal SingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);\nfinal RemoteInputChannel inputChannel = createRemoteInputChannel(inputGate, client);\ntry {\ninputGate.setInputChannels(inputChannel);\nfinal BufferPool bufferPool = networkBufferPool.createBufferPool(6, 6);\ninputGate.setBufferPool(bufferPool);\ninputGate.setupChannels();\ninputChannel.requestSubpartition();\nassertTrue(channel.isWritable());\nObject readFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((PartitionRequest) readFromOutbound).receiverId);\nassertEquals(numExclusiveBuffers, ((PartitionRequest) readFromOutbound).credit);\nassertNull(channel.readOutbound());\n} finally {\ninputGate.close();\nnetworkBufferPool.destroyAllBufferPools();\nnetworkBufferPool.destroy();\n}\n}\n@Test\npublic void testResumeConsumption() throws Exception {\nfinal CreditBasedPartitionRequestClientHandler handler =\nnew CreditBasedPartitionRequestClientHandler();\nfinal EmbeddedChannel channel = new EmbeddedChannel(handler);\nfinal PartitionRequestClient client =\ncreatePartitionRequestClient(channel, handler, connectionReuseEnabled);\nfinal NetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);\nfinal SingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);\nfinal RemoteInputChannel inputChannel = createRemoteInputChannel(inputGate, client);\ntry {\nfinal BufferPool bufferPool = networkBufferPool.createBufferPool(6, 6);\ninputGate.setBufferPool(bufferPool);\ninputGate.setupChannels();\ninputChannel.requestSubpartition();\ninputChannel.resumeConsumption();\nchannel.runPendingTasks();\nObject readFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nreadFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(ResumeConsumption.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((ResumeConsumption) readFromOutbound).receiverId);\nassertNull(channel.readOutbound());\n} finally {\ninputGate.close();\nnetworkBufferPool.destroyAllBufferPools();\nnetworkBufferPool.destroy();\n}\n}\n@Test\npublic void testAcknowledgeAllRecordsProcessed() throws Exception {\nCreditBasedPartitionRequestClientHandler handler =\nnew CreditBasedPartitionRequestClientHandler();\nEmbeddedChannel channel = new EmbeddedChannel(handler);\nPartitionRequestClient client =\ncreatePartitionRequestClient(channel, handler, connectionReuseEnabled);\nNetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);\nSingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);\nRemoteInputChannel inputChannel = createRemoteInputChannel(inputGate, client);\ntry {\nBufferPool bufferPool = networkBufferPool.createBufferPool(6, 6);\ninputGate.setBufferPool(bufferPool);\ninputGate.setupChannels();\ninputChannel.requestSubpartition();\ninputChannel.acknowledgeAllRecordsProcessed();\nchannel.runPendingTasks();\nObject readFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(PartitionRequest.class));\nreadFromOutbound = channel.readOutbound();\nassertThat(readFromOutbound, instanceOf(NettyMessage.AckAllUserRecordsProcessed.class));\nassertEquals(\ninputChannel.getInputChannelId(),\n((NettyMessage.AckAllUserRecordsProcessed) readFromOutbound).receiverId);\nassertNull(channel.readOutbound());\n} finally {\ninputGate.close();\nnetworkBufferPool.destroyAllBufferPools();\nnetworkBufferPool.destroy();\n}\n}\nprivate NettyPartitionRequestClient createPartitionRequestClient(\nChannel tcpChannel, NetworkClientHandler clientHandler, boolean connectionReuseEnabled)\nthrows Exception {\nConnectionID connectionID =\nnew ConnectionID(ResourceID.generate(), new InetSocketAddress(\"localhost\", 0), 0);\nNettyConfig config =\nnew NettyConfig(InetAddress.getLocalHost(), 0, 1024, 1, new Configuration());\nNettyClient nettyClient = new NettyClient(config);\nPartitionRequestClientFactory partitionRequestClientFactory =\nnew PartitionRequestClientFactory(nettyClient, connectionReuseEnabled);\nreturn new NettyPartitionRequestClient(\ntcpChannel, clientHandler, connectionID, partitionRequestClientFactory);\n}\n/**\n* Run all pending scheduled tasks (waits until all tasks have been run or the deadline has\n* passed).\n*\n* @param channel the channel to execute tasks for\n* @param deadline maximum timestamp in ms to stop waiting further\n* @throws InterruptedException\n*/\nvoid runAllScheduledPendingTasks(EmbeddedChannel channel, long deadline)\nthrows InterruptedException {\nwhile (channel.runScheduledPendingTasks() != -1 && System.currentTimeMillis() < deadline) {\nThread.sleep(1);\n}\n}\n}" + } +] \ No newline at end of file