Datasets:
Tasks:
Text Generation
Modalities:
Text
Formats:
json
Languages:
English
Size:
1K - 10K
Tags:
code_review
License:
comment
stringlengths 16
8.84k
| method_body
stringlengths 37
239k
| target_code
stringlengths 0
242
| method_body_after
stringlengths 29
239k
| context_before
stringlengths 14
424k
| context_after
stringlengths 14
284k
|
---|---|---|---|---|---|
Can't we simply return `values.clone`? | public byte[] getBytes() {
BByteArray copy = (BByteArray) this.copy();
return copy.values;
} | return copy.values; | public byte[] getBytes() {
return values.clone();
} | class BByteArray extends BNewArray {
private static BType arrayType = new BArrayType(BTypes.typeByte);
private byte[] values;
public BByteArray(byte[] values) {
this.values = values;
this.size = values.length;
}
public BByteArray() {
values = (byte[]) newArrayInstance(Byte.TYPE);
}
public void add(long index, byte value) {
prepareForAdd(index, values.length);
values[(int) index] = value;
}
public byte get(long index) {
rangeCheckForGet(index, size);
return values[(int) index];
}
@Override
public BType getType() {
return arrayType;
}
@Override
public void grow(int newLength) {
values = Arrays.copyOf(values, newLength);
}
@Override
public BValue copy() {
BByteArray byteArray = new BByteArray(Arrays.copyOf(values, values.length));
byteArray.size = this.size;
return byteArray;
}
@Override
public String stringValue() {
StringJoiner sj = new StringJoiner(", ", "[", "]");
for (int i = 0; i < size; i++) {
sj.add("\'" + Byte.toUnsignedInt(values[i]) + "\'");
}
return sj.toString();
}
@Override
public BValue getBValue(long index) {
return new BByte(get(index));
}
} | class BByteArray extends BNewArray {
private static BType arrayType = new BArrayType(BTypes.typeByte);
private byte[] values;
public BByteArray(byte[] values) {
this.values = values;
this.size = values.length;
}
public BByteArray() {
values = (byte[]) newArrayInstance(Byte.TYPE);
}
public void add(long index, byte value) {
prepareForAdd(index, values.length);
values[(int) index] = value;
}
public byte get(long index) {
rangeCheckForGet(index, size);
return values[(int) index];
}
@Override
public BType getType() {
return arrayType;
}
@Override
public void grow(int newLength) {
values = Arrays.copyOf(values, newLength);
}
@Override
public BValue copy() {
BByteArray byteArray = new BByteArray(Arrays.copyOf(values, values.length));
byteArray.size = this.size;
return byteArray;
}
@Override
public String stringValue() {
StringJoiner sj = new StringJoiner(", ", "[", "]");
for (int i = 0; i < size; i++) {
sj.add("\'" + Byte.toUnsignedInt(values[i]) + "\'");
}
return sj.toString();
}
@Override
public BValue getBValue(long index) {
return new BByte(get(index));
}
} |
I think we can remove the DEFAULT_CLUSTER also. The feature is useless but only has compatibility. | public TGetDBPrivsResult getDBPrivs(TGetDBPrivsParams params) throws TException {
LOG.debug("get database privileges request: {}", params);
TGetDBPrivsResult result = new TGetDBPrivsResult();
List<TDBPrivDesc> tDBPrivs = Lists.newArrayList();
result.setDb_privs(tDBPrivs);
UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);
List<DbPrivEntry> dbPrivEntries = GlobalStateMgr.getCurrentState().getAuth().getDBPrivEntries(currentUser);
for (DbPrivEntry entry : dbPrivEntries) {
PrivBitSet savedPrivs = entry.getPrivSet();
String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;
String userIdentStr = currentUser.toString().replace(clusterPrefix, "");
String dbName = entry.getOrigDb();
boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT);
List<TDBPrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map(
priv -> {
TDBPrivDesc privDesc = new TDBPrivDesc();
privDesc.setDb_name(dbName);
privDesc.setIs_grantable(isGrantable);
privDesc.setUser_ident_str(userIdentStr);
privDesc.setPriv(priv.getUpperNameForMysql());
return privDesc;
}
).collect(Collectors.toList());
if (savedPrivs.satisfy(PrivPredicate.LOAD)) {
tPrivs.addAll(Lists.newArrayList("INSERT", "UPDATE", "DELETE").stream().map(priv -> {
TDBPrivDesc privDesc = new TDBPrivDesc();
privDesc.setDb_name(dbName);
privDesc.setIs_grantable(isGrantable);
privDesc.setUser_ident_str(userIdentStr);
privDesc.setPriv(priv);
return privDesc;
}).collect(Collectors.toList()));
}
tDBPrivs.addAll(tPrivs);
}
return result;
} | String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER; | public TGetDBPrivsResult getDBPrivs(TGetDBPrivsParams params) throws TException {
LOG.debug("get database privileges request: {}", params);
TGetDBPrivsResult result = new TGetDBPrivsResult();
List<TDBPrivDesc> tDBPrivs = Lists.newArrayList();
result.setDb_privs(tDBPrivs);
UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);
List<DbPrivEntry> dbPrivEntries = GlobalStateMgr.getCurrentState().getAuth().getDBPrivEntries(currentUser);
for (DbPrivEntry entry : dbPrivEntries) {
PrivBitSet savedPrivs = entry.getPrivSet();
String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;
String userIdentStr = currentUser.toString().replace(clusterPrefix, "");
String dbName = entry.getOrigDb();
boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT);
List<TDBPrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map(
priv -> {
TDBPrivDesc privDesc = new TDBPrivDesc();
privDesc.setDb_name(dbName);
privDesc.setIs_grantable(isGrantable);
privDesc.setUser_ident_str(userIdentStr);
privDesc.setPriv(priv.getUpperNameForMysql());
return privDesc;
}
).collect(Collectors.toList());
if (savedPrivs.satisfy(PrivPredicate.LOAD)) {
tPrivs.addAll(Lists.newArrayList("INSERT", "UPDATE", "DELETE").stream().map(priv -> {
TDBPrivDesc privDesc = new TDBPrivDesc();
privDesc.setDb_name(dbName);
privDesc.setIs_grantable(isGrantable);
privDesc.setUser_ident_str(userIdentStr);
privDesc.setPriv(priv);
return privDesc;
}).collect(Collectors.toList()));
}
tDBPrivs.addAll(tPrivs);
}
return result;
} | class FrontendServiceImpl implements FrontendService.Iface {
private static final Logger LOG = LogManager.getLogger(LeaderImpl.class);
private LeaderImpl leaderImpl;
private ExecuteEnv exeEnv;
public FrontendServiceImpl(ExecuteEnv exeEnv) {
leaderImpl = new LeaderImpl();
this.exeEnv = exeEnv;
}
@Override
public TGetDbsResult getDbNames(TGetDbsParams params) throws TException {
    // Lists database names visible to the requesting user, optionally
    // filtered by a MySQL-style LIKE pattern.
    LOG.debug("get db request: {}", params);
    TGetDbsResult result = new TGetDbsResult();
    List<String> visibleDbs = Lists.newArrayList();

    PatternMatcher dbMatcher = null;
    if (params.isSetPattern()) {
        try {
            dbMatcher = PatternMatcher.createMysqlPattern(params.getPattern(),
                    CaseSensibility.DATABASE.getCaseSensibility());
        } catch (AnalysisException e) {
            throw new TException("Pattern is in bad format: " + params.getPattern());
        }
    }

    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    List<String> allDbNames = globalStateMgr.getDbNames();
    LOG.debug("get db names: {}", allDbNames);

    // Prefer the fully analyzed identity when the caller supplies one.
    UserIdentity requester = params.isSetCurrent_user_ident()
            ? UserIdentity.fromThrift(params.current_user_ident)
            : UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);

    for (String fullName : allDbNames) {
        if (!globalStateMgr.getAuth().checkDbPriv(requester, fullName, PrivPredicate.SHOW)) {
            continue;
        }
        // The pattern is matched against the short name, but the full
        // (cluster-qualified) name is what gets returned.
        String shortName = ClusterNamespace.getNameFromFullName(fullName);
        if (dbMatcher == null || dbMatcher.match(shortName)) {
            visibleDbs.add(fullName);
        }
    }
    result.setDbs(visibleDbs);
    return result;
}
@Override
public TGetTablesResult getTableNames(TGetTablesParams params) throws TException {
    // Returns the table names in params.db that the requesting user may SHOW,
    // optionally filtered by a MySQL-style LIKE pattern.
    LOG.debug("get table name request: {}", params);
    TGetTablesResult result = new TGetTablesResult();
    List<String> matchedTables = Lists.newArrayList();
    result.setTables(matchedTables);

    PatternMatcher tableMatcher = null;
    if (params.isSetPattern()) {
        try {
            tableMatcher = PatternMatcher.createMysqlPattern(params.getPattern(),
                    CaseSensibility.TABLE.getCaseSensibility());
        } catch (AnalysisException e) {
            throw new TException("Pattern is in bad format: " + params.getPattern());
        }
    }

    // Prefer the fully analyzed identity when the caller supplies one.
    UserIdentity requester = params.isSetCurrent_user_ident()
            ? UserIdentity.fromThrift(params.current_user_ident)
            : UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);

    Database db = GlobalStateMgr.getCurrentState().getDb(params.db);
    if (db == null) {
        // Unknown database: return an empty table list rather than an error.
        return result;
    }
    for (String tableName : db.getTableNamesWithLock()) {
        LOG.debug("get table: {}, wait to check", tableName);
        if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(requester, params.db,
                tableName, PrivPredicate.SHOW)) {
            continue;
        }
        if (tableMatcher == null || tableMatcher.match(tableName)) {
            matchedTables.add(tableName);
        }
    }
    return result;
}
// Implements SHOW TABLE STATUS / SHOW FULL TABLES: returns one status row per
// table (or per view / materialized view, depending on params.type) in
// params.db that the requester may SHOW. limit <= 0 means unlimited.
@Override
public TListTableStatusResult listTableStatus(TGetTablesParams params) throws TException {
LOG.debug("get list table request: {}", params);
TListTableStatusResult result = new TListTableStatusResult();
List<TTableStatus> tablesResult = Lists.newArrayList();
result.setTables(tablesResult);
PatternMatcher matcher = null;
if (params.isSetPattern()) {
try {
matcher = PatternMatcher.createMysqlPattern(params.getPattern(),
CaseSensibility.TABLE.getCaseSensibility());
} catch (AnalysisException e) {
throw new TException("Pattern is in bad format " + params.getPattern());
}
}
Database db = GlobalStateMgr.getCurrentState().getDb(params.db);
long limit = params.isSetLimit() ? params.getLimit() : -1;
UserIdentity currentUser = null;
if (params.isSetCurrent_user_ident()) {
currentUser = UserIdentity.fromThrift(params.current_user_ident);
} else {
currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);
}
// Materialized views take a dedicated code path.
if (params.isSetType() && TTableType.MATERIALIZED_VIEW.equals(params.getType())) {
listMaterializedViewStatus(tablesResult, limit, matcher, currentUser, params.db);
return result;
}
if (db != null) {
// Hold the db read lock for the whole iteration so the snapshot is consistent.
db.readLock();
try {
boolean listingViews = params.isSetType() && TTableType.VIEW.equals(params.getType());
List<Table> tables = listingViews ? db.getViews() : db.getTables();
for (Table table : tables) {
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,
table.getName(), PrivPredicate.SHOW)) {
continue;
}
if (matcher != null && !matcher.match(table.getName())) {
continue;
}
TTableStatus status = new TTableStatus();
status.setName(table.getName());
status.setType(table.getMysqlType());
status.setEngine(table.getEngine());
status.setComment(table.getComment());
status.setCreate_time(table.getCreateTime());
status.setLast_check_time(table.getLastCheckTime());
if (listingViews) {
View view = (View) table;
String ddlSql = view.getInlineViewDef();
// Blank out the view DDL unless the user may SHOW every table the
// view's query references.
List<TableRef> tblRefs = new ArrayList<>();
view.getQueryStmt().collectTableRefs(tblRefs);
for (TableRef tblRef : tblRefs) {
if (!GlobalStateMgr.getCurrentState().getAuth()
.checkTblPriv(currentUser, tblRef.getName().getDb(),
tblRef.getName().getTbl(), PrivPredicate.SHOW)) {
ddlSql = "";
break;
}
}
status.setDdl_sql(ddlSql);
}
tablesResult.add(status);
if (limit > 0 && tablesResult.size() >= limit) {
break;
}
}
} finally {
db.readUnlock();
}
}
return result;
}
// Fills `tablesResult` with status rows for materialized views in `dbName`:
// first the stand-alone MaterializedView tables, then the synchronous rollup
// indexes of OLAP tables (base index excluded). Stops as soon as `limit`
// (> 0) rows have been collected.
public void listMaterializedViewStatus(List<TTableStatus> tablesResult, long limit, PatternMatcher matcher,
UserIdentity currentUser, String dbName) {
Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
if (db == null) {
LOG.warn("database not exists: {}", dbName);
return;
}
db.readLock();
try {
// Pass 1: asynchronous materialized views (stand-alone tables).
for (Table materializedView : db.getMaterializedViews()) {
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, dbName,
materializedView.getName(), PrivPredicate.SHOW)) {
continue;
}
if (matcher != null && !matcher.match(materializedView.getName())) {
continue;
}
MaterializedView mvTable = (MaterializedView) materializedView;
List<String> createTableStmt = Lists.newArrayList();
GlobalStateMgr.getDdlStmt(mvTable, createTableStmt, null, null, false, true);
String ddlSql = createTableStmt.get(0);
TTableStatus status = new TTableStatus();
status.setId(String.valueOf(mvTable.getId()));
status.setName(mvTable.getName());
status.setDdl_sql(ddlSql);
status.setRows(String.valueOf(mvTable.getRowCount()));
status.setType(mvTable.getMysqlType());
status.setComment(mvTable.getComment());
tablesResult.add(status);
if (limit > 0 && tablesResult.size() >= limit) {
return;
}
}
// Pass 2: synchronous rollup indexes on OLAP tables.
// NOTE(review): unlike pass 1, this pass performs no per-table privilege
// check — confirm whether that is intentional.
for (Table table : db.getTables()) {
if (table.getType() == Table.TableType.OLAP) {
OlapTable olapTable = (OlapTable) table;
List<MaterializedIndex> visibleMaterializedViews = olapTable.getVisibleIndex();
long baseIdx = olapTable.getBaseIndexId();
for (MaterializedIndex mvIdx : visibleMaterializedViews) {
// Skip the base index; only rollups count as materialized views here.
if (baseIdx == mvIdx.getId()) {
continue;
}
if (matcher != null && !matcher.match(olapTable.getIndexNameById(mvIdx.getId()))) {
continue;
}
MaterializedIndexMeta mvMeta = olapTable.getVisibleIndexIdToMeta().get(mvIdx.getId());
TTableStatus status = new TTableStatus();
status.setId(String.valueOf(mvIdx.getId()));
status.setName(olapTable.getIndexNameById(mvIdx.getId()));
if (mvMeta.getOriginStmt() == null) {
// No recorded DDL: reconstruct an approximate CREATE MATERIALIZED VIEW
// statement from the index schema (key columns become the group-by list,
// value columns become aggregate expressions).
StringBuilder originStmtBuilder = new StringBuilder(
"create materialized view " + olapTable.getIndexNameById(mvIdx.getId()) +
" as select ");
String groupByString = "";
for (Column column : mvMeta.getSchema()) {
if (column.isKey()) {
groupByString += column.getName() + ",";
}
}
originStmtBuilder.append(groupByString);
for (Column column : mvMeta.getSchema()) {
if (!column.isKey()) {
originStmtBuilder.append(column.getAggregationType().toString()).append("(")
.append(column.getName()).append(")").append(",");
}
}
// Drop the trailing comma left by the loops above (once for the select
// list, once more after appending the group-by list).
originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());
originStmtBuilder.append(" from ").append(olapTable.getName()).append(" group by ")
.append(groupByString);
originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());
status.setDdl_sql(originStmtBuilder.toString());
} else {
// Normalize the recorded DDL to one whitespace-collapsed line.
status.setDdl_sql(mvMeta.getOriginStmt().replace("\n", "").replace("\t", "")
.replaceAll("[ ]+", " "));
}
status.setRows(String.valueOf(mvIdx.getRowCount()));
status.setType("");
status.setComment("");
tablesResult.add(status);
if (limit > 0 && tablesResult.size() >= limit) {
return;
}
}
}
}
} finally {
db.readUnlock();
}
}
@Override
public TGetTaskInfoResult getTasks(TGetTasksParams params) throws TException {
    // SHOW TASKS: lists every task whose database the requester may SHOW.
    LOG.debug("get show task request: {}", params);
    TGetTaskInfoResult result = new TGetTaskInfoResult();
    List<TTaskInfo> taskInfos = Lists.newArrayList();
    result.setTasks(taskInfos);

    UserIdentity requester = null;
    if (params.isSetCurrent_user_ident()) {
        requester = UserIdentity.fromThrift(params.current_user_ident);
    }

    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    for (Task task : globalStateMgr.getTaskManager().showTasks(null)) {
        if (!globalStateMgr.getAuth().checkDbPriv(requester, task.getDbName(), PrivPredicate.SHOW)) {
            continue;
        }
        TTaskInfo info = new TTaskInfo();
        info.setTask_name(task.getName());
        // Millisecond timestamps are reported in seconds.
        info.setCreate_time(task.getCreateTime() / 1000);
        // Periodical tasks also report their schedule next to the type name.
        String schedule = task.getType().name();
        if (task.getType() == Constants.TaskType.PERIODICAL) {
            schedule += task.getSchedule();
        }
        info.setSchedule(schedule);
        info.setDatabase(ClusterNamespace.getNameFromFullName(task.getDbName()));
        info.setDefinition(task.getDefinition());
        info.setExpire_time(task.getExpireTime() / 1000);
        taskInfos.add(info);
    }
    return result;
}
@Override
public TGetTaskRunInfoResult getTaskRuns(TGetTasksParams params) throws TException {
    // SHOW TASK RUNS: lists every task run whose database the requester may SHOW.
    LOG.debug("get show task run request: {}", params);
    TGetTaskRunInfoResult result = new TGetTaskRunInfoResult();
    List<TTaskRunInfo> runInfos = Lists.newArrayList();
    result.setTask_runs(runInfos);

    UserIdentity requester = null;
    if (params.isSetCurrent_user_ident()) {
        requester = UserIdentity.fromThrift(params.current_user_ident);
    }

    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    for (TaskRunStatus status : globalStateMgr.getTaskManager().showTaskRunStatus(null)) {
        if (!globalStateMgr.getAuth().checkDbPriv(requester, status.getDbName(), PrivPredicate.SHOW)) {
            continue;
        }
        TTaskRunInfo info = new TTaskRunInfo();
        info.setQuery_id(status.getQueryId());
        info.setTask_name(status.getTaskName());
        // Millisecond timestamps are reported in seconds.
        info.setCreate_time(status.getCreateTime() / 1000);
        info.setFinish_time(status.getFinishTime() / 1000);
        info.setState(status.getState().toString());
        info.setDatabase(ClusterNamespace.getNameFromFullName(status.getDbName()));
        info.setDefinition(status.getDefinition());
        info.setError_code(status.getErrorCode());
        info.setError_message(status.getErrorMessage());
        info.setExpire_time(status.getExpireTime() / 1000);
        runInfos.add(info);
    }
    return result;
}
/**
 * Returns the table-level privileges of the requesting user, one
 * TTablePrivDesc per (table, privilege) pair.
 *
 * <p>Fix: the original source carried a duplicated {@code @Override}
 * annotation on this method, which does not compile in Java (non-repeatable
 * annotations may appear at most once per declaration); exactly one is kept.
 */
@Override
public TGetTablePrivsResult getTablePrivs(TGetTablePrivsParams params) throws TException {
    LOG.debug("get table privileges request: {}", params);
    TGetTablePrivsResult result = new TGetTablePrivsResult();
    List<TTablePrivDesc> tTablePrivs = Lists.newArrayList();
    result.setTable_privs(tTablePrivs);
    UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);
    List<TablePrivEntry> tablePrivEntries =
            GlobalStateMgr.getCurrentState().getAuth().getTablePrivEntries(currentUser);
    for (TablePrivEntry entry : tablePrivEntries) {
        PrivBitSet savedPrivs = entry.getPrivSet();
        // Strip the legacy "<default_cluster>:" prefix kept only for compatibility.
        String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;
        String userIdentStr = currentUser.toString().replace(clusterPrefix, "");
        String dbName = entry.getOrigDb();
        boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT);
        List<TTablePrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map(
                priv -> {
                    TTablePrivDesc privDesc = new TTablePrivDesc();
                    privDesc.setDb_name(dbName);
                    privDesc.setTable_name(entry.getOrigTbl());
                    privDesc.setIs_grantable(isGrantable);
                    privDesc.setUser_ident_str(userIdentStr);
                    privDesc.setPriv(priv.getUpperNameForMysql());
                    return privDesc;
                }
        ).collect(Collectors.toList());
        // LOAD additionally surfaces as the MySQL-visible INSERT/UPDATE/DELETE rows.
        if (savedPrivs.satisfy(PrivPredicate.LOAD)) {
            tPrivs.addAll(Lists.newArrayList("INSERT", "UPDATE", "DELETE").stream().map(priv -> {
                TTablePrivDesc privDesc = new TTablePrivDesc();
                privDesc.setDb_name(dbName);
                privDesc.setTable_name(entry.getOrigTbl());
                privDesc.setIs_grantable(isGrantable);
                privDesc.setUser_ident_str(userIdentStr);
                privDesc.setPriv(priv);
                return privDesc;
            }).collect(Collectors.toList()));
        }
        tTablePrivs.addAll(tPrivs);
    }
    return result;
}
// Returns the global (user-level) privileges of the requesting user.
@Override
public TGetUserPrivsResult getUserPrivs(TGetUserPrivsParams params) throws TException {
LOG.debug("get user privileges request: {}", params);
TGetUserPrivsResult result = new TGetUserPrivsResult();
List<TUserPrivDesc> tUserPrivs = Lists.newArrayList();
result.setUser_privs(tUserPrivs);
UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);
Auth currAuth = GlobalStateMgr.getCurrentState().getAuth();
UserPrivTable userPrivTable = currAuth.getUserPrivTable();
// Only the requester itself is inspected; the list form just mirrors the
// loop structure used by the sibling priv getters.
List<UserIdentity> userIdents = Lists.newArrayList();
userIdents.add(currentUser);
for (UserIdentity userIdent : userIdents) {
PrivBitSet savedPrivs = new PrivBitSet();
userPrivTable.getPrivs(userIdent, savedPrivs);
// Strip the legacy cluster prefix from the identity string.
String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;
String userIdentStr = currentUser.toString().replace(clusterPrefix, "");
List<TUserPrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map(
priv -> {
// NODE_PRIV is never reported as grantable, even for GRANT holders.
boolean isGrantable =
Privilege.NODE_PRIV != priv
&& userPrivTable.hasPriv(userIdent,
PrivPredicate.GRANT);
TUserPrivDesc privDesc = new TUserPrivDesc();
privDesc.setIs_grantable(isGrantable);
privDesc.setUser_ident_str(userIdentStr);
privDesc.setPriv(priv.getUpperNameForMysql());
return privDesc;
}
).collect(Collectors.toList());
tUserPrivs.addAll(tPrivs);
}
return result;
}
@Override
public TFeResult updateExportTaskStatus(TUpdateExportTaskStatusRequest request) throws TException {
    // No-op endpoint: export task status updates are acknowledged with OK
    // and otherwise ignored.
    return new TFeResult(FrontendServiceVersion.V1, new TStatus(TStatusCode.OK));
}
// DESCRIBE: returns column definitions for params.db / params.table_name.
// When neither a db nor a table name is supplied, describes every table the
// user may SHOW instead. limit <= 0 means unlimited.
@Override
public TDescribeTableResult describeTable(TDescribeTableParams params) throws TException {
LOG.debug("get desc table request: {}", params);
TDescribeTableResult result = new TDescribeTableResult();
List<TColumnDef> columns = Lists.newArrayList();
result.setColumns(columns);
UserIdentity currentUser = null;
if (params.isSetCurrent_user_ident()) {
currentUser = UserIdentity.fromThrift(params.current_user_ident);
} else {
currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);
}
long limit = params.isSetLimit() ? params.getLimit() : -1;
// Catalog-wide describe when no db and a blank table name are given.
if (!params.isSetDb() && StringUtils.isBlank(params.getTable_name())) {
describeWithoutDbAndTable(currentUser, columns, limit);
return result;
}
// Without SHOW privilege the result is silently empty (no error raised).
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,
params.getTable_name(), PrivPredicate.SHOW)) {
return result;
}
Database db = GlobalStateMgr.getCurrentState().getDb(params.db);
if (db != null) {
// Read lock guards the table lookup and schema walk.
db.readLock();
try {
Table table = db.getTable(params.getTable_name());
setColumnDesc(columns, table, limit, false, params.db, params.getTable_name());
} finally {
db.readUnlock();
}
}
return result;
}
// Describes every table in every database the user may SHOW, stopping once
// `limit` (> 0) column defs have been collected. Backs describeTable when
// neither a database nor a table name is supplied.
private void describeWithoutDbAndTable(UserIdentity currentUser, List<TColumnDef> columns, long limit) {
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
List<String> dbNames = globalStateMgr.getDbNames();
boolean reachLimit;
for (String fullName : dbNames) {
if (!GlobalStateMgr.getCurrentState().getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) {
continue;
}
Database db = GlobalStateMgr.getCurrentState().getDb(fullName);
if (db != null) {
for (String tableName : db.getTableNamesWithLock()) {
LOG.debug("get table: {}, wait to check", tableName);
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, fullName,
tableName, PrivPredicate.SHOW)) {
continue;
}
// The read lock is re-acquired per table; the name list above is a
// snapshot, so a table may have been dropped in between (getTable
// then returns null and setColumnDesc no-ops).
db.readLock();
try {
Table table = db.getTable(tableName);
reachLimit = setColumnDesc(columns, table, limit, true, fullName, tableName);
} finally {
db.readUnlock();
}
if (reachLimit) {
return;
}
}
}
}
}
// Appends one TColumnDef per base-schema column of `table` to `columns`.
// When needSetDbAndTable is true each def is also tagged with db/tbl.
// Returns true once `limit` (> 0) defs have been collected, telling the
// caller to stop iterating; a null table is a no-op returning false.
private boolean setColumnDesc(List<TColumnDef> columns, Table table, long limit,
boolean needSetDbAndTable, String db, String tbl) {
if (table != null) {
String tableKeysType = "";
if (TableType.OLAP.equals(table.getType())) {
OlapTable olapTable = (OlapTable) table;
// First three letters of the keys-type name, upper-cased (e.g. "DUP").
tableKeysType = olapTable.getKeysType().name().substring(0, 3).toUpperCase();
}
for (Column column : table.getBaseSchema()) {
final TColumnDesc desc =
new TColumnDesc(column.getName(), column.getPrimitiveType().toThrift());
// Precision/length/scale are optional; set only when the type defines them.
final Integer precision = column.getType().getPrecision();
if (precision != null) {
desc.setColumnPrecision(precision);
}
final Integer columnLength = column.getType().getColumnSize();
if (columnLength != null) {
desc.setColumnLength(columnLength);
}
final Integer decimalDigits = column.getType().getDecimalDigits();
if (decimalDigits != null) {
desc.setColumnScale(decimalDigits);
}
// Key columns carry the table keys type; value columns carry "".
if (column.isKey()) {
desc.setColumnKey(tableKeysType);
} else {
desc.setColumnKey("");
}
final TColumnDef colDef = new TColumnDef(desc);
final String comment = column.getComment();
if (comment != null) {
colDef.setComment(comment);
}
columns.add(colDef);
if (needSetDbAndTable) {
columns.get(columns.size() - 1).columnDesc.setDbName(db);
columns.get(columns.size() - 1).columnDesc.setTableName(tbl);
}
if (limit > 0 && columns.size() >= limit) {
return true;
}
}
}
return false;
}
@Override
public TShowVariableResult showVariables(TShowVariableRequest params) throws TException {
    // Dumps the session/global variables of the connection identified by
    // the request's thread id, as a name -> value map.
    TShowVariableResult result = new TShowVariableResult();
    Map<String, String> variables = Maps.newHashMap();
    result.setVariables(variables);
    ConnectContext ctx = exeEnv.getScheduler().getContext(params.getThreadId());
    if (ctx == null) {
        // Unknown connection: return an empty variable map.
        return result;
    }
    List<List<String>> rows = VariableMgr.dump(SetType.fromThrift(params.getVarType()),
            ctx.getSessionVariable(), null);
    // Each dumped row is [name, value].
    rows.forEach(row -> variables.put(row.get(0), row.get(1)));
    return result;
}
// Relays a backend's query exec-status report to the query processor,
// tagging it with the reporting client's address.
@Override
public TReportExecStatusResult reportExecStatus(TReportExecStatusParams params) throws TException {
return QeProcessorImpl.INSTANCE.reportExecStatus(params, getClientAddr());
}
// Delegates task-completion notifications to the leader implementation.
@Override
public TMasterResult finishTask(TFinishTaskRequest request) throws TException {
return leaderImpl.finishTask(request);
}
// Delegates backend report RPCs to the leader implementation.
@Override
public TMasterResult report(TReportRequest request) throws TException {
return leaderImpl.report(request);
}
// Deliberately unsupported on this service; every caller gets a TException.
@Override
public TFetchResourceResult fetchResource() throws TException {
throw new TException("not supported");
}
@Override
public TFeResult isMethodSupported(TIsMethodSupportedRequest request) throws TException {
    // Reports whether this FE implements the named RPC function.
    TStatus status = new TStatus(TStatusCode.OK);
    TFeResult result = new TFeResult(FrontendServiceVersion.V1, status);
    // Only the streaming mini-load entry point is supported here; anything
    // else is answered with NOT_IMPLEMENTED_ERROR.
    String functionName = request.getFunction_name();
    if (!functionName.equals("STREAMING_MINI_LOAD")) {
        status.setStatus_code(NOT_IMPLEMENTED_ERROR);
    }
    return result;
}
/**
 * Executes a statement forwarded from another FE on behalf of a client.
 * Requests from hosts that are not registered frontends are rejected.
 *
 * <p>Fix: the original dereferenced {@code clientAddr} unconditionally in the
 * receive log line even though the preceding guard shows it may be null,
 * which would throw a NullPointerException instead of serving the request.
 */
@Override
public TMasterOpResult forward(TMasterOpRequest params) throws TException {
    TNetworkAddress clientAddr = getClientAddr();
    if (clientAddr != null) {
        // Only known FE hosts may forward statements.
        Frontend fe = GlobalStateMgr.getCurrentState().getFeByHost(clientAddr.getHostname());
        if (fe == null) {
            LOG.warn("reject request from invalid host. client: {}", clientAddr);
            throw new TException("request from invalid host was rejected.");
        }
    }
    LOG.info("receive forwarded stmt {} from FE: {}", params.getStmt_id(),
            clientAddr == null ? "unknown" : clientAddr.getHostname());
    ConnectContext context = new ConnectContext(null);
    ConnectProcessor processor = new ConnectProcessor(context);
    TMasterOpResult result = processor.proxyExecute(params);
    // Detach the per-request context from the current thread.
    ConnectContext.remove();
    return result;
}
// Authenticates user/passwd as seen from clientIp and then checks `predicate`
// on db.tbl for the resolved identity; throws AuthenticationException on
// either failure.
// NOTE(review): the `cluster` parameter is unused in this body — presumably
// kept for legacy call-site compatibility; confirm before removing.
private void checkPasswordAndPrivs(String cluster, String user, String passwd, String db, String tbl,
String clientIp, PrivPredicate predicate) throws AuthenticationException {
final String fullUserName = ClusterNamespace.getFullName(user);
List<UserIdentity> currentUser = Lists.newArrayList();
if (!GlobalStateMgr.getCurrentState().getAuth()
.checkPlainPassword(fullUserName, clientIp, passwd, currentUser)) {
throw new AuthenticationException("Access denied for " + fullUserName + "@" + clientIp);
}
// On success checkPlainPassword resolves exactly one matching identity.
Preconditions.checkState(currentUser.size() == 1);
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser.get(0), db, tbl, predicate)) {
throw new AuthenticationException(
"Access denied; you need (at least one of) the LOAD privilege(s) for this operation");
}
}
// Begins a load transaction for stream load. Only the leader FE serves this;
// a retried request with the same request id gets the existing txn id back.
@Override
public TLoadTxnBeginResult loadTxnBegin(TLoadTxnBeginRequest request) throws TException {
String clientAddr = getClientAddrAsString();
LOG.info("receive txn begin request, db: {}, tbl: {}, label: {}, backend: {}",
request.getDb(), request.getTbl(), request.getLabel(), clientAddr);
LOG.debug("txn begin request: {}", request);
TLoadTxnBeginResult result = new TLoadTxnBeginResult();
// Non-leader FEs reject the request outright.
if (!GlobalStateMgr.getCurrentState().isLeader()) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList("current fe is not master"));
result.setStatus(status);
return result;
}
TStatus status = new TStatus(TStatusCode.OK);
result.setStatus(status);
try {
result.setTxnId(loadTxnBeginImpl(request, clientAddr));
} catch (DuplicatedRequestException e) {
// Retried begin with the same request id: reuse the assigned txn id
// and leave the status OK.
LOG.info("duplicate request for stream load. request id: {}, txn_id: {}", e.getDuplicatedRequestId(),
e.getTxnId());
result.setTxnId(e.getTxnId());
} catch (LabelAlreadyUsedException e) {
status.setStatus_code(TStatusCode.LABEL_ALREADY_EXISTS);
status.addToError_msgs(e.getMessage());
result.setJob_status(e.getJobStatus());
} catch (UserException e) {
LOG.warn("failed to begin: {}", e.getMessage());
status.setStatus_code(TStatusCode.ANALYSIS_ERROR);
status.addToError_msgs(e.getMessage());
} catch (Throwable e) {
// Catch-all so an unexpected error still yields a well-formed RPC reply.
LOG.warn("catch unknown result.", e);
status.setStatus_code(TStatusCode.INTERNAL_ERROR);
status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));
return result;
}
return result;
}
// Validates credentials and the LOAD privilege, checks label/db/table, then
// opens a backend-streaming transaction and returns its id.
private long loadTxnBeginImpl(TLoadTxnBeginRequest request, String clientIp) throws UserException {
String cluster = request.getCluster();
// Empty cluster falls back to the legacy default cluster name.
if (Strings.isNullOrEmpty(cluster)) {
cluster = SystemInfoService.DEFAULT_CLUSTER;
}
checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),
request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);
if (Strings.isNullOrEmpty(request.getLabel())) {
throw new UserException("empty label in begin request");
}
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
String dbName = request.getDb();
Database db = globalStateMgr.getDb(dbName);
if (db == null) {
throw new UserException("unknown database, database=" + dbName);
}
Table table = db.getTable(request.getTbl());
if (table == null) {
throw new UserException("unknown table \"" + request.getDb() + "." + request.getTbl() + "\"");
}
// Fall back to the configured stream-load default when no timeout is given.
long timeoutSecond = request.isSetTimeout() ? request.getTimeout() : Config.stream_load_default_timeout_second;
MetricRepo.COUNTER_LOAD_ADD.increase(1L);
return GlobalStateMgr.getCurrentGlobalTransactionMgr().beginTransaction(
db.getId(), Lists.newArrayList(table.getId()), request.getLabel(), request.getRequest_id(),
new TxnCoordinator(TxnSourceType.BE, clientIp),
TransactionState.LoadJobSourceType.BACKEND_STREAMING, -1, timeoutSecond);
}
// Commits a load transaction. Leader-only; PUBLISH_TIMEOUT is reported when
// the commit succeeded but the data is not yet visible.
@Override
public TLoadTxnCommitResult loadTxnCommit(TLoadTxnCommitRequest request) throws TException {
String clientAddr = getClientAddrAsString();
LOG.info("receive txn commit request. db: {}, tbl: {}, txn_id: {}, backend: {}",
request.getDb(), request.getTbl(), request.getTxnId(), clientAddr);
LOG.debug("txn commit request: {}", request);
TLoadTxnCommitResult result = new TLoadTxnCommitResult();
// Non-leader FEs reject the request outright.
if (!GlobalStateMgr.getCurrentState().isLeader()) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList("current fe is not master"));
result.setStatus(status);
return result;
}
TStatus status = new TStatus(TStatusCode.OK);
result.setStatus(status);
try {
if (!loadTxnCommitImpl(request)) {
// Commit went through but publish did not finish within the budget.
status.setStatus_code(TStatusCode.PUBLISH_TIMEOUT);
status.addToError_msgs("Publish timeout. The data will be visible after a while");
}
} catch (UserException e) {
LOG.warn("failed to commit txn_id: {}: {}", request.getTxnId(), e.getMessage());
status.setStatus_code(TStatusCode.ANALYSIS_ERROR);
status.addToError_msgs(e.getMessage());
} catch (Throwable e) {
// Catch-all so an unexpected error still yields a well-formed RPC reply.
LOG.warn("catch unknown result.", e);
status.setStatus_code(TStatusCode.INTERNAL_ERROR);
status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));
return result;
}
return result;
}
/**
 * Commits and publishes a prepared load transaction, then records per-table
 * load metrics keyed by the attachment's load type.
 *
 * <p>Fix: the original used an empty {@code if (request.isSetAuth_code()) {}}
 * branch with the real work in {@code else}; the condition is inverted to the
 * idiomatic negated form with identical behavior (an auth code, when present,
 * stands in for password authentication).
 *
 * @return true when the transaction became visible within the RPC budget;
 *         false when publish timed out (the data becomes visible later).
 * @throws UserException on auth failure or unknown database
 */
private boolean loadTxnCommitImpl(TLoadTxnCommitRequest request) throws UserException {
    String cluster = request.getCluster();
    // Empty cluster falls back to the legacy default cluster name.
    if (Strings.isNullOrEmpty(cluster)) {
        cluster = SystemInfoService.DEFAULT_CLUSTER;
    }
    if (!request.isSetAuth_code()) {
        checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),
                request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);
    }
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    String dbName = request.getDb();
    Database db = globalStateMgr.getDb(dbName);
    if (db == null) {
        throw new UserException("unknown database, database=" + dbName);
    }
    TxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.txnCommitAttachment);
    // Reserve a quarter of the RPC timeout so the reply can still travel back.
    long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? request.getThrift_rpc_timeout_ms() : 5000;
    timeoutMs = timeoutMs * 3 / 4;
    boolean ret = GlobalStateMgr.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(
            db, request.getTxnId(),
            TabletCommitInfo.fromThrift(request.getCommitInfos()),
            timeoutMs, attachment);
    if (!ret) {
        return ret;
    }
    MetricRepo.COUNTER_LOAD_FINISHED.increase(1L);
    // Metrics below need both the attachment and a still-existing table.
    if (null == attachment) {
        return ret;
    }
    Table tbl = db.getTable(request.getTbl());
    if (null == tbl) {
        return ret;
    }
    TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(tbl.getId());
    switch (request.txnCommitAttachment.getLoadType()) {
        case ROUTINE_LOAD:
            if (!(attachment instanceof RLTaskTxnCommitAttachment)) {
                break;
            }
            RLTaskTxnCommitAttachment routineAttachment = (RLTaskTxnCommitAttachment) attachment;
            entity.counterRoutineLoadFinishedTotal.increase(1L);
            entity.counterRoutineLoadBytesTotal.increase(routineAttachment.getReceivedBytes());
            entity.counterRoutineLoadRowsTotal.increase(routineAttachment.getLoadedRows());
            break;
        case MANUAL_LOAD:
            if (!(attachment instanceof ManualLoadTxnCommitAttachment)) {
                break;
            }
            ManualLoadTxnCommitAttachment streamAttachment = (ManualLoadTxnCommitAttachment) attachment;
            entity.counterStreamLoadFinishedTotal.increase(1L);
            entity.counterStreamLoadBytesTotal.increase(streamAttachment.getReceivedBytes());
            entity.counterStreamLoadRowsTotal.increase(streamAttachment.getLoadedRows());
            break;
        default:
            break;
    }
    return ret;
}
@Override
public TLoadTxnCommitResult loadTxnPrepare(TLoadTxnCommitRequest request) throws TException {
String clientAddr = getClientAddrAsString();
LOG.info("receive txn prepare request. db: {}, tbl: {}, txn_id: {}, backend: {}",
request.getDb(), request.getTbl(), request.getTxnId(), clientAddr);
LOG.debug("txn prepare request: {}", request);
TLoadTxnCommitResult result = new TLoadTxnCommitResult();
if (!GlobalStateMgr.getCurrentState().isLeader()) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList("current fe is not master"));
result.setStatus(status);
return result;
}
TStatus status = new TStatus(TStatusCode.OK);
result.setStatus(status);
try {
loadTxnPrepareImpl(request);
} catch (UserException e) {
LOG.warn("failed to prepare txn_id: {}: {}", request.getTxnId(), e.getMessage());
status.setStatus_code(TStatusCode.ANALYSIS_ERROR);
status.addToError_msgs(e.getMessage());
} catch (Throwable e) {
LOG.warn("catch unknown result.", e);
status.setStatus_code(TStatusCode.INTERNAL_ERROR);
status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));
return result;
}
return result;
}
private void loadTxnPrepareImpl(TLoadTxnCommitRequest request) throws UserException {
String cluster = request.getCluster();
if (Strings.isNullOrEmpty(cluster)) {
cluster = SystemInfoService.DEFAULT_CLUSTER;
}
if (request.isSetAuth_code()) {
} else {
checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),
request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);
}
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
String dbName = request.getDb();
Database db = globalStateMgr.getDb(dbName);
if (db == null) {
throw new UserException("unknown database, database=" + dbName);
}
TxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.txnCommitAttachment);
GlobalStateMgr.getCurrentGlobalTransactionMgr().prepareTransaction(
db.getId(), request.getTxnId(),
TabletCommitInfo.fromThrift(request.getCommitInfos()),
attachment);
}
@Override
public TLoadTxnRollbackResult loadTxnRollback(TLoadTxnRollbackRequest request) throws TException {
String clientAddr = getClientAddrAsString();
LOG.info("receive txn rollback request. db: {}, tbl: {}, txn_id: {}, reason: {}, backend: {}",
request.getDb(), request.getTbl(), request.getTxnId(), request.getReason(), clientAddr);
LOG.debug("txn rollback request: {}", request);
TLoadTxnRollbackResult result = new TLoadTxnRollbackResult();
if (!GlobalStateMgr.getCurrentState().isLeader()) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList("current fe is not master"));
result.setStatus(status);
return result;
}
TStatus status = new TStatus(TStatusCode.OK);
result.setStatus(status);
try {
loadTxnRollbackImpl(request);
} catch (TransactionNotFoundException e) {
LOG.warn("failed to rollback txn {}: {}", request.getTxnId(), e.getMessage());
status.setStatus_code(TStatusCode.TXN_NOT_EXISTS);
status.addToError_msgs(e.getMessage());
} catch (UserException e) {
LOG.warn("failed to rollback txn {}: {}", request.getTxnId(), e.getMessage());
status.setStatus_code(TStatusCode.ANALYSIS_ERROR);
status.addToError_msgs(e.getMessage());
} catch (Throwable e) {
LOG.warn("catch unknown result.", e);
status.setStatus_code(TStatusCode.INTERNAL_ERROR);
status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));
return result;
}
return result;
}
private void loadTxnRollbackImpl(TLoadTxnRollbackRequest request) throws UserException {
String cluster = request.getCluster();
if (Strings.isNullOrEmpty(cluster)) {
cluster = SystemInfoService.DEFAULT_CLUSTER;
}
if (request.isSetAuth_code()) {
} else {
checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),
request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);
}
String dbName = request.getDb();
Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
if (db == null) {
throw new MetaNotFoundException("db " + dbName + " does not exist");
}
long dbId = db.getId();
GlobalStateMgr.getCurrentGlobalTransactionMgr().abortTransaction(dbId, request.getTxnId(),
request.isSetReason() ? request.getReason() : "system cancel",
TxnCommitAttachment.fromThrift(request.getTxnCommitAttachment()));
}
@Override
public TStreamLoadPutResult streamLoadPut(TStreamLoadPutRequest request) {
String clientAddr = getClientAddrAsString();
LOG.info("receive stream load put request. db:{}, tbl: {}, txn_id: {}, load id: {}, backend: {}",
request.getDb(), request.getTbl(), request.getTxnId(), DebugUtil.printId(request.getLoadId()),
clientAddr);
LOG.debug("stream load put request: {}", request);
TStreamLoadPutResult result = new TStreamLoadPutResult();
TStatus status = new TStatus(TStatusCode.OK);
result.setStatus(status);
try {
result.setParams(streamLoadPutImpl(request));
} catch (UserException e) {
LOG.warn("failed to get stream load plan: {}", e.getMessage());
status.setStatus_code(TStatusCode.ANALYSIS_ERROR);
status.addToError_msgs(e.getMessage());
} catch (Throwable e) {
LOG.warn("catch unknown result.", e);
status.setStatus_code(TStatusCode.INTERNAL_ERROR);
status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));
return result;
}
return result;
}
private TExecPlanFragmentParams streamLoadPutImpl(TStreamLoadPutRequest request) throws UserException {
String cluster = request.getCluster();
if (Strings.isNullOrEmpty(cluster)) {
cluster = SystemInfoService.DEFAULT_CLUSTER;
}
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
String dbName = request.getDb();
Database db = globalStateMgr.getDb(dbName);
if (db == null) {
throw new UserException("unknown database, database=" + dbName);
}
long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? request.getThrift_rpc_timeout_ms() : 5000;
if (!db.tryReadLock(timeoutMs, TimeUnit.MILLISECONDS)) {
throw new UserException("get database read lock timeout, database=" + dbName);
}
try {
Table table = db.getTable(request.getTbl());
if (table == null) {
throw new UserException("unknown table, table=" + request.getTbl());
}
if (!(table instanceof OlapTable)) {
throw new UserException("load table type is not OlapTable, type=" + table.getClass());
}
if (table instanceof MaterializedView) {
throw new UserException(String.format(
"The data of '%s' cannot be inserted because '%s' is a materialized view," +
"and the data of materialized view must be consistent with the base table.",
table.getName(), table.getName()));
}
StreamLoadTask streamLoadTask = StreamLoadTask.fromTStreamLoadPutRequest(request, db);
StreamLoadPlanner planner = new StreamLoadPlanner(db, (OlapTable) table, streamLoadTask);
TExecPlanFragmentParams plan = planner.plan(streamLoadTask.getId());
TransactionState txnState =
GlobalStateMgr.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), request.getTxnId());
if (txnState == null) {
throw new UserException("txn does not exist: " + request.getTxnId());
}
txnState.addTableIndexes((OlapTable) table);
return plan;
} finally {
db.readUnlock();
}
}
@Override
public TStatus snapshotLoaderReport(TSnapshotLoaderReportRequest request) throws TException {
if (GlobalStateMgr.getCurrentState().getBackupHandler().report(request.getTask_type(), request.getJob_id(),
request.getTask_id(), request.getFinished_num(), request.getTotal_num())) {
return new TStatus(TStatusCode.OK);
}
return new TStatus(TStatusCode.CANCELLED);
}
@Override
public TRefreshTableResponse refreshTable(TRefreshTableRequest request) throws TException {
try {
if (request.getCatalog_name() == null) {
request.setCatalog_name(InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME);
}
GlobalStateMgr.getCurrentState().refreshExternalTable(new TableName(request.getCatalog_name(),
request.getDb_name(), request.getTable_name()), request.getPartitions());
return new TRefreshTableResponse(new TStatus(TStatusCode.OK));
} catch (DdlException e) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList(e.getMessage()));
return new TRefreshTableResponse(status);
}
}
private TNetworkAddress getClientAddr() {
ThriftServerContext connectionContext = ThriftServerEventProcessor.getConnectionContext();
if (connectionContext != null) {
return connectionContext.getClient();
}
return null;
}
private String getClientAddrAsString() {
TNetworkAddress addr = getClientAddr();
return addr == null ? "unknown" : addr.hostname;
}
    // Delegates table metadata retrieval to the leader-side implementation.
    @Override
    public TGetTableMetaResponse getTableMeta(TGetTableMetaRequest request) throws TException {
        return leaderImpl.getTableMeta(request);
    }
    // Delegates remote-transaction begin to the leader-side implementation.
    @Override
    public TBeginRemoteTxnResponse beginRemoteTxn(TBeginRemoteTxnRequest request) throws TException {
        return leaderImpl.beginRemoteTxn(request);
    }
    // Delegates remote-transaction commit to the leader-side implementation.
    @Override
    public TCommitRemoteTxnResponse commitRemoteTxn(TCommitRemoteTxnRequest request) throws TException {
        return leaderImpl.commitRemoteTxn(request);
    }
    // Delegates remote-transaction abort to the leader-side implementation.
    @Override
    public TAbortRemoteTxnResponse abortRemoteTxn(TAbortRemoteTxnRequest request) throws TException {
        return leaderImpl.abortRemoteTxn(request);
    }
@Override
public TSetConfigResponse setConfig(TSetConfigRequest request) throws TException {
try {
Preconditions.checkState(request.getKeys().size() == request.getValues().size());
Map<String, String> configs = new HashMap<>();
for (int i = 0; i < request.getKeys().size(); i++) {
configs.put(request.getKeys().get(i), request.getValues().get(i));
}
GlobalStateMgr.getCurrentState().setFrontendConfig(configs);
return new TSetConfigResponse(new TStatus(TStatusCode.OK));
} catch (DdlException e) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList(e.getMessage()));
return new TSetConfigResponse(status);
}
}
} | class FrontendServiceImpl implements FrontendService.Iface {
private static final Logger LOG = LogManager.getLogger(LeaderImpl.class);
private LeaderImpl leaderImpl;
private ExecuteEnv exeEnv;
public FrontendServiceImpl(ExecuteEnv exeEnv) {
leaderImpl = new LeaderImpl();
this.exeEnv = exeEnv;
}
@Override
public TGetDbsResult getDbNames(TGetDbsParams params) throws TException {
LOG.debug("get db request: {}", params);
TGetDbsResult result = new TGetDbsResult();
List<String> dbs = Lists.newArrayList();
PatternMatcher matcher = null;
if (params.isSetPattern()) {
try {
matcher = PatternMatcher.createMysqlPattern(params.getPattern(),
CaseSensibility.DATABASE.getCaseSensibility());
} catch (AnalysisException e) {
throw new TException("Pattern is in bad format: " + params.getPattern());
}
}
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
List<String> dbNames = globalStateMgr.getDbNames();
LOG.debug("get db names: {}", dbNames);
UserIdentity currentUser = null;
if (params.isSetCurrent_user_ident()) {
currentUser = UserIdentity.fromThrift(params.current_user_ident);
} else {
currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);
}
for (String fullName : dbNames) {
if (!globalStateMgr.getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) {
continue;
}
final String db = ClusterNamespace.getNameFromFullName(fullName);
if (matcher != null && !matcher.match(db)) {
continue;
}
dbs.add(fullName);
}
result.setDbs(dbs);
return result;
}
@Override
public TGetTablesResult getTableNames(TGetTablesParams params) throws TException {
LOG.debug("get table name request: {}", params);
TGetTablesResult result = new TGetTablesResult();
List<String> tablesResult = Lists.newArrayList();
result.setTables(tablesResult);
PatternMatcher matcher = null;
if (params.isSetPattern()) {
try {
matcher = PatternMatcher.createMysqlPattern(params.getPattern(),
CaseSensibility.TABLE.getCaseSensibility());
} catch (AnalysisException e) {
throw new TException("Pattern is in bad format: " + params.getPattern());
}
}
Database db = GlobalStateMgr.getCurrentState().getDb(params.db);
UserIdentity currentUser = null;
if (params.isSetCurrent_user_ident()) {
currentUser = UserIdentity.fromThrift(params.current_user_ident);
} else {
currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);
}
if (db != null) {
for (String tableName : db.getTableNamesWithLock()) {
LOG.debug("get table: {}, wait to check", tableName);
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,
tableName, PrivPredicate.SHOW)) {
continue;
}
if (matcher != null && !matcher.match(tableName)) {
continue;
}
tablesResult.add(tableName);
}
}
return result;
}
@Override
public TListTableStatusResult listTableStatus(TGetTablesParams params) throws TException {
LOG.debug("get list table request: {}", params);
TListTableStatusResult result = new TListTableStatusResult();
List<TTableStatus> tablesResult = Lists.newArrayList();
result.setTables(tablesResult);
PatternMatcher matcher = null;
if (params.isSetPattern()) {
try {
matcher = PatternMatcher.createMysqlPattern(params.getPattern(),
CaseSensibility.TABLE.getCaseSensibility());
} catch (AnalysisException e) {
throw new TException("Pattern is in bad format " + params.getPattern());
}
}
Database db = GlobalStateMgr.getCurrentState().getDb(params.db);
long limit = params.isSetLimit() ? params.getLimit() : -1;
UserIdentity currentUser = null;
if (params.isSetCurrent_user_ident()) {
currentUser = UserIdentity.fromThrift(params.current_user_ident);
} else {
currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);
}
if (params.isSetType() && TTableType.MATERIALIZED_VIEW.equals(params.getType())) {
listMaterializedViewStatus(tablesResult, limit, matcher, currentUser, params.db);
return result;
}
if (db != null) {
db.readLock();
try {
boolean listingViews = params.isSetType() && TTableType.VIEW.equals(params.getType());
List<Table> tables = listingViews ? db.getViews() : db.getTables();
for (Table table : tables) {
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,
table.getName(), PrivPredicate.SHOW)) {
continue;
}
if (matcher != null && !matcher.match(table.getName())) {
continue;
}
TTableStatus status = new TTableStatus();
status.setName(table.getName());
status.setType(table.getMysqlType());
status.setEngine(table.getEngine());
status.setComment(table.getComment());
status.setCreate_time(table.getCreateTime());
status.setLast_check_time(table.getLastCheckTime());
if (listingViews) {
View view = (View) table;
String ddlSql = view.getInlineViewDef();
List<TableRef> tblRefs = new ArrayList<>();
view.getQueryStmt().collectTableRefs(tblRefs);
for (TableRef tblRef : tblRefs) {
if (!GlobalStateMgr.getCurrentState().getAuth()
.checkTblPriv(currentUser, tblRef.getName().getDb(),
tblRef.getName().getTbl(), PrivPredicate.SHOW)) {
ddlSql = "";
break;
}
}
status.setDdl_sql(ddlSql);
}
tablesResult.add(status);
if (limit > 0 && tablesResult.size() >= limit) {
break;
}
}
} finally {
db.readUnlock();
}
}
return result;
}
public void listMaterializedViewStatus(List<TTableStatus> tablesResult, long limit, PatternMatcher matcher,
UserIdentity currentUser, String dbName) {
Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
if (db == null) {
LOG.warn("database not exists: {}", dbName);
return;
}
db.readLock();
try {
for (Table materializedView : db.getMaterializedViews()) {
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, dbName,
materializedView.getName(), PrivPredicate.SHOW)) {
continue;
}
if (matcher != null && !matcher.match(materializedView.getName())) {
continue;
}
MaterializedView mvTable = (MaterializedView) materializedView;
List<String> createTableStmt = Lists.newArrayList();
GlobalStateMgr.getDdlStmt(mvTable, createTableStmt, null, null, false, true);
String ddlSql = createTableStmt.get(0);
TTableStatus status = new TTableStatus();
status.setId(String.valueOf(mvTable.getId()));
status.setName(mvTable.getName());
status.setDdl_sql(ddlSql);
status.setRows(String.valueOf(mvTable.getRowCount()));
status.setType(mvTable.getMysqlType());
status.setComment(mvTable.getComment());
tablesResult.add(status);
if (limit > 0 && tablesResult.size() >= limit) {
return;
}
}
for (Table table : db.getTables()) {
if (table.getType() == Table.TableType.OLAP) {
OlapTable olapTable = (OlapTable) table;
List<MaterializedIndex> visibleMaterializedViews = olapTable.getVisibleIndex();
long baseIdx = olapTable.getBaseIndexId();
for (MaterializedIndex mvIdx : visibleMaterializedViews) {
if (baseIdx == mvIdx.getId()) {
continue;
}
if (matcher != null && !matcher.match(olapTable.getIndexNameById(mvIdx.getId()))) {
continue;
}
MaterializedIndexMeta mvMeta = olapTable.getVisibleIndexIdToMeta().get(mvIdx.getId());
TTableStatus status = new TTableStatus();
status.setId(String.valueOf(mvIdx.getId()));
status.setName(olapTable.getIndexNameById(mvIdx.getId()));
if (mvMeta.getOriginStmt() == null) {
StringBuilder originStmtBuilder = new StringBuilder(
"create materialized view " + olapTable.getIndexNameById(mvIdx.getId()) +
" as select ");
String groupByString = "";
for (Column column : mvMeta.getSchema()) {
if (column.isKey()) {
groupByString += column.getName() + ",";
}
}
originStmtBuilder.append(groupByString);
for (Column column : mvMeta.getSchema()) {
if (!column.isKey()) {
originStmtBuilder.append(column.getAggregationType().toString()).append("(")
.append(column.getName()).append(")").append(",");
}
}
originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());
originStmtBuilder.append(" from ").append(olapTable.getName()).append(" group by ")
.append(groupByString);
originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());
status.setDdl_sql(originStmtBuilder.toString());
} else {
status.setDdl_sql(mvMeta.getOriginStmt().replace("\n", "").replace("\t", "")
.replaceAll("[ ]+", " "));
}
status.setRows(String.valueOf(mvIdx.getRowCount()));
status.setType("");
status.setComment("");
tablesResult.add(status);
if (limit > 0 && tablesResult.size() >= limit) {
return;
}
}
}
}
} finally {
db.readUnlock();
}
}
@Override
public TGetTaskInfoResult getTasks(TGetTasksParams params) throws TException {
LOG.debug("get show task request: {}", params);
TGetTaskInfoResult result = new TGetTaskInfoResult();
List<TTaskInfo> tasksResult = Lists.newArrayList();
result.setTasks(tasksResult);
UserIdentity currentUser = null;
if (params.isSetCurrent_user_ident()) {
currentUser = UserIdentity.fromThrift(params.current_user_ident);
}
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
TaskManager taskManager = globalStateMgr.getTaskManager();
List<Task> taskList = taskManager.showTasks(null);
for (Task task : taskList) {
if (!globalStateMgr.getAuth().checkDbPriv(currentUser, task.getDbName(), PrivPredicate.SHOW)) {
continue;
}
TTaskInfo info = new TTaskInfo();
info.setTask_name(task.getName());
info.setCreate_time(task.getCreateTime() / 1000);
String scheduleStr = task.getType().name();
if (task.getType() == Constants.TaskType.PERIODICAL) {
scheduleStr += task.getSchedule();
}
info.setSchedule(scheduleStr);
info.setDatabase(ClusterNamespace.getNameFromFullName(task.getDbName()));
info.setDefinition(task.getDefinition());
info.setExpire_time(task.getExpireTime() / 1000);
tasksResult.add(info);
}
return result;
}
@Override
public TGetTaskRunInfoResult getTaskRuns(TGetTasksParams params) throws TException {
LOG.debug("get show task run request: {}", params);
TGetTaskRunInfoResult result = new TGetTaskRunInfoResult();
List<TTaskRunInfo> tasksResult = Lists.newArrayList();
result.setTask_runs(tasksResult);
UserIdentity currentUser = null;
if (params.isSetCurrent_user_ident()) {
currentUser = UserIdentity.fromThrift(params.current_user_ident);
}
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
TaskManager taskManager = globalStateMgr.getTaskManager();
List<TaskRunStatus> taskRunList = taskManager.showTaskRunStatus(null);
for (TaskRunStatus status : taskRunList) {
if (!globalStateMgr.getAuth().checkDbPriv(currentUser, status.getDbName(), PrivPredicate.SHOW)) {
continue;
}
TTaskRunInfo info = new TTaskRunInfo();
info.setQuery_id(status.getQueryId());
info.setTask_name(status.getTaskName());
info.setCreate_time(status.getCreateTime() / 1000);
info.setFinish_time(status.getFinishTime() / 1000);
info.setState(status.getState().toString());
info.setDatabase(ClusterNamespace.getNameFromFullName(status.getDbName()));
info.setDefinition(status.getDefinition());
info.setError_code(status.getErrorCode());
info.setError_message(status.getErrorMessage());
info.setExpire_time(status.getExpireTime() / 1000);
tasksResult.add(info);
}
return result;
}
@Override
@Override
public TGetTablePrivsResult getTablePrivs(TGetTablePrivsParams params) throws TException {
LOG.debug("get table privileges request: {}", params);
TGetTablePrivsResult result = new TGetTablePrivsResult();
List<TTablePrivDesc> tTablePrivs = Lists.newArrayList();
result.setTable_privs(tTablePrivs);
UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);
List<TablePrivEntry> tablePrivEntries =
GlobalStateMgr.getCurrentState().getAuth().getTablePrivEntries(currentUser);
for (TablePrivEntry entry : tablePrivEntries) {
PrivBitSet savedPrivs = entry.getPrivSet();
String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;
String userIdentStr = currentUser.toString().replace(clusterPrefix, "");
String dbName = entry.getOrigDb();
boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT);
List<TTablePrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map(
priv -> {
TTablePrivDesc privDesc = new TTablePrivDesc();
privDesc.setDb_name(dbName);
privDesc.setTable_name(entry.getOrigTbl());
privDesc.setIs_grantable(isGrantable);
privDesc.setUser_ident_str(userIdentStr);
privDesc.setPriv(priv.getUpperNameForMysql());
return privDesc;
}
).collect(Collectors.toList());
if (savedPrivs.satisfy(PrivPredicate.LOAD)) {
tPrivs.addAll(Lists.newArrayList("INSERT", "UPDATE", "DELETE").stream().map(priv -> {
TTablePrivDesc privDesc = new TTablePrivDesc();
privDesc.setDb_name(dbName);
privDesc.setTable_name(entry.getOrigTbl());
privDesc.setIs_grantable(isGrantable);
privDesc.setUser_ident_str(userIdentStr);
privDesc.setPriv(priv);
return privDesc;
}).collect(Collectors.toList()));
}
tTablePrivs.addAll(tPrivs);
}
return result;
}
@Override
public TGetUserPrivsResult getUserPrivs(TGetUserPrivsParams params) throws TException {
LOG.debug("get user privileges request: {}", params);
TGetUserPrivsResult result = new TGetUserPrivsResult();
List<TUserPrivDesc> tUserPrivs = Lists.newArrayList();
result.setUser_privs(tUserPrivs);
UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);
Auth currAuth = GlobalStateMgr.getCurrentState().getAuth();
UserPrivTable userPrivTable = currAuth.getUserPrivTable();
List<UserIdentity> userIdents = Lists.newArrayList();
userIdents.add(currentUser);
for (UserIdentity userIdent : userIdents) {
PrivBitSet savedPrivs = new PrivBitSet();
userPrivTable.getPrivs(userIdent, savedPrivs);
String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;
String userIdentStr = currentUser.toString().replace(clusterPrefix, "");
List<TUserPrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map(
priv -> {
boolean isGrantable =
Privilege.NODE_PRIV != priv
&& userPrivTable.hasPriv(userIdent,
PrivPredicate.GRANT);
TUserPrivDesc privDesc = new TUserPrivDesc();
privDesc.setIs_grantable(isGrantable);
privDesc.setUser_ident_str(userIdentStr);
privDesc.setPriv(priv.getUpperNameForMysql());
return privDesc;
}
).collect(Collectors.toList());
tUserPrivs.addAll(tPrivs);
}
return result;
}
@Override
public TFeResult updateExportTaskStatus(TUpdateExportTaskStatusRequest request) throws TException {
TStatus status = new TStatus(TStatusCode.OK);
TFeResult result = new TFeResult(FrontendServiceVersion.V1, status);
return result;
}
@Override
public TDescribeTableResult describeTable(TDescribeTableParams params) throws TException {
LOG.debug("get desc table request: {}", params);
TDescribeTableResult result = new TDescribeTableResult();
List<TColumnDef> columns = Lists.newArrayList();
result.setColumns(columns);
UserIdentity currentUser = null;
if (params.isSetCurrent_user_ident()) {
currentUser = UserIdentity.fromThrift(params.current_user_ident);
} else {
currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);
}
long limit = params.isSetLimit() ? params.getLimit() : -1;
if (!params.isSetDb() && StringUtils.isBlank(params.getTable_name())) {
describeWithoutDbAndTable(currentUser, columns, limit);
return result;
}
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,
params.getTable_name(), PrivPredicate.SHOW)) {
return result;
}
Database db = GlobalStateMgr.getCurrentState().getDb(params.db);
if (db != null) {
db.readLock();
try {
Table table = db.getTable(params.getTable_name());
setColumnDesc(columns, table, limit, false, params.db, params.getTable_name());
} finally {
db.readUnlock();
}
}
return result;
}
private void describeWithoutDbAndTable(UserIdentity currentUser, List<TColumnDef> columns, long limit) {
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
List<String> dbNames = globalStateMgr.getDbNames();
boolean reachLimit;
for (String fullName : dbNames) {
if (!GlobalStateMgr.getCurrentState().getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) {
continue;
}
Database db = GlobalStateMgr.getCurrentState().getDb(fullName);
if (db != null) {
for (String tableName : db.getTableNamesWithLock()) {
LOG.debug("get table: {}, wait to check", tableName);
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, fullName,
tableName, PrivPredicate.SHOW)) {
continue;
}
db.readLock();
try {
Table table = db.getTable(tableName);
reachLimit = setColumnDesc(columns, table, limit, true, fullName, tableName);
} finally {
db.readUnlock();
}
if (reachLimit) {
return;
}
}
}
}
}
private boolean setColumnDesc(List<TColumnDef> columns, Table table, long limit,
boolean needSetDbAndTable, String db, String tbl) {
if (table != null) {
String tableKeysType = "";
if (TableType.OLAP.equals(table.getType())) {
OlapTable olapTable = (OlapTable) table;
tableKeysType = olapTable.getKeysType().name().substring(0, 3).toUpperCase();
}
for (Column column : table.getBaseSchema()) {
final TColumnDesc desc =
new TColumnDesc(column.getName(), column.getPrimitiveType().toThrift());
final Integer precision = column.getType().getPrecision();
if (precision != null) {
desc.setColumnPrecision(precision);
}
final Integer columnLength = column.getType().getColumnSize();
if (columnLength != null) {
desc.setColumnLength(columnLength);
}
final Integer decimalDigits = column.getType().getDecimalDigits();
if (decimalDigits != null) {
desc.setColumnScale(decimalDigits);
}
if (column.isKey()) {
desc.setColumnKey(tableKeysType);
} else {
desc.setColumnKey("");
}
final TColumnDef colDef = new TColumnDef(desc);
final String comment = column.getComment();
if (comment != null) {
colDef.setComment(comment);
}
columns.add(colDef);
if (needSetDbAndTable) {
columns.get(columns.size() - 1).columnDesc.setDbName(db);
columns.get(columns.size() - 1).columnDesc.setTableName(tbl);
}
if (limit > 0 && columns.size() >= limit) {
return true;
}
}
}
return false;
}
@Override
public TShowVariableResult showVariables(TShowVariableRequest params) throws TException {
TShowVariableResult result = new TShowVariableResult();
Map<String, String> map = Maps.newHashMap();
result.setVariables(map);
ConnectContext ctx = exeEnv.getScheduler().getContext(params.getThreadId());
if (ctx == null) {
return result;
}
List<List<String>> rows = VariableMgr.dump(SetType.fromThrift(params.getVarType()), ctx.getSessionVariable(),
null);
for (List<String> row : rows) {
map.put(row.get(0), row.get(1));
}
return result;
}
@Override
public TReportExecStatusResult reportExecStatus(TReportExecStatusParams params) throws TException {
return QeProcessorImpl.INSTANCE.reportExecStatus(params, getClientAddr());
}
@Override
public TMasterResult finishTask(TFinishTaskRequest request) throws TException {
return leaderImpl.finishTask(request);
}
@Override
public TMasterResult report(TReportRequest request) throws TException {
return leaderImpl.report(request);
}
@Override
public TFetchResourceResult fetchResource() throws TException {
throw new TException("not supported");
}
@Override
public TFeResult isMethodSupported(TIsMethodSupportedRequest request) throws TException {
TStatus status = new TStatus(TStatusCode.OK);
TFeResult result = new TFeResult(FrontendServiceVersion.V1, status);
switch (request.getFunction_name()) {
case "STREAMING_MINI_LOAD":
break;
default:
status.setStatus_code(NOT_IMPLEMENTED_ERROR);
break;
}
return result;
}
@Override
public TMasterOpResult forward(TMasterOpRequest params) throws TException {
TNetworkAddress clientAddr = getClientAddr();
if (clientAddr != null) {
Frontend fe = GlobalStateMgr.getCurrentState().getFeByHost(clientAddr.getHostname());
if (fe == null) {
LOG.warn("reject request from invalid host. client: {}", clientAddr);
throw new TException("request from invalid host was rejected.");
}
}
LOG.info("receive forwarded stmt {} from FE: {}", params.getStmt_id(), clientAddr.getHostname());
ConnectContext context = new ConnectContext(null);
ConnectProcessor processor = new ConnectProcessor(context);
TMasterOpResult result = processor.proxyExecute(params);
ConnectContext.remove();
return result;
}
private void checkPasswordAndPrivs(String cluster, String user, String passwd, String db, String tbl,
String clientIp, PrivPredicate predicate) throws AuthenticationException {
final String fullUserName = ClusterNamespace.getFullName(user);
List<UserIdentity> currentUser = Lists.newArrayList();
if (!GlobalStateMgr.getCurrentState().getAuth()
.checkPlainPassword(fullUserName, clientIp, passwd, currentUser)) {
throw new AuthenticationException("Access denied for " + fullUserName + "@" + clientIp);
}
Preconditions.checkState(currentUser.size() == 1);
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser.get(0), db, tbl, predicate)) {
throw new AuthenticationException(
"Access denied; you need (at least one of) the LOAD privilege(s) for this operation");
}
}
@Override
public TLoadTxnBeginResult loadTxnBegin(TLoadTxnBeginRequest request) throws TException {
String clientAddr = getClientAddrAsString();
LOG.info("receive txn begin request, db: {}, tbl: {}, label: {}, backend: {}",
request.getDb(), request.getTbl(), request.getLabel(), clientAddr);
LOG.debug("txn begin request: {}", request);
TLoadTxnBeginResult result = new TLoadTxnBeginResult();
if (!GlobalStateMgr.getCurrentState().isLeader()) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList("current fe is not master"));
result.setStatus(status);
return result;
}
TStatus status = new TStatus(TStatusCode.OK);
result.setStatus(status);
try {
result.setTxnId(loadTxnBeginImpl(request, clientAddr));
} catch (DuplicatedRequestException e) {
LOG.info("duplicate request for stream load. request id: {}, txn_id: {}", e.getDuplicatedRequestId(),
e.getTxnId());
result.setTxnId(e.getTxnId());
} catch (LabelAlreadyUsedException e) {
status.setStatus_code(TStatusCode.LABEL_ALREADY_EXISTS);
status.addToError_msgs(e.getMessage());
result.setJob_status(e.getJobStatus());
} catch (UserException e) {
LOG.warn("failed to begin: {}", e.getMessage());
status.setStatus_code(TStatusCode.ANALYSIS_ERROR);
status.addToError_msgs(e.getMessage());
} catch (Throwable e) {
LOG.warn("catch unknown result.", e);
status.setStatus_code(TStatusCode.INTERNAL_ERROR);
status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));
return result;
}
return result;
}
private long loadTxnBeginImpl(TLoadTxnBeginRequest request, String clientIp) throws UserException {
String cluster = request.getCluster();
if (Strings.isNullOrEmpty(cluster)) {
cluster = SystemInfoService.DEFAULT_CLUSTER;
}
checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),
request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);
if (Strings.isNullOrEmpty(request.getLabel())) {
throw new UserException("empty label in begin request");
}
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
String dbName = request.getDb();
Database db = globalStateMgr.getDb(dbName);
if (db == null) {
throw new UserException("unknown database, database=" + dbName);
}
Table table = db.getTable(request.getTbl());
if (table == null) {
throw new UserException("unknown table \"" + request.getDb() + "." + request.getTbl() + "\"");
}
long timeoutSecond = request.isSetTimeout() ? request.getTimeout() : Config.stream_load_default_timeout_second;
MetricRepo.COUNTER_LOAD_ADD.increase(1L);
return GlobalStateMgr.getCurrentGlobalTransactionMgr().beginTransaction(
db.getId(), Lists.newArrayList(table.getId()), request.getLabel(), request.getRequest_id(),
new TxnCoordinator(TxnSourceType.BE, clientIp),
TransactionState.LoadJobSourceType.BACKEND_STREAMING, -1, timeoutSecond);
}
@Override
public TLoadTxnCommitResult loadTxnCommit(TLoadTxnCommitRequest request) throws TException {
String clientAddr = getClientAddrAsString();
LOG.info("receive txn commit request. db: {}, tbl: {}, txn_id: {}, backend: {}",
request.getDb(), request.getTbl(), request.getTxnId(), clientAddr);
LOG.debug("txn commit request: {}", request);
TLoadTxnCommitResult result = new TLoadTxnCommitResult();
if (!GlobalStateMgr.getCurrentState().isLeader()) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList("current fe is not master"));
result.setStatus(status);
return result;
}
TStatus status = new TStatus(TStatusCode.OK);
result.setStatus(status);
try {
if (!loadTxnCommitImpl(request)) {
status.setStatus_code(TStatusCode.PUBLISH_TIMEOUT);
status.addToError_msgs("Publish timeout. The data will be visible after a while");
}
} catch (UserException e) {
LOG.warn("failed to commit txn_id: {}: {}", request.getTxnId(), e.getMessage());
status.setStatus_code(TStatusCode.ANALYSIS_ERROR);
status.addToError_msgs(e.getMessage());
} catch (Throwable e) {
LOG.warn("catch unknown result.", e);
status.setStatus_code(TStatusCode.INTERNAL_ERROR);
status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));
return result;
}
return result;
}
private boolean loadTxnCommitImpl(TLoadTxnCommitRequest request) throws UserException {
String cluster = request.getCluster();
if (Strings.isNullOrEmpty(cluster)) {
cluster = SystemInfoService.DEFAULT_CLUSTER;
}
if (request.isSetAuth_code()) {
} else {
checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),
request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);
}
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
String dbName = request.getDb();
Database db = globalStateMgr.getDb(dbName);
if (db == null) {
throw new UserException("unknown database, database=" + dbName);
}
TxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.txnCommitAttachment);
long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? request.getThrift_rpc_timeout_ms() : 5000;
timeoutMs = timeoutMs * 3 / 4;
boolean ret = GlobalStateMgr.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(
db, request.getTxnId(),
TabletCommitInfo.fromThrift(request.getCommitInfos()),
timeoutMs, attachment);
if (!ret) {
return ret;
}
MetricRepo.COUNTER_LOAD_FINISHED.increase(1L);
if (null == attachment) {
return ret;
}
Table tbl = db.getTable(request.getTbl());
if (null == tbl) {
return ret;
}
TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(tbl.getId());
switch (request.txnCommitAttachment.getLoadType()) {
case ROUTINE_LOAD:
if (!(attachment instanceof RLTaskTxnCommitAttachment)) {
break;
}
RLTaskTxnCommitAttachment routineAttachment = (RLTaskTxnCommitAttachment) attachment;
entity.counterRoutineLoadFinishedTotal.increase(1L);
entity.counterRoutineLoadBytesTotal.increase(routineAttachment.getReceivedBytes());
entity.counterRoutineLoadRowsTotal.increase(routineAttachment.getLoadedRows());
break;
case MANUAL_LOAD:
if (!(attachment instanceof ManualLoadTxnCommitAttachment)) {
break;
}
ManualLoadTxnCommitAttachment streamAttachment = (ManualLoadTxnCommitAttachment) attachment;
entity.counterStreamLoadFinishedTotal.increase(1L);
entity.counterStreamLoadBytesTotal.increase(streamAttachment.getReceivedBytes());
entity.counterStreamLoadRowsTotal.increase(streamAttachment.getLoadedRows());
break;
default:
break;
}
return ret;
}
@Override
public TLoadTxnCommitResult loadTxnPrepare(TLoadTxnCommitRequest request) throws TException {
String clientAddr = getClientAddrAsString();
LOG.info("receive txn prepare request. db: {}, tbl: {}, txn_id: {}, backend: {}",
request.getDb(), request.getTbl(), request.getTxnId(), clientAddr);
LOG.debug("txn prepare request: {}", request);
TLoadTxnCommitResult result = new TLoadTxnCommitResult();
if (!GlobalStateMgr.getCurrentState().isLeader()) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList("current fe is not master"));
result.setStatus(status);
return result;
}
TStatus status = new TStatus(TStatusCode.OK);
result.setStatus(status);
try {
loadTxnPrepareImpl(request);
} catch (UserException e) {
LOG.warn("failed to prepare txn_id: {}: {}", request.getTxnId(), e.getMessage());
status.setStatus_code(TStatusCode.ANALYSIS_ERROR);
status.addToError_msgs(e.getMessage());
} catch (Throwable e) {
LOG.warn("catch unknown result.", e);
status.setStatus_code(TStatusCode.INTERNAL_ERROR);
status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));
return result;
}
return result;
}
private void loadTxnPrepareImpl(TLoadTxnCommitRequest request) throws UserException {
String cluster = request.getCluster();
if (Strings.isNullOrEmpty(cluster)) {
cluster = SystemInfoService.DEFAULT_CLUSTER;
}
if (request.isSetAuth_code()) {
} else {
checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),
request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);
}
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
String dbName = request.getDb();
Database db = globalStateMgr.getDb(dbName);
if (db == null) {
throw new UserException("unknown database, database=" + dbName);
}
TxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.txnCommitAttachment);
GlobalStateMgr.getCurrentGlobalTransactionMgr().prepareTransaction(
db.getId(), request.getTxnId(),
TabletCommitInfo.fromThrift(request.getCommitInfos()),
attachment);
}
@Override
public TLoadTxnRollbackResult loadTxnRollback(TLoadTxnRollbackRequest request) throws TException {
String clientAddr = getClientAddrAsString();
LOG.info("receive txn rollback request. db: {}, tbl: {}, txn_id: {}, reason: {}, backend: {}",
request.getDb(), request.getTbl(), request.getTxnId(), request.getReason(), clientAddr);
LOG.debug("txn rollback request: {}", request);
TLoadTxnRollbackResult result = new TLoadTxnRollbackResult();
if (!GlobalStateMgr.getCurrentState().isLeader()) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList("current fe is not master"));
result.setStatus(status);
return result;
}
TStatus status = new TStatus(TStatusCode.OK);
result.setStatus(status);
try {
loadTxnRollbackImpl(request);
} catch (TransactionNotFoundException e) {
LOG.warn("failed to rollback txn {}: {}", request.getTxnId(), e.getMessage());
status.setStatus_code(TStatusCode.TXN_NOT_EXISTS);
status.addToError_msgs(e.getMessage());
} catch (UserException e) {
LOG.warn("failed to rollback txn {}: {}", request.getTxnId(), e.getMessage());
status.setStatus_code(TStatusCode.ANALYSIS_ERROR);
status.addToError_msgs(e.getMessage());
} catch (Throwable e) {
LOG.warn("catch unknown result.", e);
status.setStatus_code(TStatusCode.INTERNAL_ERROR);
status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));
return result;
}
return result;
}
private void loadTxnRollbackImpl(TLoadTxnRollbackRequest request) throws UserException {
String cluster = request.getCluster();
if (Strings.isNullOrEmpty(cluster)) {
cluster = SystemInfoService.DEFAULT_CLUSTER;
}
if (request.isSetAuth_code()) {
} else {
checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),
request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);
}
String dbName = request.getDb();
Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
if (db == null) {
throw new MetaNotFoundException("db " + dbName + " does not exist");
}
long dbId = db.getId();
GlobalStateMgr.getCurrentGlobalTransactionMgr().abortTransaction(dbId, request.getTxnId(),
request.isSetReason() ? request.getReason() : "system cancel",
TxnCommitAttachment.fromThrift(request.getTxnCommitAttachment()));
}
@Override
public TStreamLoadPutResult streamLoadPut(TStreamLoadPutRequest request) {
String clientAddr = getClientAddrAsString();
LOG.info("receive stream load put request. db:{}, tbl: {}, txn_id: {}, load id: {}, backend: {}",
request.getDb(), request.getTbl(), request.getTxnId(), DebugUtil.printId(request.getLoadId()),
clientAddr);
LOG.debug("stream load put request: {}", request);
TStreamLoadPutResult result = new TStreamLoadPutResult();
TStatus status = new TStatus(TStatusCode.OK);
result.setStatus(status);
try {
result.setParams(streamLoadPutImpl(request));
} catch (UserException e) {
LOG.warn("failed to get stream load plan: {}", e.getMessage());
status.setStatus_code(TStatusCode.ANALYSIS_ERROR);
status.addToError_msgs(e.getMessage());
} catch (Throwable e) {
LOG.warn("catch unknown result.", e);
status.setStatus_code(TStatusCode.INTERNAL_ERROR);
status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));
return result;
}
return result;
}
private TExecPlanFragmentParams streamLoadPutImpl(TStreamLoadPutRequest request) throws UserException {
String cluster = request.getCluster();
if (Strings.isNullOrEmpty(cluster)) {
cluster = SystemInfoService.DEFAULT_CLUSTER;
}
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
String dbName = request.getDb();
Database db = globalStateMgr.getDb(dbName);
if (db == null) {
throw new UserException("unknown database, database=" + dbName);
}
long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? request.getThrift_rpc_timeout_ms() : 5000;
if (!db.tryReadLock(timeoutMs, TimeUnit.MILLISECONDS)) {
throw new UserException("get database read lock timeout, database=" + dbName);
}
try {
Table table = db.getTable(request.getTbl());
if (table == null) {
throw new UserException("unknown table, table=" + request.getTbl());
}
if (!(table instanceof OlapTable)) {
throw new UserException("load table type is not OlapTable, type=" + table.getClass());
}
if (table instanceof MaterializedView) {
throw new UserException(String.format(
"The data of '%s' cannot be inserted because '%s' is a materialized view," +
"and the data of materialized view must be consistent with the base table.",
table.getName(), table.getName()));
}
StreamLoadTask streamLoadTask = StreamLoadTask.fromTStreamLoadPutRequest(request, db);
StreamLoadPlanner planner = new StreamLoadPlanner(db, (OlapTable) table, streamLoadTask);
TExecPlanFragmentParams plan = planner.plan(streamLoadTask.getId());
TransactionState txnState =
GlobalStateMgr.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), request.getTxnId());
if (txnState == null) {
throw new UserException("txn does not exist: " + request.getTxnId());
}
txnState.addTableIndexes((OlapTable) table);
return plan;
} finally {
db.readUnlock();
}
}
@Override
public TStatus snapshotLoaderReport(TSnapshotLoaderReportRequest request) throws TException {
if (GlobalStateMgr.getCurrentState().getBackupHandler().report(request.getTask_type(), request.getJob_id(),
request.getTask_id(), request.getFinished_num(), request.getTotal_num())) {
return new TStatus(TStatusCode.OK);
}
return new TStatus(TStatusCode.CANCELLED);
}
@Override
public TRefreshTableResponse refreshTable(TRefreshTableRequest request) throws TException {
try {
if (request.getCatalog_name() == null) {
request.setCatalog_name(InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME);
}
GlobalStateMgr.getCurrentState().refreshExternalTable(new TableName(request.getCatalog_name(),
request.getDb_name(), request.getTable_name()), request.getPartitions());
return new TRefreshTableResponse(new TStatus(TStatusCode.OK));
} catch (DdlException e) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList(e.getMessage()));
return new TRefreshTableResponse(status);
}
}
private TNetworkAddress getClientAddr() {
ThriftServerContext connectionContext = ThriftServerEventProcessor.getConnectionContext();
if (connectionContext != null) {
return connectionContext.getClient();
}
return null;
}
private String getClientAddrAsString() {
TNetworkAddress addr = getClientAddr();
return addr == null ? "unknown" : addr.hostname;
}
@Override
public TGetTableMetaResponse getTableMeta(TGetTableMetaRequest request) throws TException {
return leaderImpl.getTableMeta(request);
}
@Override
public TBeginRemoteTxnResponse beginRemoteTxn(TBeginRemoteTxnRequest request) throws TException {
return leaderImpl.beginRemoteTxn(request);
}
@Override
public TCommitRemoteTxnResponse commitRemoteTxn(TCommitRemoteTxnRequest request) throws TException {
return leaderImpl.commitRemoteTxn(request);
}
@Override
public TAbortRemoteTxnResponse abortRemoteTxn(TAbortRemoteTxnRequest request) throws TException {
return leaderImpl.abortRemoteTxn(request);
}
@Override
public TSetConfigResponse setConfig(TSetConfigRequest request) throws TException {
try {
Preconditions.checkState(request.getKeys().size() == request.getValues().size());
Map<String, String> configs = new HashMap<>();
for (int i = 0; i < request.getKeys().size(); i++) {
configs.put(request.getKeys().get(i), request.getValues().get(i));
}
GlobalStateMgr.getCurrentState().setFrontendConfig(configs);
return new TSetConfigResponse(new TStatus(TStatusCode.OK));
} catch (DdlException e) {
TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
status.setError_msgs(Lists.newArrayList(e.getMessage()));
return new TSetConfigResponse(status);
}
}
} |
the `hashCode` is also inefficient | public int hashCode() {
return Objects.hashCode(this.toString());
} | return Objects.hashCode(this.toString()); | public int hashCode() {
return Objects.hashCode(this.sql);
} | class AstKey {
private final ParseNode parseNode;
public AstKey(ParseNode parseNode) {
this.parseNode = parseNode;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || ! (o instanceof AstKey)) {
return false;
}
AstKey other = (AstKey) o;
return this.toString().equals(other.toString());
}
@Override
@Override
public String toString() {
return new AstToSQLBuilder.AST2SQLBuilderVisitor(true, false).visit(parseNode);
}
} | class AstKey {
private final String sql;
public AstKey(ParseNode parseNode) {
this.sql = new AstToSQLBuilder.AST2SQLBuilderVisitor(true, false).visit(parseNode);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || ! (o instanceof AstKey)) {
return false;
}
AstKey other = (AstKey) o;
if (this.sql == null) {
return false;
}
return this.sql.equals(other.sql);
}
@Override
@Override
public String toString() {
return this.sql;
}
} |
I think this deserves a "final" here, and the line below too. | public static void main(String[] args) throws Exception {
ServerConfiguration configuration = new ServerConfiguration();
CmdLineParser parser = new CmdLineParser(configuration);
try {
parser.parseArgument(args);
fromConfig(configuration).run();
} catch (CmdLineException e) {
LOG.error("Unable to parse command line arguments {}", Arrays.asList(args), e);
throw new IllegalArgumentException("Unable to parse command line arguments.", e);
} catch (Exception e) {
LOG.error("Hit exception with SamzaJobServer. Exiting...", e);
throw e;
}
} | ServerConfiguration configuration = new ServerConfiguration(); | public static void main(String[] args) throws Exception {
final ServerConfiguration configuration = new ServerConfiguration();
final CmdLineParser parser = new CmdLineParser(configuration);
try {
parser.parseArgument(args);
fromConfig(configuration).run();
} catch (CmdLineException e) {
LOG.error("Unable to parse command line arguments {}", Arrays.asList(args), e);
throw new IllegalArgumentException("Unable to parse command line arguments.", e);
} catch (Exception e) {
LOG.error("Hit exception with SamzaJobServer. Exiting...", e);
throw e;
}
} | class ServerConfiguration {
@Option(name = "--job-port", usage = "The job service port. (Default: 11440)")
private int jobPort = 11440;
@Option(name = "--control-port", usage = "The FnControl port. (Default: 11441)")
private int controlPort = 11441;
} | class ServerConfiguration {
@Option(name = "--job-port", usage = "The job service port. (Default: 11440)")
private int jobPort = 11440;
@Option(name = "--control-port", usage = "The FnControl port. (Default: 11441)")
private int controlPort = 11441;
} |
The above TODO can be removed. | protected void createColumnAndViewDefs(Analyzer analyzer) throws AnalysisException, UserException {
if (cols != null) {
if (cols.size() != viewDefStmt.getColLabels().size()) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_VIEW_WRONG_LIST);
}
for (int i = 0; i < cols.size(); ++i) {
Type type = viewDefStmt.getBaseTblResultExprs().get(i).getType().clone();
Column col = new Column(cols.get(i).getColName(), type);
col.setComment(cols.get(i).getComment());
finalCols.add(col);
}
} else {
for (int i = 0; i < viewDefStmt.getBaseTblResultExprs().size(); ++i) {
Type type = viewDefStmt.getBaseTblResultExprs().get(i).getType().clone();
finalCols.add(new Column(viewDefStmt.getColLabels().get(i), type));
}
}
Set<String> colSets = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
for (Column col : finalCols) {
if (!colSets.add(col.getName())) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, col.getName());
}
}
originalViewDef = viewDefStmt.toSql();
if (cols == null) {
inlineViewDef = originalViewDef;
return;
}
Analyzer tmpAnalyzer = new Analyzer(analyzer);
List<String> colNames = cols.stream().map(c -> c.getColName()).collect(Collectors.toList());
cloneStmt.substituteSelectList(tmpAnalyzer, colNames);
inlineViewDef = cloneStmt.toSql();
} | Type type = viewDefStmt.getBaseTblResultExprs().get(i).getType().clone(); | protected void createColumnAndViewDefs(Analyzer analyzer) throws AnalysisException, UserException {
if (cols != null) {
if (cols.size() != viewDefStmt.getColLabels().size()) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_VIEW_WRONG_LIST);
}
for (int i = 0; i < cols.size(); ++i) {
Type type = viewDefStmt.getBaseTblResultExprs().get(i).getType().clone();
Column col = new Column(cols.get(i).getColName(), type);
col.setComment(cols.get(i).getComment());
finalCols.add(col);
}
} else {
for (int i = 0; i < viewDefStmt.getBaseTblResultExprs().size(); ++i) {
Type type = viewDefStmt.getBaseTblResultExprs().get(i).getType().clone();
finalCols.add(new Column(viewDefStmt.getColLabels().get(i), type));
}
}
Set<String> colSets = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
for (Column col : finalCols) {
if (!colSets.add(col.getName())) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, col.getName());
}
}
originalViewDef = viewDefStmt.toSql();
if (cols == null) {
inlineViewDef = originalViewDef;
return;
}
Analyzer tmpAnalyzer = new Analyzer(analyzer);
List<String> colNames = cols.stream().map(c -> c.getColName()).collect(Collectors.toList());
cloneStmt.substituteSelectList(tmpAnalyzer, colNames);
inlineViewDef = cloneStmt.toSql();
} | class BaseViewStmt extends DdlStmt {
private static final Logger LOG = LogManager.getLogger(BaseViewStmt.class);
protected final TableName tableName;
protected final List<ColWithComment> cols;
protected final QueryStmt viewDefStmt;
protected final List<Column> finalCols;
protected String originalViewDef;
protected String inlineViewDef;
protected QueryStmt cloneStmt;
public BaseViewStmt(TableName tableName, List<ColWithComment> cols, QueryStmt queryStmt) {
Preconditions.checkNotNull(queryStmt);
this.tableName = tableName;
this.cols = cols;
this.viewDefStmt = queryStmt;
finalCols = Lists.newArrayList();
}
public String getDbName() {
return tableName.getDb();
}
public String getTable() {
return tableName.getTbl();
}
public List<Column> getColumns() {
return finalCols;
}
public String getInlineViewDef() {
return inlineViewDef;
}
/**
* Sets the originalViewDef and the expanded inlineViewDef based on viewDefStmt.
* If columnNames were given, checks that they do not contain duplicate column names
* and throws an exception if they do.
*/
@Override
public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
super.analyze(analyzer);
if (viewDefStmt.hasOutFileClause()) {
throw new AnalysisException("Not support OUTFILE clause in CREATE VIEW statement");
}
}
} | class BaseViewStmt extends DdlStmt {
private static final Logger LOG = LogManager.getLogger(BaseViewStmt.class);
protected final TableName tableName;
protected final List<ColWithComment> cols;
protected final QueryStmt viewDefStmt;
protected final List<Column> finalCols;
protected String originalViewDef;
protected String inlineViewDef;
protected QueryStmt cloneStmt;
public BaseViewStmt(TableName tableName, List<ColWithComment> cols, QueryStmt queryStmt) {
Preconditions.checkNotNull(queryStmt);
this.tableName = tableName;
this.cols = cols;
this.viewDefStmt = queryStmt;
finalCols = Lists.newArrayList();
}
public String getDbName() {
return tableName.getDb();
}
public String getTable() {
return tableName.getTbl();
}
public List<Column> getColumns() {
return finalCols;
}
public String getInlineViewDef() {
return inlineViewDef;
}
/**
* Sets the originalViewDef and the expanded inlineViewDef based on viewDefStmt.
* If columnNames were given, checks that they do not contain duplicate column names
* and throws an exception if they do.
*/
@Override
public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
super.analyze(analyzer);
if (viewDefStmt.hasOutFileClause()) {
throw new AnalysisException("Not support OUTFILE clause in CREATE VIEW statement");
}
}
} |
Then maybe we substitute `Set<CoLocationGroupDesc>` with `Map<AbstractID, CoLocationGroupDesc>`? ``` final Set<CoLocationGroupDesc> coLocationGroupDescs = getVertices() .stream() .map(vertex -> CoLocationGroupDesc.from(vertex.getCoLocationGroup())) .collect(Collectors.toMap(CoLocationGroupDesc::getId, d -> d)) .values() .stream() .collect(Collectors.toSet()); ``` It can be also in a for-loop, but it is nit anyways so I think it is ok as it is. Maybe, at least a comment why we do it like this: `because equals ...`. | public Set<CoLocationGroupDesc> getCoLocationGroupDescriptors() {
final Set<CoLocationGroup> coLocationGroups = new HashSet<>();
for (JobVertex vertex : getVertices()) {
CoLocationGroup coLocationGroup = vertex.getCoLocationGroup();
if (coLocationGroup != null) {
coLocationGroups.add(coLocationGroup);
}
}
final Set<CoLocationGroupDesc> coLocationGroupDescs = coLocationGroups
.stream()
.map(CoLocationGroupDesc::from)
.collect(Collectors.toSet());
return Collections.unmodifiableSet(coLocationGroupDescs);
} | } | public Set<CoLocationGroupDesc> getCoLocationGroupDescriptors() {
final Set<CoLocationGroupDesc> coLocationGroups = IterableUtils
.toStream(getVertices())
.map(JobVertex::getCoLocationGroup)
.filter(Objects::nonNull)
.distinct()
.map(CoLocationGroupDesc::from)
.collect(Collectors.toSet());
return Collections.unmodifiableSet(coLocationGroups);
} | class JobGraph implements Serializable {
private static final long serialVersionUID = 1L;
/** List of task vertices included in this job graph. */
private final Map<JobVertexID, JobVertex> taskVertices = new LinkedHashMap<JobVertexID, JobVertex>();
/** The job configuration attached to this job. */
private final Configuration jobConfiguration = new Configuration();
/** ID of this job. May be set if specific job id is desired (e.g. session management) */
private JobID jobID;
/** Name of this job. */
private final String jobName;
/** The mode in which the job is scheduled. */
private ScheduleMode scheduleMode = ScheduleMode.LAZY_FROM_SOURCES;
/** Job specific execution config. */
private SerializedValue<ExecutionConfig> serializedExecutionConfig;
/** The settings for the job checkpoints. */
private JobCheckpointingSettings snapshotSettings;
/** Savepoint restore settings. */
private SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.none();
/** Set of JAR files required to run this job. */
private final List<Path> userJars = new ArrayList<Path>();
/** Set of custom files required to run this job. */
private final Map<String, DistributedCache.DistributedCacheEntry> userArtifacts = new HashMap<>();
/** Set of blob keys identifying the JAR files required to run this job. */
private final List<PermanentBlobKey> userJarBlobKeys = new ArrayList<>();
/** List of classpaths required to run this job. */
private List<URL> classpaths = Collections.emptyList();
/**
* Constructs a new job graph with the given name, the given {@link ExecutionConfig},
* and a random job ID. The ExecutionConfig will be serialized and can't be modified afterwards.
*
* @param jobName The name of the job.
*/
public JobGraph(String jobName) {
this(null, jobName);
}
/**
* Constructs a new job graph with the given job ID (or a random ID, if {@code null} is passed),
* the given name and the given execution configuration (see {@link ExecutionConfig}).
* The ExecutionConfig will be serialized and can't be modified afterwards.
*
* @param jobId The id of the job. A random ID is generated, if {@code null} is passed.
* @param jobName The name of the job.
*/
public JobGraph(JobID jobId, String jobName) {
this.jobID = jobId == null ? new JobID() : jobId;
this.jobName = jobName == null ? "(unnamed job)" : jobName;
try {
setExecutionConfig(new ExecutionConfig());
} catch (IOException e) {
throw new RuntimeException("bug, empty execution config is not serializable");
}
}
/**
* Constructs a new job graph with no name, a random job ID, the given {@link ExecutionConfig}, and
* the given job vertices. The ExecutionConfig will be serialized and can't be modified afterwards.
*
* @param vertices The vertices to add to the graph.
*/
public JobGraph(JobVertex... vertices) {
this(null, vertices);
}
/**
* Constructs a new job graph with the given name, the given {@link ExecutionConfig}, a random job ID,
* and the given job vertices. The ExecutionConfig will be serialized and can't be modified afterwards.
*
* @param jobName The name of the job.
* @param vertices The vertices to add to the graph.
*/
public JobGraph(String jobName, JobVertex... vertices) {
this(null, jobName, vertices);
}
/**
* Constructs a new job graph with the given name, the given {@link ExecutionConfig},
* the given jobId or a random one if null supplied, and the given job vertices.
* The ExecutionConfig will be serialized and can't be modified afterwards.
*
* @param jobId The id of the job. A random ID is generated, if {@code null} is passed.
* @param jobName The name of the job.
* @param vertices The vertices to add to the graph.
*/
public JobGraph(JobID jobId, String jobName, JobVertex... vertices) {
this(jobId, jobName);
for (JobVertex vertex : vertices) {
addVertex(vertex);
}
}
/**
* Returns the ID of the job.
*
* @return the ID of the job
*/
public JobID getJobID() {
return this.jobID;
}
/**
* Sets the ID of the job.
*/
public void setJobID(JobID jobID) {
this.jobID = jobID;
}
/**
* Returns the name assigned to the job graph.
*
* @return the name assigned to the job graph
*/
public String getName() {
return this.jobName;
}
/**
* Returns the configuration object for this job. Job-wide parameters should be set into that
* configuration object.
*
* @return The configuration object for this job.
*/
public Configuration getJobConfiguration() {
return this.jobConfiguration;
}
/**
* Returns the {@link ExecutionConfig}.
*
* @return ExecutionConfig
*/
public SerializedValue<ExecutionConfig> getSerializedExecutionConfig() {
return serializedExecutionConfig;
}
public void setScheduleMode(ScheduleMode scheduleMode) {
this.scheduleMode = scheduleMode;
}
public ScheduleMode getScheduleMode() {
return scheduleMode;
}
/**
* Sets the savepoint restore settings.
* @param settings The savepoint restore settings.
*/
public void setSavepointRestoreSettings(SavepointRestoreSettings settings) {
this.savepointRestoreSettings = checkNotNull(settings, "Savepoint restore settings");
}
/**
* Returns the configured savepoint restore setting.
* @return The configured savepoint restore settings.
*/
public SavepointRestoreSettings getSavepointRestoreSettings() {
return savepointRestoreSettings;
}
/**
* Sets the execution config. This method eagerly serialized the ExecutionConfig for future RPC
* transport. Further modification of the referenced ExecutionConfig object will not affect
* this serialized copy.
*
* @param executionConfig The ExecutionConfig to be serialized.
* @throws IOException Thrown if the serialization of the ExecutionConfig fails
*/
public void setExecutionConfig(ExecutionConfig executionConfig) throws IOException {
checkNotNull(executionConfig, "ExecutionConfig must not be null.");
this.serializedExecutionConfig = new SerializedValue<>(executionConfig);
}
/**
* Adds a new task vertex to the job graph if it is not already included.
*
* @param vertex
* the new task vertex to be added
*/
public void addVertex(JobVertex vertex) {
final JobVertexID id = vertex.getID();
JobVertex previous = taskVertices.put(id, vertex);
if (previous != null) {
taskVertices.put(id, previous);
throw new IllegalArgumentException("The JobGraph already contains a vertex with that id.");
}
}
/**
* Returns an Iterable to iterate all vertices registered with the job graph.
*
* @return an Iterable to iterate all vertices registered with the job graph
*/
public Iterable<JobVertex> getVertices() {
return this.taskVertices.values();
}
/**
* Returns an array of all job vertices that are registered with the job graph. The order in which the vertices
* appear in the list is not defined.
*
* @return an array of all job vertices that are registered with the job graph
*/
public JobVertex[] getVerticesAsArray() {
return this.taskVertices.values().toArray(new JobVertex[this.taskVertices.size()]);
}
/**
* Returns the number of all vertices.
*
* @return The number of all vertices.
*/
public int getNumberOfVertices() {
return this.taskVertices.size();
}
public Set<SlotSharingGroup> getSlotSharingGroups() {
final Set<SlotSharingGroup> slotSharingGroups = new HashSet<>();
for (JobVertex vertex : getVertices()) {
final SlotSharingGroup slotSharingGroup = vertex.getSlotSharingGroup();
checkNotNull(slotSharingGroup);
slotSharingGroups.add(slotSharingGroup);
}
return Collections.unmodifiableSet(slotSharingGroups);
}
/**
 * Sets the settings for asynchronous snapshots. A value of {@code null} means that
 * snapshotting is not enabled.
 *
 * @param settings The snapshot settings
 */
public void setSnapshotSettings(JobCheckpointingSettings settings) {
    this.snapshotSettings = settings;
}

/**
 * Gets the settings for asynchronous snapshots. This method returns null, when
 * checkpointing is not enabled.
 *
 * @return The snapshot settings
 */
public JobCheckpointingSettings getCheckpointingSettings() {
    return snapshotSettings;
}

/**
 * Checks if the checkpointing was enabled for this job graph.
 *
 * @return true if checkpointing enabled
 */
public boolean isCheckpointingEnabled() {
    // No settings object at all means checkpointing was never configured.
    if (snapshotSettings == null) {
        return false;
    }
    long checkpointInterval = snapshotSettings.getCheckpointCoordinatorConfiguration().getCheckpointInterval();
    // A non-positive interval, or an interval of Long.MAX_VALUE, counts as disabled.
    return checkpointInterval > 0 &&
        checkpointInterval < Long.MAX_VALUE;
}
/**
 * Searches for a vertex with a matching ID and returns it.
 *
 * @param id
 *        the ID of the vertex to search for
 * @return the vertex with the matching ID or <code>null</code> if no vertex with such ID could be found
 */
public JobVertex findVertexByID(JobVertexID id) {
    return this.taskVertices.get(id);
}

/**
 * Sets the classpaths required to run the job on a task manager.
 *
 * <p>NOTE(review): the given list is stored by reference, not copied; later mutation by the
 * caller would be visible here — confirm whether a defensive copy is wanted.
 *
 * @param paths paths of the directories/JAR files required to run the job on a task manager
 */
public void setClasspaths(List<URL> paths) {
    classpaths = paths;
}

/** Returns the classpaths required to run the job on a task manager. */
public List<URL> getClasspaths() {
    return classpaths;
}

/**
 * Gets the maximum parallelism of all operations in this job graph.
 *
 * @return The maximum parallelism of this job graph, or -1 if the graph has no vertices
 */
public int getMaximumParallelism() {
    int maxParallelism = -1;
    for (JobVertex vertex : taskVertices.values()) {
        // NOTE(review): this maxes over getParallelism(), not getMaxParallelism() —
        // presumably intentional; confirm against callers.
        maxParallelism = Math.max(vertex.getParallelism(), maxParallelism);
    }
    return maxParallelism;
}
/**
 * Returns all vertices of this graph in an order in which every vertex appears after all of
 * its inputs (a topological order, seeded with the source vertices).
 *
 * @return topologically sorted list of all vertices of this graph
 * @throws InvalidProgramException if the job graph contains a cycle
 */
public List<JobVertex> getVerticesSortedTopologicallyFromSources() throws InvalidProgramException {
    if (this.taskVertices.isEmpty()) {
        return Collections.emptyList();
    }
    List<JobVertex> sorted = new ArrayList<JobVertex>(this.taskVertices.size());
    // LinkedHashSet preserves the registration order, keeping the output deterministic.
    Set<JobVertex> remaining = new LinkedHashSet<JobVertex>(this.taskVertices.values());
    {
        // Seed the result with all sources (vertices that have no connected inputs).
        Iterator<JobVertex> iter = remaining.iterator();
        while (iter.hasNext()) {
            JobVertex vertex = iter.next();
            if (vertex.hasNoConnectedInputs()) {
                sorted.add(vertex);
                iter.remove();
            }
        }
    }
    int startNodePos = 0;
    // Treat 'sorted' as a work queue: each processed vertex may release successors
    // whose predecessors are now all emitted.
    while (!remaining.isEmpty()) {
        // No vertex could be released although some remain: there must be a cycle.
        if (startNodePos >= sorted.size()) {
            throw new InvalidProgramException("The job graph is cyclic.");
        }
        JobVertex current = sorted.get(startNodePos++);
        addNodesThatHaveNoNewPredecessors(current, sorted, remaining);
    }
    return sorted;
}
/**
 * Moves every consumer of {@code start} whose remaining predecessors have all been sorted
 * already from {@code remaining} into {@code target}, recursing into each newly released
 * vertex. Helper of {@link #getVerticesSortedTopologicallyFromSources()}.
 */
private void addNodesThatHaveNoNewPredecessors(JobVertex start, List<JobVertex> target, Set<JobVertex> remaining) {
    for (IntermediateDataSet dataSet : start.getProducedDataSets()) {
        for (JobEdge edge : dataSet.getConsumers()) {
            JobVertex v = edge.getTarget();
            if (!remaining.contains(v)) {
                // consumer already emitted; nothing to do
                continue;
            }
            boolean hasNewPredecessors = false;
            for (JobEdge e : v.getInputs()) {
                // skip the edge we are currently traversing
                if (e == edge) {
                    continue;
                }
                IntermediateDataSet source = e.getSource();
                if (remaining.contains(source.getProducer())) {
                    hasNewPredecessors = true;
                    break;
                }
            }
            if (!hasNewPredecessors) {
                target.add(v);
                remaining.remove(v);
                addNodesThatHaveNoNewPredecessors(v, target, remaining);
            }
        }
    }
}
/**
 * Adds the path of a JAR file required to run the job on a task manager.
 *
 * @param jar path of the JAR file required to run the job on a task manager
 * @throws IllegalArgumentException if {@code jar} is null
 */
public void addJar(Path jar) {
    if (jar == null) {
        throw new IllegalArgumentException("The jar path must not be null.");
    }
    // keep the list duplicate-free while preserving insertion order
    if (!userJars.contains(jar)) {
        userJars.add(jar);
    }
}

/**
 * Adds the given jar files to the {@link JobGraph} via {@link JobGraph#addJar(Path)}.
 *
 * @param jarFilesToAttach a list of the {@link URL URLs} of the jar files to attach to the jobgraph.
 * @throws RuntimeException if a jar URL is not valid.
 */
public void addJars(final List<URL> jarFilesToAttach) {
    for (URL jar : jarFilesToAttach) {
        try {
            addJar(new Path(jar.toURI()));
        } catch (URISyntaxException e) {
            throw new RuntimeException("URL is invalid. This should not happen.", e);
        }
    }
}

/**
 * Gets the list of assigned user jar paths.
 *
 * @return The list of assigned user jar paths
 */
public List<Path> getUserJars() {
    return userJars;
}
/**
 * Adds the path of a custom file required to run the job on a task manager.
 *
 * @param name a name under which this artifact will be accessible through {@link DistributedCache}
 * @param file path of a custom file required to run the job on a task manager
 * @throws IllegalArgumentException if {@code file} is null
 */
public void addUserArtifact(String name, DistributedCache.DistributedCacheEntry file) {
    if (file == null) {
        throw new IllegalArgumentException("The artifact entry must not be null.");
    }
    // first registration under a given name wins; later duplicates are ignored
    userArtifacts.putIfAbsent(name, file);
}

/**
 * Gets the map of user artifacts registered with this job graph.
 *
 * @return map from artifact name to its cache entry
 */
public Map<String, DistributedCache.DistributedCacheEntry> getUserArtifacts() {
    return userArtifacts;
}

/**
 * Adds the BLOB referenced by the key to the JobGraph's dependencies.
 *
 * @param key BLOB key of a JAR file required to run the job on a task manager
 * @throws IllegalArgumentException if {@code key} is null
 */
public void addUserJarBlobKey(PermanentBlobKey key) {
    if (key == null) {
        throw new IllegalArgumentException("The blob key must not be null.");
    }
    // keep the list duplicate-free while preserving insertion order
    if (!userJarBlobKeys.contains(key)) {
        userJarBlobKeys.add(key);
    }
}

/**
 * Checks whether the JobGraph has user code JAR files attached.
 *
 * @return True, if the JobGraph has user code JAR files attached, false otherwise.
 */
public boolean hasUsercodeJarFiles() {
    return !this.userJars.isEmpty();
}

/**
 * Returns a list of BLOB keys referring to the JAR files required to run this job.
 *
 * @return list of BLOB keys referring to the JAR files required to run this job
 */
public List<PermanentBlobKey> getUserJarBlobKeys() {
    return this.userJarBlobKeys;
}
@Override
public String toString() {
    // Renders exactly as the former concatenation did: "JobGraph(jobId: <id>)",
    // with a null id printed as "null" (%s uses String.valueOf).
    return String.format("JobGraph(jobId: %s)", jobID);
}
/**
 * Replaces the registered artifact entry for {@code entryName} with a copy that carries the
 * serialized form of the given blob key. Does nothing if no artifact is registered under
 * that name (Map#computeIfPresent semantics).
 *
 * @throws IOException if serializing the blob key fails
 */
public void setUserArtifactBlobKey(String entryName, PermanentBlobKey blobKey) throws IOException {
    final byte[] serializedBlobKey = InstantiationUtil.serializeObject(blobKey);
    userArtifacts.computeIfPresent(
        entryName,
        (name, entry) -> new DistributedCache.DistributedCacheEntry(
            entry.filePath,
            entry.isExecutable,
            serializedBlobKey,
            entry.isZipped));
}

/**
 * Replaces the registered artifact entry for {@code entryName} with a copy pointing at the
 * given remote path (and carrying no blob key). Does nothing if no artifact is registered
 * under that name.
 */
public void setUserArtifactRemotePath(String entryName, String remotePath) {
    userArtifacts.computeIfPresent(
        entryName,
        (name, entry) -> new DistributedCache.DistributedCacheEntry(
            remotePath,
            entry.isExecutable,
            null,
            entry.isZipped));
}

/** Writes every registered user artifact into this job's configuration object. */
public void writeUserArtifactEntriesToConfiguration() {
    for (Map.Entry<String, DistributedCache.DistributedCacheEntry> artifact : userArtifacts.entrySet()) {
        DistributedCache.writeFileInfoToConfig(
            artifact.getKey(),
            artifact.getValue(),
            jobConfiguration);
    }
}
} | class JobGraph implements Serializable {
private static final long serialVersionUID = 1L;
/** List of task vertices included in this job graph. */
private final Map<JobVertexID, JobVertex> taskVertices = new LinkedHashMap<JobVertexID, JobVertex>();
/** The job configuration attached to this job. */
private final Configuration jobConfiguration = new Configuration();
/** ID of this job. May be set if specific job id is desired (e.g. session management) */
private JobID jobID;
/** Name of this job. */
private final String jobName;
/** The mode in which the job is scheduled. */
private ScheduleMode scheduleMode = ScheduleMode.LAZY_FROM_SOURCES;
/** Job specific execution config. */
private SerializedValue<ExecutionConfig> serializedExecutionConfig;
/** The settings for the job checkpoints. */
private JobCheckpointingSettings snapshotSettings;
/** Savepoint restore settings. */
private SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.none();
/** Set of JAR files required to run this job. */
private final List<Path> userJars = new ArrayList<Path>();
/** Set of custom files required to run this job. */
private final Map<String, DistributedCache.DistributedCacheEntry> userArtifacts = new HashMap<>();
/** Set of blob keys identifying the JAR files required to run this job. */
private final List<PermanentBlobKey> userJarBlobKeys = new ArrayList<>();
/** List of classpaths required to run this job. */
private List<URL> classpaths = Collections.emptyList();
/**
* Constructs a new job graph with the given name, the given {@link ExecutionConfig},
* and a random job ID. The ExecutionConfig will be serialized and can't be modified afterwards.
*
* @param jobName The name of the job.
*/
public JobGraph(String jobName) {
this(null, jobName);
}
/**
* Constructs a new job graph with the given job ID (or a random ID, if {@code null} is passed),
* the given name and the given execution configuration (see {@link ExecutionConfig}).
* The ExecutionConfig will be serialized and can't be modified afterwards.
*
* @param jobId The id of the job. A random ID is generated, if {@code null} is passed.
* @param jobName The name of the job.
*/
public JobGraph(JobID jobId, String jobName) {
this.jobID = jobId == null ? new JobID() : jobId;
this.jobName = jobName == null ? "(unnamed job)" : jobName;
try {
setExecutionConfig(new ExecutionConfig());
} catch (IOException e) {
throw new RuntimeException("bug, empty execution config is not serializable");
}
}
/**
* Constructs a new job graph with no name, a random job ID, the given {@link ExecutionConfig}, and
* the given job vertices. The ExecutionConfig will be serialized and can't be modified afterwards.
*
* @param vertices The vertices to add to the graph.
*/
public JobGraph(JobVertex... vertices) {
this(null, vertices);
}
/**
* Constructs a new job graph with the given name, the given {@link ExecutionConfig}, a random job ID,
* and the given job vertices. The ExecutionConfig will be serialized and can't be modified afterwards.
*
* @param jobName The name of the job.
* @param vertices The vertices to add to the graph.
*/
public JobGraph(String jobName, JobVertex... vertices) {
this(null, jobName, vertices);
}
/**
* Constructs a new job graph with the given name, the given {@link ExecutionConfig},
* the given jobId or a random one if null supplied, and the given job vertices.
* The ExecutionConfig will be serialized and can't be modified afterwards.
*
* @param jobId The id of the job. A random ID is generated, if {@code null} is passed.
* @param jobName The name of the job.
* @param vertices The vertices to add to the graph.
*/
public JobGraph(JobID jobId, String jobName, JobVertex... vertices) {
this(jobId, jobName);
for (JobVertex vertex : vertices) {
addVertex(vertex);
}
}
/**
* Returns the ID of the job.
*
* @return the ID of the job
*/
public JobID getJobID() {
return this.jobID;
}
/**
* Sets the ID of the job.
*/
public void setJobID(JobID jobID) {
this.jobID = jobID;
}
/**
* Returns the name assigned to the job graph.
*
* @return the name assigned to the job graph
*/
public String getName() {
return this.jobName;
}
/**
* Returns the configuration object for this job. Job-wide parameters should be set into that
* configuration object.
*
* @return The configuration object for this job.
*/
public Configuration getJobConfiguration() {
return this.jobConfiguration;
}
/**
* Returns the {@link ExecutionConfig}.
*
* @return ExecutionConfig
*/
public SerializedValue<ExecutionConfig> getSerializedExecutionConfig() {
return serializedExecutionConfig;
}
public void setScheduleMode(ScheduleMode scheduleMode) {
this.scheduleMode = scheduleMode;
}
public ScheduleMode getScheduleMode() {
return scheduleMode;
}
/**
* Sets the savepoint restore settings.
* @param settings The savepoint restore settings.
*/
public void setSavepointRestoreSettings(SavepointRestoreSettings settings) {
this.savepointRestoreSettings = checkNotNull(settings, "Savepoint restore settings");
}
/**
* Returns the configured savepoint restore setting.
* @return The configured savepoint restore settings.
*/
public SavepointRestoreSettings getSavepointRestoreSettings() {
return savepointRestoreSettings;
}
/**
* Sets the execution config. This method eagerly serialized the ExecutionConfig for future RPC
* transport. Further modification of the referenced ExecutionConfig object will not affect
* this serialized copy.
*
* @param executionConfig The ExecutionConfig to be serialized.
* @throws IOException Thrown if the serialization of the ExecutionConfig fails
*/
public void setExecutionConfig(ExecutionConfig executionConfig) throws IOException {
checkNotNull(executionConfig, "ExecutionConfig must not be null.");
this.serializedExecutionConfig = new SerializedValue<>(executionConfig);
}
/**
* Adds a new task vertex to the job graph if it is not already included.
*
* @param vertex
* the new task vertex to be added
*/
public void addVertex(JobVertex vertex) {
final JobVertexID id = vertex.getID();
JobVertex previous = taskVertices.put(id, vertex);
if (previous != null) {
taskVertices.put(id, previous);
throw new IllegalArgumentException("The JobGraph already contains a vertex with that id.");
}
}
/**
* Returns an Iterable to iterate all vertices registered with the job graph.
*
* @return an Iterable to iterate all vertices registered with the job graph
*/
public Iterable<JobVertex> getVertices() {
return this.taskVertices.values();
}
/**
* Returns an array of all job vertices that are registered with the job graph. The order in which the vertices
* appear in the list is not defined.
*
* @return an array of all job vertices that are registered with the job graph
*/
public JobVertex[] getVerticesAsArray() {
return this.taskVertices.values().toArray(new JobVertex[this.taskVertices.size()]);
}
/**
* Returns the number of all vertices.
*
* @return The number of all vertices.
*/
public int getNumberOfVertices() {
return this.taskVertices.size();
}
public Set<SlotSharingGroup> getSlotSharingGroups() {
final Set<SlotSharingGroup> slotSharingGroups = IterableUtils
.toStream(getVertices())
.map(JobVertex::getSlotSharingGroup)
.collect(Collectors.toSet());
return Collections.unmodifiableSet(slotSharingGroups);
}
/**
* Sets the settings for asynchronous snapshots. A value of {@code null} means that
* snapshotting is not enabled.
*
* @param settings The snapshot settings
*/
public void setSnapshotSettings(JobCheckpointingSettings settings) {
this.snapshotSettings = settings;
}
/**
* Gets the settings for asynchronous snapshots. This method returns null, when
* checkpointing is not enabled.
*
* @return The snapshot settings
*/
public JobCheckpointingSettings getCheckpointingSettings() {
return snapshotSettings;
}
/**
* Checks if the checkpointing was enabled for this job graph.
*
* @return true if checkpointing enabled
*/
public boolean isCheckpointingEnabled() {
if (snapshotSettings == null) {
return false;
}
long checkpointInterval = snapshotSettings.getCheckpointCoordinatorConfiguration().getCheckpointInterval();
return checkpointInterval > 0 &&
checkpointInterval < Long.MAX_VALUE;
}
/**
* Searches for a vertex with a matching ID and returns it.
*
* @param id
* the ID of the vertex to search for
* @return the vertex with the matching ID or <code>null</code> if no vertex with such ID could be found
*/
public JobVertex findVertexByID(JobVertexID id) {
return this.taskVertices.get(id);
}
/**
* Sets the classpaths required to run the job on a task manager.
*
* @param paths paths of the directories/JAR files required to run the job on a task manager
*/
public void setClasspaths(List<URL> paths) {
classpaths = paths;
}
public List<URL> getClasspaths() {
return classpaths;
}
/**
* Gets the maximum parallelism of all operations in this job graph.
*
* @return The maximum parallelism of this job graph
*/
public int getMaximumParallelism() {
int maxParallelism = -1;
for (JobVertex vertex : taskVertices.values()) {
maxParallelism = Math.max(vertex.getParallelism(), maxParallelism);
}
return maxParallelism;
}
public List<JobVertex> getVerticesSortedTopologicallyFromSources() throws InvalidProgramException {
if (this.taskVertices.isEmpty()) {
return Collections.emptyList();
}
List<JobVertex> sorted = new ArrayList<JobVertex>(this.taskVertices.size());
Set<JobVertex> remaining = new LinkedHashSet<JobVertex>(this.taskVertices.values());
{
Iterator<JobVertex> iter = remaining.iterator();
while (iter.hasNext()) {
JobVertex vertex = iter.next();
if (vertex.hasNoConnectedInputs()) {
sorted.add(vertex);
iter.remove();
}
}
}
int startNodePos = 0;
while (!remaining.isEmpty()) {
if (startNodePos >= sorted.size()) {
throw new InvalidProgramException("The job graph is cyclic.");
}
JobVertex current = sorted.get(startNodePos++);
addNodesThatHaveNoNewPredecessors(current, sorted, remaining);
}
return sorted;
}
private void addNodesThatHaveNoNewPredecessors(JobVertex start, List<JobVertex> target, Set<JobVertex> remaining) {
for (IntermediateDataSet dataSet : start.getProducedDataSets()) {
for (JobEdge edge : dataSet.getConsumers()) {
JobVertex v = edge.getTarget();
if (!remaining.contains(v)) {
continue;
}
boolean hasNewPredecessors = false;
for (JobEdge e : v.getInputs()) {
if (e == edge) {
continue;
}
IntermediateDataSet source = e.getSource();
if (remaining.contains(source.getProducer())) {
hasNewPredecessors = true;
break;
}
}
if (!hasNewPredecessors) {
target.add(v);
remaining.remove(v);
addNodesThatHaveNoNewPredecessors(v, target, remaining);
}
}
}
}
/**
* Adds the path of a JAR file required to run the job on a task manager.
*
* @param jar
* path of the JAR file required to run the job on a task manager
*/
public void addJar(Path jar) {
if (jar == null) {
throw new IllegalArgumentException();
}
if (!userJars.contains(jar)) {
userJars.add(jar);
}
}
/**
* Adds the given jar files to the {@link JobGraph} via {@link JobGraph
*
* @param jarFilesToAttach a list of the {@link URL URLs} of the jar files to attach to the jobgraph.
* @throws RuntimeException if a jar URL is not valid.
*/
public void addJars(final List<URL> jarFilesToAttach) {
for (URL jar : jarFilesToAttach) {
try {
addJar(new Path(jar.toURI()));
} catch (URISyntaxException e) {
throw new RuntimeException("URL is invalid. This should not happen.", e);
}
}
}
/**
* Gets the list of assigned user jar paths.
*
* @return The list of assigned user jar paths
*/
public List<Path> getUserJars() {
return userJars;
}
/**
* Adds the path of a custom file required to run the job on a task manager.
*
* @param name a name under which this artifact will be accessible through {@link DistributedCache}
* @param file path of a custom file required to run the job on a task manager
*/
public void addUserArtifact(String name, DistributedCache.DistributedCacheEntry file) {
if (file == null) {
throw new IllegalArgumentException();
}
userArtifacts.putIfAbsent(name, file);
}
/**
* Gets the list of assigned user jar paths.
*
* @return The list of assigned user jar paths
*/
public Map<String, DistributedCache.DistributedCacheEntry> getUserArtifacts() {
return userArtifacts;
}
/**
* Adds the BLOB referenced by the key to the JobGraph's dependencies.
*
* @param key
* path of the JAR file required to run the job on a task manager
*/
public void addUserJarBlobKey(PermanentBlobKey key) {
if (key == null) {
throw new IllegalArgumentException();
}
if (!userJarBlobKeys.contains(key)) {
userJarBlobKeys.add(key);
}
}
/**
* Checks whether the JobGraph has user code JAR files attached.
*
* @return True, if the JobGraph has user code JAR files attached, false otherwise.
*/
public boolean hasUsercodeJarFiles() {
return this.userJars.size() > 0;
}
/**
* Returns a set of BLOB keys referring to the JAR files required to run this job.
*
* @return set of BLOB keys referring to the JAR files required to run this job
*/
public List<PermanentBlobKey> getUserJarBlobKeys() {
return this.userJarBlobKeys;
}
@Override
public String toString() {
return "JobGraph(jobId: " + jobID + ")";
}
public void setUserArtifactBlobKey(String entryName, PermanentBlobKey blobKey) throws IOException {
byte[] serializedBlobKey;
serializedBlobKey = InstantiationUtil.serializeObject(blobKey);
userArtifacts.computeIfPresent(entryName, (key, originalEntry) -> new DistributedCache.DistributedCacheEntry(
originalEntry.filePath,
originalEntry.isExecutable,
serializedBlobKey,
originalEntry.isZipped
));
}
public void setUserArtifactRemotePath(String entryName, String remotePath) {
userArtifacts.computeIfPresent(entryName, (key, originalEntry) -> new DistributedCache.DistributedCacheEntry(
remotePath,
originalEntry.isExecutable,
null,
originalEntry.isZipped
));
}
public void writeUserArtifactEntriesToConfiguration() {
for (Map.Entry<String, DistributedCache.DistributedCacheEntry> userArtifact : userArtifacts.entrySet()) {
DistributedCache.writeFileInfoToConfig(
userArtifact.getKey(),
userArtifact.getValue(),
jobConfiguration
);
}
}
} |
Oh, I see! You cache the `Uni` and resubscribe to it every time (well Quarkus does). That's actually interesting, but I'm not sure it's what the user would expect. Imagine: ```java @GET public Uni<String> callMyRemoteService() { return webClient.send().map(r -> r.bodyAsString()); } ``` Basically, it calls a remote service. If you cache the result, what would/should happen? 1. the response is cached - the users will get the same response, avoiding calls to the remote service 2. the uni is cached - to every time there is a request, there is another subscription calling the remote service I would have said 1, but it looks like 2 has been implemented. Can you confirm? | public Uni<String> cachedMethod(String key) {
invocations++;
return Uni.createFrom().item(() -> new String());
} | return Uni.createFrom().item(() -> new String()); | public Uni<String> cachedMethod(String key) {
invocations++;
return Uni.createFrom().item(() -> {
subscriptions++;
return "" + subscriptions;
});
} | class CachedService {
private int invocations;
@CacheResult(cacheName = "test-cache")
public int getInvocations() {
return invocations;
}
} | class CachedService {
private int invocations;
private int subscriptions;
@CacheResult(cacheName = "test-cache")
public int getInvocations() {
return invocations;
}
} |
"If you could abstract the query schedule strategy like `Presto` and refactor this class, which woul(...TRUNCATED) | "private void computeFragmentExecParams() throws Exception {\ncomputeFragmentHosts();\ninstanceIds.c(...TRUNCATED) | if (bucketShuffleJoinController.isBucketShuffleJoin(destFragment.getFragmentId().asInt())) { | "private void computeFragmentExecParams() throws Exception {\ncomputeFragmentHosts();\ninstanceIds.c(...TRUNCATED) | "class Coordinator {\nprivate static final Logger LOG = LogManager.getLogger(Coordinator.class);\npr(...TRUNCATED) | "class Coordinator {\nprivate static final Logger LOG = LogManager.getLogger(Coordinator.class);\npr(...TRUNCATED) |
"Currently, the shift operation for tuples is not supported by the runtime. Therefore the runtime ch(...TRUNCATED) | "private void validateTupleSizeAndInherentType() {\nint numOfMandatoryTypes = this.tupleType.getTupl(...TRUNCATED) | this.tupleType.getRestType() : this.tupleType.getTupleTypes().get(i)); | "private void validateTupleSizeAndInherentType() {\nList<Type> tupleTypesList = this.tupleType.getTu(...TRUNCATED) | "class TupleValueImpl extends AbstractArrayValue {\nprotected TupleType tupleType;\nprotected Type t(...TRUNCATED) | "class TupleValueImpl extends AbstractArrayValue {\nprotected TupleType tupleType;\nprotected Type t(...TRUNCATED) |
"These two return two different results. Original one is : 2 ^ (tryCount-1) Now: (tryCount -1) ^ 2 (...TRUNCATED) | "long calculateDelayInMs(int tryCount) {\nlong delay;\nswitch (this.retryPolicyType) {\ncase EXPONEN(...TRUNCATED) | delay = ((tryCount - 1) * (tryCount - 1) - 1L) * this.retryDelayInMs; | "long calculateDelayInMs(int tryCount) {\nlong delay;\nswitch (this.retryPolicyType) {\ncase EXPONEN(...TRUNCATED) | "class RequestRetryOptions {\nprivate final ClientLogger logger = new ClientLogger(RequestRetryOptio(...TRUNCATED) | "class RequestRetryOptions {\nprivate final ClientLogger logger = new ClientLogger(RequestRetryOptio(...TRUNCATED) |
End of preview. Expand
in Dataset Viewer.
README.md exists but content is empty.
- Downloads last month
- 30