| language (stringclasses 5) | text (stringlengths 15–988k) |
|---|---|
Java | public class DirectInsert {
/**
* This method is called when executing this sample application from the
* command line.
*
* @param args the command line parameters
*/
public static void main(String... args) throws Exception {
Class.forName("org.h2.Driver");
DeleteDbFiles.execute("~", "test", true);
String url = "jdbc:h2:~/test";
initialInsert(url, 200000);
for (int i = 0; i < 3; i++) {
createAsSelect(url, true);
createAsSelect(url, false);
}
}
private static void initialInsert(String url, int len) throws SQLException {
Connection conn = DriverManager.getConnection(url + ";LOG=0");
Statement stat = conn.createStatement();
stat.execute("DROP TABLE IF EXISTS TEST");
stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR)");
PreparedStatement prep = conn.prepareStatement(
"INSERT INTO TEST VALUES(?, 'Test' || SPACE(100))");
long time = System.currentTimeMillis();
for (int i = 0; i < len; i++) {
long now = System.currentTimeMillis();
if (now > time + 1000) {
time = now;
System.out.println("Inserting " + (100L * i / len) + "%");
}
prep.setInt(1, i);
prep.execute();
}
conn.commit();
prep.close();
stat.close();
conn.close();
}
private static void createAsSelect(String url, boolean optimize)
throws SQLException {
Connection conn = DriverManager.getConnection(url +
";OPTIMIZE_INSERT_FROM_SELECT=" + optimize);
Statement stat = conn.createStatement();
stat.execute("DROP TABLE IF EXISTS TEST2");
System.out.println("CREATE TABLE ... AS SELECT " +
(optimize ? "(optimized)" : ""));
long time = System.currentTimeMillis();
stat.execute("CREATE TABLE TEST2 AS SELECT * FROM TEST");
System.out.printf("%.3f sec.\n",
(System.currentTimeMillis() - time) / 1000.0);
stat.execute("INSERT INTO TEST2 SELECT * FROM TEST2");
stat.close();
conn.close();
}
} |
Java | public class SchemaCompatibilityValidator implements FilterPredicate.Visitor<Void> {
public static void validate(FilterPredicate predicate, MessageType schema) {
checkNotNull(predicate, "predicate");
checkNotNull(schema, "schema");
predicate.accept(new SchemaCompatibilityValidator(schema));
}
// A map of column name to the type the user supplied for this column.
// Used to validate that the user did not provide different types for the same
// column.
private final Map<ColumnPath, Class<?>> columnTypesEncountered = new HashMap<ColumnPath, Class<?>>();
// the columns (keyed by path) according to the file's schema. This is the source of truth, and
// we are validating that what the user provided agrees with these.
private final Map<ColumnPath, ColumnDescriptor> columnsAccordingToSchema = new HashMap<ColumnPath, ColumnDescriptor>();
// the original type of a column, keyed by path
private final Map<ColumnPath, OriginalType> originalTypes = new HashMap<ColumnPath, OriginalType>();
private SchemaCompatibilityValidator(MessageType schema) {
for (ColumnDescriptor cd : schema.getColumns()) {
ColumnPath columnPath = ColumnPath.get(cd.getPath());
columnsAccordingToSchema.put(columnPath, cd);
OriginalType ot = schema.getType(cd.getPath()).getOriginalType();
if (ot != null) {
originalTypes.put(columnPath, ot);
}
}
}
@Override
public <T extends Comparable<T>> Void visit(Eq<T> pred) {
validateColumnFilterPredicate(pred);
return null;
}
@Override
public <T extends Comparable<T>> Void visit(NotEq<T> pred) {
validateColumnFilterPredicate(pred);
return null;
}
@Override
public <T extends Comparable<T>> Void visit(Lt<T> pred) {
validateColumnFilterPredicate(pred);
return null;
}
@Override
public <T extends Comparable<T>> Void visit(LtEq<T> pred) {
validateColumnFilterPredicate(pred);
return null;
}
@Override
public <T extends Comparable<T>> Void visit(Gt<T> pred) {
validateColumnFilterPredicate(pred);
return null;
}
@Override
public <T extends Comparable<T>> Void visit(GtEq<T> pred) {
validateColumnFilterPredicate(pred);
return null;
}
@Override
public Void visit(And and) {
and.getLeft().accept(this);
and.getRight().accept(this);
return null;
}
@Override
public Void visit(Or or) {
or.getLeft().accept(this);
or.getRight().accept(this);
return null;
}
@Override
public Void visit(Not not) {
not.getPredicate().accept(this);
return null;
}
@Override
public <T extends Comparable<T>, U extends UserDefinedPredicate<T>> Void visit(UserDefined<T, U> udp) {
validateColumn(udp.getColumn());
return null;
}
@Override
public <T extends Comparable<T>, U extends UserDefinedPredicate<T>> Void visit(LogicalNotUserDefined<T, U> udp) {
return udp.getUserDefined().accept(this);
}
private <T extends Comparable<T>> void validateColumnFilterPredicate(ColumnFilterPredicate<T> pred) {
validateColumn(pred.getColumn());
}
private <T extends Comparable<T>> void validateColumn(Column<T> column) {
ColumnPath path = column.getColumnPath();
Class<?> alreadySeen = columnTypesEncountered.get(path);
if (alreadySeen != null && !alreadySeen.equals(column.getColumnType())) {
throw new IllegalArgumentException("Column: "
+ path.toDotString()
+ " was provided with different types in the same predicate."
+ " Found both: (" + alreadySeen + ", " + column.getColumnType() + ")");
}
if (alreadySeen == null) {
columnTypesEncountered.put(path, column.getColumnType());
}
ColumnDescriptor descriptor = getColumnDescriptor(path);
if (descriptor.getMaxRepetitionLevel() > 0) {
throw new IllegalArgumentException("FilterPredicates do not currently support repeated columns. "
+ "Column " + path.toDotString() + " is repeated.");
}
ValidTypeMap.assertTypeValid(column, descriptor.getType(), originalTypes.get(path));
}
private ColumnDescriptor getColumnDescriptor(ColumnPath columnPath) {
ColumnDescriptor cd = columnsAccordingToSchema.get(columnPath);
checkArgument(cd != null, "Column " + columnPath + " was not found in schema!");
return cd;
}
} |
Java | public class Lines extends Generic implements Serializable {
private static final long serialVersionUID = 1376311472836304996L;
public boolean DEBUG = false;
ArrayList<Node[]> transformedCoordinates;
ArrayList<Coordinate[]> rawCoordinates;
/**
* Instantiate the GeoLine object with a
* BoundingBox and String. Depending on
* whether the String ends in .csv or .shp
* we will employ the appropriate tool to
* open and read the data.
* @param b the bounding box of the default view
* @param r a String that allows us to determine the type of resource to open
*/
public Lines(BoundingBox b, String r) {
super(b,r);
}
/**
* Instantiate the GeoLine object with a
* BoundingBox and Feature Collection object.
* @param b the bounding box of the default viewport
* @param f a reference to FeatureCollection of SimpleFeature objects
*/
public Lines(BoundingBox b, FeatureCollection<SimpleFeatureType,SimpleFeature> f) {
super(b,f);
}
/**
* You don't normally need to call this directly,
* as it will be done for you when you ask for the
* file to be projected in the Applet. However, I
* have left the method public to offer people the
* opportunity to improvise or improve on the code.
* @param a a Processing PApplet object
*/
public void transformCoordinates(PApplet a) {
if (a.width == this.appletWidth && a.height == this.appletHeight) {
// Do nothing
} else {
if (this.appletWidth == 0) {
this.appletWidth = a.width;
}
if (this.appletHeight == 0) {
this.appletHeight = a.height;
}
if (super.featureCollection != null) {
transformedCoordinates = new ArrayList<Node[]>();
rawCoordinates = new ArrayList<Coordinate[]>();
/*
* featureCollection was instantiated in the superclass
*/
FeatureIterator<SimpleFeature> iterator = super.featureCollection.features();
/*
* Now loop through the geometries to
* set everything up with the correct
* X/Y coordinates
*/
try {
while (iterator.hasNext()) {
SimpleFeature feature = iterator.next();
Geometry theGeom = (Geometry) feature.getDefaultGeometry();
SimpleFeatureType def = feature.getFeatureType();
for (int i = 0; i < theGeom.getNumGeometries(); i++) {
//System.out.println("Got geometry " + i);
Geometry g = theGeom.getGeometryN(i);
String theName = "";
Double theValue = 0d;
if (this.labelPosition > 0) {
theName = (String) feature.getAttribute(this.labelPosition);
} else if (def.indexOf(this.labelName) != -1) {
theName = (String) feature.getAttribute(this.labelName);
}
if (this.valuePosition > 0) {
theValue = (Double) feature.getAttribute(this.valuePosition);
} else if (def.indexOf(this.valueName) != -1) {
theValue = (Double) feature.getAttribute(this.valueName);
}
if (g.getGeometryType().equalsIgnoreCase("linestring")) {
Coordinate[] c = null;
LineString l = (LineString) g;
if (this.localSimplify > 0) {
c = DouglasPeuckerLineSimplifier.simplify(l.getCoordinates(), this.localSimplify);
} else {
c = l.getCoordinates();
}
rawCoordinates.add(c);
try {
Node[] t = new Node[c.length];
for (int j = 0; j < c.length; j++) {
t[j] = new Node(
c[j].hashCode(),
this.map((float) c[j].x, box.getWest(), box.getEast(), 0f, a.width),
this.map((float) c[j].y, box.getNorth(), box.getSouth(), 0f, a.height),
theValue.doubleValue(),
theName
);
if (DEBUG == true) {
System.out.println("X-axis: " + c[j].x + " is mapped from geographical range (" + box.getWest() + "," + box.getEast() + ") on to display range (0f," + a.width + ") as " + this.map((float) c[j].x, box.getWest(), box.getEast(), 0f, a.width));
System.out.println("Y-axis: " + c[j].y + " is mapped from geographical range (" + box.getNorth() + "," + box.getSouth() + ") on to display range (0f," + a.height + ") as " + this.map((float) c[j].y, box.getNorth(), box.getSouth(), 0f, a.height));
}
}
transformedCoordinates.add(t);
} catch (NullPointerException e2) {
System.out.println("NullPointerException " + e2);
}
} else {
System.out.println("Have instantiated a MultiLine object but with geometry of type " + g.getGeometryType());
}
}
}
} finally {
if (iterator != null) {
iterator.close();
}
}
}
}
}
/**
* Return an ArrayList<Node[]>, each row of which contains
* an array of Nodes. The idea is that each
* array represents a separate set of lines that can be
* drawn in the sketch. To some extent this is just a
* stub left over from a previous implementation, but it
* does provide us with the capacity to insert some logic
* in between asking for the coordinates and getting back
* the transformed result.
* @return ArrayList<Node[]>
*/
public ArrayList<Node[]> getCoordinates(PApplet a) {
this.transformCoordinates(a);
return this.transformedCoordinates;
}
/**
* Returns an ArrayList<Node[]> of the raw
* coordinates contained in the shape file. You don't
* normally need to worry about these, but they could
* be useful if you wanted to completely bypass the
* mapping process for some reason (e.g. you won't to
* show things in polar coordinates).
* @return ArrayList<Node[]>
*/
public ArrayList<Coordinate[]> getCoordinates() {
return this.rawCoordinates;
}
/**
* Draws all of the lines contained in a
* Lines object loaded from a file.
* The simplest way to work with this method is
* to set the color, fill, and stroke in your sketch
* and then just call this function by passing it
* the Applet instance (probably by saying: <code>object.project(this)</code>.
* @param a a Processing PApplet object
*/
public void project(PApplet a) {
ArrayList<Node[]> al = this.getCoordinates(a);
//println("Multiline array list returned " + a.size());
for (int i = 0; i < al.size(); i++) {
Node[] line = al.get(i);
for (int j = 0; j < line.length-1; j++) {
a.g.line(line[j].getX(), line[j].getY(), line[j+1].getX(), line[j+1].getY());
}
}
}
/**
* Apply a color scale to the the polygons
* so that it is possible to represent the values
* in a useful way.
* @param a a Processing PApplet object
* @param min the minimum value of the value field (I will try to make this automatic in later releases)
* @param max the maximum value of the value field (I will try to make this automatic in later releases)
*/
public void projectValues(PApplet a, float min, float max) {
ArrayList<Node[]> al = this.getCoordinates(a);
//println("Multiline array list returned " + al.size());
for (int i = 0; i < al.size(); i++) {
Node[] line = al.get(i);
a.g.stroke(this.interpolateColor(a, (float) line[0].getValue(), min, max));
for (int j = 0; j < line.length-1; j++) {
a.g.line(line[j].getX(), line[j].getY(), line[j+1].getX(), line[j+1].getY());
}
}
}
/**
* <p>Draws all of the lines contained in a
* Lines object loaded from a file into a
* PGraphics object so that you can use it
* as a buffer.</p>
* <p.The simplest way to work with this method is
* to set the color, fill, and stroke in your sketch
* and then just call this function by passing it
* the Applet instance and a PGraphics object.</p>
* @param a a Processing PApplet object
* @param p a Processing PGraphics
*/
public void project(PApplet a, PGraphics p) {
ArrayList<Node[]> al = this.getCoordinates(a);
//println("Multiline array list returned " + a.size());
for (int i = 0; i < al.size(); i++) {
Node[] line = al.get(i);
for (int j = 0; j < line.length-1; j++) {
p.line(line[j].getX(), line[j].getY(), line[j+1].getX(), line[j+1].getY());
}
}
}
/**
* <p>Reverses the order of the points in a polygon.
* Note that this will <em>only</em> do something <em>if</em>
* you have already called transformCoordinates or project
* once (since it only works on the points that have been
* mapped into the Processing sketch.</p>
*/
public void reverse() {
if (this.transformedCoordinates.size() > 0) {
for (int i=0; i < this.transformedCoordinates.size(); i++) {
Collections.reverse(Arrays.asList(this.transformedCoordinates.get(i)));
}
Collections.reverse(this.transformedCoordinates);
}
}
} |
Java | public class ExtendedHTTPEventAdapter implements OutputEventAdapter {
private static final Log log = LogFactory.getLog(ExtendedHTTPEventAdapter.class);
private OutputEventAdapterConfiguration eventAdapterConfiguration;
private Map<String, String> globalProperties;
private static ExecutorService executorService;
private String clientMethod;
private int tenantId;
private String contentType;
private static HttpConnectionManager connectionManager;
private static HttpClient httpClient = null;
private HostConfiguration hostConfiguration = null;
private AccessTokenGenerator accessTokenGenerator;
private String oauthURL;
public ExtendedHTTPEventAdapter(OutputEventAdapterConfiguration eventAdapterConfiguration,
Map<String, String> globalProperties) {
this.eventAdapterConfiguration = eventAdapterConfiguration;
this.globalProperties = globalProperties;
this.clientMethod = eventAdapterConfiguration.getStaticProperties()
.get(ExtendedHTTPEventAdapterConstants.ADAPTER_HTTP_CLIENT_METHOD);
}
@Override
public void init() throws OutputEventAdapterException {
tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
//ExecutorService will be assigned if it is null
if (executorService == null) {
int minThread;
int maxThread;
long defaultKeepAliveTime;
int jobQueSize;
//If global properties are available those will be assigned else constant values will be assigned
if (globalProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_MIN_THREAD_POOL_SIZE_NAME) != null) {
minThread = Integer.parseInt(
globalProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_MIN_THREAD_POOL_SIZE_NAME));
} else {
minThread = ExtendedHTTPEventAdapterConstants.ADAPTER_MIN_THREAD_POOL_SIZE;
}
if (globalProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_MAX_THREAD_POOL_SIZE_NAME) != null) {
maxThread = Integer.parseInt(
globalProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_MAX_THREAD_POOL_SIZE_NAME));
} else {
maxThread = ExtendedHTTPEventAdapterConstants.ADAPTER_MAX_THREAD_POOL_SIZE;
}
if (globalProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_KEEP_ALIVE_TIME_NAME) != null) {
defaultKeepAliveTime = Integer.parseInt(
globalProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_KEEP_ALIVE_TIME_NAME));
} else {
defaultKeepAliveTime = ExtendedHTTPEventAdapterConstants.DEFAULT_KEEP_ALIVE_TIME_IN_MILLIS;
}
if (globalProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_EXECUTOR_JOB_QUEUE_SIZE_NAME) != null) {
jobQueSize = Integer.parseInt(
globalProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_EXECUTOR_JOB_QUEUE_SIZE_NAME));
} else {
jobQueSize = ExtendedHTTPEventAdapterConstants.ADAPTER_EXECUTOR_JOB_QUEUE_SIZE;
}
executorService = new ThreadPoolExecutor(minThread, maxThread, defaultKeepAliveTime, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>(jobQueSize));
//configurations for the httpConnectionManager which will be shared by every http adapter
int defaultMaxConnectionsPerHost;
int maxTotalConnections;
if (globalProperties.get(ExtendedHTTPEventAdapterConstants.DEFAULT_MAX_CONNECTIONS_PER_HOST) != null) {
defaultMaxConnectionsPerHost = Integer
.parseInt(globalProperties
.get(ExtendedHTTPEventAdapterConstants.DEFAULT_MAX_CONNECTIONS_PER_HOST));
} else {
defaultMaxConnectionsPerHost =
ExtendedHTTPEventAdapterConstants.DEFAULT_DEFAULT_MAX_CONNECTIONS_PER_HOST;
}
if (globalProperties.get(ExtendedHTTPEventAdapterConstants.MAX_TOTAL_CONNECTIONS) != null) {
maxTotalConnections = Integer
.parseInt(globalProperties.get(ExtendedHTTPEventAdapterConstants.MAX_TOTAL_CONNECTIONS));
} else {
maxTotalConnections = ExtendedHTTPEventAdapterConstants.DEFAULT_MAX_TOTAL_CONNECTIONS;
}
connectionManager = new MultiThreadedHttpConnectionManager();
connectionManager.getParams().setDefaultMaxConnectionsPerHost(defaultMaxConnectionsPerHost);
connectionManager.getParams().setMaxTotalConnections(maxTotalConnections);
Map<String, String> staticProperties = eventAdapterConfiguration.getStaticProperties();
if (staticProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_OAUTH_CONSUMER_KEY) != null) {
accessTokenGenerator = new AccessTokenGenerator(
staticProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_OAUTH_URL),
staticProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_OAUTH_CONSUMER_KEY),
staticProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_OAUTH_CONSUMER_SECRET));
this.oauthURL = staticProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_OAUTH_URL);
}
}
}
@Override
public void testConnect() throws TestConnectionNotSupportedException {
throw new TestConnectionNotSupportedException("Test connection is not available");
}
@Override
public void connect() {
this.checkHTTPClientInit(eventAdapterConfiguration.getStaticProperties());
}
@Override
public void publish(Object message, Map<String, String> dynamicProperties) {
//Load dynamic properties
String url = dynamicProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_MESSAGE_URL);
String username = dynamicProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_USERNAME);
String password = dynamicProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_PASSWORD);
Map<String, String> headers = this
.extractHeaders(dynamicProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_HEADERS));
String payload = message.toString();
try {
if (accessTokenGenerator != null) {
if (this.oauthURL == null) {
try {
URL endpointURL = new URL(url);
this.oauthURL = endpointURL.getProtocol() + "://" + endpointURL.getHost() + ":"
+ endpointURL.getPort();
accessTokenGenerator.setOauthUrl(oauthURL);
} catch (MalformedURLException e) {
EventAdapterUtil.logAndDrop(eventAdapterConfiguration.getName(), message,
"Incorrect end point configurations", log, tenantId);
}
}
String accessToken = accessTokenGenerator.getAccessToken();
executorService.execute(new HTTPSender(url, payload, accessToken, headers, httpClient));
} else if (username != null && password != null) {
executorService.execute(new HTTPSender(url, payload, username, password, headers, httpClient));
} else {
EventAdapterUtil.logAndDrop(eventAdapterConfiguration.getName(), message,
"Incorrect publishing configurations", log, tenantId);
}
} catch (RejectedExecutionException e) {
EventAdapterUtil.logAndDrop(eventAdapterConfiguration.getName(), message, "Job queue is full", e,
log, tenantId);
}
}
@Override
public void disconnect() {
//not required
}
@Override
public void destroy() {
//not required
}
@Override
public boolean isPolled() {
return false;
}
private void checkHTTPClientInit(Map<String, String> staticProperties) {
if (this.httpClient != null) {
return;
}
synchronized (ExtendedHTTPEventAdapter.class) {
if (this.httpClient != null) {
return;
}
httpClient = new HttpClient(connectionManager);
String proxyHost = staticProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_PROXY_HOST);
String proxyPort = staticProperties.get(ExtendedHTTPEventAdapterConstants.ADAPTER_PROXY_PORT);
if (proxyHost != null && proxyHost.trim().length() > 0) {
try {
HttpHost host = new HttpHost(proxyHost, Integer.parseInt(proxyPort));
this.httpClient.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, host);
} catch (NumberFormatException e) {
log.error("Invalid proxy port: " + proxyPort + ", "
+ "ignoring proxy settings for HTTP output event adaptor.", e);
}
}
String messageFormat = eventAdapterConfiguration.getMessageFormat();
if (ExtendedHTTPEventAdapterConstants.JSON_STRING.equalsIgnoreCase(messageFormat)) {
contentType = ExtendedHTTPEventAdapterConstants.APPLICATION_JSON_CONTENT_TYPE;
} else if (ExtendedHTTPEventAdapterConstants.TEXT_STRING.equalsIgnoreCase(messageFormat)) {
contentType = ExtendedHTTPEventAdapterConstants.TEXT_PLAIN_CONTENT_TYPE;
} else {
contentType = ExtendedHTTPEventAdapterConstants.TEXT_XML_CONTENT_TYPE;
}
}
}
private Map<String, String> extractHeaders(String headers) {
if (headers == null || headers.isEmpty()) {
return null;
}
String[] entries = headers.split(ExtendedHTTPEventAdapterConstants.HEADER_SEPARATOR);
String[] keyValue;
Map<String, String> result = new HashMap<String, String>();
for (String header : entries) {
keyValue = header.split(ExtendedHTTPEventAdapterConstants.ENTRY_SEPARATOR, 2);
if (keyValue.length == 2) {
result.put(keyValue[0].trim(), keyValue[1].trim());
} else {
log.warn("Header property '" + header + "' is not defined in the correct format.");
}
}
return result;
}
/**
* This class represents a job to send an HTTP request to a target URL.
*/
class HTTPSender implements Runnable {
private String url;
private String payload;
private String accessToken;
private String userName;
private String password;
private Map<String, String> headers;
private HttpClient httpClient;
public HTTPSender(String url, String payload, String accessToken, Map<String, String> headers,
HttpClient httpClient) {
this.url = url;
this.payload = payload;
this.accessToken = accessToken;
this.headers = headers;
this.httpClient = httpClient;
}
/**
* If user name and password is given, basic auth is used. If not OAuth2 is used.
*/
public HTTPSender(String url, String payload, String userName, String password, Map<String, String> headers,
HttpClient httpClient) {
this.url = url;
this.payload = payload;
this.userName = userName;
this.password = password;
this.headers = headers;
this.httpClient = httpClient;
}
public String getUrl() {
return url;
}
public String getPayload() {
return payload;
}
public String getAccessToken() {
return accessToken;
}
public Map<String, String> getHeaders() {
return headers;
}
public HttpClient getHttpClient() {
return httpClient;
}
public String getUserName() {
return userName;
}
public String getPassword() {
return password;
}
public void run() {
EntityEnclosingMethod method = null;
try {
if (clientMethod.equalsIgnoreCase(ExtendedHTTPEventAdapterConstants.CONSTANT_HTTP_PUT)) {
method = new PutMethod(this.getUrl());
} else {
method = new PostMethod(this.getUrl());
}
if (hostConfiguration == null) {
URL hostUrl = new URL(this.getUrl());
hostConfiguration = new HostConfiguration();
hostConfiguration.setHost(hostUrl.getHost(), hostUrl.getPort(), hostUrl.getProtocol());
}
method.setRequestEntity(new StringRequestEntity(this.getPayload(), contentType,
ExtendedHTTPEventAdapterConstants.UTF_EIGHT_CONTENT_TYPE));
if (this.getAccessToken() != null && !this.getAccessToken().isEmpty()) {
method.setRequestHeader(ExtendedHTTPEventAdapterConstants.AUTHORIZATION_HEADER_DEFAULT,
ExtendedHTTPEventAdapterConstants.AUTHORIZATION_BEARER + this.getAccessToken());
} else if (this.getUserName() != null && this.getPassword() != null) {
method.setRequestHeader(ExtendedHTTPEventAdapterConstants.AUTHORIZATION_HEADER_DEFAULT,
ExtendedHTTPEventAdapterConstants.AUTHORIZATION_BASIC + Base64
.encode((this.getUserName() + ExtendedHTTPEventAdapterConstants.ENTRY_SEPARATOR + this
.getPassword()).getBytes()));
}
if (this.getHeaders() != null) {
for (Map.Entry<String, String> header : this.getHeaders().entrySet()) {
method.setRequestHeader(header.getKey(), header.getValue());
}
}
int statusCode = this.getHttpClient().executeMethod(hostConfiguration, method);
if (statusCode == HttpStatus.SC_UNAUTHORIZED && accessTokenGenerator != null){
accessTokenGenerator.removeInvalidToken(new String[]{APIConstants.OAUTH2_DEFAULT_SCOPE});
}
} catch (IOException e) {
EventAdapterUtil.logAndDrop(eventAdapterConfiguration.getName(), this.getPayload(),
"Cannot connect to " + this.getUrl(), e, log, tenantId);
} finally {
if (method != null) {
method.releaseConnection();
}
}
}
}
} |
/**
 * Runnable job that sends one HTTP request (POST by default, PUT when the
 * adapter's client method is PUT) carrying the given payload.
 *
 * NOTE(review): this class reads clientMethod, hostConfiguration, contentType,
 * accessTokenGenerator, eventAdapterConfiguration, log and tenantId, none of
 * which are declared here — it appears to be an inner class of an HTTP output
 * event adapter; confirm the enclosing class provides these members.
 */
class HTTPSender implements Runnable {
// Target endpoint URL.
private String url;
// Request body to send.
private String payload;
// OAuth2 bearer token; used when non-null and non-empty.
private String accessToken;
// Basic-auth credentials; used only when no access token is set.
private String userName;
private String password;
// Additional request headers; may be null.
private Map<String, String> headers;
// HTTP client used to execute the request.
private HttpClient httpClient;
/**
 * Creates a sender that authenticates with an OAuth2 bearer token.
 */
public HTTPSender(String url, String payload, String accessToken, Map<String, String> headers,
HttpClient httpClient) {
this.url = url;
this.payload = payload;
this.accessToken = accessToken;
this.headers = headers;
this.httpClient = httpClient;
}
/**
 * If user name and password is given, basic auth is used. If not OAuth2 is used.
 */
public HTTPSender(String url, String payload, String userName, String password, Map<String, String> headers,
HttpClient httpClient) {
this.url = url;
this.payload = payload;
this.userName = userName;
this.password = password;
this.headers = headers;
this.httpClient = httpClient;
}
public String getUrl() {
return url;
}
public String getPayload() {
return payload;
}
public String getAccessToken() {
return accessToken;
}
public Map<String, String> getHeaders() {
return headers;
}
public HttpClient getHttpClient() {
return httpClient;
}
public String getUserName() {
return userName;
}
public String getPassword() {
return password;
}
/**
 * Builds and executes the request, then always releases the connection.
 * On IOException the payload is logged and dropped.
 */
public void run() {
EntityEnclosingMethod method = null;
try {
// Choose PUT or POST based on the adapter-level client method setting.
if (clientMethod.equalsIgnoreCase(ExtendedHTTPEventAdapterConstants.CONSTANT_HTTP_PUT)) {
method = new PutMethod(this.getUrl());
} else {
method = new PostMethod(this.getUrl());
}
// Host configuration is captured lazily from the first URL seen.
// NOTE(review): it is shared across sender threads without synchronization
// and never updated for later URLs — confirm this is intended.
if (hostConfiguration == null) {
URL hostUrl = new URL(this.getUrl());
hostConfiguration = new HostConfiguration();
hostConfiguration.setHost(hostUrl.getHost(), hostUrl.getPort(), hostUrl.getProtocol());
}
method.setRequestEntity(new StringRequestEntity(this.getPayload(), contentType,
ExtendedHTTPEventAdapterConstants.UTF_EIGHT_CONTENT_TYPE));
// Bearer token takes precedence; basic auth is the fallback.
if (this.getAccessToken() != null && !this.getAccessToken().isEmpty()) {
method.setRequestHeader(ExtendedHTTPEventAdapterConstants.AUTHORIZATION_HEADER_DEFAULT,
ExtendedHTTPEventAdapterConstants.AUTHORIZATION_BEARER + this.getAccessToken());
} else if (this.getUserName() != null && this.getPassword() != null) {
method.setRequestHeader(ExtendedHTTPEventAdapterConstants.AUTHORIZATION_HEADER_DEFAULT,
ExtendedHTTPEventAdapterConstants.AUTHORIZATION_BASIC + Base64
.encode((this.getUserName() + ExtendedHTTPEventAdapterConstants.ENTRY_SEPARATOR + this
.getPassword()).getBytes()));
}
if (this.getHeaders() != null) {
for (Map.Entry<String, String> header : this.getHeaders().entrySet()) {
method.setRequestHeader(header.getKey(), header.getValue());
}
}
int statusCode = this.getHttpClient().executeMethod(hostConfiguration, method);
// On 401, discard the cached token so a fresh one is fetched next time.
if (statusCode == HttpStatus.SC_UNAUTHORIZED && accessTokenGenerator != null){
accessTokenGenerator.removeInvalidToken(new String[]{APIConstants.OAUTH2_DEFAULT_SCOPE});
}
} catch (IOException e) {
EventAdapterUtil.logAndDrop(eventAdapterConfiguration.getName(), this.getPayload(),
"Cannot connect to " + this.getUrl(), e, log, tenantId);
} finally {
if (method != null) {
method.releaseConnection();
}
}
}
}
/**
 * A simple holder for a pair of objects. Either element may be {@code null}.
 */
public class ObjectPair {

    /** The first object of the pair (may be null). */
    private Object o1;

    /** The second object of the pair (may be null). */
    private Object o2;

    /**
     * Creates a pair whose elements are both {@code null}.
     */
    public ObjectPair() {}

    /**
     * Creates a pair of the two given objects.
     *
     * @param o1 the first object (may be null)
     * @param o2 the second object (may be null)
     */
    public ObjectPair(Object o1, Object o2) {
        this.o1 = o1;
        this.o2 = o2;
    }

    /**
     * Returns the first object of the pair.
     *
     * @return the first object (may be null)
     */
    public Object getObject1() {
        return o1;
    }

    /**
     * Sets the first object of the pair.
     *
     * @param o1 the new first object (may be null)
     */
    public void setObject1(Object o1) {
        this.o1 = o1;
    }

    /**
     * Returns the second object of the pair.
     *
     * @return the second object (may be null)
     */
    public Object getObject2() {
        return o2;
    }

    /**
     * Sets the second object of the pair.
     *
     * @param o2 the new second object (may be null)
     */
    public void setObject2(Object o2) {
        this.o2 = o2;
    }

    /**
     * Returns a hash code combining both elements, consistent with equals.
     *
     * @return the hash code
     */
    @Override
    public int hashCode() {
        if ((o1 != null) && (o2 != null)) {
            return o1.hashCode() ^ o2.hashCode();
        }
        if (o1 != null) {
            return o1.hashCode();
        }
        if (o2 != null) {
            return o2.hashCode();
        }
        // Both elements are null. The original returned super.hashCode()
        // (identity hash), which broke the equals/hashCode contract because
        // two all-null pairs compare equal; a constant keeps them consistent.
        return 0;
    }

    /**
     * Compares this pair with another object element-by-element.
     *
     * @param o the object to compare with
     * @return true if o is an ObjectPair with equal (or both-null) elements
     */
    @Override
    public boolean equals(Object o) {
        if ( !(o instanceof ObjectPair)) {
            return false;
        }
        ObjectPair other = (ObjectPair) o;
        // java.util.Objects.equals is the stdlib null-safe equality check.
        return java.util.Objects.equals(o1, other.o1)
               && java.util.Objects.equals(o2, other.o2);
    }

    /**
     * Returns both elements separated by a space.
     *
     * @return the string form, using "null" for null elements
     */
    @Override
    public String toString() {
        // String.valueOf avoids the NullPointerException the original threw
        // for null elements, which the no-arg constructor explicitly allows.
        return String.valueOf(o1) + " " + String.valueOf(o2);
    }
}
Java | @Generated("by gapic-generator-java")
public class CloudShellServiceStubSettings extends StubSettings<CloudShellServiceStubSettings> {
/** The default scopes of the service. */
private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();
private final UnaryCallSettings<GetEnvironmentRequest, Environment> getEnvironmentSettings;
private final UnaryCallSettings<StartEnvironmentRequest, Operation> startEnvironmentSettings;
private final OperationCallSettings<
StartEnvironmentRequest, StartEnvironmentResponse, StartEnvironmentMetadata>
startEnvironmentOperationSettings;
private final UnaryCallSettings<AuthorizeEnvironmentRequest, Operation>
authorizeEnvironmentSettings;
private final OperationCallSettings<
AuthorizeEnvironmentRequest, AuthorizeEnvironmentResponse, AuthorizeEnvironmentMetadata>
authorizeEnvironmentOperationSettings;
private final UnaryCallSettings<AddPublicKeyRequest, Operation> addPublicKeySettings;
private final OperationCallSettings<
AddPublicKeyRequest, AddPublicKeyResponse, AddPublicKeyMetadata>
addPublicKeyOperationSettings;
private final UnaryCallSettings<RemovePublicKeyRequest, Operation> removePublicKeySettings;
private final OperationCallSettings<
RemovePublicKeyRequest, RemovePublicKeyResponse, RemovePublicKeyMetadata>
removePublicKeyOperationSettings;
/**
 * Returns the object with the settings used for calls to getEnvironment.
 *
 * @return the unary call settings for the GetEnvironment RPC
 */
public UnaryCallSettings<GetEnvironmentRequest, Environment> getEnvironmentSettings() {
return getEnvironmentSettings;
}
/**
 * Returns the object with the settings used for calls to startEnvironment.
 *
 * @return the unary call settings for the StartEnvironment RPC
 */
public UnaryCallSettings<StartEnvironmentRequest, Operation> startEnvironmentSettings() {
return startEnvironmentSettings;
}
/**
 * Returns the object with the settings used for calls to startEnvironment.
 *
 * @return the operation call settings for the StartEnvironment RPC
 */
public OperationCallSettings<
StartEnvironmentRequest, StartEnvironmentResponse, StartEnvironmentMetadata>
startEnvironmentOperationSettings() {
return startEnvironmentOperationSettings;
}
/**
 * Returns the object with the settings used for calls to authorizeEnvironment.
 *
 * @return the unary call settings for the AuthorizeEnvironment RPC
 */
public UnaryCallSettings<AuthorizeEnvironmentRequest, Operation> authorizeEnvironmentSettings() {
return authorizeEnvironmentSettings;
}
/**
 * Returns the object with the settings used for calls to authorizeEnvironment.
 *
 * @return the operation call settings for the AuthorizeEnvironment RPC
 */
public OperationCallSettings<
AuthorizeEnvironmentRequest, AuthorizeEnvironmentResponse, AuthorizeEnvironmentMetadata>
authorizeEnvironmentOperationSettings() {
return authorizeEnvironmentOperationSettings;
}
/**
 * Returns the object with the settings used for calls to addPublicKey.
 *
 * @return the unary call settings for the AddPublicKey RPC
 */
public UnaryCallSettings<AddPublicKeyRequest, Operation> addPublicKeySettings() {
return addPublicKeySettings;
}
/**
 * Returns the object with the settings used for calls to addPublicKey.
 *
 * @return the operation call settings for the AddPublicKey RPC
 */
public OperationCallSettings<AddPublicKeyRequest, AddPublicKeyResponse, AddPublicKeyMetadata>
addPublicKeyOperationSettings() {
return addPublicKeyOperationSettings;
}
/**
 * Returns the object with the settings used for calls to removePublicKey.
 *
 * @return the call settings applied to the initial removePublicKey RPC
 */
public UnaryCallSettings<RemovePublicKeyRequest, Operation> removePublicKeySettings() {
  return removePublicKeySettings;
}
/**
 * Returns the object with the settings used for calls to removePublicKey.
 *
 * @return the long-running-operation settings for removePublicKey
 */
public OperationCallSettings<
        RemovePublicKeyRequest, RemovePublicKeyResponse, RemovePublicKeyMetadata>
    removePublicKeyOperationSettings() {
  return removePublicKeyOperationSettings;
}
/**
 * Creates the transport-specific stub backed by these settings.
 *
 * <p>Only the gRPC transport is supported; any other configured transport results in an
 * {@link UnsupportedOperationException}.
 *
 * @return a gRPC-backed {@code CloudShellServiceStub}
 * @throws IOException if the stub cannot be created
 * @throws UnsupportedOperationException if the configured transport is not gRPC
 */
@BetaApi("A restructuring of stub classes is planned, so this may break in the future")
public CloudShellServiceStub createStub() throws IOException {
  String transportName = getTransportChannelProvider().getTransportName();
  if (!transportName.equals(GrpcTransportChannel.getGrpcTransportName())) {
    throw new UnsupportedOperationException(
        String.format("Transport not supported: %s", transportName));
  }
  return GrpcCloudShellServiceStub.create(this);
}
/**
 * Returns a builder for the default ExecutorProvider for this service.
 *
 * @return a fresh {@code InstantiatingExecutorProvider} builder
 */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
  return InstantiatingExecutorProvider.newBuilder();
}
/**
 * Returns the default service endpoint.
 *
 * @return the host:port string {@code "cloudshell.googleapis.com:443"}
 */
public static String getDefaultEndpoint() {
  return "cloudshell.googleapis.com:443";
}
/**
 * Returns the default mTLS service endpoint.
 *
 * @return the host:port string {@code "cloudshell.mtls.googleapis.com:443"}
 */
public static String getDefaultMtlsEndpoint() {
  return "cloudshell.mtls.googleapis.com:443";
}
/**
 * Returns the default service scopes.
 *
 * @return the OAuth scopes requested by default for this service
 */
public static List<String> getDefaultServiceScopes() {
  return DEFAULT_SERVICE_SCOPES;
}
/**
 * Returns a builder for the default credentials for this service.
 *
 * <p>The builder is preconfigured with this service's default scopes and enables JWT access
 * with scope.
 */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
  return GoogleCredentialsProvider.newBuilder()
      .setScopesToApply(DEFAULT_SERVICE_SCOPES)
      .setUseJwtAccessWithScope(true);
}
/**
 * Returns a builder for the default ChannelProvider for this service.
 *
 * <p>The maximum inbound gRPC message size is set to {@code Integer.MAX_VALUE} (effectively
 * unlimited).
 */
public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
  return InstantiatingGrpcChannelProvider.newBuilder()
      .setMaxInboundMessageSize(Integer.MAX_VALUE);
}
/** Returns the default transport channel provider, built from the default gRPC builder. */
public static TransportChannelProvider defaultTransportChannelProvider() {
  return defaultGrpcTransportProviderBuilder().build();
}
/**
 * Returns a builder for the default API client headers: a "gapic" token carrying this library's
 * version plus the gRPC transport token/version.
 */
@BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
  return ApiClientHeaderProvider.newBuilder()
      .setGeneratedLibToken(
          "gapic", GaxProperties.getLibraryVersion(CloudShellServiceStubSettings.class))
      .setTransportToken(
          GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
}
/** Returns a new builder for this class, preloaded with the generated defaults. */
public static Builder newBuilder() {
  return Builder.createDefault();
}
/**
 * Returns a new builder for this class.
 *
 * @param clientContext context supplying the transport, credentials, and executor to reuse
 */
public static Builder newBuilder(ClientContext clientContext) {
  return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
  return new Builder(this);
}
/**
 * Constructs an immutable settings instance from a builder, freezing every per-method
 * call-settings builder via {@code build()}.
 *
 * @param settingsBuilder the builder whose per-method settings are copied
 * @throws IOException if the superclass fails to initialize from the builder
 */
protected CloudShellServiceStubSettings(Builder settingsBuilder) throws IOException {
  super(settingsBuilder);
  getEnvironmentSettings = settingsBuilder.getEnvironmentSettings().build();
  startEnvironmentSettings = settingsBuilder.startEnvironmentSettings().build();
  startEnvironmentOperationSettings = settingsBuilder.startEnvironmentOperationSettings().build();
  authorizeEnvironmentSettings = settingsBuilder.authorizeEnvironmentSettings().build();
  authorizeEnvironmentOperationSettings =
      settingsBuilder.authorizeEnvironmentOperationSettings().build();
  addPublicKeySettings = settingsBuilder.addPublicKeySettings().build();
  addPublicKeyOperationSettings = settingsBuilder.addPublicKeyOperationSettings().build();
  removePublicKeySettings = settingsBuilder.removePublicKeySettings().build();
  removePublicKeyOperationSettings = settingsBuilder.removePublicKeyOperationSettings().build();
}
/** Builder for CloudShellServiceStubSettings. */
public static class Builder extends StubSettings.Builder<CloudShellServiceStubSettings, Builder> {
  // The unary builders are also collected in this list so that
  // applyToAllUnaryMethods can update them in bulk.
  private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
  private final UnaryCallSettings.Builder<GetEnvironmentRequest, Environment>
      getEnvironmentSettings;
  private final UnaryCallSettings.Builder<StartEnvironmentRequest, Operation>
      startEnvironmentSettings;
  private final OperationCallSettings.Builder<
          StartEnvironmentRequest, StartEnvironmentResponse, StartEnvironmentMetadata>
      startEnvironmentOperationSettings;
  private final UnaryCallSettings.Builder<AuthorizeEnvironmentRequest, Operation>
      authorizeEnvironmentSettings;
  private final OperationCallSettings.Builder<
          AuthorizeEnvironmentRequest, AuthorizeEnvironmentResponse, AuthorizeEnvironmentMetadata>
      authorizeEnvironmentOperationSettings;
  private final UnaryCallSettings.Builder<AddPublicKeyRequest, Operation> addPublicKeySettings;
  private final OperationCallSettings.Builder<
          AddPublicKeyRequest, AddPublicKeyResponse, AddPublicKeyMetadata>
      addPublicKeyOperationSettings;
  private final UnaryCallSettings.Builder<RemovePublicKeyRequest, Operation>
      removePublicKeySettings;
  private final OperationCallSettings.Builder<
          RemovePublicKeyRequest, RemovePublicKeyResponse, RemovePublicKeyMetadata>
      removePublicKeyOperationSettings;

  // Maps retry-policy names to the status codes considered retryable under that policy.
  private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
      RETRYABLE_CODE_DEFINITIONS;

  static {
    ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
        ImmutableMap.builder();
    definitions.put(
        "retry_policy_0_codes",
        ImmutableSet.copyOf(
            Lists.<StatusCode.Code>newArrayList(
                StatusCode.Code.UNAVAILABLE, StatusCode.Code.UNKNOWN)));
    // "no_retry_1" never retries: no status code is considered retryable.
    definitions.put(
        "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
    RETRYABLE_CODE_DEFINITIONS = definitions.build();
  }

  // Maps retry-policy names to the delay/timeout parameters used by that policy.
  private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

  static {
    ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
    RetrySettings settings = null;
    settings =
        RetrySettings.newBuilder()
            .setInitialRetryDelay(Duration.ofMillis(1000L))
            .setRetryDelayMultiplier(1.3)
            .setMaxRetryDelay(Duration.ofMillis(60000L))
            .setInitialRpcTimeout(Duration.ofMillis(60000L))
            .setRpcTimeoutMultiplier(1.0)
            .setMaxRpcTimeout(Duration.ofMillis(60000L))
            .setTotalTimeout(Duration.ofMillis(60000L))
            .build();
    definitions.put("retry_policy_0_params", settings);
    // Timeout-only parameters for the non-retrying policy.
    settings =
        RetrySettings.newBuilder()
            .setInitialRpcTimeout(Duration.ofMillis(60000L))
            .setRpcTimeoutMultiplier(1.0)
            .setMaxRpcTimeout(Duration.ofMillis(60000L))
            .setTotalTimeout(Duration.ofMillis(60000L))
            .build();
    definitions.put("no_retry_1_params", settings);
    RETRY_PARAM_DEFINITIONS = definitions.build();
  }

  /** Creates a builder with the generated per-method defaults and no client context. */
  protected Builder() {
    this(((ClientContext) null));
  }

  /** Creates a builder with fresh per-method settings builders, then installs the defaults. */
  protected Builder(ClientContext clientContext) {
    super(clientContext);
    getEnvironmentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    startEnvironmentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    startEnvironmentOperationSettings = OperationCallSettings.newBuilder();
    authorizeEnvironmentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    authorizeEnvironmentOperationSettings = OperationCallSettings.newBuilder();
    addPublicKeySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    addPublicKeyOperationSettings = OperationCallSettings.newBuilder();
    removePublicKeySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    removePublicKeyOperationSettings = OperationCallSettings.newBuilder();
    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            getEnvironmentSettings,
            startEnvironmentSettings,
            authorizeEnvironmentSettings,
            addPublicKeySettings,
            removePublicKeySettings);
    initDefaults(this);
  }

  /** Creates a builder seeded from an existing settings instance (see {@code toBuilder()}). */
  protected Builder(CloudShellServiceStubSettings settings) {
    super(settings);
    getEnvironmentSettings = settings.getEnvironmentSettings.toBuilder();
    startEnvironmentSettings = settings.startEnvironmentSettings.toBuilder();
    startEnvironmentOperationSettings = settings.startEnvironmentOperationSettings.toBuilder();
    authorizeEnvironmentSettings = settings.authorizeEnvironmentSettings.toBuilder();
    authorizeEnvironmentOperationSettings =
        settings.authorizeEnvironmentOperationSettings.toBuilder();
    addPublicKeySettings = settings.addPublicKeySettings.toBuilder();
    addPublicKeyOperationSettings = settings.addPublicKeyOperationSettings.toBuilder();
    removePublicKeySettings = settings.removePublicKeySettings.toBuilder();
    removePublicKeyOperationSettings = settings.removePublicKeyOperationSettings.toBuilder();
    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            getEnvironmentSettings,
            startEnvironmentSettings,
            authorizeEnvironmentSettings,
            addPublicKeySettings,
            removePublicKeySettings);
  }

  // Builds the canonical default builder: default transport, credentials, headers and
  // endpoints, then the per-method retry/LRO defaults via initDefaults.
  private static Builder createDefault() {
    Builder builder = new Builder(((ClientContext) null));
    builder.setTransportChannelProvider(defaultTransportChannelProvider());
    builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
    builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
    builder.setEndpoint(getDefaultEndpoint());
    builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
    builder.setSwitchToMtlsEndpointAllowed(true);
    return initDefaults(builder);
  }

  // Installs the generated retry and long-running-operation polling defaults on every method.
  private static Builder initDefaults(Builder builder) {
    builder
        .getEnvironmentSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
    builder
        .startEnvironmentSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .authorizeEnvironmentSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .addPublicKeySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .removePublicKeySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .startEnvironmentOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<StartEnvironmentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(StartEnvironmentResponse.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(StartEnvironmentMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelay(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelay(Duration.ofMillis(45000L))
                    .setInitialRpcTimeout(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeout(Duration.ZERO)
                    .setTotalTimeout(Duration.ofMillis(300000L))
                    .build()));
    builder
        .authorizeEnvironmentOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<AuthorizeEnvironmentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(
                AuthorizeEnvironmentResponse.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(
                AuthorizeEnvironmentMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelay(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelay(Duration.ofMillis(45000L))
                    .setInitialRpcTimeout(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeout(Duration.ZERO)
                    .setTotalTimeout(Duration.ofMillis(300000L))
                    .build()));
    builder
        .addPublicKeyOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<AddPublicKeyRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(AddPublicKeyResponse.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(AddPublicKeyMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelay(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelay(Duration.ofMillis(45000L))
                    .setInitialRpcTimeout(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeout(Duration.ZERO)
                    .setTotalTimeout(Duration.ofMillis(300000L))
                    .build()));
    builder
        .removePublicKeyOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<RemovePublicKeyRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(RemovePublicKeyResponse.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(RemovePublicKeyMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelay(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelay(Duration.ofMillis(45000L))
                    .setInitialRpcTimeout(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeout(Duration.ZERO)
                    .setTotalTimeout(Duration.ofMillis(300000L))
                    .build()));
    return builder;
  }

  /**
   * Applies the given settings updater function to all of the unary API methods in this service.
   *
   * <p>Note: This method does not support applying settings to streaming methods.
   */
  public Builder applyToAllUnaryMethods(
      ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
    super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
    return this;
  }

  /** Returns the list of unary per-method settings builders updated by the method above. */
  public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
    return unaryMethodSettingsBuilders;
  }

  /** Returns the builder for the settings used for calls to getEnvironment. */
  public UnaryCallSettings.Builder<GetEnvironmentRequest, Environment> getEnvironmentSettings() {
    return getEnvironmentSettings;
  }

  /** Returns the builder for the settings used for calls to startEnvironment. */
  public UnaryCallSettings.Builder<StartEnvironmentRequest, Operation>
      startEnvironmentSettings() {
    return startEnvironmentSettings;
  }

  /** Returns the builder for the settings used for calls to startEnvironment. */
  @BetaApi(
      "The surface for use by generated code is not stable yet and may change in the future.")
  public OperationCallSettings.Builder<
          StartEnvironmentRequest, StartEnvironmentResponse, StartEnvironmentMetadata>
      startEnvironmentOperationSettings() {
    return startEnvironmentOperationSettings;
  }

  /** Returns the builder for the settings used for calls to authorizeEnvironment. */
  public UnaryCallSettings.Builder<AuthorizeEnvironmentRequest, Operation>
      authorizeEnvironmentSettings() {
    return authorizeEnvironmentSettings;
  }

  /** Returns the builder for the settings used for calls to authorizeEnvironment. */
  @BetaApi(
      "The surface for use by generated code is not stable yet and may change in the future.")
  public OperationCallSettings.Builder<
          AuthorizeEnvironmentRequest, AuthorizeEnvironmentResponse, AuthorizeEnvironmentMetadata>
      authorizeEnvironmentOperationSettings() {
    return authorizeEnvironmentOperationSettings;
  }

  /** Returns the builder for the settings used for calls to addPublicKey. */
  public UnaryCallSettings.Builder<AddPublicKeyRequest, Operation> addPublicKeySettings() {
    return addPublicKeySettings;
  }

  /** Returns the builder for the settings used for calls to addPublicKey. */
  @BetaApi(
      "The surface for use by generated code is not stable yet and may change in the future.")
  public OperationCallSettings.Builder<
          AddPublicKeyRequest, AddPublicKeyResponse, AddPublicKeyMetadata>
      addPublicKeyOperationSettings() {
    return addPublicKeyOperationSettings;
  }

  /** Returns the builder for the settings used for calls to removePublicKey. */
  public UnaryCallSettings.Builder<RemovePublicKeyRequest, Operation> removePublicKeySettings() {
    return removePublicKeySettings;
  }

  /** Returns the builder for the settings used for calls to removePublicKey. */
  @BetaApi(
      "The surface for use by generated code is not stable yet and may change in the future.")
  public OperationCallSettings.Builder<
          RemovePublicKeyRequest, RemovePublicKeyResponse, RemovePublicKeyMetadata>
      removePublicKeyOperationSettings() {
    return removePublicKeyOperationSettings;
  }

  /** Builds an immutable settings instance from the current state of this builder. */
  @Override
  public CloudShellServiceStubSettings build() throws IOException {
    return new CloudShellServiceStubSettings(this);
  }
}
} |
Java | public abstract class Grids_GridNumber extends Grids_Grid {
private static final long serialVersionUID = 1L;
/**
* The noDataValue for the grid.
*/
public BigDecimal ndv;
/**
* Create a new instance.
*
* @param ge Grids_Environment
* @param fs Cache
* @param id ID
* @param ndv NoDataValue
* @throws Exception If encountered.
*/
protected Grids_GridNumber(Grids_Environment ge, IO_Cache fs,
long id, BigDecimal ndv) throws Exception {
super(ge, fs, id);
this.ndv = ndv;
}
/**
* @return The value at x-coordinate {@code x} and y-coordinate {@code y} as
* a BigDecimal.
* @param x The x-coordinate of the point at which the cell value is
* returned.
* @param y The y-coordinate of the point at which the cell value is
* returned.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public BigDecimal getCellBigDecimal(Math_BigRational x, Math_BigRational y)
throws IOException, Exception, ClassNotFoundException {
return getCellBigDecimal(getChunkRow(y), getChunkCol(x),
getChunkCellRow(y), getChunkCellCol(x));
}
/**
* @param row The chunk cell row index.
* @param col The chunk cell column index.
* @return Value at cell row index {@code row}, cell col index {@code col}
* as a BigDecimal.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public BigDecimal getCellBigDecimal(long row, long col) throws IOException,
Exception, ClassNotFoundException {
return getCellBigDecimal(getChunkRow(row), getChunkCol(col),
getChunkCellRow(row), getChunkCellCol(col));
}
/**
* For getting the value of chunk cell row {@code ccr} and chunk cell column
* {@code ccc} in chunk in chunk row {@code cr}, chunk column {@code cc} as
* a BigDecimal.
*
* @param cr The chunk row.
* @param cc The chunk col.
* @param ccr The cell row index of the chunk.
* @param ccc The cell column index of the chunk.
* @return The value of chunk cell row {@code ccr}, chunk cell column
* {@code ccc} in chunk in chunk row {@code cr}, chunk column {@code cc} as
* a BigDecimal.
* @throws java.lang.Exception If encountered.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public BigDecimal getCellBigDecimal(int cr, int cc, int ccr, int ccc)
throws IOException, Exception, ClassNotFoundException {
if (!isInGrid(cr, cc, ccr, ccc)) {
return ndv;
}
Grids_Chunk gc = getChunk(cr, cc);
if (gc == null) {
return ndv;
}
return getCellBigDecimal(gc, cr, cc, ccr, ccc);
}
/**
* For getting the value of chunk cell row {@code ccr} and chunk cell column
* {@code ccc} in chunk in chunk row {@code cr}, chunk column {@code cc} as
* a BigDecimal.
*
* @param chunk The Grids_Chunk containing the cell.
* @param cr The chunk row.
* @param cc The chunk col.
* @param ccr The cell row index of the chunk.
* @param ccc The cell column index of the chunk.
* @return The value of chunk cell row {@code ccr}, chunk cell column
* {@code ccc} in chunk in chunk row {@code cr}, chunk column {@code cc} as
* a BigDecimal.
*/
public abstract BigDecimal getCellBigDecimal(Grids_Chunk chunk, int cr,
int cc, int ccr, int ccc);
/**
* For setting the value of chunk cell row {@code ccr} and chunk cell column
* {@code ccc} in chunk in chunk row {@code cr}, chunk column {@code cc} to
* {@code v}.
*
* @param cr The chunk row.
* @param cc The chunk col.
* @param ccr The cell row index of the chunk.
* @param ccc The cell column index of the chunk.
* @param v The value to set.
* @return The value of chunk cell row {@code ccr}, chunk cell column
* {@code ccc} in chunk in chunk row {@code cr}, chunk column {@code cc} as
* a BigDecimal.
* @throws java.lang.Exception If encountered.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public abstract Number setCell(int cr, int cc, int ccr, int ccc,
BigDecimal v) throws IOException, ClassNotFoundException,
Exception;
/**
* For setting the value at cell row index {@code r}, cell column index
* {@code c} to v.
*
* @param r The cell row.
* @param c The cell column.
* @param v The value to add.
* @return The value of at cell row index {@code r}, cell column index
* {@code c} before it is set to {@code v}.
* @throws java.lang.Exception If encountered.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public Number setCell(long r, long c, BigDecimal v)
throws IOException, Exception, ClassNotFoundException {
int cr = getChunkRow(r);
int cc = getChunkCol(c);
int ccr = getChunkCellRow(r);
int ccc = getChunkCellCol(c);
if (isInGrid(cr, cc, ccr, ccc)) {
return setCell(cr, cc, ccr, ccc, v);
}
return ndv;
}
/**
* For setting the value at cell with cell ID {@code cellID}.
*
* @param cellID The cell ID.
* @param v The value to add.
* @return The value of at cell row index {@code r}, cell column index
* {@code c} before it is set to {@code v}.
* @throws java.lang.Exception If encountered.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public Number setCell(Grids_2D_ID_long cellID, BigDecimal v)
throws IOException, Exception, ClassNotFoundException {
return setCell(cellID.getRow(), cellID.getCol(), v);
}
/**
* For setting the value at x-coordinate {@code x}, y-coordinate {@code y}
* to {@code v}.
*
* @param x The x-coordinate of the point at which the cell value is
* returned.
* @param y The y-coordinate of the point at which the cell value is
* returned.
* @param v The value to set.
* @return The value of at cell row index {@code r}, cell column index
* {@code c} before it is set to {@code v}.
* @throws java.lang.Exception If encountered.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public Number setCell(Math_BigRational x, Math_BigRational y, BigDecimal v)
throws IOException, Exception, ClassNotFoundException {
if (isInGrid(x, y)) {
return setCell(getChunkRow(y), getChunkCol(x),
getChunkCellRow(y), getChunkCellCol(x), v);
}
return ndv;
}
/**
* For adding {@code v} to the value of chunk cell row {@code ccr}, chunk
* cell column {@code ccc} in chunk in chunk row {@code cr}, chunk column
* {@code cc} to {@code v}.
*
* @param cr The chunk row.
* @param cc The chunk col.
* @param ccr The cell row index of the chunk.
* @param ccc The cell column index of the chunk.
* @param v The value to add.
* @throws java.lang.Exception If encountered.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public void addToCell(int cr, int cc, int ccr, int ccc, BigDecimal v)
throws IOException, ClassNotFoundException, Exception {
if (v.compareTo(ndv) != 0) {
if (isInGrid(cr, cc, ccr, ccc)) {
BigDecimal v2 = getCellBigDecimal(cr, cc, ccr, ccc);
if (v2.compareTo(ndv) == 0) {
setCell(cr, cc, ccr, ccc, v);
} else {
setCell(cr, cc, ccr, ccc, v.add(v2));
}
}
}
}
/**
* For adding {@code v} to the cell value at cell row index {@code r}, cell
* column index {@code c}.
*
* @param r The cell row.
* @param c The cell column.
* @param v The value to add.
* @throws java.lang.Exception If encountered.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public void addToCell(long r, long c, BigDecimal v)
throws IOException, Exception, ClassNotFoundException {
addToCell(getChunkRow(r), getChunkCol(c), getChunkCellRow(r),
getChunkCellCol(c), v);
}
/**
* For adding {@code v} to the cell with cell ID {@code cellID}.
*
* @param cellID The cell ID.
* @param v The value to add.
* @throws java.lang.Exception If encountered.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public void addToCell(Grids_2D_ID_long cellID, BigDecimal v)
throws IOException, Exception, ClassNotFoundException {
addToCell(cellID.getRow(), cellID.getCol(), v);
}
/**
* For setting the value at x-coordinate {@code x}, y-coordinate {@code y}
* to {@code v}.
*
* @param x The x-coordinate of the point at which the cell value is
* returned.
* @param y The y-coordinate of the point at which the cell value is
* returned.
* @param v The value to add.
* @throws java.lang.Exception If encountered.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
public void addToCell(Math_BigRational x, Math_BigRational y, BigDecimal v)
throws IOException, Exception, ClassNotFoundException {
addToCell(getChunkRow(y), getChunkCol(x), getChunkCellRow(y),
getChunkCellCol(x), v);
}
/**
* @return The CellIDs of the nearest cells with data values nearest to
* point with position given by: x-coordinate x, y-coordinate y; and, cell
* row index row, cell column index col.
* @param x the x-coordinate of the point
* @param y the y-coordinate of the point
* @param row The row index from which the cell IDs of the nearest cells
* with data values are returned.
* @param col The column index from which the cell IDs of the nearest cells
* with data values are returned.
* @param oom The Order of Magnitude for calculating the distance.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
protected abstract NearestValuesCellIDsAndDistance
getNearestValuesCellIDsAndDistance(Math_BigRational x,
Math_BigRational y, long row, long col, int oom)
throws IOException, Exception, ClassNotFoundException;
/**
* @return a Grids_2D_ID_long[] - The CellIDs of the nearest cells with data
* values to position given by row index rowIndex, column index colIndex.
* @param row The row from which the cell IDs of the nearest cells with data
* values are returned.
* @param col The column from which the cell IDs of the nearest cells with
* data values are returned.
* @param oom The Order of Magnitude for calculating the distance.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
protected abstract NearestValuesCellIDsAndDistance
getNearestValuesCellIDsAndDistance(long row, long col, int oom)
throws IOException, Exception, ClassNotFoundException;
/**
* @return a Grids_2D_ID_long[] The CellIDs of the nearest cells with data
* values to point given by x-coordinate x, y-coordinate y.
* @param x The x-coordinate of the point.
* @param y The y-coordinate of the point.
* @param oom The Order of Magnitude for calculating the distance.
* @throws java.io.IOException If encountered.
* @throws java.lang.ClassNotFoundException If encountered.
*/
protected NearestValuesCellIDsAndDistance getNearestValuesCellIDsAndDistance(
Math_BigRational x, Math_BigRational y, int oom) throws IOException,
Exception, ClassNotFoundException {
return getNearestValuesCellIDsAndDistance(x, y, getRow(y), getCol(x), oom);
}
/**
* Used to help log a view of the grid.
*
* @param ncols The number of columns in the grid.
* @param c The number of columns to write out.
* @param row The row of the grid to write out.
* @throws Exception If encountered.
*/
@Override
protected void logRow(long ncols, long c, long row) throws Exception {
String s = " " + getStringValue(Math_BigRational.valueOf(row)) + " | ";
if (ncols < c) {
long col;
for (col = 0; col < ncols - 1; col++) {
s += getStringValue(getCellBigDecimal(row, col), ndv) + " | ";
}
s += getStringValue(getCellBigDecimal(row, col), ndv) + " | ";
env.env.log(s);
} else {
for (long col = 0; col < c - 1; col++) {
s += getStringValue(getCellBigDecimal(row, col), ndv) + " | ";
}
s += " |";
s += " " + getStringValue(getCellBigDecimal(row, ncols - 1), ndv) + " |";
env.env.log(s);
}
}
/**
* Used to help log a view of the grid.
*
* @param v The value to represent as a String.
* @param ndv The no data value.
* @return a String representation of {@code v}.
*/
public String getStringValue(BigDecimal v, BigDecimal ndv) {
if (v.compareTo(ndv) == 0) {
return " * ";
}
return Math_BigDecimal.getStringValue(v);
}
} |
Java | public abstract class PartnerLoadVisitor extends DAOLoadVisitor {
public PartnerLoadVisitor(Controller controller, ILoaderProgress monitor, DataWriter successWriter,
DataWriter errorWriter) {
super(controller, monitor, successWriter, errorWriter);
}
@Override
protected void loadBatch() throws DataAccessObjectException, LoadException {
Object[] results = null;
try {
results = executeClientAction(getController().getPartnerClient(), dynaArray);
} catch (ApiFault e) {
handleException(e);
} catch (ConnectionException e) {
handleException(e);
}
// set the current processed
int currentProcessed;
try {
currentProcessed = getConfig().getInt(LastRun.LAST_LOAD_BATCH_ROW);
} catch (ParameterLoadException e) {
// if there's a problem getting last batch row, start at the beginning
currentProcessed = 0;
}
currentProcessed += results.length;
getConfig().setValue(LastRun.LAST_LOAD_BATCH_ROW, currentProcessed);
try {
getConfig().saveLastRun();
} catch (IOException e) {
String errMsg = Messages.getString("LoadAction.errorLastRun");
getLogger().error(errMsg, e);
handleException(errMsg, e);
}
writeOutputToWriter(results, dataArray);
// update Monitor
getProgressMonitor().worked(results.length);
getProgressMonitor().setSubTask(getRateCalculator().calculateSubTask(getNumberOfRows(), getNumberErrors()));
// now clear the arrays
clearArrays();
}
private void writeOutputToWriter(Object[] results, List<Row> dataArray)
throws DataAccessObjectException, LoadException {
if (results.length != dataArray.size()) {
getLogger().fatal(Messages.getString("Visitor.errorResultsLength")); //$NON-NLS-1$
throw new LoadException(Messages.getString("Visitor.errorResultsLength"));
}
// have to do this because although saveResult and deleteResult
// are a) not the same class yet b) not subclassed
for (int i = 0; i < results.length; i++) {
Row dataRow = dataArray.get(i);
String statusMsg = null;
if (results instanceof SaveResult[]) {
SaveResult saveRes = (SaveResult)results[i];
if (saveRes.getSuccess()) {
if (OperationInfo.insert == getConfig().getOperationInfo()) {
statusMsg = Messages.getString("DAOLoadVisitor.statusItemCreated");
} else {
statusMsg = Messages.getString("DAOLoadVisitor.statusItemUpdated");
}
}
dataRow.put(Config.STATUS_COLUMN_NAME, statusMsg);
processResult(dataRow, saveRes.getSuccess(), saveRes.getId(), saveRes.getErrors());
} else if (results instanceof DeleteResult[]) {
DeleteResult deleteRes = (DeleteResult)results[i];
if (deleteRes.getSuccess()) {
statusMsg = Messages.getString("DAOLoadVisitor.statusItemDeleted");
}
dataRow.put(Config.STATUS_COLUMN_NAME, statusMsg);
processResult(dataRow, deleteRes.getSuccess(), deleteRes.getId(), deleteRes.getErrors());
} else if (results instanceof UpsertResult[]) {
UpsertResult upsertRes = (UpsertResult)results[i];
if (upsertRes.getSuccess()) {
statusMsg = upsertRes.getCreated() ? Messages.getString("DAOLoadVisitor.statusItemCreated")
: Messages.getString("DAOLoadVisitor.statusItemUpdated");
}
dataRow.put(Config.STATUS_COLUMN_NAME, statusMsg);
processResult(dataRow, upsertRes.getSuccess(), upsertRes.getId(), upsertRes.getErrors());
}
}
}
/**
 * Records the outcome of a single row: a success entry carrying the returned
 * id, or an error entry built from the first reported error (or a generic
 * "no error received" message when the server sent none).
 */
private void processResult(Row dataRow, boolean isSuccess, String id, Error[] errors)
        throws DataAccessObjectException {
    if (!isSuccess) {
        String message = (errors == null)
                ? Messages.getString("Visitor.noErrorReceivedMsg")
                : errors[0].getMessage();
        writeError(dataRow, message);
        return;
    }
    writeSuccess(dataRow, id, null);
}
/**
 * Performs the actual client action for this batch. Must be implemented by all
 * subclasses. It returns {@code Object[]} because SaveResult[] and
 * DeleteResult[], while they behave identically, are two different classes
 * without common inheritance — and we're stuck with that for legacy reasons.
 *
 * @param client connected partner API client used to execute the operation
 * @param data rows (as DynaBeans) to submit in this batch
 * @return per-row results (SaveResult[], DeleteResult[] or UpsertResult[])
 * @throws ConnectionException if the API call fails at the transport level
 */
protected abstract Object[] executeClientAction(PartnerClient client, List<DynaBean> data)
throws ConnectionException;
} |
Java | @XmlRootElement(name = "cluster")
public class VcenterClusterParam {
// Cluster id to export to vCenter.
private URI id;
// Host ids to add; lazily created by getAddHosts() (JAXB populates via the getter).
private List<URI> addHosts;
// Host ids to remove; lazily created by getRemoveHosts().
private List<URI> removeHosts;
public VcenterClusterParam() {
}
public VcenterClusterParam(URI id) {
this.id = id;
}
/**
 * Cluster to be exported to vCenter.
 */
@XmlElement(required = true)
public URI getId() {
return id;
}
public void setId(URI id) {
this.id = id;
}
/**
 * List of host IDs to add. Never returns null: an empty list is created on
 * first access so JAXB can add elements directly to it.
 */
@XmlElementWrapper(name = "add_hosts")
@XmlElement(name = "add_host")
public List<URI> getAddHosts() {
if (addHosts == null) {
addHosts = new ArrayList<URI>();
}
return addHosts;
}
public void setAddHosts(List<URI> addHosts) {
this.addHosts = addHosts;
}
/**
 * List of host IDs to remove. Never returns null: an empty list is created
 * on first access so JAXB can add elements directly to it.
 */
@XmlElementWrapper(name = "remove_hosts")
@XmlElement(name = "remove_host")
public List<URI> getRemoveHosts() {
if (removeHosts == null) {
removeHosts = new ArrayList<URI>();
}
return removeHosts;
}
public void setRemoveHosts(List<URI> removeHosts) {
this.removeHosts = removeHosts;
}
} |
Java | public class FlagTaxonomyTermAction extends BaseActionSupport {
private static final Logger log = LoggerFactory.getLogger(FlagTaxonomyTermAction.class);
// Service that persists the flag; injected via setter below.
private TaxonomyService taxonomyService;
// Request parameters, set by the framework via the setters below.
private Long articleID;
private Long categoryID;
/**
 * Flag a particular taxonomy term applied to an article.
 *
 * Check the user's cookies to make an attempt at stopping spamming one
 * article/category with a lot of flags. This is best-effort only: clearing
 * cookies defeats the check.
 *
 * @return INPUT (missing parameters) or SUCCESS
 *
 * @throws Exception
 */
@Override
public String execute() throws Exception {
String cookieValue = Cookies.getCookieValue(Cookies.COOKIE_ARTICLE_CATEGORY_FLAGS);
List<ArticleCategoryPair> valuePairs = new ArrayList<ArticleCategoryPair>();
if(articleID != null && categoryID != null) {
boolean flaggedAlready = false;
if(cookieValue != null) {
// Decode the previously flagged (article, category) pairs from the cookie.
TaxonomyCookie taxonomyCookie = new TaxonomyCookie(cookieValue);
for(ArticleCategoryPair articleCategory : taxonomyCookie.getArticleCategories()) {
//Add existing values to the set to use for the new cookie
valuePairs.add(articleCategory);
long storedArticleID = articleCategory.getArticleID();
long storedCategoryID = articleCategory.getCategoryID();
// Long.equals against an autoboxed long: types match, so this compares values.
if(articleID.equals(storedArticleID) && categoryID.equals(storedCategoryID)) {
flaggedAlready = true;
}
}
}
if(!flaggedAlready) {
//Here add new value to the first in the list. This way if cookie limit is reached, the oldest values will
// get lost.
List<ArticleCategoryPair> temp = new ArrayList<ArticleCategoryPair>();
temp.add(new ArticleCategoryPair(articleID, categoryID));
temp.addAll(valuePairs);
valuePairs = temp;
// Persist the flag server-side, attributed to the current user.
this.taxonomyService.flagTaxonomyTerm(articleID, categoryID, this.getAuthId());
log.debug("Article/Category Flagged. ArticleID: {}, CategoryID: {}, AuthID: '{}'", new Object[] { articleID, categoryID, this.getAuthId() });
} else {
log.debug("Article/Category Flagged already. {}/{}", articleID, categoryID);
}
// Write the (possibly extended) pair list back to the client cookie.
TaxonomyCookie newCookie = new TaxonomyCookie(valuePairs);
Cookies.setCookieValue(Cookies.COOKIE_ARTICLE_CATEGORY_FLAGS, newCookie.toCookieString());
return SUCCESS;
}
addActionError("ArticleID or CategoryID not specified.");
return INPUT;
}
public void setArticleID(Long articleID) {
this.articleID = articleID;
}
public void setCategoryID(Long categoryID) {
this.categoryID = categoryID;
}
@Required
public void setTaxonomyService(TaxonomyService taxonomyService) {
this.taxonomyService = taxonomyService;
}
} |
Java | public class SessionLifecycle implements Startable {
/** Hibernate session whose lifetime this component manages; set once at construction. */
private final Session session;

public SessionLifecycle(Session session) {
    this.session = session;
}

/** No-op: the session is assumed to already be open when this component starts. */
public void start() {
}

/**
 * Flushes pending changes and closes the session. The close now happens in a
 * finally block so a failing flush can no longer leak the open session (the
 * original skipped close() whenever flush() threw). Hibernate errors are
 * deliberately suppressed: stop() runs during shutdown, where throwing would
 * only mask the original cause.
 */
public void stop() {
    try {
        try {
            session.flush();
        } finally {
            session.close();
        }
    } catch (HibernateException ignored) {
        // Best-effort shutdown; nothing sensible to do with the failure here.
    }
}
} |
Java | public class SourceTargetMapperTest {
public SourceTargetMapperTest() {
}

/**
 * Verifies that toEntity maps the parent and both children, and wires each
 * child's back-reference to the freshly created parent entity.
 */
@Test
public void testToTarget() {
    // Build the source DTO graph: one parent ("jim") with two children.
    ChildDto jack = new ChildDto();
    jack.setName( "jack" );
    ChildDto jill = new ChildDto();
    jill.setName( "jill" );
    ParentDto parent = new ParentDto();
    parent.setName( "jim" );
    parent.setChildren( Arrays.asList( jack, jill ) );

    // Map using an (empty) JPA context.
    JpaContext jpaCtx = new JpaContext( null );
    ParentEntity parentEntity = SourceTargetMapper.MAPPER.toEntity( parent, jpaCtx );

    // The entity mirrors the DTO, and every child points back at its parent.
    assertThat( parentEntity ).isNotNull();
    assertThat( parentEntity.getName() ).isEqualTo( "jim" );
    assertThat( parentEntity.getChildren() ).hasSize( 2 );
    assertThat( parentEntity.getChildren().get( 0 ).getName() ).isEqualTo( "jack" );
    assertThat( parentEntity.getChildren().get( 0 ).getMyParent() ).isEqualTo( parentEntity );
    assertThat( parentEntity.getChildren().get( 1 ).getName() ).isEqualTo( "jill" );
    assertThat( parentEntity.getChildren().get( 1 ).getMyParent() ).isEqualTo( parentEntity );
}
} |
Java | public class FirebaseInstanceService extends FirebaseInstanceIdService {
/** Log tag named after the class instead of the leftover debug literal "Test". */
private static final String TAG = "FirebaseInstanceService";

/**
 * Called by Firebase whenever the instance-id token is created or rotated.
 * Currently only logs the refresh; the refreshed token should also be sent
 * to the application server so it can address this device.
 */
@Override
public void onTokenRefresh() {
    String token = FirebaseInstanceId.getInstance().getToken();
    // Debug-only: the token is a credential — do not log it in release builds.
    Log.d(TAG, "FCM token refreshed: " + token);
    // TODO: persist/send the refreshed token to the application server.
}
} |
Java | @Named("viewJobBean")
@RequestScoped
public class ViewJobBean {

    /**
     * Stores the application.
     */
    @Inject
    private ApplicationBean application;

    /**
     * Stores the job.
     */
    private Job job;

    /**
     * Stores the job start dates.
     */
    private List<Date> startDates;

    /**
     * Extracts the trailing path segment of the request URI (the entity id).
     * Shared by all {@code /xxx/*} action mappings below, which previously
     * each duplicated this substring logic inline.
     *
     * @param request the HTTP servlet request.
     * @return the last path segment of the request URI.
     */
    private static String lastPathSegment(HttpServletRequest request) {
        String uri = request.getRequestURI();
        return uri.substring(uri.lastIndexOf("/") + 1);
    }

    /**
     * Get the job.
     *
     * @return the job.
     */
    public Job getJob() {
        return job;
    }

    /**
     * Get the start dates.
     *
     * @return the start dates.
     */
    public List<Date> getStartDates() {
        return startDates;
    }

    /**
     * Delete a job output. The output id comes from the URI tail, the owning
     * job id from the {@code jobId} request parameter.
     *
     * @param request the HTTP servlet request.
     * @return the job view page.
     */
    @ActionMapping("/output/delete/*")
    public String deleteJobOutput(HttpServletRequest request) {
        String outputId = lastPathSegment(request);
        String jobId = request.getParameter("jobId");
        DataStore dataStore = DataStoreFactory.create();
        job = dataStore.loadJob(jobId);
        dataStore.deleteJobOutput(jobId, outputId);
        startDates = dataStore.loadAllJobStartDates(jobId);
        return "/WEB-INF/ui/view.xhtml";
    }

    /**
     * Manually submit a job identified by the URI tail.
     *
     * @param request the HTTP servlet request.
     * @return the job view page.
     */
    @ActionMapping("/submit/*")
    public String submit(HttpServletRequest request) {
        String id = lastPathSegment(request);
        DataStore dataStore = DataStoreFactory.create();
        job = dataStore.loadJob(id);
        JobOutput jobOutput = application.submitJob(id);
        // NOTE(review): startDates is only refreshed when submission produced
        // output; on failure it stays null — confirm the view tolerates that.
        if (jobOutput != null) {
            startDates = dataStore.loadAllJobStartDates(id);
        }
        return "/WEB-INF/ui/view.xhtml";
    }

    /**
     * Show the job identified by the URI tail.
     *
     * @param request the HTTP servlet request.
     * @return the job view page.
     */
    @ActionMapping("/view/*")
    public String view(HttpServletRequest request) {
        String id = lastPathSegment(request);
        DataStore dataStore = DataStoreFactory.create();
        job = dataStore.loadJob(id);
        startDates = dataStore.loadAllJobStartDates(id);
        return "/WEB-INF/ui/view.xhtml";
    }
} |
Java | public final class FeatEyeOf {
// Utility holder for feat id constants — never instantiated.
private FeatEyeOf() {}

public final static int GRUUMSH_BLINDING_SPITTLE = 480;
public final static int GRUUMSH_BLINDING_SPITTLE_2 = 481;
public final static int GRUUMSH_COMMAND_THE_HORDE = 482;
public final static int GRUUMSH_RITUAL_SCARRING = 484;
public final static int GRUUMSH_SIGHT_OF_GRUUMSH = 487;
public final static int GRUUMSH_SWING_BLINDLY = 483;

/**
 * Resolves a feat id to its qualified constant name, or a
 * {@code "FeatEyeOf.(not found: …)"} marker for unknown ids.
 */
public static String nameOf(int value) {
    switch (value) {
        case 480: return "FeatEyeOf.GRUUMSH_BLINDING_SPITTLE";
        case 481: return "FeatEyeOf.GRUUMSH_BLINDING_SPITTLE_2";
        case 482: return "FeatEyeOf.GRUUMSH_COMMAND_THE_HORDE";
        case 483: return "FeatEyeOf.GRUUMSH_SWING_BLINDLY";
        case 484: return "FeatEyeOf.GRUUMSH_RITUAL_SCARRING";
        case 487: return "FeatEyeOf.GRUUMSH_SIGHT_OF_GRUUMSH";
        default:  return "FeatEyeOf.(not found: " + value + ")";
    }
}

/** No float-valued feats exist in this table, so every value is "not found". */
public static String nameOf(float value) {
    return "FeatEyeOf.(not found: " + value + ")";
}

/** No string-valued feats exist in this table, so every value is "not found". */
public static String nameOf(String value) {
    return "FeatEyeOf.(not found: " + value + ")";
}
} |
Java | public final class JavaScopeDeriver
extends ScopeDeriver
{
/**
 * Creates a new instance of this scope deriver.
 */
public JavaScopeDeriver()
{
}

@Override
public void deriveScope( ScopeContext context )
    throws RepositoryException
{
    String derived = getDerivedScope( context.getParentScope(), context.getChildScope() );
    context.setDerivedScope( derived );
}

/**
 * Combines the scope of a parent dependency with the declared scope of a
 * transitive child, following the standard Maven/Java resolution rules.
 * The checks are ordered by precedence; each one short-circuits.
 */
private String getDerivedScope( String parentScope, String childScope )
{
    // A system or test scope on the child always wins outright.
    if ( JavaScopes.SYSTEM.equals( childScope ) || JavaScopes.TEST.equals( childScope ) )
    {
        return childScope;
    }
    // No effective parent scope (null/empty/compile) leaves the child scope as-is.
    if ( parentScope == null || parentScope.length() <= 0 || JavaScopes.COMPILE.equals( parentScope ) )
    {
        return childScope;
    }
    // A test or runtime parent propagates its own scope downward.
    if ( JavaScopes.TEST.equals( parentScope ) || JavaScopes.RUNTIME.equals( parentScope ) )
    {
        return parentScope;
    }
    // System/provided parents demote the child to provided.
    if ( JavaScopes.SYSTEM.equals( parentScope ) || JavaScopes.PROVIDED.equals( parentScope ) )
    {
        return JavaScopes.PROVIDED;
    }
    // Anything else resolves to runtime.
    return JavaScopes.RUNTIME;
}
} |
Java | public class Template_1_3_6_1_4_1_19376_1_5_3_1_4_5_1 extends AbstractTemplateRule {
protected Template_1_3_6_1_4_1_19376_1_5_3_1_4_5_1() {
}
protected Template_1_3_6_1_4_1_19376_1_5_3_1_4_5_1(TemplateRuleID templateId) {
super(templateId);
}
// Factory bound to the PCC Concern Entry template id.
public static Template_1_3_6_1_4_1_19376_1_5_3_1_4_5_1 newInstance() {
return new Template_1_3_6_1_4_1_19376_1_5_3_1_4_5_1(TemplateRuleID.PCC_ConcernEntry);
}
/**
 * Applies the Concern Entry rule to the section template: sets a
 * not-applicable code on the concern act, derives the act's statusCode from
 * which of the concern's start/end times are present, and copies those times
 * into the act's effectiveTime interval.
 *
 * @param template expected to be a {@code Concern}; cast unchecked.
 */
@Override
public void apply(Template template) {
// template.addTemplateRule(TemplateRuleFactory.getRule("2.16.840.1.113883.10.20.1.27"));
Concern concern = (Concern) template;
Act concernAct = concern.getAct();
// The concern act itself carries no clinical code, so mark it NA.
CD<String> code = new CD<String>();
code.setNullFlavor(NullFlavor.NotApplicable);
concernAct.setCode(code);
// statusCode must be active if only a low time is specified
if (concern.getStartTime() != null && concern.getEndTime() == null) {
concernAct.setStatusCode(ActStatus.Active);
}
// statusCode is either completed or aborted if the low AND high times are set - making it completed for now
if (concern.getStartTime() != null && concern.getEndTime() != null) {
concernAct.setStatusCode(ActStatus.Completed);
}
//Concern entries have an Act as a root. Add effective time to it.
concernAct.setEffectiveTime(
CoreDataTypeHelpers.createIVL_TS(concern.getStartTime(), concern.getEndTime(), null));
}
/**
 * Legacy variant of {@link #apply(Template)} working on raw section/act
 * arguments; applies the parent CCD template first, then the same
 * code/status/effectiveTime population as apply().
 *
 * @deprecated use {@link #apply(Template)} instead.
 */
@Deprecated
public void populateWithRequiredDefaults(SectionTemplate section, Act act, Calendar lowTime, Calendar highTime) {
// apply parent CCD Template
Template_2_16_840_1_113883_10_20_1_27.newInstance().populateWithRequiredDefaults(section, act, lowTime, highTime);
section.getRoot().getTemplateId().add(getTemplateID().getII());
//section.addTemplateRule(this);
CD<String> code = new CD<String>();
code.setNullFlavor(NullFlavor.NotApplicable);
act.setCode(code);
// statusCode must be active if only a low time is specified
if (lowTime != null && highTime == null) {
act.setStatusCode(ActStatus.Active);
}
// statusCode is either completed or aborted if the low AND high times are set - making it completed for now
if (lowTime != null && highTime != null) {
act.setStatusCode(ActStatus.Completed);
}
act.setEffectiveTime(CoreDataTypeHelpers.createIVL_TS(lowTime, highTime, null));
}
// public void populateWithRequiredDefaults(SectionTemplate section, Act act)
// {
// act.getTemplateId().add(new II(this.getTemplateIDs().getOid()));
// section.getTemplateRules().add(this);
// }
} |
Java | public class AddCommandIntegrationTest {
// Fresh, empty model rebuilt before every test by setUp().
private Model model;
@BeforeEach
public void setUp() {
model = new ModelManager(new IntervieweeList(), new InterviewerList(), new UserPrefs(), new LinkedList<>());
}
// Adding a new interviewee to an empty model must succeed and match a model
// that had the interviewee inserted directly.
@Test
public void execute_newInterviewee_success() {
Interviewee validInterviewee = ALICE_INTERVIEWEE;
Model expectedModel = new ModelManager(model.getMutableIntervieweeList(), model.getMutableInterviewerList(),
new UserPrefs(), new LinkedList<>());
expectedModel.addInterviewee(validInterviewee);
assertCommandSuccess(new AddIntervieweeCommand(validInterviewee), model,
String.format(AddCommand.MESSAGE_SUCCESS, validInterviewee), expectedModel);
}
// Same as above, but for the interviewer variant of the add command.
@Test
public void execute_newInterviewer_success() {
Interviewer validInterviewer = ALICE_INTERVIEWER;
Model expectedModel = new ModelManager(model.getMutableIntervieweeList(), model.getMutableInterviewerList(),
new UserPrefs(), new LinkedList<>());
expectedModel.addInterviewer(validInterviewer);
assertCommandSuccess(new AddInterviewerCommand(validInterviewer), model,
String.format(AddCommand.MESSAGE_SUCCESS, validInterviewer), expectedModel);
}
// Adding an interviewee that already exists in a pre-populated model must fail
// with the duplicate-person message.
@Test
public void execute_duplicateInterviewee_throwsCommandException() {
model = new ModelManager(getTypicalIntervieweeList(), getTypicalInterviewerList(),
new UserPrefs(), new LinkedList<>());
Interviewee intervieweeInList = model.getInterviewee(ALICE_INTERVIEWEE.getName().fullName);
assertCommandFailure(new AddIntervieweeCommand(intervieweeInList), model, AddCommand.MESSAGE_DUPLICATE_PERSON);
}
} |
Java | public abstract class BlockBody {
// Shared empty parameter-name list for blocks that declare no parameters.
public static final String[] EMPTY_PARAMETER_LIST = new String[0];
// Declared parameter signature of this block; immutable once constructed.
protected final Signature signature;
public BlockBody(Signature signature) {
this.signature = signature;
}
public Signature getSignature() {
return signature;
}
// Stub: subclasses that track an eval type override this; the base class only
// reports the missing implementation to stderr.
public void setEvalType(EvalType evalType) {
System.err.println("setEvalType unimplemented in " + this.getClass().getName());
}
// call() entry points adjust the incoming arguments for the call protocol
// before delegating to yield(); yield() entry points take arguments as-is.
public IRubyObject call(ThreadContext context, IRubyObject[] args, Binding binding, Block.Type type) {
args = prepareArgumentsForCall(context, args, type);
return yield(context, args, null, binding, type);
}
public IRubyObject call(ThreadContext context, IRubyObject[] args, Binding binding,
Block.Type type, Block block) {
args = prepareArgumentsForCall(context, args, type);
return yield(context, args, null, binding, type, block);
}
public final IRubyObject yield(ThreadContext context, IRubyObject value, Binding binding, Block.Type type) {
return doYield(context, value, binding, type);
}
public final IRubyObject yield(ThreadContext context, IRubyObject[] args, IRubyObject self,
Binding binding, Block.Type type) {
// Normalize the argument array per proc/lambda semantics before dispatch.
IRubyObject[] preppedValue = RubyProc.prepareArgs(context, type, this, args);
return doYield(context, preppedValue, self, binding, type);
}
/**
 * Subclass specific yield implementation.
 * <p>
 * Should not be called directly. Gets called by {@link #yield(ThreadContext, IRubyObject, Binding, Block.Type)}
 * after ensuring that any common yield logic is taken care of.
 */
protected abstract IRubyObject doYield(ThreadContext context, IRubyObject value, Binding binding, Block.Type type);
/**
 * Subclass specific yield implementation.
 * <p>
 * Should not be called directly. Gets called by {@link #yield(ThreadContext, org.jruby.runtime.builtin.IRubyObject[], org.jruby.runtime.builtin.IRubyObject, Binding, org.jruby.runtime.Block.Type)}
 * after ensuring that all common yield logic is taken care of.
 */
protected abstract IRubyObject doYield(ThreadContext context, IRubyObject[] args, IRubyObject self,
Binding binding, Block.Type type);
// FIXME: This should be unified with the final versions above
// Here to allow incremental replacement. Overriden by subclasses which support it.
public IRubyObject yield(ThreadContext context, IRubyObject[] args, IRubyObject self,
Binding binding, Block.Type type, Block block) {
// Base implementation ignores the passed block argument.
return yield(context, args, self, binding, type);
}
// FIXME: This should be unified with the final versions above
// Here to allow incremental replacement. Overriden by subclasses which support it.
public IRubyObject yield(ThreadContext context, IRubyObject value,
Binding binding, Block.Type type, Block block) {
return yield(context, value, binding, type);
}
// Zero-argument call variant.
public IRubyObject call(ThreadContext context, Binding binding, Block.Type type) {
IRubyObject[] args = IRubyObject.NULL_ARRAY;
args = prepareArgumentsForCall(context, args, type);
return yield(context, args, null, binding, type);
}
public IRubyObject call(ThreadContext context, Binding binding,
Block.Type type, Block unusedBlock) {
return call(context, binding, type);
}
public IRubyObject yieldSpecific(ThreadContext context, Binding binding, Block.Type type) {
return yield(context, null, binding, type);
}
// One-argument call/yield variants.
public IRubyObject call(ThreadContext context, IRubyObject arg0, Binding binding, Block.Type type) {
IRubyObject[] args = new IRubyObject[] {arg0};
args = prepareArgumentsForCall(context, args, type);
return yield(context, args, null, binding, type);
}
public IRubyObject call(ThreadContext context, IRubyObject arg0, Binding binding,
Block.Type type, Block unusedBlock) {
return call(context, arg0, binding, type);
}
public IRubyObject yieldSpecific(ThreadContext context, IRubyObject arg0, Binding binding, Block.Type type) {
return yield(context, arg0, binding, type);
}
// Two-argument call/yield variants.
public IRubyObject call(ThreadContext context, IRubyObject arg0, IRubyObject arg1, Binding binding, Block.Type type) {
IRubyObject[] args = new IRubyObject[] {arg0, arg1};
args = prepareArgumentsForCall(context, args, type);
return yield(context, args, null, binding, type);
}
public IRubyObject call(ThreadContext context, IRubyObject arg0, IRubyObject arg1, Binding binding,
Block.Type type, Block unusedBlock) {
return call(context, arg0, arg1, binding, type);
}
public IRubyObject yieldSpecific(ThreadContext context, IRubyObject arg0, IRubyObject arg1, Binding binding, Block.Type type) {
return yield(context, new IRubyObject[] { arg0, arg1 }, null, binding, type);
}
// Three-argument call/yield variants.
public IRubyObject call(ThreadContext context, IRubyObject arg0, IRubyObject arg1, IRubyObject arg2, Binding binding, Block.Type type) {
IRubyObject[] args = new IRubyObject[] {arg0, arg1, arg2};
args = prepareArgumentsForCall(context, args, type);
return yield(context, args, null, binding, type);
}
public IRubyObject call(ThreadContext context, IRubyObject arg0, IRubyObject arg1, IRubyObject arg2, Binding binding,
Block.Type type, Block unusedBlock) {
return call(context, arg0, arg1, arg2, binding, type);
}
public IRubyObject yieldSpecific(ThreadContext context, IRubyObject arg0, IRubyObject arg1, IRubyObject arg2, Binding binding, Block.Type type) {
return yield(context, new IRubyObject[] { arg0, arg1, arg2 }, null, binding, type);
}
public abstract StaticScope getStaticScope();
public abstract void setStaticScope(StaticScope newScope);
/**
 * What is the arity of this block?
 *
 * @return the arity
 */
@Deprecated
public Arity arity() {
return signature.arity();
}
/**
 * Is the current block a real yield'able block instead a null one
 *
 * @return true if this is a valid block or false otherwise
 */
public boolean isGiven() {
return true;
}
/**
 * Get the filename for this block
 */
public abstract String getFile();
/**
 * get The line number for this block
 */
public abstract int getLine();
// Applies call-protocol argument rules: lambdas get strict arity checking,
// other block types get array spreading / excess-argument trimming.
public IRubyObject[] prepareArgumentsForCall(ThreadContext context, IRubyObject[] args, Block.Type type) {
if (type == Block.Type.LAMBDA) {
signature.checkArity(context.runtime, args);
} else {
// SSS FIXME: How is it even possible to "call" a NORMAL block?
// I thought only procs & lambdas can be called, and blocks are yielded to.
if (args.length == 1) {
// Convert value to arg-array, unwrapping where necessary
args = IRRuntimeHelpers.convertValueIntoArgArray(context, args[0], signature.arityValue(), type == Block.Type.NORMAL && args[0] instanceof RubyArray);
} else if (getSignature().arityValue() == 1 && !getSignature().restKwargs()) {
// discard excess arguments
args = args.length == 0 ? context.runtime.getSingleNilArray() : new IRubyObject[] { args[0] };
}
}
return args;
}
public ArgumentDescriptor[] getArgumentDescriptors() {
return ArgumentDescriptor.EMPTY_ARRAY;
}
// Shared do-nothing body used for absent blocks.
public static final BlockBody NULL_BODY = new NullBlockBody();
} |
Java | public class OffsetDateTimeDescriptorTest extends AbstractDescriptorTest<OffsetDateTime> {
// Two equal values and one differing only in zone offset, used to exercise
// the descriptor's equality contract.
final OffsetDateTime original = sample( 2, 0 );
final OffsetDateTime copy = sample( 2, 0 );
final OffsetDateTime different = sample( 4, 30 );

/** Builds 2016-10-08T15:13 at the given zone offset. */
private static OffsetDateTime sample(int offsetHours, int offsetMinutes) {
    LocalDateTime base = LocalDateTime.of( 2016, 10, 8, 15, 13 );
    return OffsetDateTime.of( base, ZoneOffset.ofHoursMinutes( offsetHours, offsetMinutes ) );
}

public OffsetDateTimeDescriptorTest() {
    super(OffsetDateTimeJavaDescriptor.INSTANCE);
}

@Override
protected Data<OffsetDateTime> getTestData() {
    return new Data<>( original, copy, different );
}

@Override
protected boolean shouldBeMutable() {
    // OffsetDateTime is immutable, so the descriptor must report it as such.
    return false;
}
} |
Java | class LSH {
// One bucket-key -> item-key multimap per band.
Multimap<String, String>[] maps;
int num_bands;
// Hash values (rows) per band: num_hashes / num_bands.
int num_rows;
int num_hashes;

/**
 * Creates an LSH index with an explicit band count.
 *
 * @param n total number of minhash values per item
 * @param b number of bands; must divide n evenly
 * @throws IllegalArgumentException if b does not divide n
 */
public LSH(int n, int b) throws IllegalArgumentException {
    num_hashes = n;
    num_bands = b;
    if ((n % b) != 0)
        throw new IllegalArgumentException("Bands must divide num_hashes (" + num_hashes + ") evenly");
    num_rows = n / b;
    System.err.println("LSH with " + num_hashes + " hash buckets and " + num_bands + " bands");
    System.err.println("Target threshold: " + Math.pow(1.0 / num_hashes, (1.0 / (num_hashes / num_bands))));
    maps = new ArrayListMultimap[num_bands];
    setup_bands();
}

/**
 * Creates an LSH index, choosing the band count from a target Jaccard
 * similarity threshold.
 *
 * @param n total number of minhash values per item
 * @param t target Jaccard similarity threshold
 */
public LSH(int n, double t) {
    num_hashes = n;
    num_bands = compute_bands(n, t);
    num_rows = n / num_bands;
    System.err.println("LSH with " + num_hashes + " hash buckets and " + num_bands + " bands");
    System.err.println("Target threshold: " + Math.pow(1.0 / num_hashes, (1.0 / (num_hashes / num_bands))));
    maps = new ArrayListMultimap[num_bands];
    setup_bands();
}

/** Allocates one empty multimap per band. */
protected void setup_bands() {
    for (int i = 0; i < num_bands; i++)
        maps[i] = ArrayListMultimap.create();
}

/**
 * Picks the largest band count that divides num_hashes and whose implied
 * collision threshold (1/b)^(b/n) still exceeds the requested threshold.
 */
protected int compute_bands(int num_hashes, double jaccard_threshold) {
    int bands = num_hashes;
    while (bands > 1) {
        if ((num_hashes % bands) == 0) {
            double thresh = Math.pow((double) 1.0 / bands, (double) bands / num_hashes);
            if (thresh > jaccard_threshold)
                break;
        }
        bands--;
    }
    return bands;
}

/**
 * Builds the bucket key for one band by concatenating the band's hash values.
 * BUG FIX: insert() and query() previously used different encodings
 * (Integer.toHexString vs. String.format("%8x") with space padding), so a
 * query could never match an inserted key. Both paths now share this helper.
 * Note: unpadded hex concatenation can in principle alias across rows; kept
 * for compatibility with the insert-side encoding.
 */
private String bandKey(int[] hashes, int band) {
    StringBuilder sb = new StringBuilder();
    for (int r = 0; r < num_rows; r++) {
        sb.append(Integer.toHexString(hashes[band * num_rows + r]));
    }
    return sb.toString();
}

/**
 * Indexes an item under its per-band bucket keys.
 *
 * @param key item identifier
 * @param hashes the item's num_hashes minhash values
 */
public void insert(String key, int[] hashes) {
    for (int b = 0; b < num_bands; b++) {
        maps[b].put(bandKey(hashes, b), key);
    }
}

/**
 * Returns all candidate items sharing at least one band bucket with the
 * given hash vector.
 */
public Set<String> query(int[] hashes) {
    HashSet<String> candidates = new HashSet<String>();
    for (int b = 0; b < num_bands; b++) {
        candidates.addAll(maps[b].get(bandKey(hashes, b)));
    }
    return candidates;
}

/** Smoke-test entry point: builds an index from command-line parameters. */
public static void main(String args[]) {
    int num_hashes = Integer.parseInt(args[0]);
    double threshold = Double.parseDouble(args[1]);
    LSH lsh = new LSH(num_hashes, threshold);
}
} |
Java | public class BenchmarkActivity extends AppCompatActivity implements BenchmarkTask.BenchmarkCallback {
// Adapter receiving benchmark progress rows; also passed to each task.
private BenchmarkRecyclerAdapter adapter;
// Freshly built storage facade, recreated before each benchmark stage.
private Minion minion;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
tintStatusBarIcons(this, true);
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_benchmark);
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
actionBar.setTitle(R.string.benchmark);
actionBar.setDisplayHomeAsUpEnabled(true);
}
adapter = new BenchmarkRecyclerAdapter(this);
LinearLayoutManager layoutManager = new LinearLayoutManager(this,
RecyclerView.VERTICAL, false);
RecyclerView recyclerView = findViewById(R.id.recycler);
recyclerView.setLayoutManager(layoutManager);
recyclerView.setAdapter(adapter);
prepareMinion();
startBenchmark();
}
// Kicks off the benchmark chain: onComplete(0x00) starts the first stage, and
// each finished task calls onComplete() again with its own id to advance.
private void startBenchmark() {
onComplete(0x00);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
finish();
return true;
default:
return false;
}
}
// Rebuilds the in-memory store so each stage starts from a clean state.
private void prepareMinion() {
Writable writable = MemoryStorage.create();
minion = Minion
.lets()
.store(writable)
.sync();
}
// Benchmark-task callback: the finished task's id selects the NEXT stage to
// run (0x00 -> groups creation, ... 0x03 -> items access); ids past the last
// stage end the chain.
@Override
public void onComplete(int id) {
prepareMinion();
Task task;
switch (id) {
case 0x00:
task = new GroupsCreationBenchmarkTask(minion, adapter, this);
break;
case 0x01:
task = new ItemsCreationBenchmarkTask(minion, adapter, this);
break;
case 0x02:
task = new GroupsAccessBenchmarkTask(minion, adapter, this);
break;
case 0x03:
task = new ItemsAccessBenchmarkTask(minion, adapter, this);
break;
default:
return;
}
TaskExecutor
.getInstance()
.execute(task);
}
} |
Java | class DemoStep extends dsFunctions {
// some constants
private static int NUM = 10; // number of bodies
private static int NUMJ = 9; // number of joints
//private static double SIDE = 0.2; // side length of a box
//private static double MASS = 1.0; // mass of a box
private static float RADIUS = 0.1732f; // sphere radius
// dynamics and collision objects
static DWorld world=null;
static DBody[] body=new DBody[NUM];
static DBallJoint[] joint=new DBallJoint[NUMJ];
// create the test system: a fresh world with NUM randomized bodies joined by
// NUMJ ball joints between distinct random pairs, then perturbed positions so
// the joints start with some error for the stepper to correct.
private static void createTest()
{
int i,j;
if (world != null) world.destroy();
world = OdeHelper.createWorld();
// create random bodies
for (i=0; i<NUM; i++) {
// create bodies at random position and orientation
body[i] = OdeHelper.createBody (world);
body[i].setPosition (dRandReal()*2-1,dRandReal()*2-1,
dRandReal()*2+RADIUS);
DQuaternion q = new DQuaternion();
for (j=0; j<4; j++) q.set(j, dRandReal()*2-1);
body[i].setQuaternion (q);
// set random velocity
body[i].setLinearVel (dRandReal()*2-1,dRandReal()*2-1,
dRandReal()*2-1);
body[i].setAngularVel (dRandReal()*2-1,dRandReal()*2-1,
dRandReal()*2-1);
// set random mass (random diagonal mass rotated by a random amount)
DMass m = OdeHelper.createMass();
DMatrix3 R = new DMatrix3();
m.setBox (1,dRandReal()+0.1,dRandReal()+0.1,dRandReal()+0.1);
m.adjust (dRandReal()+1);
for (j=0; j<4; j++) q.set(j, dRandReal()*2-1);
OdeMath.dRfromQ(R, q);
m.rotate (R);
body[i].setMass (m);
}
// create ball-n-socket joints at random positions, linking random bodies
// (but make sure not to link the same pair of bodies twice)
char[] linked=new char[NUM*NUM];
for (i=0; i<NUM*NUM; i++) linked[i] = 0;
for (i=0; i<NUMJ; i++) {
int b1,b2;
do {
b1 = dRandInt (NUM);
b2 = dRandInt (NUM);
} while (linked[b1*NUM + b2]!=0 || b1==b2);
// mark both orderings so the pair is never reused
linked[b1*NUM + b2] = 1;
linked[b2*NUM + b1] = 1;
joint[i] = OdeHelper.createBallJoint (world,null);
joint[i].attach (body[b1],body[b2]);
joint[i].setAnchor (dRandReal()*2-1,
dRandReal()*2-1,dRandReal()*2+RADIUS);
}
for (i=0; i<NUM; i++) {
// move bodies a bit to get some joint error
DVector3C pos = body[i].getPosition();
body[i].setPosition (pos.get(0)+dRandReal()*0.2-0.1,
pos.get(1)+dRandReal()*0.2-0.1,pos.get(2)+dRandReal()*0.2-0.1);
}
}
// camera position and heading/pitch/roll for the viewer
private static float[] xyz = {2.6117f,-1.4433f,2.3700f};
private static float[] hpr = {151.5000f,-30.5000f,0.0000f};
// start simulation - set viewpoint
@Override
public void start()
{
dsSetViewpoint (xyz,hpr);
}
// simulation loop: each unpaused frame applies random forces/torques, steps
// the world once, then rebuilds the whole test system from scratch.
@Override
public void step (boolean pause)
{
if (!pause) {
// add random forces and torques to all bodies
int i;
final double scale1 = 5;
final double scale2 = 5;
for (i=0; i<NUM; i++) {
body[i].addForce (
scale1*(dRandReal()*2-1),
scale1*(dRandReal()*2-1),
scale1*(dRandReal()*2-1));
body[i].addTorque (
scale2*(dRandReal()*2-1),
scale2*(dRandReal()*2-1),
scale2*(dRandReal()*2-1));
}
world.step (0.05);
createTest();
}
// float sides[3] = {SIDE,SIDE,SIDE};
dsSetColor (1,1,0);
for (int i=0; i<NUM; i++)
dsDrawSphere (body[i].getPosition(), body[i].getRotation(), RADIUS);
}
public static void main(String[] args) {
new DemoStep().demo(args);
}
// initializes ODE, seeds the RNG from wall-clock time, runs the interactive
// simulation loop, then tears the world and ODE down.
private void demo(String[] args) {
OdeHelper.initODE2(0);
dRandSetSeed (System.currentTimeMillis()/1000);
createTest();
// run simulation
dsSimulationLoop (args,352,288,this);
world.destroy();
OdeHelper.closeODE();
}
@Override
public void command(char cmd) {
//Nothing
}
@Override
public void stop() {
// Nothing
}
} |
Java | public class ViewHolder extends RecyclerView.ViewHolder {
// All child views are resolved exactly once in the constructor and never
// reassigned, so the fields are final.
private final TextView txt_my_note;
private final TextView txt_identite;
private final TextView txt_avg_note;
private final EditText edit_noter;
private final Button btn_valider;
private final CardView cv_student;
private final FloatingActionButton fab_info;

/**
 * Caches the row's child views so the adapter does not have to repeat
 * findViewById lookups on every bind.
 *
 * @param itemView inflated row layout for one student entry
 */
ViewHolder(View itemView) {
    super(itemView);
    txt_identite = itemView.findViewById(R.id.txt_identite);
    txt_my_note = itemView.findViewById(R.id.txt_my_note);
    txt_avg_note = itemView.findViewById(R.id.txt_avg_note);
    btn_valider = itemView.findViewById(R.id.btn_valider);
    edit_noter = itemView.findViewById(R.id.edit_noter);
    cv_student = itemView.findViewById(R.id.cv_student);
    fab_info = itemView.findViewById(R.id.fab_info);
}
} |
Java | public class ClassDelegateActivityBehavior extends AbstractBpmnActivityBehavior {
protected static final BpmnBehaviorLogger LOG = ProcessEngineLogger.BPMN_BEHAVIOR_LOGGER;
protected String className;
protected List<FieldDeclaration> fieldDeclarations;
public ClassDelegateActivityBehavior(String className, List<FieldDeclaration> fieldDeclarations) {
this.className = className;
this.fieldDeclarations = fieldDeclarations;
}
public ClassDelegateActivityBehavior(Class<?> clazz, List<FieldDeclaration> fieldDeclarations) {
this(clazz.getName(), fieldDeclarations);
}
// Activity Behavior
public void execute(final ActivityExecution execution) throws Exception {
this.executeWithErrorPropagation(execution, new Callable<Void>() {
@Override
public Void call() throws Exception {
getActivityBehaviorInstance(execution).execute(execution);
return null;
}
});
}
// Signallable activity behavior
public void signal(final ActivityExecution execution, final String signalName, final Object signalData) throws Exception {
ProcessApplicationReference targetProcessApplication = ProcessApplicationContextUtil.getTargetProcessApplication((ExecutionEntity) execution);
if(ProcessApplicationContextUtil.requiresContextSwitch(targetProcessApplication)) {
Context.executeWithinProcessApplication(new Callable<Void>() {
public Void call() throws Exception {
signal(execution, signalName, signalData);
return null;
}
}, targetProcessApplication);
}
else {
doSignal(execution, signalName, signalData);
}
}
protected void doSignal(final ActivityExecution execution, final String signalName, final Object signalData) throws Exception {
final ActivityBehavior activityBehaviorInstance = getActivityBehaviorInstance(execution);
if (activityBehaviorInstance instanceof CustomActivityBehavior) {
CustomActivityBehavior behavior = (CustomActivityBehavior) activityBehaviorInstance;
ActivityBehavior delegate = behavior.getDelegateActivityBehavior();
if (!(delegate instanceof SignallableActivityBehavior)) {
throw LOG.incorrectlyUsedSignalException(SignallableActivityBehavior.class.getName() );
}
}
executeWithErrorPropagation(execution, new Callable<Void>() {
@Override
public Void call() throws Exception {
((SignallableActivityBehavior) activityBehaviorInstance).signal(execution, signalName, signalData);
return null;
}
});
}
/**
 * Instantiates the configured delegate class (applying field injections) and adapts it
 * to an {@link ActivityBehavior}: behaviors are wrapped, {@link JavaDelegate}s are
 * adapted, and anything else is rejected with a descriptive exception.
 */
protected ActivityBehavior getActivityBehaviorInstance(ActivityExecution execution) {
    Object delegate = instantiateDelegate(className, fieldDeclarations);
    if (delegate instanceof ActivityBehavior) {
        return new CustomActivityBehavior((ActivityBehavior) delegate);
    }
    if (delegate instanceof JavaDelegate) {
        return new ServiceTaskJavaDelegateActivityBehavior((JavaDelegate) delegate);
    }
    throw LOG.missingDelegateParentClassException(
        delegate.getClass().getName(),
        JavaDelegate.class.getName(),
        ActivityBehavior.class.getName());
}
} |
Java | public class GracedSimulator extends ReactiveBasedAgentSimulator {
private int total;
private int current;
private String currentTablename;
/**
 * Creates the simulator for the given environment and agent.
 *
 * @param environment the environment to simulate
 * @param agent       the agent acting in the environment
 */
public GracedSimulator(Environment environment, Agent agent) {
    super(environment, agent);
    // Start before the first table; the total is fixed by the database environment.
    this.currentTablename = "";
    this.current = 0;
    this.total = getEnvironment().getTablenamesCount();
}
/**
 * Returns the human-readable name of this simulator.
 *
 * @return the display name of the simulator
 */
@Override
public String getSimulatorName() {
    return "Simulador Gr.A.C.E.D";
}
/**
 * Runs a single simulation step for the current table: the agent perceives the next
 * tablename, classifies it, hyphenates it, and the table counter advances by one.
 */
@Override
public void start() {
    // The environment generates a perception for the next table (1-based index).
    TablenamePerception percept = (TablenamePerception) this.getEnvironment().getPercept();
    percept.setIndex( current+1 );
    currentTablename = percept.getTablename().getName();
    // The agent now sees the perception
    this.getAgent().see( percept );
    // First action: classify the tablename.
    ClassifyAction action = (ClassifyAction) this.getAgent().selectAction();
    action.execute( this.getAgent().getAgentState() , this.getEnvironment().getEnvironmentState() );
    // Second action: hyphenate it.
    HyphenationAction haction = (HyphenationAction) this.getAgent().selectAction();
    haction.execute( this.getAgent().getAgentState() , this.getEnvironment().getEnvironmentState() );
    // One more table processed.
    current++;
}
/**
 * Returns the environment of this simulator.
 *
 * @return the database environment, already cast.
 */
public DatabaseEnvironment getEnvironment() {
    return (DatabaseEnvironment) super.getEnvironment();
}
/**
 * Returns the agent of this simulator.
 *
 * @return the one and only graced agent (first in the agent list), already cast.
 */
public GracedAgent getAgent() {
    return (GracedAgent) super.getAgents().get(0);
}
/**
 * Evaluates whether the agent failed. Currently a stub that never reports failure.
 *
 * @param a the action returned by the agent
 * @return always {@code false} until a failure criterion is implemented
 */
@Override
public boolean agentFailed(Action a) {
    // TODO complete agentFailed method.
    return false;
}
/**
 * Called from the main simulation loop when the agent returns an action.
 * Currently a no-op stub.
 *
 * @param agent  the agent that produced the action
 * @param action the action returned
 */
@Override
public void actionReturned(Agent agent, Action action) {
    // TODO complete actionReturned method.
}
/**
 * The simulation succeeds once every tablename in the database has been processed.
 *
 * @param a the action returned by the agent (unused)
 * @return {@code true} when all tables have been handled
 */
@Override
public boolean agentSucceeded(Action a) {
    // TODO: review whether success should also inspect the action.
    return (current == total);
}
/**
 * Getter for the number of the table currently being processed.
 *
 * @return the zero-based index of the current table.
 */
public int getCurrent() {
    return current;
}
/**
 * Getter for the total number of tables.
 *
 * @return the number of total tables in the database.
 */
public int getTotal() {
    return total;
}
/**
 * Getter for the name of the table currently being processed.
 *
 * @return the current tablename, or the empty string before the first step.
 */
public String getTablename() {
    return currentTablename;
}
} |
Java | public class ErraiDevelopmentModeLogHandler extends DevelopmentModeLogHandler implements ErraiLogHandler {
/*
 * Workaround so that the superclass does not overwrite the log level
 * during construction (see the init guard in setLevel).
 */
private boolean init = false;
/**
 * Creates the handler with the given formatter. The {@code init} flag is only set
 * after construction so that setLevel calls made during superclass initialization
 * are ignored (see setLevel).
 */
public ErraiDevelopmentModeLogHandler(final Formatter formatter) {
    setFormatter(formatter);
    init = true;
}
/** Creates the handler with the default {@code ErraiSimpleFormatter}. */
public ErraiDevelopmentModeLogHandler() {
    this(new ErraiSimpleFormatter());
}
/**
 * @return {@code true} unless the shared log level has been set to {@link Level#OFF}.
 */
@Override
public boolean isEnabled() {
    Level current = getLevel();
    return !Level.OFF.equals(current);
}
/**
 * Stores the level in the shared window-scoped variable. Ignored until construction
 * has finished ({@code init == true}) so the superclass cannot overwrite the level.
 */
@Override
public void setLevel(Level newLevel) {
    if (init)
        staticSetLevel(newLevel.getName());
}
/** @return the level parsed from the shared window-scoped variable (defaults to ALL). */
@Override
public Level getLevel() {
    return Level.parse(staticGetLevel());
}
/** JSNI: stores the level name on {@code $wnd} so all handler instances share it. */
public static native void staticSetLevel(String newLevel) /*-{
    $wnd.erraiDevelopmentModeLogHandlerLevel = newLevel;
}-*/;
/** JSNI: reads the shared level name from {@code $wnd}, defaulting to "ALL" when unset. */
public static native String staticGetLevel() /*-{
    if ($wnd.erraiDevelopmentModeLogHandlerLevel === undefined) {
        return "ALL";
    } else {
        return $wnd.erraiDevelopmentModeLogHandlerLevel;
    }
}-*/;
} |
Java | public class ThreeSumTest {
@Test
public void test() {
    ThreeSum solution = new ThreeSum();
    // NOTE(review): this test only prints the result and asserts nothing —
    // TODO add assertions on the expected triplets (presumably {-1,-1,2} and {-1,0,1};
    // confirm against the ThreeSum implementation's ordering).
    List<List<Integer>> res = solution.threeSum(new int[]{-1, 0, 1, 2, -1, -4});
    System.out.println(res);
}
} |
Java | @NodeInfo
public abstract class IntegerConvertNode<OP, REV> extends UnaryNode implements ArithmeticOperation, ConvertNode, ArithmeticLIRLowerable, StampInverter {
@SuppressWarnings("rawtypes") public static final NodeClass<IntegerConvertNode> TYPE = NodeClass.create(IntegerConvertNode.class);
protected final SerializableIntegerConvertFunction<OP> getOp;
protected final SerializableIntegerConvertFunction<REV> getReverseOp;
protected final int inputBits;
protected final int resultBits;
protected interface SerializableIntegerConvertFunction<T> extends Function<ArithmeticOpTable, IntegerConvertOp<T>>, Serializable {
}
/**
 * Creates a conversion node from {@code inputBits} to {@code resultBits}. The node's
 * stamp is derived by folding the conversion op over the input's stamp.
 */
protected IntegerConvertNode(NodeClass<? extends IntegerConvertNode<OP, REV>> c, SerializableIntegerConvertFunction<OP> getOp, SerializableIntegerConvertFunction<REV> getReverseOp, int inputBits,
                int resultBits, ValueNode input) {
    super(c, getOp.apply(ArithmeticOpTable.forStamp(input.stamp(NodeView.DEFAULT))).foldStamp(inputBits, resultBits, input.stamp(NodeView.DEFAULT)), input);
    this.getOp = getOp;
    this.getReverseOp = getReverseOp;
    this.inputBits = inputBits;
    this.resultBits = resultBits;
    // The declared input width must match the value's actual stamp width.
    assert ((PrimitiveStamp) input.stamp(NodeView.DEFAULT)).getBits() == inputBits;
}
/** @return the bit width of the conversion's input value. */
public int getInputBits() {
    return inputBits;
}
/** @return the bit width of the conversion's result value. */
public int getResultBits() {
    return resultBits;
}
/** Looks up the conversion op in the arithmetic table matching {@code forValue}'s stamp. */
protected final IntegerConvertOp<OP> getOp(ValueNode forValue) {
    return getOp.apply(ArithmeticOpTable.forStamp(forValue.stamp(NodeView.DEFAULT)));
}
/** @return the conversion op for this node's current input value. */
@Override
public final IntegerConvertOp<OP> getArithmeticOp() {
    return getOp(getValue());
}
/** Folds the conversion over a constant input value. */
@Override
public Constant convert(Constant c, ConstantReflectionProvider constantReflection) {
    return getArithmeticOp().foldConstant(getInputBits(), getResultBits(), c);
}
/** Applies the reverse conversion (result width back to input width) to a constant. */
@Override
public Constant reverse(Constant c, ConstantReflectionProvider constantReflection) {
    IntegerConvertOp<REV> reverse = getReverseOp.apply(ArithmeticOpTable.forStamp(stamp(NodeView.DEFAULT)));
    return reverse.foldConstant(getResultBits(), getInputBits(), c);
}
/** Computes the result stamp for a (compatible) new input stamp. */
@Override
public Stamp foldStamp(Stamp newStamp) {
    assert newStamp.isCompatible(getValue().stamp(NodeView.DEFAULT));
    return getArithmeticOp().foldStamp(inputBits, resultBits, newStamp);
}
/**
 * Canonicalization: returns a simpler equivalent node (identity or folded constant)
 * when one exists, otherwise this node unchanged.
 */
@Override
public ValueNode canonical(CanonicalizerTool tool, ValueNode forValue) {
    ValueNode folded = findSynonym(getOp(forValue), forValue, inputBits, resultBits, stamp(NodeView.DEFAULT));
    return folded == null ? this : folded;
}
/**
 * Finds a simpler replacement for a conversion: the value itself when input and
 * result widths match, a folded constant when the value is constant, else null.
 */
protected static <T> ValueNode findSynonym(IntegerConvertOp<T> operation, ValueNode value, int inputBits, int resultBits, Stamp stamp) {
    // A conversion to the same width is the identity.
    if (resultBits == inputBits) {
        return value;
    }
    // Constants can be folded eagerly.
    if (value.isConstant()) {
        return ConstantNode.forPrimitive(stamp, operation.foldConstant(inputBits, resultBits, value.asConstant()));
    }
    return null;
}
/** Sign-extending convenience overload of {@link #convert(ValueNode, Stamp, boolean, NodeView)}. */
public static ValueNode convert(ValueNode input, Stamp stamp, NodeView view) {
    return convert(input, stamp, false, view);
}
/** Sign-extending conversion that also adds the resulting node to {@code graph} if it is new. */
public static ValueNode convert(ValueNode input, Stamp stamp, StructuredGraph graph, NodeView view) {
    ValueNode convert = convert(input, stamp, false, view);
    if (!convert.isAlive()) {
        assert !convert.isDeleted();
        convert = graph.addOrUniqueWithInputs(convert);
    }
    return convert;
}
/** Zero-extending convenience overload of {@link #convert(ValueNode, Stamp, boolean, NodeView)}. */
public static ValueNode convertUnsigned(ValueNode input, Stamp stamp, NodeView view) {
    return convert(input, stamp, true, view);
}
/** Zero-extending conversion that also adds the resulting node to {@code graph} if it is new. */
public static ValueNode convertUnsigned(ValueNode input, Stamp stamp, StructuredGraph graph, NodeView view) {
    ValueNode convert = convert(input, stamp, true, view);
    if (!convert.isAlive()) {
        assert !convert.isDeleted();
        convert = graph.addOrUniqueWithInputs(convert);
    }
    return convert;
}
/**
 * Converts {@code input} to the width of {@code stamp}: identity for equal widths,
 * narrow for smaller targets, and zero- or sign-extend (per {@code zeroExtend})
 * for larger targets.
 */
public static ValueNode convert(ValueNode input, Stamp stamp, boolean zeroExtend, NodeView view) {
    IntegerStamp fromStamp = (IntegerStamp) input.stamp(view);
    IntegerStamp toStamp = (IntegerStamp) stamp;
    ValueNode result;
    if (toStamp.getBits() == fromStamp.getBits()) {
        // Same width: no conversion node needed.
        result = input;
    } else if (toStamp.getBits() < fromStamp.getBits()) {
        result = new NarrowNode(input, fromStamp.getBits(), toStamp.getBits());
    } else if (zeroExtend) {
        // toStamp.getBits() > fromStamp.getBits()
        result = ZeroExtendNode.create(input, toStamp.getBits(), view);
    } else {
        // toStamp.getBits() > fromStamp.getBits()
        result = SignExtendNode.create(input, toStamp.getBits(), view);
    }
    IntegerStamp resultStamp = (IntegerStamp) result.stamp(view);
    assert toStamp.getBits() == resultStamp.getBits();
    return result;
}
/** Computes the input stamp implied by a known output stamp (inverse stamp inference). */
@Override
public Stamp invertStamp(Stamp outStamp) {
    return getArithmeticOp().invertStamp(inputBits, resultBits, outStamp);
}
} |
Java | @Controller
public class LivenessReadinessHandler {
private static final Logger LOG = LoggerFactory.getLogger(LivenessReadinessHandler.class);
public Mono<ServerResponse> liveness(ServerRequest serverRequest) {
LOG.debug("liveness check");
return ServerResponse.ok().contentType(MediaType.APPLICATION_JSON).build();
}
public Mono<ServerResponse> readiness(ServerRequest serverRequest) {
LOG.debug("readiness check");
return ServerResponse.ok().contentType(MediaType.APPLICATION_JSON).build();
}
} |
Java | public class DataExchangeFragment extends Fragment {
private final static String TAG = DataExchangeFragment.class.getSimpleName();
private WearManager mWearManager;
private AbstractWearConsumer mWearConsumer;
private TextView mMessageTextView;
/**
 * Creates the wear consumer that listens for navigation messages from the companion
 * wear app and updates the page label accordingly. Registration happens in onResume.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    mWearManager = WearManager.getInstance();
    // We register a listener to be notified when messages arrive while we are on this page.
    // We then filter messages based on their path to identify the ones that report on the
    // navigation in the companion wear app. When such message is discovered, we write the name
    // of new page to the view.
    mWearConsumer = new AbstractWearConsumer() {
        @Override
        public void onWearableMessageReceived(MessageEvent messageEvent) {
            // Only navigation messages are relevant here.
            if (!Constants.NAVIGATION_PATH_WEAR.equals(messageEvent.getPath())) {
                return;
            }
            DataMap dataMap = DataMap.fromByteArray(messageEvent.getData());
            int currentPage = dataMap.getInt(Constants.KEY_PAGE, Constants.TARGET_INTRO);
            writeMessage(currentPage);
        }
        @Override
        public void onWearableDataChanged(DataEventBuffer dataEvents){
            // NOTE(review): log message typo ("occured") left as-is; it is a runtime string.
            Log.d(TAG,"datachanged occured");
        }
    };
}
/** Inflates the page layout and keeps a handle on the message label for later updates. */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
        Bundle savedInstanceState) {
    final View root = inflater.inflate(R.layout.data_exchange, container, false);
    mMessageTextView = (TextView) root.findViewById(R.id.message);
    return root;
}
/**
 * Maps the id of a page to its display name and shows a navigation message for it.
 *
 * <p>May be called from the wear message callback thread; the text update is posted
 * to the UI thread. If the fragment is no longer attached to an activity (a message
 * arrived after onPause/teardown), the update is skipped instead of throwing.
 *
 * @param page one of the {@code Constants.TARGET_*} page ids reported by the wear app
 */
private void writeMessage(final int page) {
    // Guard against a detached fragment: getActivity() returns null and getString()
    // would throw once the fragment is no longer attached.
    if (getActivity() == null) {
        return;
    }
    String pageName = null;
    switch (page) {
        case Constants.TARGET_INTRO:
            // NOTE(review): intentionally leaves pageName null for the intro page —
            // confirm that R.string.page_navigation_info renders acceptably with null.
            break;
        case Constants.TARGET_DATA:
            pageName = getString(R.string.page_data_exchange);
            break;
        case Constants.TARGET_FILE_TRANSFER:
            pageName = getString(R.string.page_file_transfer);
            break;
        case Constants.TARGET_STOCK:
            pageName = getString(R.string.page_stock);
            break;
        case Constants.TARGET_LIST:
            pageName = getString(R.string.page_list);
            break;
        case Constants.TARGET_LIST_DIALOG:
            pageName = getString(R.string.page_list_dialog);
            break;
        case Constants.TARGET_VOICE_STREAM:
            pageName = getString(R.string.page_stream_voice);
            break;
        default:
            pageName = getString(R.string.unknown);
    }
    final String text = getString(R.string.page_navigation_info, pageName);
    getActivity().runOnUiThread(new Runnable() {
        @Override
        public void run() {
            mMessageTextView.setText(text);
        }
    });
}
/** Starts listening for wear messages while this page is visible and records the page id. */
@Override
public void onResume() {
    super.onResume();
    mWearManager.addWearConsumer(mWearConsumer);
    MobileApplication.setPage(Constants.TARGET_DATA);
}
/** Stops listening for wear messages before the fragment is paused. */
@Override
public void onPause() {
    // Unregister first so no callbacks arrive while the fragment winds down.
    mWearManager.removeWearConsumer(mWearConsumer);
    super.onPause();
}
} |
/**
 * Classic rod-cutting problem: find the maximum revenue obtainable by cutting a rod
 * of length n, where price[i] is the price of a piece of length i + 1.
 */
class RodCutting {

    /**
     * Returns the best obtainable price for a rod of length {@code n}.
     *
     * <p>Plain recursion (exponential time); assumes {@code price.length >= n}.
     *
     * @param price price[i] is the price of a piece of length i + 1
     * @param n     rod length; non-positive lengths yield 0
     * @return the maximum obtainable value
     */
    static int cutRod(int price[], int n) {
        if (n <= 0) {
            return 0;
        }
        int maxVal = Integer.MIN_VALUE;
        // Try every first-piece length (i + 1) and recurse on the remainder.
        for (int i = 0; i < n; i++) {
            maxVal = Math.max(maxVal, price[i] + cutRod(price, n - i - 1));
        }
        return maxVal;
    }

    /** Driver program exercising cutRod on the textbook example. */
    public static void main(String args[]) {
        int arr[] = new int[] { 1, 5, 8, 9, 10, 17, 17, 20 };
        int size = arr.length;
        System.out.println("Maximum Obtainable Value is " + cutRod(arr, size));
    }
}
Java | public final class RadixUniverse {
/**
 * Creates a universe from a bootstrap configuration, using its config, discovery
 * epics, and initial network.
 */
public static RadixUniverse create(BootstrapConfig bootstrapConfig) {
    return create(
        bootstrapConfig.getConfig(),
        bootstrapConfig.getDiscoveryEpics(),
        bootstrapConfig.getInitialNetwork()
    );
}
/**
 * Creates a universe with peer discovery through epics, using a freshly created
 * {@link WebSockets} instance.
 *
 * @param config universe config
 * @param discoveryEpics epics which are responsible for peer discovery
 * @param initialNetwork nodes in initial network
 * @return the created universe
 */
public static RadixUniverse create(
    RadixUniverseConfig config,
    List<RadixNetworkEpic> discoveryEpics,
    Set<RadixNode> initialNetwork
) {
    return create(config, discoveryEpics, initialNetwork, new WebSockets());
}
/**
 * Creates a universe with peer discovery through epics.
 *
 * @param config universe config
 * @param discoveryEpics epics which are responsible for peer discovery
 * @param initialNetwork nodes in initial network
 * @param webSockets web sockets
 * @return the created universe
 */
public static RadixUniverse create(
    RadixUniverseConfig config,
    List<RadixNetworkEpic> discoveryEpics,
    Set<RadixNode> initialNetwork,
    WebSockets webSockets
) {
    // Pre-populate the local atom store with the genesis atoms for every address they touch.
    final InMemoryAtomStore inMemoryAtomStore = new InMemoryAtomStore();
    config.getGenesis().forEach(atom ->
        atom.addresses()
            .forEach(addr -> inMemoryAtomStore.store(addr, AtomObservation.stored(atom, config.timestamp())))
    );
    final InMemoryAtomStoreReducer atomStoreReducer = new InMemoryAtomStoreReducer(inMemoryAtomStore);
    // Wire up the network controller: websocket plumbing, JSON-RPC epics, node selection,
    // and finally the caller-supplied discovery epics.
    RadixNetworkControllerBuilder builder = new RadixNetworkControllerBuilder()
        .setNetwork(new RadixNetwork())
        .addInitialNodes(initialNetwork)
        .addReducer(atomStoreReducer::reduce)
        .addEpic(
            new WebSocketsEpicBuilder()
                .setWebSockets(webSockets)
                .add(WebSocketEventsEpic::new)
                .add(ConnectWebSocketEpic::new)
                .add(SubmitAtomEpic::new)
                .add(FetchAtomsEpic::new)
                .add(RadixJsonRpcMethodEpic::createGetLivePeersEpic)
                .add(RadixJsonRpcMethodEpic::createGetNodeDataEpic)
                .add(RadixJsonRpcMethodEpic::createGetUniverseEpic)
                .add(RadixJsonRpcAutoConnectEpic::new)
                .add(RadixJsonRpcAutoCloseEpic::new)
                .build()
        )
        .addEpic(new FindANodeEpic(new RandomSelector()));
    discoveryEpics.forEach(builder::addEpic);
    return new RadixUniverse(config, builder.build(), inMemoryAtomStore);
}
/**
* Network Interface
*/
private final RadixNetworkController networkController;
/**
* Universe Configuration
*/
private final RadixUniverseConfig config;
private final AtomPuller puller;
private final AtomStore atomStore;
private final RRI nativeToken;
/**
 * Private constructor; use one of the static {@code create} factories. Locates the
 * native token by scanning the genesis atoms for the first mutable-supply token
 * definition particle, failing fast if none exists.
 */
private RadixUniverse(RadixUniverseConfig config, RadixNetworkController networkController, AtomStore atomStore) {
    this.config = config;
    this.networkController = networkController;
    this.nativeToken = config.getGenesis().stream()
        .flatMap(atom -> atom.particles(Spin.UP))
        .filter(p -> p instanceof MutableSupplyTokenDefinitionParticle)
        .map(p -> ((MutableSupplyTokenDefinitionParticle) p).getRRI())
        .findFirst()
        .orElseThrow(() -> new IllegalStateException("No Native Token defined in universe"));
    this.atomStore = atomStore;
    this.puller = new RadixAtomPuller(networkController);
}
/** @return the controller managing this universe's network interactions. */
public RadixNetworkController getNetworkController() {
    return networkController;
}
/** @return the RRI of the universe's native token, resolved from genesis at construction. */
public RRI getNativeToken() {
    return nativeToken;
}
/** @return the universe's magic number from its configuration. */
public int getMagic() {
    return config.getMagic();
}
/** @return the atom puller created over this universe's network controller. */
public AtomPuller getAtomPuller() {
    return puller;
}
/** @return the atom store backing this universe (pre-seeded with genesis atoms). */
public AtomStore getAtomStore() {
    return atomStore;
}
/**
 * Returns the system public key, also defined as the creator of this Universe.
 *
 * @return the system public key
 */
public ECPublicKey getSystemPublicKey() {
    return config.getSystemPublicKey();
}
/**
 * Maps a public key to its corresponding Radix address in this universe.
 * Within a universe, a public key has a one-to-one bijective relationship to an address.
 *
 * @param publicKey the key to get an address from
 * @return the corresponding address to the key for this universe
 */
public RadixAddress getAddressFrom(ECPublicKey publicKey) {
    return new RadixAddress(config.getMagicByte(), publicKey);
}
/** @return this universe's configuration. */
public RadixUniverseConfig getConfig() {
    return config;
}
/**
 * Retrieves the fee table for this universe.
 *
 * @return The fee table for the universe.
 */
public FeeTable feeTable() {
    // WARNING: There is a duplicate fee table in TokenFeeModule in core. If you update this
    // fee table, you will need to change the one there also.
    ImmutableList<FeeEntry> feeEntries = ImmutableList.of(
        // 1 millirad per byte after the first three kilobytes
        PerBytesFeeEntry.of(1, 3072, milliRads(1L)),
        // 1,000 millirads per fixed supply token definition
        PerParticleFeeEntry.of(FixedSupplyTokenDefinitionParticle.class, 0, milliRads(1000L)),
        // 1,000 millirads per mutable supply token definition
        PerParticleFeeEntry.of(MutableSupplyTokenDefinitionParticle.class, 0, milliRads(1000L))
    );
    // Minimum fee of 40 millirads
    return FeeTable.from(milliRads(40L), feeEntries);
}
/**
 * Converts a count of millirads into sub-units.
 *
 * @param count number of millirads (10^-3 rads)
 * @return the equivalent value in the token's smallest sub-units
 */
private static UInt256 milliRads(long count) {
    // 1 count is 10^{-3} rads, so we subtract that from the sub-units power
    // No risk of overflow here, as 10^18 is approx 60 bits, plus 64 bits of count will not exceed 256 bits
    return UInt256.TEN.pow(TokenUnitConversions.getTokenScale() - 3).multiply(UInt256.from(count));
}
} |
Java | @RunWith(Arquillian.class)
@RunAsClient
/**
 * Arquillian client-side test verifying MicroProfile Config injection of optional
 * properties into a JAX-RS resource: a missing optional property injects as absent
 * (null response body), a present one injects its configured value.
 */
public class OptionalConfigPropertyInjectionTest {
    // Shared JAX-RS client, created once per test class.
    private static Client client;
    @ArquillianResource
    private URL url;

    /** Builds the deployment containing the resource under test. */
    @Deployment
    public static Archive<?> deploy() {
        return TestEnvironment.createWar(OptionalConfigPropertyInjectionTest.class)
            .addClasses(OptionalConfigPropertyInjectionResource.class)
            .addAsWebInfResource(EmptyAsset.INSTANCE, "beans.xml");
    }

    @BeforeClass
    public static void setup() {
        client = ClientBuilder.newClient();
    }

    @AfterClass
    public static void cleanup() {
        client.close();
    }

    /**
     * @tpTestDetails This test checks injection of optional config properties when:
     * - optional property does not exist
     * - optional property exists
     * @tpSince RESTEasy 4.6.0
     */
    @Test
    public void testOptionalPropertiesInjection() throws Exception {
        // Missing property: the resource returns an empty body, read back as null.
        String missingOptionalPropertyValue = client.target(
            TestEnvironment.generateUri(url, "test-app",
                OptionalConfigPropertyInjectionResource.MISSING_OPTIONAL_PROPERTY_PATH))
            .request(MediaType.TEXT_PLAIN_TYPE)
            .get(String.class);
        Assert.assertNull(missingOptionalPropertyValue);
        // Present property: the configured value is injected and echoed back.
        String presentOptionalPropertyValue = client.target(
            TestEnvironment.generateUri(url, "test-app",
                OptionalConfigPropertyInjectionResource.PRESENT_OPTIONAL_PROPERTY_PATH))
            .request(MediaType.TEXT_PLAIN_TYPE)
            .get(String.class);
        Assert.assertEquals(OptionalConfigPropertyInjectionResource.OPTIONAL_PROPERTY_VALUE, presentOptionalPropertyValue);
    }
}
Java | public class StyleRecordFactory {
private static final Logger LOG = Logger.getLogger(StyleRecordFactory.class);
static {
    // NOTE(review): forces DEBUG logging for this class unconditionally — consider
    // leaving the level to external log configuration instead.
    LOG.setLevel(Level.DEBUG);
}
private StyleRecordSet styleRecordSet;
private boolean normalizeFontNames;
/** Creates a factory with font-name normalization disabled by default. */
public StyleRecordFactory() {
}
/**
 * Builds a style record set from the given SVG text elements, applying font-name
 * normalization when enabled. The result is also kept in the {@code styleRecordSet}
 * field (reset to null first when {@code texts} is null).
 *
 * @param texts the SVG text elements; may be null, in which case null is returned
 * @return the created record set, or null when {@code texts} is null
 */
public StyleRecordSet createStyleRecordSet(List<SVGText> texts) {
    styleRecordSet = null;
    if (texts != null) {
        styleRecordSet = new StyleRecordSet();
        styleRecordSet.setNormalizeFontNames(normalizeFontNames);
        for (SVGText text : texts) {
            styleRecordSet.getOrCreateStyleRecord(text);
        }
        if (normalizeFontNames) {
            styleRecordSet.normalizeFontNamesByStyleAndWeight();
            LOG.debug("normalize");
        }
    }
    return styleRecordSet;
}
/**
 * Reads the SVG file and builds a style record set from all of its text elements.
 *
 * @param svgFile the SVG file to read
 * @return the created record set
 */
public StyleRecordSet createStyleRecordSet(File svgFile) {
    return createStyleRecordSet(SVGElement.readAndCreateSVG(svgFile));
}
/**
 * Builds a style record set from every text element of (and below) the given SVG element.
 *
 * @param svgElement the root element to scan for text
 * @return the created record set
 */
public StyleRecordSet createStyleRecordSet(AbstractCMElement svgElement) {
    List<SVGText> texts = SVGText.extractSelfAndDescendantTexts(svgElement);
    return createStyleRecordSet(texts);
}
/** @return whether created record sets normalize font names by style and weight. */
public boolean isNormalizeFontNames() {
    return normalizeFontNames;
}
/** Enables or disables font-name normalization for subsequently created record sets. */
public void setNormalizeFontNames(boolean normalizeFontNames) {
    this.normalizeFontNames = normalizeFontNames;
}
} |
Java | public class LogEntryHeader {
/**
* Persistent fields. Layout on disk is
* (invariant) checksum - 4 bytes
* (invariant) entry type - 1 byte
* (invariant) entry flags - 1 byte
* (invariant) offset of previous log entry - 4 bytes
* (invariant) item size (not counting header size) - 4 bytes
* (optional) vlsn - 8 bytes
*
* Flags:
* The provisional bit can be set for any log type in the log. It's an
* indication to recovery that the entry shouldn't be processed when
* rebuilding the tree. See com.sleepycat.je.log.Provisional.java for
* the reasons why it's set.
*
* The replicated bit is set when this particular log entry is
* part of the replication stream and contains a VLSN in the header.
*
* The invisible bit is set when this log entry has been rolled back as
* part of replication syncup. The ensuing log entry has not been
* checksum-corrected, and to read it, the invisible bit must be cloaked.
*/
/* The invariant size of the log entry header. */
public static final int MIN_HEADER_SIZE = 14;
/* Only used for tests and asserts. */
public static final int MAX_HEADER_SIZE = MIN_HEADER_SIZE + VLSN.LOG_SIZE;
public static final int CHECKSUM_BYTES = 4;
static final int ENTRYTYPE_OFFSET = 4;
static final int FLAGS_OFFSET = 5;
private static final int PREV_OFFSET = 6;
private static final int ITEMSIZE_OFFSET = 10;
public static final int VLSN_OFFSET = MIN_HEADER_SIZE;
/*
* Flags defined in the entry header.
*
* WARNING: Flags may not be defined or used in the entry header of the
* FileHeader. All flags defined here may only be used in log entries
* other than the FileHeader. [#16939]
*/
private static final byte PROVISIONAL_ALWAYS_MASK = (byte) 0x80;
private static final byte PROVISIONAL_BEFORE_CKPT_END_MASK = (byte) 0x40;
private static final byte REPLICATED_MASK = (byte) 0x20;
private static final byte INVISIBLE = (byte) 0x10;
private static final byte IGNORE_INVISIBLE = ~INVISIBLE;
/* Flags stored in version byte for logVersion 6 and below.*/
private static final byte VERSION_6_FLAGS =
PROVISIONAL_ALWAYS_MASK |
PROVISIONAL_BEFORE_CKPT_END_MASK |
REPLICATED_MASK;
private static final byte IGNORE_VERSION_6_FLAGS = ~VERSION_6_FLAGS;
private static final byte FILE_HEADER_TYPE_NUM =
LogEntryType.LOG_FILE_HEADER.getTypeNum();
private long checksumVal; // stored in 4 bytes as an unsigned int
private final byte entryType;
private long prevOffset;
private final int itemSize;
private VLSN vlsn;
/*
* Prior to log version 6, a type-specific version was stored in each
* entry, and was packed together with the flags in a single byte.
*
* For version 6, we changed to use a global version (not type specific),
* but it was stored in each entry, packed with the flags as in earlier
* versions, as well as being stored redundantly in the FileHeader. The
* entry header and file header versions are always the same for all
* entries in a file. We flip the log file to guarantee this, when running
* for the first time with an upgraded JE with a new log version.
*
* For version 7 and above, the version is stored only in the FileHeader,
* freeing the space formerly taken by the version in each entry for use
* by flag bits. The version is not stored in each entry; however, the
* version is still maintained in this in-memory object for two reasons:
*
* 1. When reading log files prior to version 6, each entry potentially has
* a different version.
* 2. Convenience of access to the version when processing log entries.
*
* [#16939]
*/
private int entryVersion;
/* Version flag fields */
private Provisional provisional;
private boolean replicated;
private boolean invisible;
/**
* For reading a log entry.
*
* @param entryBuffer the buffer containing at least the first
* MIN_HEADER_SIZE bytes of the entry header.
*
* @param logVersion is the log version of the file that contains the given
* buffer, and is obtained from the file header. Note that for the file
* header entry itself, UNKNOWN_FILE_HEADER_VERSION may be passed.
*/
public LogEntryHeader(ByteBuffer entryBuffer, int logVersion)
    throws ChecksumException {

    assert logVersion == LogEntryType.UNKNOWN_FILE_HEADER_VERSION ||
        (logVersion >= LogEntryType.FIRST_LOG_VERSION &&
         logVersion <= LogEntryType.LOG_VERSION) : logVersion;

    // Invariant portion: checksum (4), type (1), flags (1), prev offset (4), size (4).
    checksumVal = LogUtils.readUnsignedInt(entryBuffer);
    entryType = entryBuffer.get();
    if (!LogEntryType.isValidType(entryType)) {
        throw new ChecksumException("Read invalid log entry type: " +
            entryType);
    }

    if (entryType == FILE_HEADER_TYPE_NUM) {
        /* Actual version will be set by setFileHeaderVersion. */
        entryVersion = LogEntryType.UNKNOWN_FILE_HEADER_VERSION;
        /* Discard flags byte: none are allowed for the file header. */
        entryBuffer.get();
        initFlags(0);
    } else {
        assert logVersion != LogEntryType.UNKNOWN_FILE_HEADER_VERSION :
            logVersion;
        if (logVersion <= 6) {
            /* Before version 7, flags and version were packed together. */
            entryVersion = entryBuffer.get();
            initFlags(entryVersion & VERSION_6_FLAGS);
            entryVersion &= IGNORE_VERSION_6_FLAGS;
            /* For log version 6, the entry version should always match. */
            assert (logVersion == 6) ? (entryVersion == 6) : true;
        } else {
            /* For log version 7+, only flags are stored in the entry. */
            entryVersion = logVersion;
            initFlags(entryBuffer.get());
        }
    }
    prevOffset = LogUtils.readUnsignedInt(entryBuffer);
    itemSize = LogUtils.readInt(entryBuffer);
}
/**
 * For writing a log header. public for unit tests.
 *
 * The checksum, prev offset, and VLSN are filled in later by writeToLog /
 * addPostMarshallingInfo.
 */
public LogEntryHeader(LogEntry entry,
                      Provisional provisional,
                      ReplicationContext repContext) {
    LogEntryType logEntryType = entry.getLogType();
    entryType = logEntryType.getTypeNum();
    entryVersion = LogEntryType.LOG_VERSION;
    this.itemSize = entry.getSize();
    this.provisional = provisional;

    // A non-replicable entry type must never appear in the replication stream.
    assert (!((!logEntryType.isReplicationPossible()) &&
        repContext.inReplicationStream())) :
        logEntryType + " should never be replicated.";
    if (logEntryType.isReplicationPossible()) {
        this.replicated = repContext.inReplicationStream();
    } else {
        this.replicated = false;
    }
    invisible = false;
}
/**
 * For reading a replication message. The node-specific parts of the header
 * are not needed (no checksum or prev offset); the entry is always replicated
 * and never provisional.
 */
public LogEntryHeader(byte entryType,
                      int entryVersion,
                      int itemSize,
                      VLSN vlsn) {
    assert ((vlsn != null) && !vlsn.isNull()) :
        "vlsn = " + vlsn;
    this.entryType = entryType;
    this.entryVersion = entryVersion;
    this.itemSize = itemSize;
    this.vlsn = vlsn;
    replicated = true;
    provisional = Provisional.NO;
}
/** Decodes the flags byte into the provisional, replicated, and invisible fields. */
private void initFlags(int entryFlags) {
    if ((entryFlags & PROVISIONAL_ALWAYS_MASK) != 0) {
        provisional = Provisional.YES;
    } else if ((entryFlags & PROVISIONAL_BEFORE_CKPT_END_MASK) != 0) {
        provisional = Provisional.BEFORE_CKPT_END;
    } else {
        provisional = Provisional.NO;
    }
    replicated = ((entryFlags & REPLICATED_MASK) != 0);
    invisible = ((entryFlags & INVISIBLE) != 0);
}
/**
* Called to set the version for a file header entry after reading the
* version from the item data. See FileHeaderEntry.readEntry. [#16939]
*/
public void setFileHeaderVersion(final int logVersion) {
    // Only meaningful for the FileHeader entry, whose version lives in its item data.
    entryVersion = logVersion;
}
/** @return the checksum value read from, or computed for, this entry's header. */
public long getChecksum() {
    return checksumVal;
}
/** @return the log entry type number. */
public byte getType() {
    return entryType;
}
/** @return the log version associated with this entry (see the version notes above). */
public int getVersion() {
    return entryVersion;
}
/** @return the file offset of the previous log entry. */
public long getPrevOffset() {
    return prevOffset;
}
/** @return the size of the entry item, not counting the header itself. */
public int getItemSize() {
    return itemSize;
}
/** @return the VLSN for replicated entries, or null when none has been read/assigned. */
public VLSN getVLSN() {
    return vlsn;
}
/** @return whether this entry is part of the replication stream (and carries a VLSN). */
public boolean getReplicated() {
    return replicated;
}
/** @return the provisional status recovery should honor for this entry. */
public Provisional getProvisional() {
    return provisional;
}
/** @return whether this entry has been rolled back (invisible flag set). */
public boolean isInvisible() {
    return invisible;
}
/** @return the size of the header's variable (VLSN) portion. */
public int getVariablePortionSize() {
    return VLSN.LOG_SIZE;
}
/**
 * @return number of bytes used to store this header; replicated entries
 * additionally carry a VLSN after the invariant portion.
 */
public int getSize() {
    return replicated ? MIN_HEADER_SIZE + VLSN.LOG_SIZE : MIN_HEADER_SIZE;
}
/**
 * @return the number of bytes used to store the header, excepting
 * the checksum field.
 */
int getSizeMinusChecksum() {
    return getSize() - CHECKSUM_BYTES;
}
/**
 * @return the number of bytes in the invariant (fixed-size) portion of the
 * header, excepting the checksum field.
 */
int getInvariantSizeMinusChecksum() {
    return MIN_HEADER_SIZE - CHECKSUM_BYTES;
}
/**
 * Assumes this is called directly after the constructor, and that the
 * entryBuffer is positioned right before the VLSN. Non-replicated entries
 * have no variable portion, so nothing is read for them.
 */
public void readVariablePortion(ByteBuffer entryBuffer) {
    if (replicated) {
        vlsn = new VLSN();
        vlsn.readFromLog(entryBuffer, entryVersion);
    }
}
/**
* Serialize this object into the buffer and leave the buffer positioned in
* the right place to write the following item. The checksum, prevEntry,
* and vlsn values will filled in later on.
*
* public for unit tests.
*/
public void writeToLog(ByteBuffer entryBuffer) {

    /* Skip over the checksumVal, proceed to the entry type. */
    entryBuffer.position(ENTRYTYPE_OFFSET);
    entryBuffer.put(entryType);

    /*
     * Flags. NOTE(review): the INVISIBLE bit is not serialized here — presumably
     * invisibility is only applied to already-written entries during rollback;
     * confirm before relying on it at write time.
     */
    byte flags = 0;
    if (provisional == Provisional.YES) {
        flags |= PROVISIONAL_ALWAYS_MASK;
    } else if (provisional == Provisional.BEFORE_CKPT_END) {
        flags |= PROVISIONAL_BEFORE_CKPT_END_MASK;
    }
    if (replicated) {
        flags |= REPLICATED_MASK;
    }
    entryBuffer.put(flags);

    /*
     * Leave room for the prev offset, which must be added under
     * the log write latch. Proceed to write the item size.
     */
    entryBuffer.position(ITEMSIZE_OFFSET);
    LogUtils.writeInt(entryBuffer, itemSize);

    /*
     * Leave room for a VLSN if needed, must also be generated
     * under the log write latch.
     */
    if (replicated) {
        entryBuffer.position(entryBuffer.position() + VLSN.LOG_SIZE);
    }
}
/**
* Add those parts of the header that must be calculated later to the
* entryBuffer, and also assign the fields in this class.
* That's
* - the prev offset, which must be done within the log write latch to
* be sure what that lsn is
* - the VLSN, for the same reason
* - the checksumVal, which must be added last, after all other
* fields are marshalled.
* (public for unit tests)
*/
public ByteBuffer addPostMarshallingInfo(EnvironmentImpl envImpl,
                                         ByteBuffer entryBuffer,
                                         long lastOffset,
                                         ReplicationContext repContext) {

    /* Add the prev pointer */
    prevOffset = lastOffset;
    entryBuffer.position(PREV_OFFSET);
    LogUtils.writeUnsignedInt(entryBuffer, prevOffset);

    /* Add the optional VLSN (generated or taken from the replication context). */
    if (repContext.inReplicationStream()) {
        entryBuffer.position(VLSN_OFFSET);
        if (repContext.mustGenerateVLSN()) {
            vlsn = envImpl.bumpVLSN();
        } else {
            vlsn = repContext.getClientVLSN();
        }
        vlsn.writeToLog(entryBuffer);
    }

    /*
     * Now calculate the checksumVal and write it into the buffer. Be sure
     * to set the field in this instance, for use later when printing or
     * debugging the header. The checksum covers everything after its own
     * four bytes.
     */
    Checksum checksum = Adler32.makeChecksum();
    checksum.update(entryBuffer.array(),
                    entryBuffer.arrayOffset() + CHECKSUM_BYTES,
                    entryBuffer.limit() - CHECKSUM_BYTES);
    entryBuffer.position(0);
    checksumVal = checksum.getValue();
    LogUtils.writeUnsignedInt(entryBuffer, checksumVal);

    /* Leave this buffer ready for copying into another buffer. */
    entryBuffer.position(0);
    return entryBuffer;
}
/**
* @param sb destination string buffer
* @param verbose if true, dump the full, verbose version
*/
public void dumpLog(StringBuilder sb, boolean verbose) {
    // Wraps the attribute dump produced by dumpLogNoTag in an <hdr .../> element.
    sb.append("<hdr ");
    dumpLogNoTag(sb, verbose);
    sb.append("\"/>");
}
/**
 * Dump the header without enclosing <header> tags. Used for
 * DbPrintLog, to make the header attributes in the <entry> tag, for
 * a more compact rendering.
 *
 * Quoting convention: each attribute's closing quote is emitted as the
 * prefix of the NEXT append, and the final attribute's quote is closed
 * by the caller (see dumpLog). That is why several literals below begin
 * with {@code \"}.
 *
 * @param sb destination string buffer
 * @param verbose if true, dump the full, verbose version
 */
void dumpLogNoTag(StringBuilder sb, boolean verbose) {
    LogEntryType lastEntryType = LogEntryType.findType(entryType);

    /* type="<name>/<version> -- quote closed by a later append. */
    sb.append("type=\"").append(lastEntryType.toStringNoVersion()).
        append("/").append(entryVersion);
    if (provisional != Provisional.NO) {
        sb.append("\" prov=\"");
        sb.append(provisional);
    }

    /*
     * No need to show isReplicated bit; the presence of a VLSN indicates
     * that the entry is replicated.
     */
    if (vlsn != null) {
        sb.append("\" ");
        vlsn.dumpLog(sb, verbose);
    } else {
        sb.append("\"");
    }
    if (isInvisible()) {
        sb.append(" isInvisible=\"1\"");
    }

    /* prev attribute's quote is closed by the caller. */
    sb.append(" prev=\"0x").append(Long.toHexString(prevOffset));
    if (verbose) {
        sb.append("\" size=\"").append(itemSize);
        sb.append("\" cksum=\"").append(checksumVal);
    }
}
/**
 * For use in special case where commits are transformed to aborts because
 * of i/o errors during a logBuffer flush. See [11271].
 *
 * Assumes that the entryBuffer is positioned at the start of the item.
 * Return with the entryBuffer positioned to the end of the log entry.
 *
 * NOTE(review): after the final writeUnsignedInt the buffer is positioned
 * just past the checksum field at the start of the header, which does not
 * obviously match the "end of the log entry" claim above — confirm
 * against callers.
 */
void convertCommitToAbort(ByteBuffer entryBuffer) {
    assert (entryType == LogEntryType.LOG_TXN_COMMIT.getTypeNum());

    /* Remember the start of the entry item. */
    int itemStart = entryBuffer.position();

    /* Back up to where the type is stored and change the type. */
    int entryTypePosition =
        itemStart - (getSize() - ENTRYTYPE_OFFSET);
    entryBuffer.position(entryTypePosition);
    entryBuffer.put(LogEntryType.LOG_TXN_ABORT.getTypeNum());

    /*
     * Recalculate the checksum. This byte buffer could be large,
     * so don't just turn the whole buffer into an array to pass
     * into the checksum object. The checksummed region runs from the
     * entry-type byte through the end of the item.
     */
    Checksum checksum = Adler32.makeChecksum();
    int checksumSize = itemSize + (getSize() - CHECKSUM_BYTES);
    checksum.update(entryBuffer.array(),
                    entryTypePosition + entryBuffer.arrayOffset(),
                    checksumSize);

    /* Overwrite the stored checksum at the start of the header. */
    entryBuffer.position(itemStart - getSize());
    checksumVal = checksum.getValue();
    LogUtils.writeUnsignedInt(entryBuffer, checksumVal);
}
/**
 * Returns the verbose header dump, exactly as produced by
 * {@code dumpLog(sb, true)}.
 */
@Override
public String toString() {
    final StringBuilder buf = new StringBuilder();
    dumpLog(buf, true /* verbose */);
    return buf.toString();
}
/*
 * Dump only the parts of the header that apply for replicated entries.
 */
public void dumpRep(StringBuilder sb) {
    LogEntryType lastEntryType = LogEntryType.findType(entryType);

    sb.append(lastEntryType.toStringNoVersion()).
        append("/").append(entryVersion);

    if (vlsn != null) {
        sb.append(" vlsn=" ).append(vlsn);
    } else {
        /*
         * NOTE(review): this stray quote looks copied from
         * dumpLogNoTag's attribute formatting; confirm it is intended
         * in this plain (non-XML) rendering.
         */
        sb.append("\"");
    }

    if (isInvisible()) {
        /* NOTE(review): no separator precedes "invisible=1" — confirm. */
        sb.append( "invisible=1");
    }
}
/**
 * Returns true if these two log headers are logically the same.
 * Used for replication.
 *
 * Note that item size is not part of the logical equality, because
 * on-disk compression can make itemSize vary if the entry has VLSNs
 * that were packed differently.
 *
 * @param other the header to compare against
 * @return true when type, version, VLSN, replicated flag and invisible
 *         flag all match
 */
public boolean logicalEquals(LogEntryHeader other) {
    if (getType() != other.getType()) {
        return false;
    }
    if (getVersion() != other.getVersion()) {
        return false;
    }
    if (!getVLSN().equals(other.getVLSN())) {
        return false;
    }
    if (getReplicated() != other.getReplicated()) {
        return false;
    }
    return isInvisible() == other.isInvisible();
}
/**
 * May be called after reading MIN_HEADER_SIZE bytes to determine
 * whether more bytes (getVariablePortionSize) should be read.
 *
 * @return true when this header has a variable-length portion
 */
public boolean isVariableLength() {
    /* Currently only replicated entries carry the extra, variable part. */
    return this.replicated;
}
/**
 * Sets the invisible bit in the given log entry flags.
 *
 * @param flags the current flag byte
 * @return the flags with the INVISIBLE bit turned on
 */
static byte makeInvisible(byte flags) {
    /* Bitwise-or promotes the operands to int, so narrow back to byte. */
    return (byte) (flags | INVISIBLE);
}
/**
 * Turn off the invisible bit in the byte buffer which backs this log entry
 * header. All other flag bits are preserved.
 *
 * @param buffer the buffer backing the log entry header
 * @param logHeaderStartPosition the byte position of the start of the log
 * entry header.
 */
public static void turnOffInvisible(ByteBuffer buffer,
                                    int logHeaderStartPosition) {
    int flagsPosition = logHeaderStartPosition + FLAGS_OFFSET;
    /*
     * Read the stored flags, clear only the INVISIBLE bit, and write the
     * result back with an absolute put (the buffer's position is never
     * disturbed).
     */
    byte cleared = (byte) (buffer.get(flagsPosition) & IGNORE_INVISIBLE);
    buffer.put(flagsPosition, cleared);
}
} |
Java | public class ScalingFilters {
/**
* The key prefix to use for tag-based filters in the EC2 API. See the
* <a href=
* "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeInstances.html"
* >EC2 API reference</a>.
*/
private static final String TAG_FILTER_KEY_PREFIX = "tag:";
/**
* The filter key used to filter instances with a given value for the cloud
* pool tag.
*/
public static final String CLOUD_POOL_TAG_FILTER = TAG_FILTER_KEY_PREFIX + ScalingTags.CLOUD_POOL_TAG;
/**
* The filter key to use for instance state-based filters in the EC2 API.
* See the <a href=
* "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeInstances.html"
* >EC2 API reference</a>.
*/
public static final String INSTANCE_STATE_FILTER = "instance-state-name";
/** A filter key for filtering spot requests on state. */
public static final String SPOT_REQUEST_STATE_FILTER = "state";
/** A filter key for filtering spot requests by identifier. */
public static final String SPOT_REQUEST_ID_FILTER = "spot-instance-request-id";
} |
Java | public class ValueEqualComparator<T> implements Comparator {
ValueRetriever<T> retriever;
public ValueEqualComparator(ValueRetriever<T> retriever) {
this.retriever = retriever;
}
public boolean matches(Event event1, Event event2) {
return retriever.getValue(event1).equals(retriever.getValue(event2));
}
} |
Java | public class TFloat extends Type {
public TFloat(Calculator calculator) {
super(calculator);
}
@Override
public void fillConverter(Converter converter) {
converter.addConverter(Float.class, null, value -> 0f);
converter.addConverter(Float.class, Integer.class, value -> (float) (int) value);
converter.addConverter(Float.class, Long.class, value -> (float) Math.toIntExact((long) value));
converter.addConverter(Float.class, Float.class, value -> value);
converter.addConverter(Float.class, Double.class, value -> (float) (double) value);
converter.addConverter(Float.class, Character.class, value -> (float) (char) value);
converter.addConverter(Float.class, Boolean.class, value -> ((boolean) value) ? 1f : -1f);
converter.addConverter(Float.class, String.class, value -> Float.parseFloat((String) value));
}
@Override
public Class<?> forClass() {
return Float.class;
}
@Override
public Float mul(Object v1, Object v2) {
Float value1 = calculator.toType(Float.class, v1);
Float value2 = calculator.toType(Float.class, v2);
return value1 * value2;
}
@Override
public Float div(Object v1, Object v2) {
Float value1 = calculator.toType(Float.class, v1);
Float value2 = calculator.toType(Float.class, v2);
return value1 / value2;
}
@Override
public Float plus(Object v1, Object v2) {
Float value1 = calculator.toType(Float.class, v1);
Float value2 = calculator.toType(Float.class, v2);
return value1 + value2;
}
@Override
public Float minus(Object v1, Object v2) {
Float value1 = calculator.toType(Float.class, v1);
Float value2 = calculator.toType(Float.class, v2);
return value1 - value2;
}
@Override
public boolean equal(Object v1, Object v2) {
Float value1 = calculator.toType(Float.class, v1);
Float value2 = calculator.toType(Float.class, v2);
return value1.floatValue() == value2.floatValue();
}
@Override
public boolean notequal(Object v1, Object v2) {
Float value1 = calculator.toType(Float.class, v1);
Float value2 = calculator.toType(Float.class, v2);
return value1.floatValue() != value2.floatValue();
}
@Override
public boolean great(Object v1, Object v2) {
Float value1 = calculator.toType(Float.class, v1);
Float value2 = calculator.toType(Float.class, v2);
return value1 > value2;
}
@Override
public boolean greatEqual(Object v1, Object v2) {
Float value1 = calculator.toType(Float.class, v1);
Float value2 = calculator.toType(Float.class, v2);
return value1 >= value2;
}
@Override
public boolean less(Object v1, Object v2) {
Float value1 = calculator.toType(Float.class, v1);
Float value2 = calculator.toType(Float.class, v2);
return value1 < value2;
}
@Override
public boolean lessEqual(Object v1, Object v2) {
Float value1 = calculator.toType(Float.class, v1);
Float value2 = calculator.toType(Float.class, v2);
return value1 <= value2;
}
} |
Java | final public class Add extends AbstractOperation {
/**
* Create a new add operation. The source range must be supplied.
*
* @param sourceRange
*/
private Add(SourceRange sourceRange, Operation... ops) {
super(sourceRange, ops);
assert (ops.length == 2);
}
/**
* Factory class for creating a new Add operation. Because of optimization,
* this class may actually return an element if the result can be determined
* at compile time.
*
* @param sourceRange
* location of this operation in the source
* @param ops
* arguments for this operation
*
* @return optimized Operation for this Add expression
*/
public static Operation newOperation(SourceRange sourceRange,
Operation... ops) throws SyntaxException {
assert (ops.length == 2);
Operation result = null;
// Attempt to optimize this operation.
if (ops[0] instanceof Element && ops[1] instanceof Element) {
try {
Property a = (Property) ops[0];
Property b = (Property) ops[1];
result = execute(sourceRange, a, b);
} catch (ClassCastException cce) {
throw new EvaluationException(MessageUtils
.format(MSG_INVALID_ARGS_ADD), sourceRange);
} catch (EvaluationException ee) {
throw SyntaxException.create(sourceRange, ee);
}
} else {
result = new Add(sourceRange, ops);
}
return result;
}
/**
* Perform the addition of the two top values on the data stack in the given
* DMLContext. This method will handle long, double, and string values.
* Exceptions are thrown if the types of the arguments do not match or they
* are another type.
*/
@Override
public Element execute(Context context) {
try {
Element[] args = calculateArgs(context);
Property a = (Property) args[0];
Property b = (Property) args[1];
return execute(sourceRange, a, b);
} catch (ClassCastException cce) {
throw new EvaluationException(MessageUtils
.format(MSG_INVALID_ARGS_ADD), sourceRange);
}
}
/**
* Do the actual addition.
*/
private static Element execute(SourceRange sourceRange, Property a,
Property b) {
assert (a != null);
assert (b != null);
Element result = null;
if ((a instanceof LongProperty) && (b instanceof LongProperty)) {
long l1 = ((Long) a.getValue()).longValue();
long l2 = ((Long) b.getValue()).longValue();
result = LongProperty.getInstance(l1 + l2);
} else if ((a instanceof NumberProperty)
&& (b instanceof NumberProperty)) {
double d1 = ((NumberProperty) a).doubleValue();
double d2 = ((NumberProperty) b).doubleValue();
result = DoubleProperty.getInstance(d1 + d2);
} else if ((a instanceof StringProperty)
&& (b instanceof StringProperty)) {
String s1 = (String) a.getValue();
String s2 = (String) b.getValue();
result = StringProperty.getInstance(s1 + s2);
} else {
throw new EvaluationException(MessageUtils
.format(MSG_MISMATCHED_ARGS_ADD), sourceRange);
}
return result;
}
} |
Java | @Getter
@Setter
@ToString(
callSuper = true,
doNotUseGetters = true
)
@Entity
@EntityListeners(
{
// Can't decouple through interface or abstract class or configuration, only concrete classes work.
JobEntityListener.class
}
)
@Table(name = "jobs")
public class JobEntity extends BaseEntity implements
FinishedJobProjection,
JobProjection,
JobRequestProjection,
JobMetadataProjection,
JobExecutionProjection,
JobApplicationsProjection,
JobClusterProjection,
JobCommandProjection,
JobSearchProjection,
V4JobRequestProjection,
JobSpecificationProjection,
JobArchiveLocationProjection,
IsV4JobProjection,
JobApiProjection,
StatusProjection {
private static final long serialVersionUID = 2849367731657512224L;
// TODO: Drop this column once search implemented via better mechanism
@Basic
@Column(name = "tags", length = 1024, updatable = false)
@Size(max = 1024, message = "Max length in database is 1024 characters")
@Getter(AccessLevel.PACKAGE)
@Setter(AccessLevel.NONE)
private String tagSearchString;
// TODO: Drop this column once all jobs run via Agent
@Basic
@Column(name = "genie_user_group", updatable = false)
@Size(max = 255, message = "Max length in database is 255 characters")
private String genieUserGroup;
@Basic(optional = false)
@Column(name = "archiving_disabled", nullable = false, updatable = false)
private boolean archivingDisabled;
@Basic
@Column(name = "email", updatable = false)
@Email
@Size(max = 255, message = "Max length in database is 255 characters")
private String email;
@Basic
@Column(name = "requested_cpu", updatable = false)
@Min(value = 1, message = "Can't have less than 1 CPU")
private Integer requestedCpu;
@Basic
@Column(name = "requested_memory", updatable = false)
@Min(value = 1, message = "Can't have less than 1 MB of memory allocated")
private Integer requestedMemory;
@Basic
@Column(name = "requested_timeout", updatable = false)
@Min(value = 1)
private Integer requestedTimeout;
@Basic
@Column(name = "grouping", updatable = false)
@Size(max = 255, message = "Max length in database is 255 characters")
private String grouping;
@Basic
@Column(name = "grouping_instance", updatable = false)
@Size(max = 255, message = "Max length in database is 255 characters")
private String groupingInstance;
@Basic
@Column(name = "request_api_client_hostname", updatable = false)
@Size(max = 255, message = "Max length in database is 255 characters")
private String requestApiClientHostname;
@Basic
@Column(name = "request_api_client_user_agent", length = 1024, updatable = false)
@Size(max = 1024, message = "Max length in database is 1024 characters")
private String requestApiClientUserAgent;
@Basic
@Column(name = "request_agent_client_hostname", updatable = false)
@Size(max = 255, message = "Max length in database is 255 characters")
private String requestAgentClientHostname;
@Basic
@Column(name = "request_agent_client_version", updatable = false)
@Size(max = 255, message = "Max length in database is 255 characters")
private String requestAgentClientVersion;
@Basic
@Column(name = "request_agent_client_pid", updatable = false)
@Min(value = 0, message = "Agent Client Pid can't be less than zero")
private Integer requestAgentClientPid;
@Basic
@Column(name = "num_attachments", updatable = false)
@Min(value = 0, message = "Can't have less than zero attachments")
private Integer numAttachments;
@Basic
@Column(name = "total_size_of_attachments", updatable = false)
@Min(value = 0, message = "Can't have less than zero bytes total attachment size")
private Long totalSizeOfAttachments;
@Basic
@Column(name = "std_out_size")
@Min(value = 0, message = "Can't have less than zero bytes for std out size")
private Long stdOutSize;
@Basic
@Column(name = "std_err_size")
@Min(value = 0, message = "Can't have less than zero bytes for std err size")
private Long stdErrSize;
@Basic
@Column(name = "cluster_name")
@Size(max = 255, message = "Max length in database is 255 characters")
private String clusterName;
@Basic
@Column(name = "command_name")
@Size(max = 255, message = "Max length in database is 255 characters")
private String commandName;
@Basic
@Column(name = "status_msg")
@Size(max = 255, message = "Max length in database is 255 characters")
private String statusMsg;
@Basic
@Column(name = "started")
private Instant started;
@Basic
@Column(name = "finished")
private Instant finished;
@Basic
@Column(name = "agent_hostname")
@Size(max = 255, message = "An agent hostname can be no longer than 255 characters")
private String agentHostname;
@Basic
@Column(name = "agent_version")
@Size(max = 255, message = "An agent version can be no longer than 255 characters")
private String agentVersion;
@Basic
@Column(name = "agent_pid")
@Min(0)
private Integer agentPid;
@Basic
@Column(name = "process_id")
private Integer processId;
@Basic
@Column(name = "check_delay")
@Min(1)
private Long checkDelay;
@Basic
@Column(name = "exit_code")
private Integer exitCode;
@Basic
@Column(name = "memory_used")
private Integer memoryUsed;
@Basic
@Column(name = "archive_location", length = 1024)
@Size(max = 1024, message = "Max length in database is 1024 characters")
private String archiveLocation;
@Basic
@Column(name = "requested_archive_location_prefix", length = 1024)
@Size(max = 1024, message = "Max length in database is 1024 characters")
private String requestedArchiveLocationPrefix;
@Basic(optional = false)
@Column(name = "interactive", nullable = false, updatable = false)
private boolean interactive;
@Basic(optional = false)
@Column(name = "resolved", nullable = false)
private boolean resolved;
@Basic(optional = false)
@Column(name = "claimed", nullable = false)
private boolean claimed;
@Basic(optional = false)
@Column(name = "v4", nullable = false)
private boolean v4;
@Basic
@Column(name = "requested_job_directory_location", length = 1024, updatable = false)
private String requestedJobDirectoryLocation;
@Basic
@Column(name = "job_directory_location", length = 1024)
private String jobDirectoryLocation;
@Lob
@Basic(fetch = FetchType.LAZY)
@Column(name = "requested_agent_config_ext", updatable = false, columnDefinition = "TEXT DEFAULT NULL")
@ToString.Exclude
private String requestedAgentConfigExt;
@Lob
@Basic(fetch = FetchType.LAZY)
@Column(name = "requested_agent_environment_ext", updatable = false, columnDefinition = "TEXT DEFAULT NULL")
@ToString.Exclude
private String requestedAgentEnvironmentExt;
@Basic
@Column(name = "timeout_used")
private Integer timeoutUsed;
@Basic(optional = false)
@Column(name = "api", nullable = false)
private boolean api = true;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "cluster_id")
@ToString.Exclude
private ClusterEntity cluster;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "command_id")
@ToString.Exclude
private CommandEntity command;
@ElementCollection
@CollectionTable(
name = "job_command_arguments",
joinColumns = {
@JoinColumn(name = "job_id", nullable = false, updatable = false)
}
)
@Column(name = "argument", length = 10_000, nullable = false, updatable = false)
@OrderColumn(name = "argument_order", nullable = false, updatable = false)
@ToString.Exclude
private List<@NotBlank @Size(max = 10_000) String> commandArgs = new ArrayList<>();
@ElementCollection(fetch = FetchType.LAZY)
@CollectionTable(
name = "job_requested_environment_variables",
joinColumns = {
@JoinColumn(name = "job_id", nullable = false, updatable = false)
}
)
@MapKeyColumn(name = "name", updatable = false)
@Column(name = "value", length = 1024, nullable = false, updatable = false)
@ToString.Exclude
private Map<@NotBlank @Size(max = 255) String, @NotNull @Size(max = 1024) String>
requestedEnvironmentVariables = Maps.newHashMap();
@ElementCollection(fetch = FetchType.LAZY)
@CollectionTable(
name = "job_environment_variables",
joinColumns = {
@JoinColumn(name = "job_id", nullable = false)
}
)
@MapKeyColumn(name = "name")
@Column(name = "value", length = 1024, nullable = false)
@ToString.Exclude
private Map<@NotBlank @Size(max = 255) String, @NotNull @Size(max = 1024) String>
environmentVariables = Maps.newHashMap();
@ManyToMany(fetch = FetchType.LAZY)
@JoinTable(
name = "jobs_applications",
joinColumns = {
@JoinColumn(name = "job_id", referencedColumnName = "id", nullable = false)
},
inverseJoinColumns = {
@JoinColumn(name = "application_id", referencedColumnName = "id", nullable = false)
}
)
@OrderColumn(name = "application_order", nullable = false, updatable = false)
@ToString.Exclude
private List<ApplicationEntity> applications = new ArrayList<>();
@ManyToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL)
@JoinTable(
name = "jobs_cluster_criteria",
joinColumns = {
@JoinColumn(name = "job_id", referencedColumnName = "id", nullable = false, updatable = false)
},
inverseJoinColumns = {
@JoinColumn(name = "criterion_id", referencedColumnName = "id", nullable = false, updatable = false)
}
)
@OrderColumn(name = "priority_order", nullable = false, updatable = false)
@ToString.Exclude
private List<CriterionEntity> clusterCriteria = new ArrayList<>();
@ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.ALL)
@JoinColumn(name = "command_criterion", nullable = false, updatable = false)
@ToString.Exclude
private CriterionEntity commandCriterion;
@ElementCollection
@CollectionTable(
name = "job_requested_applications",
joinColumns = {
@JoinColumn(name = "job_id", nullable = false, updatable = false)
}
)
@Column(name = "application_id", nullable = false, updatable = false)
@OrderColumn(name = "application_order", nullable = false, updatable = false)
@ToString.Exclude
private List<String> requestedApplications = new ArrayList<>();
@ManyToMany(fetch = FetchType.LAZY)
@JoinTable(
name = "jobs_configs",
joinColumns = {
@JoinColumn(name = "job_id", referencedColumnName = "id", nullable = false, updatable = false)
},
inverseJoinColumns = {
@JoinColumn(name = "file_id", referencedColumnName = "id", nullable = false, updatable = false)
}
)
@ToString.Exclude
private Set<FileEntity> configs = new HashSet<>();
@ManyToMany(fetch = FetchType.LAZY)
@JoinTable(
name = "jobs_dependencies",
joinColumns = {
@JoinColumn(name = "job_id", referencedColumnName = "id", nullable = false, updatable = false)
},
inverseJoinColumns = {
@JoinColumn(name = "file_id", referencedColumnName = "id", nullable = false, updatable = false)
}
)
@ToString.Exclude
private Set<FileEntity> dependencies = new HashSet<>();
@ManyToMany(fetch = FetchType.LAZY)
@JoinTable(
name = "jobs_tags",
joinColumns = {
@JoinColumn(name = "job_id", referencedColumnName = "id", nullable = false, updatable = false)
},
inverseJoinColumns = {
@JoinColumn(name = "tag_id", referencedColumnName = "id", nullable = false, updatable = false)
}
)
@ToString.Exclude
private Set<TagEntity> tags = new HashSet<>();
@Transient
@ToString.Exclude
private String notifiedJobStatus;
/**
* Default Constructor.
*/
public JobEntity() {
super();
}
/**
* Before a job is created create the job search string.
*/
@PrePersist
void onCreateJob() {
if (!this.tags.isEmpty()) {
// Tag search string length max is currently 1024 which will be caught by hibernate validator if this
// exceeds that length
this.tagSearchString = JpaSpecificationUtils.createTagSearchString(this.tags);
}
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getGenieUserGroup() {
return Optional.ofNullable(this.genieUserGroup);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getEmail() {
return Optional.ofNullable(this.email);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Integer> getRequestedCpu() {
return Optional.ofNullable(this.requestedCpu);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Integer> getRequestedTimeout() {
return Optional.ofNullable(this.requestedTimeout);
}
/**
* Set the command criterion.
*
* @param commandCriterion The criterion. Null clears reference.
*/
public void setCommandCriterion(@Nullable final CriterionEntity commandCriterion) {
this.commandCriterion = commandCriterion;
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getGrouping() {
return Optional.ofNullable(this.grouping);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getGroupingInstance() {
return Optional.ofNullable(this.groupingInstance);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getStatusMsg() {
return Optional.ofNullable(this.statusMsg);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Instant> getStarted() {
return Optional.ofNullable(this.started);
}
/**
* Set the start time for the job.
*
* @param started The started time.
*/
public void setStarted(@Nullable final Instant started) {
this.started = started;
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Instant> getFinished() {
return Optional.ofNullable(this.finished);
}
/**
* Set the finishTime for the job.
*
* @param finished The finished time.
*/
public void setFinished(@Nullable final Instant finished) {
this.finished = finished;
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Integer> getRequestedMemory() {
return Optional.ofNullable(this.requestedMemory);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getRequestApiClientHostname() {
return Optional.ofNullable(this.requestApiClientHostname);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getRequestApiClientUserAgent() {
return Optional.ofNullable(this.requestApiClientUserAgent);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Integer> getNumAttachments() {
return Optional.ofNullable(this.numAttachments);
}
/**
* Get the hostname of the agent that requested this job be run if there was one.
*
* @return The hostname wrapped in an {@link Optional}
*/
public Optional<String> getRequestAgentClientHostname() {
return Optional.ofNullable(this.requestAgentClientHostname);
}
/**
* Get the version of the agent that requested this job be run if there was one.
*
* @return The version wrapped in an {@link Optional}
*/
public Optional<String> getRequestAgentClientVersion() {
return Optional.ofNullable(this.requestAgentClientVersion);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Integer> getExitCode() {
return Optional.ofNullable(this.exitCode);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getArchiveLocation() {
return Optional.ofNullable(this.archiveLocation);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Integer> getMemoryUsed() {
return Optional.ofNullable(this.memoryUsed);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<ClusterEntity> getCluster() {
return Optional.ofNullable(this.cluster);
}
/**
* Set the cluster this job ran on.
*
* @param cluster The cluster this job ran on
*/
public void setCluster(@Nullable final ClusterEntity cluster) {
if (this.cluster != null) {
this.clusterName = null;
}
this.cluster = cluster;
if (this.cluster != null) {
this.clusterName = cluster.getName();
}
}
/**
* {@inheritDoc}
*/
@Override
public Optional<CommandEntity> getCommand() {
return Optional.ofNullable(this.command);
}
/**
* Set the command used to run this job.
*
* @param command The command
*/
public void setCommand(@Nullable final CommandEntity command) {
if (this.command != null) {
this.commandName = null;
}
this.command = command;
if (this.command != null) {
this.commandName = command.getName();
}
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Long> getTotalSizeOfAttachments() {
return Optional.ofNullable(this.totalSizeOfAttachments);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Long> getStdOutSize() {
return Optional.ofNullable(this.stdOutSize);
}
/**
* Set the total size in bytes of the std out file for this job.
*
* @param stdOutSize The size. Null empties database field
*/
public void setStdOutSize(@Nullable final Long stdOutSize) {
this.stdOutSize = stdOutSize;
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Long> getStdErrSize() {
return Optional.ofNullable(this.stdErrSize);
}
/**
* Set the total size in bytes of the std err file for this job.
*
* @param stdErrSize The size. Null empties database field
*/
public void setStdErrSize(@Nullable final Long stdErrSize) {
this.stdErrSize = stdErrSize;
}
/**
* Get the PID of the agent that requested this job be run if there was one.
*
* @return The PID wrapped in an {@link Optional}
*/
public Optional<Integer> getRequestAgentClientPid() {
return Optional.ofNullable(this.requestAgentClientPid);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getClusterName() {
return Optional.ofNullable(this.clusterName);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getCommandName() {
return Optional.ofNullable(this.commandName);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getAgentHostname() {
return Optional.ofNullable(this.agentHostname);
}
/**
* Get the version of the agent that claimed this job.
*
* @return The version wrapped in an {@link Optional} in case it wasn't set yet in which case it will be
* {@link Optional#empty()}
*/
public Optional<String> getAgentVersion() {
return Optional.ofNullable(this.agentVersion);
}
/**
* Get the pid of the agent that claimed this job.
*
* @return The pid wrapped in an {@link Optional} in case it wasn't set yet in which case it will be
* {@link Optional#empty()}
*/
public Optional<Integer> getAgentPid() {
return Optional.ofNullable(this.agentPid);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Integer> getProcessId() {
return Optional.ofNullable(this.processId);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Long> getCheckDelay() {
return Optional.ofNullable(this.checkDelay);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<Integer> getTimeoutUsed() {
return Optional.ofNullable(this.timeoutUsed);
}
/**
* Set the final resolved timeout duration for this job.
*
* @param timeoutUsed The timeout value (in seconds) after which this job should be killed by the system
*/
public void setTimeoutUsed(@Nullable final Integer timeoutUsed) {
this.timeoutUsed = timeoutUsed;
}
/**
* Set the command arguments to use with this job.
*
* @param commandArgs The command arguments to use
*/
public void setCommandArgs(@Nullable final List<String> commandArgs) {
this.commandArgs.clear();
if (commandArgs != null) {
this.commandArgs.addAll(commandArgs);
}
}
/**
* Set all the files associated as configuration files for this job.
*
* @param configs The configuration files to set
*/
public void setConfigs(@Nullable final Set<FileEntity> configs) {
this.configs.clear();
if (configs != null) {
this.configs.addAll(configs);
}
}
/**
* Set all the files associated as dependency files for this job.
*
* @param dependencies The dependency files to set
*/
public void setDependencies(@Nullable final Set<FileEntity> dependencies) {
this.dependencies.clear();
if (dependencies != null) {
this.dependencies.addAll(dependencies);
}
}
/**
* Set all the tags associated to this job.
*
* @param tags The tags to set
*/
public void setTags(@Nullable final Set<TagEntity> tags) {
this.tags.clear();
if (tags != null) {
this.tags.addAll(tags);
}
}
/**
* Set the requested environment variables.
*
* @param requestedEnvironmentVariables The environment variables the user requested be added to the job runtime
*/
public void setRequestedEnvironmentVariables(@Nullable final Map<String, String> requestedEnvironmentVariables) {
this.requestedEnvironmentVariables.clear();
if (requestedEnvironmentVariables != null) {
this.requestedEnvironmentVariables.putAll(requestedEnvironmentVariables);
}
}
/**
* Set the environment variables for the job.
*
* @param environmentVariables The final set of environment variables that were set in the job runtime
*/
public void setEnvironmentVariables(@Nullable final Map<String, String> environmentVariables) {
this.environmentVariables.clear();
if (environmentVariables != null) {
this.environmentVariables.putAll(environmentVariables);
}
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getRequestedJobDirectoryLocation() {
return Optional.ofNullable(this.requestedJobDirectoryLocation);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getRequestedAgentEnvironmentExt() {
return Optional.ofNullable(this.requestedAgentEnvironmentExt);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getRequestedAgentConfigExt() {
return Optional.ofNullable(this.requestedAgentConfigExt);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getRequestedArchiveLocationPrefix() {
return Optional.ofNullable(this.requestedArchiveLocationPrefix);
}
/**
* {@inheritDoc}
*/
@Override
public Optional<String> getJobDirectoryLocation() {
return Optional.ofNullable(this.jobDirectoryLocation);
}
/**
* Set the applications used to run this job.
*
* @param applications The applications
*/
public void setApplications(@Nullable final List<ApplicationEntity> applications) {
this.applications.clear();
if (applications != null) {
this.applications.addAll(applications);
}
}
/**
* Set the cluster criteria set for this job.
*
* @param clusterCriteria The cluster criteria in priority order
*/
public void setClusterCriteria(@Nullable final List<CriterionEntity> clusterCriteria) {
this.clusterCriteria.clear();
if (clusterCriteria != null) {
this.clusterCriteria.addAll(clusterCriteria);
}
}
/**
 * Get the previously notified job status if there was one.
 *
 * @return The previously notified job status wrapped in an {@link Optional} or {@link Optional#empty()}
 */
public Optional<String> getNotifiedJobStatus() {
    final String status = this.notifiedJobStatus;
    return status == null ? Optional.empty() : Optional.of(status);
}
/**
 * {@inheritDoc}
 *
 * <p>Equality is deliberately delegated to the superclass — presumably an
 * id-based base entity comparison; confirm against the base class.
 */
@Override
public boolean equals(final Object o) {
    return super.equals(o);
}
/**
 * {@inheritDoc}
 *
 * <p>Delegates to the superclass to stay consistent with {@link #equals(Object)},
 * which also delegates.
 */
@Override
public int hashCode() {
    return super.hashCode();
}
} |
Java | public abstract class DataPoint implements Serializable {
/**
* Data point time stamp
*/
protected long _timeInMillis;
/**
* set timestamp
*
* @param time timestamp in msec as long
*/
public void setTimeInMillis(long time) {
_timeInMillis = time;
}
/**
* get timestamp
*
* @return long
*/
@XmlTransient
@SerializationIndex(1)
public long getTimeInMillis() {
return _timeInMillis;
}
} |
/**
 * A single catalog item (book or movie) in the Biblioteca library.
 *
 * <p>NOTE: the string renderings below use reflection over the declared
 * fields; the JLS does not guarantee {@code getDeclaredFields()} order, so
 * output ordering relies on the runtime returning declaration order.
 */
public class ItemOfBiblioteca {
    private String code;
    private String title;
    private String authorDirector;
    private int year;
    private String rating; // "1"-"10" or null when unrated (books carry no rating)
    private Boolean isAvailable; // null means availability is unknown

    /**
     * Full constructor, used for movies (which carry a rating).
     */
    public ItemOfBiblioteca(String code, String title, String authorDirector, int year, String rating, Boolean isAvailable) {
        this.code = code;
        this.title = title;
        this.authorDirector = authorDirector;
        this.year = year;
        this.rating = rating;
        this.isAvailable = isAvailable;
    }

    /**
     * Constructor for books: same as the full constructor but with no rating.
     */
    public ItemOfBiblioteca(String code, String title, String authorDirector, int year, Boolean isAvailable) {
        this(code, title, authorDirector, year, null, isAvailable);
    }

    public String getCode() {
        return code;
    }

    public Boolean getIsAvailable() {
        return isAvailable;
    }

    public void setIsAvailable(Boolean isAvailable) {
        this.isAvailable = isAvailable;
    }

    /**
     * Lists the item's populated fields separated by spaces, but only when the
     * item is currently available; otherwise returns an empty string.
     */
    @Override
    public String toString() {
        // Boolean.TRUE.equals(...) avoids the NullPointerException the previous
        // "&& isAvailable" unboxing caused when availability was unknown (null).
        // Hoisting the check out of the loop also avoids re-evaluating it per field.
        if (!Boolean.TRUE.equals(isAvailable)) {
            return "";
        }
        StringBuilder result = new StringBuilder();
        // Fields declared in this class only (no fields of a superclass).
        Field[] fields = this.getClass().getDeclaredFields();
        for (Field field : fields) {
            try {
                if (!field.getName().equals("isAvailable") && field.get(this) != null) {
                    result.append(field.get(this));
                    result.append(" ");
                }
            } catch (IllegalAccessException ex) {
                System.out.println(ex);
            }
        }
        return result.toString();
    }

    /**
     * Serializes every non-null field on its own line, for persisting the item
     * to a text file.
     */
    public String toStringForTxtFile() {
        StringBuilder result = new StringBuilder();
        Field[] fields = this.getClass().getDeclaredFields();
        String newLine = System.lineSeparator();
        for (Field field : fields) {
            try {
                if (field.get(this) != null) {
                    result.append(field.get(this));
                    result.append(newLine);
                }
            } catch (IllegalAccessException ex) {
                System.out.println(ex);
            }
        }
        return result.toString();
    }
}
Java | public class ScrollBarCompatibility extends AScrollBar implements IDraggable {
private final DragAndDropController dragAndDropController;
private float prevDraggingMouseX;
private float prevDraggingMouseY;
public ScrollBarCompatibility(boolean isHorizontal, DragAndDropController dragAndDropController) {
super(isHorizontal);
this.dragAndDropController = dragAndDropController;
}
@Override
public void pick(Pick pick) {
if (pick.isAnyDragging() && !pick.isDoDragging())
return;
if (callback == null)
return;
switch (pick.getPickingMode()) {
case MOUSE_OVER:
hovered = true;
break;
case CLICKED:
dragAndDropController.clearDraggables();
dragAndDropController.setDraggingProperties(pick.getPickedPoint(), "ScrollbarDrag");
dragAndDropController.addDraggable(this);
break;
case MOUSE_OUT:
hovered = false;
break;
default:
break;
}
}
@Override
public void setDraggingStartPoint(float mouseCoordinateX, float mouseCoordinateY) {
prevDraggingMouseX = mouseCoordinateX;
prevDraggingMouseY = mouseCoordinateY;
}
@Override
public void handleDragging(GL2 gl, float mouseCoordinateX, float mouseCoordinateY) {
float mouseDelta;
if (dim.isHorizontal()) {
if (prevDraggingMouseX >= mouseCoordinateX - 0.01 && prevDraggingMouseX <= mouseCoordinateX + 0.01)
return;
mouseDelta = prevDraggingMouseX - mouseCoordinateX;
} else {
if (prevDraggingMouseY >= mouseCoordinateY - 0.01 && prevDraggingMouseY <= mouseCoordinateY + 0.01)
return;
mouseDelta = prevDraggingMouseY - mouseCoordinateY;
}
drag(mouseDelta);
prevDraggingMouseX = mouseCoordinateX;
prevDraggingMouseY = mouseCoordinateY;
}
@Override
public void handleDrop(GL2 gl, float mouseCoordinateX, float mouseCoordinateY) {
}
} |
Java | public class JaegerHandler implements MiddlewareHandler {
static final Logger logger = LoggerFactory.getLogger(JaegerHandler.class);
static JaegerConfig jaegerConfig = (JaegerConfig) Config.getInstance().getJsonObjectConfig(JaegerConfig.CONFIG_NAME, JaegerConfig.class);
private volatile HttpHandler next;
public JaegerHandler() {
}
/**
* Extract the context, start and stop the span here.
*
* @param exchange HttpServerExchange
* @throws Exception Exception
*/
@Override
public void handleRequest(final HttpServerExchange exchange) throws Exception {
// get the path and method to construct the endpoint for the operation of tracing.
Map<String, Object> auditInfo = exchange.getAttachment(AttachmentConstants.AUDIT_INFO);
String endpoint = null;
if(auditInfo != null) {
endpoint = (String)auditInfo.get(Constants.ENDPOINT_STRING);
} else {
endpoint = exchange.getRequestPath() + "@" + exchange.getRequestMethod();
}
HeaderMap headerMap = exchange.getRequestHeaders();
final HashMap<String, String> headers = new HashMap<>();
for(HttpString key : headerMap.getHeaderNames()) {
headers.put(key.toString(), headerMap.getFirst(key));
}
TextMap carrier = new TextMapAdapter(headers);
// start the server span.
Tracer.SpanBuilder spanBuilder;
try {
SpanContext parentSpanCtx = tracer.extract(Format.Builtin.HTTP_HEADERS, carrier);
if (parentSpanCtx == null) {
spanBuilder = tracer.buildSpan(endpoint);
} else {
spanBuilder = tracer.buildSpan(endpoint).asChildOf(parentSpanCtx);
}
} catch (IllegalArgumentException e) {
spanBuilder = tracer.buildSpan(endpoint);
}
Span rootSpan = spanBuilder
.withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_SERVER)
.withTag(Tags.PEER_HOSTNAME.getKey(), NetUtils.getLocalAddressByDatagram())
.withTag(Tags.PEER_PORT.getKey(), Server.getServerConfig().getHttpsPort())
.start();
tracer.activateSpan(rootSpan);
// This can be retrieved in the business handler to add tags and logs for tracing.
exchange.putAttachment(ROOT_SPAN, rootSpan);
// The client module can use this to inject tracer.
exchange.putAttachment(EXCHANGE_TRACER, tracer);
// add an exchange complete listener to close the Root Span for the request.
exchange.addExchangeCompleteListener((exchange1, nextListener) -> {
Span span = exchange1.getAttachment(ROOT_SPAN);
if(span != null) {
span.finish();
}
nextListener.proceed();
});
Handler.next(exchange, next);
}
@Override
public HttpHandler getNext() {
return next;
}
@Override
public MiddlewareHandler setNext(final HttpHandler next) {
Handlers.handlerNotNull(next);
this.next = next;
return this;
}
@Override
public boolean isEnabled() {
return jaegerConfig.isEnabled();
}
@Override
public void register() {
ModuleRegistry.registerModule(JaegerHandler.class.getName(), Config.getInstance().getJsonMapConfigNoCache(JaegerConfig.CONFIG_NAME), null);
}
} |
Java | public class TabController<T> {
/**
* Creates a TabController for a list of elements.
*/
public static <T> TabController<T> create(BaseResources res, TabClickedListener<T> listener,
Element tabContainer, JsonArray<TabElement<T>> headers) {
TabController<T> controller = new TabController<T>(res, listener, tabContainer);
// Create the tab headers
for (int i = 0; i < headers.size(); i++) {
TabElement<T> element = headers.get(i);
if (controller.activeTab == null) {
controller.setActiveTab(element);
}
tabContainer.appendChild(element);
}
return controller;
}
public interface TabClickedListener<T> {
public void onTabClicked(TabElement<T> element);
}
/**
* An javascript overlay which encapsulates the tab identifier associated with
* each tab header.
*/
public static class TabElement<T> extends JsElement {
/**
* Creates a tab element from an element and data.
*/
public static <T> TabElement<T> create(BaseResources res, String label, T data) {
@SuppressWarnings("unchecked")
TabElement<T> element = (TabElement<T>) Elements.createDivElement(res.baseCss().tab());
element.setTextContent(label);
element.setTabData(data);
return element;
}
protected TabElement() {
// javascript overlay
}
public final native void setTabData(T data) /*-{
this.__tabData = data;
}-*/;
public final native T getTabData() /*-{
return this.__tabData;
}-*/;
}
private final BaseResources res;
private final TabClickedListener<T> listener;
private final Element container;
private TabElement<T> activeTab;
private TabController(BaseResources res, TabClickedListener<T> listener, Element container) {
this.container = container;
this.res = res;
this.listener = listener;
attachHandlers();
}
private void attachHandlers() {
container.addEventListener(Event.CLICK, new EventListener() {
@Override
public void handleEvent(Event evt) {
MouseEvent event = (MouseEvent) evt;
// we could really just use the event target but this is for future
// expandability I guess.
Element element = CssUtils.getAncestorOrSelfWithClassName(
(Element) event.getTarget(), res.baseCss().tab());
if (element != null) {
@SuppressWarnings("unchecked")
TabElement<T> tabElement = (TabElement<T>) element;
selectTab(tabElement);
}
}
}, false);
}
/**
* Selects the supplied tab dispatching the listeners clicked event.
*/
public void selectTab(TabElement<T> element) {
if (activeTab == element) {
return;
}
setActiveTab(element);
listener.onTabClicked(element);
}
public T getActiveTab() {
return activeTab.getTabData();
}
/**
* Sets the active tab based on the provided tab data without dispatching the
* listeners clicked event.
*/
public boolean setActiveTab(T tab) {
if (getActiveTab() == tab) {
return true;
}
HTMLCollection nodes = container.getChildren();
for (int i = 0; i < nodes.getLength(); i++) {
@SuppressWarnings("unchecked")
TabElement<T> element = (TabElement<T>) nodes.item(i);
if (element.getTabData().equals(tab)) {
setActiveTab(element);
return true;
}
}
return false;
}
/**
* Sets the active tab without triggering the {@link TabClickedListener}
* callback.
*/
private void setActiveTab(TabElement<T> element) {
if (activeTab != null) {
activeTab.removeClassName(res.baseCss().activeTab());
}
element.addClassName(res.baseCss().activeTab());
activeTab = element;
}
} |
Java | public static class TabElement<T> extends JsElement {
    /**
     * Creates a tab element from an element and data.
     */
    public static <T> TabElement<T> create(BaseResources res, String label, T data) {
        @SuppressWarnings("unchecked")
        TabElement<T> element = (TabElement<T>) Elements.createDivElement(res.baseCss().tab());
        element.setTextContent(label);
        element.setTabData(data);
        return element;
    }
    protected TabElement() {
        // javascript overlay
    }
    // Stores the tab's data directly on the DOM element as a JSNI expando property.
    public final native void setTabData(T data) /*-{
        this.__tabData = data;
    }-*/;
    // Reads the tab's data back from the DOM element.
    public final native T getTabData() /*-{
        return this.__tabData;
    }-*/;
}
Java | public final class ConsentState {
private final static String SERIALIZED_GDPR_KEY = "GDPR";
private final static String SERIALIZED_CCPA_KEY = "CCPA";
private Map<String, GDPRConsent> gdprConsentState = null;
private CCPAConsent ccpaConsentState;
private ConsentState() {
}
private ConsentState(Builder builder) {
gdprConsentState = builder.gdprConsentState;
ccpaConsentState = builder.ccpaConsent;
}
@NonNull
public static Builder builder() {
return new Builder();
}
@NonNull
public static Builder withConsentState(@NonNull ConsentState consentState) {
return new Builder(consentState);
}
@NonNull
public static Builder withConsentState(@NonNull String consentState) {
return new Builder(consentState);
}
/**
* When comparing consent values for duplication with string fields:
* 1) case doesn't matter. "foo" and "Foo" are the same;
* 2) null, empty, and whitespace are all the same - nothing;
* 3) leading or training whitespace is ignored. "foo ", " foo", and "foo" are the same;
*/
private static String canonicalizeForDeduplication(String source) {
if (MPUtility.isEmpty(source)) {
return null;
}
return source.toLowerCase(Locale.US).trim();
}
/**
* Retrieve the current GDPR consent state for this user.
* <p>
* Note that all purpose keys will be lower-case and trimmed.
*
* @return returns an unmodifiable Map. Attempted mutation will
* result in an <code>UnsupportedOperationException</code>.
*/
@NonNull
public Map<String, GDPRConsent> getGDPRConsentState() {
return Collections.unmodifiableMap(gdprConsentState);
}
@Nullable
public CCPAConsent getCCPAConsentState() {
return ccpaConsentState;
}
@Override
@NonNull
public String toString() {
JSONObject consentJsonObject = new JSONObject();
try {
JSONObject gdprConsentStateJsonObject = new JSONObject();
consentJsonObject.put(SERIALIZED_GDPR_KEY, gdprConsentStateJsonObject);
for (Map.Entry<String, GDPRConsent> entry : gdprConsentState.entrySet()) {
gdprConsentStateJsonObject.put(entry.getKey(), entry.getValue().toString());
}
if (ccpaConsentState != null) {
consentJsonObject.put(SERIALIZED_CCPA_KEY, ccpaConsentState.toString());
}
} catch (JSONException ignored) {
}
return consentJsonObject.toString();
}
public static class Builder {
private Map<String, GDPRConsent> gdprConsentState = new HashMap<String, GDPRConsent>();
private CCPAConsent ccpaConsent = null;
public Builder() {
}
private Builder(ConsentState consentState) {
setGDPRConsentState(consentState.getGDPRConsentState());
setCCPAConsentState(consentState.getCCPAConsentState());
}
private Builder(String serializedConsent) {
if (MPUtility.isEmpty(serializedConsent)) {
return;
}
try {
JSONObject jsonConsent = new JSONObject(serializedConsent);
if (jsonConsent.has(SERIALIZED_GDPR_KEY)) {
JSONObject gdprConsentState = jsonConsent.getJSONObject(SERIALIZED_GDPR_KEY);
for (Iterator<String> it = gdprConsentState.keys(); it.hasNext(); ) {
String key = it.next();
this.addGDPRConsentState(key, GDPRConsent.withGDPRConsent(gdprConsentState.getString(key)).build());
}
}
if (jsonConsent.has(SERIALIZED_CCPA_KEY)) {
String ccpaConsentString = jsonConsent.getString(SERIALIZED_CCPA_KEY);
setCCPAConsentState(CCPAConsent.withCCPAConsent(ccpaConsentString).build());
}
} catch (JSONException ignored) {
}
}
/**
* Set/replace the entire GDPR consent state of this builder.
* <p>
* Note that all purpose keys will be lower-cased and trimmed.
*
* @param consentState
*/
@NonNull
public Builder setGDPRConsentState(@Nullable Map<String, GDPRConsent> consentState) {
if (consentState == null) {
gdprConsentState = new HashMap<String, GDPRConsent>();
return this;
}
Map<String, GDPRConsent> consentStateCopy = new HashMap<String, GDPRConsent>(consentState);
gdprConsentState = new HashMap<String, GDPRConsent>();
for (Map.Entry<String, GDPRConsent> entry : consentStateCopy.entrySet()) {
this.addGDPRConsentState(entry.getKey(), entry.getValue());
}
return this;
}
/**
* Add or override a single GDPR consent state for this builder.
* <p>
* Note that all purpose keys will be lower-cased and trimmed.
*
* @param purpose
* @param consent
*/
@NonNull
public Builder addGDPRConsentState(@NonNull String purpose, @NonNull GDPRConsent consent) {
String normalizedPurpose = ConsentState.canonicalizeForDeduplication(purpose);
if (MPUtility.isEmpty(normalizedPurpose)) {
Logger.error("Cannot set GDPR Consent with null or empty purpose.");
return this;
}
if (gdprConsentState == null) {
gdprConsentState = new HashMap<String, GDPRConsent>();
}
gdprConsentState.put(normalizedPurpose, consent);
return this;
}
/**
* Remove a single GDPR consent state for this builder.
* <p>
* Note that all purpose keys will be lower-cased and trimmed.
*
* @param purpose
*/
@NonNull
public Builder removeGDPRConsentState(@NonNull String purpose) {
String normalizedPurpose = ConsentState.canonicalizeForDeduplication(purpose);
if (MPUtility.isEmpty(normalizedPurpose)) {
Logger.error("Cannot remove GDPR Consent with null or empty purpose");
return this;
}
if (gdprConsentState == null) {
return this;
}
gdprConsentState.remove(normalizedPurpose);
return this;
}
@Deprecated
@NonNull
public Builder setCCPAConsent(@NonNull CCPAConsent ccpaConsent) {
return setCCPAConsentState(ccpaConsent);
}
@NonNull
public Builder setCCPAConsentState(@NonNull CCPAConsent ccpaConsent) {
this.ccpaConsent = ccpaConsent;
return this;
}
@Deprecated
@NonNull
public Builder removeCCPAConsent() {
return removeCCPAConsentState();
}
@NonNull
public Builder removeCCPAConsentState() {
ccpaConsent = null;
return this;
}
@NonNull
public ConsentState build() {
return new ConsentState(this);
}
@Override
@NonNull
public String toString() {
return build().toString();
}
}
} |
Java | @XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "GmailTeaser", propOrder = {
"headline",
"description",
"businessName",
"logoImage"
})
public class GmailTeaser {
protected String headline;
protected String description;
protected String businessName;
protected Image logoImage;
/**
* Gets the value of the headline property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getHeadline() {
return headline;
}
/**
* Sets the value of the headline property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setHeadline(String value) {
this.headline = value;
}
/**
* Gets the value of the description property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getDescription() {
return description;
}
/**
* Sets the value of the description property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setDescription(String value) {
this.description = value;
}
/**
* Gets the value of the businessName property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getBusinessName() {
return businessName;
}
/**
* Sets the value of the businessName property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setBusinessName(String value) {
this.businessName = value;
}
/**
* Gets the value of the logoImage property.
*
* @return
* possible object is
* {@link Image }
*
*/
public Image getLogoImage() {
return logoImage;
}
/**
* Sets the value of the logoImage property.
*
* @param value
* allowed object is
* {@link Image }
*
*/
public void setLogoImage(Image value) {
this.logoImage = value;
}
} |
Java | public class DisplayDeviceConfig {
private static final String TAG = "DisplayDeviceConfig";
public static final float HIGH_BRIGHTNESS_MODE_UNSUPPORTED = Float.NaN;
public static final String QUIRK_CAN_SET_BRIGHTNESS_VIA_HWC = "canSetBrightnessViaHwc";
// Fallback default brightness (framework 0-1 scale) when nothing is configured.
private static final float BRIGHTNESS_DEFAULT = 0.5f;
// Path pieces and filename formats used to locate display_*.xml config files.
private static final String ETC_DIR = "etc";
private static final String DISPLAY_CONFIG_DIR = "displayconfig";
private static final String CONFIG_FILE_FORMAT = "display_%s.xml";
private static final String PORT_SUFFIX_FORMAT = "port_%d";
private static final String STABLE_ID_SUFFIX_FORMAT = "id_%d";
private static final String NO_SUFFIX_FORMAT = "%d";
// Bit marking a "stable" physical display ID; masked off for legacy lookups.
private static final long STABLE_FLAG = 1L << 62;
// Float.NaN (used as invalid for brightness) cannot be stored in config.xml
// so -2 is used instead
private static final float INVALID_BRIGHTNESS_IN_CONFIG = -2f;
private static final float NITS_INVALID = -1;
private final Context mContext;
// The details of the ambient light sensor associated with this display.
private final SensorData mAmbientLightSensor = new SensorData();
// The details of the proximity sensor associated with this display.
private final SensorData mProximitySensor = new SensorData();
private final List<RefreshRateLimitation> mRefreshRateLimitations =
        new ArrayList<>(2 /*initialCapacity*/);
// Nits and backlight values that are loaded from either the display device config file, or
// config.xml. These are the raw values and just used for the dumpsys
private float[] mRawNits;
private float[] mRawBacklight;
// These arrays are calculated from the raw arrays, but clamped to contain values equal to and
// between mBacklightMinimum and mBacklightMaximum. These three arrays should all be the same
// length
// Nits array that is used to store the entire range of nits values that the device supports
private float[] mNits;
// Backlight array holds the values that the HAL uses to display the corresponding nits values
private float[] mBacklight;
// Purely an array that covers the ranges of values 0.0 - 1.0, indicating the system brightness
// for the corresponding values above
private float[] mBrightness;
private float mBacklightMinimum = Float.NaN;
private float mBacklightMaximum = Float.NaN;
private float mBrightnessDefault = Float.NaN;
// Ramp rates; NaN until loaded from config.
private float mBrightnessRampFastDecrease = Float.NaN;
private float mBrightnessRampFastIncrease = Float.NaN;
private float mBrightnessRampSlowDecrease = Float.NaN;
private float mBrightnessRampSlowIncrease = Float.NaN;
// Splines mapping between the framework brightness scale, HAL backlight scale and nits.
private Spline mBrightnessToBacklightSpline;
private Spline mBacklightToBrightnessSpline;
private Spline mBacklightToNitsSpline;
private List<String> mQuirks;
private boolean mIsHighBrightnessModeEnabled = false;
private HighBrightnessModeData mHbmData;
// Human-readable description of where this config was loaded from (for toString/debugging).
private String mLoadedFrom = null;
// Instances are only created via the static factory methods below.
private DisplayDeviceConfig(Context context) {
    mContext = context;
}
/**
 * Creates an instance for the specified display.
 * Tries to find a file with identifier in the following priority order:
 * <ol>
 *     <li>physicalDisplayId</li>
 *     <li>physicalDisplayId without a stable flag (old system)</li>
 *     <li>portId</li>
 * </ol>
 *
 * @param physicalDisplayId The display ID for which to load the configuration.
 * @return A configuration instance for the specified display.
 */
public static DisplayDeviceConfig create(Context context, long physicalDisplayId,
        boolean isDefaultDisplay) {
    DisplayDeviceConfig config;
    // The product partition is consulted before the vendor partition.
    config = loadConfigFromDirectory(context, Environment.getProductDirectory(),
            physicalDisplayId);
    if (config != null) {
        return config;
    }
    config = loadConfigFromDirectory(context, Environment.getVendorDirectory(),
            physicalDisplayId);
    if (config != null) {
        return config;
    }
    // If no config can be loaded from any ddc xml at all,
    // prepare a whole config using the global config.xml.
    // Guaranteed not null
    return create(context, isDefaultDisplay);
}
/**
 * Creates an instance using global values since no display device config xml exists.
 * Uses values from config or PowerManager.
 *
 * @param context the context used to resolve config.xml resources
 * @param useConfigXml true to read config.xml resources, false for PowerManager defaults
 * @return A configuration instance.
 */
public static DisplayDeviceConfig create(Context context, boolean useConfigXml) {
    DisplayDeviceConfig config;
    if (useConfigXml) {
        config = getConfigFromGlobalXml(context);
    } else {
        config = getConfigFromPmValues(context);
    }
    return config;
}
// Tries each filename scheme (stable ID, legacy ID, then port) within one base
// directory; returns null when no matching file could be loaded.
private static DisplayDeviceConfig loadConfigFromDirectory(Context context,
        File baseDirectory, long physicalDisplayId) {
    DisplayDeviceConfig config;
    // Create config using filename from physical ID (including "stable" bit).
    config = getConfigFromSuffix(context, baseDirectory, STABLE_ID_SUFFIX_FORMAT,
            physicalDisplayId);
    if (config != null) {
        return config;
    }
    // Create config using filename from physical ID (excluding "stable" bit).
    final long withoutStableFlag = physicalDisplayId & ~STABLE_FLAG;
    config = getConfigFromSuffix(context, baseDirectory, NO_SUFFIX_FORMAT, withoutStableFlag);
    if (config != null) {
        return config;
    }
    // Create config using filename from port ID.
    final DisplayAddress.Physical physicalAddress =
            DisplayAddress.fromPhysicalDisplayId(physicalDisplayId);
    int port = physicalAddress.getPort();
    config = getConfigFromSuffix(context, baseDirectory, PORT_SUFFIX_FORMAT, port);
    return config;
}
/**
 * Return the brightness mapping nits array.
 *
 * @return The brightness mapping nits array.
 */
public float[] getNits() {
    return mNits;
}
/**
 * Return the brightness mapping backlight array.
 *
 * @return The backlight mapping value array.
 */
public float[] getBacklight() {
    return mBacklight;
}
/**
 * Calculates the backlight value, as recognised by the HAL, from the brightness value
 * given that the rest of the system deals with.
 *
 * @param brightness value on the framework scale of 0-1
 * @return backlight value on the HAL scale of 0-1
 */
public float getBacklightFromBrightness(float brightness) {
    return mBrightnessToBacklightSpline.interpolate(brightness);
}
/**
 * Calculates the nits value for the specified backlight value if a mapping exists.
 *
 * @return The mapped nits, or NITS_INVALID (-1) if no mapping exists.
 */
public float getNitsFromBacklight(float backlight) {
    if (mBacklightToNitsSpline == null) {
        Slog.wtf(TAG, "requesting nits when no mapping exists.");
        return NITS_INVALID;
    }
    // Clamp to the supported minimum before interpolating.
    backlight = Math.max(backlight, mBacklightMinimum);
    return mBacklightToNitsSpline.interpolate(backlight);
}
/**
 * Return an array of equal length to backlight and nits, that covers the entire system
 * brightness range of 0.0-1.0.
 *
 * @return brightness array
 */
public float[] getBrightness() {
    return mBrightness;
}
/**
 * Return the default brightness on a scale of 0.0f - 1.0f
 *
 * @return default brightness
 */
public float getBrightnessDefault() {
    return mBrightnessDefault;
}
/** Ramp rate for fast brightness decreases (units defined by config — TODO confirm). */
public float getBrightnessRampFastDecrease() {
    return mBrightnessRampFastDecrease;
}
/** Ramp rate for fast brightness increases. */
public float getBrightnessRampFastIncrease() {
    return mBrightnessRampFastIncrease;
}
/** Ramp rate for slow brightness decreases. */
public float getBrightnessRampSlowDecrease() {
    return mBrightnessRampSlowDecrease;
}
/** Ramp rate for slow brightness increases. */
public float getBrightnessRampSlowIncrease() {
    return mBrightnessRampSlowIncrease;
}
/** @return details of the ambient light sensor associated with this display. */
SensorData getAmbientLightSensor() {
    return mAmbientLightSensor;
}
/** @return details of the proximity sensor associated with this display. */
SensorData getProximitySensor() {
    return mProximitySensor;
}
/**
 * @param quirkValue The quirk to test.
 * @return {@code true} if the specified quirk is present in this configuration,
 * {@code false} otherwise.
 */
public boolean hasQuirk(String quirkValue) {
    return mQuirks != null && mQuirks.contains(quirkValue);
}
/**
 * @return high brightness mode configuration data for the display, or null when
 * HBM is disabled or unconfigured. A copy is returned, so callers cannot
 * mutate the internal state.
 */
public HighBrightnessModeData getHighBrightnessModeData() {
    if (!mIsHighBrightnessModeEnabled || mHbmData == null) {
        return null;
    }
    HighBrightnessModeData hbmData = new HighBrightnessModeData();
    mHbmData.copyTo(hbmData);
    return hbmData;
}
// NOTE(review): exposes the internal mutable list; a caller could corrupt state.
// Consider wrapping with Collections.unmodifiableList — confirm no caller mutates it.
public List<RefreshRateLimitation> getRefreshRateLimitations() {
    return mRefreshRateLimitations;
}
// Full state dump (raw and derived values) for debugging.
@Override
public String toString() {
    String str = "DisplayDeviceConfig{"
            + "mLoadedFrom=" + mLoadedFrom
            + ", mBacklight=" + Arrays.toString(mBacklight)
            + ", mNits=" + Arrays.toString(mNits)
            + ", mRawBacklight=" + Arrays.toString(mRawBacklight)
            + ", mRawNits=" + Arrays.toString(mRawNits)
            + ", mBrightness=" + Arrays.toString(mBrightness)
            + ", mBrightnessToBacklightSpline=" + mBrightnessToBacklightSpline
            + ", mBacklightToBrightnessSpline=" + mBacklightToBrightnessSpline
            + ", mBacklightMinimum=" + mBacklightMinimum
            + ", mBacklightMaximum=" + mBacklightMaximum
            + ", mBrightnessDefault=" + mBrightnessDefault
            + ", mQuirks=" + mQuirks
            + ", isHbmEnabled=" + mIsHighBrightnessModeEnabled
            + ", mHbmData=" + mHbmData
            + ", mBrightnessRampFastDecrease=" + mBrightnessRampFastDecrease
            + ", mBrightnessRampFastIncrease=" + mBrightnessRampFastIncrease
            + ", mBrightnessRampSlowDecrease=" + mBrightnessRampSlowDecrease
            + ", mBrightnessRampSlowIncrease=" + mBrightnessRampSlowIncrease
            + ", mAmbientLightSensor=" + mAmbientLightSensor
            + ", mProximitySensor=" + mProximitySensor
            + ", mRefreshRateLimitations= " + Arrays.toString(mRefreshRateLimitations.toArray())
            + "}";
    return str;
}
// Builds the candidate file path for the given suffix format and id, then
// returns a parsed config, or null when the file is absent or unusable.
private static DisplayDeviceConfig getConfigFromSuffix(Context context, File baseDirectory,
        String suffixFormat, long idNumber) {
    final String suffix = String.format(suffixFormat, idNumber);
    final String filename = String.format(CONFIG_FILE_FORMAT, suffix);
    final File filePath = Environment.buildPath(
            baseDirectory, ETC_DIR, DISPLAY_CONFIG_DIR, filename);
    final DisplayDeviceConfig config = new DisplayDeviceConfig(context);
    if (config.initFromFile(filePath)) {
        return config;
    }
    return null;
}
// Config built entirely from config.xml resources.
private static DisplayDeviceConfig getConfigFromGlobalXml(Context context) {
    DisplayDeviceConfig config = new DisplayDeviceConfig(context);
    config.initFromGlobalXml();
    return config;
}
// Config built from hard-coded PowerManager defaults.
private static DisplayDeviceConfig getConfigFromPmValues(Context context) {
    DisplayDeviceConfig config = new DisplayDeviceConfig(context);
    config.initFromDefaultValues();
    return config;
}
// Loads this config from an individual display_*.xml file. Returns false only
// when the path does not exist or is not a regular file.
private boolean initFromFile(File configFile) {
    if (!configFile.exists()) {
        // Display configuration files aren't required to exist.
        return false;
    }
    if (!configFile.isFile()) {
        Slog.e(TAG, "Display configuration is not a file: " + configFile + ", skipping");
        return false;
    }
    try (InputStream in = new BufferedInputStream(new FileInputStream(configFile))) {
        final DisplayConfiguration config = XmlParser.read(in);
        if (config != null) {
            loadBrightnessDefaultFromDdcXml(config);
            loadBrightnessConstraintsFromConfigXml();
            loadBrightnessMap(config);
            loadHighBrightnessModeData(config);
            loadQuirks(config);
            loadBrightnessRamps(config);
            loadAmbientLightSensorFromDdc(config);
            loadProxSensorFromDdc(config);
        } else {
            Slog.w(TAG, "DisplayDeviceConfig file is null");
        }
    } catch (IOException | DatatypeConfigurationException | XmlPullParserException e) {
        Slog.e(TAG, "Encountered an error while reading/parsing display config file: "
                + configFile, e);
    }
    // NOTE(review): returns true (and records mLoadedFrom) even when parsing threw
    // above, leaving a partially-initialized config — confirm this is intentional.
    mLoadedFrom = configFile.toString();
    return true;
}
private void initFromGlobalXml() {
    // If no ddc exists, use config.xml
    loadBrightnessDefaultFromConfigXml();
    loadBrightnessConstraintsFromConfigXml();
    loadBrightnessMapFromConfigXml();
    loadBrightnessRampsFromConfigXml();
    loadAmbientLightSensorFromConfigXml();
    setProxSensorUnspecified();
    mLoadedFrom = "<config.xml>";
}
private void initFromDefaultValues() {
    // Set all to basic values
    mLoadedFrom = "Static values";
    mBacklightMinimum = PowerManager.BRIGHTNESS_MIN;
    mBacklightMaximum = PowerManager.BRIGHTNESS_MAX;
    mBrightnessDefault = BRIGHTNESS_DEFAULT;
    mBrightnessRampFastDecrease = PowerManager.BRIGHTNESS_MAX;
    mBrightnessRampFastIncrease = PowerManager.BRIGHTNESS_MAX;
    mBrightnessRampSlowDecrease = PowerManager.BRIGHTNESS_MAX;
    mBrightnessRampSlowIncrease = PowerManager.BRIGHTNESS_MAX;
    setSimpleMappingStrategyValues();
    loadAmbientLightSensorFromConfigXml();
    setProxSensorUnspecified();
}
private void loadBrightnessDefaultFromDdcXml(DisplayConfiguration config) {
    // Default brightness values are stored in the displayDeviceConfig file,
    // Or we fallback standard values if not.
    // Priority 1: Value in the displayDeviceConfig
    // Priority 2: Value in the config.xml (float)
    // Priority 3: Value in the config.xml (int)
    if (config != null) {
        BigDecimal configBrightnessDefault = config.getScreenBrightnessDefault();
        if (configBrightnessDefault != null) {
            mBrightnessDefault = configBrightnessDefault.floatValue();
        } else {
            loadBrightnessDefaultFromConfigXml();
        }
    }
}
private void loadBrightnessDefaultFromConfigXml() {
    // Priority 1: Value in the config.xml (float)
    // Priority 2: Value in the config.xml (int)
    final float def = mContext.getResources().getFloat(com.android.internal.R.dimen
            .config_screenBrightnessSettingDefaultFloat);
    if (def == INVALID_BRIGHTNESS_IN_CONFIG) {
        // Float resource not provided; fall back to the legacy integer setting.
        mBrightnessDefault = BrightnessSynchronizer.brightnessIntToFloat(
                mContext.getResources().getInteger(com.android.internal.R.integer
                        .config_screenBrightnessSettingDefault));
    } else {
        mBrightnessDefault = def;
    }
}
private void loadBrightnessConstraintsFromConfigXml() {
    // TODO(b/175373898) add constraints (min / max) to ddc.
    final float min = mContext.getResources().getFloat(com.android.internal.R.dimen
            .config_screenBrightnessSettingMinimumFloat);
    final float max = mContext.getResources().getFloat(com.android.internal.R.dimen
            .config_screenBrightnessSettingMaximumFloat);
    if (min == INVALID_BRIGHTNESS_IN_CONFIG || max == INVALID_BRIGHTNESS_IN_CONFIG) {
        // If either float bound is missing, use both legacy integer bounds so the
        // pair stays consistent.
        mBacklightMinimum = BrightnessSynchronizer.brightnessIntToFloat(
                mContext.getResources().getInteger(com.android.internal.R.integer
                        .config_screenBrightnessSettingMinimum));
        mBacklightMaximum = BrightnessSynchronizer.brightnessIntToFloat(
                mContext.getResources().getInteger(com.android.internal.R.integer
                        .config_screenBrightnessSettingMaximum));
    } else {
        mBacklightMinimum = min;
        mBacklightMaximum = max;
    }
}
private void loadBrightnessMap(DisplayConfiguration config) {
    final NitsMap map = config.getScreenBrightnessMap();
    // Map may not exist in display device config
    if (map == null) {
        loadBrightnessMapFromConfigXml();
        return;
    }
    // Use the (preferred) display device config mapping
    final List<Point> points = map.getPoint();
    final int size = points.size();
    float[] nits = new float[size];
    float[] backlight = new float[size];
    int i = 0;
    for (Point point : points) {
        nits[i] = point.getNits().floatValue();
        backlight[i] = point.getValue().floatValue();
        if (i > 0) {
            // Both curves must be non-decreasing; on violation, bail out and leave
            // mRawNits/mRawBacklight unset.
            if (nits[i] < nits[i - 1]) {
                Slog.e(TAG, "screenBrightnessMap must be non-decreasing, ignoring rest "
                        + " of configuration. Nits: " + nits[i] + " < " + nits[i - 1]);
                return;
            }
            if (backlight[i] < backlight[i - 1]) {
                Slog.e(TAG, "screenBrightnessMap must be non-decreasing, ignoring rest "
                        + " of configuration. Value: " + backlight[i] + " < "
                        + backlight[i - 1]);
                return;
            }
        }
        ++i;
    }
    mRawNits = nits;
    mRawBacklight = backlight;
    // Derives the clamped mNits/mBacklight arrays (bounded by the backlight
    // min/max, per the field docs above).
    constrainNitsAndBacklightArrays();
}
private void loadBrightnessMapFromConfigXml() {
    // Use the config.xml mapping
    final Resources res = mContext.getResources();
    final float[] sysNits = BrightnessMappingStrategy.getFloatArray(res.obtainTypedArray(
            com.android.internal.R.array.config_screenBrightnessNits));
    final int[] sysBrightness = res.getIntArray(
            com.android.internal.R.array.config_screenBrightnessBacklight);
    final float[] sysBrightnessFloat = new float[sysBrightness.length];
    for (int i = 0; i < sysBrightness.length; i++) {
        sysBrightnessFloat[i] = BrightnessSynchronizer.brightnessIntToFloat(
                sysBrightness[i]);
    }
    // These arrays are allowed to be empty, we set null values so that
    // BrightnessMappingStrategy will create a SimpleMappingStrategy instead.
    if (sysBrightnessFloat.length == 0 || sysNits.length == 0) {
        setSimpleMappingStrategyValues();
        return;
    }
    mRawNits = sysNits;
    mRawBacklight = sysBrightnessFloat;
    constrainNitsAndBacklightArrays();
}
private void setSimpleMappingStrategyValues() {
// No translation from backlight to brightness should occur if we are using a
// SimpleMappingStrategy (ie they should be the same) so the splines are
// set to be linear, between 0.0 and 1.0
mNits = null;
mBacklight = null;
float[] simpleMappingStrategyArray = new float[]{0.0f, 1.0f};
mBrightnessToBacklightSpline = Spline.createSpline(simpleMappingStrategyArray,
simpleMappingStrategyArray);
mBacklightToBrightnessSpline = Spline.createSpline(simpleMappingStrategyArray,
simpleMappingStrategyArray);
}
    /**
     * Change the nits and backlight arrays, so that they cover only the allowed backlight values
     * Use the brightness minimum and maximum values to clamp these arrays.
     *
     * Preconditions: {@code mRawBacklight}/{@code mRawNits} are populated, non-decreasing,
     * and the raw backlight range covers [mBacklightMinimum, mBacklightMaximum].
     * Postcondition: {@code mBacklight}/{@code mNits} hold the clamped curve and the
     * conversion splines are rebuilt.
     *
     * @throws IllegalStateException if the raw curve cannot cover the min/max range
     */
    private void constrainNitsAndBacklightArrays() {
        if (mRawBacklight[0] > mBacklightMinimum
                || mRawBacklight[mRawBacklight.length - 1] < mBacklightMaximum
                || mBacklightMinimum > mBacklightMaximum) {
            throw new IllegalStateException("Min or max values are invalid"
                    + "; raw min=" + mRawBacklight[0]
                    + "; raw max=" + mRawBacklight[mRawBacklight.length - 1]
                    + "; backlight min=" + mBacklightMinimum
                    + "; backlight max=" + mBacklightMaximum);
        }
        float[] newNits = new float[mRawBacklight.length];
        float[] newBacklight = new float[mRawBacklight.length];
        // Find the starting index of the clamped arrays. This may be less than the min so
        // we'll need to clamp this value still when actually doing the remapping.
        int newStart = 0;
        for (int i = 0; i < mRawBacklight.length - 1; i++) {
            if (mRawBacklight[i + 1] > mBacklightMinimum) {
                newStart = i;
                break;
            }
        }
        boolean isLastValue = false;
        int newIndex = 0;
        for (int i = newStart; i < mRawBacklight.length && !isLastValue; i++) {
            newIndex = i - newStart;
            final float newBacklightVal;
            final float newNitsVal;
            // The last emitted point is either the first raw point above the maximum
            // (clamped down to it) or the final raw point.
            isLastValue = mRawBacklight[i] > mBacklightMaximum
                    || i >= mRawBacklight.length - 1;
            // Clamp beginning and end to valid backlight values.
            if (newIndex == 0) {
                // First point: clamp up to the minimum and interpolate nits on segment i.
                newBacklightVal = MathUtils.max(mRawBacklight[i], mBacklightMinimum);
                newNitsVal = rawBacklightToNits(i, newBacklightVal);
            } else if (isLastValue) {
                // Last point: clamp down to the maximum; interpolate on the segment
                // ending at i (hence i - 1).
                newBacklightVal = MathUtils.min(mRawBacklight[i], mBacklightMaximum);
                newNitsVal = rawBacklightToNits(i - 1, newBacklightVal);
            } else {
                // Interior points are copied through unchanged.
                newBacklightVal = mRawBacklight[i];
                newNitsVal = mRawNits[i];
            }
            newBacklight[newIndex] = newBacklightVal;
            newNits[newIndex] = newNitsVal;
        }
        // Trim to the points actually written (newIndex is the last written index).
        mBacklight = Arrays.copyOf(newBacklight, newIndex + 1);
        mNits = Arrays.copyOf(newNits, newIndex + 1);
        createBacklightConversionSplines();
    }
private float rawBacklightToNits(int i, float backlight) {
return MathUtils.map(mRawBacklight[i], mRawBacklight[i + 1],
mRawNits[i], mRawNits[i + 1], backlight);
}
// This method creates a brightness spline that is of equal length with proportional increments
// to the backlight spline. The values of this array range from 0.0f to 1.0f instead of the
// potential constrained range that the backlight array covers
// These splines are used to convert from the system brightness value to the HAL backlight
// value
private void createBacklightConversionSplines() {
mBrightness = new float[mBacklight.length];
for (int i = 0; i < mBrightness.length; i++) {
mBrightness[i] = MathUtils.map(mBacklight[0],
mBacklight[mBacklight.length - 1],
PowerManager.BRIGHTNESS_MIN, PowerManager.BRIGHTNESS_MAX, mBacklight[i]);
}
mBrightnessToBacklightSpline = Spline.createSpline(mBrightness, mBacklight);
mBacklightToBrightnessSpline = Spline.createSpline(mBacklight, mBrightness);
mBacklightToNitsSpline = Spline.createSpline(mBacklight, mNits);
}
private void loadQuirks(DisplayConfiguration config) {
final DisplayQuirks quirks = config.getQuirks();
if (quirks != null) {
mQuirks = new ArrayList<>(quirks.getQuirk());
}
}
private void loadHighBrightnessModeData(DisplayConfiguration config) {
final HighBrightnessMode hbm = config.getHighBrightnessMode();
if (hbm != null) {
mIsHighBrightnessModeEnabled = hbm.getEnabled();
mHbmData = new HighBrightnessModeData();
mHbmData.minimumLux = hbm.getMinimumLux_all().floatValue();
float transitionPointBacklightScale = hbm.getTransitionPoint_all().floatValue();
if (transitionPointBacklightScale >= mBacklightMaximum) {
throw new IllegalArgumentException("HBM transition point invalid. "
+ mHbmData.transitionPoint + " is not less than "
+ mBacklightMaximum);
}
mHbmData.transitionPoint =
mBacklightToBrightnessSpline.interpolate(transitionPointBacklightScale);
final HbmTiming hbmTiming = hbm.getTiming_all();
mHbmData.timeWindowMillis = hbmTiming.getTimeWindowSecs_all().longValue() * 1000;
mHbmData.timeMaxMillis = hbmTiming.getTimeMaxSecs_all().longValue() * 1000;
mHbmData.timeMinMillis = hbmTiming.getTimeMinSecs_all().longValue() * 1000;
mHbmData.thermalStatusLimit = convertThermalStatus(hbm.getThermalStatusLimit_all());
mHbmData.allowInLowPowerMode = hbm.getAllowInLowPowerMode_all();
final RefreshRateRange rr = hbm.getRefreshRate_all();
if (rr != null) {
final float min = rr.getMinimum().floatValue();
final float max = rr.getMaximum().floatValue();
mRefreshRateLimitations.add(new RefreshRateLimitation(
DisplayManagerInternal.REFRESH_RATE_LIMIT_HIGH_BRIGHTNESS_MODE, min, max));
}
}
}
private void loadBrightnessRamps(DisplayConfiguration config) {
// Priority 1: Value in the display device config (float)
// Priority 2: Value in the config.xml (int)
final BigDecimal fastDownDecimal = config.getScreenBrightnessRampFastDecrease();
final BigDecimal fastUpDecimal = config.getScreenBrightnessRampFastIncrease();
final BigDecimal slowDownDecimal = config.getScreenBrightnessRampSlowDecrease();
final BigDecimal slowUpDecimal = config.getScreenBrightnessRampSlowIncrease();
if (fastDownDecimal != null && fastUpDecimal != null && slowDownDecimal != null
&& slowUpDecimal != null) {
mBrightnessRampFastDecrease = fastDownDecimal.floatValue();
mBrightnessRampFastIncrease = fastUpDecimal.floatValue();
mBrightnessRampSlowDecrease = slowDownDecimal.floatValue();
mBrightnessRampSlowIncrease = slowUpDecimal.floatValue();
} else {
if (fastDownDecimal != null || fastUpDecimal != null || slowDownDecimal != null
|| slowUpDecimal != null) {
Slog.w(TAG, "Per display brightness ramp values ignored because not all "
+ "values are present in display device config");
}
loadBrightnessRampsFromConfigXml();
}
}
private void loadBrightnessRampsFromConfigXml() {
mBrightnessRampFastIncrease = BrightnessSynchronizer.brightnessIntToFloat(
mContext.getResources().getInteger(R.integer.config_brightness_ramp_rate_fast));
mBrightnessRampSlowIncrease = BrightnessSynchronizer.brightnessIntToFloat(
mContext.getResources().getInteger(R.integer.config_brightness_ramp_rate_slow));
// config.xml uses the same values for both increasing and decreasing brightness
// transitions so we assign them to the same values here.
mBrightnessRampFastDecrease = mBrightnessRampFastIncrease;
mBrightnessRampSlowDecrease = mBrightnessRampSlowIncrease;
}
private void loadAmbientLightSensorFromConfigXml() {
mAmbientLightSensor.name = "";
mAmbientLightSensor.type = mContext.getResources().getString(
com.android.internal.R.string.config_displayLightSensorType);
}
private void loadAmbientLightSensorFromDdc(DisplayConfiguration config) {
final SensorDetails sensorDetails = config.getLightSensor();
if (sensorDetails != null) {
mAmbientLightSensor.type = sensorDetails.getType();
mAmbientLightSensor.name = sensorDetails.getName();
final RefreshRateRange rr = sensorDetails.getRefreshRate();
if (rr != null) {
mAmbientLightSensor.minRefreshRate = rr.getMinimum().floatValue();
mAmbientLightSensor.maxRefreshRate = rr.getMaximum().floatValue();
}
} else {
loadAmbientLightSensorFromConfigXml();
}
}
private void setProxSensorUnspecified() {
mProximitySensor.name = "";
mProximitySensor.type = "";
}
private void loadProxSensorFromDdc(DisplayConfiguration config) {
SensorDetails sensorDetails = config.getProxSensor();
if (sensorDetails != null) {
mProximitySensor.name = sensorDetails.getName();
mProximitySensor.type = sensorDetails.getType();
final RefreshRateRange rr = sensorDetails.getRefreshRate();
if (rr != null) {
mProximitySensor.minRefreshRate = rr.getMinimum().floatValue();
mProximitySensor.maxRefreshRate = rr.getMaximum().floatValue();
}
} else {
setProxSensorUnspecified();
}
}
private @PowerManager.ThermalStatus int convertThermalStatus(ThermalStatus value) {
if (value == null) {
return PowerManager.THERMAL_STATUS_NONE;
}
switch (value) {
case none:
return PowerManager.THERMAL_STATUS_NONE;
case light:
return PowerManager.THERMAL_STATUS_LIGHT;
case moderate:
return PowerManager.THERMAL_STATUS_MODERATE;
case severe:
return PowerManager.THERMAL_STATUS_SEVERE;
case critical:
return PowerManager.THERMAL_STATUS_CRITICAL;
case emergency:
return PowerManager.THERMAL_STATUS_EMERGENCY;
case shutdown:
return PowerManager.THERMAL_STATUS_SHUTDOWN;
default:
Slog.wtf(TAG, "Unexpected Thermal Status: " + value);
return PowerManager.THERMAL_STATUS_NONE;
}
}
static class SensorData {
public String type;
public String name;
public float minRefreshRate = 0.0f;
public float maxRefreshRate = Float.POSITIVE_INFINITY;
@Override
public String toString() {
return "Sensor{"
+ "type: " + type
+ ", name: " + name
+ ", refreshRateRange: [" + minRefreshRate + ", " + maxRefreshRate + "]"
+ "} ";
}
/**
* @return True if the sensor matches both the specified name and type, or one if only
* one is specified (not-empty). Always returns false if both parameters are null or empty.
*/
public boolean matches(String sensorName, String sensorType) {
final boolean isNameSpecified = !TextUtils.isEmpty(sensorName);
final boolean isTypeSpecified = !TextUtils.isEmpty(sensorType);
return (isNameSpecified || isTypeSpecified)
&& (!isNameSpecified || sensorName.equals(name))
&& (!isTypeSpecified || sensorType.equals(type));
}
}
/**
* Container for high brightness mode configuration data.
*/
static class HighBrightnessModeData {
/** Minimum lux needed to enter high brightness mode */
public float minimumLux;
/** Brightness level at which we transition from normal to high-brightness. */
public float transitionPoint;
/** Enable HBM only if the thermal status is not higher than this. */
public @PowerManager.ThermalStatus int thermalStatusLimit;
/** Whether HBM is allowed when {@code Settings.Global.LOW_POWER_MODE} is active. */
public boolean allowInLowPowerMode;
/** Time window for HBM. */
public long timeWindowMillis;
/** Maximum time HBM is allowed to be during in a {@code timeWindowMillis}. */
public long timeMaxMillis;
/** Minimum time that HBM can be on before being enabled. */
public long timeMinMillis;
HighBrightnessModeData() {}
HighBrightnessModeData(float minimumLux, float transitionPoint, long timeWindowMillis,
long timeMaxMillis, long timeMinMillis,
@PowerManager.ThermalStatus int thermalStatusLimit, boolean allowInLowPowerMode) {
this.minimumLux = minimumLux;
this.transitionPoint = transitionPoint;
this.timeWindowMillis = timeWindowMillis;
this.timeMaxMillis = timeMaxMillis;
this.timeMinMillis = timeMinMillis;
this.thermalStatusLimit = thermalStatusLimit;
this.allowInLowPowerMode = allowInLowPowerMode;
}
/**
* Copies the HBM data to the specified parameter instance.
* @param other the instance to copy data to.
*/
public void copyTo(@NonNull HighBrightnessModeData other) {
other.minimumLux = minimumLux;
other.timeWindowMillis = timeWindowMillis;
other.timeMaxMillis = timeMaxMillis;
other.timeMinMillis = timeMinMillis;
other.transitionPoint = transitionPoint;
other.thermalStatusLimit = thermalStatusLimit;
other.allowInLowPowerMode = allowInLowPowerMode;
}
@Override
public String toString() {
return "HBM{"
+ "minLux: " + minimumLux
+ ", transition: " + transitionPoint
+ ", timeWindow: " + timeWindowMillis + "ms"
+ ", timeMax: " + timeMaxMillis + "ms"
+ ", timeMin: " + timeMinMillis + "ms"
+ ", thermalStatusLimit: " + thermalStatusLimit
+ ", allowInLowPowerMode: " + allowInLowPowerMode
+ "} ";
}
}
} |
Java | static class HighBrightnessModeData {
/** Minimum lux needed to enter high brightness mode */
public float minimumLux;
/** Brightness level at which we transition from normal to high-brightness. */
public float transitionPoint;
/** Enable HBM only if the thermal status is not higher than this. */
public @PowerManager.ThermalStatus int thermalStatusLimit;
/** Whether HBM is allowed when {@code Settings.Global.LOW_POWER_MODE} is active. */
public boolean allowInLowPowerMode;
/** Time window for HBM. */
public long timeWindowMillis;
/** Maximum time HBM is allowed to be during in a {@code timeWindowMillis}. */
public long timeMaxMillis;
/** Minimum time that HBM can be on before being enabled. */
public long timeMinMillis;
HighBrightnessModeData() {}
HighBrightnessModeData(float minimumLux, float transitionPoint, long timeWindowMillis,
long timeMaxMillis, long timeMinMillis,
@PowerManager.ThermalStatus int thermalStatusLimit, boolean allowInLowPowerMode) {
this.minimumLux = minimumLux;
this.transitionPoint = transitionPoint;
this.timeWindowMillis = timeWindowMillis;
this.timeMaxMillis = timeMaxMillis;
this.timeMinMillis = timeMinMillis;
this.thermalStatusLimit = thermalStatusLimit;
this.allowInLowPowerMode = allowInLowPowerMode;
}
/**
* Copies the HBM data to the specified parameter instance.
* @param other the instance to copy data to.
*/
public void copyTo(@NonNull HighBrightnessModeData other) {
other.minimumLux = minimumLux;
other.timeWindowMillis = timeWindowMillis;
other.timeMaxMillis = timeMaxMillis;
other.timeMinMillis = timeMinMillis;
other.transitionPoint = transitionPoint;
other.thermalStatusLimit = thermalStatusLimit;
other.allowInLowPowerMode = allowInLowPowerMode;
}
@Override
public String toString() {
return "HBM{"
+ "minLux: " + minimumLux
+ ", transition: " + transitionPoint
+ ", timeWindow: " + timeWindowMillis + "ms"
+ ", timeMax: " + timeMaxMillis + "ms"
+ ", timeMin: " + timeMinMillis + "ms"
+ ", thermalStatusLimit: " + thermalStatusLimit
+ ", allowInLowPowerMode: " + allowInLowPowerMode
+ "} ";
}
} |
Java | public class Error {
private String id;
private String status;
private String code;
private String title;
private String detail;
private Source source;
private ErrorLinks links;
private HashMap<String, Object> meta;
public HashMap<String, Object> getMeta() {
return meta;
}
public void setMeta(HashMap<String, Object> meta) {
this.meta = meta;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public ErrorLinks getLinks() {
return links;
}
public void setLinks(ErrorLinks linkss) {
this.links = linkss;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getDetail() {
return detail;
}
public void setDetail(String detail) {
this.detail = detail;
}
public Source getSource() {
return source;
}
public void setSource(Source source) {
this.source = source;
}
} |
Java | public class OneToManyUnidireccionalTest {
private static EntityManagerFactory emf;
private EntityManager em;
@BeforeClass
public static void setUpClass() {
emf = Persistence.createEntityManagerFactory("LocalMemoryPU");
}
@AfterClass
public static void tearDownClass() {
emf.close();
}
@Before
public void setUp() {
em = emf.createEntityManager();
}
@After
public void tearDown() {
em.close();
}
@Test
public void testPersist() {
em.getTransaction().begin();
Factura factura = new Factura();
Item item = new Item();
factura.getItems().add(item);
em.persist(item);
assertNotNull(item.getId());
Item item1 = new Item();
factura.getItems().add(item1);
em.persist(item1);
assertNotNull(item1.getId());
em.persist(factura);
assertNotNull(factura.getId());
em.getTransaction().commit();
em.getTransaction().begin();
Factura otraFactura = em.find(Factura.class, factura.getId());
assertNotNull(otraFactura);
assertEquals(otraFactura.getItems().size(), 2);
Item otroItem = em.find(Item.class, item.getId());
assertNotNull(otroItem);
assertEquals(otroItem.getId(), otraFactura.getItems().get(0).getId());
assertEquals(em.createQuery("select f from Factura f").getResultList().size(), 1);
assertEquals(em.createQuery("select i from Item i").getResultList().size(), 2);
em.getTransaction().commit();
}
} |
Java | public class TetheringCreationRequest {
// Name of the peer
private final String peer;
// Server endpoint
private final String endpoint;
// CDAP namespaces
private final List<NamespaceAllocation> namespaceAllocations;
// Metadata associated with this tethering
private final Map<String, String> metadata;
public TetheringCreationRequest(String peer, String endpoint,
List<NamespaceAllocation> namespaceAllocations, Map<String, String> metadata) {
this.peer = peer;
this.endpoint = endpoint;
this.namespaceAllocations = namespaceAllocations;
this.metadata = metadata;
}
public String getPeer() {
return peer;
}
public String getEndpoint() {
return endpoint;
}
public List<NamespaceAllocation> getNamespaceAllocations() {
return namespaceAllocations;
}
public Map<String, String> getMetadata() {
return metadata;
}
} |
Java | abstract public class MyScreen implements Screen, InitableInterface {
protected static SpriteBatch spriteBatch = new SpriteBatch();
public float r=0,g=0,b=0;
public final marancsicsGame game;
public static SpriteBatch getSpriteBatch() {
return spriteBatch;
}
public MyScreen(marancsicsGame game) {
this.game = game;
init();
}
@Override
public void dispose() {
//spriteBatch.dispose();
}
@Override
public void hide() {
}
@Override
public void pause() {
}
@Override
public void render(float delta) {
Gdx.gl.glClearColor(r, g, b, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
//spriteBatch.setProjectionMatrix(camera.combined);
}
@Override
public void resize(int width, int height) {
//setCameraReset(viewport, width, height);
}
@Override
public void resume() {
}
@Override
public void show() {
}
public marancsicsGame getGame() {
return game;
}
public void setBackGroundColor(float r, float g, float b)
{
this.r=r;
this.g = g;
this.b = b;
}
} |
Java | public abstract class SuperTypeRefactoringProcessor extends WatchableRefactoringProcessor {
	// Refactoring-descriptor argument keys for the "instanceof"/"replace" options.
	// CODINGSPECTATOR: Made the name of the attribute public for the first time because of the UI tests.
	// TODO: remove
	public static final String ATTRIBUTE_INSTANCEOF= "instanceof"; //$NON-NLS-1$

	// CODINGSPECTATOR: Made the name of the attribute public for the first time because of the UI tests.
	// TODO: remove
	public static final String ATTRIBUTE_REPLACE= "replace"; //$NON-NLS-1$

	/** The super type group category set */
	protected static final GroupCategorySet SET_SUPER_TYPE= new GroupCategorySet(new GroupCategory("org.eclipse.jdt.internal.corext.superType", //$NON-NLS-1$
			RefactoringCoreMessages.SuperTypeRefactoringProcessor_category_name, RefactoringCoreMessages.SuperTypeRefactoringProcessor_category_description));

	/** Number of compilation units to parse at once */
	private static final int SIZE_BATCH= 500;
/**
* Returns a new ast node corresponding to the given type.
*
* @param rewrite the compilation unit rewrite to use
* @param type the specified type
* @return A corresponding ast node
*/
protected static ASTNode createCorrespondingNode(final CompilationUnitRewrite rewrite, final TType type) {
return rewrite.getImportRewrite().addImportFromSignature(new BindingKey(type.getBindingKey()).toSignature(), rewrite.getAST());
}
	// NOTE(review): the raw collection types (Map/Set) below predate generics in this
	// codebase and are kept as-is; element types are documented per field.
	/** Should type occurrences on instanceof's also be rewritten? */
	protected boolean fInstanceOf= false;

	/**
	 * The obsolete casts (element type:
	 * <code><ICompilationUnit, Collection<CastVariable2>></code>)
	 */
	protected Map fObsoleteCasts= null;

	/** The working copy owner */
	protected final WorkingCopyOwner fOwner= new WorkingCopyOwner() {
	};

	/** Should occurrences of the type be replaced by the supertype? */
	protected boolean fReplace= false;

	/** The code generation settings, or <code>null</code> */
	protected CodeGenerationSettings fSettings;

	/** The static bindings to import */
	protected final Set fStaticBindings= new HashSet();

	/** The type bindings to import */
	protected final Set fTypeBindings= new HashSet();

	/**
	 * The type occurrences (element type:
	 * <code><ICompilationUnit, Collection<IDeclaredConstraintVariable>></code>)
	 */
	protected Map fTypeOccurrences= null;
	/**
	 * Creates a new supertype refactoring processor.
	 *
	 * @param settings the code generation settings, or <code>null</code>
	 */
	protected SuperTypeRefactoringProcessor(final CodeGenerationSettings settings) {
		// May be null; createTypeSource() reads fSettings.createComments when present.
		fSettings= settings;
	}
/**
* Adds the refactoring settings to the specified comment.
*
* @param comment the java refactoring descriptor comment
* @param addUseSupertype <code>true</code> to add the use supertype setting, <code>false</code>
* otherwise
*/
protected void addSuperTypeSettings(final JDTRefactoringDescriptorComment comment, final boolean addUseSupertype) {
Assert.isNotNull(comment);
if (fReplace) {
if (addUseSupertype)
comment.addSetting(RefactoringCoreMessages.SuperTypeRefactoringProcessor_user_supertype_setting);
if (fInstanceOf)
comment.addSetting(RefactoringCoreMessages.SuperTypeRefactoringProcessor_use_in_instanceof_setting);
}
}
	/**
	 * Creates the super type constraint solver to solve the model.
	 * NOTE(review): method name has a typo ("Contraint"); kept because subclasses
	 * override it — renaming would break the override contract.
	 *
	 * @param model the model to create a solver for
	 * @return The created super type constraint solver
	 */
	protected abstract SuperTypeConstraintsSolver createContraintSolver(SuperTypeConstraintsModel model);
	/**
	 * Creates the declarations of the new supertype members. This default
	 * implementation is intentionally empty; subclasses that move members into the
	 * supertype override it.
	 *
	 * @param sourceRewrite the source compilation unit rewrite
	 * @param targetRewrite the target rewrite
	 * @param targetDeclaration the target type declaration
	 * @throws CoreException if a buffer could not be retrieved
	 */
	protected void createMemberDeclarations(CompilationUnitRewrite sourceRewrite, ASTRewrite targetRewrite, AbstractTypeDeclaration targetDeclaration) throws CoreException {
		// Do nothing
	}
	/**
	 * Creates the declaration of the new supertype, excluding any comments or package declaration.
	 *
	 * The declaration is first assembled textually in {@code buffer} as an empty type
	 * skeleton, then parsed and refined via AST rewrites (type parameters, member
	 * declarations) before the rewritten text is written back into {@code buffer}.
	 *
	 * @param sourceRewrite the source compilation unit rewrite
	 * @param subType the subtype
	 * @param superName the name of the supertype
	 * @param sourceDeclaration the type declaration of the source type
	 * @param buffer the string buffer containing the declaration
	 * @param isInterface <code>true</code> if the type declaration is an interface,
	 *            <code>false</code> otherwise
	 * @param status the refactoring status
	 * @param monitor the progress monitor to use
	 * @throws CoreException if an error occurs
	 */
	protected final void createTypeDeclaration(final CompilationUnitRewrite sourceRewrite, final IType subType, final String superName, final AbstractTypeDeclaration sourceDeclaration,
			final StringBuffer buffer, boolean isInterface, final RefactoringStatus status, final IProgressMonitor monitor) throws CoreException {
		Assert.isNotNull(sourceRewrite);
		Assert.isNotNull(subType);
		Assert.isNotNull(superName);
		Assert.isNotNull(sourceDeclaration);
		Assert.isNotNull(buffer);
		Assert.isNotNull(status);
		Assert.isNotNull(monitor);
		try {
			monitor.beginTask("", 100); //$NON-NLS-1$
			monitor.setTaskName(RefactoringCoreMessages.ExtractInterfaceProcessor_creating);
			final String delimiter= StubUtility.getLineDelimiterUsed(subType.getJavaProject());
			// The new type is public only if the subtype is public.
			if (JdtFlags.isPublic(subType)) {
				buffer.append(JdtFlags.VISIBILITY_STRING_PUBLIC);
				buffer.append(" "); //$NON-NLS-1$
			}
			if (isInterface)
				buffer.append("interface "); //$NON-NLS-1$
			else
				buffer.append("class "); //$NON-NLS-1$
			buffer.append(superName);
			buffer.append(" {"); //$NON-NLS-1$
			buffer.append(delimiter);
			buffer.append(delimiter);
			buffer.append('}');
			// Parse the skeleton so type parameters and members can be added via AST rewrites.
			final IDocument document= new Document(buffer.toString());
			final ASTParser parser= ASTParser.newParser(AST.JLS3);
			parser.setSource(document.get().toCharArray());
			final CompilationUnit unit= (CompilationUnit)parser.createAST(new SubProgressMonitor(monitor, 100));
			final ASTRewrite targetRewrite= ASTRewrite.create(unit.getAST());
			final AbstractTypeDeclaration targetDeclaration= (AbstractTypeDeclaration)unit.types().get(0);
			createTypeParameters(targetRewrite, subType, sourceDeclaration, targetDeclaration);
			createMemberDeclarations(sourceRewrite, targetRewrite, targetDeclaration);
			final TextEdit edit= targetRewrite.rewriteAST(document, subType.getJavaProject().getOptions(true));
			try {
				edit.apply(document, TextEdit.UPDATE_REGIONS);
			} catch (MalformedTreeException exception) {
				JavaPlugin.log(exception);
			} catch (BadLocationException exception) {
				JavaPlugin.log(exception);
			}
			// Replace the skeleton in the caller's buffer with the rewritten declaration.
			buffer.setLength(0);
			buffer.append(document.get());
		} finally {
			monitor.done();
		}
	}
	/**
	 * Creates the necessary imports for the extracted supertype.
	 *
	 * Consumes the bindings accumulated in {@code fTypeBindings}/{@code fStaticBindings}
	 * (both are cleared afterwards) and renders them through an ImportRewrite into a
	 * block of import declarations.
	 *
	 * @param unit the working copy of the new supertype
	 * @param monitor the progress monitor to use
	 * @return the generated import declaration
	 * @throws CoreException if the imports could not be generated
	 */
	protected final String createTypeImports(final ICompilationUnit unit, final IProgressMonitor monitor) throws CoreException {
		Assert.isNotNull(unit);
		Assert.isNotNull(monitor);
		try {
			monitor.beginTask("", 100); //$NON-NLS-1$
			monitor.setTaskName(RefactoringCoreMessages.ExtractInterfaceProcessor_creating);
			final ImportRewrite rewrite= StubUtility.createImportRewrite(unit, true);
			ITypeBinding type= null;
			for (final Iterator iterator= fTypeBindings.iterator(); iterator.hasNext();) {
				type= (ITypeBinding)iterator.next();
				// Type variables themselves are not imported, but their bounds may be.
				if (type.isTypeVariable()) {
					final ITypeBinding[] bounds= type.getTypeBounds();
					for (int index= 0; index < bounds.length; index++)
						rewrite.addImport(bounds[index]);
				}
				rewrite.addImport(type);
			}
			IBinding binding= null;
			for (final Iterator iterator= fStaticBindings.iterator(); iterator.hasNext();) {
				binding= (IBinding)iterator.next();
				rewrite.addStaticImport(binding);
			}
			// Render the imports into an empty document; failures are logged, leaving
			// the document (and thus the returned string) possibly empty.
			final IDocument document= new Document();
			try {
				rewrite.rewriteImports(new SubProgressMonitor(monitor, 100)).apply(document);
			} catch (MalformedTreeException exception) {
				JavaPlugin.log(exception);
			} catch (BadLocationException exception) {
				JavaPlugin.log(exception);
			} catch (CoreException exception) {
				JavaPlugin.log(exception);
			}
			// Reset the accumulators so the next extraction starts clean.
			fTypeBindings.clear();
			fStaticBindings.clear();
			return document.get();
		} finally {
			monitor.done();
		}
	}
/**
* Creates the type parameters of the new supertype.
*
* @param targetRewrite the target compilation unit rewrite
* @param subType the subtype
* @param sourceDeclaration the type declaration of the source type
* @param targetDeclaration the type declaration of the target type
*/
protected final void createTypeParameters(final ASTRewrite targetRewrite, final IType subType, final AbstractTypeDeclaration sourceDeclaration, final AbstractTypeDeclaration targetDeclaration) {
Assert.isNotNull(targetRewrite);
Assert.isNotNull(sourceDeclaration);
Assert.isNotNull(targetDeclaration);
if (sourceDeclaration instanceof TypeDeclaration) {
TypeParameter parameter= null;
final ListRewrite rewrite= targetRewrite.getListRewrite(targetDeclaration, TypeDeclaration.TYPE_PARAMETERS_PROPERTY);
for (final Iterator iterator= ((TypeDeclaration)sourceDeclaration).typeParameters().iterator(); iterator.hasNext();) {
parameter= (TypeParameter)iterator.next();
rewrite.insertLast(ASTNode.copySubtree(targetRewrite.getAST(), parameter), null);
ImportRewriteUtil.collectImports(subType.getJavaProject(), sourceDeclaration, fTypeBindings, fStaticBindings, false);
}
}
}
	/**
	 * Creates the source for the new compilation unit containing the supertype.
	 *
	 * Assembles the declaration (via {@link #createTypeDeclaration}), its imports, and —
	 * when code-generation comments are enabled — file and type comments, then formats
	 * the result with the project's formatter options.
	 *
	 * @param copy the working copy of the new supertype
	 * @param subType the subtype
	 * @param superName the name of the supertype
	 * @param sourceRewrite the source compilation unit rewrite
	 * @param declaration the type declaration
	 * @param status the refactoring status
	 * @param monitor the progress monitor to display progress
	 * @return the source of the new compilation unit, or <code>null</code>
	 * @throws CoreException if an error occurs
	 */
	protected final String createTypeSource(final ICompilationUnit copy, final IType subType, final String superName, final CompilationUnitRewrite sourceRewrite,
			final AbstractTypeDeclaration declaration, final RefactoringStatus status, final IProgressMonitor monitor) throws CoreException {
		Assert.isNotNull(copy);
		Assert.isNotNull(subType);
		Assert.isNotNull(superName);
		Assert.isNotNull(sourceRewrite);
		Assert.isNotNull(declaration);
		Assert.isNotNull(status);
		Assert.isNotNull(monitor);
		String source= null;
		try {
			monitor.beginTask("", 100); //$NON-NLS-1$
			monitor.setTaskName(RefactoringCoreMessages.ExtractInterfaceProcessor_creating);
			final String delimiter= StubUtility.getLineDelimiterUsed(subType.getJavaProject());
			String typeComment= null;
			String fileComment= null;
			// Only generate comments when the code generation settings ask for them.
			if (fSettings.createComments) {
				final ITypeParameter[] parameters= subType.getTypeParameters();
				final String[] names= new String[parameters.length];
				for (int index= 0; index < parameters.length; index++)
					names[index]= parameters[index].getElementName();
				typeComment= CodeGeneration.getTypeComment(copy, superName, names, delimiter);
				fileComment= CodeGeneration.getFileComment(copy, delimiter);
			}
			final StringBuffer buffer= new StringBuffer(64);
			createTypeDeclaration(sourceRewrite, subType, superName, declaration, buffer, true, status, new SubProgressMonitor(monitor, 40));
			final String imports= createTypeImports(copy, new SubProgressMonitor(monitor, 60));
			source= createTypeTemplate(copy, imports, fileComment, typeComment, buffer.toString());
			// If no template was produced, fall back to hand-assembling package + imports
			// + declaration in the buffer.
			if (source == null) {
				if (!subType.getPackageFragment().isDefaultPackage()) {
					if (imports.length() > 0)
						buffer.insert(0, imports);
					buffer.insert(0, "package " + subType.getPackageFragment().getElementName() + ";"); //$NON-NLS-1$//$NON-NLS-2$
				}
				source= buffer.toString();
			}
			// Format the assembled unit; formatting failures are logged and recorded in
			// the refactoring status but the unformatted source is still returned.
			final IDocument document= new Document(source);
			final TextEdit edit= CodeFormatterUtil.format2(CodeFormatter.K_COMPILATION_UNIT, source, 0, delimiter, copy.getJavaProject().getOptions(true));
			if (edit != null) {
				try {
					edit.apply(document, TextEdit.UPDATE_REGIONS);
				} catch (MalformedTreeException exception) {
					JavaPlugin.log(exception);
					status.merge(RefactoringStatus.createFatalErrorStatus(RefactoringCoreMessages.ExtractInterfaceProcessor_internal_error));
				} catch (BadLocationException exception) {
					JavaPlugin.log(exception);
					status.merge(RefactoringStatus.createFatalErrorStatus(RefactoringCoreMessages.ExtractInterfaceProcessor_internal_error));
				}
				source= document.get();
			}
		} finally {
			monitor.done();
		}
		return source;
	}
/**
* Creates the type template based on the code generation settings.
*
* @param unit the working copy for the new supertype
* @param imports the generated imports declaration
* @param fileComment the file comment
* @param comment the type comment
* @param content the type content
* @return a template for the supertype, or <code>null</code>
* @throws CoreException if the template could not be evaluated
*/
protected final String createTypeTemplate(final ICompilationUnit unit, final String imports, String fileComment, final String comment, final String content) throws CoreException {
Assert.isNotNull(unit);
Assert.isNotNull(imports);
Assert.isNotNull(content);
final IPackageFragment fragment= (IPackageFragment)unit.getParent();
final StringBuffer buffer= new StringBuffer();
final String delimiter= StubUtility.getLineDelimiterUsed(unit.getJavaProject());
if (!fragment.isDefaultPackage()) {
buffer.append("package " + fragment.getElementName() + ";"); //$NON-NLS-1$ //$NON-NLS-2$
buffer.append(delimiter);
buffer.append(delimiter);
}
if (imports.length() > 0)
buffer.append(imports);
return StubUtility.getCompilationUnitContent(unit, buffer.toString(), fileComment, comment, content, delimiter);
}
/**
* {@inheritDoc}
*/
protected void finalize() throws Throwable {
resetWorkingCopies();
}
/**
* Returns the field which corresponds to the specified variable declaration fragment
*
* @param fragment the variable declaration fragment
* @return the corresponding field
* @throws JavaModelException if an error occurs
*/
protected final IField getCorrespondingField(final VariableDeclarationFragment fragment) throws JavaModelException {
final IBinding binding= fragment.getName().resolveBinding();
if (binding instanceof IVariableBinding) {
final IVariableBinding variable= (IVariableBinding)binding;
if (variable.isField()) {
final ICompilationUnit unit= RefactoringASTParser.getCompilationUnit(fragment);
final IJavaElement element= unit.getElementAt(fragment.getStartPosition());
if (element instanceof IField)
return (IField)element;
}
}
return null;
}
/**
* Computes the compilation units of fields referencing the specified type occurrences.
*
* @param units the compilation unit map (element type:
* <code><IJavaProject, Set<ICompilationUnit>></code>)
* @param nodes the ast nodes representing the type occurrences
* @throws JavaModelException if an error occurs
*/
protected final void getFieldReferencingCompilationUnits(final Map units, final ASTNode[] nodes) throws JavaModelException {
ASTNode node= null;
IField field= null;
IJavaProject project= null;
for (int index= 0; index < nodes.length; index++) {
node= nodes[index];
project= RefactoringASTParser.getCompilationUnit(node).getJavaProject();
if (project != null) {
final List fields= getReferencingFields(node, project);
for (int offset= 0; offset < fields.size(); offset++) {
field= (IField)fields.get(offset);
Set set= (Set)units.get(project);
if (set == null) {
set= new HashSet();
units.put(project, set);
}
final ICompilationUnit unit= field.getCompilationUnit();
if (unit != null)
set.add(unit);
}
}
}
}
/**
* Computes the compilation units of methods referencing the specified type occurrences.
*
* @param units the compilation unit map (element type:
* <code><IJavaProject, Set<ICompilationUnit>></code>)
* @param nodes the ast nodes representing the type occurrences
* @throws JavaModelException if an error occurs
*/
protected final void getMethodReferencingCompilationUnits(final Map units, final ASTNode[] nodes) throws JavaModelException {
ASTNode node= null;
IMethod method= null;
IJavaProject project= null;
for (int index= 0; index < nodes.length; index++) {
node= nodes[index];
project= RefactoringASTParser.getCompilationUnit(node).getJavaProject();
if (project != null) {
method= getReferencingMethod(node);
if (method != null) {
Set set= (Set)units.get(project);
if (set == null) {
set= new HashSet();
units.put(project, set);
}
final ICompilationUnit unit= method.getCompilationUnit();
if (unit != null)
set.add(unit);
}
}
}
}
/**
* Computes the compilation units referencing the subtype to replace.
*
* @param type the subtype
* @param monitor the progress monitor to use
* @param status the refactoring status
* @return the referenced compilation units (element type:
* <code><IJavaProject, Collection<SearchResultGroup>></code>)
* @throws JavaModelException if an error occurs
*/
protected final Map getReferencingCompilationUnits(final IType type, final IProgressMonitor monitor, final RefactoringStatus status) throws JavaModelException {
try {
monitor.beginTask("", 100); //$NON-NLS-1$
monitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
final RefactoringSearchEngine2 engine= new RefactoringSearchEngine2();
engine.setOwner(fOwner);
engine.setFiltering(true, true);
engine.setStatus(status);
engine.setScope(RefactoringScopeFactory.create(type));
engine.setPattern(SearchPattern.createPattern(type, IJavaSearchConstants.REFERENCES, SearchUtils.GENERICS_AGNOSTIC_MATCH_RULE));
engine.searchPattern(new SubProgressMonitor(monitor, 100));
return engine.getAffectedProjects();
} finally {
monitor.done();
}
}
/**
* Returns the fields which reference the specified ast node.
*
* @param node the ast node
* @param project the java project
* @return the referencing fields
* @throws JavaModelException if an error occurs
*/
protected final List getReferencingFields(final ASTNode node, final IJavaProject project) throws JavaModelException {
List result= Collections.EMPTY_LIST;
if (node instanceof Type) {
final BodyDeclaration parent= (BodyDeclaration)ASTNodes.getParent(node, BodyDeclaration.class);
if (parent instanceof FieldDeclaration) {
final List fragments= ((FieldDeclaration)parent).fragments();
result= new ArrayList(fragments.size());
VariableDeclarationFragment fragment= null;
for (final Iterator iterator= fragments.iterator(); iterator.hasNext();) {
fragment= (VariableDeclarationFragment)iterator.next();
final IField field= getCorrespondingField(fragment);
if (field != null)
result.add(field);
}
}
}
return result;
}
/**
* Returns the method which references the specified ast node.
*
* @param node the ast node
* @return the referencing method
* @throws JavaModelException if an error occurs
*/
protected final IMethod getReferencingMethod(final ASTNode node) throws JavaModelException {
if (node instanceof Type) {
final BodyDeclaration parent= (BodyDeclaration)ASTNodes.getParent(node, BodyDeclaration.class);
if (parent instanceof MethodDeclaration) {
final IMethodBinding binding= ((MethodDeclaration)parent).resolveBinding();
if (binding != null) {
final ICompilationUnit unit= RefactoringASTParser.getCompilationUnit(node);
final IJavaElement element= unit.getElementAt(node.getStartPosition());
if (element instanceof IMethod)
return (IMethod)element;
}
}
}
return null;
}
protected ICompilationUnit getSharedWorkingCopy(final ICompilationUnit unit, final IProgressMonitor monitor) throws JavaModelException {
try {
ICompilationUnit copy= unit.findWorkingCopy(fOwner);
if (copy == null)
copy= unit.getWorkingCopy(fOwner, monitor);
return copy;
} finally {
monitor.done();
}
}
/**
* Returns whether type occurrences in instanceof's should be rewritten.
*
* @return <code>true</code> if they are rewritten, <code>false</code> otherwise
*/
public final boolean isInstanceOf() {
return fInstanceOf;
}
/**
* Should occurrences of the subtype be replaced by the supertype?
*
* @return <code>true</code> if the subtype should be replaced, <code>false</code> otherwise
*/
public final boolean isReplace() {
return fReplace;
}
/**
* Performs the first pass of processing the affected compilation units.
*
* @param creator the constraints creator to use
* @param units the compilation unit map (element type:
* <code><IJavaProject, Set<ICompilationUnit>></code>)
* @param groups the search result group map (element type:
* <code><ICompilationUnit, SearchResultGroup></code>)
* @param unit the compilation unit of the subtype
* @param node the compilation unit node of the subtype
* @param monitor the progress monitor to use
*/
protected final void performFirstPass(final SuperTypeConstraintsCreator creator, final Map units, final Map groups, final ICompilationUnit unit, final CompilationUnit node,
final IProgressMonitor monitor) {
try {
monitor.beginTask("", 100); //$NON-NLS-1$
monitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
node.accept(creator);
monitor.worked(20);
final SearchResultGroup group= (SearchResultGroup)groups.get(unit);
if (group != null) {
final ASTNode[] nodes= ASTNodeSearchUtil.getAstNodes(group.getSearchResults(), node);
try {
getMethodReferencingCompilationUnits(units, nodes);
monitor.worked(40);
getFieldReferencingCompilationUnits(units, nodes);
monitor.worked(40);
} catch (JavaModelException exception) {
JavaPlugin.log(exception);
}
}
} finally {
monitor.done();
}
}
/**
* Performs the second pass of processing the affected compilation units.
*
* @param creator the constraints creator to use
* @param unit the compilation unit of the subtype
* @param node the compilation unit node of the subtype
* @param monitor the progress monitor to use
*/
protected final void performSecondPass(final SuperTypeConstraintsCreator creator, final ICompilationUnit unit, final CompilationUnit node, final IProgressMonitor monitor) {
try {
monitor.beginTask("", 20); //$NON-NLS-1$
monitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
node.accept(creator);
monitor.worked(20);
} finally {
monitor.done();
}
}
/**
* Resets the working copies.
*/
protected void resetWorkingCopies() {
final ICompilationUnit[] units= JavaCore.getWorkingCopies(fOwner);
for (int index= 0; index < units.length; index++) {
final ICompilationUnit unit= units[index];
try {
unit.discardWorkingCopy();
} catch (Exception exception) {
// Do nothing
}
}
}
/**
* Resets the working copies.
*
* @param unit the compilation unit to discard
*/
protected void resetWorkingCopies(final ICompilationUnit unit) {
final ICompilationUnit[] units= JavaCore.getWorkingCopies(fOwner);
for (int index= 0; index < units.length; index++) {
if (!units[index].equals(unit)) {
try {
units[index].discardWorkingCopy();
} catch (Exception exception) {
// Do nothing
}
} else {
try {
units[index].getBuffer().setContents(unit.getPrimary().getBuffer().getContents());
JavaModelUtil.reconcile(units[index]);
} catch (JavaModelException exception) {
JavaPlugin.log(exception);
}
}
}
}
/**
* Creates the necessary text edits to replace the subtype occurrence by a supertype.
*
* @param range the compilation unit range
* @param estimate the type estimate
* @param requestor the ast requestor to use
* @param rewrite the compilation unit rewrite to use
* @param copy the compilation unit node of the working copy ast
* @param replacements the set of variable binding keys of formal parameters which must be
* replaced
* @param group the text edit group to use
*/
protected final void rewriteTypeOccurrence(final CompilationUnitRange range, final TType estimate, final ASTRequestor requestor, final CompilationUnitRewrite rewrite, final CompilationUnit copy,
final Set replacements, final TextEditGroup group) {
ASTNode node= null;
IBinding binding= null;
final CompilationUnit target= rewrite.getRoot();
node= NodeFinder.perform(copy, range.getSourceRange());
if (node != null) {
node= ASTNodes.getNormalizedNode(node).getParent();
if (node instanceof VariableDeclaration) {
binding= ((VariableDeclaration)node).resolveBinding();
node= target.findDeclaringNode(binding.getKey());
if (node instanceof SingleVariableDeclaration) {
rewriteTypeOccurrence(estimate, rewrite, ((SingleVariableDeclaration)node).getType(), group);
if (node.getParent() instanceof MethodDeclaration) {
binding= ((VariableDeclaration)node).resolveBinding();
if (binding != null)
replacements.add(binding.getKey());
}
}
} else if (node instanceof VariableDeclarationStatement) {
binding= ((VariableDeclaration)((VariableDeclarationStatement)node).fragments().get(0)).resolveBinding();
node= target.findDeclaringNode(binding.getKey());
if (node instanceof VariableDeclarationFragment)
rewriteTypeOccurrence(estimate, rewrite, ((VariableDeclarationStatement)((VariableDeclarationFragment)node).getParent()).getType(), group);
} else if (node instanceof MethodDeclaration) {
binding= ((MethodDeclaration)node).resolveBinding();
node= target.findDeclaringNode(binding.getKey());
if (node instanceof MethodDeclaration)
rewriteTypeOccurrence(estimate, rewrite, ((MethodDeclaration)node).getReturnType2(), group);
} else if (node instanceof FieldDeclaration) {
binding= ((VariableDeclaration)((FieldDeclaration)node).fragments().get(0)).resolveBinding();
node= target.findDeclaringNode(binding.getKey());
if (node instanceof VariableDeclarationFragment) {
node= node.getParent();
if (node instanceof FieldDeclaration)
rewriteTypeOccurrence(estimate, rewrite, ((FieldDeclaration)node).getType(), group);
}
} else if (node instanceof ArrayType) {
final ASTNode type= node;
while (node != null && !(node instanceof MethodDeclaration) && !(node instanceof VariableDeclarationFragment))
node= node.getParent();
if (node != null) {
final int delta= node.getStartPosition() + node.getLength() - type.getStartPosition();
if (node instanceof MethodDeclaration)
binding= ((MethodDeclaration)node).resolveBinding();
else if (node instanceof VariableDeclarationFragment)
binding= ((VariableDeclarationFragment)node).resolveBinding();
if (binding != null) {
node= target.findDeclaringNode(binding.getKey());
if (node instanceof MethodDeclaration || node instanceof VariableDeclarationFragment) {
node= NodeFinder.perform(target, (node.getStartPosition() + node.getLength() - delta), 0);
if (node instanceof SimpleName)
rewriteTypeOccurrence(estimate, rewrite, node, group);
}
}
}
} else if (node instanceof QualifiedName) {
final ASTNode name= node;
while (node != null && !(node instanceof MethodDeclaration) && !(node instanceof VariableDeclarationFragment))
node= node.getParent();
if (node != null) {
final int delta= node.getStartPosition() + node.getLength() - name.getStartPosition();
if (node instanceof MethodDeclaration)
binding= ((MethodDeclaration)node).resolveBinding();
else if (node instanceof VariableDeclarationFragment)
binding= ((VariableDeclarationFragment)node).resolveBinding();
if (binding != null) {
node= target.findDeclaringNode(binding.getKey());
if (node instanceof SimpleName || node instanceof MethodDeclaration || node instanceof VariableDeclarationFragment) {
node= NodeFinder.perform(target, (node.getStartPosition() + node.getLength() - delta), 0);
if (node instanceof SimpleName)
rewriteTypeOccurrence(estimate, rewrite, node, group);
}
}
}
} else if (node instanceof CastExpression) {
final ASTNode expression= node;
while (node != null && !(node instanceof MethodDeclaration))
node= node.getParent();
if (node != null) {
final int delta= node.getStartPosition() + node.getLength() - expression.getStartPosition();
binding= ((MethodDeclaration)node).resolveBinding();
node= target.findDeclaringNode(binding.getKey());
if (node instanceof MethodDeclaration) {
node= NodeFinder.perform(target, (node.getStartPosition() + node.getLength() - delta), 0);
if (node instanceof CastExpression)
rewriteTypeOccurrence(estimate, rewrite, ((CastExpression)node).getType(), group);
}
}
}
}
}
/**
* Creates the necessary text edits to replace the subtype occurrence by a supertype.
*
* @param estimate the type estimate
* @param rewrite the ast rewrite to use
* @param node the ast node to rewrite
* @param group the text edit group to use
*/
protected final void rewriteTypeOccurrence(final TType estimate, final CompilationUnitRewrite rewrite, final ASTNode node, final TextEditGroup group) {
rewrite.getImportRemover().registerRemovedNode(node);
rewrite.getASTRewrite().replace(node, createCorrespondingNode(rewrite, estimate), group);
}
/**
* Creates the necessary text edits to replace the subtype occurrence by a supertype.
*
* @param manager the text change manager to use
* @param requestor the ast requestor to use
* @param rewrite the compilation unit rewrite of the subtype (not in working copy mode)
* @param unit the compilation unit
* @param node the compilation unit node
* @param replacements the set of variable binding keys of formal parameters which must be
* replaced
* @param monitor the progress monitor to use
* @throws CoreException if the change could not be generated
*/
protected abstract void rewriteTypeOccurrences(TextEditBasedChangeManager manager, ASTRequestor requestor, CompilationUnitRewrite rewrite, ICompilationUnit unit, CompilationUnit node,
Set replacements, IProgressMonitor monitor) throws CoreException;
/**
* Creates the necessary text edits to replace the subtype occurrences by a supertype.
*
* @param manager the text change manager to use
* @param sourceRewrite the compilation unit rewrite of the subtype (not in working copy mode)
* @param sourceRequestor the ast requestor of the subtype, or <code>null</code>
* @param subUnit the compilation unit of the subtype, or <code>null</code>
* @param subNode the compilation unit node of the subtype, or <code>null</code>
* @param replacements the set of variable binding keys of formal parameters which must be
* replaced
* @param status the refactoring status
* @param monitor the progress monitor to use
*/
protected final void rewriteTypeOccurrences(final TextEditBasedChangeManager manager, final ASTRequestor sourceRequestor, final CompilationUnitRewrite sourceRewrite,
final ICompilationUnit subUnit, final CompilationUnit subNode, final Set replacements, final RefactoringStatus status, final IProgressMonitor monitor) {
try {
monitor.beginTask("", 300); //$NON-NLS-1$
monitor.setTaskName(RefactoringCoreMessages.ExtractInterfaceProcessor_creating);
if (fTypeOccurrences != null) {
final Set units= new HashSet(fTypeOccurrences.keySet());
if (subUnit != null)
units.remove(subUnit);
final Map projects= new HashMap();
Collection collection= null;
IJavaProject project= null;
ICompilationUnit current= null;
for (final Iterator iterator= units.iterator(); iterator.hasNext();) {
current= (ICompilationUnit)iterator.next();
project= current.getJavaProject();
collection= (Collection)projects.get(project);
if (collection == null) {
collection= new ArrayList();
projects.put(project, collection);
}
collection.add(current);
}
final ASTParser parser= ASTParser.newParser(AST.JLS3);
final IProgressMonitor subMonitor= new SubProgressMonitor(monitor, 320);
try {
final Set keySet= projects.keySet();
subMonitor.beginTask("", keySet.size() * 100); //$NON-NLS-1$
subMonitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
for (final Iterator iterator= keySet.iterator(); iterator.hasNext();) {
project= (IJavaProject)iterator.next();
collection= (Collection)projects.get(project);
parser.setWorkingCopyOwner(fOwner);
parser.setResolveBindings(true);
parser.setProject(project);
parser.setCompilerOptions(RefactoringASTParser.getCompilerOptions(project));
final IProgressMonitor subsubMonitor= new SubProgressMonitor(subMonitor, 100);
try {
subsubMonitor.beginTask("", collection.size() * 100 + 200); //$NON-NLS-1$
subsubMonitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
parser.createASTs((ICompilationUnit[])collection.toArray(new ICompilationUnit[collection.size()]), new String[0], new ASTRequestor() {
public final void acceptAST(final ICompilationUnit unit, final CompilationUnit node) {
final IProgressMonitor subsubsubMonitor= new SubProgressMonitor(subsubMonitor, 100);
try {
subsubsubMonitor.beginTask("", 100); //$NON-NLS-1$
subsubsubMonitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
if (sourceRewrite != null)
rewriteTypeOccurrences(manager, this, sourceRewrite, unit, node, replacements, new SubProgressMonitor(subsubsubMonitor, 100));
} catch (CoreException exception) {
status.merge(RefactoringStatus.createFatalErrorStatus(exception.getLocalizedMessage()));
} finally {
subsubsubMonitor.done();
}
}
public final void acceptBinding(final String key, final IBinding binding) {
// Do nothing
}
}, new SubProgressMonitor(subsubMonitor, 200));
} finally {
subsubMonitor.done();
}
}
try {
if (subUnit != null && subNode != null && sourceRewrite != null && sourceRequestor != null)
rewriteTypeOccurrences(manager, sourceRequestor, sourceRewrite, subUnit, subNode, replacements, new SubProgressMonitor(subMonitor, 20));
} catch (CoreException exception) {
status.merge(RefactoringStatus.createFatalErrorStatus(exception.getLocalizedMessage()));
}
} finally {
subMonitor.done();
}
}
} finally {
monitor.done();
}
}
/**
* Determines whether type occurrences in instanceof's should be rewritten.
*
* @param rewrite <code>true</code> to rewrite them, <code>false</code> otherwise
*/
public final void setInstanceOf(final boolean rewrite) {
fInstanceOf= rewrite;
}
/**
* Determines whether occurrences of the subtype should be replaced by the supertype.
*
* @param replace <code>true</code> to replace occurrences where possible, <code>false</code>
* otherwise
*/
public final void setReplace(final boolean replace) {
fReplace= replace;
}
/**
* Solves the supertype constraints to replace subtype by a supertype.
*
* @param subUnit the compilation unit of the subtype, or <code>null</code>
* @param subNode the compilation unit node of the subtype, or <code>null</code>
* @param subType the java element of the subtype
* @param subBinding the type binding of the subtype to replace
* @param superBinding the type binding of the supertype to use as replacement
* @param monitor the progress monitor to use
* @param status the refactoring status
* @throws JavaModelException if an error occurs
*/
protected final void solveSuperTypeConstraints(final ICompilationUnit subUnit, final CompilationUnit subNode, final IType subType, final ITypeBinding subBinding, final ITypeBinding superBinding,
final IProgressMonitor monitor, final RefactoringStatus status) throws JavaModelException {
Assert.isNotNull(subType);
Assert.isNotNull(subBinding);
Assert.isNotNull(superBinding);
Assert.isNotNull(monitor);
Assert.isNotNull(status);
int level= 3;
TypeEnvironment environment= new TypeEnvironment();
final SuperTypeConstraintsModel model= new SuperTypeConstraintsModel(environment, environment.create(subBinding), environment.create(superBinding));
final SuperTypeConstraintsCreator creator= new SuperTypeConstraintsCreator(model, fInstanceOf);
try {
monitor.beginTask("", 300); //$NON-NLS-1$
monitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
final Map firstPass= getReferencingCompilationUnits(subType, new SubProgressMonitor(monitor, 100), status);
final Map secondPass= new HashMap();
IJavaProject project= null;
Collection collection= null;
try {
final ASTParser parser= ASTParser.newParser(AST.JLS3);
Object element= null;
ICompilationUnit current= null;
SearchResultGroup group= null;
SearchMatch[] matches= null;
final Map groups= new HashMap();
for (final Iterator outer= firstPass.keySet().iterator(); outer.hasNext();) {
project= (IJavaProject)outer.next();
if (level == 3 && !JavaModelUtil.is50OrHigher(project))
level= 2;
collection= (Collection)firstPass.get(project);
if (collection != null) {
for (final Iterator inner= collection.iterator(); inner.hasNext();) {
group= (SearchResultGroup)inner.next();
matches= group.getSearchResults();
for (int index= 0; index < matches.length; index++) {
element= matches[index].getElement();
if (element instanceof IMember) {
current= ((IMember)element).getCompilationUnit();
if (current != null)
groups.put(current, group);
}
}
}
}
}
Set units= null;
final Set processed= new HashSet();
if (subUnit != null)
processed.add(subUnit);
model.beginCreation();
IProgressMonitor subMonitor= new SubProgressMonitor(monitor, 120);
try {
final Set keySet= firstPass.keySet();
subMonitor.beginTask("", keySet.size() * 100); //$NON-NLS-1$
subMonitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
for (final Iterator outer= keySet.iterator(); outer.hasNext();) {
project= (IJavaProject)outer.next();
collection= (Collection)firstPass.get(project);
if (collection != null) {
units= new HashSet(collection.size());
for (final Iterator inner= collection.iterator(); inner.hasNext();) {
group= (SearchResultGroup)inner.next();
matches= group.getSearchResults();
for (int index= 0; index < matches.length; index++) {
element= matches[index].getElement();
if (element instanceof IMember) {
current= ((IMember)element).getCompilationUnit();
if (current != null)
units.add(current);
}
}
}
final List batches= new ArrayList(units);
final int size= batches.size();
final int iterations= (size - 1) / SIZE_BATCH + 1;
final IProgressMonitor subsubMonitor= new SubProgressMonitor(subMonitor, 100);
try {
subsubMonitor.beginTask("", iterations * 100); //$NON-NLS-1$
subsubMonitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
final Map options= RefactoringASTParser.getCompilerOptions(project);
for (int index= 0; index < iterations; index++) {
final List iteration= batches.subList(index * SIZE_BATCH, Math.min(size, (index + 1) * SIZE_BATCH));
parser.setWorkingCopyOwner(fOwner);
parser.setResolveBindings(true);
parser.setProject(project);
parser.setCompilerOptions(options);
final IProgressMonitor subsubsubMonitor= new SubProgressMonitor(subsubMonitor, 100);
try {
final int count= iteration.size();
subsubsubMonitor.beginTask("", count * 100); //$NON-NLS-1$
subsubsubMonitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
parser.createASTs((ICompilationUnit[])iteration.toArray(new ICompilationUnit[count]), new String[0], new ASTRequestor() {
public final void acceptAST(final ICompilationUnit unit, final CompilationUnit node) {
if (!processed.contains(unit)) {
performFirstPass(creator, secondPass, groups, unit, node, new SubProgressMonitor(subsubsubMonitor, 100));
processed.add(unit);
} else
subsubsubMonitor.worked(100);
}
public final void acceptBinding(final String key, final IBinding binding) {
// Do nothing
}
}, new NullProgressMonitor());
} finally {
subsubsubMonitor.done();
}
}
} finally {
subsubMonitor.done();
}
}
}
} finally {
firstPass.clear();
subMonitor.done();
}
if (subUnit != null && subNode != null)
performFirstPass(creator, secondPass, groups, subUnit, subNode, new SubProgressMonitor(subMonitor, 20));
subMonitor= new SubProgressMonitor(monitor, 100);
try {
final Set keySet= secondPass.keySet();
subMonitor.beginTask("", keySet.size() * 100); //$NON-NLS-1$
subMonitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
for (final Iterator iterator= keySet.iterator(); iterator.hasNext();) {
project= (IJavaProject)iterator.next();
if (level == 3 && !JavaModelUtil.is50OrHigher(project))
level= 2;
collection= (Collection)secondPass.get(project);
if (collection != null) {
parser.setWorkingCopyOwner(fOwner);
parser.setResolveBindings(true);
parser.setProject(project);
parser.setCompilerOptions(RefactoringASTParser.getCompilerOptions(project));
final IProgressMonitor subsubMonitor= new SubProgressMonitor(subMonitor, 100);
try {
subsubMonitor.beginTask("", collection.size() * 100); //$NON-NLS-1$
subsubMonitor.setTaskName(RefactoringCoreMessages.SuperTypeRefactoringProcessor_creating);
parser.createASTs((ICompilationUnit[])collection.toArray(new ICompilationUnit[collection.size()]), new String[0], new ASTRequestor() {
public final void acceptAST(final ICompilationUnit unit, final CompilationUnit node) {
if (!processed.contains(unit))
performSecondPass(creator, unit, node, new SubProgressMonitor(subsubMonitor, 100));
else
subsubMonitor.worked(100);
}
public final void acceptBinding(final String key, final IBinding binding) {
// Do nothing
}
}, new NullProgressMonitor());
} finally {
subsubMonitor.done();
}
}
}
} finally {
secondPass.clear();
subMonitor.done();
}
} finally {
model.endCreation();
model.setCompliance(level);
}
final SuperTypeConstraintsSolver solver= createContraintSolver(model);
solver.solveConstraints();
fTypeOccurrences= solver.getTypeOccurrences();
fObsoleteCasts= solver.getObsoleteCasts();
} finally {
monitor.done();
}
}
} |
Java | @UtilityClass
public final class JAXBDateUtils
{
private static final ThreadLocal<DatatypeFactory> datatypeFactoryHolder = new ThreadLocal<DatatypeFactory>()
{
@Override
protected DatatypeFactory initialValue()
{
try
{
return DatatypeFactory.newInstance();
}
catch (final DatatypeConfigurationException e)
{
throw new IllegalStateException("failed to create " + DatatypeFactory.class.getSimpleName(), e);
}
}
};
public static XMLGregorianCalendar toXMLGregorianCalendar(final LocalDateTime date)
{
if (date == null)
{
return null;
}
final GregorianCalendar c = new GregorianCalendar();
c.setTimeInMillis(date.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli());
return datatypeFactoryHolder.get().newXMLGregorianCalendar(c);
}
public static XMLGregorianCalendar toXMLGregorianCalendar(final ZonedDateTime date)
{
if (date == null)
{
return null;
}
final GregorianCalendar c = new GregorianCalendar();
c.setTimeInMillis(date.toInstant().toEpochMilli());
return datatypeFactoryHolder.get().newXMLGregorianCalendar(c);
}
public static LocalDateTime toLocalDateTime(final XMLGregorianCalendar xml)
{
if (xml == null)
{
return null;
}
return xml.toGregorianCalendar().toZonedDateTime().toLocalDateTime();
}
public static ZonedDateTime toZonedDateTime(final XMLGregorianCalendar xml)
{
if (xml == null)
{
return null;
}
return xml.toGregorianCalendar().toZonedDateTime();
}
public static java.util.Date toDate(final XMLGregorianCalendar xml)
{
return xml == null ? null : xml.toGregorianCalendar().getTime();
}
public static Timestamp toTimestamp(final XMLGregorianCalendar xmlGregorianCalendar)
{
final Date date = toDate(xmlGregorianCalendar);
if (date == null)
{
return null;
}
return new Timestamp(date.getTime());
}
} |
Java | @TestForIssue(jiraKey = "HHH-10667")
public class IdClassReferenceIdentifierTest extends BaseEnversJPAFunctionalTestCase {
private ReferenceIdentifierClassId entityId = null;
private Integer typeId = null;
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class<?>[] {
ReferenceIdentifierEntity.class,
ReferenceIdentifierClassId.class,
ClassType.class,
IntegerGeneratedIdentityEntity.class
};
}
@Test
@Priority(10)
public void initData() {
EntityManager em = getEntityManager();
// Revision 1
em.getTransaction().begin();
ClassType type = new ClassType( "type", "initial description" );
em.persist( type );
IntegerGeneratedIdentityEntity type2 = new IntegerGeneratedIdentityEntity();
em.persist(type2);
ReferenceIdentifierEntity entity = new ReferenceIdentifierEntity();
entity.setSampleValue( "initial data" );
entity.setType( type );
entity.setIiie( type2 );
em.persist( entity );
em.getTransaction().commit();
typeId = type2.getId();
entityId = new ReferenceIdentifierClassId( typeId, type.getType() );
// Revision 2
em.getTransaction().begin();
type = em.find( ClassType.class, type.getType() );
type.setDescription( "modified description" );
em.merge( type );
em.getTransaction().commit();
// Revision 3
em.getTransaction().begin();
entity = em.find( ReferenceIdentifierEntity.class, entityId );
entity.setSampleValue( "modified data" );
em.merge( entity );
em.getTransaction().commit();
em.close();
}
@Test
public void testRevisionsCounts() {
Assert.assertEquals( Arrays.asList( 1, 2 ), getAuditReader().getRevisions( ClassType.class, "type" ) );
Assert.assertEquals( Arrays.asList( 1 ), getAuditReader().getRevisions( IntegerGeneratedIdentityEntity.class, typeId ) );
Assert.assertEquals( Arrays.asList( 1, 3 ), getAuditReader().getRevisions( ReferenceIdentifierEntity.class, entityId ) );
}
@Test
public void testHistoryOfEntity() {
	// Expected state at revision 1: the freshly persisted entity, with the initial
	// description on the ClassType participating in its composite identifier.
	ReferenceIdentifierEntity expected = new ReferenceIdentifierEntity(
			new IntegerGeneratedIdentityEntity( typeId ),
			new ClassType( "type", "initial description" ),
			"initial data"
	);
	assertEntityState( expected, getAuditReader().find( ReferenceIdentifierEntity.class, entityId, 1 ) );
	// Revision 3 carries the modified sample value; revision 2 already changed the
	// ClassType description, so the revision-3 snapshot exposes both modifications.
	expected.setSampleValue( "modified data" );
	expected.getType().setDescription( "modified description" );
	assertEntityState( expected, getAuditReader().find( ReferenceIdentifierEntity.class, entityId, 3 ) );
}

// Compares an audited snapshot against the expected in-memory state, field by field.
private void assertEntityState(ReferenceIdentifierEntity expected, ReferenceIdentifierEntity actual) {
	Assert.assertEquals( expected.getIiie().getId(), actual.getIiie().getId() );
	Assert.assertEquals( expected.getSampleValue(), actual.getSampleValue() );
	Assert.assertEquals( expected.getType().getType(), actual.getType().getType() );
	Assert.assertEquals( expected.getType().getDescription(), actual.getType().getDescription() );
}
} |
Java | public class SqlManagementClientImpl extends AzureServiceClient {
/** the {@link AzureClient} used for long running operations. */
private AzureClient azureClient;
/**
* Gets the {@link AzureClient} used for long running operations.
* @return the azure client;
*/
public AzureClient getAzureClient() {
return this.azureClient;
}
/** The subscription ID that identifies an Azure subscription. */
private String subscriptionId;
/**
* Gets The subscription ID that identifies an Azure subscription.
*
* @return the subscriptionId value.
*/
public String subscriptionId() {
return this.subscriptionId;
}
/**
* Sets The subscription ID that identifies an Azure subscription.
*
* @param subscriptionId the subscriptionId value.
* @return the service client itself
*/
public SqlManagementClientImpl withSubscriptionId(String subscriptionId) {
this.subscriptionId = subscriptionId;
return this;
}
/** Gets or sets the preferred language for the response. */
private String acceptLanguage;
/**
* Gets Gets or sets the preferred language for the response.
*
* @return the acceptLanguage value.
*/
public String acceptLanguage() {
return this.acceptLanguage;
}
/**
* Sets Gets or sets the preferred language for the response.
*
* @param acceptLanguage the acceptLanguage value.
* @return the service client itself
*/
public SqlManagementClientImpl withAcceptLanguage(String acceptLanguage) {
this.acceptLanguage = acceptLanguage;
return this;
}
/** Gets or sets the retry timeout in seconds for Long Running Operations. Default value is 30. */
private int longRunningOperationRetryTimeout;
/**
* Gets Gets or sets the retry timeout in seconds for Long Running Operations. Default value is 30.
*
* @return the longRunningOperationRetryTimeout value.
*/
public int longRunningOperationRetryTimeout() {
return this.longRunningOperationRetryTimeout;
}
/**
* Sets Gets or sets the retry timeout in seconds for Long Running Operations. Default value is 30.
*
* @param longRunningOperationRetryTimeout the longRunningOperationRetryTimeout value.
* @return the service client itself
*/
public SqlManagementClientImpl withLongRunningOperationRetryTimeout(int longRunningOperationRetryTimeout) {
this.longRunningOperationRetryTimeout = longRunningOperationRetryTimeout;
return this;
}
/** When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true. */
private boolean generateClientRequestId;
/**
* Gets When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true.
*
* @return the generateClientRequestId value.
*/
public boolean generateClientRequestId() {
return this.generateClientRequestId;
}
/**
* Sets When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true.
*
* @param generateClientRequestId the generateClientRequestId value.
* @return the service client itself
*/
public SqlManagementClientImpl withGenerateClientRequestId(boolean generateClientRequestId) {
this.generateClientRequestId = generateClientRequestId;
return this;
}
/**
* The BackupLongTermRetentionPoliciesInner object to access its operations.
*/
private BackupLongTermRetentionPoliciesInner backupLongTermRetentionPolicies;
/**
* Gets the BackupLongTermRetentionPoliciesInner object to access its operations.
* @return the BackupLongTermRetentionPoliciesInner object.
*/
public BackupLongTermRetentionPoliciesInner backupLongTermRetentionPolicies() {
return this.backupLongTermRetentionPolicies;
}
/**
* The BackupLongTermRetentionVaultsInner object to access its operations.
*/
private BackupLongTermRetentionVaultsInner backupLongTermRetentionVaults;
/**
* Gets the BackupLongTermRetentionVaultsInner object to access its operations.
* @return the BackupLongTermRetentionVaultsInner object.
*/
public BackupLongTermRetentionVaultsInner backupLongTermRetentionVaults() {
return this.backupLongTermRetentionVaults;
}
/**
* The RecoverableDatabasesInner object to access its operations.
*/
private RecoverableDatabasesInner recoverableDatabases;
/**
* Gets the RecoverableDatabasesInner object to access its operations.
* @return the RecoverableDatabasesInner object.
*/
public RecoverableDatabasesInner recoverableDatabases() {
return this.recoverableDatabases;
}
/**
* The RestorableDroppedDatabasesInner object to access its operations.
*/
private RestorableDroppedDatabasesInner restorableDroppedDatabases;
/**
* Gets the RestorableDroppedDatabasesInner object to access its operations.
* @return the RestorableDroppedDatabasesInner object.
*/
public RestorableDroppedDatabasesInner restorableDroppedDatabases() {
return this.restorableDroppedDatabases;
}
/**
* The CapabilitiesInner object to access its operations.
*/
private CapabilitiesInner capabilities;
/**
* Gets the CapabilitiesInner object to access its operations.
* @return the CapabilitiesInner object.
*/
public CapabilitiesInner capabilities() {
return this.capabilities;
}
/**
* The ServersInner object to access its operations.
*/
private ServersInner servers;
/**
* Gets the ServersInner object to access its operations.
* @return the ServersInner object.
*/
public ServersInner servers() {
return this.servers;
}
/**
* The ServerConnectionPoliciesInner object to access its operations.
*/
private ServerConnectionPoliciesInner serverConnectionPolicies;
/**
* Gets the ServerConnectionPoliciesInner object to access its operations.
* @return the ServerConnectionPoliciesInner object.
*/
public ServerConnectionPoliciesInner serverConnectionPolicies() {
return this.serverConnectionPolicies;
}
/**
* The DatabasesInner object to access its operations.
*/
private DatabasesInner databases;
/**
* Gets the DatabasesInner object to access its operations.
* @return the DatabasesInner object.
*/
public DatabasesInner databases() {
return this.databases;
}
/**
* The DatabaseThreatDetectionPoliciesInner object to access its operations.
*/
private DatabaseThreatDetectionPoliciesInner databaseThreatDetectionPolicies;
/**
* Gets the DatabaseThreatDetectionPoliciesInner object to access its operations.
* @return the DatabaseThreatDetectionPoliciesInner object.
*/
public DatabaseThreatDetectionPoliciesInner databaseThreatDetectionPolicies() {
return this.databaseThreatDetectionPolicies;
}
/**
* The DataMaskingPoliciesInner object to access its operations.
*/
private DataMaskingPoliciesInner dataMaskingPolicies;
/**
* Gets the DataMaskingPoliciesInner object to access its operations.
* @return the DataMaskingPoliciesInner object.
*/
public DataMaskingPoliciesInner dataMaskingPolicies() {
return this.dataMaskingPolicies;
}
/**
* The DataMaskingRulesInner object to access its operations.
*/
private DataMaskingRulesInner dataMaskingRules;
/**
* Gets the DataMaskingRulesInner object to access its operations.
* @return the DataMaskingRulesInner object.
*/
public DataMaskingRulesInner dataMaskingRules() {
return this.dataMaskingRules;
}
/**
* The ElasticPoolsInner object to access its operations.
*/
private ElasticPoolsInner elasticPools;
/**
* Gets the ElasticPoolsInner object to access its operations.
* @return the ElasticPoolsInner object.
*/
public ElasticPoolsInner elasticPools() {
return this.elasticPools;
}
/**
* The FirewallRulesInner object to access its operations.
*/
private FirewallRulesInner firewallRules;
/**
* Gets the FirewallRulesInner object to access its operations.
* @return the FirewallRulesInner object.
*/
public FirewallRulesInner firewallRules() {
return this.firewallRules;
}
/**
* The GeoBackupPoliciesInner object to access its operations.
*/
private GeoBackupPoliciesInner geoBackupPolicies;
/**
* Gets the GeoBackupPoliciesInner object to access its operations.
* @return the GeoBackupPoliciesInner object.
*/
public GeoBackupPoliciesInner geoBackupPolicies() {
return this.geoBackupPolicies;
}
/**
* The RecommendedElasticPoolsInner object to access its operations.
*/
private RecommendedElasticPoolsInner recommendedElasticPools;
/**
* Gets the RecommendedElasticPoolsInner object to access its operations.
* @return the RecommendedElasticPoolsInner object.
*/
public RecommendedElasticPoolsInner recommendedElasticPools() {
return this.recommendedElasticPools;
}
/**
* The ReplicationLinksInner object to access its operations.
*/
private ReplicationLinksInner replicationLinks;
/**
* Gets the ReplicationLinksInner object to access its operations.
* @return the ReplicationLinksInner object.
*/
public ReplicationLinksInner replicationLinks() {
return this.replicationLinks;
}
/**
* The ServerAzureADAdministratorsInner object to access its operations.
*/
private ServerAzureADAdministratorsInner serverAzureADAdministrators;
/**
* Gets the ServerAzureADAdministratorsInner object to access its operations.
* @return the ServerAzureADAdministratorsInner object.
*/
public ServerAzureADAdministratorsInner serverAzureADAdministrators() {
return this.serverAzureADAdministrators;
}
/**
* The ServerCommunicationLinksInner object to access its operations.
*/
private ServerCommunicationLinksInner serverCommunicationLinks;
/**
* Gets the ServerCommunicationLinksInner object to access its operations.
* @return the ServerCommunicationLinksInner object.
*/
public ServerCommunicationLinksInner serverCommunicationLinks() {
return this.serverCommunicationLinks;
}
/**
* The ServiceObjectivesInner object to access its operations.
*/
private ServiceObjectivesInner serviceObjectives;
/**
* Gets the ServiceObjectivesInner object to access its operations.
* @return the ServiceObjectivesInner object.
*/
public ServiceObjectivesInner serviceObjectives() {
return this.serviceObjectives;
}
/**
* The ElasticPoolActivitiesInner object to access its operations.
*/
private ElasticPoolActivitiesInner elasticPoolActivities;
/**
* Gets the ElasticPoolActivitiesInner object to access its operations.
* @return the ElasticPoolActivitiesInner object.
*/
public ElasticPoolActivitiesInner elasticPoolActivities() {
return this.elasticPoolActivities;
}
/**
* The ElasticPoolDatabaseActivitiesInner object to access its operations.
*/
private ElasticPoolDatabaseActivitiesInner elasticPoolDatabaseActivities;
/**
* Gets the ElasticPoolDatabaseActivitiesInner object to access its operations.
* @return the ElasticPoolDatabaseActivitiesInner object.
*/
public ElasticPoolDatabaseActivitiesInner elasticPoolDatabaseActivities() {
return this.elasticPoolDatabaseActivities;
}
/**
* The ServiceTierAdvisorsInner object to access its operations.
*/
private ServiceTierAdvisorsInner serviceTierAdvisors;
/**
* Gets the ServiceTierAdvisorsInner object to access its operations.
* @return the ServiceTierAdvisorsInner object.
*/
public ServiceTierAdvisorsInner serviceTierAdvisors() {
return this.serviceTierAdvisors;
}
/**
* The TransparentDataEncryptionsInner object to access its operations.
*/
private TransparentDataEncryptionsInner transparentDataEncryptions;
/**
* Gets the TransparentDataEncryptionsInner object to access its operations.
* @return the TransparentDataEncryptionsInner object.
*/
public TransparentDataEncryptionsInner transparentDataEncryptions() {
return this.transparentDataEncryptions;
}
/**
* The TransparentDataEncryptionActivitiesInner object to access its operations.
*/
private TransparentDataEncryptionActivitiesInner transparentDataEncryptionActivities;
/**
* Gets the TransparentDataEncryptionActivitiesInner object to access its operations.
* @return the TransparentDataEncryptionActivitiesInner object.
*/
public TransparentDataEncryptionActivitiesInner transparentDataEncryptionActivities() {
return this.transparentDataEncryptionActivities;
}
/**
* The ServerUsagesInner object to access its operations.
*/
private ServerUsagesInner serverUsages;
/**
* Gets the ServerUsagesInner object to access its operations.
* @return the ServerUsagesInner object.
*/
public ServerUsagesInner serverUsages() {
return this.serverUsages;
}
/**
* The DatabaseUsagesInner object to access its operations.
*/
private DatabaseUsagesInner databaseUsages;
/**
* Gets the DatabaseUsagesInner object to access its operations.
* @return the DatabaseUsagesInner object.
*/
public DatabaseUsagesInner databaseUsages() {
return this.databaseUsages;
}
/**
* The DatabaseAutomaticTuningsInner object to access its operations.
*/
private DatabaseAutomaticTuningsInner databaseAutomaticTunings;
/**
* Gets the DatabaseAutomaticTuningsInner object to access its operations.
* @return the DatabaseAutomaticTuningsInner object.
*/
public DatabaseAutomaticTuningsInner databaseAutomaticTunings() {
return this.databaseAutomaticTunings;
}
/**
* The EncryptionProtectorsInner object to access its operations.
*/
private EncryptionProtectorsInner encryptionProtectors;
/**
* Gets the EncryptionProtectorsInner object to access its operations.
* @return the EncryptionProtectorsInner object.
*/
public EncryptionProtectorsInner encryptionProtectors() {
return this.encryptionProtectors;
}
/**
* The FailoverGroupsInner object to access its operations.
*/
private FailoverGroupsInner failoverGroups;
/**
* Gets the FailoverGroupsInner object to access its operations.
* @return the FailoverGroupsInner object.
*/
public FailoverGroupsInner failoverGroups() {
return this.failoverGroups;
}
/**
* The ManagedInstancesInner object to access its operations.
*/
private ManagedInstancesInner managedInstances;
/**
* Gets the ManagedInstancesInner object to access its operations.
* @return the ManagedInstancesInner object.
*/
public ManagedInstancesInner managedInstances() {
return this.managedInstances;
}
/**
* The OperationsInner object to access its operations.
*/
private OperationsInner operations;
/**
* Gets the OperationsInner object to access its operations.
* @return the OperationsInner object.
*/
public OperationsInner operations() {
return this.operations;
}
/**
* The ServerKeysInner object to access its operations.
*/
private ServerKeysInner serverKeys;
/**
* Gets the ServerKeysInner object to access its operations.
* @return the ServerKeysInner object.
*/
public ServerKeysInner serverKeys() {
return this.serverKeys;
}
/**
* The SyncAgentsInner object to access its operations.
*/
private SyncAgentsInner syncAgents;
/**
* Gets the SyncAgentsInner object to access its operations.
* @return the SyncAgentsInner object.
*/
public SyncAgentsInner syncAgents() {
return this.syncAgents;
}
/**
* The SyncGroupsInner object to access its operations.
*/
private SyncGroupsInner syncGroups;
/**
* Gets the SyncGroupsInner object to access its operations.
* @return the SyncGroupsInner object.
*/
public SyncGroupsInner syncGroups() {
return this.syncGroups;
}
/**
* The SyncMembersInner object to access its operations.
*/
private SyncMembersInner syncMembers;
/**
* Gets the SyncMembersInner object to access its operations.
* @return the SyncMembersInner object.
*/
public SyncMembersInner syncMembers() {
return this.syncMembers;
}
/**
* The SubscriptionUsagesInner object to access its operations.
*/
private SubscriptionUsagesInner subscriptionUsages;
/**
* Gets the SubscriptionUsagesInner object to access its operations.
* @return the SubscriptionUsagesInner object.
*/
public SubscriptionUsagesInner subscriptionUsages() {
return this.subscriptionUsages;
}
/**
* The VirtualNetworkRulesInner object to access its operations.
*/
private VirtualNetworkRulesInner virtualNetworkRules;
/**
* Gets the VirtualNetworkRulesInner object to access its operations.
* @return the VirtualNetworkRulesInner object.
*/
public VirtualNetworkRulesInner virtualNetworkRules() {
return this.virtualNetworkRules;
}
/**
* The ExtendedDatabaseBlobAuditingPoliciesInner object to access its operations.
*/
private ExtendedDatabaseBlobAuditingPoliciesInner extendedDatabaseBlobAuditingPolicies;
/**
* Gets the ExtendedDatabaseBlobAuditingPoliciesInner object to access its operations.
* @return the ExtendedDatabaseBlobAuditingPoliciesInner object.
*/
public ExtendedDatabaseBlobAuditingPoliciesInner extendedDatabaseBlobAuditingPolicies() {
return this.extendedDatabaseBlobAuditingPolicies;
}
/**
* The ExtendedServerBlobAuditingPoliciesInner object to access its operations.
*/
private ExtendedServerBlobAuditingPoliciesInner extendedServerBlobAuditingPolicies;
/**
* Gets the ExtendedServerBlobAuditingPoliciesInner object to access its operations.
* @return the ExtendedServerBlobAuditingPoliciesInner object.
*/
public ExtendedServerBlobAuditingPoliciesInner extendedServerBlobAuditingPolicies() {
return this.extendedServerBlobAuditingPolicies;
}
/**
* The ServerBlobAuditingPoliciesInner object to access its operations.
*/
private ServerBlobAuditingPoliciesInner serverBlobAuditingPolicies;
/**
* Gets the ServerBlobAuditingPoliciesInner object to access its operations.
* @return the ServerBlobAuditingPoliciesInner object.
*/
public ServerBlobAuditingPoliciesInner serverBlobAuditingPolicies() {
return this.serverBlobAuditingPolicies;
}
/**
* The DatabaseBlobAuditingPoliciesInner object to access its operations.
*/
private DatabaseBlobAuditingPoliciesInner databaseBlobAuditingPolicies;
/**
* Gets the DatabaseBlobAuditingPoliciesInner object to access its operations.
* @return the DatabaseBlobAuditingPoliciesInner object.
*/
public DatabaseBlobAuditingPoliciesInner databaseBlobAuditingPolicies() {
return this.databaseBlobAuditingPolicies;
}
/**
* The DatabaseVulnerabilityAssessmentRuleBaselinesInner object to access its operations.
*/
private DatabaseVulnerabilityAssessmentRuleBaselinesInner databaseVulnerabilityAssessmentRuleBaselines;
/**
* Gets the DatabaseVulnerabilityAssessmentRuleBaselinesInner object to access its operations.
* @return the DatabaseVulnerabilityAssessmentRuleBaselinesInner object.
*/
public DatabaseVulnerabilityAssessmentRuleBaselinesInner databaseVulnerabilityAssessmentRuleBaselines() {
return this.databaseVulnerabilityAssessmentRuleBaselines;
}
/**
* The DatabaseVulnerabilityAssessmentsInner object to access its operations.
*/
private DatabaseVulnerabilityAssessmentsInner databaseVulnerabilityAssessments;
/**
* Gets the DatabaseVulnerabilityAssessmentsInner object to access its operations.
* @return the DatabaseVulnerabilityAssessmentsInner object.
*/
public DatabaseVulnerabilityAssessmentsInner databaseVulnerabilityAssessments() {
return this.databaseVulnerabilityAssessments;
}
/**
* The JobAgentsInner object to access its operations.
*/
private JobAgentsInner jobAgents;
/**
* Gets the JobAgentsInner object to access its operations.
* @return the JobAgentsInner object.
*/
public JobAgentsInner jobAgents() {
return this.jobAgents;
}
/**
* The JobCredentialsInner object to access its operations.
*/
private JobCredentialsInner jobCredentials;
/**
* Gets the JobCredentialsInner object to access its operations.
* @return the JobCredentialsInner object.
*/
public JobCredentialsInner jobCredentials() {
return this.jobCredentials;
}
/**
* The JobExecutionsInner object to access its operations.
*/
private JobExecutionsInner jobExecutions;
/**
* Gets the JobExecutionsInner object to access its operations.
* @return the JobExecutionsInner object.
*/
public JobExecutionsInner jobExecutions() {
return this.jobExecutions;
}
/**
* The JobsInner object to access its operations.
*/
private JobsInner jobs;
/**
* Gets the JobsInner object to access its operations.
* @return the JobsInner object.
*/
public JobsInner jobs() {
return this.jobs;
}
/**
* The JobStepExecutionsInner object to access its operations.
*/
private JobStepExecutionsInner jobStepExecutions;
/**
* Gets the JobStepExecutionsInner object to access its operations.
* @return the JobStepExecutionsInner object.
*/
public JobStepExecutionsInner jobStepExecutions() {
return this.jobStepExecutions;
}
/**
* The JobStepsInner object to access its operations.
*/
private JobStepsInner jobSteps;
/**
* Gets the JobStepsInner object to access its operations.
* @return the JobStepsInner object.
*/
public JobStepsInner jobSteps() {
return this.jobSteps;
}
/**
* The JobTargetExecutionsInner object to access its operations.
*/
private JobTargetExecutionsInner jobTargetExecutions;
/**
* Gets the JobTargetExecutionsInner object to access its operations.
* @return the JobTargetExecutionsInner object.
*/
public JobTargetExecutionsInner jobTargetExecutions() {
return this.jobTargetExecutions;
}
/**
* The JobTargetGroupsInner object to access its operations.
*/
private JobTargetGroupsInner jobTargetGroups;
/**
* Gets the JobTargetGroupsInner object to access its operations.
* @return the JobTargetGroupsInner object.
*/
public JobTargetGroupsInner jobTargetGroups() {
return this.jobTargetGroups;
}
/**
* The JobVersionsInner object to access its operations.
*/
private JobVersionsInner jobVersions;
/**
* Gets the JobVersionsInner object to access its operations.
* @return the JobVersionsInner object.
*/
public JobVersionsInner jobVersions() {
return this.jobVersions;
}
/**
* The ManagedDatabasesInner object to access its operations.
*/
private ManagedDatabasesInner managedDatabases;
/**
* Gets the ManagedDatabasesInner object to access its operations.
* @return the ManagedDatabasesInner object.
*/
public ManagedDatabasesInner managedDatabases() {
return this.managedDatabases;
}
/**
* The SensitivityLabelsInner object to access its operations.
*/
private SensitivityLabelsInner sensitivityLabels;
/**
* Gets the SensitivityLabelsInner object to access its operations.
* @return the SensitivityLabelsInner object.
*/
public SensitivityLabelsInner sensitivityLabels() {
return this.sensitivityLabels;
}
/**
* The ServerAutomaticTuningsInner object to access its operations.
*/
private ServerAutomaticTuningsInner serverAutomaticTunings;
/**
* Gets the ServerAutomaticTuningsInner object to access its operations.
* @return the ServerAutomaticTuningsInner object.
*/
public ServerAutomaticTuningsInner serverAutomaticTunings() {
return this.serverAutomaticTunings;
}
/**
* The ServerDnsAliasesInner object to access its operations.
*/
private ServerDnsAliasesInner serverDnsAliases;
/**
* Gets the ServerDnsAliasesInner object to access its operations.
* @return the ServerDnsAliasesInner object.
*/
public ServerDnsAliasesInner serverDnsAliases() {
return this.serverDnsAliases;
}
/**
* The ServerSecurityAlertPoliciesInner object to access its operations.
*/
private ServerSecurityAlertPoliciesInner serverSecurityAlertPolicies;
/**
* Gets the ServerSecurityAlertPoliciesInner object to access its operations.
* @return the ServerSecurityAlertPoliciesInner object.
*/
public ServerSecurityAlertPoliciesInner serverSecurityAlertPolicies() {
return this.serverSecurityAlertPolicies;
}
/**
* The RestorePointsInner object to access its operations.
*/
private RestorePointsInner restorePoints;
/**
* Gets the RestorePointsInner object to access its operations.
* @return the RestorePointsInner object.
*/
public RestorePointsInner restorePoints() {
return this.restorePoints;
}
/**
* The DatabaseOperationsInner object to access its operations.
*/
private DatabaseOperationsInner databaseOperations;
/**
* Gets the DatabaseOperationsInner object to access its operations.
* @return the DatabaseOperationsInner object.
*/
public DatabaseOperationsInner databaseOperations() {
return this.databaseOperations;
}
/**
* The ElasticPoolOperationsInner object to access its operations.
*/
private ElasticPoolOperationsInner elasticPoolOperations;
/**
* Gets the ElasticPoolOperationsInner object to access its operations.
* @return the ElasticPoolOperationsInner object.
*/
public ElasticPoolOperationsInner elasticPoolOperations() {
return this.elasticPoolOperations;
}
/**
* The DatabaseVulnerabilityAssessmentScansInner object to access its operations.
*/
private DatabaseVulnerabilityAssessmentScansInner databaseVulnerabilityAssessmentScans;
/**
* Gets the DatabaseVulnerabilityAssessmentScansInner object to access its operations.
* @return the DatabaseVulnerabilityAssessmentScansInner object.
*/
public DatabaseVulnerabilityAssessmentScansInner databaseVulnerabilityAssessmentScans() {
return this.databaseVulnerabilityAssessmentScans;
}
/**
* The InstanceFailoverGroupsInner object to access its operations.
*/
private InstanceFailoverGroupsInner instanceFailoverGroups;
/**
* Gets the InstanceFailoverGroupsInner object to access its operations.
* @return the InstanceFailoverGroupsInner object.
*/
public InstanceFailoverGroupsInner instanceFailoverGroups() {
return this.instanceFailoverGroups;
}
/**
* The BackupShortTermRetentionPoliciesInner object to access its operations.
*/
private BackupShortTermRetentionPoliciesInner backupShortTermRetentionPolicies;
/**
* Gets the BackupShortTermRetentionPoliciesInner object to access its operations.
* @return the BackupShortTermRetentionPoliciesInner object.
*/
public BackupShortTermRetentionPoliciesInner backupShortTermRetentionPolicies() {
return this.backupShortTermRetentionPolicies;
}
/**
* The TdeCertificatesInner object to access its operations.
*/
private TdeCertificatesInner tdeCertificates;
/**
* Gets the TdeCertificatesInner object to access its operations.
* @return the TdeCertificatesInner object.
*/
public TdeCertificatesInner tdeCertificates() {
return this.tdeCertificates;
}
/**
* The ManagedInstanceTdeCertificatesInner object to access its operations.
*/
private ManagedInstanceTdeCertificatesInner managedInstanceTdeCertificates;
/**
* Gets the ManagedInstanceTdeCertificatesInner object to access its operations.
* @return the ManagedInstanceTdeCertificatesInner object.
*/
public ManagedInstanceTdeCertificatesInner managedInstanceTdeCertificates() {
return this.managedInstanceTdeCertificates;
}
/**
* Initializes an instance of SqlManagementClient client.
*
* @param credentials the management credentials for Azure
*/
public SqlManagementClientImpl(ServiceClientCredentials credentials) {
this("https://management.azure.com", credentials);
}
/**
* Initializes an instance of SqlManagementClient client.
*
* @param baseUrl the base URL of the host
* @param credentials the management credentials for Azure
*/
public SqlManagementClientImpl(String baseUrl, ServiceClientCredentials credentials) {
super(baseUrl, credentials);
initialize();
}
/**
* Initializes an instance of SqlManagementClient client.
*
* @param restClient the REST client to connect to Azure.
*/
public SqlManagementClientImpl(RestClient restClient) {
super(restClient);
initialize();
}
protected void initialize() {
this.acceptLanguage = "en-US";
this.longRunningOperationRetryTimeout = 30;
this.generateClientRequestId = true;
this.backupLongTermRetentionPolicies = new BackupLongTermRetentionPoliciesInner(restClient().retrofit(), this);
this.backupLongTermRetentionVaults = new BackupLongTermRetentionVaultsInner(restClient().retrofit(), this);
this.recoverableDatabases = new RecoverableDatabasesInner(restClient().retrofit(), this);
this.restorableDroppedDatabases = new RestorableDroppedDatabasesInner(restClient().retrofit(), this);
this.capabilities = new CapabilitiesInner(restClient().retrofit(), this);
this.servers = new ServersInner(restClient().retrofit(), this);
this.serverConnectionPolicies = new ServerConnectionPoliciesInner(restClient().retrofit(), this);
this.databases = new DatabasesInner(restClient().retrofit(), this);
this.databaseThreatDetectionPolicies = new DatabaseThreatDetectionPoliciesInner(restClient().retrofit(), this);
this.dataMaskingPolicies = new DataMaskingPoliciesInner(restClient().retrofit(), this);
this.dataMaskingRules = new DataMaskingRulesInner(restClient().retrofit(), this);
this.elasticPools = new ElasticPoolsInner(restClient().retrofit(), this);
this.firewallRules = new FirewallRulesInner(restClient().retrofit(), this);
this.geoBackupPolicies = new GeoBackupPoliciesInner(restClient().retrofit(), this);
this.recommendedElasticPools = new RecommendedElasticPoolsInner(restClient().retrofit(), this);
this.replicationLinks = new ReplicationLinksInner(restClient().retrofit(), this);
this.serverAzureADAdministrators = new ServerAzureADAdministratorsInner(restClient().retrofit(), this);
this.serverCommunicationLinks = new ServerCommunicationLinksInner(restClient().retrofit(), this);
this.serviceObjectives = new ServiceObjectivesInner(restClient().retrofit(), this);
this.elasticPoolActivities = new ElasticPoolActivitiesInner(restClient().retrofit(), this);
this.elasticPoolDatabaseActivities = new ElasticPoolDatabaseActivitiesInner(restClient().retrofit(), this);
this.serviceTierAdvisors = new ServiceTierAdvisorsInner(restClient().retrofit(), this);
this.transparentDataEncryptions = new TransparentDataEncryptionsInner(restClient().retrofit(), this);
this.transparentDataEncryptionActivities = new TransparentDataEncryptionActivitiesInner(restClient().retrofit(), this);
this.serverUsages = new ServerUsagesInner(restClient().retrofit(), this);
this.databaseUsages = new DatabaseUsagesInner(restClient().retrofit(), this);
this.databaseAutomaticTunings = new DatabaseAutomaticTuningsInner(restClient().retrofit(), this);
this.encryptionProtectors = new EncryptionProtectorsInner(restClient().retrofit(), this);
this.failoverGroups = new FailoverGroupsInner(restClient().retrofit(), this);
this.managedInstances = new ManagedInstancesInner(restClient().retrofit(), this);
this.operations = new OperationsInner(restClient().retrofit(), this);
this.serverKeys = new ServerKeysInner(restClient().retrofit(), this);
this.syncAgents = new SyncAgentsInner(restClient().retrofit(), this);
this.syncGroups = new SyncGroupsInner(restClient().retrofit(), this);
this.syncMembers = new SyncMembersInner(restClient().retrofit(), this);
this.subscriptionUsages = new SubscriptionUsagesInner(restClient().retrofit(), this);
this.virtualNetworkRules = new VirtualNetworkRulesInner(restClient().retrofit(), this);
this.extendedDatabaseBlobAuditingPolicies = new ExtendedDatabaseBlobAuditingPoliciesInner(restClient().retrofit(), this);
this.extendedServerBlobAuditingPolicies = new ExtendedServerBlobAuditingPoliciesInner(restClient().retrofit(), this);
this.serverBlobAuditingPolicies = new ServerBlobAuditingPoliciesInner(restClient().retrofit(), this);
this.databaseBlobAuditingPolicies = new DatabaseBlobAuditingPoliciesInner(restClient().retrofit(), this);
this.databaseVulnerabilityAssessmentRuleBaselines = new DatabaseVulnerabilityAssessmentRuleBaselinesInner(restClient().retrofit(), this);
this.databaseVulnerabilityAssessments = new DatabaseVulnerabilityAssessmentsInner(restClient().retrofit(), this);
this.jobAgents = new JobAgentsInner(restClient().retrofit(), this);
this.jobCredentials = new JobCredentialsInner(restClient().retrofit(), this);
this.jobExecutions = new JobExecutionsInner(restClient().retrofit(), this);
this.jobs = new JobsInner(restClient().retrofit(), this);
this.jobStepExecutions = new JobStepExecutionsInner(restClient().retrofit(), this);
this.jobSteps = new JobStepsInner(restClient().retrofit(), this);
this.jobTargetExecutions = new JobTargetExecutionsInner(restClient().retrofit(), this);
this.jobTargetGroups = new JobTargetGroupsInner(restClient().retrofit(), this);
this.jobVersions = new JobVersionsInner(restClient().retrofit(), this);
this.managedDatabases = new ManagedDatabasesInner(restClient().retrofit(), this);
this.sensitivityLabels = new SensitivityLabelsInner(restClient().retrofit(), this);
this.serverAutomaticTunings = new ServerAutomaticTuningsInner(restClient().retrofit(), this);
this.serverDnsAliases = new ServerDnsAliasesInner(restClient().retrofit(), this);
this.serverSecurityAlertPolicies = new ServerSecurityAlertPoliciesInner(restClient().retrofit(), this);
this.restorePoints = new RestorePointsInner(restClient().retrofit(), this);
this.databaseOperations = new DatabaseOperationsInner(restClient().retrofit(), this);
this.elasticPoolOperations = new ElasticPoolOperationsInner(restClient().retrofit(), this);
this.databaseVulnerabilityAssessmentScans = new DatabaseVulnerabilityAssessmentScansInner(restClient().retrofit(), this);
this.instanceFailoverGroups = new InstanceFailoverGroupsInner(restClient().retrofit(), this);
this.backupShortTermRetentionPolicies = new BackupShortTermRetentionPoliciesInner(restClient().retrofit(), this);
this.tdeCertificates = new TdeCertificatesInner(restClient().retrofit(), this);
this.managedInstanceTdeCertificates = new ManagedInstanceTdeCertificatesInner(restClient().retrofit(), this);
this.azureClient = new AzureClient(this);
}
/**
 * Builds the User-Agent header value sent with every request from this client.
 *
 * @return the base user agent string suffixed with this client's identifier.
 */
@Override
public String userAgent() {
    // Simple concatenation; equivalent to String.format("%s (%s)", ...).
    return super.userAgent() + " (SqlManagementClient)";
}
}
/**
 * Rule-driven {@link LineWrapPositionStrategy}: clients register per-symbol wrap {@link Rule rules}
 * via {@link #addRule(Rule)}, and the strategy chooses the "best" wrap offset by weighting every
 * candidate position found before the preferred offset.
 */
public class GenericLineWrapPositionStrategy implements LineWrapPositionStrategy {
  /**
   * We consider that it's possible to wrap line on non-id symbol. However, weight of such position is expected to be less
   * than weight of wrap position bound to explicitly configured symbol.
   */
  private static final int NON_ID_WEIGHT = (Rule.DEFAULT_WEIGHT - 1) / 2;

  /** Holds symbols wrap rules by symbol. */
  private final Int2ObjectOpenHashMap<Rule> myRules = new Int2ObjectOpenHashMap<>();
  // Scratch offset -> weight map, re-populated on every calculateWrapPosition() call.
  private final Storage myOffset2weight = new Storage();

  @Override
  public int calculateWrapPosition(@NotNull Document document,
                                   @Nullable Project project,
                                   int startOffset,
                                   int endOffset,
                                   int maxPreferredOffset,
                                   boolean allowToBeyondMaxPreferredOffset,
                                   boolean isSoftWrap)
  {
    if (endOffset <= startOffset) {
      return endOffset;
    }
    myOffset2weight.clear();
    myOffset2weight.anchor = startOffset;
    CharSequence text = document.getCharsSequence();
    // Normalization.
    int maxPreferredOffsetToUse = maxPreferredOffset >= endOffset ? endOffset - 1 : maxPreferredOffset;
    maxPreferredOffsetToUse = Math.max(maxPreferredOffsetToUse, startOffset);
    // Try to find out wrap position before preferred offset.
    // Backward scan: record every eligible offset with its weight; the best one is chosen afterwards
    // in chooseOffset(). A hard line break wins immediately.
    for (int i = Math.min(maxPreferredOffsetToUse, text.length() - 1); i > startOffset; i--) {
      char c = text.charAt(i);
      if (c == '\n') {
        return i + 1;
      }
      if (!canUseOffset(document, i, isSoftWrap)) {
        continue;
      }
      Rule rule = myRules.get(c);
      if (rule != null) {
        if (rule.condition == WrapCondition.BOTH || rule.condition == WrapCondition.AFTER) {
          int target = i+1;
          // For non-space symbols, skip trailing spaces so the wrap lands after them.
          if (rule.symbol != ' ') {
            while(target < maxPreferredOffsetToUse && text.charAt(target) == ' ') {
              target++;
            }
          }
          if (target <= maxPreferredOffsetToUse) {
            myOffset2weight.store(target, rule.weight);
          }
        }
        if (rule.condition == WrapCondition.BOTH || rule.condition == WrapCondition.BEFORE) {
          myOffset2weight.store(i, rule.weight);
        }
        continue;
      }
      // Don't wrap on a non-id symbol followed by non-id symbol, e.g. don't wrap between two pluses at i++.
      // Also don't wrap before non-id symbol preceded by a space - wrap on space instead;
      if (!isIdSymbol(c) && i > startOffset + 1 && isIdSymbol(text.charAt(i - 1)) && !myRules.containsKey(text.charAt(i - 1))) {
        myOffset2weight.store(i, NON_ID_WEIGHT);
      }
    }
    int result = chooseOffset();
    if (result > 0) {
      return result;
    }
    if (!allowToBeyondMaxPreferredOffset) {
      return -1;
    }
    // Try to find target offset that is beyond preferred offset.
    // Note that we don't consider symbol weights here and just break on the first appropriate position.
    for (int i = Math.min(maxPreferredOffsetToUse + 1, text.length() - 1); i < endOffset; i++) {
      char c = text.charAt(i);
      if (c == '\n') {
        return i + 1;
      }
      if (!canUseOffset(document, i, isSoftWrap)) {
        continue;
      }
      Rule rule = myRules.get(c);
      if (rule != null) {
        switch (rule.condition) {
          case BOTH:
          case BEFORE: return i;
          // AFTER falls through to the non-id check below when i is the last usable offset.
          case AFTER: if (i < endOffset - 1) return i + 1;
        }
      }
      // Don't wrap on a non-id symbol followed by non-id symbol, e.g. don't wrap between two pluses at i++;
      if (!isIdSymbol(c) && i < endOffset - 1 && isIdSymbol(text.charAt(i + 1))) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Extension point: subclasses may veto particular offsets.
   *
   * @param document document being wrapped
   * @param offset   candidate wrap offset
   * @param virtual  whether the wrap is a soft (virtual) wrap
   * @return true if the offset may be used as a wrap position (default: always)
   */
  protected boolean canUseOffset(@NotNull Document document, int offset, boolean virtual) {
    return true;
  }

  /**
   * Registers given rule with the current strategy.
   *
   * @param rule rule to register
   * @throws IllegalArgumentException if another rule for the same symbol is already registered within the current strategy
   */
  public void addRule(@NotNull Rule rule) throws IllegalArgumentException {
    Rule existing = myRules.get(rule.symbol);
    if (existing != null) {
      throw new IllegalArgumentException(String.format(
        "Can't register given wrap rule (%s) within the current line wrap position strategy. Reason: another rule is already "
        + "registered for it - '%s'", rule, existing
      ));
    }
    existing = myRules.put(rule.symbol, rule);
    assert existing == null;
  }

  // ASCII identifier characters plus '_' and '$' (Java-style identifiers).
  private static boolean isIdSymbol(char c) {
    return c == '_' || c == '$' || (c >= '0' && c <= '9') || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z');
  }

  /**
   * Tries to derive offset to use from {@link #myOffset2weight} data structure assuming that it contains mappings
   * like '{@code offset -> weight}'.
   *
   * @return one of the keys of the given map to use; negative value if no appropriate key is found or the map is empty
   */
  private int chooseOffset() {
    if (myOffset2weight.end <= 0) {
      return -1;
    }
    // Single-element arrays are used here as plain mutable holders (historical style).
    final double[] resultingWeight = new double[1];
    final int[] resultingOffset = new int[1];
    for (int i = myOffset2weight.end - 1; i >= 0; i--) {
      if (myOffset2weight.data[i] == 0) {
        continue;
      }
      if (resultingWeight[0] <= 0) {
        // First candidate found (scanning from the highest offset downwards).
        resultingWeight[0] = myOffset2weight.data[i];
        resultingOffset[0] = i;
        continue;
      }
      if (resultingWeight[0] < myOffset2weight.data[i]) {
        // Prefer the lower-offset candidate only when its weight*length product wins
        // (see the Rule.weight javadoc for the rationale).
        boolean change = myOffset2weight.data[i] * i > resultingOffset[0] * resultingWeight[0];
        if (change) {
          resultingWeight[0] = myOffset2weight.data[i];
          resultingOffset[0] = i;
        }
      }
    }
    // NOTE(review): if every stored weight happened to be zero this would return
    // 'anchor' (== startOffset); the caller treats any value > 0 as a valid wrap
    // position - confirm that situation cannot occur in practice.
    return resultingOffset[0] + myOffset2weight.anchor;
  }

  /**
   * Defines how wrapping may be performed for particular symbol.
   *
   * @see Rule
   */
  public enum WrapCondition {
    /** Means that wrap is allowed only after particular symbol. */
    AFTER,
    /** Means that wrap is allowed only before particular symbol. */
    BEFORE,
    /** Means that wrap is allowed before and after particular symbol. */
    BOTH
  }

  /**
   * Encapsulates information about rule to use during line wrapping.
   */
  public static class Rule {
    public static final int DEFAULT_WEIGHT = 10;
    public final char symbol;
    public final WrapCondition condition;
    /**
     * There is a possible case that there are more than one appropriate wrap positions on a line and we need to choose between them.
     * Here 'weight' characteristics comes into play.
     * <p/>
     * The general idea is that it's possible to prefer position with lower offset if it's weight is more than the one from
     * position with higher offset and distance between them is not too big.
     * <p/>
     * Current algorithm uses the {@code 'weight'} in a following manner:
     * <p/>
     * <pre>
     * <ol>
     *   <li>Calculate product of line length on first wrap location and its weight;</li>
     *   <li>Calculate product of line length on second wrap location and its weight;</li>
     *   <li>Compare those products;</li>
     * </ol>
     * </pre>
     * <p/>
     * <b>Example</b>
     * Suppose we have two positions that define lines of length 30 and 10 symbols. Suppose that the weights are {@code '1'}
     * and {@code '4'} correspondingly.Position with greater weight is preferred because it's product is higher
     * ({@code 10 * 4 > 30 * 1})
     */
    public final double weight;

    public Rule(char symbol) {
      this(symbol, WrapCondition.BOTH, DEFAULT_WEIGHT);
    }

    public Rule(char symbol, WrapCondition condition) {
      this(symbol, condition, DEFAULT_WEIGHT);
    }

    public Rule(char symbol, double weight) {
      this(symbol, WrapCondition.BOTH, weight);
    }

    public Rule(char symbol, WrapCondition condition, double weight) {
      this.symbol = symbol;
      this.condition = condition;
      this.weight = weight;
    }
  }

  /**
   * Primitive array-based data structure that contain mappings like {@code int -> double}.
   * <p/>
   * The key is array index plus anchor; the value is array value.
   */
  private static class Storage {
    public double[] data = new double[256];
    public int anchor;
    public int end;

    public void store(int key, double value) {
      // NOTE(review): assumes key >= anchor; callers pass offsets >= startOffset.
      int index = key - anchor;
      if (index >= data.length) {
        int newLength = data.length << 1;
        while (newLength <= index && newLength > 0) {
          newLength <<= 1;
        }
        data = ArrayUtil.realloc(data, newLength);
      }
      data[index] = value;
      if (index >= end) {
        end = index + 1;
      }
    }

    public void clear() {
      anchor = 0;
      end = 0;
      Arrays.fill(data, 0);
    }
  }
}
Java | public static class Rule {
public static final int DEFAULT_WEIGHT = 10;
public final char symbol;
public final WrapCondition condition;
/**
* There is a possible case that there are more than one appropriate wrap positions on a line and we need to choose between them.
* Here 'weight' characteristics comes into play.
* <p/>
* The general idea is that it's possible to prefer position with lower offset if it's weight is more than the one from
* position with higher offset and distance between them is not too big.
* <p/>
* Current algorithm uses the {@code 'weight'} in a following manner:
* <p/>
* <pre>
* <ol>
* <li>Calculate product of line length on first wrap location and its weight;</li>
* <li>Calculate product of line length on second wrap location and its weight;</li>
* <li>Compare those products;</li>
* </ol>
* </pre>
* <p/>
* <b>Example</b>
* Suppose we have two positions that define lines of length 30 and 10 symbols. Suppose that the weights are {@code '1'}
* and {@code '4'} correspondingly.Position with greater weight is preferred because it's product is higher
* ({@code 10 * 4 > 30 * 1})
*/
public final double weight;
public Rule(char symbol) {
this(symbol, WrapCondition.BOTH, DEFAULT_WEIGHT);
}
public Rule(char symbol, WrapCondition condition) {
this(symbol, condition, DEFAULT_WEIGHT);
}
public Rule(char symbol, double weight) {
this(symbol, WrapCondition.BOTH, weight);
}
public Rule(char symbol, WrapCondition condition, double weight) {
this.symbol = symbol;
this.condition = condition;
this.weight = weight;
}
} |
Java | private static class Storage {
public double[] data = new double[256];
public int anchor;
public int end;
public void store(int key, double value) {
int index = key - anchor;
if (index >= data.length) {
int newLength = data.length << 1;
while (newLength <= index && newLength > 0) {
newLength <<= 1;
}
data = ArrayUtil.realloc(data, newLength);
}
data[index] = value;
if (index >= end) {
end = index + 1;
}
}
public void clear() {
anchor = 0;
end = 0;
Arrays.fill(data, 0);
}
} |
Java | public class NovorAdvancedSettingsPanel extends PopupWindow {
private final HorizontalLabelDropDownList fragmentaionMethod;
private final HorizontalLabelDropDownList massAnalyzer;
private IdentificationParameters webSearchParameters;
/**
*
*/
public NovorAdvancedSettingsPanel() {
super(VaadinIcons.COG.getHtml() + " Novor Advanced Settings");
AbsoluteLayout container = new AbsoluteLayout();
container.setStyleName("popuppanelmaincontainer");
container.setWidth(385, Unit.PIXELS);
container.setHeight(170, Unit.PIXELS);
Label title = new Label("Novor");
container.addComponent(title, "left:10px;top:10px");
VerticalLayout subContainer = new VerticalLayout();
subContainer.setCaption("De Novo Settings");
subContainer.setSizeFull();
subContainer.setStyleName("subcontainer");
subContainer.addStyleName("importfiltersubcontainer");
subContainer.addStyleName("noversubcontainer");
container.addComponent(subContainer, "left:10px;top:70px;right:10px;bottom:40px");
NovorAdvancedSettingsPanel.this.setContent(container);
NovorAdvancedSettingsPanel.this.setClosable(true);
fragmentaionMethod = new HorizontalLabelDropDownList("Fragmentation Method");
subContainer.addComponent(fragmentaionMethod);
Set<String> values2 = new LinkedHashSet<>();
/**
* The fragmentation type ID: 0: HCD, 1: CID.
*/
values2.add("HCD");
values2.add("CID");
fragmentaionMethod.updateData(values2);
massAnalyzer = new HorizontalLabelDropDownList("Mass Analyzer");
subContainer.addComponent(massAnalyzer);
values2.clear();
values2.add("Trap");
values2.add("TOF");
values2.add("FT");
massAnalyzer.updateData(values2);
String helpText = "<a href='https://www.rapidnovor.com' target='_blank'>";
HelpPopupButton help = new HelpPopupButton(helpText, "<font style='line-height: 20px;'>Click to open Rapid Novor page.</font>", 100, 20);
container.addComponent(help, "left:10px;bottom:10px;");
Button okBtn = new Button("OK");
okBtn.setWidth(76, Unit.PIXELS);
okBtn.setHeight(20, Unit.PIXELS);
okBtn.setStyleName(ValoTheme.BUTTON_TINY);
okBtn.addClickListener((Button.ClickEvent event) -> {
updateParameters();
setPopupVisible(false);
});
Button cancelBtn = new Button("Cancel");
cancelBtn.setStyleName(ValoTheme.BUTTON_TINY);
cancelBtn.setWidth(76, Unit.PIXELS);
cancelBtn.setHeight(20, Unit.PIXELS);
container.addComponent(okBtn, "bottom:10px;right:96px");
container.addComponent(cancelBtn, "bottom:10px;right:10px");
cancelBtn.addClickListener((Button.ClickEvent event) -> {
NovorAdvancedSettingsPanel.this.setPopupVisible(false);
});
}
/**
*
* @param webSearchParameters
*/
public void updateGUI(IdentificationParameters webSearchParameters) {
this.webSearchParameters = webSearchParameters;
NovorParameters noverParameters = (NovorParameters) webSearchParameters.getSearchParameters().getIdentificationAlgorithmParameter(Advocate.novor.getIndex());
massAnalyzer.setSelected(noverParameters.getMassAnalyzer() + "");
fragmentaionMethod.setSelected(noverParameters.getFragmentationMethod() + "");
}
/**
*
*/
@Override
public void onClosePopup() {
}
/**
*
* @param visible
*/
@Override
public void setPopupVisible(boolean visible) {
if (visible && webSearchParameters != null) {
updateGUI(webSearchParameters);
} else if (webSearchParameters != null) {
updateParameters();
}
super.setPopupVisible(visible);
}
private void updateParameters() {
NovorParameters noverParameters = (NovorParameters) webSearchParameters.getSearchParameters().getIdentificationAlgorithmParameter(Advocate.novor.getIndex());
noverParameters.setMassAnalyzer(massAnalyzer.getSelectedValue());
noverParameters.setFragmentationMethod(fragmentaionMethod.getSelectedValue());
}
} |
Java | public class RefreshTokenGrantTest extends TestCase {
public void testConstructor() {
RefreshToken refreshToken = new RefreshToken();
RefreshTokenGrant grant = new RefreshTokenGrant(refreshToken);
assertEquals(GrantType.REFRESH_TOKEN, grant.getType());
assertEquals(refreshToken, grant.getRefreshToken());
Map<String,String> params = grant.toParameters();
assertEquals(GrantType.REFRESH_TOKEN.getValue(), params.get("grant_type"));
assertEquals(refreshToken.getValue(), params.get("refresh_token"));
assertEquals(2, params.size());
}
public void testParse()
throws Exception {
Map<String,String> params = new HashMap<>();
params.put("grant_type", "refresh_token");
params.put("refresh_token", "abc123");
RefreshTokenGrant grant = RefreshTokenGrant.parse(params);
assertEquals(GrantType.REFRESH_TOKEN, grant.getType());
assertEquals("abc123", grant.getRefreshToken().getValue());
}
public void testEquality() {
assertTrue(new RefreshTokenGrant(new RefreshToken("xyz")).equals(new RefreshTokenGrant(new RefreshToken("xyz"))));
}
public void testInequality() {
assertFalse(new RefreshTokenGrant(new RefreshToken("abc")).equals(new RefreshTokenGrant(new RefreshToken("xyz"))));
}
} |
Java | @Test(groups = "api", testName = "CategoryLiveApiTest")
public class CategoryLiveApiTest extends BaseAbiquoApiLiveApiTest {
public void testCreateAndGet() {
Category category = Category.builder(env.context.getApiContext()).name(PREFIX + "-test-category").build();
category.save();
Category apiCategory = env.context.getAdministrationService().findCategory(
CategoryPredicates.name(PREFIX + "-test-category"));
assertNotNull(apiCategory);
assertEquals(category.getName(), apiCategory.getName());
apiCategory.delete();
}
@Test(dependsOnMethods = "testCreateAndGet")
public void testUpdate() {
Iterable<Category> categories = env.context.getAdministrationService().listCategories();
assertNotNull(categories);
Category category = categories.iterator().next();
String name = category.getName();
category.setName(PREFIX + "-test-category-updated");
category.update();
Category apiCategory = env.context.getAdministrationService().findCategory(
CategoryPredicates.name(PREFIX + "-test-category-updated"));
assertNotNull(apiCategory);
assertEquals(PREFIX + "-test-category-updated", apiCategory.getName());
category.setName(name);
category.update();
}
} |
Java | public abstract class AbstractTransport implements Closeable {
ServiceLoader<ISocketProvider> spiLoader = ServiceLoader.load(ISocketProvider.class);
private final Logger logger;
private final BusAddress address;
private SASL.SaslMode saslMode;
private int saslAuthMode;
private IMessageReader inputReader;
private IMessageWriter outputWriter;
private boolean fileDescriptorSupported;
AbstractTransport(BusAddress _address) {
address = _address;
if (_address.isListeningSocket()) {
saslMode = SASL.SaslMode.SERVER;
} else {
saslMode = SASL.SaslMode.CLIENT;
}
saslAuthMode = SASL.AUTH_NONE;
logger = LoggerFactory.getLogger(getClass());
}
/**
* Write a message to the underlying socket.
*
* @param _msg message to write
* @throws IOException on write error or if output was already closed or null
*/
public void writeMessage(Message _msg) throws IOException {
if (!fileDescriptorSupported && Message.ArgumentType.FILEDESCRIPTOR == _msg.getType()) {
throw new IllegalArgumentException("File descriptors are not supported!");
}
if (outputWriter != null && !outputWriter.isClosed()) {
outputWriter.writeMessage(_msg);
} else {
throw new IOException("OutputWriter already closed or null");
}
}
/**
* Read a message from the underlying socket.
*
* @return read message, maybe null
* @throws IOException when input already close or null
* @throws DBusException when message could not be converted to a DBus message
*/
public Message readMessage() throws IOException, DBusException {
if (inputReader != null && !inputReader.isClosed()) {
return inputReader.readMessage();
}
throw new IOException("InputReader already closed or null");
}
/**
* Abstract method implemented by concrete sub classes to establish a connection
* using whatever transport type (e.g. TCP/Unix socket).
* @throws IOException when connection fails
*/
abstract void connect() throws IOException;
/**
* Method to indicate if passing of file descriptors is allowed.
*
* @return true to allow FD passing, false otherwise
*/
abstract boolean hasFileDescriptorSupport();
/**
* Helper method to authenticate to DBus using SASL.
*
* @param _out output stream
* @param _in input stream
* @param _sock socket
* @throws IOException on any error
*/
protected void authenticate(OutputStream _out, InputStream _in, Socket _sock) throws IOException {
SASL sasl = new SASL(hasFileDescriptorSupport());
if (!sasl.auth(saslMode, saslAuthMode, address.getGuid(), _out, _in, _sock)) {
_out.close();
throw new IOException("Failed to auth");
}
fileDescriptorSupported = sasl.isFileDescriptorSupported();
}
/**
* Setup message reader/writer.
* Will look for SPI provider first, if none is found default implementation is used.
* The default implementation does not support file descriptor passing!
*
* @param _socket socket to use
*/
protected void setInputOutput(Socket _socket) {
try {
for( ISocketProvider provider : spiLoader ){
logger.debug( "Found ISocketProvider {}", provider );
provider.setFileDescriptorSupport(hasFileDescriptorSupport() && fileDescriptorSupported);
inputReader = provider.createReader(_socket);
outputWriter = provider.createWriter(_socket);
if( inputReader != null && outputWriter != null ){
logger.debug( "Using ISocketProvider {}", provider );
break;
}
}
} catch (ServiceConfigurationError _ex) {
logger.error("Could not initialize service provider.", _ex);
} catch (IOException _ex) {
logger.error("Could not initialize alternative message reader/writer.", _ex);
}
try{
if( inputReader == null || outputWriter == null ){
logger.debug( "No alternative ISocketProvider found, using built-in implementation. "
+ "inputReader = {}, outputWriter = {}",
inputReader,
outputWriter );
inputReader = new InputStreamMessageReader(_socket.getInputStream());
outputWriter = new OutputStreamMessageWriter(_socket.getOutputStream());
}
} catch (IOException _ex) {
logger.error("Could not initialize default message reader/writer.", _ex);
}
}
protected int getSaslAuthMode() {
return saslAuthMode;
}
protected void setSaslAuthMode(int _saslAuthMode) {
saslAuthMode = _saslAuthMode;
}
protected SASL.SaslMode getSaslMode() {
return saslMode;
}
protected void setSaslMode(SASL.SaslMode _saslMode) {
saslMode = _saslMode;
}
protected BusAddress getAddress() {
return address;
}
protected Logger getLogger() {
return logger;
}
@Override
public void close() throws IOException {
inputReader.close();
outputWriter.close();
}
} |
Java | @Module
public class CommonsActivityModule {
private Activity activity;
public CommonsActivityModule(Activity activity) {
this.activity = activity;
}
@Provides
@ActivityContext
Context provideContext() {
return activity;
}
@Provides
Activity provideActivity() {
return activity;
}
@Provides
@ApplicationContext
Context provideApplicationContext() {
return activity.getApplication().getApplicationContext();
}
@Provides
CommonsPreferenceHelper provideCommonsPreferenceHelper(CommonsPrefsHelperImpl commonsPrefsHelper) {
return commonsPrefsHelper;
}
@Provides
@PreferenceInfo
String providePreferenceFileName() {
return Constants.COMMON_SHARED_PREFS_NAME;
}
@Provides
CompositeDisposable provideCompositeDisposable() {
return new CompositeDisposable();
}
@Provides
BackendCallHelper provideApiHelper() {
return BackendCallHelperImpl.getInstance();
}
} |
Java | @Beta
public class LogEntries implements Iterable<LogEntry> {
private final List<LogEntry> entries;
public LogEntries(Iterable<LogEntry> entries) {
List<LogEntry> mutableEntries = new ArrayList<LogEntry>();
for (LogEntry entry : entries) {
mutableEntries.add(entry);
}
this.entries = Collections.unmodifiableList(mutableEntries);
}
/**
* Get the list of all log entries.
*
* @return a view of all {@link LogEntry} fetched
*/
public List<LogEntry> getAll() {
return entries;
}
/**
* @param level {@link Level} the level to filter the log entries
* @return all log entries for that level and above
*/
public List<LogEntry> filter(Level level) {
List<LogEntry> toReturn = new ArrayList<LogEntry>();
for (LogEntry entry : entries) {
if (entry.getLevel().intValue() >= level.intValue()) {
toReturn.add(entry);
}
}
return toReturn;
}
public Iterator<LogEntry> iterator() {
return entries.iterator();
}
} |
class AlterTableConstantAction extends DDLSingleTableConstantAction
implements RowLocationRetRowSource
{
// -- Compile-time description of the requested ALTER / TRUNCATE ----------------
// Schema the table lives in; re-resolved from 'schemaId' in execGuts() when null.
protected SchemaDescriptor sd;
// Unqualified name of the table being altered.
protected String tableName;
// UUID of the schema; used to re-fetch 'sd' after deserialisation (see execGuts()).
protected UUID schemaId;
// Type of table (e.g. BASE).
protected int tableType;
// Heap conglomerate number; looked up from the table descriptor when 0.
protected long tableConglomerateId;
// Per-column actions (CREATE / DROP / MODIFY_*) requested by the statement.
protected ColumnInfo[] columnInfo;
// Constraints to create or drop as part of this ALTER.
protected ConstraintConstantAction[] constraintActions;
// Requested lock granularity ('T' table / 'R' row); '\0' means unchanged.
protected char lockGranularity;
private boolean compressTable;
private boolean sequential;
// Drop behavior for dropping column (see the constructor's 'behavior' parameter).
private int behavior;
// Alter table compress and Drop column
// State for the compress/drop-column table scan; these fields are used by
// methods outside this excerpt (compressTable()/truncateTable() and helpers).
private boolean doneScan;
private boolean[] needToDropSort;
private boolean[] validRow;
private int bulkFetchSize = 16;
private int currentCompressRow;
private int numIndexes;
private int rowCount;
private long estimatedRowCount;
private long[] indexConglomerateNumbers;
private long[] sortIds;
private FormatableBitSet indexedCols;
private ConglomerateController compressHeapCC;
private ExecIndexRow[] indexRows;
private ExecRow[] baseRow;
private ExecRow currentRow;
private GroupFetchScanController compressHeapGSC;
private IndexRowGenerator[] compressIRGs;
private DataValueDescriptor[][] baseRowArray;
private RowLocation[] compressRL;
private SortController[] sorters;
private int columnPosition;
private ColumnOrdering[][] ordering;
// Descriptor of the table being altered; fetched in executeConstantAction().
private TableDescriptor td;
//truncate table
private boolean truncateTable;
// CONSTRUCTORS
// Execution-context handles; cached for use across the action's methods.
private LanguageConnectionContext lcc;
private DataDictionary dd;
private DependencyManager dm;
private TransactionController tc;
private Activation activation;
/**
 * Make the AlterAction for an ALTER TABLE statement.
 *
 * @param sd descriptor for the schema that table lives in (must be non-null).
 * @param tableName Name of table.
 * @param tableId UUID of table
 * @param tableConglomerateId heap conglomerate number of table
 * @param tableType Type of table (e.g., BASE).
 * @param columnInfo Information on all the columns in the table.
 * @param constraintActions ConstraintConstantAction[] for constraints
 * @param lockGranularity The lock granularity.
 * @param compressTable Whether or not this is a compress table
 * @param behavior drop behavior for dropping column
 * @param sequential If compress table/drop column, whether or not sequential
 * @param truncateTable Whether or not this is a truncate table
 */
AlterTableConstantAction(
    SchemaDescriptor sd,
    String tableName,
    UUID tableId,
    long tableConglomerateId,
    int tableType,
    ColumnInfo[] columnInfo,
    ConstraintConstantAction[] constraintActions,
    char lockGranularity,
    boolean compressTable,
    int behavior,
    boolean sequential,
    boolean truncateTable)
{
    super(tableId);
    this.sd = sd;
    this.tableName = tableName;
    this.tableConglomerateId = tableConglomerateId;
    this.tableType = tableType;
    this.columnInfo = columnInfo;
    this.constraintActions = constraintActions;
    this.lockGranularity = lockGranularity;
    this.compressTable = compressTable;
    this.behavior = behavior;
    this.sequential = sequential;
    this.truncateTable = truncateTable;

    // Sanity build only: a null schema descriptor is a programming error here.
    if (SanityManager.DEBUG)
    {
        SanityManager.ASSERT(sd != null, "schema descriptor is null");
    }
}
// OBJECT METHODS

public String toString()
{
    // Do not put this under SanityManager.DEBUG - it is needed for
    // error reporting.
    //
    // The schema is not known until execution time, so only the bare
    // table name is printed.
    String verb = truncateTable ? "TRUNCATE TABLE " : "ALTER TABLE ";
    return verb + tableName;
}
// INTERFACE METHODS

/**
 * This is the guts of the Execution-time logic for ALTER TABLE.
 *
 * Acquires an exclusive table lock (to avoid deadlocks on the data
 * dictionary rows and phantom table descriptors), invalidates dependents,
 * and then delegates the actual DDL work to execGuts().
 *
 * @see ConstantAction#executeConstantAction
 *
 * @exception StandardException Thrown on failure
 */
public void executeConstantAction( Activation activation )
    throws StandardException
{
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    DependencyManager dm = dd.getDependencyManager();
    TransactionController tc = lcc.getTransactionExecute();

    /*
    ** Inform the data dictionary that we are about to write to it.
    ** There are several calls to data dictionary "get" methods here
    ** that might be done in "read" mode in the data dictionary, but
    ** it seemed safer to do this whole operation in "write" mode.
    **
    ** We tell the data dictionary we're done writing at the end of
    ** the transaction.
    */
    dd.startWriting(lcc);

    // now do the real work

    // get an exclusive lock of the heap, to avoid deadlock on rows of
    // SYSCOLUMNS etc datadictionary tables (track 879) and phantom table
    // descriptor, in which case table shape could be changed by a
    // concurrent thread doing add/drop column (track 3804 and 3825)

    // older version (or at target) has to get td first, potential deadlock
    if (tableConglomerateId == 0)
    {
        td = dd.getTableDescriptor(tableId);
        if (td == null)
        {
            throw StandardException.newException(
                SQLState.LANG_TABLE_NOT_FOUND_DURING_EXECUTION, tableName);
        }
        tableConglomerateId = td.getHeapConglomerateId();
    }

    lockTableForDDL(tc, tableConglomerateId, true);

    // Re-fetch the descriptor after locking: it may have changed before the
    // lock was acquired.
    td = dd.getTableDescriptor(tableId);
    if (td == null)
    {
        throw StandardException.newException(
            SQLState.LANG_TABLE_NOT_FOUND_DURING_EXECUTION, tableName);
    }

    // NOTE(review): dependents are invalidated here and again at the top of
    // execGuts(); presumably intentional (before and after descriptor
    // re-resolution), but worth confirming.
    if(truncateTable)
        dm.invalidateFor(td, DependencyManager.TRUNCATE_TABLE, lcc);
    else
        dm.invalidateFor(td, DependencyManager.ALTER_TABLE, lcc);

    execGuts( activation );
}
/**
 * Wrapper for this DDL action. Factored out so that our child,
 * RepAlterTableConstantAction
 * could enjoy the benefits of the startWriting() method above.
 *
 * Performs the per-column actions, creates/drops constraints, updates the
 * lock granularity, and runs compress/truncate as requested.
 *
 * @param activation the activation this action executes in
 *                   (the original javadoc documented a non-existent 'dd' parameter)
 *
 * @exception StandardException Thrown on failure
 */
public void execGuts( Activation activation)
    throws StandardException
{
    // NOTE(review): 'columnDescriptor' is declared but never used in this method.
    ColumnDescriptor columnDescriptor;
    int numRows = 0;
    boolean tableNeedsScanning = false;
    boolean tableScanned = false;

    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    DependencyManager dm = dd.getDependencyManager();
    TransactionController tc = lcc.getTransactionExecute();

    // Save the TableDescriptor off in the Activation
    activation.setDDLTableDescriptor(td);

    /*
    ** If the schema descriptor is null, then
    ** we must have just read ourselves in.
    ** So we will get the corresponding schema
    ** descriptor from the data dictionary.
    */
    if (sd == null)
    {
        sd = getAndCheckSchemaDescriptor(dd, schemaId, "ALTER TABLE");
    }

    /* Prepare all dependents to invalidate.  (This is there chance
     * to say that they can't be invalidated.  For example, an open
     * cursor referencing a table/view that the user is attempting to
     * alter.) If no one objects, then invalidate any dependent objects.
     */
    if(truncateTable)
        dm.invalidateFor(td, DependencyManager.TRUNCATE_TABLE, lcc);
    else
        dm.invalidateFor(td, DependencyManager.ALTER_TABLE, lcc);

    // Are we working on columns?
    if (columnInfo != null)
    {
        /* NOTE: We only allow a single column to be added within
         * each ALTER TABLE command at the language level.  However,
         * this may change some day, so we will try to plan for it.
         */
        /* for each new column, see if the user is adding a non-nullable
         * column.  This is only allowed on an empty table.
         */
        for (int ix = 0; ix < columnInfo.length; ix++)
        {
            /* Is this new column non-nullable?
             * If so, it can only be added to an
             * empty table if it does not have a default value.
             * We need to scan the table to find out how many rows
             * there are.
             */
            if ((columnInfo[ix].action == ColumnInfo.CREATE) &&
                !(columnInfo[ix].dataType.isNullable()) &&
                (columnInfo[ix].defaultInfo == null) &&
                (columnInfo[ix].autoincInc == 0)
                )
            {
                tableNeedsScanning = true;
            }
        }

        // Scan the table if necessary
        if (tableNeedsScanning)
        {
            numRows = getSemiRowCount(tc);
            // Don't allow user to add non-nullable column to non-empty table
            if (numRows > 0)
            {
                throw StandardException.newException(SQLState.LANG_ADDING_NON_NULL_COLUMN_TO_NON_EMPTY_TABLE,
                    td.getQualifiedName());
            }
            tableScanned = true;
        }

        // for each related column, stuff system.column
        for (int ix = 0; ix < columnInfo.length; ix++)
        {
            // NOTE(review): this per-iteration 'cdl' is never used here.
            ColumnDescriptorList cdl = new ColumnDescriptorList();

            /* If there is a default value, use it, otherwise use null */

            // Are we adding a new column or modifying a default?
            if (columnInfo[ix].action == ColumnInfo.CREATE)
            {
                addNewColumnToTable(activation, ix);
            }
            else if (columnInfo[ix].action ==
                     ColumnInfo.MODIFY_COLUMN_DEFAULT)
            {
                modifyColumnDefault(activation, ix);
            }
            else if (columnInfo[ix].action ==
                     ColumnInfo.MODIFY_COLUMN_TYPE)
            {
                modifyColumnType(activation, ix);
            }
            else if (columnInfo[ix].action ==
                     ColumnInfo.MODIFY_COLUMN_CONSTRAINT)
            {
                modifyColumnConstraint(activation, columnInfo[ix].name, true);
            }
            else if (columnInfo[ix].action ==
                     ColumnInfo.MODIFY_COLUMN_CONSTRAINT_NOT_NULL)
            {
                if (! tableScanned)
                {
                    tableScanned = true;
                    numRows = getSemiRowCount(tc);
                }
                // check that the data in the column is not null
                String colNames[]  = new String[1];
                colNames[0]        = columnInfo[ix].name;
                boolean nullCols[] = new boolean[1];

                /* note validateNotNullConstraint returns true if the
                 * column is nullable
                 */
                if (validateNotNullConstraint(colNames, nullCols,
                        numRows, lcc, SQLState.LANG_NULL_DATA_IN_NON_NULL_COLUMN))
                {
                    /* nullable column - modify it to be not null
                     * This is O.K. at this point since we would have
                     * thrown an exception if any data was null
                     */
                    modifyColumnConstraint(activation, columnInfo[ix].name, false);
                }
            }
            else if (columnInfo[ix].action == ColumnInfo.DROP)
            {
                dropColumnFromTable(activation, ix);
            }
            else if (SanityManager.DEBUG)
            {
                SanityManager.THROWASSERT(
                      "Unexpected action in AlterTableConstantAction");
            }
        }
    }

    /* Create/Drop any constraints */
    if (constraintActions != null)
    {
        for (int conIndex = 0; conIndex < constraintActions.length; conIndex++)
        {
            ConstraintConstantAction cca = constraintActions[conIndex];

            if (cca instanceof CreateConstraintConstantAction)
            {
                int constraintType = cca.getConstraintType();

                /* Some constraint types require special checking:
                 *   Check     - table must be empty, for now
                 *   Primary Key - table cannot already have a primary key
                 */
                switch (constraintType)
                {
                    case DataDictionary.PRIMARYKEY_CONSTRAINT:
                        // Check to see if a constraint of the same type already exists
                        ConstraintDescriptorList cdl = dd.getConstraintDescriptors(td);
                        if (cdl.getPrimaryKey() != null)
                        {
                            throw StandardException.newException(SQLState.LANG_ADD_PRIMARY_KEY_FAILED1,
                                td.getQualifiedName());
                        }
                        if (! tableScanned)
                        {
                            tableScanned = true;
                            numRows = getSemiRowCount(tc);
                        }

                        break;
                    case DataDictionary.CHECK_CONSTRAINT:
                        if (! tableScanned)
                        {
                            tableScanned = true;
                            numRows = getSemiRowCount(tc);
                        }
                        if (numRows > 0)
                        {
                            /*
                            ** We are assuming that there will only be one
                            ** check constraint that we are adding, so it
                            ** is ok to do the check now rather than try
                            ** to lump together several checks.
                            */
                            ConstraintConstantAction.validateConstraint(
                                cca.getConstraintName(),
                                ((CreateConstraintConstantAction)cca).getConstraintText(),
                                td,
                                lcc, true);
                        }
                        break;
                }
            }
            else
            {
                if (SanityManager.DEBUG)
                {
                    if (!(cca instanceof DropConstraintConstantAction))
                    {
                        SanityManager.THROWASSERT("constraintActions[" + conIndex +
                        "] expected to be instanceof DropConstraintConstantAction not " +
                        cca.getClass().getName());
                    }
                }
            }
            constraintActions[conIndex].executeConstantAction(activation);
        }
    }

    // Are we changing the lock granularity?
    if (lockGranularity != '\0')
    {
        if (SanityManager.DEBUG)
        {
            if (lockGranularity != 'T' &&
                lockGranularity != 'R')
            {
                SanityManager.THROWASSERT(
                    "lockGranularity expected to be 'T'or 'R', not " + lockGranularity);
            }
        }

        // update the TableDescriptor
        td.setLockGranularity(lockGranularity);
        // update the DataDictionary
        dd.updateLockGranularity(td, sd, lockGranularity, tc);
    }

    // Are we doing a compress table?
    if (compressTable)
    {
        compressTable(activation);
    }

    // Are we doing a truncate table?
    if (truncateTable)
    {
        truncateTable(activation);
    }
}
/**
 * Workhorse for adding a new column to a table.
 *
 * Adds the column to the heap conglomerate in the store, then records it
 * in SYSCOLUMNS, updates the cached column descriptor list, and (if the
 * column has a default or is autoincrement) performs the follow-up work:
 * populating existing rows and wiring up stored dependencies from the
 * default to its providers.
 *
 * @param activation the activation; supplies the language connection
 *        context, and through it the data dictionary, dependency manager
 *        and transaction used to perform the change.
 * @param ix the index of the column specfication in the ALTER
 *        statement-- currently we allow only one.
 * @exception StandardException thrown on failure.
 */
private void addNewColumnToTable(Activation activation,
                                 int ix)
        throws StandardException
{
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    DependencyManager dm = dd.getDependencyManager();
    TransactionController tc = lcc.getTransactionExecute();

    ColumnDescriptor columnDescriptor =
        td.getColumnDescriptor(columnInfo[ix].name);
    DataValueDescriptor storableDV;
    // The new column goes after the current highest column id.  Adding ix
    // assumes any earlier specs in the same statement were already added;
    // per the javadoc only one column per statement is currently allowed.
    int colNumber = td.getMaxColumnID() + ix;
    DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();

    /* We need to verify that the table does not have an existing
     * column with the same name before we try to add the new
     * one as addColumnDescriptor() is a void method.
     */
    if (columnDescriptor != null)
    {
        throw
            StandardException.newException(
                SQLState.LANG_OBJECT_ALREADY_EXISTS_IN_OBJECT,
                columnDescriptor.getDescriptorType(),
                columnInfo[ix].name,
                td.getDescriptorType(),
                td.getQualifiedName());
    }

    /* If there is a default value, use it, otherwise use null */
    if (columnInfo[ix].defaultValue != null)
        storableDV = columnInfo[ix].defaultValue;
    else
        storableDV = columnInfo[ix].dataType.getNull();

    // Add the column to the conglomerate.(Column ids in store are 0-based)
    tc.addColumnToConglomerate(td.getHeapConglomerateId(), colNumber,
                               storableDV);

    UUID defaultUUID = columnInfo[ix].newDefaultUUID;

    /* Generate a UUID for the default, if one exists
     * and there is no default id yet.
     */
    if (columnInfo[ix].defaultInfo != null &&
        defaultUUID == null)
    {
        defaultUUID = dd.getUUIDFactory().createUUID();
    }

    // Add the column to syscolumns.
    // Column ids in system tables are 1-based
    columnDescriptor = new ColumnDescriptor(
                            columnInfo[ix].name,
                            colNumber + 1,
                            columnInfo[ix].dataType,
                            columnInfo[ix].defaultValue,
                            columnInfo[ix].defaultInfo,
                            td,
                            defaultUUID,
                            columnInfo[ix].autoincStart,
                            columnInfo[ix].autoincInc,
                            // autoincInc != 0 marks the column autoincrement
                            columnInfo[ix].autoincInc != 0
                            );

    dd.addDescriptor(columnDescriptor, td,
                     DataDictionary.SYSCOLUMNS_CATALOG_NUM, false, tc);

    // now add the column to the tables column descriptor list.
    td.getColumnDescriptorList().add(columnDescriptor);

    if (columnDescriptor.isAutoincrement())
    {
        // Fill the new column in existing rows with generated values.
        updateNewAutoincrementColumn(activation, columnInfo[ix].name,
                                     columnInfo[ix].autoincStart,
                                     columnInfo[ix].autoincInc);
    }

    // Update the new column to its default, if it has a non-null default
    if (columnDescriptor.hasNonNullDefault())
    {
        updateNewColumnToDefault(activation,
                                 columnInfo[ix].name,
                                 columnInfo[ix].defaultInfo.getDefaultText(),
                                 lcc);

        // Descriptor representing the default, used as the dependent in
        // the stored dependencies created below.
        DefaultDescriptor defaultDescriptor = new DefaultDescriptor(dd, defaultUUID, td.getUUID(),
                                                                    colNumber + 1);

        /* Create stored dependencies for each provider to default */
        ProviderInfo[] providerInfo = ((DefaultInfoImpl) columnInfo[ix].defaultInfo).getProviderInfo();
        int providerInfoLength = (providerInfo == null) ? 0 : providerInfo.length;
        for (int provIndex = 0; provIndex < providerInfoLength;
             provIndex++)
        {
            Provider provider = null;

            /* We should always be able to find the Provider */
            try
            {
                provider = (Provider) providerInfo[provIndex].
                                        getDependableFinder().
                                        getDependable(
                                            providerInfo[provIndex].getObjectId());
            }
            catch (java.sql.SQLException te)
            {
                // NOTE(review): in a non-debug build this exception is
                // swallowed and addDependency is then called with a null
                // provider -- confirm that this is the intended behavior.
                if (SanityManager.DEBUG)
                {
                    SanityManager.THROWASSERT("unexpected java.sql.SQLException - " + te);
                }
            }

            dm.addDependency(defaultDescriptor, provider, lcc.getContextManager());
        }
    }
}
/**
 * Workhorse for dropping a column from a table.
 *
 * Overall sequence: invalidate dependents of the column, drop or adjust
 * triggers and constraints that reference it (in two passes so that
 * self-referential foreign keys work), physically remove the column by
 * compressing the table into a new heap, then renumber the remaining
 * columns in SYSCOLUMNS and fix up column bit maps in stored
 * dependencies.
 *
 * @param activation the activation; supplies the language connection
 *        context, data dictionary, dependency manager and transaction.
 * @param ix the index of the column specfication in the ALTER
 *        statement-- currently we allow only one.
 * @exception StandardException thrown on failure.
 */
private void dropColumnFromTable(Activation activation,
                                 int ix)
        throws StandardException
{
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    DependencyManager dm = dd.getDependencyManager();
    TransactionController tc = lcc.getTransactionExecute();

    ColumnDescriptor columnDescriptor =
        td.getColumnDescriptor(columnInfo[ix].name);

    // We already verified this in bind, but do it again
    if (columnDescriptor == null)
    {
        throw
            StandardException.newException(
                SQLState.LANG_COLUMN_NOT_FOUND_IN_TABLE,
                columnInfo[ix].name,
                td.getQualifiedName());
    }

    DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
    ColumnDescriptorList tab_cdl = td.getColumnDescriptorList();
    int size = tab_cdl.size();

    // can NOT drop a column if it is the only one in the table
    if (size == 1)
    {
        throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT,
                            dm.getActionString(DependencyManager.DROP_COLUMN),
                            "THE *LAST* COLUMN " + columnInfo[ix].name,
                            "TABLE",
                            td.getQualifiedName() );
    }

    // Position (1-based) of the column being dropped; instance field used
    // later by compressTable() to build the narrower row template.
    columnPosition = columnDescriptor.getPosition();
    boolean cascade = (behavior == StatementType.DROP_CASCADE);

    // Mark only the dropped column in the table's referenced-column map so
    // that the invalidation below can identify which column is going away.
    FormatableBitSet toDrop = new FormatableBitSet(size + 1);
    toDrop.set(columnPosition);
    td.setReferencedColumnMap(toDrop);

    dm.invalidateFor(td, cascade ?
                         DependencyManager.DROP_COLUMN_CASCADE :
                         DependencyManager.DROP_COLUMN, lcc);

    // If column has a default we drop the default and any dependencies
    if (columnDescriptor.getDefaultInfo() != null)
    {
        DefaultDescriptor defaultDesc = columnDescriptor.getDefaultDescriptor(dd);
        dm.clearDependencies(lcc, defaultDesc);
    }

    // need to deal with triggers if has referencedColumns
    GenericDescriptorList tdl = dd.getTriggerDescriptors(td);
    Enumeration descs = tdl.elements();
    while (descs.hasMoreElements())
    {
        TriggerDescriptor trd = (TriggerDescriptor) descs.nextElement();
        int[] referencedCols = trd.getReferencedCols();
        if (referencedCols == null)
            continue;
        int refColLen = referencedCols.length, j;
        boolean changed = false;
        for (j = 0; j < refColLen; j++)
        {
            // A referenced column after the dropped one will shift down a
            // position, so this trigger's metadata must be rewritten.
            if (referencedCols[j] > columnPosition)
                changed = true;
            else if (referencedCols[j] == columnPosition)
            {
                // Trigger references the dropped column itself: cascade
                // drops the trigger, restrict raises an error.
                if (cascade)
                {
                    DropTriggerConstantAction.dropTriggerDescriptor(lcc, dm, dd, tc, trd, activation);
                    activation.addWarning(
                        StandardException.newWarning(SQLState.LANG_TRIGGER_DROPPED,
                            trd.getName(), td.getName()));
                }
                else
                {   // we'd better give an error if don't drop it,
                    // otherwsie there would be unexpected behaviors
                    throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT,
                                        dm.getActionString(DependencyManager.DROP_COLUMN),
                                        columnInfo[ix].name, "TRIGGER",
                                        trd.getName() );
                }
                break;
            }
        }

        // change triggers to refer to columns in new positions
        // (j == refColLen means the trigger survived the loop above)
        if (j == refColLen && changed)
        {
            dd.dropTriggerDescriptor(trd, tc);
            for (j = 0; j < refColLen; j++)
            {
                if (referencedCols[j] > columnPosition)
                    referencedCols[j]--;
            }
            dd.addDescriptor(trd, sd,
                             DataDictionary.SYSTRIGGERS_CATALOG_NUM,
                             false, tc);
        }
    }

    ConstraintDescriptorList csdl = dd.getConstraintDescriptors(td);
    int csdl_size = csdl.size();

    // we want to remove referenced primary/unique keys in the second
    // round. This will ensure that self-referential constraints will
    // work OK.
    int tbr_size = 0;
    ConstraintDescriptor[] toBeRemoved = new ConstraintDescriptor[csdl_size];

    // let's go downwards, don't want to get messed up while removing
    for (int i = csdl_size - 1; i >= 0; i--)
    {
        ConstraintDescriptor cd = csdl.elementAt(i);
        int[] referencedColumns = cd.getReferencedColumns();
        int numRefCols = referencedColumns.length, j;
        boolean changed = false;
        for (j = 0; j < numRefCols; j++)
        {
            if (referencedColumns[j] > columnPosition)
                changed = true;
            if (referencedColumns[j] == columnPosition)
                break;
        }
        if (j == numRefCols)            // column not referenced
        {
            // Check constraints referencing later columns still need their
            // referenced-column positions shifted down by one.
            if ((cd instanceof CheckConstraintDescriptor) && changed)
            {
                dd.dropConstraintDescriptor(td, cd, tc);
                for (j = 0; j < numRefCols; j++)
                {
                    if (referencedColumns[j] > columnPosition)
                        referencedColumns[j]--;
                }
                ((CheckConstraintDescriptor) cd).setReferencedColumnsDescriptor(new ReferencedColumnsDescriptorImpl(referencedColumns));
                dd.addConstraintDescriptor(cd, tc);
            }
            continue;
        }

        if (! cascade)
        {
            // RESTRICT: refuse to drop a column that participates in a
            // multi-column constraint or the primary key.
            if (numRefCols > 1 || cd.getConstraintType() == DataDictionary.PRIMARYKEY_CONSTRAINT)
            {
                throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT,
                                    dm.getActionString(DependencyManager.DROP_COLUMN),
                                    columnInfo[ix].name, "CONSTRAINT",
                                    cd.getConstraintName() );
            }
        }

        if (cd instanceof ReferencedKeyConstraintDescriptor)
        {
            // restrict will raise an error in invalidate if really referenced
            toBeRemoved[tbr_size++] = cd;
            continue;
        }

        // drop now in all other cases
        dm.invalidateFor(cd, DependencyManager.DROP_CONSTRAINT,
                         lcc);
        DropConstraintConstantAction.dropConstraintAndIndex(dm, td, dd,
                                                            cd, tc, activation, true);
        activation.addWarning(StandardException.newWarning(SQLState.LANG_CONSTRAINT_DROPPED,
            cd.getConstraintName(), td.getName()));
    }

    // Second pass: referenced (primary/unique) keys, and on cascade the
    // foreign keys that point at them.
    for (int i = tbr_size - 1; i >= 0; i--)
    {
        ConstraintDescriptor cd = toBeRemoved[i];
        DropConstraintConstantAction.dropConstraintAndIndex(dm, td, dd, cd,
                                                            tc, activation, false);
        activation.addWarning(StandardException.newWarning(SQLState.LANG_CONSTRAINT_DROPPED,
            cd.getConstraintName(), td.getName()));

        if (cascade)
        {
            ConstraintDescriptorList fkcdl = dd.getForeignKeys(cd.getUUID());
            for (int j = 0; j < fkcdl.size(); j++)
            {
                ConstraintDescriptor fkcd = (ConstraintDescriptor) fkcdl.elementAt(j);

                dm.invalidateFor(fkcd,
                                 DependencyManager.DROP_CONSTRAINT,
                                 lcc);

                DropConstraintConstantAction.dropConstraintAndIndex(
                    dm, fkcd.getTableDescriptor(), dd, fkcd, tc, activation, true);
                activation.addWarning(StandardException.newWarning(SQLState.LANG_CONSTRAINT_DROPPED,
                    fkcd.getConstraintName(), fkcd.getTableDescriptor().getName()));
            }
        }

        dm.invalidateFor(cd, DependencyManager.DROP_CONSTRAINT, lcc);
        dm.clearDependencies(lcc, cd);
    }

    // Physically remove the column: rebuild the heap (and indexes) without
    // it.  compressTable() reads the columnPosition field set above.
    compressTable(activation);

    // drop the column from syscolumns
    dd.dropColumnDescriptor(td.getUUID(), columnInfo[ix].name, tc);

    // Renumber the columns that followed the dropped one: each is dropped
    // from SYSCOLUMNS and re-added one position lower (list index i is
    // 0-based, so the new 1-based position of the column at index i is i).
    ColumnDescriptor[] cdlArray = new ColumnDescriptor[size - columnDescriptor.getPosition()];
    for (int i = columnDescriptor.getPosition(), j = 0; i < size; i++, j++)
    {
        ColumnDescriptor cd = (ColumnDescriptor) tab_cdl.elementAt(i);
        dd.dropColumnDescriptor(td.getUUID(), cd.getColumnName(), tc);
        cd.setPosition(i);
        cdlArray[j] = cd;
    }
    dd.addDescriptorArray(cdlArray, td,
                          DataDictionary.SYSCOLUMNS_CATALOG_NUM, false, tc);

    // Fix up column bit maps stored in dependency records: clear the bit of
    // the dropped column and shift down the bits of all later columns.
    List deps = dd.getProvidersDescriptorList(td.getObjectID().toString());
    for (Iterator depsIterator = deps.listIterator(); depsIterator.hasNext();)
    {
        DependencyDescriptor depDesc = (DependencyDescriptor) depsIterator.next();
        DependableFinder finder = depDesc.getProviderFinder();
        if (finder instanceof DDColumnDependableFinder)
        {
            DDColumnDependableFinder colFinder = (DDColumnDependableFinder) finder;
            FormatableBitSet oldColumnBitMap = new FormatableBitSet(colFinder.getColumnBitMap());
            FormatableBitSet newColumnBitMap = new FormatableBitSet(oldColumnBitMap);
            newColumnBitMap.clear();
            int bitLen = oldColumnBitMap.getLength();
            for (int i = 0; i < bitLen; i++)
            {
                if (i < columnPosition && oldColumnBitMap.isSet(i))
                    newColumnBitMap.set(i);
                if (i > columnPosition && oldColumnBitMap.isSet(i))
                    newColumnBitMap.set(i - 1);
            }
            // Unchanged bit map: the dependency does not involve the
            // dropped column or any column after it -- leave it alone.
            if (newColumnBitMap.equals(oldColumnBitMap))
                continue;
            dd.dropStoredDependency(depDesc, tc);
            colFinder.setColumnBitMap(newColumnBitMap.getByteArray());
            dd.addDescriptor(depDesc, null,
                             DataDictionary.SYSDEPENDS_CATALOG_NUM,
                             true, tc);
        }
    }
}
/**
 * Workhorse for modifying the type of a column.
 *
 * Replaces the column's row in SYSCOLUMNS with a descriptor carrying the
 * new type from the ALTER statement; position, default and default UUID
 * are carried over from the existing column, while the autoincrement
 * settings come from the ALTER statement's column specification.
 *
 * @param activation the current activation
 * @param ix the index of the column specification in the ALTER
 *        statement-- currently we allow only one.
 * @exception StandardException thrown on failure.
 */
private void modifyColumnType(Activation activation,
                              int ix)
        throws StandardException
{
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    TransactionController tc = lcc.getTransactionExecute();

    ColumnDescriptor oldColumn = td.getColumnDescriptor(columnInfo[ix].name);

    // Build the replacement descriptor: new type, everything else kept.
    ColumnDescriptor replacement = new ColumnDescriptor(
            columnInfo[ix].name,
            oldColumn.getPosition(),
            columnInfo[ix].dataType,
            oldColumn.getDefaultValue(),
            oldColumn.getDefaultInfo(),
            td,
            oldColumn.getDefaultUUID(),
            columnInfo[ix].autoincStart,
            columnInfo[ix].autoincInc,
            columnInfo[ix].autoincInc != 0);

    // Swap the SYSCOLUMNS row: drop the old descriptor, add the new one.
    dd.dropColumnDescriptor(td.getUUID(), columnInfo[ix].name, tc);
    dd.addDescriptor(replacement, td,
            DataDictionary.SYSCOLUMNS_CATALOG_NUM, false, tc);
}
/**
 * Workhorse for modifying column level constraints.
 * Right now it is restricted to modifying a null constraint to a not null
 * constraint (and back).
 *
 * @param activation  the current activation
 * @param colName     name of the column whose nullability changes
 * @param nullability the new nullability of the column's type
 * @exception StandardException thrown on failure.
 */
private void modifyColumnConstraint(Activation activation,
                                    String colName,
                                    boolean nullability)
        throws StandardException
{
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    TransactionController tc = lcc.getTransactionExecute();

    ColumnDescriptor oldColumn = td.getColumnDescriptor(colName);

    // Flip the nullability on the column's existing type descriptor.
    DataTypeDescriptor dataType = oldColumn.getType();
    dataType.setNullability(nullability);

    // Rebuild the descriptor with the updated type; all other column
    // attributes are carried over unchanged from the old descriptor.
    ColumnDescriptor replacement = new ColumnDescriptor(
            colName,
            oldColumn.getPosition(),
            dataType,
            oldColumn.getDefaultValue(),
            oldColumn.getDefaultInfo(),
            td,
            oldColumn.getDefaultUUID(),
            oldColumn.getAutoincStart(),
            oldColumn.getAutoincInc(),
            oldColumn.getAutoincInc() != 0);

    // Swap the SYSCOLUMNS row: drop the old descriptor, add the new one.
    dd.dropColumnDescriptor(td.getUUID(), colName, tc);
    dd.addDescriptor(replacement, td,
            DataDictionary.SYSCOLUMNS_CATALOG_NUM, false, tc);
}
/**
 * Workhorse for modifying the default value of a column.
 *
 * Clears dependencies on the old default (if any), rewrites the column's
 * SYSCOLUMNS row with the new default information, recomputes the current
 * autoincrement value when an autoincrement default is being added, and
 * creates stored dependencies from the new default to its providers.
 *
 * @param activation activation
 * @param ix the index of the column specfication in the ALTER
 *        statement-- currently we allow only one.
 * @exception StandardException thrown on error.
 */
private void modifyColumnDefault(Activation activation,
                                 int ix)
        throws StandardException
{
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    DependencyManager dm = dd.getDependencyManager();
    TransactionController tc = lcc.getTransactionExecute();

    ColumnDescriptor columnDescriptor =
        td.getColumnDescriptor(columnInfo[ix].name);
    DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
    int columnPosition = columnDescriptor.getPosition();

    // Clean up after the old default, if non-null
    if (columnDescriptor.hasNonNullDefault())
    {
        // Invalidate off of the old default
        DefaultDescriptor defaultDescriptor = new DefaultDescriptor(dd, columnInfo[ix].oldDefaultUUID,
                                                                    td.getUUID(), columnPosition);

        dm.invalidateFor(defaultDescriptor, DependencyManager.MODIFY_COLUMN_DEFAULT, lcc);

        // Drop any dependencies
        dm.clearDependencies(lcc, defaultDescriptor);
    }

    UUID defaultUUID = columnInfo[ix].newDefaultUUID;

    /* Generate a UUID for the default, if one exists
     * and there is no default id yet.
     */
    if (columnInfo[ix].defaultInfo != null &&
        defaultUUID == null)
    {
        defaultUUID = dd.getUUIDFactory().createUUID();
    }

    /* Get a ColumnDescriptor reflecting the new default */
    columnDescriptor = new ColumnDescriptor(
                           columnInfo[ix].name,
                           columnPosition,
                           columnInfo[ix].dataType,
                           columnInfo[ix].defaultValue,
                           columnInfo[ix].defaultInfo,
                           td,
                           defaultUUID,
                           columnInfo[ix].autoincStart,
                           columnInfo[ix].autoincInc,
                           // autoincInc != 0 marks the column autoincrement
                           columnInfo[ix].autoincInc != 0
                           );

    // Update the ColumnDescriptor with new default info
    dd.dropColumnDescriptor(td.getUUID(), columnInfo[ix].name, tc);
    dd.addDescriptor(columnDescriptor, td,
                     DataDictionary.SYSCOLUMNS_CATALOG_NUM, false, tc);

    if (columnInfo[ix].autoincInc != 0)
    {
        // adding an autoincrement default-- calculate the maximum value
        // of the autoincrement column.
        long maxValue = getColumnMax(activation, td, columnInfo[ix].name,
                                     columnInfo[ix].autoincInc,
                                     columnInfo[ix].autoincStart);
        dd.setAutoincrementValue(tc, td.getUUID(), columnInfo[ix].name,
                                 maxValue, true);
    }

    // Add default info for new default, if non-null
    if (columnDescriptor.hasNonNullDefault())
    {
        DefaultDescriptor defaultDescriptor =
            new DefaultDescriptor(dd, defaultUUID,
                                  td.getUUID(),
                                  columnPosition);

        /* Create stored dependencies for each provider to default */
        ProviderInfo[] providerInfo = ((DefaultInfoImpl) columnInfo[ix].defaultInfo).getProviderInfo();
        int providerInfoLength = (providerInfo == null) ? 0 : providerInfo.length;
        for (int provIndex = 0; provIndex < providerInfoLength; provIndex++)
        {
            Provider provider = null;

            /* We should always be able to find the Provider */
            try
            {
                provider = (Provider) providerInfo[provIndex].
                                        getDependableFinder().
                                        getDependable(
                                            providerInfo[provIndex].getObjectId());
            }
            catch(java.sql.SQLException te)
            {
                // NOTE(review): in a non-debug build this exception is
                // swallowed and addDependency is then called with a null
                // provider -- confirm that this is the intended behavior.
                if (SanityManager.DEBUG)
                {
                    SanityManager.THROWASSERT("unexpected java.sql.SQLException - " + te);
                }
            }

            dm.addDependency(defaultDescriptor, provider,
                             lcc.getContextManager());
        }
    }
}
/* NOTE: compressTable can also be called for
 * ALTER TABLE <t> DROP COLUMN <c>;
 *
 * Rebuilds the table's heap conglomerate (copying every row into a new
 * conglomerate via a bulk-fetch scan) and rebuilds all indexes, then
 * points SYSCONGLOMERATES at the new heap and drops the old one.  In the
 * DROP COLUMN case (compressTable == false) the row template is narrowed
 * by one column, using the columnPosition field set by
 * dropColumnFromTable().
 */
private void compressTable(Activation activation)
        throws StandardException
{
    ExecRow emptyHeapRow;
    long newHeapConglom;
    Properties properties = new Properties();
    RowLocation rl;

    // Cache execution context in instance fields; the row-source callbacks
    // invoked during createAndLoadConglomerate() read these.
    this.lcc = activation.getLanguageConnectionContext();
    this.dd = lcc.getDataDictionary();
    this.dm = dd.getDependencyManager();
    this.tc = lcc.getTransactionExecute();
    this.activation = activation;

    if (SanityManager.DEBUG)
    {
        if (lockGranularity != '\0')
        {
            SanityManager.THROWASSERT(
                "lockGranularity expected to be '\0', not " + lockGranularity);
        }
        SanityManager.ASSERT(! compressTable || columnInfo == null,
            "columnInfo expected to be null");
        SanityManager.ASSERT(constraintActions == null,
            "constraintActions expected to be null");
    }

    emptyHeapRow = td.getEmptyExecRow(lcc.getContextManager());
    compressHeapCC = tc.openConglomerate(
                         td.getHeapConglomerateId(),
                         false,
                         TransactionController.OPENMODE_FORUPDATE,
                         TransactionController.MODE_TABLE,
                         TransactionController.ISOLATION_SERIALIZABLE);

    // invalidate any prepared statements that
    // depended on this table (including this one)
    // bug 3653 has threads that start up and block on our lock, but do
    // not see they have to recompile their plan. We now invalidate earlier
    // however they still might recompile using the old conglomerate id before we
    // commit our DD changes.
    //
    dm.invalidateFor(td, DependencyManager.COMPRESS_TABLE, lcc);

    rl = compressHeapCC.newRowLocationTemplate();

    // Get the properties on the old heap
    compressHeapCC.getInternalTablePropertySet(properties);
    compressHeapCC.close();
    compressHeapCC = null;

    // Create an array to put base row template
    baseRow = new ExecRow[bulkFetchSize];
    baseRowArray = new DataValueDescriptor[bulkFetchSize][];
    validRow = new boolean[bulkFetchSize];

    /* Set up index info */
    getAffectedIndexes(activation);

    // Get an array of RowLocation template
    compressRL = new RowLocation[bulkFetchSize];
    indexRows = new ExecIndexRow[numIndexes];

    if (! compressTable)
    {
        // DROP COLUMN path: build a row template one column narrower,
        // skipping the column at columnPosition (1-based).
        ExecRow newRow = activation.getExecutionFactory().getValueRow(emptyHeapRow.nColumns() - 1);
        for (int i = 0; i < newRow.nColumns(); i++)
        {
            newRow.setColumn(i + 1, i < columnPosition - 1 ?
                                    emptyHeapRow.getColumn(i + 1) :
                                    emptyHeapRow.getColumn(i + 1 + 1));
        }
        emptyHeapRow = newRow;
    }
    setUpAllSorts(emptyHeapRow, rl);

    // Start by opening a full scan on the base table.
    openBulkFetchScan(td.getHeapConglomerateId());

    // Get the estimated row count for the sorters
    estimatedRowCount = compressHeapGSC.getEstimatedRowCount();

    // Create the array of base row template
    for (int i = 0; i < bulkFetchSize; i++)
    {
        // create a base row template
        baseRow[i] = td.getEmptyExecRow(lcc.getContextManager());
        baseRowArray[i] = baseRow[i].getRowArray();
        compressRL[i] = compressHeapGSC.newRowLocationTemplate();
    }

    // Create the new heap and load it with this object acting as the
    // row source (the scan opened above feeds the rows).
    newHeapConglom = tc.createAndLoadConglomerate(
                         "heap",
                         emptyHeapRow.getRowArray(),
                         null, //column sort order - not required for heap
                         properties,
                         TransactionController.IS_DEFAULT,
                         this,
                         (long[]) null);

    closeBulkFetchScan();

    // Set the "estimated" row count
    ScanController compressHeapSC = tc.openScan(
                        newHeapConglom,
                        false,
                        TransactionController.OPENMODE_FORUPDATE,
                        TransactionController.MODE_TABLE,
                        TransactionController.ISOLATION_SERIALIZABLE,
                        (FormatableBitSet) null,
                        (DataValueDescriptor[]) null,
                        0,
                        (Qualifier[][]) null,
                        (DataValueDescriptor[]) null,
                        0);
    compressHeapSC.setEstimatedRowCount(rowCount);
    compressHeapSC.close();
    compressHeapSC = null; // RESOLVE DJD CLEANUP

    /*
    ** Inform the data dictionary that we are about to write to it.
    ** There are several calls to data dictionary "get" methods here
    ** that might be done in "read" mode in the data dictionary, but
    ** it seemed safer to do this whole operation in "write" mode.
    **
    ** We tell the data dictionary we're done writing at the end of
    ** the transaction.
    */
    dd.startWriting(lcc);

    // Update all indexes
    if (compressIRGs.length > 0)
    {
        updateAllIndexes(newHeapConglom, dd);
    }

    /* Update the DataDictionary
     * RESOLVE - this will change in 1.4 because we will get
     * back the same conglomerate number
     */
    // Get the ConglomerateDescriptor for the heap
    long oldHeapConglom = td.getHeapConglomerateId();
    ConglomerateDescriptor cd = td.getConglomerateDescriptor(oldHeapConglom);

    // Update sys.sysconglomerates with new conglomerate #
    dd.updateConglomerateDescriptor(cd, newHeapConglom, tc);
    // Drop the old conglomerate
    tc.dropConglomerate(oldHeapConglom);
    cleanUp();
}
/*
 * TRUNCATE TABLE TABLENAME; (quickly removes all the rows from the table
 * and its corresponding indexes).
 * Truncate is implemented by dropping the existing conglomerates (heap,
 * indexes) and recreating new ones with the properties of the dropped
 * conglomerates. Currently the store does not have support to truncate
 * existing conglomerates; until the store supports it, this is the only
 * way to do it.
 * Error cases: truncate error cases are the same as for other DDL
 * statements except
 * 1) Truncate is not allowed when the table is referenced by another table.
 * 2) Truncate is not allowed when there are enabled delete triggers on the
 *    table.
 * Note: because the conglomerate number is changed during the recreate
 * process, all statements will be marked as invalid and they will get
 * recompiled internally on their next execution. This is okay because
 * truncate brings the number of rows to zero, so it may be a good idea to
 * recompile them because the existing plans are likely to be incorrect.
 * The recompile is done internally by the engine; the user does not have
 * to do anything.
 */
private void truncateTable(Activation activation)
        throws StandardException
{
    ExecRow emptyHeapRow;
    long newHeapConglom;
    Properties properties = new Properties();
    RowLocation rl;

    // Cache execution context in instance fields used by the helpers
    // called below (getAffectedIndexes, updateIndex, cleanUp).
    this.lcc = activation.getLanguageConnectionContext();
    this.dd = lcc.getDataDictionary();
    this.dm = dd.getDependencyManager();
    this.tc = lcc.getTransactionExecute();
    this.activation = activation;

    if (SanityManager.DEBUG)
    {
        if (lockGranularity != '\0')
        {
            SanityManager.THROWASSERT(
                "lockGranularity expected to be '\0', not " + lockGranularity);
        }
        SanityManager.ASSERT(columnInfo == null,
            "columnInfo expected to be null");
        SanityManager.ASSERT(constraintActions == null,
            "constraintActions expected to be null");
    }

    //truncate table is not allowed if there are any tables referencing it.
    //except if it is self referencing.
    ConstraintDescriptorList cdl = dd.getConstraintDescriptors(td);
    for(int index = 0; index < cdl.size(); index++)
    {
        ConstraintDescriptor cd = cdl.elementAt(index);
        if (cd instanceof ReferencedKeyConstraintDescriptor)
        {
            ReferencedKeyConstraintDescriptor rfcd = (ReferencedKeyConstraintDescriptor) cd;
            if(rfcd.hasNonSelfReferencingFK(ConstraintDescriptor.ENABLED))
            {
                throw StandardException.newException(SQLState.LANG_NO_TRUNCATE_ON_FK_REFERENCE_TABLE,td.getName());
            }
        }
    }

    //truncate is not allowed when there are enabled DELETE triggers
    GenericDescriptorList tdl = dd.getTriggerDescriptors(td);
    Enumeration descs = tdl.elements();
    while (descs.hasMoreElements())
    {
        TriggerDescriptor trd = (TriggerDescriptor) descs.nextElement();
        if (trd.listensForEvent(TriggerDescriptor.TRIGGER_EVENT_DELETE) &&
            trd.isEnabled())
        {
            throw
                StandardException.newException(SQLState.LANG_NO_TRUNCATE_ON_ENABLED_DELETE_TRIGGERS,
                                               td.getName(),trd.getName());
        }
    }

    //gather information from the existing conglomerate to create new one.
    emptyHeapRow = td.getEmptyExecRow(lcc.getContextManager());
    compressHeapCC = tc.openConglomerate(
                         td.getHeapConglomerateId(),
                         false,
                         TransactionController.OPENMODE_FORUPDATE,
                         TransactionController.MODE_TABLE,
                         TransactionController.ISOLATION_SERIALIZABLE);

    // invalidate any prepared statements that
    // depended on this table (including this one)
    // bug 3653 has threads that start up and block on our lock, but do
    // not see they have to recompile their plan. We now invalidate earlier
    // however they still might recompile using the old conglomerate id before we
    // commit our DD changes.
    //
    dm.invalidateFor(td, DependencyManager.TRUNCATE_TABLE, lcc);

    rl = compressHeapCC.newRowLocationTemplate();
    // Get the properties on the old heap
    compressHeapCC.getInternalTablePropertySet(properties);
    compressHeapCC.close();
    compressHeapCC = null;

    //create new conglomerate
    newHeapConglom = tc.createConglomerate(
                         "heap",
                         emptyHeapRow.getRowArray(),
                         null, //column sort order - not required for heap
                         properties,
                         TransactionController.IS_DEFAULT);

    /* Set up index info to perform truncate on them*/
    getAffectedIndexes(activation);
    if(numIndexes > 0)
    {
        // Build a row template and column ordering for each index; the
        // orderings are consumed by updateIndex() when the empty index
        // conglomerates are recreated.
        indexRows = new ExecIndexRow[numIndexes];
        ordering = new ColumnOrdering[numIndexes][];
        for (int index = 0; index < numIndexes; index++)
        {
            // create a single index row template for each index
            indexRows[index] = compressIRGs[index].getIndexRowTemplate();
            compressIRGs[index].getIndexRow(emptyHeapRow,
                                            rl,
                                            indexRows[index],
                                            (FormatableBitSet) null);
            /* For non-unique indexes, we order by all columns + the RID.
             * For unique indexes, we just order by the columns.
             * No need to try to enforce uniqueness here as
             * index should be valid.
             */
            int[] baseColumnPositions = compressIRGs[index].baseColumnPositions();
            boolean[] isAscending = compressIRGs[index].isAscending();
            int numColumnOrderings;
            numColumnOrderings = baseColumnPositions.length + 1;
            ordering[index] = new ColumnOrdering[numColumnOrderings];
            for (int ii =0; ii < numColumnOrderings - 1; ii++)
            {
                ordering[index][ii] = new IndexColumnOrder(ii, isAscending[ii]);
            }
            // Final ordering entry is the row location (RID) column.
            ordering[index][numColumnOrderings - 1] = new IndexColumnOrder(numColumnOrderings - 1);
        }
    }

    /*
    ** Inform the data dictionary that we are about to write to it.
    ** There are several calls to data dictionary "get" methods here
    ** that might be done in "read" mode in the data dictionary, but
    ** it seemed safer to do this whole operation in "write" mode.
    **
    ** We tell the data dictionary we're done writing at the end of
    ** the transaction.
    */
    dd.startWriting(lcc);

    // truncate all indexes
    if(numIndexes > 0)
    {
        long[] newIndexCongloms = new long[numIndexes];
        for (int index = 0; index < numIndexes; index++)
        {
            updateIndex(newHeapConglom, dd, index, newIndexCongloms);
        }
    }

    // Update the DataDictionary
    // Get the ConglomerateDescriptor for the heap
    long oldHeapConglom = td.getHeapConglomerateId();
    ConglomerateDescriptor cd = td.getConglomerateDescriptor(oldHeapConglom);

    // Update sys.sysconglomerates with new conglomerate #
    dd.updateConglomerateDescriptor(cd, newHeapConglom, tc);
    // Drop the old conglomerate
    tc.dropConglomerate(oldHeapConglom);
    cleanUp();
}
/**
 * Update all of the indexes on a table when doing a bulk insert
 * on an empty table.
 *
 * In sequential mode only the first sorter was fed while the heap was
 * being rebuilt, so each subsequent index requires a fresh scan of the
 * new heap to populate its sorter before it is rebuilt.  In
 * non-sequential mode every sorter is already populated and all indexes
 * are rebuilt directly.
 *
 * @param newHeapConglom conglomerate id of the newly created heap
 * @param dd             the data dictionary
 * @exception StandardException thrown on error
 */
private void updateAllIndexes(long newHeapConglom,
                              DataDictionary dd)
        throws StandardException
{
    long[] newIndexCongloms = new long[numIndexes];

    if (!sequential)
    {
        // All sorters were populated during heap compression; rebuild
        // every index directly.
        for (int i = 0; i < numIndexes; i++)
        {
            updateIndex(newHeapConglom, dd, i, newIndexCongloms);
        }
        return;
    }

    // Sequential mode: the first sorter was populated during heap
    // compression, so index 0 can be rebuilt immediately.
    if (numIndexes >= 1)
    {
        updateIndex(newHeapConglom, dd, 0, newIndexCongloms);
    }

    // For each remaining index, rescan the new heap to feed its sorter,
    // then rebuild that index.
    for (int i = 1; i < numIndexes; i++)
    {
        openBulkFetchScan(newHeapConglom);
        while (getNextRowFromRowSource() != null)
        {
            objectifyStreamingColumns();
            insertIntoSorter(i, compressRL[currentCompressRow - 1]);
        }
        updateIndex(newHeapConglom, dd, i, newIndexCongloms);
        closeBulkFetchScan();
    }
}
/**
 * Rebuild one index against the new heap conglomerate.
 *
 * Copies the storage properties from the old index conglomerate, adds the
 * language-level properties the store does not preserve, then either
 * drains the index's sorter into a freshly loaded conglomerate (compress /
 * drop-column path) or creates an empty one (truncate path).  Index
 * cardinality statistics are recomputed or dropped as appropriate, and
 * SYSCONGLOMERATES is updated to point at the new conglomerate.
 *
 * @param newHeapConglom   conglomerate id of the newly created heap
 * @param dd               the data dictionary
 * @param index            which index (into the compressIRGs /
 *                         indexConglomerateNumbers arrays) to rebuild
 * @param newIndexCongloms out-array; slot [index] receives the new
 *                         index conglomerate id
 * @exception StandardException thrown on error
 */
private void updateIndex(long newHeapConglom, DataDictionary dd,
                         int index, long[] newIndexCongloms)
        throws StandardException
{
    ConglomerateController indexCC;
    Properties properties = new Properties();
    ConglomerateDescriptor cd;
    // Get the ConglomerateDescriptor for the index
    cd = td.getConglomerateDescriptor(indexConglomerateNumbers[index]);

    // Build the properties list for the new conglomerate
    indexCC = tc.openConglomerate(
                  indexConglomerateNumbers[index],
                  false,
                  TransactionController.OPENMODE_FORUPDATE,
                  TransactionController.MODE_TABLE,
                  TransactionController.ISOLATION_SERIALIZABLE);

    // Get the properties on the old index
    indexCC.getInternalTablePropertySet(properties);

    /* Create the properties that language supplies when creating the
     * the index.  (The store doesn't preserve these.)
     */
    int indexRowLength = indexRows[index].nColumns();
    properties.put("baseConglomerateId", Long.toString(newHeapConglom));
    if (cd.getIndexDescriptor().isUnique())
    {
        // Unique index: the RID column is not part of the key.
        properties.put("nUniqueColumns",
                       Integer.toString(indexRowLength - 1));
    }
    else
    {
        // Non-unique index: the RID participates in uniqueness.
        properties.put("nUniqueColumns",
                       Integer.toString(indexRowLength));
    }
    properties.put("rowLocationColumn",
                   Integer.toString(indexRowLength - 1));
    properties.put("nKeyFields", Integer.toString(indexRowLength));

    indexCC.close();

    // We can finally drain the sorter and rebuild the index
    // RESOLVE - all indexes are btrees right now
    // Populate the index.
    RowLocationRetRowSource cCount = null;
    boolean updateStatistics = false;
    if(!truncateTable)
    {
        // Compress / drop-column path: load the new index conglomerate
        // from this index's sorter.
        sorters[index].close();
        sorters[index] = null;

        if (td.statisticsExist(cd))
        {
            // Wrap the sort row source so cardinality is counted while
            // the rows stream into the new conglomerate.
            cCount = new CardinalityCounter(tc.openSortRowSource(sortIds[index]));
            updateStatistics = true;
        }
        else
            cCount = tc.openSortRowSource(sortIds[index]);

        newIndexCongloms[index] = tc.createAndLoadConglomerate(
                                      "BTREE",
                                      indexRows[index].getRowArray(),
                                      ordering[index],
                                      properties,
                                      TransactionController.IS_DEFAULT,
                                      cCount,
                                      (long[]) null);
    }else
    {
        // Truncate path: just create an empty index conglomerate.
        newIndexCongloms[index] = tc.createConglomerate(
                                      "BTREE",
                                      indexRows[index].getRowArray(),
                                      ordering[index],
                                      properties,
                                      TransactionController.IS_DEFAULT);

        //on truncate drop the statistics because we know for sure
        //rowscount is zero and existing statistic will be invalid.
        if (td.statisticsExist(cd))
            dd.dropStatisticsDescriptors(td.getUUID(), cd.getUUID(), tc);
    }

    if (updateStatistics)
    {
        // Replace the old statistics with one row per leading-key prefix,
        // using the cardinalities gathered during the load above.
        dd.dropStatisticsDescriptors(td.getUUID(), cd.getUUID(), tc);
        long numRows;
        if ((numRows = ((CardinalityCounter)cCount).getRowCount()) > 0)
        {
            long[] c = ((CardinalityCounter)cCount).getCardinality();
            for (int i = 0; i < c.length; i++)
            {
                StatisticsDescriptor statDesc =
                    new StatisticsDescriptor(dd, dd.getUUIDFactory().createUUID(),
                        cd.getUUID(), td.getUUID(), "I", new StatisticsImpl(numRows, c[i]),
                        i + 1);
                dd.addDescriptor(statDesc, null,   // no parent descriptor
                                 DataDictionary.SYSSTATISTICS_CATALOG_NUM,
                                 true, tc);        // no error on duplicate.
            }
        }
    }

    /* Update the DataDictionary
     * RESOLVE - this will change in 1.4 because we will get
     * back the same conglomerate number
     *
     * Update sys.sysconglomerates with new conglomerate #, we need to
     * update all (if any) duplicate index entries sharing this same
     * conglomerate.
     */
    dd.updateConglomerateDescriptor(
        td.getConglomerateDescriptors(indexConglomerateNumbers[index]),
        newIndexCongloms[index], tc);

    // Drop the old conglomerate
    tc.dropConglomerate(indexConglomerateNumbers[index]);
}
/**
 * Gather information about the indexes on the table being compressed,
 * truncated, or having a column dropped, populating compressIRGs,
 * indexConglomerateNumbers, numIndexes and indexedCols.
 *
 * For drop column, indexes on the dropped column are dropped (or an
 * error is raised for multi-column unique indexes under RESTRICT), and
 * the remaining index descriptors are rewritten so that base column
 * positions after the dropped column shift down by one.
 *
 * @param activation the activation for the current statement
 *
 * @exception StandardException Thrown on error
 */
private void getAffectedIndexes(Activation activation)
    throws StandardException
{
    IndexLister indexLister = td.getIndexLister( );
    /* We have to get non-distinct index row generaters and conglom numbers
     * here and then compress it to distinct later because drop column
     * will need to change the index descriptor directly on each index
     * entry in SYSCONGLOMERATES, on duplicate indexes too.
     */
    compressIRGs = indexLister.getIndexRowGenerators();
    numIndexes = compressIRGs.length;
    indexConglomerateNumbers = indexLister.getIndexConglomerateNumbers();
    if (! (compressTable || truncateTable))		// then it's drop column
    {
        for (int i = 0; i < compressIRGs.length; i++)
        {
            int[] baseColumnPositions = compressIRGs[i].baseColumnPositions();
            int j;
            // Does this index reference the column being dropped?
            for (j = 0; j < baseColumnPositions.length; j++)
                if (baseColumnPositions[j] == columnPosition) break;
            if (j == baseColumnPositions.length)	// not related
                continue;
            if (baseColumnPositions.length == 1 ||
                (behavior == StatementType.DROP_CASCADE && compressIRGs[i].isUnique()))
            {
                // Single-column index on the dropped column, or a unique
                // index under CASCADE: drop the index itself.
                numIndexes--;
                /* get first conglomerate with this conglom number each time
                 * and each duplicate one will be eventually all dropped
                 */
                ConglomerateDescriptor cd = td.getConglomerateDescriptor
                                            (indexConglomerateNumbers[i]);
                DropIndexConstantAction.dropIndex(dm, dd, tc, cd, td, activation);
                compressIRGs[i] = null;		// mark it
                continue;
            }
            // give an error for unique index on multiple columns including
            // the column we are to drop (restrict), such index is not for
            // a constraint, because constraints have already been handled
            if (compressIRGs[i].isUnique())
            {
                ConglomerateDescriptor cd = td.getConglomerateDescriptor
                                            (indexConglomerateNumbers[i]);
                throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT,
                                    dm.getActionString(DependencyManager.DROP_COLUMN),
                                    columnInfo[0].name, "UNIQUE INDEX",
                                    cd.getConglomerateName() );
            }
        }
        // Compact the arrays past any entries nulled out above, and
        // renumber the base column positions in each surviving index.
        IndexRowGenerator[] newIRGs = new IndexRowGenerator[numIndexes];
        long[] newIndexConglomNumbers = new long[numIndexes];
        for (int i = 0, j = 0; i < numIndexes; i++, j++)
        {
            // Skip over dropped (nulled) entries.
            while (compressIRGs[j] == null)
                j++;
            int[] baseColumnPositions = compressIRGs[j].baseColumnPositions();
            newIRGs[i] = compressIRGs[j];
            newIndexConglomNumbers[i] = indexConglomerateNumbers[j];
            boolean[] isAscending = compressIRGs[j].isAscending();
            boolean reMakeArrays = false;
            int size = baseColumnPositions.length;
            for (int k = 0; k < size; k++)
            {
                // Columns after the dropped one shift down by one position.
                if (baseColumnPositions[k] > columnPosition)
                    baseColumnPositions[k]--;
                else if (baseColumnPositions[k] == columnPosition)
                {
                    baseColumnPositions[k] = 0;		// mark it
                    reMakeArrays = true;
                }
            }
            if (reMakeArrays)
            {
                // The dropped column appeared in this index; rebuild the
                // position/ordering arrays one element shorter, skipping
                // the entry marked 0 above.
                size--;
                int[] newBCP = new int[size];
                boolean[] newIsAscending = new boolean[size];
                for (int k = 0, step = 0; k < size; k++)
                {
                    if (step == 0 && baseColumnPositions[k + step] == 0)
                        step++;
                    newBCP[k] = baseColumnPositions[k + step];
                    newIsAscending[k] = isAscending[k + step];
                }
                IndexDescriptor id = compressIRGs[j].getIndexDescriptor();
                id.setBaseColumnPositions(newBCP);
                id.setIsAscending(newIsAscending);
                id.setNumberOfOrderedColumns(id.numberOfOrderedColumns() - 1);
            }
        }
        compressIRGs = newIRGs;
        indexConglomerateNumbers = newIndexConglomNumbers;
    }
    /* Now we are done with updating each index descriptor entry directly
     * in SYSCONGLOMERATES (for duplicate index as well), from now on, our
     * work should apply ONLY once for each real conglomerate, so we
     * compress any duplicate indexes now.
     */
    Object[] compressIndexResult =
        compressIndexArrays(indexConglomerateNumbers, compressIRGs);
    if (compressIndexResult != null)
    {
        indexConglomerateNumbers = (long[]) compressIndexResult[1];
        compressIRGs = (IndexRowGenerator[]) compressIndexResult[2];
        numIndexes = indexConglomerateNumbers.length;
    }
    // Record which base columns are indexed (1-based bit positions).
    indexedCols = new FormatableBitSet(compressTable || truncateTable ? td.getNumberOfColumns() + 1 :
                                              td.getNumberOfColumns());
    for (int index = 0; index < numIndexes; index++)
    {
        int[] colIds = compressIRGs[index].getIndexDescriptor().baseColumnPositions();
        for (int index2 = 0; index2 < colIds.length; index2++)
        {
            indexedCols.set(colIds[index2]);
        }
    }
}
/**
 * Set up to update all of the indexes on a table when doing a bulk insert
 * on an empty table: allocates the per-index ordering, sort-id and
 * drop-flag arrays, creates one sorter per index, and opens them all.
 *
 * @param sourceRow a base row used only to build the index row template
 *                  passed to each sorter
 * @param rl        a row location used with sourceRow for the template
 *
 * @exception StandardException thrown on error
 */
private void setUpAllSorts(ExecRow sourceRow,
                           RowLocation rl)
    throws StandardException
{
    ordering = new ColumnOrdering[numIndexes][];
    needToDropSort = new boolean[numIndexes];
    sortIds = new long[numIndexes];
    /* For each index, build a single index row and a sorter. */
    for (int index = 0; index < numIndexes; index++)
    {
        // create a single index row template for each index
        indexRows[index] = compressIRGs[index].getIndexRowTemplate();
        // Get an index row based on the base row
        // (This call is only necessary here because we need to pass a template to the sorter.)
        compressIRGs[index].getIndexRow(sourceRow,
                                        rl,
                                        indexRows[index],
                                        (FormatableBitSet) null);
        /* For non-unique indexes, we order by all columns + the RID.
         * For unique indexes, we just order by the columns.
         * No need to try to enforce uniqueness here as
         * index should be valid.
         */
        int[] baseColumnPositions = compressIRGs[index].baseColumnPositions();
        boolean[] isAscending = compressIRGs[index].isAscending();
        int numColumnOrderings;
        SortObserver sortObserver = null;
        /* We can only reuse the wrappers when doing an
         * external sort if there is only 1 index. Otherwise,
         * we could get in a situation where 1 sort reuses a
         * wrapper that is still in use in another sort.
         */
        boolean reuseWrappers = (numIndexes == 1);
        // All key columns plus the trailing row location column.
        numColumnOrderings = baseColumnPositions.length + 1;
        sortObserver = new BasicSortObserver(false, false,
                                             indexRows[index],
                                             reuseWrappers);
        ordering[index] = new ColumnOrdering[numColumnOrderings];
        for (int ii =0; ii < numColumnOrderings - 1; ii++)
        {
            ordering[index][ii] = new IndexColumnOrder(ii, isAscending[ii]);
        }
        // The row location column is always ordered last (ascending).
        ordering[index][numColumnOrderings - 1] = new IndexColumnOrder(numColumnOrderings - 1);
        // create the sorters
        sortIds[index] = tc.createSort(
                            (Properties)null,
                            indexRows[index].getRowArrayClone(),
                            ordering[index],
                            sortObserver,
                            false,			// not in order
                            estimatedRowCount,		// est rows
                            -1				// est row size, -1 means no idea
                            );
    }
    sorters = new SortController[numIndexes];
    // Open the sorts
    for (int index = 0; index < numIndexes; index++)
    {
        sorters[index] = tc.openSort(sortIds[index]);
        // Remember to drop each sort during cleanUp().
        needToDropSort[index] = true;
    }
}
// RowSource interface
/**
 * Returns the set of columns the store should fetch; <code>null</code>
 * means all columns.
 *
 * @see RowSource#getValidColumns
 */
public FormatableBitSet getValidColumns()
{
    // All columns are valid
    return null;
}
/**
 * Returns the next row's column array from the bulk-fetch scan of the
 * old heap, or <code>null</code> when the scan is exhausted.
 *
 * For compress/truncate the fetched base row is returned as-is; for
 * drop column a narrower row is built with the column at
 * <code>columnPosition</code> squeezed out.
 *
 * @see RowSource#getNextRowFromRowSource
 * @exception StandardException on error
 */
public DataValueDescriptor[] getNextRowFromRowSource()
    throws StandardException
{
    currentRow = null;
    // Time for a new bulk fetch?
    if ((! doneScan) &&
        (currentCompressRow == bulkFetchSize || !validRow[currentCompressRow]))
    {
        int bulkFetched = 0;
        bulkFetched = compressHeapGSC.fetchNextGroup(baseRowArray, compressRL);
        // A short group means the scan has reached the end of the heap.
        doneScan = (bulkFetched != bulkFetchSize);
        currentCompressRow = 0;
        rowCount += bulkFetched;
        // Mark which slots of the fetch buffer hold live rows.
        for (int index = 0; index < bulkFetched; index++)
        {
            validRow[index] = true;
        }
        for (int index = bulkFetched; index < bulkFetchSize; index++)
        {
            validRow[index] = false;
        }
    }
    if (validRow[currentCompressRow])
    {
        if (compressTable)
            currentRow = baseRow[currentCompressRow];
        else
        {
            // Drop column: rebuild the row one column narrower.
            // NOTE(review): currentRow was reset to null above, so this
            // branch allocates a new value row on every call — confirm
            // whether reuse was intended.
            if (currentRow == null)
                currentRow = activation.getExecutionFactory().getValueRow(baseRowArray[currentCompressRow].length - 1);
            for (int i = 0; i < currentRow.nColumns(); i++)
            {
                // Columns before the dropped position copy straight
                // across; later columns shift left by one.
                currentRow.setColumn(i + 1, i < columnPosition - 1 ?
                            baseRow[currentCompressRow].getColumn(i+1) :
                            baseRow[currentCompressRow].getColumn(i+1+1));
            }
        }
        currentCompressRow++;
    }
    if (currentRow != null)
    {
        /* Let the target preprocess the row. For now, this
         * means doing an in place clone on any indexed columns
         * to optimize cloning and so that we don't try to drain
         * a stream multiple times.
         */
        if (compressIRGs.length > 0)
        {
            /* Do in-place cloning of all of the key columns */
            currentRow = currentRow.getClone(indexedCols);
        }
        return currentRow.getRowArray();
    }
    return null;
}
/**
 * Tells the store that it must clone any row it wants to keep, because
 * this row source hands out rows from its own buffers.
 *
 * @see RowSource#needsToClone
 */
public boolean needsToClone()
{
    return true;
}
/**
 * Intentionally a no-op: scan and sorter cleanup is deferred to close().
 *
 * @see RowSource#closeRowSource
 */
public void closeRowSource()
{
    // Do nothing here - actual work will be done in close()
}
// RowLocationRetRowSource interface
/**
 * Row locations are needed only when there are index sorters to feed.
 *
 * @see RowLocationRetRowSource#needsRowLocation
 */
public boolean needsRowLocation()
{
    // Only true if table has indexes
    boolean tableHasIndexes = (numIndexes > 0);
    return tableHasIndexes;
}
/**
 * Receives the row location of the row most recently returned by
 * getNextRowFromRowSource() and feeds the corresponding index key
 * into each open sorter.
 *
 * @see RowLocationRetRowSource#rowLocation
 * @exception StandardException on error
 */
public void rowLocation(RowLocation rl)
    throws StandardException
{
    if (compressIRGs.length == 0)
    {
        // No indexes, so nothing to sort.
        return;
    }
    objectifyStreamingColumns();
    /* Put the row into the indexes. If sequential,
     * then we only populate the 1st sorter when compressing
     * the heap.
     */
    int sorterCount = compressIRGs.length;
    if (sequential && sorterCount > 1)
    {
        sorterCount = 1;
    }
    for (int i = 0; i < sorterCount; i++)
    {
        insertIntoSorter(i, rl);
    }
}
/**
 * Materializes any indexed column that is backed by a stream, so the
 * stream is not drained more than once when the row is cloned for the
 * index sorters.
 *
 * @exception StandardException on error
 */
private void objectifyStreamingColumns()
    throws StandardException
{
    DataValueDescriptor[] columns = currentRow.getRowArray();
    for (int col = 0; col < columns.length; col++)
    {
        /* The row array is 0-based while indexedCols is 1-based. */
        if (indexedCols.get(col + 1) && (columns[col] instanceof StreamStorable))
        {
            // Force the streaming value into an in-memory object.
            columns[col].getObject();
        }
    }
}
/**
 * Builds the index key for the current base row and inserts it into
 * the sorter for the given index.
 *
 * @param index which index (and sorter) to target
 * @param rl    row location of the current base row; cloned before use
 * @exception StandardException on error
 */
private void insertIntoSorter(int index, RowLocation rl)
    throws StandardException
{
    // Fresh column array so the sorter never sees reused objects.
    indexRows[index].getNewObjectArray();
    // Associate the index row with a cloned copy of the row location.
    RowLocation clonedLocation = (RowLocation) rl.cloneObject();
    compressIRGs[index].getIndexRow(currentRow,
                                    clonedLocation,
                                    indexRows[index],
                                    (FormatableBitSet) null);
    // Hand the finished key off to this index's sorter.
    sorters[index].insert(indexRows[index].getRowArray());
}
/**
 * Releases everything this action opened: the heap controller, the
 * bulk-fetch scan, every open sorter, and any sorts still registered
 * with the transaction controller.
 *
 * @see ResultSet#cleanUp
 *
 * @exception StandardException Thrown on error
 */
public void cleanUp() throws StandardException
{
    if (compressHeapCC != null)
    {
        compressHeapCC.close();
        compressHeapCC = null;
    }
    if (compressHeapGSC != null)
    {
        closeBulkFetchScan();
    }
    // Close each sorter that is still open.
    if (sorters != null)
    {
        for (int i = 0; i < compressIRGs.length; i++)
        {
            SortController sorter = sorters[i];
            if (sorter != null)
            {
                sorter.close();
            }
            sorters[i] = null;
        }
    }
    // Drop any sorts that were created but not yet dropped.
    if (needToDropSort != null)
    {
        for (int i = 0; i < needToDropSort.length; i++)
        {
            if (!needToDropSort[i])
            {
                continue;
            }
            tc.dropSort(sortIds[i]);
            needToDropSort[i] = false;
        }
    }
}
// class implementation
/**
 * Return the "semi" row count of a table. We are only interested in
 * whether the table has 0, 1 or > 1 rows, so the scan stops after the
 * second row.
 *
 * @return Number of rows (0, 1 or 2 meaning "more than one") in table.
 *
 * @exception StandardException Thrown on failure
 */
private int getSemiRowCount(TransactionController tc)
    throws StandardException
{
    ScanController scan = tc.openScan(td.getHeapConglomerateId(),
                    false,	// hold
                    0,		// open read only
                    TransactionController.MODE_TABLE,
                    TransactionController.ISOLATION_SERIALIZABLE,
                    RowUtil.EMPTY_ROW_BITSET, // scanColumnList
                    null,	// start position
                    ScanController.GE,      // startSearchOperation
                    null, // scanQualifier
                    null, //stop position - through last row
                    ScanController.GT);     // stopSearchOperation
    int rowsSeen = 0;
    // Stop as soon as we know there is more than one row.
    while (rowsSeen < 2 && scan.next())
    {
        rowsSeen++;
    }
    scan.close();
    return rowsSeen;
}
/**
 * Update a new column with its default.
 * We could do the scan ourself here, but
 * instead we get a nested connection and
 * issue the appropriate update statement.
 *
 * @param columnName  column name
 * @param defaultText default text
 * @param lcc         the language connection context
 *
 * @exception StandardException if update to default fails
 */
private void updateNewColumnToDefault
(
    Activation activation,
    String columnName,
    String defaultText,
    LanguageConnectionContext lcc
)
    throws StandardException
{
    /* Delimited identifiers keep mixed-case and special-character
     * object names correct.
     */
    StringBuffer sql = new StringBuffer();
    sql.append("UPDATE \"").append(td.getSchemaName()).append("\".\"");
    sql.append(td.getName()).append("\" SET \"");
    sql.append(columnName).append("\" = ").append(defaultText);
    AlterTableConstantAction.executeUpdate(lcc, sql.toString());
}
/**
 * Prepares and executes an internal update statement on a nested
 * connection, then releases the resulting result set.
 *
 * @param lcc        the language connection context to execute under
 * @param updateStmt the SQL UPDATE text to run
 * @exception StandardException on error
 */
private static void executeUpdate(LanguageConnectionContext lcc, String updateStmt) throws StandardException
{
    PreparedStatement ps = lcc.prepareInternalStatement(updateStmt);
    ResultSet rs = ps.execute(lcc, true);
    try
    {
        rs.close();
    }
    finally
    {
        // Always finish the result set, even if close() throws,
        // so the statement's resources are released.
        rs.finish();
    }
}
/**
 * Computes the minimum/maximum value in a column of a table via an
 * internal SELECT MAX/MIN statement.
 *
 * @param td         table containing the column
 * @param columnName column to aggregate
 * @param increment  increment of the autoincrement column; its sign
 *                   selects MAX (counting up) or MIN (counting down)
 * @param initial    initial value (unused here; kept for callers)
 * @return the extreme value of the column as a long
 * @exception StandardException on error
 */
private long getColumnMax(Activation activation, TableDescriptor td, String columnName,
                          long increment, long initial)
    throws StandardException
{
    String maxStr = (increment > 0) ? "MAX" : "MIN";
    String maxStmt = "SELECT  " + maxStr + "(\"" + columnName + "\")"  +
            "FROM \"" + td.getSchemaName() + "\".\"" + td.getName() + "\"";
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    PreparedStatement ps = lcc.prepareInternalStatement(maxStmt);
    ResultSet rs = ps.execute(lcc, false);
    try
    {
        // An aggregate query always returns exactly one row.
        DataValueDescriptor[] rowArray = rs.getNextRow().getRowArray();
        return rowArray[0].getLong();
    }
    finally
    {
        // Release the result set even if row retrieval throws.
        rs.close();
        rs.finish();
    }
}
/**
 * Clears the dependency records for every column default on this table.
 *
 * @param tableId UUID of the table (currently unused; td is used instead)
 * @param dd      the data dictionary
 * @exception StandardException on error
 */
private void dropAllColumnDefaults(UUID tableId, DataDictionary dd)
    throws StandardException
{
    ColumnDescriptorList columns = td.getColumnDescriptorList();
    int columnCount = columns.size();
    for (int i = 0; i < columnCount; i++)
    {
        ColumnDescriptor column = (ColumnDescriptor) columns.elementAt(i);
        // Only columns with a default carry dependencies to clear.
        if (column.getDefaultInfo() == null)
        {
            continue;
        }
        DefaultDescriptor defaultDescriptor = column.getDefaultDescriptor(dd);
        dm.clearDependencies(lcc, defaultDescriptor);
    }
}
/**
 * Opens a group-fetch (bulk) scan over the given heap conglomerate,
 * reading all columns as objects, read-only, under a serializable
 * table-level lock. Resets doneScan so the row source starts fresh.
 *
 * @param heapConglomNumber conglomerate id of the heap to scan
 * @exception StandardException on error
 */
private void openBulkFetchScan(long heapConglomNumber)
    throws StandardException
{
    doneScan = false;
    compressHeapGSC = tc.openGroupFetchScan(
                        heapConglomNumber,
                        false,	// hold
                        0,	// open base table read only
                        TransactionController.MODE_TABLE,
                        TransactionController.ISOLATION_SERIALIZABLE,
                        null,    // all fields as objects
                        (DataValueDescriptor[]) null,	// startKeyValue
                        0,	// not used when giving null start posn.
                        null,	// qualifier
                        (DataValueDescriptor[]) null,	// stopKeyValue
                        0);	// not used when giving null stop posn.
}
/**
 * Closes the bulk-fetch scan opened by openBulkFetchScan() and clears
 * the reference so cleanUp() does not try to close it twice.
 *
 * @exception StandardException on error
 */
private void closeBulkFetchScan()
    throws StandardException
{
    compressHeapGSC.close();
    compressHeapGSC = null;
}
/**
 * Update values in a new autoincrement column being added to a table.
 * This is similar to updateNewColumnToDefault whereby we issue an
 * update statement using a nested connection. The UPDATE statement
 * uses a static method in ConnectionInfo (which is not documented)
 * which returns the next value to be inserted into the autoincrement
 * column.
 *
 * @param columnName autoincrement column name that is being added.
 * @param initial    initial value of the autoincrement column.
 * @param increment  increment value of the autoincrement column.
 *
 * @see #updateNewColumnToDefault
 */
private void updateNewAutoincrementColumn(Activation activation, String columnName, long initial,
                                          long increment)
    throws StandardException
{
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    // Don't throw an error in bind when we try to update the
    // autoincrement column.
    lcc.setAutoincrementUpdate(true);
    // Register an in-memory counter that the update statement below
    // will draw values from.
    lcc.autoincrementCreateCounter(td.getSchemaName(),
                                   td.getName(),
                                   columnName, new Long(initial),
                                   increment, 0);
    // the sql query is.
    // UPDATE table
    //  set ai_column = ConnectionInfo.nextAutoincrementValue(
    //							schemaName, tableName,
    //							columnName)
    String updateStmt = "UPDATE \"" + td.getSchemaName() + "\".\"" +
        td.getName() + "\" SET \"" + columnName + "\" = " +
        "org.apache.derby.iapi.db.ConnectionInfo::" +
        "nextAutoincrementValue(" +
        "'" + td.getSchemaName() + "'" + "," +
        "'" + td.getName() +  "'" + "," +
        "'" + columnName + "'" + ")";
    try
    {
        AlterTableConstantAction.executeUpdate(lcc, updateStmt);
    }
    catch (StandardException se)
    {
        if (se.getMessageId().equals(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE))
        {
            // If overflow, override with more meaningful message.
            throw StandardException.newException(SQLState.LANG_AI_OVERFLOW,
                                                 se,
                                                 td.getName(),
                                                 columnName);
        }
        throw se;
    }
    finally
    {
        // and now update the autoincrement value.
        lcc.autoincrementFlushCache(td.getUUID());
        lcc.setAutoincrementUpdate(false);
    }
}
/**
 * Make sure that the columns are non null.
 * If any column is nullable, check that the data is null.
 *
 * @param columnNames names of columns to be checked
 * @param nullCols    out-parameter: set to true for each column that is
 *                    declared nullable
 * @param numRows     number of rows in the table (0, 1, or 2 for "more")
 * @param lcc         language context
 * @param errorMsg    error message to use for exception
 *
 * @return true if any nullable columns found (nullable columns must have
 *         all non null data or exception is thrown
 * @exception StandardException on error
 */
private boolean validateNotNullConstraint
(
    String	columnNames[],
    boolean	nullCols[],
    int		numRows,
    LanguageConnectionContext	lcc,
    String	errorMsg
)
    throws StandardException
{
    boolean foundNullable = false;
    StringBuffer constraintText = new StringBuffer();
    /*
     * Check for nullable columns and create a constraint string which can
     * be used in validateConstraint to check whether any of the
     * data is null.
     */
    // NOTE(review): column names are appended undelimited here, unlike
    // the quoted identifiers used elsewhere in this class — verify
    // behavior for case-sensitive or special-character column names.
    for (int colCtr = 0; colCtr < columnNames.length; colCtr++)
    {
        ColumnDescriptor cd = td.getColumnDescriptor(columnNames[colCtr]);
        if (cd == null)
        {
            throw StandardException.newException(SQLState.LANG_COLUMN_NOT_FOUND_IN_TABLE,
                                                 columnNames[colCtr],
                                                 td.getName());
        }
        if (cd.getType().isNullable())
        {
            if (numRows > 0)
            {
                // already found a nullable column so add "AND"
                if (foundNullable)
                    constraintText.append(" AND ");
                constraintText.append(columnNames[colCtr] + " IS NOT NULL ");
            }
            foundNullable = true;
            nullCols[colCtr] = true;
        }
    }
    /* if the table has nullable columns and isn't empty
     * we need to validate the data
     */
    if (foundNullable && numRows > 0)
    {
        if (!ConstraintConstantAction.validateConstraint(
                (String) null,
                constraintText.toString(),
                td,
                lcc,
                false))
        {
            if (errorMsg.equals(SQLState.LANG_NULL_DATA_IN_PRIMARY_KEY))
            {	//alter table add primary key
                throw StandardException.newException(
                    SQLState.LANG_NULL_DATA_IN_PRIMARY_KEY,
                    td.getQualifiedName());
            }
            else
            {	//alter table modify column not null
                throw StandardException.newException(
                    SQLState.LANG_NULL_DATA_IN_NON_NULL_COLUMN,
                    td.getQualifiedName(), columnNames[0]);
            }
        }
    }
    return foundNullable;
}
/**
 * Get rid of duplicates from a set of index conglomerate numbers and
 * index descriptors.
 *
 * @param indexCIDS array of index conglomerate numbers
 * @param irgs      array of index row generaters
 *
 * @return value: If no duplicates, returns NULL; otherwise,
 *         a size-3 array of objects, first element is an
 *         array of duplicates' indexes in the input arrays;
 *         second element is the compact indexCIDs; third
 *         element is the compact irgs.
 */
private Object[] compressIndexArrays(
    long[] indexCIDS,
    IndexRowGenerator[] irgs)
{
    /* An efficient way to compress indexes. From one end of workSpace,
     * we save unique conglom IDs; and from the other end we save
     * duplicate indexes' indexes. We save unique conglom IDs so that
     * we can do less amount of comparisons. This is efficient in
     * space as well. No need to use hash table.
     */
    long[] workSpace = new long[indexCIDS.length];
    // j counts unique ids stored at the front of workSpace;
    // k is the next free slot at the back for duplicate positions.
    int j = 0, k = indexCIDS.length - 1;
    for (int i = 0; i < indexCIDS.length; i++)
    {
        int m;
        for (m = 0; m < j; m++)		// look up our unique set
        {
            if (indexCIDS[i] == workSpace[m])	// it's a duplicate
            {
                workSpace[k--] = i;		// save dup index's index
                break;
            }
        }
        if (m == j)
            workSpace[j++] = indexCIDS[i];	// save unique conglom id
    }
    if (j < indexCIDS.length)		// duplicate exists
    {
        long[] newIndexCIDS = new long[j];
        IndexRowGenerator[] newIrgs = new IndexRowGenerator[j];
        int[] duplicateIndexes = new int[indexCIDS.length - j];
        k = 0;
        // do everything in one loop
        for (int m = 0, n = indexCIDS.length - 1; m < indexCIDS.length; m++)
        {
            // we already gathered our indexCIDS and duplicateIndexes
            if (m < j)
                newIndexCIDS[m] = workSpace[m];
            else
                // Duplicate positions were stored back-to-front, so
                // reverse them into ascending order here.
                duplicateIndexes[indexCIDS.length - m - 1] = (int) workSpace[m];
            // stack up our irgs, indexSCOCIs, indexDCOCIs
            if ((n >= j) && (m == (int) workSpace[n]))
                n--;
            else
            {
                newIrgs[k] = irgs[m];
                k++;
            }
        }
        // construct return value
        Object[] returnValue = new Object[3]; // [indexSCOCIs == null ? 3 : 5];
        returnValue[0] = duplicateIndexes;
        returnValue[1] = newIndexCIDS;
        returnValue[2] = newIrgs;
        return returnValue;
    }
    else		// no duplicates
        return null;
}
} |
Java | public final class DebugIntentFactory implements IntentFactory {
private final IntentFactory realIntentFactory;
private final boolean isMockMode;
private final Preference<Boolean> captureIntents;
public DebugIntentFactory(IntentFactory realIntentFactory, boolean isMockMode,
Preference<Boolean> captureIntents) {
this.realIntentFactory = realIntentFactory;
this.isMockMode = isMockMode;
this.captureIntents = captureIntents;
}
@Override public Intent createUrlIntent(String url) {
Intent baseIntent = realIntentFactory.createUrlIntent(url);
if (!isMockMode || !captureIntents.get()) {
return baseIntent;
} else {
return ExternalIntentActivity.createIntent(baseIntent);
}
}
} |
Java | public class PedigreeDiseaseCompatibilityDecorator {
/** the pedigree */
public final Pedigree pedigree;
/**
* Initialize decorator.
*/
public PedigreeDiseaseCompatibilityDecorator(Pedigree pedigree) {
this.pedigree = pedigree;
}
/**
* @return <code>true</code> if the <code>list</code> of {@link Genotype} calls is compatible with the autosomal
* dominant mode of inheritance
* @throws CompatibilityCheckerException
* if there are problems with <code>list</code> or {@link #pedigree}.
*/
public boolean isCompatibleWithAutosomalDominant(GenotypeList list) throws CompatibilityCheckerException {
return new CompatibilityCheckerAutosomalDominant(pedigree, list).run();
}
/**
* @return <code>true</code> if the <code>list</code> of {@link Genotype} calls is compatible with the autosomal
* recessive mode of inheritance
*/
public boolean isCompatibleWithAutosomalRecessive(GenotypeList list) throws CompatibilityCheckerException {
return new CompatibilityCheckerAutosomalRecessive(pedigree, list).run();
}
/**
* @return <code>true</code> if the <code>list</code> of {@link Genotype} calls is compatible with the X dominant
* mode of inheritance
*/
public boolean isCompatibleWithXDominant(GenotypeList list) throws CompatibilityCheckerException {
return new CompatibilityCheckerXDominant(pedigree, list).run();
}
/**
* @return <code>true</code> if the <code>list</code> of {@link Genotype} calls is compatible with the X recessive
* mode of inheritance
* @throws CompatibilityCheckerException
* if there are problems with <code>list</code> or {@link #pedigree}.
*/
public boolean isCompatibleWithXRecessive(GenotypeList list) throws CompatibilityCheckerException {
return new CompatibilityCheckerXRecessive(pedigree, list).run();
}
/**
* Convenience method for checking whether a {@link GenotypeList} is compatible with a given
* {@link ModeOfInheritance} and pedigree.
*
* @param list
* list of genotype calls to check for compatibility
* @param mode
* mode of inheritance to use for the checking
* @return <code>true</code> if <code>call</code> is compatible with the given <code>mode</code> of inheritance,
* also <code>true</code> if <code>mode</code> is {@link ModeOfInheritance#UNINITIALIZED}
* @throws CompatibilityCheckerException
* if there are problems with <code>list</code> or {@link #pedigree}.
*/
public boolean isCompatibleWith(GenotypeList list, ModeOfInheritance mode) throws CompatibilityCheckerException {
switch (mode) {
case AUTOSOMAL_DOMINANT:
return isCompatibleWithAutosomalDominant(list);
case AUTOSOMAL_RECESSIVE:
return isCompatibleWithAutosomalRecessive(list);
case X_RECESSIVE:
return isCompatibleWithXRecessive(list);
case X_DOMINANT:
return isCompatibleWithXDominant(list);
case UNINITIALIZED:
default:
return true;
}
}
} |
Java | public class ClientTestInterceptorCheckedExceptionPRETest {
@Test(dataProvider = "TransportType")
public void doTest(DiscoClientWrapper.TransportType tt) throws Exception {
// Set up the client using rescript transport
DiscoClientWrapper discoClientWrapper1 = DiscoClientWrapper.getInstance(tt);
DiscoClientWrapper wrapper = discoClientWrapper1;
// Call a wrapper method that will call an operation that returns a pre operation interceptor exception, catches it and returns the exception message
String errorMessage = wrapper.callInterceptorExceptionOperation(com.betfair.baseline.v2.enumerations.PreOrPostInterceptorException.PRE);
assertEquals("An anticipated pre-execution BSIDL defined checked exception", errorMessage);
}
@DataProvider(name="TransportType")
public Object[][] clients() {
return ClientTestsHelper.clientsToTest();
}
} |
Java | public class RequestFilter extends AbstractRequestFilter {
@Override
protected AppIDEnums getAppId() {
return AppIDEnums.SES_ROS;
}
} |