| id | content | max_stars_repo_path |
|---|---|---|
bugs-dot-jar_data_OAK-1215_a9efe3c4 | ---
BugID: OAK-1215
Summary: Wildcards in relative property paths don't work in search expressions
Description: |-
A search XPath of the form:
{code}
/jcr:root/etc/commerce/products//*[@size='M' or */@size='M']
{code}
returns:
{code}
Invalid path: *
{code}
(This works fine in Jackrabbit.)
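For reference, a minimal sketch (not taken from the report; the Session is assumed to be obtained elsewhere) of how such a query is issued through the JCR API:
{code}
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.query.Query;
import javax.jcr.query.QueryManager;
import javax.jcr.query.QueryResult;

class WildcardQueryExample {
    static QueryResult run(Session session) throws RepositoryException {
        QueryManager qm = session.getWorkspace().getQueryManager();
        Query q = qm.createQuery(
                "/jcr:root/etc/commerce/products//*[@size='M' or */@size='M']",
                Query.XPATH); // deprecated in JCR 2.0, still supported by Oak
        // Before this patch the relative "*/@size" predicate failed with "Invalid path: *".
        return q.execute();
    }
}
{code}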
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/AstElement.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/AstElement.java
index 40786b4..963865d 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/AstElement.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/AstElement.java
@@ -52,9 +52,10 @@ abstract class AstElement {
/**
* Normalize the property name (including namespace remapping).
+ * Asterisks are kept.
*
* @param propertyName the property name to normalize
- * @return the normalized property name
+ * @return the normalized (oak-) property name
*/
protected String normalizePropertyName(String propertyName) {
// TODO normalize the path (remove superfluous ".." and "."
@@ -71,7 +72,23 @@ abstract class AstElement {
}
// relative properties
String relativePath = PathUtils.getParentPath(propertyName);
- relativePath = query.getOakPath(relativePath);
+ if (relativePath.indexOf('*') >= 0) {
+ StringBuilder buff = new StringBuilder();
+ for (String p : PathUtils.elements(relativePath)) {
+ if (!p.equals("*")) {
+ p = query.getOakPath(p);
+ }
+ if (p.length() > 0) {
+ if (buff.length() > 0) {
+ buff.append('/');
+ }
+ buff.append(p);
+ }
+ }
+ relativePath = buff.toString();
+ } else {
+ relativePath = query.getOakPath(relativePath);
+ }
propertyName = PathUtils.getName(propertyName);
propertyName = normalizeNonRelativePropertyName(propertyName);
return PathUtils.concat(relativePath, propertyName);
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/PropertyValueImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/PropertyValueImpl.java
index f1eb907..88d5610 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/PropertyValueImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/PropertyValueImpl.java
@@ -18,7 +18,6 @@
*/
package org.apache.jackrabbit.oak.query.ast;
-import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
@@ -26,17 +25,10 @@ import java.util.Set;
import javax.jcr.PropertyType;
-import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.PropertyValue;
-import org.apache.jackrabbit.oak.api.Tree;
-import org.apache.jackrabbit.oak.api.Type;
-import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.query.QueryImpl;
import org.apache.jackrabbit.oak.query.SQL2Parser;
import org.apache.jackrabbit.oak.query.index.FilterImpl;
-import org.apache.jackrabbit.oak.spi.query.PropertyValues;
-
-import com.google.common.collect.Iterables;
/**
* A property expression.
@@ -107,58 +99,13 @@ public class PropertyValueImpl extends DynamicOperandImpl {
@Override
public PropertyValue currentProperty() {
- boolean asterisk = PathUtils.getName(propertyName).equals("*");
- if (!asterisk) {
- PropertyValue p = selector.currentProperty(propertyName);
- return matchesPropertyType(p) ? p : null;
- }
- Tree tree = selector.currentTree();
- if (tree == null || !tree.exists()) {
- return null;
- }
- if (!asterisk) {
- String name = PathUtils.getName(propertyName);
- name = normalizePropertyName(name);
- PropertyState p = tree.getProperty(name);
- if (p == null) {
- return null;
- }
- return matchesPropertyType(p) ? PropertyValues.create(p) : null;
- }
- // asterisk - create a multi-value property
- // warning: the returned property state may have a mixed type
- // (not all values may have the same type)
-
- // TODO currently all property values are converted to strings -
- // this doesn't play well with the idea that the types may be different
- List<String> values = new ArrayList<String>();
- for (PropertyState p : tree.getProperties()) {
- if (matchesPropertyType(p)) {
- Iterables.addAll(values, p.getValue(Type.STRINGS));
- }
- }
- // "*"
- return PropertyValues.newString(values);
- }
-
- private boolean matchesPropertyType(PropertyValue value) {
- if (value == null) {
- return false;
- }
- if (propertyType == PropertyType.UNDEFINED) {
- return true;
- }
- return value.getType().tag() == propertyType;
- }
-
- private boolean matchesPropertyType(PropertyState state) {
- if (state == null) {
- return false;
- }
+ PropertyValue p;
if (propertyType == PropertyType.UNDEFINED) {
- return true;
+ p = selector.currentProperty(propertyName);
+ } else {
+ p = selector.currentProperty(propertyName, propertyType);
}
- return state.getType().tag() == propertyType;
+ return p;
}
public void bindSelector(SourceImpl source) {
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
index 4d99f0f..a9c22fd 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
@@ -40,6 +40,7 @@ import javax.annotation.Nonnull;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.PropertyValue;
import org.apache.jackrabbit.oak.api.Tree;
+import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.query.QueryImpl;
import org.apache.jackrabbit.oak.query.fulltext.FullTextExpression;
@@ -53,6 +54,7 @@ import org.apache.jackrabbit.oak.spi.query.QueryIndex;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Iterables;
/**
* A selector within a query.
@@ -384,7 +386,20 @@ public class SelectorImpl extends SourceImpl {
String pn = normalizePropertyName(propertyName);
return currentOakProperty(pn);
}
-
+
+ /**
+ * The value for the given selector for the current node, filtered by
+ * property type.
+ *
+ * @param propertyName the JCR (not normalized) property name
+ * @param propertyType only include properties of this type
+ * @return the property value (possibly null)
+ */
+ public PropertyValue currentProperty(String propertyName, int propertyType) {
+ String pn = normalizePropertyName(propertyName);
+ return currentOakProperty(pn, propertyType);
+ }
+
/**
* Get the property value. The property name may be relative. The special
* property names "jcr:path", "jcr:score" and "rep:excerpt" are supported.
@@ -393,6 +408,24 @@ public class SelectorImpl extends SourceImpl {
* @return the property value or null if not found
*/
public PropertyValue currentOakProperty(String oakPropertyName) {
+ return currentOakProperty(oakPropertyName, null);
+ }
+
+ private PropertyValue currentOakProperty(String oakPropertyName, Integer propertyType) {
+ boolean asterisk = oakPropertyName.indexOf('*') >= 0;
+ if (asterisk) {
+ Tree t = currentTree();
+ ArrayList<PropertyValue> list = new ArrayList<PropertyValue>();
+ readOakProperties(list, t, oakPropertyName, propertyType);
+ if (list.size() == 0) {
+ return null;
+ }
+ ArrayList<String> strings = new ArrayList<String>();
+ for (PropertyValue p : list) {
+ Iterables.addAll(strings, p.getValue(Type.STRINGS));
+ }
+ return PropertyValues.newString(strings);
+ }
boolean relative = oakPropertyName.indexOf('/') >= 0;
Tree t = currentTree();
if (relative) {
@@ -410,6 +443,11 @@ public class SelectorImpl extends SourceImpl {
}
oakPropertyName = PathUtils.getName(oakPropertyName);
}
+ return currentOakProperty(t, oakPropertyName, propertyType);
+ }
+
+ private PropertyValue currentOakProperty(Tree t, String oakPropertyName, Integer propertyType) {
+ PropertyValue result;
if (t == null || !t.exists()) {
return null;
}
@@ -420,13 +458,59 @@ public class SelectorImpl extends SourceImpl {
// not a local path
return null;
}
- return PropertyValues.newString(local);
+ result = PropertyValues.newString(local);
} else if (oakPropertyName.equals(QueryImpl.JCR_SCORE)) {
- return currentRow.getValue(QueryImpl.JCR_SCORE);
+ result = currentRow.getValue(QueryImpl.JCR_SCORE);
} else if (oakPropertyName.equals(QueryImpl.REP_EXCERPT)) {
- return currentRow.getValue(QueryImpl.REP_EXCERPT);
+ result = currentRow.getValue(QueryImpl.REP_EXCERPT);
+ } else {
+ result = PropertyValues.create(t.getProperty(oakPropertyName));
+ }
+ if (result == null) {
+ return null;
+ }
+ if (propertyType != null && result.getType().tag() != propertyType) {
+ return null;
+ }
+ return result;
+ }
+
+ private void readOakProperties(ArrayList<PropertyValue> target, Tree t, String oakPropertyName, Integer propertyType) {
+ while (true) {
+ if (t == null || !t.exists()) {
+ return;
+ }
+ int slash = oakPropertyName.indexOf('/');
+ if (slash < 0) {
+ break;
+ }
+ String parent = oakPropertyName.substring(0, slash);
+ oakPropertyName = oakPropertyName.substring(slash + 1);
+ if (parent.equals("..")) {
+ t = t.isRoot() ? null : t.getParent();
+ } else if (parent.equals(".")) {
+ // same node
+ } else if (parent.equals("*")) {
+ for (Tree child : t.getChildren()) {
+ readOakProperties(target, child, oakPropertyName, propertyType);
+ }
+ } else {
+ t = t.getChild(parent);
+ }
+ }
+ if (!"*".equals(oakPropertyName)) {
+ PropertyValue value = currentOakProperty(t, oakPropertyName, propertyType);
+ if (value != null) {
+ target.add(value);
+ }
+ return;
}
- return PropertyValues.create(t.getProperty(oakPropertyName));
+ for (PropertyState p : t.getProperties()) {
+ if (propertyType == null || p.getType().tag() == propertyType) {
+ PropertyValue v = PropertyValues.create(p);
+ target.add(v);
+ }
+ }
}
@Override
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SourceImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SourceImpl.java
index 7b95dec..a545ba8 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SourceImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SourceImpl.java
@@ -176,7 +176,9 @@ public abstract class SourceImpl extends AstElement {
*
* this creates a filter for the given query
*
+ * @param preparing whether this is the prepare phase
+ * @return a new filter
*/
- abstract public Filter createFilter(boolean preparing);
+ public abstract Filter createFilter(boolean preparing);
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/fulltext/SimpleExcerptProvider.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/fulltext/SimpleExcerptProvider.java
index aa56db0..e6ed3a9 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/fulltext/SimpleExcerptProvider.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/fulltext/SimpleExcerptProvider.java
@@ -35,6 +35,9 @@ import org.apache.jackrabbit.oak.query.ast.OrImpl;
import com.google.common.collect.ImmutableSet;
+/**
+ * This class can extract excerpts from a node.
+ */
public class SimpleExcerptProvider {
private static final String REP_EXCERPT_FN = "rep:excerpt(.)";
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1215_a9efe3c4.diff |
bugs-dot-jar_data_OAK-766_6fc5ea9d | ---
BugID: OAK-766
Summary: 'TreeImpl#*Location: unable to retrieve child location if access to parent is denied'
Description: |-
As a consequence of OAK-709 we now have an issue with the way
SessionDelegate and Root#getLocation access a node in the hierarchy
that has an ancestor which is not accessible.
Specifically, RootImpl#getLocation will be served a NullLocation for the
first ancestor which is not accessible, and consequently any accessible
child node below it cannot be accessed.
To reproduce the issue you may:
- change AccessControlConfigurationImpl to use PermissionProviderImpl instead
of the temporary solution
- and run o.a.j.oak.jcr.security.authorization.ReadTest#testReadDenied
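For illustration, a minimal sketch of the failing access pattern (the paths are hypothetical; /restricted is assumed to be unreadable for the session while /restricted/visible is readable):
{code}
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;

class DeniedAncestorExample {
    static Node readAccessibleChild(Session session) throws RepositoryException {
        // Before the fix, RootImpl#getLocation returned a NullLocation for the
        // unreadable ancestor, so this lookup failed even though the child itself
        // is readable for the session.
        return session.getNode("/restricted/visible");
    }
}
{code}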
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/core/AbstractNodeLocation.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/core/AbstractNodeLocation.java
index e70a1b1..89df0ac 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/core/AbstractNodeLocation.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/core/AbstractNodeLocation.java
@@ -57,15 +57,15 @@ abstract class AbstractNodeLocation<T extends Tree> extends AbstractTreeLocation
@Override
public TreeLocation getChild(String name) {
- T child = getChildTree(name);
- if (child != null) {
- return createNodeLocation(child);
- }
-
PropertyState prop = getPropertyState(name);
if (prop != null) {
return createPropertyLocation(this, name);
}
+
+ T child = getChildTree(name);
+ if (child != null) {
+ return createNodeLocation(child);
+ }
return new NullLocation(this, name);
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/core/TreeImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/core/TreeImpl.java
index 8bda9f2..efb5ec9 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/core/TreeImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/core/TreeImpl.java
@@ -91,7 +91,7 @@ public class TreeImpl implements Tree {
this.root = checkNotNull(root);
this.parent = checkNotNull(parent);
this.name = checkNotNull(name);
- this.nodeBuilder = parent.nodeBuilder.child(name);
+ this.nodeBuilder = parent.nodeBuilder.getChildNode(name);
this.pendingMoves = checkNotNull(pendingMoves);
}
@@ -187,11 +187,7 @@ public class TreeImpl implements Tree {
checkNotNull(name);
enter();
TreeImpl child = internalGetChild(name);
- if (child != null && canRead(child)) {
- return child;
- } else {
- return null;
- }
+ return canRead(child) ? child : null;
}
@Override
@@ -438,11 +434,8 @@ public class TreeImpl implements Tree {
TreeImpl child = this;
for (String name : elements(path)) {
child = child.internalGetChild(name);
- if (child == null) {
- return null;
- }
}
- return (canRead(child)) ? child : null;
+ return canRead(child) ? child : null;
}
/**
@@ -526,9 +519,7 @@ public class TreeImpl implements Tree {
}
private TreeImpl internalGetChild(String childName) {
- return nodeBuilder.hasChildNode(childName)
- ? new TreeImpl(root, this, childName, pendingMoves)
- : null;
+ return new TreeImpl(root, this, childName, pendingMoves);
}
private PropertyState internalGetProperty(String propertyName) {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-766_6fc5ea9d.diff |
bugs-dot-jar_data_OAK-1460_f1ba7a42 | ---
BugID: OAK-1460
Summary: ":childOrder out of sync when node is made orderable concurrently"
Description: The ChildOrderConflictHandler does not merge the :childOrder when an
addExistingProperty conflict occurs.
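The idea of the fix, as a standalone sketch using plain Java collections instead of Oak's PropertyBuilder API: keep the order of "theirs" and append any child names from "ours" that are not present yet.
{code}
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class ChildOrderMergeSketch {
    static List<String> merge(List<String> ours, List<String> theirs) {
        Set<String> seen = new HashSet<>(theirs);
        List<String> merged = new ArrayList<>(theirs);
        for (String ourChild : ours) {
            if (seen.add(ourChild)) { // append only names not already in "theirs"
                merged.add(ourChild);
            }
        }
        return merged;
    }
}
{code}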
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/commit/ChildOrderConflictHandler.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/commit/ChildOrderConflictHandler.java
index bf31922..c4ea380 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/commit/ChildOrderConflictHandler.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/commit/ChildOrderConflictHandler.java
@@ -45,7 +45,8 @@ public class ChildOrderConflictHandler extends ConflictHandlerWrapper {
if (isChildOrderProperty(ours)) {
// two sessions concurrently called orderBefore() on a Tree
// that was previously unordered.
- return Resolution.THEIRS;
+ merge(parent, ours, theirs);
+ return Resolution.MERGED;
} else {
return handler.addExistingProperty(parent, ours, theirs);
}
@@ -75,11 +76,11 @@ public class ChildOrderConflictHandler extends ConflictHandlerWrapper {
}
private static void merge(NodeBuilder parent, PropertyState ours, PropertyState theirs) {
- Set<String> theirOrder = Sets.newHashSet(theirs.getValue(Type.STRINGS));
- PropertyBuilder<String> merged = PropertyBuilder.array(Type.STRING).assignFrom(theirs);
+ Set<String> theirOrder = Sets.newHashSet(theirs.getValue(Type.NAMES));
+ PropertyBuilder<String> merged = PropertyBuilder.array(Type.NAME).assignFrom(theirs);
// Append child node names from ours that are not in theirs
- for (String ourChild : ours.getValue(Type.STRINGS)) {
+ for (String ourChild : ours.getValue(Type.NAMES)) {
if (!theirOrder.contains(ourChild)) {
merged.addValue(ourChild);
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1460_f1ba7a42.diff |
bugs-dot-jar_data_OAK-3930_b939aa6e | ---
BugID: OAK-3930
Summary: Sysview import of single valued mv property creates sv property
Description: |-
See test in filevault [0].
It imports a multi-value property that has only one value, via [1]. The same test succeeds in Jackrabbit 2.0 but fails in Oak 1.3.14.
[0] https://github.com/apache/jackrabbit-filevault/blob/jackrabbit-filevault-3.1.26/vault-core/src/test/java/org/apache/jackrabbit/vault/packaging/integration/TestUserContentPackage.java#L297-L326
[1] https://github.com/apache/jackrabbit-filevault/blob/jackrabbit-filevault-3.1.26/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/io/JcrSysViewTransformer.java#L146-L148
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/xml/SysViewImportHandler.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/xml/SysViewImportHandler.java
index d9cbf9c..121511a 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/xml/SysViewImportHandler.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/xml/SysViewImportHandler.java
@@ -277,7 +277,8 @@ class SysViewImportHandler extends TargetImportHandler {
PropInfo prop = new PropInfo(
currentPropName == null ? null : currentPropName.getRepoQualifiedName(),
currentPropType,
- currentPropValues);
+ currentPropValues,
+ currentPropMultipleStatus);
state.props.add(prop);
}
// reset temp fields
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3930_b939aa6e.diff |
bugs-dot-jar_data_OAK-2047_a0a495f0 | ---
BugID: OAK-2047
Summary: Missing privileges after repository upgrade
Description: "After upgrading from Jackrabbit classic all Oak specific privileges
are missing (rep:userManagement, rep:readNodes, rep:readProperties, rep:addProperties,\nrep:alterProperties,
rep:removeProperties, rep:indexDefinitionManagement).\n\nThe reason seems to be
that the {{PrivilegeInitializer}} is not run during upgrade. "
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
index 27e82ab..40b709f 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
@@ -65,6 +65,7 @@ import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_I
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_IS_QUERYABLE;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_IS_QUERY_ORDERABLE;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_NODE_TYPES;
+import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.JCR_ALL;
import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.NT_REP_PRIVILEGE;
import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.NT_REP_PRIVILEGES;
import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.REP_AGGREGATES;
@@ -267,7 +268,7 @@ public class RepositoryUpgrade {
Map<Integer, String> idxToPrefix = newHashMap();
copyNamespaces(builder, uriToPrefix, idxToPrefix);
copyNodeTypes(builder, uriToPrefix.inverse());
- copyPrivileges(builder);
+ copyCustomPrivileges(builder);
// Triggers compilation of type information, which we need for
// the type predicates used by the bulk copy operations below.
@@ -466,7 +467,7 @@ public class RepositoryUpgrade {
}
@SuppressWarnings("deprecation")
- private void copyPrivileges(NodeBuilder root) throws RepositoryException {
+ private void copyCustomPrivileges(NodeBuilder root) {
PrivilegeRegistry registry = source.getPrivilegeRegistry();
NodeBuilder privileges = root.child(JCR_SYSTEM).child(REP_PRIVILEGES);
privileges.setProperty(JCR_PRIMARYTYPE, NT_REP_PRIVILEGES, NAME);
@@ -476,6 +477,12 @@ public class RepositoryUpgrade {
logger.info("Copying registered privileges");
for (Privilege privilege : registry.getRegisteredPrivileges()) {
String name = privilege.getName();
+ if (PrivilegeBits.BUILT_IN.containsKey(name) || JCR_ALL.equals(name)) {
+ // Ignore built in privileges as those have been installed by
+ // the PrivilegesInitializer already
+ continue;
+ }
+
NodeBuilder def = privileges.child(name);
def.setProperty(JCR_PRIMARYTYPE, NT_REP_PRIVILEGE, NAME);
@@ -510,7 +517,7 @@ public class RepositoryUpgrade {
}
}
- private PrivilegeBits resolvePrivilegeBits(
+ private static PrivilegeBits resolvePrivilegeBits(
NodeBuilder privileges, String name) {
NodeBuilder def = privileges.getChildNode(name);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2047_a0a495f0.diff |
bugs-dot-jar_data_OAK-2174_5931a4a7 | ---
BugID: OAK-2174
Summary: Non-blocking reindexing doesn't finish properly
Description: The non blocking reindexer needs to run at least 2 cycles before setting
the index definition back to synchronous mode. Currently it is too eager to mark
the status as 'done' which confuses the _PropertyIndexAsyncReindex_ mbean into thinking
the indexing is over and so skipping the final round that is supposed to do the
switch back to sync mode.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/Oak.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/Oak.java
index 04020c1..9399ae9 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/Oak.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/Oak.java
@@ -49,6 +49,7 @@ import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.io.Closer;
+
import org.apache.jackrabbit.oak.api.ContentRepository;
import org.apache.jackrabbit.oak.api.ContentSession;
import org.apache.jackrabbit.oak.api.Root;
@@ -60,6 +61,7 @@ import org.apache.jackrabbit.oak.management.RepositoryManager;
import org.apache.jackrabbit.oak.plugins.commit.ConflictHook;
import org.apache.jackrabbit.oak.plugins.index.AsyncIndexUpdate;
import org.apache.jackrabbit.oak.plugins.index.CompositeIndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateProvider;
import org.apache.jackrabbit.oak.plugins.index.property.jmx.PropertyIndexAsyncReindex;
@@ -528,11 +530,11 @@ public class Oak {
task.getIndexStats(), IndexStatsMBean.TYPE, name));
PropertyIndexAsyncReindex asyncPI = new PropertyIndexAsyncReindex(
- new AsyncIndexUpdate("async-reindex", store, indexEditors,
- true), getExecutor()
- );
- regs.add(registerMBean(whiteboard, PropertyIndexAsyncReindexMBean.class,
- asyncPI, PropertyIndexAsyncReindexMBean.TYPE, name));
+ new AsyncIndexUpdate(IndexConstants.ASYNC_REINDEX_VALUE,
+ store, indexEditors, true), getExecutor());
+ regs.add(registerMBean(whiteboard,
+ PropertyIndexAsyncReindexMBean.class, asyncPI,
+ PropertyIndexAsyncReindexMBean.TYPE, name));
}
regs.add(registerMBean(whiteboard, QueryEngineSettingsMBean.class,
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
index c8879d0..ea8dd80 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
@@ -357,29 +357,30 @@ public class AsyncIndexUpdate implements Runnable {
} else {
postAsyncRunStatsStatus(indexStats);
}
- } else if (switchOnSync) {
- log.debug("No changes detected after diff; will try to"
- + " switch to synchronous updates on {}",
- reindexedDefinitions);
-
- // no changes after diff, switch to sync on the async defs
- for (String path : reindexedDefinitions) {
- NodeBuilder c = builder;
- for (String p : elements(path)) {
- c = c.getChildNode(p);
- }
- if (c.exists() && !c.getBoolean(REINDEX_PROPERTY_NAME)) {
- c.removeProperty(ASYNC_PROPERTY_NAME);
+ } else {
+ if (switchOnSync) {
+ log.debug(
+ "No changes detected after diff; will try to switch to synchronous updates on {}",
+ reindexedDefinitions);
+
+ // no changes after diff, switch to sync on the async defs
+ for (String path : reindexedDefinitions) {
+ NodeBuilder c = builder;
+ for (String p : elements(path)) {
+ c = c.getChildNode(p);
+ }
+ if (c.exists() && !c.getBoolean(REINDEX_PROPERTY_NAME)) {
+ c.removeProperty(ASYNC_PROPERTY_NAME);
+ }
}
+ reindexedDefinitions.clear();
}
- reindexedDefinitions.clear();
+ postAsyncRunStatsStatus(indexStats);
}
mergeWithConcurrencyCheck(builder, beforeCheckpoint, callback.lease);
} finally {
callback.close();
}
-
- postAsyncRunStatsStatus(indexStats);
}
private void mergeWithConcurrencyCheck(
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2174_5931a4a7.diff |
bugs-dot-jar_data_OAK-548_717186d6 | ---
BugID: OAK-548
Summary: Moving larger trees cause OutOfMemoryError
Description: "{{LargeMoveTest.moveTest}} test runs out of heap space when moving roughly
100000 nodes (128M heap):\n\n{code}\njava.lang.OutOfMemoryError: Java heap space\n\tat
java.util.Arrays.copyOf(Arrays.java:2786)\n\tat java.lang.StringCoding.safeTrim(StringCoding.java:64)\n\tat
java.lang.StringCoding.access$300(StringCoding.java:34)\n\tat java.lang.StringCoding$StringEncoder.encode(StringCoding.java:251)\n\tat
java.lang.StringCoding.encode(StringCoding.java:272)\n\tat java.lang.String.getBytes(String.java:946)\n\tat
org.apache.jackrabbit.mk.util.IOUtils.writeString(IOUtils.java:84)\n\tat org.apache.jackrabbit.mk.store.BinaryBinding.writeMap(BinaryBinding.java:98)\n\tat
org.apache.jackrabbit.mk.model.ChildNodeEntriesMap.serialize(ChildNodeEntriesMap.java:196)\n\tat
org.apache.jackrabbit.mk.model.AbstractNode.serialize(AbstractNode.java:169)\n\tat
org.apache.jackrabbit.mk.persistence.InMemPersistence.writeNode(InMemPersistence.java:76)\n\tat
org.apache.jackrabbit.mk.store.DefaultRevisionStore.putNode(DefaultRevisionStore.java:276)\n\tat
org.apache.jackrabbit.mk.model.StagedNodeTree$StagedNode.persist(StagedNodeTree.java:568)\n\tat
org.apache.jackrabbit.mk.model.StagedNodeTree$StagedNode.persist(StagedNodeTree.java:563)\n\tat
org.apache.jackrabbit.mk.model.StagedNodeTree$StagedNode.persist(StagedNodeTree.java:563)\n\tat
org.apache.jackrabbit.mk.model.StagedNodeTree$StagedNode.persist(StagedNodeTree.java:563)\n\tat
org.apache.jackrabbit.mk.model.StagedNodeTree$StagedNode.persist(StagedNodeTree.java:563)\n\tat
org.apache.jackrabbit.mk.model.StagedNodeTree$StagedNode.persist(StagedNodeTree.java:563)\n\tat
org.apache.jackrabbit.mk.model.StagedNodeTree$StagedNode.persist(StagedNodeTree.java:563)\n\tat
org.apache.jackrabbit.mk.model.StagedNodeTree.persist(StagedNodeTree.java:80)\n\tat
org.apache.jackrabbit.mk.model.CommitBuilder.doCommit(CommitBuilder.java:126)\n\tat
org.apache.jackrabbit.mk.model.CommitBuilder.doCommit(CommitBuilder.java:94)\n\tat
org.apache.jackrabbit.mk.core.MicroKernelImpl.commit(MicroKernelImpl.java:496)\n\tat
org.apache.jackrabbit.oak.kernel.KernelNodeStoreBranch.commit(KernelNodeStoreBranch.java:178)\n\tat
org.apache.jackrabbit.oak.kernel.KernelNodeStoreBranch.setRoot(KernelNodeStoreBranch.java:78)\n\tat
org.apache.jackrabbit.oak.core.RootImpl.purgePendingChanges(RootImpl.java:355)\n\tat
org.apache.jackrabbit.oak.core.RootImpl.commit(RootImpl.java:234)\n\tat org.apache.jackrabbit.oak.core.LargeMoveTest.moveTest(LargeMoveTest.java:78)\n\tat
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)\n\tat
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)\n{code}\n\nThis
is caused by the inefficient rebase implementation in oak-core as discussed at length
in OAK-464."
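The patch below bounds the recursion depth of DiffBuilder#toJson. A standalone sketch of the same idea, using a hypothetical tree type rather than Oak's NodeState (a negative depth means unbounded, 0 stops descending):
{code}
import java.util.List;

class DepthLimitedToJsonSketch {
    record TreeNode(String name, List<TreeNode> children) {} // hypothetical, not Oak's NodeState

    static void toJson(StringBuilder buff, TreeNode node, int depth) {
        buff.append('{');
        if (depth != 0) {
            boolean first = true;
            for (TreeNode child : node.children()) {
                if (!first) {
                    buff.append(',');
                }
                first = false;
                buff.append('"').append(child.name()).append("\":");
                toJson(buff, child, depth < 0 ? depth : depth - 1);
            }
        }
        buff.append('}');
    }
}
{code}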
diff --git a/oak-mk/src/main/java/org/apache/jackrabbit/mk/model/tree/DiffBuilder.java b/oak-mk/src/main/java/org/apache/jackrabbit/mk/model/tree/DiffBuilder.java
index f023fb3..d450b4f 100644
--- a/oak-mk/src/main/java/org/apache/jackrabbit/mk/model/tree/DiffBuilder.java
+++ b/oak-mk/src/main/java/org/apache/jackrabbit/mk/model/tree/DiffBuilder.java
@@ -60,7 +60,7 @@ public class DiffBuilder {
if (before == null) {
if (after != null) {
buff.tag('+').key(path).object();
- toJson(buff, after);
+ toJson(buff, after, depth);
return buff.endObject().newline().toString();
} else {
// path doesn't exist in the specified revisions
@@ -117,7 +117,7 @@ public class DiffBuilder {
addedNodes.put(after, p);
buff.tag('+').
key(p).object();
- toJson(buff, after);
+ toJson(buff, after, depth);
buff.endObject().newline();
}
}
@@ -215,7 +215,7 @@ public class DiffBuilder {
if (p.startsWith(pathFilter)) {
buff.tag('+').
key(p).object();
- toJson(buff, after);
+ toJson(buff, after, depth);
buff.endObject().newline();
}
}
@@ -267,14 +267,16 @@ public class DiffBuilder {
return buff.toString();
}
- private void toJson(JsopBuilder builder, NodeState node) {
+ private void toJson(JsopBuilder builder, NodeState node, int depth) {
for (PropertyState property : node.getProperties()) {
builder.key(property.getName()).encodedValue(property.getEncodedValue());
}
- for (ChildNode entry : node.getChildNodeEntries(0, -1)) {
- builder.key(entry.getName()).object();
- toJson(builder, entry.getNode());
- builder.endObject();
+ if (depth != 0) {
+ for (ChildNode entry : node.getChildNodeEntries(0, -1)) {
+ builder.key(entry.getName()).object();
+ toJson(builder, entry.getNode(), depth < 0 ? depth : depth - 1);
+ builder.endObject();
+ }
}
}
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-548_717186d6.diff |
bugs-dot-jar_data_OAK-2355_74f22886 | ---
BugID: OAK-2355
Summary: TarMK Cold Standby expose standby read timeout value
Description: |+
Running into a read timeout on the standby instance logs some uncaught error:
{code}
org.apache.jackrabbit.oak.plugins.segment.standby.client.SegmentLoaderHandler Exception caught, closing channel.
io.netty.handler.timeout.ReadTimeoutException: null
{code}
I'm not sure how/if I need to fix this; the sync process will pick up again. But we can expose the timeout value so that, if the network connection is known to be poor, a client can increase the timeout to work around this issue.
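Based on the constructor visible in the diff below, a client on a known-slow link can pass a larger read timeout explicitly (host, port and the 60s value are illustrative):
{code}
import javax.net.ssl.SSLException;

import org.apache.jackrabbit.oak.plugins.segment.SegmentStore;
import org.apache.jackrabbit.oak.plugins.segment.standby.client.StandbyClient;

class StandbyTimeoutExample {
    static StandbyClient create(SegmentStore store) throws SSLException {
        // 60s read timeout instead of the default; "secure" left disabled.
        return new StandbyClient("primary.example.org", 8023, store, false, 60000);
    }
}
{code}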
diff --git a/oak-tarmk-standby/src/main/java/org/apache/jackrabbit/oak/plugins/segment/standby/client/StandbyClient.java b/oak-tarmk-standby/src/main/java/org/apache/jackrabbit/oak/plugins/segment/standby/client/StandbyClient.java
index bdcf513..79b348f 100644
--- a/oak-tarmk-standby/src/main/java/org/apache/jackrabbit/oak/plugins/segment/standby/client/StandbyClient.java
+++ b/oak-tarmk-standby/src/main/java/org/apache/jackrabbit/oak/plugins/segment/standby/client/StandbyClient.java
@@ -81,7 +81,7 @@ public final class StandbyClient implements ClientStandbyStatusMBean, Runnable,
private final AtomicBoolean running = new AtomicBoolean(true);
public StandbyClient(String host, int port, SegmentStore store) throws SSLException {
- this(host, port, store, false, 5000);
+ this(host, port, store, false, 10000);
}
public StandbyClient(String host, int port, SegmentStore store, boolean secure, int readTimeoutMs) throws SSLException {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2355_74f22886.diff |
bugs-dot-jar_data_OAK-395_4ed7bc8e | ---
BugID: OAK-395
Summary: Inconsistency in Node#setProperty in case of null value
Description: "Setting a null value to a single valued property will result\nin 'null'
being returned while executing the same on a multivalued\nproperty will return the
removed property.\n\njr2 returned the removed property in both cases as far as i
\nremember and i would suggest that we don't change that behavior. in\nparticular
since the specification IMO doesn't allow to return\nnull-values for these methods."
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java
index c3ff7d6..c1114a7 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java
@@ -302,19 +302,20 @@ public class NodeImpl extends ItemImpl<NodeDelegate> implements Node {
* @see Node#setProperty(String, javax.jcr.Value, int)
*/
@Override
- @CheckForNull
+ @Nonnull
public Property setProperty(final String jcrName, final Value value, final int type)
throws RepositoryException {
checkStatus();
- return sessionDelegate.perform(new SessionOperation<PropertyImpl>() {
+ return sessionDelegate.perform(new SessionOperation<Property>() {
@Override
- public PropertyImpl perform() throws RepositoryException {
- String oakName = sessionDelegate.getOakPathOrThrow(jcrName);
+ public Property perform() throws RepositoryException {
if (value == null) {
- dlg.removeProperty(oakName);
- return null;
+ Property property = getProperty(jcrName);
+ property.remove();
+ return property;
} else {
+ String oakName = sessionDelegate.getOakPathOrThrow(jcrName);
int targetType = getTargetType(value, type);
Value targetValue =
ValueHelper.convert(value, targetType, getValueFactory());
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-395_4ed7bc8e.diff |
bugs-dot-jar_data_OAK-678_6c54045d | ---
BugID: OAK-678
Summary: Access to disconnected MemoryNodeBuilder should throw IllegalStateException
Description:
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
index d5712ba..411c5fc 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
@@ -190,13 +190,18 @@ public class MemoryNodeBuilder implements NodeBuilder {
/**
* Determine whether this child exists at its direct parent.
- * @return {@code true} iff this child exists at its direct parent.
+ * @return {@code true} iff this child exists at its direct parent.
*/
private boolean exists() {
- // No need to check the base state if write state is null. The fact that we have this
- // builder instance proofs that this child existed at some point as it must have been
- // retrieved from the base state.
- return isRoot() || parent.writeState == null || parent.writeState.hasChildNode(name);
+ if (isRoot()) {
+ return true;
+ }
+ else if (parent.writeState == null) {
+ return parent.baseState != null && parent.baseState.hasChildNode(name);
+ }
+ else {
+ return parent.writeState.hasChildNode(name);
+ }
}
/**
@@ -206,18 +211,19 @@ public class MemoryNodeBuilder implements NodeBuilder {
private boolean updateReadState() {
if (revision != root.revision) {
assert(!isRoot()); // root never gets here since revision == root.revision
- if (!exists()) {
- return false;
- }
- parent.updateReadState();
- // The builder could have been reset, need to re-get base state
- baseState = parent.getBaseState(name);
+ if (parent.updateReadState() && exists()) {
+ // The builder could have been reset, need to re-get base state
+ baseState = parent.getBaseState(name);
- // ... same for the write state
- writeState = parent.getWriteState(name);
+ // ... same for the write state
+ writeState = parent.getWriteState(name);
+
+ revision = root.revision;
+ return true;
+ }
- revision = root.revision;
+ return false;
}
return writeState != null || baseState != null;
}
@@ -238,8 +244,8 @@ public class MemoryNodeBuilder implements NodeBuilder {
private MutableNodeState write(long newRevision, boolean reconnect) {
// make sure that all revision numbers up to the root gets updated
if (!isRoot()) {
- checkState(reconnect || exists(), "This node has been removed");
parent.write(newRevision, reconnect);
+ checkState(reconnect || exists(), "This node has been removed");
}
if (writeState == null || revision != root.revision) {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-678_6c54045d.diff |
bugs-dot-jar_data_OAK-447_00df38d2 | ---
BugID: OAK-447
Summary: Adding a node with the name of a removed node can lead to an inconsistent
hierarchy of node builders
Description:
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
index 6600cc7..68cc5f2 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
@@ -422,7 +422,9 @@ public class MemoryNodeBuilder implements NodeBuilder {
read(); // shortcut when dealing with a read-only child node
if (baseState != null
&& baseState.hasChildNode(name)
- && (writeState == null || !writeState.nodes.containsKey(name))) {
+ && (writeState == null
+ || (writeState.base == baseState
+ && !writeState.nodes.containsKey(name)))) {
return createChildBuilder(name);
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-447_00df38d2.diff |
bugs-dot-jar_data_OAK-2388_487de751 | ---
BugID: OAK-2388
Summary: Possibility of overflow in file length calculation
Description: |-
In OakDirectory the length of a file is calculated in following way
{code:title=OakDirectory|linenumbers=true}
public OakIndexFile(String name, NodeBuilder file) {
...
this.blobSize = determineBlobSize(file);
this.blob = new byte[blobSize];
PropertyState property = file.getProperty(JCR_DATA);
if (property != null && property.getType() == BINARIES) {
this.data = newArrayList(property.getValue(BINARIES));
} else {
this.data = newArrayList();
}
this.length = data.size() * blobSize;
if (!data.isEmpty()) {
Blob last = data.get(data.size() - 1);
this.length -= blobSize - last.length();
}
{code}
In the above calculation it is possible to have an overflow in
bq. this.length = data.size() * blobSize;
because the product of two ints is evaluated as an int and can wrap around [1].
[1] http://stackoverflow.com/questions/12861893/casting-result-of-multiplication-two-positive-integers-to-long-is-negative-value
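A quick, self-contained demonstration of the overflow (the 32k blob size is only an assumption for illustration; the exact value doesn't matter for the point):
{code}
class OverflowDemo {
    public static void main(String[] args) {
        int blobSize = 32 * 1024;
        int count = 70_000; // number of blobs backing the file

        System.out.println(count * blobSize);        // int multiplication overflows to a negative value
        System.out.println((long) count * blobSize); // widened first: 2293760000
    }
}
{code}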
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
index 8e57339..8256394 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
@@ -187,7 +187,7 @@ class OakDirectory extends Directory {
this.data = newArrayList();
}
- this.length = data.size() * blobSize;
+ this.length = (long)data.size() * blobSize;
if (!data.isEmpty()) {
Blob last = data.get(data.size() - 1);
this.length -= blobSize - last.length();
@@ -253,7 +253,9 @@ class OakDirectory extends Directory {
checkPositionIndexes(offset, offset + len, checkNotNull(b).length);
if (len < 0 || position + len > length) {
- throw new IOException("Invalid byte range request");
+ String msg = String.format("Invalid byte range request [%s] : position : %d, length : " +
+ "%d, len : %d", name, position, length, len);
+ throw new IOException(msg);
}
int i = (int) (position / blobSize);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2388_487de751.diff |
bugs-dot-jar_data_OAK-1184_f72dd8d1 | ---
BugID: OAK-1184
Summary: 'Uploading large number of files to single folder fails. '
Description: "Repository: OAK with TarPM \nUpload is successful till 254 files and
it started failing afterwards with exception in server logs. \n[1] \n{code}\n14.11.2013
12:36:34.608 *ERROR* [10.40.146.206 [1384412794576] POST /content/dam/cq9032/./Coconut-5mb-110.jpg
HTTP/1.1] org.apache.sling.servlets.post.impl.operations.ModifyOperation Exception
during response processing.\njava.lang.IllegalStateException: null\n\tat com.google.common.base.Preconditions.checkState(Preconditions.java:133)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.writeRecordId(SegmentWriter.java:259)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.writeListBucket(SegmentWriter.java:346)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.writeList(SegmentWriter.java:508)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.writeProperty(SegmentWriter.java:669)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.writeNode(SegmentWriter.java:847)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter$3.childNodeChanged(SegmentWriter.java:806)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState.compareAgainstBaseState(ModifiedNodeState.java:387)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.writeNode(SegmentWriter.java:797)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter$3.childNodeChanged(SegmentWriter.java:806)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState.compareAgainstBaseState(ModifiedNodeState.java:387)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.writeNode(SegmentWriter.java:797)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter$3.childNodeChanged(SegmentWriter.java:806)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState.compareAgainstBaseState(ModifiedNodeState.java:387)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.writeNode(SegmentWriter.java:797)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentRootBuilder.getNodeState(SegmentRootBuilder.java:53)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentRootBuilder.getNodeState(SegmentRootBuilder.java:21)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentNodeStore.rebase(SegmentNodeStore.java:135)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentNodeStore.merge(SegmentNodeStore.java:113)
~[na:na]\n\tat org.apache.jackrabbit.oak.plugins.segment.SegmentNodeStoreService.merge(SegmentNodeStoreService.java:174)
~[na:na]\n\tat org.apache.jackrabbit.oak.core.AbstractRoot.commit(AbstractRoot.java:260)
~[na:na]\n\tat org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.commit(SessionDelegate.java:224)
~[na:na]\n\tat org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.commit(SessionDelegate.java:219)
~[na:na]\n\tat org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.commit(SessionDelegate.java:207)
~[na:na]\n\tat org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.save(SessionDelegate.java:332)
~[na:na]\n\tat org.apache.jackrabbit.oak.jcr.session.SessionImpl$8.perform(SessionImpl.java:399)
~[na:na]\n\tat org.apache.jackrabbit.oak.jcr.session.SessionImpl$8.perform(SessionImpl.java:396)
~[na:na]\n\tat org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.perform(SessionDelegate.java:128)
~[na:na]\n\tat org.apache.jackrabbit.oak.jcr.session.SessionImpl.perform(SessionImpl.java:117)
~[na:na]\n\tat org.apache.jackrabbit.oak.jcr.session.SessionImpl.save(SessionImpl.java:396)
~[na:na]\n\tat sun.reflect.GeneratedMethodAccessor18.invoke(Unknown Source) ~[na:na]\n\tat
sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source) ~[na:1.6.0_26]\n\tat
java.lang.reflect.Method.invoke(Unknown Source) ~[na:1.6.0_26]\n\tat org.apache.sling.jcr.base.SessionProxyHandler$SessionProxyInvocationHandler.invoke(SessionProxyHandler.java:109)
~[na:na]\n\tat $Proxy9.save(Unknown Source) ~[na:na]\n{code}"
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/ListRecord.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/ListRecord.java
index cc59163..fd6e50b 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/ListRecord.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/ListRecord.java
@@ -21,7 +21,7 @@ import static com.google.common.base.Preconditions.checkElementIndex;
class ListRecord extends Record {
- static final int LEVEL_SIZE = 1 << 8; // 256
+ static final int LEVEL_SIZE = Segment.SEGMENT_REFERENCE_LIMIT;
private final int size;
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1184_f72dd8d1.diff |
bugs-dot-jar_data_OAK-369_4e245a76 | ---
BugID: OAK-369
Summary: missing support for relative path consisting of parent-element
Description: |-
could not reopen OAK-95 -> cloning.
during testing of user-mgt api found that relpath consisting of a single parent element doesn't work (but used to):
{code}
@Test
public void getNode3() throws RepositoryException {
Node node = getNode("/foo");
Node root = node.getNode("..");
assertNotNull(root);
assertEquals("", root.getName());
assertTrue("/".equals(root.getPath()));
}
:
{code}
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeDelegate.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeDelegate.java
index 644096b..6fd4a9c 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeDelegate.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeDelegate.java
@@ -33,6 +33,7 @@ import com.google.common.collect.Iterators;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.TreeLocation;
+import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
/**
@@ -93,7 +94,7 @@ public class NodeDelegate extends ItemDelegate {
* no such property exists
*/
@CheckForNull
- public PropertyDelegate getProperty(String relPath) throws InvalidItemStateException {
+ public PropertyDelegate getProperty(String relPath) throws RepositoryException {
TreeLocation propertyLocation = getChildLocation(relPath);
PropertyState propertyState = propertyLocation.getProperty();
return propertyState == null
@@ -126,7 +127,7 @@ public class NodeDelegate extends ItemDelegate {
* no such node exists
*/
@CheckForNull
- public NodeDelegate getChild(String relPath) throws InvalidItemStateException {
+ public NodeDelegate getChild(String relPath) throws RepositoryException {
return create(sessionDelegate, getChildLocation(relPath));
}
@@ -241,8 +242,20 @@ public class NodeDelegate extends ItemDelegate {
// -----------------------------------------------------------< private >---
- private TreeLocation getChildLocation(String relPath) throws InvalidItemStateException {
- return getLocation().getChild(relPath);
+ private TreeLocation getChildLocation(String relPath) throws RepositoryException {
+ if (PathUtils.isAbsolute(relPath)) {
+ throw new RepositoryException("Not a relative path: " + relPath);
+ }
+
+ TreeLocation loc = getLocation();
+ for (String element : PathUtils.elements(relPath)) {
+ if (PathUtils.denotesParent(element)) {
+ loc = loc.getParent();
+ } else if (!PathUtils.denotesCurrent(element)) {
+ loc = loc.getChild(element);
+ } // else . -> skip to next element
+ }
+ return loc;
}
private Iterator<NodeDelegate> nodeDelegateIterator(
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-369_4e245a76.diff |
bugs-dot-jar_data_OAK-1093_531aca78 | ---
BugID: OAK-1093
Summary: IllegalArgumentException on Row.getValues()
Description: "Calling {{row.getValues()}} is throwing an {{IllegalArgumentException}}
when called on the {{QueryResult}} of the query {{SELECT properties FROM \\[nt:base\\]
WHERE \\[sling:resourceType\\]=\"cq/personalization/components/contextstores/surferinfo\"}}\n\n{quote}\njava.lang.IllegalArgumentException\n\tat
com.google.common.base.Preconditions.checkArgument(Preconditions.java:76)\n\tat
org.apache.jackrabbit.oak.plugins.value.ValueImpl.checkSingleValued(ValueImpl.java:85)\n\tat
org.apache.jackrabbit.oak.plugins.value.ValueImpl.<init>(ValueImpl.java:72)\n\tat
org.apache.jackrabbit.oak.plugins.value.ValueFactoryImpl.createValue(ValueFactoryImpl.java:95)\n\tat
org.apache.jackrabbit.oak.jcr.query.QueryResultImpl.createValue(QueryResultImpl.java:266)\n\tat
org.apache.jackrabbit.oak.jcr.query.RowImpl.getValues(RowImpl.java:99)\n\tat com.day.cq.analytics.sitecatalyst.impl.FrameworkComponentImpl.getListProperty(FrameworkComponentImpl.java:128)\n\tat
com.day.cq.analytics.sitecatalyst.impl.FrameworkComponentImpl.<init>(FrameworkComponentImpl.java:91)\n{quote}"
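The call path from the stack trace, as a minimal sketch (the statement is taken from the report, with the string literal quoted with single quotes as JCR-SQL2 requires; the Session is assumed to be available):
{code}
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.Value;
import javax.jcr.query.Query;
import javax.jcr.query.QueryManager;
import javax.jcr.query.Row;
import javax.jcr.query.RowIterator;

class RowValuesExample {
    static void dumpValues(Session session) throws RepositoryException {
        QueryManager qm = session.getWorkspace().getQueryManager();
        Query q = qm.createQuery(
                "SELECT properties FROM [nt:base] WHERE [sling:resourceType] = "
                        + "'cq/personalization/components/contextstores/surferinfo'",
                Query.JCR_SQL2);
        RowIterator rows = q.execute().getRows();
        while (rows.hasNext()) {
            Row row = rows.nextRow();
            Value[] values = row.getValues(); // threw IllegalArgumentException before the fix
        }
    }
}
{code}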
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/query/RowImpl.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/query/RowImpl.java
index e337f39..e50d30d 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/query/RowImpl.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/query/RowImpl.java
@@ -100,9 +100,9 @@ public class RowImpl implements Row {
int len = values.length;
Value[] v2 = new Value[values.length];
for (int i = 0; i < len; i++) {
- if(values[i].isArray()){
+ if (values[i] != null && values[i].isArray()) {
v2[i] = result.createValue(mvpToString(values[i]));
- }else{
+ } else {
v2[i] = result.createValue(values[i]);
}
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1093_531aca78.diff |
bugs-dot-jar_data_OAK-3020_147515ae | ---
BugID: OAK-3020
Summary: Async Update fails after IllegalArgumentException
Description: |-
The async index update can fail due to a mismatch between an index definition and the actual content. If that is the case, it seems that it can no longer make any progress. Instead it re-indexes the latest changes over and over again until it hits the problematic property.
Discussion at http://markmail.org/thread/42bixzkrkwv4s6tq
Stacktrace attached.
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java
index ea255df..2e60d8d 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java
@@ -461,6 +461,14 @@ public class LuceneIndexEditor implements IndexEditor, Aggregate.AggregateRoot {
PropertyState property,
String pname,
PropertyDefinition pd) throws CommitFailedException {
+ // Ignore and warn if property multi-valued as not supported
+ if (property.getType().isArray()) {
+ log.warn(
+ "Ignoring ordered property {} of type {} for path {} as multivalued ordered property not supported",
+ pname, Type.fromTag(property.getType().tag(), true), getPath());
+ return false;
+ }
+
int tag = property.getType().tag();
int idxDefinedTag = pd.getType();
// Try converting type to the defined type in the index definition
@@ -475,37 +483,35 @@ public class LuceneIndexEditor implements IndexEditor, Aggregate.AggregateRoot {
String name = FieldNames.createDocValFieldName(pname);
boolean fieldAdded = false;
- for (int i = 0; i < property.count(); i++) {
- Field f = null;
- try {
- if (tag == Type.LONG.tag()) {
- //TODO Distinguish fields which need to be used for search and for sort
- //If a field is only used for Sort then it can be stored with less precision
- f = new NumericDocValuesField(name, property.getValue(Type.LONG, i));
- } else if (tag == Type.DATE.tag()) {
- String date = property.getValue(Type.DATE, i);
- f = new NumericDocValuesField(name, FieldFactory.dateToLong(date));
- } else if (tag == Type.DOUBLE.tag()) {
- f = new DoubleDocValuesField(name, property.getValue(Type.DOUBLE, i));
- } else if (tag == Type.BOOLEAN.tag()) {
- f = new SortedDocValuesField(name,
- new BytesRef(property.getValue(Type.BOOLEAN, i).toString()));
- } else if (tag == Type.STRING.tag()) {
- f = new SortedDocValuesField(name,
- new BytesRef(property.getValue(Type.STRING, i)));
- }
+ Field f = null;
+ try {
+ if (tag == Type.LONG.tag()) {
+ //TODO Distinguish fields which need to be used for search and for sort
+ //If a field is only used for Sort then it can be stored with less precision
+ f = new NumericDocValuesField(name, property.getValue(Type.LONG));
+ } else if (tag == Type.DATE.tag()) {
+ String date = property.getValue(Type.DATE);
+ f = new NumericDocValuesField(name, FieldFactory.dateToLong(date));
+ } else if (tag == Type.DOUBLE.tag()) {
+ f = new DoubleDocValuesField(name, property.getValue(Type.DOUBLE));
+ } else if (tag == Type.BOOLEAN.tag()) {
+ f = new SortedDocValuesField(name,
+ new BytesRef(property.getValue(Type.BOOLEAN).toString()));
+ } else if (tag == Type.STRING.tag()) {
+ f = new SortedDocValuesField(name,
+ new BytesRef(property.getValue(Type.STRING)));
+ }
- if (f != null) {
- fields.add(f);
- fieldAdded = true;
- }
- } catch (Exception e) {
- log.warn(
- "Ignoring ordered property. Could not convert property {} of type {} to type " +
- "{} for path {}",
- pname, Type.fromTag(property.getType().tag(), false),
- Type.fromTag(tag, false), getPath(), e);
+ if (f != null) {
+ fields.add(f);
+ fieldAdded = true;
}
+ } catch (Exception e) {
+ log.warn(
+ "Ignoring ordered property. Could not convert property {} of type {} to type " +
+ "{} for path {}",
+ pname, Type.fromTag(property.getType().tag(), false),
+ Type.fromTag(tag, false), getPath(), e);
}
return fieldAdded;
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3020_147515ae.diff |
bugs-dot-jar_data_OAK-612_df9e6913 | ---
BugID: OAK-612
Summary: Calling addNode on a node that has orderable child nodes violates specification
Description: "it seems to me that the current behavior of Node.addNode for a node
that \nhas orderable child nodes violates the specification (section 23.3):\n\n{quote}\n23.3
Adding a New Child Node\nWhen a child node is added to a node that has orderable
child nodes\nit is added to the end of the list.\n{quote}\n\nhowever, the following
test will fail:\n\n{code}\n@Test\n public void testAddNode() throws Exception
{\n new TestContentLoader().loadTestContent(getAdminSession());\n\n Session
session = getAdminSession();\n Node test = session.getRootNode().addNode(\"test\",
\"test:orderableFolder\");\n assertTrue(test.getPrimaryNodeType().hasOrderableChildNodes());\n\n
\ Node n1 = test.addNode(\"a\");\n Node n2 = test.addNode(\"b\");\n
\ session.save();\n\n NodeIterator it = test.getNodes();\n assertEquals(\"a\",
it.nextNode().getName());\n assertEquals(\"b\", it.nextNode().getName());\n
\ }\n{code}"
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java
index 79e0eb3..c84f625 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java
@@ -274,6 +274,10 @@ public class NodeImpl<T extends NodeDelegate> extends ItemImpl<T> implements Nod
throw new ItemExistsException();
}
+ if (getPrimaryNodeType().hasOrderableChildNodes()) {
+ dlg.orderBefore(oakName, null);
+ }
+
NodeImpl<?> childNode = new NodeImpl<NodeDelegate>(added);
childNode.internalSetPrimaryType(ntName);
childNode.autoCreateItems();
@@ -1431,6 +1435,18 @@ public class NodeImpl<T extends NodeDelegate> extends ItemImpl<T> implements Nod
String jcrPrimaryType = sessionDelegate.getOakPath(Property.JCR_PRIMARY_TYPE);
Value value = sessionDelegate.getValueFactory().createValue(nodeTypeName, PropertyType.NAME);
dlg.setProperty(jcrPrimaryType, value);
+
+ if (nt.hasOrderableChildNodes()) {
+ // freeze child order with a call to orderBefore()
+ // only makes sense with a least two child nodes
+ Iterator<NodeDelegate> children = dlg.getChildren();
+ NodeDelegate child1 = children.hasNext() ? children.next() : null;
+ NodeDelegate child2 = children.hasNext() ? children.next() : null;
+ if (child1 != null && child2 != null) {
+ dlg.orderBefore(child1.getName(), child2.getName());
+ }
+ }
+
return null;
}
});
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-612_df9e6913.diff |
bugs-dot-jar_data_OAK-1054_0adf3a6e | ---
BugID: OAK-1054
Summary: Folder containing an admin user should not be removed
Description: |-
The action of removing a folder that contains the admin user should fail.
This is already the case when trying to remove the admin node itself.
Attaching a unit test.
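A sketch of the scenario, not the attached test (the folder path is hypothetical and depends on the configured user root and intermediate path):
{code}
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;

class RemoveAdminFolderExample {
    static void removeFolderContainingAdmin(Session session) throws RepositoryException {
        // Hypothetical path to an authorizable folder that contains the admin user.
        Node folder = session.getNode("/rep:security/rep:authorizables/rep:users/a");
        folder.remove();
        session.save(); // expected to fail with a constraint violation after the fix
    }
}
{code}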
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/UserValidator.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/UserValidator.java
index c731481..f4c736c 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/UserValidator.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/UserValidator.java
@@ -56,7 +56,7 @@ class UserValidator extends DefaultValidator implements UserConstants {
this.parentAfter = parentAfter;
this.provider = provider;
- authorizableType = UserUtil.getType(parentAfter);
+ authorizableType = (parentAfter == null) ? null : UserUtil.getType(parentAfter);
}
//----------------------------------------------------------< Validator >---
@@ -149,12 +149,17 @@ class UserValidator extends DefaultValidator implements UserConstants {
@Override
public Validator childNodeDeleted(String name, NodeState before) throws CommitFailedException {
- Tree node = parentBefore.getChild(name);
- if (isAdminUser(node)) {
- String msg = "The admin user cannot be removed.";
- throw constraintViolation(27, msg);
+ Tree tree = parentBefore.getChild(name);
+ AuthorizableType type = UserUtil.getType(tree);
+ if (type == AuthorizableType.USER || type == AuthorizableType.GROUP) {
+ if (isAdminUser(tree)) {
+ String msg = "The admin user cannot be removed.";
+ throw constraintViolation(27, msg);
+ }
+ return null;
+ } else {
+ return new VisibleValidator(new UserValidator(tree, null, provider), true, true);
}
- return null;
}
//------------------------------------------------------------< private >---
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1054_0adf3a6e.diff |
bugs-dot-jar_data_OAK-1244_b4a93c81 | ---
BugID: OAK-1244
Summary: Always create new UUID on ImportBehavior.IMPORT_UUID_CREATE_NEW
Description: |-
The implementation should create a new UUID for each referenceable node even if there is no existing node with that UUID. The spec says:
bq. Incoming nodes are assigned newly created identifiers upon addition to the workspace. As a result, identifier collisions never occur.
This will break backward compatibility, but is IMO the correct behavior and the only way to guarantee import of referenceable nodes does not fail in a concurrent import scenario. See OAK-1186 for more details.
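For illustration, a rough sketch of an import that relies on this behavior (the parent path and input stream are made-up; the JCR API calls are standard):

{code}
InputStream xml = new FileInputStream("/tmp/export.xml");   // assumed system-view export
session.importXML("/content/target", xml,                   // assumed target parent path
        ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW);
session.save();
// With IMPORT_UUID_CREATE_NEW every imported referenceable node gets a fresh jcr:uuid,
// so two sessions importing the same XML concurrently can never collide on identifiers.
{code}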
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/xml/ImporterImpl.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/xml/ImporterImpl.java
index bc2eb6b..f6d2c44 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/xml/ImporterImpl.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/xml/ImporterImpl.java
@@ -404,7 +404,12 @@ public class ImporterImpl implements Importer {
conflicting = currentStateIdManager.getTree(id);
}
- if (conflicting != null && conflicting.exists()) {
+ // resolve conflict if there is one or force
+ // conflict resolution when behavior is IMPORT_UUID_CREATE_NEW.
+ // the latter will always create a new UUID even if no
+ // conflicting node exists. see OAK-1244
+ if ((conflicting != null && conflicting.exists())
+ || uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW) {
// resolve uuid conflict
tree = resolveUUIDConflict(parent, conflicting, id, nodeInfo);
if (tree == null) {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1244_b4a93c81.diff |
bugs-dot-jar_data_OAK-2147_a1556c30 | ---
BugID: OAK-2147
Summary: "[Ordered Index] Indexing on large content is slow"
Description: |-
Indexing a large number of ordered properties is quite slow.
Explore ways of making it faster. The current skip list implementation uses 4 lanes with a probability of 10%. It should be made configurable and the defaults changed.
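The patch below wires both values to system properties; a small sketch of how the new defaults resolve (the property names are taken from the patch, the override values are only an example):

{code}
// -Doak.orderedIndex.prob=1 -Doak.orderedIndex.lanes=4 would restore the previous defaults (0.1, 4 lanes)
double probability = Integer.getInteger("oak.orderedIndex.prob", 3) / 10.0; // 0.3 unless overridden
int lanes = Integer.getInteger("oak.orderedIndex.lanes", 15);               // 15 unless overridden
System.out.println("skip list: " + lanes + " lanes, promotion probability " + probability);
{code}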
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedIndex.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedIndex.java
index ba92511..96f4cd3 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedIndex.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedIndex.java
@@ -136,12 +136,12 @@ public interface OrderedIndex {
* <dt>lane 3:</dt> <dd>0.1%</dd>
* </dl>
*/
- double DEFAULT_PROBABILITY = 0.1;
+ double DEFAULT_PROBABILITY = Integer.getInteger("oak.orderedIndex.prob", 3) / 10.0;
/**
* the number of lanes used in the SkipList
*/
- int LANES = 4;
+ int LANES = Integer.getInteger("oak.orderedIndex.lanes", 15);
/**
* Convenience Predicate that will force the implementor to expose what we're searching for
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java
index e039c0d..ce158a2 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java
@@ -23,6 +23,7 @@ import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_CONTE
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.Iterator;
@@ -51,7 +52,6 @@ import org.slf4j.LoggerFactory;
import com.google.common.base.Charsets;
import com.google.common.base.Strings;
-import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
@@ -867,10 +867,13 @@ public class OrderedContentMirrorStoreStrategy extends ContentMirrorStoreStrateg
LOG.debug("seek() - plain case");
lane = OrderedIndex.LANES - 1;
-
+ NodeBuilder currentNode = null;
do {
stillLaning = lane > 0;
- nextkey = getPropertyNext(index.getChildNode(currentKey), lane);
+ if (currentNode == null) {
+ currentNode = index.getChildNode(currentKey);
+ }
+ nextkey = getPropertyNext(currentNode, lane);
if ((Strings.isNullOrEmpty(nextkey) || !walkingPredicate.apply(nextkey)) && lane > 0) {
// if we're currently pointing to NIL or the next element does not fit the search
// but we still have lanes left, let's lower the lane;
@@ -880,6 +883,7 @@ public class OrderedContentMirrorStoreStrategy extends ContentMirrorStoreStrateg
found = nextkey;
} else {
currentKey = nextkey;
+ currentNode = null;
if (keepWalked && !Strings.isNullOrEmpty(currentKey)) {
for (int l = lane; l >= 0; l--) {
walkedLanes[l] = currentKey;
@@ -1072,12 +1076,18 @@ public class OrderedContentMirrorStoreStrategy extends ContentMirrorStoreStrateg
*/
static void setPropertyNext(@Nonnull final NodeBuilder node, final String... next) {
if (node != null && next != null) {
- String n1 = (next.length > 0) ? next[0] : "";
- String n2 = (next.length > 1) ? next[1] : "";
- String n3 = (next.length > 2) ? next[2] : "";
- String n4 = (next.length > 3) ? next[3] : "";
-
- node.setProperty(NEXT, ImmutableList.of(n1, n2, n3, n4), Type.STRINGS);
+ int len = next.length - 1;
+ for (; len >= 0; len--) {
+ if (next[len].length() != 0) {
+ break;
+ }
+ }
+ len++;
+ List<String> list = new ArrayList<String>(len);
+ for (int i = 0; i < len; i++) {
+ list.add(next[i]);
+ }
+ node.setProperty(NEXT, list, Type.STRINGS);
}
}
@@ -1102,7 +1112,7 @@ public class OrderedContentMirrorStoreStrategy extends ContentMirrorStoreStrateg
// content
LOG.debug("topping-up the number of lanes.");
List<String> vv = Lists.newArrayList(values);
- for (int i = vv.size(); i <= OrderedIndex.LANES; i++) {
+ for (int i = vv.size(); i < OrderedIndex.LANES; i++) {
vv.add("");
}
values = vv.toArray(new String[vv.size()]);
@@ -1151,7 +1161,10 @@ public class OrderedContentMirrorStoreStrategy extends ContentMirrorStoreStrateg
PropertyState ps = node.getProperty(NEXT);
if (ps != null) {
if (ps.isArray()) {
- next = ps.getValue(Type.STRING, Math.min(ps.count() - 1, lane));
+ int count = ps.count();
+ if (count > 0 && count > lane) {
+ next = ps.getValue(Type.STRING, lane);
+ }
} else {
next = ps.getValue(Type.STRING);
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2147_a1556c30.diff |
bugs-dot-jar_data_OAK-1168_c05cec12 | ---
BugID: OAK-1168
Summary: Invalid JCR paths not caught
Description: "{{NamePathMapper.getOakPath}} should return {{null}} when called with
an invalid JCR path like {{foo:bar]baz}}, but it doesn't. \n\n"
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/NamePathMapperImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/NamePathMapperImpl.java
index a257e69..9108966 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/NamePathMapperImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/NamePathMapperImpl.java
@@ -239,46 +239,49 @@ public class NamePathMapperImpl implements NamePathMapper {
int colon = -1; // index of the last colon in the path
switch (path.charAt(0)) {
- case '{': // possibly an expanded name
- case '[': // starts with an identifier
- case '.': // possibly "." or ".."
- case ':': // colon as the first character
- return true;
- case '/':
- if (length == 1) {
- return false; // the root path
- }
- slash = 0;
- break;
- }
-
- for (int i = 1; i < length; i++) {
- switch (path.charAt(i)) {
case '{': // possibly an expanded name
- case '[': // possibly an index
+ case '[': // starts with an identifier
+ case '.': // possibly "." or ".."
+ case ':': // colon as the first character
return true;
- case '.':
- if (i == slash + 1) {
- return true; // possibly "." or ".."
- }
- break;
- case ':':
- if (i == slash + 1 // "x/:y"
- || i == colon + i // "x::y"
- || colon > slash // "x:y:z"
- || i + 1 == length) { // "x:"
- return true;
- }
- colon = i;
- break;
case '/':
- if (i == slash + 1 // "x//y"
- || i == colon + i // "x:/y"
- || i + 1 == length) { // "x/"
- return true;
+ if (length == 1) {
+ return false; // the root path
}
- slash = i;
+ slash = 0;
break;
+ }
+
+ for (int i = 1; i < length; i++) {
+ switch (path.charAt(i)) {
+ case '{': // possibly an expanded name
+ case '[': // possibly an index
+ case ']': // illegal character if not part of index
+ case '|': // illegal character
+ case '*': // illegal character
+ return true;
+ case '.':
+ if (i == slash + 1) {
+ return true; // possibly "." or ".."
+ }
+ break;
+ case ':':
+ if (i == slash + 1 // "x/:y"
+ || i == colon + i // "x::y"
+ || colon > slash // "x:y:z"
+ || i + 1 == length) { // "x:"
+ return true;
+ }
+ colon = i;
+ break;
+ case '/':
+ if (i == slash + 1 // "x//y"
+ || i == colon + i // "x:/y"
+ || i + 1 == length) { // "x/"
+ return true;
+ }
+ slash = i;
+ break;
}
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1168_c05cec12.diff |
bugs-dot-jar_data_OAK-1111_459bd065 | ---
BugID: OAK-1111
Summary: Node#setProperty(String, Calendar) doesn't take time zone into account
Description: |-
Node#setProperty(String, Calendar) doesn't take the time zone into account.
It looks like the Calendar value is stored directly as a long, without taking the time zone into consideration.
Unit test to follow.
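The heart of the problem, as a small illustration (ISO8601 is the Jackrabbit jcr-commons utility; the printed values are examples): two Calendar instances for the same instant in different time zones have identical epoch millis, so persisting only getTimeInMillis() drops the zone, whereas the ISO-8601 string the patch stores instead keeps it.

{code}
Calendar utc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
Calendar berlin = Calendar.getInstance(TimeZone.getTimeZone("GMT+02:00"));
berlin.setTimeInMillis(utc.getTimeInMillis()); // same instant, different zone

System.out.println(utc.getTimeInMillis() == berlin.getTimeInMillis()); // true -> the zone is lost
System.out.println(ISO8601.format(utc));    // e.g. 2013-10-07T10:00:00.000Z
System.out.println(ISO8601.format(berlin)); // e.g. 2013-10-07T12:00:00.000+02:00
{code}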
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/PersistenceCopier.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/PersistenceCopier.java
index 23c905e..88030fe 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/PersistenceCopier.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/PersistenceCopier.java
@@ -292,7 +292,7 @@ class PersistenceCopier {
name, value.getBoolean(), Type.BOOLEAN);
case PropertyType.DATE:
return PropertyStates.createProperty(
- name, value.getCalendar().getTimeInMillis(), Type.DATE);
+ name, value.getString(), Type.DATE);
case PropertyType.DECIMAL:
return PropertyStates.createProperty(
name, value.getDecimal(), Type.DECIMAL);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1111_459bd065.diff |
bugs-dot-jar_data_OAK-379_621a5101 | ---
BugID: OAK-379
Summary: Query test failures on buildbot
Description: |-
Since revision 1398915 various query tests fail on [buildbot|http://ci.apache.org/builders/oak-trunk/builds/784]:
{code}
sql1(org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexQueryTest): No LoginModules configured for jackrabbit.oak
sql2(org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexQueryTest): No LoginModules configured for jackrabbit.oak
xpath(org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexQueryTest): No LoginModules configured for jackrabbit.oak
bindVariableTest(org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexQueryTest): No LoginModules configured for jackrabbit.oak
sql1(org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexQueryTest): No LoginModules configured for jackrabbit.oak
sql2(org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexQueryTest): No LoginModules configured for jackrabbit.oak
xpath(org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexQueryTest): No LoginModules configured for jackrabbit.oak
bindVariableTest(org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexQueryTest): No LoginModules configured for jackrabbit.oak
sql2Explain(org.apache.jackrabbit.oak.plugins.index.old.QueryTest): No LoginModules configured for jackrabbit.oak
sql1(org.apache.jackrabbit.oak.plugins.index.old.QueryTest): No LoginModules configured for jackrabbit.oak
xpath(org.apache.jackrabbit.oak.plugins.index.old.QueryTest): No LoginModules configured for jackrabbit.oak
bindVariableTest(org.apache.jackrabbit.oak.plugins.index.old.QueryTest): No LoginModules configured for jackrabbit.oak
sql1(org.apache.jackrabbit.oak.query.index.TraversingIndexQueryTest): No LoginModules configured for jackrabbit.oak
sql2(org.apache.jackrabbit.oak.query.index.TraversingIndexQueryTest): No LoginModules configured for jackrabbit.oak
xpath(org.apache.jackrabbit.oak.query.index.TraversingIndexQueryTest): No LoginModules configured for jackrabbit.oak
bindVariableTest(org.apache.jackrabbit.oak.query.index.TraversingIndexQueryTest): No LoginModules configured for jackrabbit.oak
{code}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/SecurityProviderImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/SecurityProviderImpl.java
index 5e199bf..6d6487b 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/SecurityProviderImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/SecurityProviderImpl.java
@@ -77,6 +77,11 @@ public class SecurityProviderImpl implements SecurityProvider {
loginConfig = new OakConfiguration();
Configuration.setConfiguration(loginConfig);
}
+ if (loginConfig.getAppConfigurationEntry(appName) == null) {
+ log.warn("Failed to retrieve login configuration for {}: using default configuration.", appName);
+ loginConfig = new OakConfiguration();
+ Configuration.setConfiguration(loginConfig);
+ }
return new LoginContextProviderImpl(appName, loginConfig, nodeStore, this);
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-379_621a5101.diff |
bugs-dot-jar_data_OAK-4423_08f0b280 | ---
BugID: OAK-4423
Summary: Possible overflow in checkpoint creation
Description: Creating a checkpoint with {{Long.MAX_VALUE}} lifetime will overflow
the value, allowing the store to immediately release the checkpoint.
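A minimal illustration of the overflow and of the guard the patch introduces:

{code}
long now = System.currentTimeMillis();
long lifetime = Long.MAX_VALUE;

long naive = now + lifetime;     // wraps around to a large negative value
System.out.println(naive < now); // true -> the checkpoint already looks expired

long guarded = (Long.MAX_VALUE - now > lifetime) ? now + lifetime : Long.MAX_VALUE;
System.out.println(guarded);     // Long.MAX_VALUE, i.e. effectively never expires
{code}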
diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentNodeStore.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentNodeStore.java
index 61d16aa..af2ade6 100644
--- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentNodeStore.java
+++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentNodeStore.java
@@ -404,7 +404,11 @@ public class SegmentNodeStore implements NodeStore, Observable {
}
NodeBuilder cp = checkpoints.child(name);
- cp.setProperty("timestamp", now + lifetime);
+ if (Long.MAX_VALUE - now > lifetime) {
+ cp.setProperty("timestamp", now + lifetime);
+ } else {
+ cp.setProperty("timestamp", Long.MAX_VALUE);
+ }
cp.setProperty("created", now);
NodeBuilder props = cp.setChildNode("properties");
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-4423_08f0b280.diff |
bugs-dot-jar_data_OAK-1959_93c1aa40 | ---
BugID: OAK-1959
Summary: AsyncIndexUpdate unable to cope with missing checkpoint ref
Description: |-
The async index uses a checkpoint reference stored under the _:async_ hidden node as a base for running the index diff.
It might happen that this reference is stale (pointing to a checkpoint that no longer exists), so the async indexer logs a warning that it will reindex everything and starts its work.
The trouble is with #mergeWithConcurrencyCheck, which does not cope well with this scenario: even if the referenced checkpoint is null, it throws a concurrent update exception, which is logged as the misleading debug message _Concurrent update detected in the async index update_.
Overall, the code appears to be stuck in an endless reindexing loop.
{code}
*WARN* [pool-9-thread-1] org.apache.jackrabbit.oak.plugins.index.AsyncIndexUpdate Failed to retrieve previously indexed checkpoint 569d8847-ebb6-4832-a55f-2b0b1a32ae71; re-running the initial async index update
*DEBUG* [pool-9-thread-1] org.apache.jackrabbit.oak.plugins.index.AsyncIndexUpdate Concurrent update detected in the async index update
{code}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
index 88eb0f7..6401f23 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
@@ -318,7 +318,7 @@ public class AsyncIndexUpdate implements Runnable {
throws CommitFailedException {
// check for concurrent updates by this async task
NodeState async = before.getChildNode(ASYNC);
- if (Objects.equal(checkpoint, async.getString(name))
+ if (checkpoint == null || Objects.equal(checkpoint, async.getString(name))
&& lease == async.getLong(name + "-lease")) {
return after;
} else {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1959_93c1aa40.diff |
bugs-dot-jar_data_OAK-847_65aa40dd | ---
BugID: OAK-847
Summary: Condition check broken in MemoryDocumentStore
Description: The Operation.CONTAINS_MAP_ENTRY condition check does not work correctly
in the MemoryDocumentStore and may report that the condition is met even when it is not.
diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MemoryDocumentStore.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MemoryDocumentStore.java
index e984585..ad581b7 100644
--- a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MemoryDocumentStore.java
+++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MemoryDocumentStore.java
@@ -178,14 +178,14 @@ public class MemoryDocumentStore implements DocumentStore {
return false;
}
} else {
- if (value instanceof java.util.Collection) {
- java.util.Collection<?> col = (java.util.Collection<?>) value;
+ if (value instanceof Map) {
+ Map map = (Map) value;
if (Boolean.TRUE.equals(op.value)) {
- if (!col.contains(kv[1])) {
+ if (!map.containsKey(kv[1])) {
return false;
}
} else {
- if (col.contains(kv[1])) {
+ if (map.containsKey(kv[1])) {
return false;
}
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-847_65aa40dd.diff |
bugs-dot-jar_data_OAK-846_7acb091a | ---
BugID: OAK-846
Summary: Branch conflicts not detected by MongoMK
Description: |-
MongoMK does not correctly detect conflicts when changes are committed into multiple branches concurrently and then merged back.
ConflictTest already covers conflict detection for non-branch commits and mixed branch/non-branch changes, but is missing tests for conflicting branches. I'll commit an ignored test to illustrate the problem.
diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/Collision.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/Collision.java
index 6165295..f9c1466 100644
--- a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/Collision.java
+++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/Collision.java
@@ -20,6 +20,8 @@ import java.util.Map;
import javax.annotation.Nonnull;
+import org.apache.jackrabbit.mk.api.MicroKernelException;
+import org.apache.jackrabbit.mongomk.DocumentStore.Collection;
import org.apache.jackrabbit.mongomk.util.Utils;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.slf4j.Logger;
@@ -29,7 +31,16 @@ import static com.google.common.base.Preconditions.checkNotNull;
/**
* A <code>Collision</code> happens when a commit modifies a node, which was
- * also modified in a branch commit, but the branch commit is not yet merged.
+ * also modified in another branch not visible to the current session. This
+ * includes the following situations:
+ * <ul>
+ * <li>Our commit goes to trunk and another session committed to a branch
+ * not yet merged back.</li>
+ * <li>Our commit goes to a branch and another session committed to trunk
+ * or some other branch.</li>
+ * </ul>
+ * Other collisions like concurrent commits to trunk are handled earlier and
+ * do not require collision marking. See {@link Commit#createOrUpdateNode()}.
*/
class Collision {
@@ -50,47 +61,115 @@ class Collision {
this.ourRev = checkNotNull(ourRev).toString();
}
- boolean mark(DocumentStore store) {
+ /**
+ * Marks the collision in the document store. Either our or their
+ * revision is annotated with a collision marker. Their revision is
+ * marked if it is not yet committed, otherwise our revision is marked.
+ *
+ * @param store the document store.
+ * @throws MicroKernelException if the mark operation fails.
+ */
+ void mark(DocumentStore store) throws MicroKernelException {
+ // first try to mark their revision
if (markCommitRoot(document, theirRev, store)) {
- return true;
+ return;
}
+ // their commit wins, we have to mark ourRev
+ Map<String, Object> newDoc = Utils.newMap();
+ Utils.deepCopyMap(document, newDoc);
+ MemoryDocumentStore.applyChanges(newDoc, ourOp);
+ if (!markCommitRoot(newDoc, ourRev, store)) {
+ throw new MicroKernelException("Unable to annotate our revision "
+ + "with collision marker. Our revision: " + ourRev
+ + ", document:\n" + Utils.formatDocument(newDoc));
+ }
+ }
+
+ /**
+ * Marks the commit root of the change to the given <code>document</code> in
+ * <code>revision</code>.
+ *
+ * @param document the MongoDB document.
+ * @param revision the revision of the commit to annotated with a collision
+ * marker.
+ * @param store the document store.
+ * @return <code>true</code> if the commit for the given revision was marked
+ * successfully; <code>false</code> otherwise.
+ */
+ private static boolean markCommitRoot(@Nonnull Map<String, Object> document,
+ @Nonnull String revision,
+ @Nonnull DocumentStore store) {
+ String p = Utils.getPathFromId((String) document.get(UpdateOp.ID));
+ String commitRootPath = null;
+ // first check if we can mark the commit with the given revision
@SuppressWarnings("unchecked")
Map<String, String> revisions = (Map<String, String>) document.get(UpdateOp.REVISIONS);
- if (revisions.containsKey(theirRev)) {
- String value = revisions.get(theirRev);
+ if (revisions != null && revisions.containsKey(revision)) {
+ String value = revisions.get(revision);
if ("true".equals(value)) {
- // their commit wins, we have to mark ourRev
- Map<String, Object> newDoc = Utils.newMap();
- Utils.deepCopyMap(document, newDoc);
- MemoryDocumentStore.applyChanges(newDoc, ourOp);
- if (markCommitRoot(newDoc, ourRev, store)) {
- return true;
+ // already committed
+ return false;
+ } else {
+ // node is also commit root, but not yet committed
+ // i.e. a branch commit, which is not yet merged
+ commitRootPath = p;
+ }
+ } else {
+ // next look at commit root
+ @SuppressWarnings("unchecked")
+ Map<String, Integer> commitRoots = (Map<String, Integer>) document.get(UpdateOp.COMMIT_ROOT);
+ if (commitRoots != null) {
+ Integer depth = commitRoots.get(revision);
+ if (depth != null) {
+ commitRootPath = PathUtils.getAncestorPath(p, PathUtils.getDepth(p) - depth);
+ } else {
+ throwNoCommitRootException(revision, document);
}
+ } else {
+ throwNoCommitRootException(revision, document);
}
}
+ // at this point we have a commitRootPath
+ UpdateOp op = new UpdateOp(commitRootPath,
+ Utils.getIdFromPath(commitRootPath), false);
+ document = store.find(Collection.NODES, op.getKey());
+ // check commit status of revision
+ if (isCommitted(revision, document)) {
+ return false;
+ }
+ op.setMapEntry(UpdateOp.COLLISIONS, revision, true);
+ document = store.createOrUpdate(DocumentStore.Collection.NODES, op);
+ // check again on old document right before our update was applied
+ if (isCommitted(revision, document)) {
+ return false;
+ }
+ // otherwise collision marker was set successfully
+ LOG.debug("Marked collision on: {} for {} ({})",
+ new Object[]{commitRootPath, p, revision});
return true;
}
-
- private static boolean markCommitRoot(@Nonnull Map<String, Object> document,
- @Nonnull String revision,
- @Nonnull DocumentStore store) {
+
+ private static void throwNoCommitRootException(@Nonnull String revision,
+ @Nonnull Map<String, Object> document)
+ throws MicroKernelException {
+ throw new MicroKernelException("No commit root for revision: "
+ + revision + ", document: " + Utils.formatDocument(document));
+ }
+
+ /**
+ * Returns <code>true</code> if the given <code>revision</code> is marked
+ * committed on the given <code>document</code>.
+ *
+ * @param revision the revision.
+ * @param document a MongoDB document.
+ * @return <code>true</code> if committed; <code>false</code> otherwise.
+ */
+ private static boolean isCommitted(String revision, Map<String, Object> document) {
@SuppressWarnings("unchecked")
- Map<String, Integer> commitRoots = (Map<String, Integer>) document.get(UpdateOp.COMMIT_ROOT);
- if (commitRoots != null) {
- Integer depth = commitRoots.get(revision);
- if (depth != null) {
- String p = Utils.getPathFromId((String) document.get(UpdateOp.ID));
- String commitRootPath = PathUtils.getAncestorPath(p, PathUtils.getDepth(p) - depth);
- UpdateOp op = new UpdateOp(commitRootPath,
- Utils.getIdFromPath(commitRootPath), false);
- op.setMapEntry(UpdateOp.COLLISIONS, revision, true);
- // TODO: detect concurrent commit of previously un-merged changes
- // TODO: check _commitRoot for revision is not 'true'
- store.createOrUpdate(DocumentStore.Collection.NODES, op);
- LOG.debug("Marked collision on: {} for {} ({})",
- new Object[]{commitRootPath, p, revision});
- return true;
- }
+ Map<String, String> revisions = (Map<String, String>) document.get(UpdateOp.REVISIONS);
+ if (revisions != null && revisions.containsKey(revision)) {
+ String value = revisions.get(revision);
+ return "true".equals(value);
}
return false;
}
diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/CollisionHandler.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/CollisionHandler.java
index 8695126..b311ec9 100644
--- a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/CollisionHandler.java
+++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/CollisionHandler.java
@@ -23,16 +23,16 @@ abstract class CollisionHandler {
static final CollisionHandler DEFAULT = new CollisionHandler() {
@Override
- void uncommittedModification(Revision uncommitted) {
+ void concurrentModification(Revision other) {
// do nothing
}
};
/**
- * Callback for an uncommitted modification in {@link Revision}
- * <code>uncommitted</code>.
+ * Callback for an concurrent modification in {@link Revision}
+ * <code>other</code>.
*
- * @param uncommitted the uncommitted revision of the change.
+ * @param other the revision of the concurrent change.
*/
- abstract void uncommittedModification(Revision uncommitted);
+ abstract void concurrentModification(Revision other);
}
diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/Commit.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/Commit.java
index 5922f7e..b565658 100644
--- a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/Commit.java
+++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/Commit.java
@@ -268,11 +268,11 @@ public class Commit {
Revision newestRev = mk.getNewestRevision(map, revision,
new CollisionHandler() {
@Override
- void uncommittedModification(Revision uncommitted) {
+ void concurrentModification(Revision other) {
if (collisions.get() == null) {
collisions.set(new ArrayList<Revision>());
}
- collisions.get().add(uncommitted);
+ collisions.get().add(other);
}
});
String conflictMessage = null;
@@ -296,7 +296,7 @@ public class Commit {
}
if (conflictMessage != null) {
conflictMessage += ", before\n" + revision +
- "; document:\n" + map.toString().replaceAll(", _", ",\n_").replaceAll("}, ", "},\n") +
+ "; document:\n" + Utils.formatDocument(map) +
",\nrevision order:\n" + mk.getRevisionComparator();
throw new MicroKernelException(conflictMessage);
}
@@ -306,11 +306,7 @@ public class Commit {
if (collisions.get() != null && isConflicting(map, op)) {
for (Revision r : collisions.get()) {
// mark collisions on commit root
- Collision c = new Collision(map, r, op, revision);
- boolean success = c.mark(store);
- if (!success) {
- // TODO: fail this commit
- }
+ new Collision(map, r, op, revision).mark(store);
}
}
}
diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MongoMK.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MongoMK.java
index a77ac51..57f1fcc 100644
--- a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MongoMK.java
+++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MongoMK.java
@@ -1117,7 +1117,7 @@ public class MongoMK implements MicroKernel {
*
* @param nodeMap the document
* @param changeRev the revision of the current change
- * @param handler the conflict handler, which is called for un-committed revisions
+ * @param handler the conflict handler, which is called for concurrent changes
* preceding <code>before</code>.
* @return the revision, or null if deleted
*/
@@ -1152,7 +1152,7 @@ public class MongoMK implements MicroKernel {
if (!propRev.equals(changeRev)) {
if (!isValidRevision(
propRev, changeRev, nodeMap, new HashSet<Revision>())) {
- handler.uncommittedModification(propRev);
+ handler.concurrentModification(propRev);
} else {
newestRev = propRev;
}
diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/util/Utils.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/util/Utils.java
index 79eb21d..3fe75f3 100644
--- a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/util/Utils.java
+++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/util/Utils.java
@@ -181,5 +181,15 @@ public class Utils {
target.put(e.getKey(), value);
}
}
+
+ /**
+ * Formats a MongoDB document for use in a log message.
+ *
+ * @param document the MongoDB document.
+ * @return
+ */
+ public static String formatDocument(Map<String, Object> document) {
+ return document.toString().replaceAll(", _", ",\n_").replaceAll("}, ", "},\n");
+ }
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-846_7acb091a.diff |
bugs-dot-jar_data_OAK-4050_52ca008c | ---
BugID: OAK-4050
Summary: SplitOperations may not retain most recent committed _commitRoot entry
Description: In some rare cases it may happen that SplitOperations does not retain
the most recent committed _commitRoot entry on a document. This may result in an
undetected hierarchy conflict.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitOperations.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitOperations.java
index dc3eb5e..75853ce 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitOperations.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitOperations.java
@@ -240,10 +240,10 @@ class SplitOperations {
} else if (r.getClusterId() == context.getClusterId()
&& !changes.contains(r)) {
// OAK-2528: _commitRoot entry without associated change
- // consider all but most recent as garbage (OAK-3333)
- if (mostRecent) {
+ // consider all but most recent as garbage (OAK-3333, OAK-4050)
+ if (mostRecent && doc.isCommitted(r)) {
mostRecent = false;
- } else {
+ } else if (isGarbage(r)) {
addGarbage(r, COMMIT_ROOT);
}
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-4050_52ca008c.diff |
bugs-dot-jar_data_OAK-3053_7552a10b | ---
BugID: OAK-3053
Summary: Locking issues seen with CopyOnWrite mode enabled
Description: "When CopyOnWrite mode is enabled and incremental mode is enabled i.e.
{{indexPath}} property set then failure in any indexing cycle would prevent further
indexing from progressing. For e.g. if any indexing cycle fails then subsequent
indexing cycle would fail with Lucene locking related exception\n\n{noformat}\nCaused
by: org.apache.lucene.store.LockObtainFailedException: Lock obtain timed out: NativeFSLock@/tmp/junit8067118705344013640/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae/1/write.lock\n\tat
org.apache.lucene.store.Lock.obtain(Lock.java:89)\n\tat org.apache.lucene.index.IndexWriter.<init>(IndexWriter.java:707)\n\tat
org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorContext.getWriter(LuceneIndexEditorContext.java:169)\n\tat
org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditor.addOrUpdate(LuceneIndexEditor.java:293)\n\t...
37 more\n{noformat}\n\nAny further indexing would continue to fail with this exception"
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java
index e17f580..ea6b0ed 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java
@@ -56,7 +56,6 @@ import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.common.hash.Hashing;
import org.apache.commons.io.FileUtils;
-import org.apache.jackrabbit.oak.commons.IOUtils;
import org.apache.jackrabbit.oak.commons.concurrent.NotifyingFutureTask;
import org.apache.jackrabbit.oak.util.PerfLogger;
import org.apache.lucene.store.Directory;
@@ -65,6 +64,7 @@ import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.NoLockFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -147,7 +147,10 @@ public class IndexCopier implements CopyOnReadStatsMBean {
String newVersion = String.valueOf(definition.getReindexCount());
indexWriterDir = getVersionedDir(indexPath, indexDir, newVersion);
}
- Directory dir = FSDirectory.open(indexWriterDir);
+
+ //By design indexing in Oak is single threaded so Lucene locking
+ //can be disabled
+ Directory dir = FSDirectory.open(indexWriterDir, NoLockFactory.getNoLockFactory());
log.debug("IndexWriter would use {}", indexWriterDir);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3053_7552a10b.diff |
bugs-dot-jar_data_OAK-4170_2a489d05 | ---
BugID: OAK-4170
Summary: QueryEngine adding invalid property restriction for fulltext query
Description: "QueryEngine inserts a property restriction of \"is not null\" for any
property used in fulltext constraint. For e.g. for query\n\n{noformat}\nselect *
from [nt:unstructured] where CONTAINS([jcr:content/metadata/comment], 'december')\n{noformat}\n\nA
property restriction would be added for {{jcr:content/metadata/comment}}. However
currently due to bug in {{FulltextSearchImpl}} [1] the property name generated is
{{comment/jcr:content/metadata}}.\n\n{code}\n@Override\n public void restrict(FilterImpl
f) {\n if (propertyName != null) {\n if (f.getSelector().equals(selector))
{\n String p = propertyName;\n if (relativePath !=
null) {\n p = PathUtils.concat(p, relativePath);\n }
\ \n p = normalizePropertyName(p);\n restrictPropertyOnFilter(p,
f);\n }\n }\n f.restrictFulltextCondition(fullTextSearchExpression.currentValue().getValue(Type.STRING));\n
\ }\n{code}\n\nThis happens because {{relativePath}} is passed as second param
to {{PathUtils.concat}}. It should be first param\n\n[1] https://github.com/apache/jackrabbit-oak/blob/1.4/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/FullTextSearchImpl.java#L275-L286"
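The argument order matters because PathUtils.concat takes the parent path first; a quick sketch using the values from the query above:

{code}
String relativePath = "jcr:content/metadata";
String propertyName = "comment";

// buggy order: the property name is passed as the parent path
System.out.println(PathUtils.concat(propertyName, relativePath)); // comment/jcr:content/metadata

// intended order: relative path first, property name last
System.out.println(PathUtils.concat(relativePath, propertyName)); // jcr:content/metadata/comment
{code}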
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/FullTextSearchImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/FullTextSearchImpl.java
index 2e0505f..f3d11de 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/FullTextSearchImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/FullTextSearchImpl.java
@@ -273,16 +273,6 @@ public class FullTextSearchImpl extends ConstraintImpl {
@Override
public void restrict(FilterImpl f) {
- if (propertyName != null) {
- if (f.getSelector().equals(selector)) {
- String p = propertyName;
- if (relativePath != null) {
- p = PathUtils.concat(relativePath, p);
- }
- p = normalizePropertyName(p);
- restrictPropertyOnFilter(p, f);
- }
- }
f.restrictFulltextCondition(fullTextSearchExpression.currentValue().getValue(Type.STRING));
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-4170_2a489d05.diff |
bugs-dot-jar_data_OAK-3324_5f863af6 | ---
BugID: OAK-3324
Summary: Evaluation with restriction is not consistent with parent ACLs
Description: |
consider the following ACL setup:
{noformat}
testuser allow rep:read,rep:write /testroot
testuser deny jcr:removeNode /testroot/a glob=*/c
testuser allow jcr:removeNode /testroot/a glob=*/b
{noformat}
now: {{hasPermission(/testroot/a/b/c, jcr:removeNode) == false}} but the user is still able to delete the node.
* if we change the order of the ACEs with the restriction, it works (i.e. the user can't delete)
* if we use direct ACLs on the respective nodes, it works
I think this is a bug...but I'm not sure if {{hasPermission}} is wrong, or the check during node deletion.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/CompiledPermissionImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/CompiledPermissionImpl.java
index 84f16ae..153884f 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/CompiledPermissionImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/CompiledPermissionImpl.java
@@ -145,7 +145,8 @@ final class CompiledPermissionImpl implements CompiledPermissions, PermissionCon
return new RepositoryPermission() {
@Override
public boolean isGranted(long repositoryPermissions) {
- return hasPermissions(getEntryIterator(new EntryPredicate()), repositoryPermissions, null);
+ EntryPredicate predicate = new EntryPredicate();
+ return hasPermissions(getEntryIterator(predicate), predicate, repositoryPermissions, null);
}
};
}
@@ -242,8 +243,8 @@ final class CompiledPermissionImpl implements CompiledPermissions, PermissionCon
@Override
public boolean isGranted(@Nonnull String path, long permissions) {
- Iterator<PermissionEntry> it = getEntryIterator(new EntryPredicate(path, Permissions.respectParentPermissions(permissions)));
- return hasPermissions(it, permissions, path);
+ EntryPredicate predicate = new EntryPredicate(path, Permissions.respectParentPermissions(permissions));
+ return hasPermissions(getEntryIterator(predicate), predicate, permissions, path);
}
@Nonnull
@@ -260,11 +261,12 @@ final class CompiledPermissionImpl implements CompiledPermissions, PermissionCon
//------------------------------------------------------------< private >---
private boolean internalIsGranted(@Nonnull Tree tree, @Nullable PropertyState property, long permissions) {
- Iterator<PermissionEntry> it = getEntryIterator(tree, property, permissions);
- return hasPermissions(it, permissions, tree.getPath());
+ EntryPredicate predicate = new EntryPredicate(tree, property, Permissions.respectParentPermissions(permissions));
+ return hasPermissions(getEntryIterator(predicate), predicate, permissions, tree.getPath());
}
private boolean hasPermissions(@Nonnull Iterator<PermissionEntry> entries,
+ @Nonnull EntryPredicate predicate,
long permissions, @Nullable String path) {
// calculate readable paths if the given permissions includes any read permission.
boolean isReadable = Permissions.diff(Permissions.READ, permissions) != Permissions.READ && readPolicy.isReadablePath(path, false);
@@ -310,14 +312,18 @@ final class CompiledPermissionImpl implements CompiledPermissions, PermissionCon
}
if (entry.isAllow) {
- allowBits.addDifference(entry.privilegeBits, denyBits);
+ if (!respectParent || predicate.apply(entry, false)) {
+ allowBits.addDifference(entry.privilegeBits, denyBits);
+ }
long ap = PrivilegeBits.calculatePermissions(allowBits, parentAllowBits, true);
allows |= Permissions.diff(ap, denies);
if ((allows | ~permissions) == -1) {
return true;
}
} else {
- denyBits.addDifference(entry.privilegeBits, allowBits);
+ if (!respectParent || predicate.apply(entry, false)) {
+ denyBits.addDifference(entry.privilegeBits, allowBits);
+ }
long dp = PrivilegeBits.calculatePermissions(denyBits, parentDenyBits, false);
denies |= Permissions.diff(dp, allows);
if (Permissions.includes(denies, permissions)) {
@@ -377,11 +383,6 @@ final class CompiledPermissionImpl implements CompiledPermissions, PermissionCon
}
@Nonnull
- private Iterator<PermissionEntry> getEntryIterator(@Nonnull Tree tree, @Nullable PropertyState property, long permissions) {
- return getEntryIterator(new EntryPredicate(tree, property, Permissions.respectParentPermissions(permissions)));
- }
-
- @Nonnull
private Iterator<PermissionEntry> getEntryIterator(@Nonnull EntryPredicate predicate) {
Iterator<PermissionEntry> userEntries = userStore.getEntryIterator(predicate);
Iterator<PermissionEntry> groupEntries = groupStore.getEntryIterator(predicate);
@@ -526,12 +527,16 @@ final class CompiledPermissionImpl implements CompiledPermissions, PermissionCon
@Override
public boolean isGranted(long permissions) {
- return hasPermissions(getIterator(null, permissions), permissions, tree.getPath());
+ EntryPredicate predicate = new EntryPredicate(tree, null, Permissions.respectParentPermissions(permissions));
+ Iterator<PermissionEntry> it = concat(new LazyIterator(this, true, predicate), new LazyIterator(this, false, predicate));
+ return hasPermissions(it, predicate, permissions, tree.getPath());
}
@Override
public boolean isGranted(long permissions, @Nonnull PropertyState property) {
- return hasPermissions(getIterator(property, permissions), permissions, tree.getPath());
+ EntryPredicate predicate = new EntryPredicate(tree, property, Permissions.respectParentPermissions(permissions));
+ Iterator<PermissionEntry> it = concat(new LazyIterator(this, true, predicate), new LazyIterator(this, false, predicate));
+ return hasPermissions(it, predicate, permissions, tree.getPath());
}
//--------------------------------------------------------< private >---
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/EntryPredicate.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/EntryPredicate.java
index 20fe765..023e18e 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/EntryPredicate.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/EntryPredicate.java
@@ -37,6 +37,7 @@ final class EntryPredicate implements Predicate<PermissionEntry> {
private final String parentPath;
private final Tree parent;
+ private final boolean respectParent;
public EntryPredicate(@Nonnull Tree tree, @Nullable PropertyState property,
boolean respectParent) {
@@ -64,6 +65,7 @@ final class EntryPredicate implements Predicate<PermissionEntry> {
parentPath = null;
parent = null;
}
+ this.respectParent = parent != null || parentPath != null;
}
@CheckForNull
@@ -73,25 +75,22 @@ final class EntryPredicate implements Predicate<PermissionEntry> {
@Override
public boolean apply(@Nullable PermissionEntry entry) {
+ return apply(entry, true);
+ }
+
+ public boolean apply(@Nullable PermissionEntry entry, boolean respectParent) {
if (entry == null) {
return false;
}
+ respectParent &= this.respectParent;
+
if (tree != null) {
- return entry.matches(tree, property) || applyToParent(entry);
+ return entry.matches(tree, property) || (respectParent && parent != null && entry.matches(parent, null));
} else if (path != null) {
- return entry.matches(path) || applyToParent(entry);
+ return entry.matches(path) || (respectParent && parentPath != null && entry.matches(parentPath));
} else {
return entry.matches();
}
}
- private boolean applyToParent(@Nonnull PermissionEntry entry) {
- if (parent != null) {
- return entry.matches(parent, null);
- } else if (parentPath != null) {
- return entry.matches(parentPath);
- } else {
- return false;
- }
- }
}
\ No newline at end of file
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntry.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntry.java
index 36dbe01..136ccb9 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntry.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntry.java
@@ -122,4 +122,16 @@ final class PermissionEntry implements Comparable<PermissionEntry>, PermissionCo
public int hashCode() {
return Objects.hashCode(privilegeBits, index, path, isAllow, restriction);
}
+
+ @Override
+ public String toString() {
+ final StringBuilder sb = new StringBuilder("PermissionEntry{");
+ sb.append("isAllow=").append(isAllow);
+ sb.append(", privilegeBits=").append(privilegeBits);
+ sb.append(", index=").append(index);
+ sb.append(", path='").append(path).append('\'');
+ sb.append(", restriction=").append(restriction);
+ sb.append('}');
+ return sb.toString();
+ }
}
\ No newline at end of file
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3324_5f863af6.diff |
bugs-dot-jar_data_OAK-2864_f51ea2a2 | ---
BugID: OAK-2864
Summary: XPath backwards compatibility issue with false() and true()
Description: |-
In JR2 (actually CRX 2) both of the following queries for nodes with a
boolean property can be parsed, however only query (a) returns search
results.
{noformat}
(a) /jcr:root/test//*[@foo = true()]
(b) /jcr:root/test//*[@foo = true]
{noformat}
On Oak 1.2, query (a) results in an exception\[0\] and query (b) returns
search results.
See discussion at http://markmail.org/thread/kpews55jpdwm62ds
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java
index af95c65..fb046c5 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java
@@ -497,8 +497,14 @@ public class XPathToSQL2Converter {
if (readIf("@")) {
return readProperty();
} else if (readIf("true")) {
+ if (readIf("(")) {
+ read(")");
+ }
return Expression.Literal.newBoolean(true);
} else if (readIf("false")) {
+ if (readIf("(")) {
+ read(")");
+ }
return Expression.Literal.newBoolean(false);
} else if (currentTokenType == VALUE_NUMBER) {
Expression.Literal l = Expression.Literal.newNumber(currentToken);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2864_f51ea2a2.diff |
bugs-dot-jar_data_OAK-2442_ea7a6199 | ---
BugID: OAK-2442
Summary: NoSuchElementException thrown by NodeDocument
Description: |-
The following error is seen with the latest 1.0.9-SNAPSHOT builds on some systems:
{noformat}
Caused by: java.util.NoSuchElementException: null
at java.util.TreeMap.key(TreeMap.java:1221)
at java.util.TreeMap.firstKey(TreeMap.java:285)
at java.util.Collections$UnmodifiableSortedMap.firstKey(Collections.java:1549)
at com.google.common.collect.ForwardingSortedMap.firstKey(ForwardingSortedMap.java:73)
at org.apache.jackrabbit.oak.plugins.document.NodeDocument.getNodeAtRevision(NodeDocument.java:819)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.readNode(DocumentNodeStore.java:930)
{noformat}
Most likely the above occurs because a {{TreeMap}} associated with some key in NodeDocument is empty.
{noformat}
23.01.2015 01:57:23.308 *WARN* [pool-11-thread-5] org.apache.jackrabbit.oak.plugins.observation.NodeObserver Error while dispatching observation events com.google.common.util.concurrent.UncheckedExecutionException: com.google.common.util.concurrent.UncheckedExecutionException: java.util.NoSuchElementException
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2199)
at com.google.common.cache.LocalCache.get(LocalCache.java:3932)
at com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4721)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.getChildren(DocumentNodeStore.java:731)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.diffImpl(DocumentNodeStore.java:1666)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.access$200(DocumentNodeStore.java:105)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore$7.call(DocumentNodeStore.java:1260)
at org.apache.jackrabbit.oak.plugins.document.MongoDiffCache.getChanges(MongoDiffCache.java:88)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.diffChildren(DocumentNodeStore.java:1255)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeState.compareAgainstBaseState(DocumentNodeState.java:260)
at org.apache.jackrabbit.oak.plugins.observation.EventGenerator$Continuation.run(EventGenerator.java:172)
at org.apache.jackrabbit.oak.plugins.observation.EventGenerator.generate(EventGenerator.java:118)
at org.apache.jackrabbit.oak.plugins.observation.NodeObserver.contentChanged(NodeObserver.java:156)
at org.apache.jackrabbit.oak.spi.commit.BackgroundObserver$1$1.call(BackgroundObserver.java:117)
at org.apache.jackrabbit.oak.spi.commit.BackgroundObserver$1$1.call(BackgroundObserver.java:111)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:744)
Caused by: com.google.common.util.concurrent.UncheckedExecutionException:java.util.NoSuchElementException
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2199)
at com.google.common.cache.LocalCache.get(LocalCache.java:3932)
at com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4721)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.getNode(DocumentNodeStore.java:704)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.readChildren(DocumentNodeStore.java:786)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore$4.call(DocumentNodeStore.java:734)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore$4.call(DocumentNodeStore.java:731)
at com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4724)
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3522)
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2315)
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2278)
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2193) ... 18 common frames omitted
Caused by: java.util.NoSuchElementException: null
at java.util.TreeMap.key(TreeMap.java:1221)
at java.util.TreeMap.firstKey(TreeMap.java:285)
at java.util.Collections$UnmodifiableSortedMap.firstKey(Collections.java:1549)
at com.google.common.collect.ForwardingSortedMap.firstKey(ForwardingSortedMap.java:73)
at org.apache.jackrabbit.oak.plugins.document.NodeDocument.getNodeAtRevision(NodeDocument.java:819)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.readNode(DocumentNodeStore.java:930)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore$3.call(DocumentNodeStore.java:707)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore$3.call(DocumentNodeStore.java:704)
at com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4724)
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3522)
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2315)
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2278)
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2193)
{noformat}
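The suspected root cause is easy to reproduce in isolation (a sketch, not taken from the Oak code base): per the stack traces, NodeDocument.getNodeAtRevision() ends up calling firstKey() on a sorted map, and TreeMap.firstKey() throws NoSuchElementException when the map is empty.

{code}
// the real map is keyed by Revision; String is used here only to keep the sketch self-contained
SortedMap<String, String> empty =
        Collections.unmodifiableSortedMap(new TreeMap<String, String>());
empty.firstKey(); // throws java.util.NoSuchElementException, matching the top of the trace
{code}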
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
index aa67c4a..c5d5094 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
@@ -802,8 +802,13 @@ public final class NodeDocument extends Document implements CachedNodeDocument{
if (!Utils.isPropertyName(key)) {
continue;
}
+ // ignore when local map is empty (OAK-2442)
+ SortedMap<Revision, String> local = getLocalMap(key);
+ if (local.isEmpty()) {
+ continue;
+ }
// first check local map, which contains most recent values
- Value value = getLatestValue(nodeStore, getLocalMap(key),
+ Value value = getLatestValue(nodeStore, local,
min, readRevision, validRevisions, lastRevs);
// check if there may be more recent values in a previous document
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2442_ea7a6199.diff |
bugs-dot-jar_data_OAK-2435_7e250001 | ---
BugID: OAK-2435
Summary: UpdateOp.Key.equals() incorrect
Description: |-
As reported on the dev list [0], the equals implementation of UpdateOp.Key is incorrect.
[0] http://markmail.org/message/acpg2mhbxjn4lglu
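The underlying issue (visible in the one-line patch below) is operator precedence: && binds tighter than the ternary operator, so the original expression compares revisions even when the names differ. A reduced sketch with plain Objects standing in for the Revision fields:

{code}
String name = "p", otherName = "q";
Object revision = null, otherRevision = null;

// original: parsed as (name.equals(otherName) && revision != null) ? ... : ...
boolean buggy = name.equals(otherName) &&
        revision != null ? revision.equals(otherRevision) : otherRevision == null;
// buggy == true although the names differ

// fixed: the ternary only decides the revision part of the comparison
boolean fixed = name.equals(otherName) &&
        (revision != null ? revision.equals(otherRevision) : otherRevision == null);
// fixed == false
{code}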
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/UpdateOp.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/UpdateOp.java
index b957c37..707149d 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/UpdateOp.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/UpdateOp.java
@@ -368,7 +368,7 @@ public final class UpdateOp {
if (obj instanceof Key) {
Key other = (Key) obj;
return name.equals(other.name) &&
- revision != null ? revision.equals(other.revision) : other.revision == null;
+ (revision != null ? revision.equals(other.revision) : other.revision == null);
}
return false;
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2435_7e250001.diff |
bugs-dot-jar_data_OAK-428_916cd92f | ---
BugID: OAK-428
Summary: Binaries might get removed by garbage collection while still referenced
Description: 'The [Microkernel contract|http://svn.apache.org/repos/asf/jackrabbit/oak/trunk/oak-mk-api/src/main/java/org/apache/jackrabbit/mk/api/MicroKernel.java]
specifies a specific format for references to binaries: ":blobId:<blobId>". Currently
oak-core uses a different format and thus risks premature garbage collection of
such binaries.'
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/kernel/TypeCodes.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/kernel/TypeCodes.java
index 48142ab..dd0580a 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/kernel/TypeCodes.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/kernel/TypeCodes.java
@@ -34,7 +34,9 @@ public class TypeCodes {
static {
for (int type = PropertyType.UNDEFINED; type <= PropertyType.DECIMAL; type++) {
- String code = PropertyType.nameFromValue(type).substring(0, 3).toLowerCase(Locale.ENGLISH);
+ String code = type == PropertyType.BINARY
+ ? ":blobId" // See class comment for MicroKernel and OAK-428
+ : PropertyType.nameFromValue(type).substring(0, 3).toLowerCase(Locale.ENGLISH);
TYPE2CODE.put(type, code);
CODE2TYPE.put(code, type);
}
@@ -61,7 +63,10 @@ public class TypeCodes {
* @return the location where the prefix ends or -1 if no prefix is present
*/
public static int split(String jsonString) {
- if (jsonString.length() >= 4 && jsonString.charAt(3) == ':') {
+ if (jsonString.startsWith(":blobId:")) { // See OAK-428
+ return 7;
+ }
+ else if (jsonString.length() >= 4 && jsonString.charAt(3) == ':') {
return 3;
}
else {
@@ -82,8 +87,8 @@ public class TypeCodes {
else {
Integer type = CODE2TYPE.get(jsonString.substring(0, split));
return type == null
- ? PropertyType.UNDEFINED
- : type;
+ ? PropertyType.UNDEFINED
+ : type;
}
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-428_916cd92f.diff |
bugs-dot-jar_data_OAK-1848_093b9128 | ---
BugID: OAK-1848
Summary: Default sync handler property mapping does not allow constant properties
Description: |+
It would be useful if the default sync handler user (and group) mapping could also handle constant properties and use the given primary type and mixin type information, e.g.:
{noformat}
profile/nt:primaryType="sling:Folder"
profile/sling:resourceType="sling/security/profile"
{noformat}
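For illustration (a sketch of the convention introduced by the patch below, with hypothetical property names): a mapping value wrapped in double quotes is treated as a constant and written as-is instead of being read from the external identity.
{code}
String name = "\"sling/security/profile\"";             // value from the mapping config
int len = name.length();
if (len > 1 && name.charAt(0) == '"' && name.charAt(len - 1) == '"') {
    String constant = name.substring(1, len - 1);        // -> sling/security/profile
    // hypothetical call:
    // auth.setProperty("profile/sling:resourceType", valueFactory.createValue(constant));
}
{code}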
diff --git a/oak-auth-external/src/main/java/org/apache/jackrabbit/oak/spi/security/authentication/external/impl/DefaultSyncHandler.java b/oak-auth-external/src/main/java/org/apache/jackrabbit/oak/spi/security/authentication/external/impl/DefaultSyncHandler.java
index 43afa31..be7be37 100644
--- a/oak-auth-external/src/main/java/org/apache/jackrabbit/oak/spi/security/authentication/external/impl/DefaultSyncHandler.java
+++ b/oak-auth-external/src/main/java/org/apache/jackrabbit/oak/spi/security/authentication/external/impl/DefaultSyncHandler.java
@@ -644,7 +644,12 @@ public class DefaultSyncHandler implements SyncHandler {
String name = entry.getValue();
Object obj = properties.get(name);
if (obj == null) {
- auth.removeProperty(relPath);
+ int nameLen = name.length();
+ if (nameLen > 1 && name.charAt(0) == '"' && name.charAt(nameLen-1) == '"') {
+ auth.setProperty(relPath, valueFactory.createValue(name.substring(1, nameLen - 1)));
+ } else {
+ auth.removeProperty(relPath);
+ }
} else {
if (obj instanceof Collection) {
auth.setProperty(relPath, createValues((Collection) obj));
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1848_093b9128.diff |
bugs-dot-jar_data_OAK-2021_004db804 | ---
BugID: OAK-2021
Summary: XPath queries with certain combinations of "or" conditions don't use an index
Description: |
XPath queries with the following conditions are not converted to "union" SQL-2 queries and therefore don't use an index:
{noformat}
/jcr:root/content//*[((@i = '1' or @i = '2') or (@s = 'x')) and (@t = 'a' or @t = 'b')]
{noformat}
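For illustration (my reading of the intended optimization, not part of the original report): equality conditions on the same property are merged into an IN list, and the remaining OR branches are distributed over the AND so that each branch can become one member of a UNION, roughly:
{noformat}
((@i = '1' or @i = '2') or @s = 'x') and (@t = 'a' or @t = 'b')
  ->  (@i in ('1', '2') or @s = 'x') and (@t = 'a' or @t = 'b')
  ->  (@i in ('1', '2') and @t = 'a') union (@s = 'x' and @t = 'a')
      union (@i in ('1', '2') and @t = 'b') union (@s = 'x' and @t = 'b')
{noformat}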
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/Expression.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/Expression.java
index 782a543..b34fb56 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/Expression.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/Expression.java
@@ -17,6 +17,8 @@
package org.apache.jackrabbit.oak.query.xpath;
import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
import org.apache.jackrabbit.oak.query.SQL2Parser;
import org.apache.jackrabbit.util.ISO9075;
@@ -46,6 +48,15 @@ abstract class Expression {
}
/**
+ * Get the optimized expression.
+ *
+ * @return the optimized expression
+ */
+ Expression optimize() {
+ return this;
+ }
+
+ /**
* Whether this is a condition.
*
* @return true if it is
@@ -55,6 +66,43 @@ abstract class Expression {
}
/**
+ * Whether this is a or contains a full-text condition.
+ *
+ * @return true if it is
+ */
+ boolean containsFullTextCondition() {
+ return false;
+ }
+
+ /**
+ * Get the left-hand-side expression for equality conditions.
+ * For example, for x=1, it is x. If it is not equality, return null.
+ *
+ * @return the left-hand-side expression, or null
+ */
+ String getCommonLeftPart() {
+ return null;
+ }
+
+ /**
+ * Get the left hand side of an expression.
+ *
+ * @return the left hand side
+ */
+ Expression getLeft() {
+ return null;
+ }
+
+ /**
+ * Get the list of the right hand side of an expression.
+ *
+ * @return the list
+ */
+ List<Expression> getRight() {
+ return null;
+ }
+
+ /**
* Pull an OR condition up to the right hand side of an AND condition.
*
* @return the (possibly rotated) expression
@@ -156,19 +204,24 @@ abstract class Expression {
int getPrecedence() {
return precedence;
}
-
- /**
- * Get the left-hand-side expression for equality conditions.
- * For example, for x=1, it is x. If it is not equality, return null.
- *
- * @return the left-hand-side expression, or null
- */
- public String getCommonLeftPart() {
+
+ @Override
+ String getCommonLeftPart() {
if (!"=".equals(operator)) {
return null;
}
return left.toString();
}
+
+ @Override
+ Expression getLeft() {
+ return left;
+ }
+
+ @Override
+ List<Expression> getRight() {
+ return Collections.singletonList(right);
+ }
@Override
public String toString() {
@@ -222,6 +275,11 @@ abstract class Expression {
boolean isCondition() {
return true;
}
+
+ @Override
+ Expression optimize() {
+ return this;
+ }
}
@@ -243,16 +301,87 @@ abstract class Expression {
*/
@Override
public String getCommonLeftPart() {
- if (left instanceof Condition && right instanceof Condition) {
- String l = ((Condition) left).getCommonLeftPart();
- String r = ((Condition) right).getCommonLeftPart();
- if (l != null && r != null && l.equals(r)) {
- return l;
- }
+ String l = left.getCommonLeftPart();
+ String r = right.getCommonLeftPart();
+ if (l != null && r != null && l.equals(r)) {
+ return l;
}
return null;
}
+ @Override
+ Expression optimize() {
+ Expression l = left.optimize();
+ Expression r = right.optimize();
+ if (l != left || r != right) {
+ return new OrCondition(l, r).optimize();
+ }
+ String commonLeft = getCommonLeftPart();
+ if (commonLeft == null) {
+ return this;
+ }
+ // "@x = 1 or @x = 2" is converted to "@x in (1, 2)"
+ ArrayList<Expression> list = new ArrayList<Expression>();
+ list.addAll(left.getRight());
+ list.addAll(right.getRight());
+ Expression le = left.getLeft();
+ InCondition in = new InCondition(le, list);
+ return in.optimize();
+ }
+
+ @Override
+ boolean containsFullTextCondition() {
+ return left.containsFullTextCondition() || right.containsFullTextCondition();
+ }
+
+ }
+
+ /**
+ * An "or" condition.
+ */
+ static class InCondition extends Expression {
+
+ final Expression left;
+ final List<Expression> list;
+
+ InCondition(Expression left, List<Expression> list) {
+ this.left = left;
+ this.list = list;
+ }
+
+ @Override
+ String getCommonLeftPart() {
+ return left.toString();
+ }
+
+ @Override
+ Expression getLeft() {
+ return left;
+ }
+
+ @Override
+ List<Expression> getRight() {
+ return list;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder buff = new StringBuilder();
+ buff.append(left).append(" in(");
+ for (int i = 0; i < list.size(); i++) {
+ if (i > 0) {
+ buff.append(", ");
+ }
+ buff.append(list.get(i));
+ }
+ return buff.append(')').toString();
+ }
+
+ @Override
+ boolean isCondition() {
+ return true;
+ }
+
}
/**
@@ -263,6 +392,16 @@ abstract class Expression {
AndCondition(Expression left, Expression right) {
super(left, "and", right, Expression.PRECEDENCE_AND);
}
+
+ @Override
+ Expression optimize() {
+ Expression l = left.optimize();
+ Expression r = right.optimize();
+ if (l != left || r != right) {
+ return new AndCondition(l, r);
+ }
+ return this;
+ }
@Override
AndCondition pullOrRight() {
@@ -285,6 +424,11 @@ abstract class Expression {
return this;
}
+ @Override
+ boolean containsFullTextCondition() {
+ return left.containsFullTextCondition() || right.containsFullTextCondition();
+ }
+
}
/**
@@ -320,6 +464,11 @@ abstract class Expression {
}
@Override
+ boolean containsFullTextCondition() {
+ return true;
+ }
+
+ @Override
boolean isName() {
return left.isName();
}
@@ -352,6 +501,11 @@ abstract class Expression {
boolean isCondition() {
return true;
}
+
+ @Override
+ boolean containsFullTextCondition() {
+ return true;
+ }
@Override
boolean isName() {
@@ -520,5 +674,5 @@ abstract class Expression {
}
}
-
+
}
\ No newline at end of file
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/Statement.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/Statement.java
index 0504429..6113c0c 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/Statement.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/Statement.java
@@ -20,7 +20,6 @@ import java.util.ArrayList;
import org.apache.jackrabbit.oak.query.QueryImpl;
import org.apache.jackrabbit.oak.query.xpath.Expression.AndCondition;
-import org.apache.jackrabbit.oak.query.xpath.Expression.Contains;
import org.apache.jackrabbit.oak.query.xpath.Expression.OrCondition;
import org.apache.jackrabbit.oak.query.xpath.Expression.Property;
@@ -29,8 +28,6 @@ import org.apache.jackrabbit.oak.query.xpath.Expression.Property;
*/
public class Statement {
- private String xpathQuery;
-
private boolean explain;
private boolean measure;
@@ -49,15 +46,18 @@ public class Statement {
private Expression where;
- private ArrayList<Order> orderList = new ArrayList<Order>();
+ ArrayList<Order> orderList = new ArrayList<Order>();
+
+ String xpathQuery;
public Statement optimize() {
- if (explain || measure || orderList.size() > 0) {
+ if (explain || measure) {
return this;
}
if (where == null) {
return this;
}
+ where = where.optimize();
ArrayList<Expression> unionList = new ArrayList<Expression>();
addToUnionList(where, unionList);
if (unionList.size() == 1) {
@@ -71,37 +71,29 @@ public class Statement {
s.selectors = selectors;
s.columnList = columnList;
s.where = e;
- if (i == unionList.size() - 1) {
- s.xpathQuery = xpathQuery;
- }
if (union == null) {
union = s;
} else {
union = new UnionStatement(union.optimize(), s.optimize());
}
}
+ union.orderList = orderList;
+ union.xpathQuery = xpathQuery;
return union;
}
private static void addToUnionList(Expression condition, ArrayList<Expression> unionList) {
- if (condition instanceof OrCondition) {
+ if (condition.containsFullTextCondition()) {
+ // do not use union
+ } else if (condition instanceof OrCondition) {
OrCondition or = (OrCondition) condition;
- if (or.getCommonLeftPart() != null) {
- // @x = 1 or @x = 2
- // is automatically converted to
- // @x in (1, 2)
- // within the query engine
- } else if (or.left instanceof Contains && or.right instanceof Contains) {
- // do not optimize "contains"
- } else {
- // conditions of type
- // @x = 1 or @y = 2
- // or similar are converted to
- // (@x = 1) union (@y = 2)
- addToUnionList(or.left, unionList);
- addToUnionList(or.right, unionList);
- return;
- }
+ // conditions of type
+ // @x = 1 or @y = 2
+ // or similar are converted to
+ // (@x = 1) union (@y = 2)
+ addToUnionList(or.left, unionList);
+ addToUnionList(or.right, unionList);
+ return;
} else if (condition instanceof AndCondition) {
// conditions of type
// @a = 1 and (@x = 1 or @y = 2)
@@ -111,19 +103,10 @@ public class Statement {
and = and.pullOrRight();
if (and.right instanceof OrCondition) {
OrCondition or = (OrCondition) and.right;
- if (or.getCommonLeftPart() != null) {
- // @x = 1 or @x = 2
- // is automatically converted to
- // @x in (1, 2)
- // within the query engine
- } else if (or.left instanceof Contains && or.right instanceof Contains) {
- // do not optimize "contains"
- } else {
- // same as above, but with the added "and"
- addToUnionList(new AndCondition(and.left, or.left), unionList);
- addToUnionList(new AndCondition(and.left, or.right), unionList);
- return;
- }
+ // same as above, but with the added "and"
+ addToUnionList(new AndCondition(and.left, or.left), unionList);
+ addToUnionList(new AndCondition(and.left, or.right), unionList);
+ return;
}
}
unionList.add(condition);
@@ -255,7 +238,25 @@ public class Statement {
@Override
public String toString() {
- return s1 + " union " + s2;
+ StringBuilder buff = new StringBuilder();
+ buff.append(s1).append(" union ").append(s2);
+ // order by ...
+ if (orderList != null && !orderList.isEmpty()) {
+ buff.append(" order by ");
+ for (int i = 0; i < orderList.size(); i++) {
+ if (i > 0) {
+ buff.append(", ");
+ }
+ buff.append(orderList.get(i));
+ }
+ }
+ // leave original xpath string as a comment
+ if (xpathQuery != null) {
+ buff.append(" /* xpath: ");
+ buff.append(xpathQuery);
+ buff.append(" */");
+ }
+ return buff.toString();
}
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2021_004db804.diff |
bugs-dot-jar_data_OAK-3511_5138a1e2 | ---
BugID: OAK-3511
Summary: 'Test failure: CompactionMapTest.removeSome'
Description: "Said test fails sporadically:\n\n{noformat}\nat org.junit.Assert.assertNull(Assert.java:562)\nat
org.apache.jackrabbit.oak.plugins.segment.CompactionMapTest.removeSome(CompactionMapTest.java:156)\n{noformat}\n\nThis
is a regression introduced with OAK-3501: the {{recent}} map does not get cleared when
{{segmentIdMap}} is empty. This can happen when a recent key is removed again while
there are no other changes. "
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/PersistedCompactionMap.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/PersistedCompactionMap.java
index ae5c14b..51a065b 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/PersistedCompactionMap.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/PersistedCompactionMap.java
@@ -229,9 +229,9 @@ public class PersistedCompactionMap implements PartialCompactionMap {
", baseId=" + previousBaseId + '}';
writer.writeString(mapInfo);
writer.flush();
- recent.clear();
}
+ recent.clear();
if (recordCount == 0) {
entries = null;
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3511_5138a1e2.diff |
bugs-dot-jar_data_OAK-520_ec961a38 | ---
BugID: OAK-520
Summary: IllegalStateException in MemoryNodeBuilder
Description: "{{AuthorizablePropertyTest.testSetPropertyByRelPath()}} sometimes causes
an IllegalStateException in {{MemoryNodeBuilder}}. This might be a problem with
the latter uncovered by the recent switch to the p2 index mechanism (OAK-511).\n\n{code}\njava.lang.IllegalStateException\n
\ at com.google.common.base.Preconditions.checkState(Preconditions.java:133)\n
\ at org.apache.jackrabbit.oak.plugins.memory.MemoryNodeBuilder.read(MemoryNodeBuilder.java:205)\n
\ at org.apache.jackrabbit.oak.plugins.memory.MemoryNodeBuilder.getChildNodeNames(MemoryNodeBuilder.java:379)\n
\ at org.apache.jackrabbit.oak.plugins.index.p2.strategy.ContentMirrorStoreStrategy.remove(ContentMirrorStoreStrategy.java:66)\n
\ at org.apache.jackrabbit.oak.plugins.index.p2.Property2IndexUpdate.apply(Property2IndexUpdate.java:143)\n
\ at org.apache.jackrabbit.oak.plugins.index.p2.Property2IndexDiff.apply(Property2IndexDiff.java:232)\n
\ at org.apache.jackrabbit.oak.plugins.index.IndexHookManager.apply(IndexHookManager.java:71)\n
\ at org.apache.jackrabbit.oak.plugins.index.IndexHookManager.processCommit(IndexHookManager.java:61)\n
\ at org.apache.jackrabbit.oak.spi.commit.CompositeHook.processCommit(CompositeHook.java:59)\n
\ at org.apache.jackrabbit.oak.kernel.KernelNodeStoreBranch.merge(KernelNodeStoreBranch.java:127)\n
\ at org.apache.jackrabbit.oak.core.RootImpl$2.run(RootImpl.java:240)\n at
org.apache.jackrabbit.oak.core.RootImpl$2.run(RootImpl.java:236)\n at java.security.AccessController.doPrivileged(Native
Method)\n at javax.security.auth.Subject.doAs(Subject.java:337)\n at org.apache.jackrabbit.oak.core.RootImpl.commit(RootImpl.java:235)\n
\ at org.apache.jackrabbit.oak.jcr.SessionDelegate.save(SessionDelegate.java:255)\n
\ at org.apache.jackrabbit.oak.jcr.SessionImpl.save(SessionImpl.java:283)\n at
org.apache.jackrabbit.oak.jcr.security.user.AbstractUserTest.tearDown(AbstractUserTest.java:72)\n
\ at org.apache.jackrabbit.test.AbstractJCRTest.run(AbstractJCRTest.java:456)\n
\ at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)\n
\ at org.junit.runner.JUnitCore.run(JUnitCore.java:157)\n at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:76)\n
\ at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:195)\n
\ at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:63)\n
\ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)\n
\ at com.intellij.rt.execution.application.AppMain.main(AppMain.java:120) \n{code}"
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/p2/strategy/ContentMirrorStoreStrategy.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/p2/strategy/ContentMirrorStoreStrategy.java
index 36c1dee..9792b65 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/p2/strategy/ContentMirrorStoreStrategy.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/p2/strategy/ContentMirrorStoreStrategy.java
@@ -16,11 +16,12 @@
*/
package org.apache.jackrabbit.oak.plugins.index.p2.strategy;
+import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.Queue;
+import java.util.Map;
import java.util.Set;
+import java.util.TreeMap;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
@@ -38,46 +39,66 @@ public class ContentMirrorStoreStrategy implements IndexStoreStrategy {
return;
}
NodeBuilder child = index.child(key);
- Queue<NodeBuilder> parentQueue = new LinkedList<NodeBuilder>();
+ Map<String, NodeBuilder> parents = new TreeMap<String, NodeBuilder>(Collections.reverseOrder());
+
for (String rm : values) {
if (PathUtils.denotesRoot(rm)) {
child.removeProperty("match");
} else {
- NodeBuilder indexEntry = child;
- Iterator<String> segments = PathUtils.elements(rm).iterator();
- while (segments.hasNext()) {
- String segment = segments.next();
- if (segments.hasNext()) {
- parentQueue.add(indexEntry);
+ String parentPath = PathUtils.getParentPath(rm);
+ String name = PathUtils.getName(rm);
+ NodeBuilder indexEntry = parents.get(parentPath);
+ if (indexEntry == null) {
+ indexEntry = child;
+ String segmentPath = "";
+ Iterator<String> segments = PathUtils.elements(parentPath)
+ .iterator();
+ while (segments.hasNext()) {
+ String segment = segments.next();
+ segmentPath = PathUtils.concat(segmentPath, segment);
indexEntry = indexEntry.child(segment);
- } else {
- // last segment
- if (indexEntry.hasChildNode(segment)) {
- indexEntry.removeNode(segment);
- }
+ parents.put(segmentPath, indexEntry);
+ }
+ }
+ if (indexEntry.hasChildNode(name)) {
+ NodeBuilder childEntry = indexEntry.child(name);
+ childEntry.removeProperty("match");
+ if (childEntry.getChildNodeCount() == 0) {
+ indexEntry.removeNode(name);
}
}
}
}
// prune the index: remove all children that have no children
// and no "match" property progressing bottom up
- // see OAK-520
- // while (!parentQueue.isEmpty()) {
- // NodeBuilder node = parentQueue.poll();
- // for (String name : node.getChildNodeNames()) {
- // NodeBuilder segment = node.child(name);
- // if (segment.getChildNodeCount() == 0
- // && segment.getProperty("match") == null) {
- // segment.removeNode(name);
- // }
- // }
- // }
- // finally remove the index node if empty
- if (child.getChildNodeCount() == 0) {
+ Iterator<String> it = parents.keySet().iterator();
+ while (it.hasNext()) {
+ String path = it.next();
+ NodeBuilder parent = parents.get(path);
+ pruneNode(parent);
+ }
+
+ // finally prune the index node
+ pruneNode(child);
+ if (child.getChildNodeCount() == 0
+ && child.getProperty("match") == null) {
index.removeNode(key);
}
}
+ private void pruneNode(NodeBuilder parent) {
+ if (parent.isRemoved()) {
+ return;
+ }
+ for (String name : parent.getChildNodeNames()) {
+ NodeBuilder segment = parent.child(name);
+ if (segment.getChildNodeCount() == 0
+ && segment.getProperty("match") == null) {
+ parent.removeNode(name);
+ }
+ }
+ }
+
@Override
public void insert(NodeBuilder index, String key, boolean unique,
Iterable<String> values) throws CommitFailedException {
@@ -85,9 +106,7 @@ public class ContentMirrorStoreStrategy implements IndexStoreStrategy {
for (String add : values) {
NodeBuilder indexEntry = child;
- Iterator<String> segments = PathUtils.elements(add).iterator();
- while (segments.hasNext()) {
- String segment = segments.next();
+ for(String segment: PathUtils.elements(add)){
indexEntry = indexEntry.child(segment);
}
indexEntry.setProperty("match", true);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-520_ec961a38.diff |
bugs-dot-jar_data_OAK-2559_dfa87520 | ---
BugID: OAK-2559
Summary: Lucene index rules should be case insensitive
Description: Following the Lucene index definitions update, the ignored properties
are upgraded to a lower-case version, but the rest of the Lucene code (indexing)
is still case sensitive, resulting in the exclude rules being ignored
and the properties being indexed anyway.
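For illustration (a standalone sketch, not Oak code): keying the property-to-config map by lower-cased names makes the rule lookup case insensitive, which is what the patch below does for {{propConfigs}}.
{code}
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

public class CaseInsensitiveLookupDemo {
    public static void main(String[] args) {
        Map<String, String> propConfigs = new HashMap<>();
        // store the rule under the lower-cased property name
        propConfigs.put("jcr:MimeType".toLowerCase(Locale.ENGLISH), "excluded");
        // ...and lower-case the name again at lookup time
        System.out.println(propConfigs.get("jcr:mimetype".toLowerCase(Locale.ENGLISH))); // excluded
    }
}
{code}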
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java
index fcd547c..69d3431 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java
@@ -23,6 +23,7 @@ import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
@@ -559,6 +560,9 @@ class IndexDefinition implements Aggregate.AggregateMapper{
public class IndexingRule {
private final String baseNodeType;
private final String nodeTypeName;
+ /**
+ * Case insensitive map of lower cased propertyName to propertyConfigs
+ */
private final Map<String, PropertyDefinition> propConfigs;
private final List<NamePattern> namePatterns;
private final List<PropertyDefinition> nullCheckEnabledProperties;
@@ -697,7 +701,7 @@ class IndexDefinition implements Aggregate.AggregateMapper{
*/
@CheckForNull
public PropertyDefinition getConfig(String propertyName) {
- PropertyDefinition config = propConfigs.get(propertyName);
+ PropertyDefinition config = propConfigs.get(propertyName.toLowerCase(Locale.ENGLISH));
if (config != null) {
return config;
} else if (namePatterns.size() > 0) {
@@ -761,7 +765,7 @@ class IndexDefinition implements Aggregate.AggregateMapper{
if(pd.isRegexp){
patterns.add(new NamePattern(pd.name, pd));
} else {
- propDefns.put(pd.name, pd);
+ propDefns.put(pd.name.toLowerCase(Locale.ENGLISH), pd);
}
if (pd.relative){
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2559_dfa87520.diff |
bugs-dot-jar_data_OAK-1932_c215b267 | ---
BugID: OAK-1932
Summary: TarMK compaction can create mixed segments
Description: As described in http://markmail.org/message/ujkqdlthudaortxf, commits
that occur while the compaction operation is running can make the compacted segments
contain references to older data segments, which prevents old data from being reclaimed
during cleanup.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Compactor.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Compactor.java
index 14278ca..18cb068 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Compactor.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Compactor.java
@@ -20,7 +20,6 @@ import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
import static org.apache.jackrabbit.oak.api.Type.BINARIES;
import static org.apache.jackrabbit.oak.api.Type.BINARY;
-import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import java.io.IOException;
import java.io.InputStream;
@@ -62,8 +61,6 @@ public class Compactor {
private final SegmentWriter writer;
- private final SegmentNodeBuilder builder;
-
private CompactionMap map = new CompactionMap(100000);
/**
@@ -75,11 +72,11 @@ public class Compactor {
public Compactor(SegmentWriter writer) {
this.writer = writer;
- this.builder =
- new SegmentNodeBuilder(writer.writeNode(EMPTY_NODE), writer);
}
public SegmentNodeState compact(NodeState before, NodeState after) {
+ SegmentNodeBuilder builder = new SegmentNodeBuilder(
+ writer.writeNode(before), writer);
after.compareAgainstBaseState(before, new CompactDiff(builder));
return builder.getNodeState();
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1932_c215b267.diff |
bugs-dot-jar_data_OAK-3879_4faf31e3 | ---
BugID: OAK-3879
Summary: 'Lucene index / compatVersion 2: search for ''abc!'' does not work'
Description: "When using a Lucene fulltext index with compatVersion 2, the following
query does not return any results. When using compatVersion 1, the correct result
is returned.\n\n{noformat}\nSELECT * FROM [nt:unstructured] AS c \nWHERE CONTAINS(c.[jcr:description],
'abc!') \nAND ISDESCENDANTNODE(c, '/content')\n{noformat}\n\nWith compatVersion
1 and 2, searching for just 'abc' works. Also, searching with '=' instead of 'contains'
works."
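For illustration (a simplified sketch, not the Oak implementation): characters that Lucene's query parser treats as operators need to be escaped with a backslash so that a term such as {{abc!}} is searched literally.
{code}
public class EscapeDemo {
    private static final String OPERATORS = ":/!&|[]{}";

    static String escape(String term) {
        StringBuilder sb = new StringBuilder();
        for (char c : term.toCharArray()) {
            if (OPERATORS.indexOf(c) >= 0) {
                sb.append('\\');          // escape Lucene query operators
            }
            sb.append(c);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(escape("abc!"));       // abc\!
        System.out.println(escape("text/plain")); // text\/plain
    }
}
{code}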
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndex.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndex.java
index cd4d2c3..fb69de7 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndex.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndex.java
@@ -39,6 +39,7 @@ import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Lists;
import com.google.common.collect.Queues;
import com.google.common.collect.Sets;
+import com.google.common.primitives.Chars;
import org.apache.jackrabbit.oak.api.PropertyValue;
import org.apache.jackrabbit.oak.api.Result.SizePrecision;
import org.apache.jackrabbit.oak.api.Type;
@@ -1337,6 +1338,11 @@ public class LucenePropertyIndex implements AdvancedQueryIndex, QueryIndex, Nati
}
/**
+ * Following chars are used as operators in Lucene Query and should be escaped
+ */
+ private static final char[] LUCENE_QUERY_OPERATORS = {':' , '/', '!', '&', '|', '[', ']', '{', '}'};
+
+ /**
* Following logic is taken from org.apache.jackrabbit.core.query.lucene.JackrabbitQueryParser#parse(java.lang.String)
*/
static String rewriteQueryText(String textsearch) {
@@ -1361,10 +1367,7 @@ public class LucenePropertyIndex implements AdvancedQueryIndex, QueryIndex, Nati
escaped = false;
}
rewritten.append(c);
- } else if (c == ':' || c == '/') {
- //TODO Some other chars are also considered special See OAK-3769 for details
- //':' fields as known in lucene are not supported
- //'/' its a special char used for regex search in Lucene
+ } else if (Chars.contains(LUCENE_QUERY_OPERATORS, c)) {
rewritten.append('\\').append(c);
} else {
if (escaped) {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3879_4faf31e3.diff |
bugs-dot-jar_data_OAK-1793_16225d51 | ---
BugID: OAK-1793
Summary: MongoMK GC removes documents with data still in use
Description: The version garbage collector may delete previous documents that contain
commit root information still in use by the main document.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/VersionGarbageCollector.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/VersionGarbageCollector.java
index 885f721..74b4a84 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/VersionGarbageCollector.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/VersionGarbageCollector.java
@@ -20,7 +20,7 @@
package org.apache.jackrabbit.oak.plugins.document;
import java.util.ArrayList;
-import java.util.EnumSet;
+import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@@ -43,11 +43,10 @@ public class VersionGarbageCollector {
/**
* Split document types which can be safely Garbage Collected
+ * OAK-1793: SplitDocType.DEFAULT_NO_CHILD and SplitDocType.PROP_COMMIT_ONLY
+ * have been removed, but should be added again when OAK-1794 is fixed.
*/
- private static final Set<NodeDocument.SplitDocType> GC_TYPES = EnumSet.of(
- NodeDocument.SplitDocType.DEFAULT_NO_CHILD,
- NodeDocument.SplitDocType.PROP_COMMIT_ONLY);
-
+ private static final Set<NodeDocument.SplitDocType> GC_TYPES = Collections.emptySet();
VersionGarbageCollector(DocumentNodeStore nodeStore) {
this.nodeStore = nodeStore;
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1793_16225d51.diff |
bugs-dot-jar_data_OAK-2418_039f892d | ---
BugID: OAK-2418
Summary: int overflow with orderby causing huge slowdown
Description: |-
Consider the following query:
{code}
//element(*,slingevent:Job) order by @slingevent:created ascending
{code}
this query - when run with a large number of slingevent:Job nodes present - will take a very long time because FilterIterators.SortIterator.init() in the following loop:
{code}
if (list.size() > max * 2) {
// remove tail entries right now, to save memory
Collections.sort(list, orderBy);
keepFirst(list, max);
}
{code}
multiplies 'max' by 2; 'max' is by default set to Integer.MAX_VALUE (see FilterIterators.newCombinedFilter). This causes max * 2 to overflow (the result is -2), so that init loop sorts the list for every additional entry, which is definitely not the intention.
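A standalone demonstration of the overflow (not part of the original report):
{code}
public class OverflowDemo {
    public static void main(String[] args) {
        int max = Integer.MAX_VALUE;
        System.out.println(max * 2);        // -2: the 32-bit multiplication wraps around
        System.out.println((long) max * 2); // 4294967294: widening to long avoids the wrap
        // With the wrapped value, "list.size() > max * 2" is true for every
        // non-empty list, so the sort-and-truncate branch runs on each added row.
    }
}
{code}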
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/FilterIterators.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/FilterIterators.java
index 11ae007..6fba745 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/FilterIterators.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/FilterIterators.java
@@ -205,9 +205,9 @@ public class FilterIterators {
list.add(x);
checkMemoryLimit(list.size(), settings);
// from time to time, sort and truncate
- // this should results in O(n*log(2*keep)) operations,
+ // this should need less than O(n*log(3*keep)) operations,
// which is close to the optimum O(n*log(keep))
- if (list.size() > max * 2) {
+ if (list.size() > (long) max * 2) {
// remove tail entries right now, to save memory
Collections.sort(list, orderBy);
keepFirst(list, max);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2418_039f892d.diff |
bugs-dot-jar_data_OAK-1287_14849e22 | ---
BugID: OAK-1287
Summary: java.lang.IllegalArgumentException when running FlatTreeWithAceForSamePrincipalTest
Description: "Running \n{code}\njava -jar oak-run*.jar benchmark FlatTreeWithAceForSamePrincipalTest
Oak-Tar\n{code}\nwill end with\n{code}\njava.lang.IllegalArgumentException\n\tat
com.google.common.base.Preconditions.checkArgument(Preconditions.java:77)\n\tat
org.apache.jackrabbit.oak.plugins.segment.ListRecord.<init>(ListRecord.java:37)\n\tat
org.apache.jackrabbit.oak.plugins.segment.ListRecord.getEntries(ListRecord.java:80)\n\tat
org.apache.jackrabbit.oak.plugins.segment.SegmentPropertyState.getValue(SegmentPropertyState.java:130)\n\tat
org.apache.jackrabbit.oak.util.PropertyBuilder.assignFrom(PropertyBuilder.java:225)\n\tat
org.apache.jackrabbit.oak.util.PropertyBuilder.copy(PropertyBuilder.java:136)\n\tat
org.apache.jackrabbit.oak.core.MutableTree.addChild(MutableTree.java:216)\n\tat
org.apache.jackrabbit.oak.util.TreeUtil.addChild(TreeUtil.java:190)\n\tat org.apache.jackrabbit.oak.jcr.delegate.NodeDelegate.internalAddChild(NodeDelegate.java:841)\n\tat
org.apache.jackrabbit.oak.jcr.delegate.NodeDelegate.addChild(NodeDelegate.java:684)\n\tat
org.apache.jackrabbit.oak.jcr.session.NodeImpl$5.perform(NodeImpl.java:288)\n\tat
org.apache.jackrabbit.oak.jcr.session.NodeImpl$5.perform(NodeImpl.java:253)\n\tat
org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.perform(SessionDelegate.java:125)\n\tat
org.apache.jackrabbit.oak.jcr.session.ItemImpl.perform(ItemImpl.java:111)\n\tat
org.apache.jackrabbit.oak.jcr.session.NodeImpl.addNode(NodeImpl.java:253)\n\tat
org.apache.jackrabbit.oak.jcr.session.NodeImpl.addNode(NodeImpl.java:238)\n\tat
org.apache.jackrabbit.oak.benchmark.FlatTreeWithAceForSamePrincipalTest.beforeSuite(FlatTreeWithAceForSamePrincipalTest.java:56)\n\tat
org.apache.jackrabbit.oak.benchmark.AbstractTest.setUp(AbstractTest.java:113)\n\tat
org.apache.jackrabbit.oak.benchmark.FlatTreeWithAceForSamePrincipalTest.setUp(FlatTreeWithAceForSamePrincipalTest.java:31)\n\tat
org.apache.jackrabbit.oak.benchmark.AbstractTest.runTest(AbstractTest.java:151)\n\tat
org.apache.jackrabbit.oak.benchmark.AbstractTest.run(AbstractTest.java:138)\n\tat
org.apache.jackrabbit.oak.benchmark.FlatTreeWithAceForSamePrincipalTest.run(FlatTreeWithAceForSamePrincipalTest.java:31)\n\tat
org.apache.jackrabbit.oak.benchmark.BenchmarkRunner.main(BenchmarkRunner.java:195)\n\tat
org.apache.jackrabbit.oak.run.Main.main(Main.java:81)\n\n{code}"
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/ListRecord.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/ListRecord.java
index df2735e..27b5cba 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/ListRecord.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/ListRecord.java
@@ -57,8 +57,10 @@ class ListRecord extends Record {
int bucketIndex = index / bucketSize;
int bucketOffset = index % bucketSize;
Segment segment = getSegment();
- RecordId bucketId = segment.readRecordId(getOffset(0, bucketIndex));
- ListRecord bucket = new ListRecord(segment, bucketId, bucketSize);
+ RecordId id = segment.readRecordId(getOffset(0, bucketIndex));
+ ListRecord bucket = new ListRecord(
+ segment, id,
+ Math.min(bucketSize, size - bucketIndex * bucketSize));
return bucket.getEntry(bucketOffset);
}
}
@@ -78,7 +80,7 @@ class ListRecord extends Record {
list.add(id);
} else {
ListRecord bucket = new ListRecord(
- segment, id, Math.min(bucketSize, size - offset));
+ segment, id, Math.min(bucketSize, size - i));
list.addAll(bucket.getEntries());
}
offset += Segment.RECORD_ID_BYTES;
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
index a961cbf..aeb7e98 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
@@ -340,6 +340,7 @@ public class SegmentWriter {
private synchronized RecordId writeListBucket(List<RecordId> bucket) {
+ checkArgument(bucket.size() > 1);
RecordId bucketId = prepare(RecordType.BUCKET, 0, bucket);
for (RecordId id : bucket) {
writeRecordId(id);
@@ -516,7 +517,11 @@ public class SegmentWriter {
List<RecordId> nextLevel = Lists.newArrayList();
for (List<RecordId> bucket :
Lists.partition(thisLevel, ListRecord.LEVEL_SIZE)) {
- nextLevel.add(writeListBucket(bucket));
+ if (bucket.size() > 1) {
+ nextLevel.add(writeListBucket(bucket));
+ } else {
+ nextLevel.add(bucket.get(0));
+ }
}
thisLevel = nextLevel;
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1287_14849e22.diff |
bugs-dot-jar_data_OAK-3792_94110f21 | ---
BugID: OAK-3792
Summary: Provide Simple Exception Name in Credentials Attribute for PW Expiry
Description: Currently, upon encountering a password history exception while changing the
password of a user, the credentials attribute is set to the fully qualified class name
instead of the simple name. This requires consumers (e.g. Sling) to use Oak package
names instead of a simple class name to react to the situation.
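For illustration (generic Java, not Oak code), the difference between the two attribute keys consumers would have to match:
{code}
public class ExceptionNameDemo {
    public static void main(String[] args) {
        Exception e = new IllegalStateException("password history violation");
        System.out.println(e.getClass().getName());       // java.lang.IllegalStateException
        System.out.println(e.getClass().getSimpleName()); // IllegalStateException
    }
}
{code}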
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/UserAuthentication.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/UserAuthentication.java
index f7b3b69..95fd76a 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/UserAuthentication.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/UserAuthentication.java
@@ -169,7 +169,7 @@ class UserAuthentication implements Authentication, UserConstants {
}
}
} catch (PasswordHistoryException e) {
- credentials.setAttribute(e.getClass().getName(), e.getMessage());
+ credentials.setAttribute(e.getClass().getSimpleName(), e.getMessage());
log.error("Failed to change password for user " + userId, e.getMessage());
} catch (RepositoryException e) {
log.error("Failed to change password for user " + userId, e.getMessage());
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3792_94110f21.diff |
bugs-dot-jar_data_OAK-3137_c65b07c3 | ---
BugID: OAK-3137
Summary: Global fulltext index returning plan for pure NodeType queries
Description: |-
On a system having
# Global fulltext index enabled with version V2 and {{evaluatePathRestriction}} enabled
# NodeType index having indexing enabled for specific nodetype like cq:ClientLibraryFolder
A query like
{noformat}
/jcr:root//element(*, cq:ClientLibraryFolder)
{noformat}
Ends up getting evaluated by the fulltext index, as it returns a plan for this include-all query
*Expected*
For such a query the global fulltext index should not return any plan if the path restriction is the root path with all children included
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexPlanner.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexPlanner.java
index 522aa66..19ad3cf 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexPlanner.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexPlanner.java
@@ -315,7 +315,12 @@ class IndexPlanner {
}
private boolean canEvalPathRestrictions(IndexingRule rule) {
- if (filter.getPathRestriction() == Filter.PathRestriction.NO_RESTRICTION){
+ //Opt out if one is looking for all children for '/' as its equivalent to
+ //NO_RESTRICTION
+ if (filter.getPathRestriction() == Filter.PathRestriction.NO_RESTRICTION
+ || (filter.getPathRestriction() == Filter.PathRestriction.ALL_CHILDREN
+ && PathUtils.denotesRoot(filter.getPath()))
+ ){
return false;
}
//If no other restrictions is provided and query is pure
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3137_c65b07c3.diff |
bugs-dot-jar_data_OAK-531_90c45a02 | ---
BugID: OAK-531
Summary: NodeBuilder deleted child nodes can come back
Description: |-
While working on OAK-520, I've noticed a problem with the NodeBuilder: when we delete an entire hierarchy of nodes and then recreate a part of it, some of the previously deleted nodes can come back.
This only happens when there are more than 3 levels of nodes.
So given a hierarchy of nodes /x/y/z, delete 'x' and then simply use the NodeBuilder to traverse down the same path: .child('x').child('y').
At this point the 'z' child reappears even though it was deleted before.
I'll attach a test case shortly.
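A minimal sketch of the described scenario in unit-test style (my reconstruction, not the attached test case; assumes a base NodeState {{BASE}} containing /x/y/z):
{code}
NodeBuilder root = new MemoryNodeBuilder(BASE); // BASE contains /x/y/z
root.removeNode("x");                           // delete the whole subtree
NodeBuilder y = root.child("x").child("y");     // recreate part of the same path
assertFalse(y.hasChildNode("z"));               // fails before the fix: 'z' comes back
{code}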
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
index 0168d6e..3efa027 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
@@ -187,22 +187,11 @@ public class MemoryNodeBuilder implements NodeBuilder {
}
}
- /**
- * Determine whether the named child has been removed. This is the
- * case when the write state has a corresponding {@code null} entry.
- * Assumes {@code read()}, {@code write()} needs not be called.
- * @param name name of the child
- * @return {@code true} iff a child with the given name has been removed
- */
- private boolean removed(String name) {
- return writeState != null && writeState.isRemoved(name);
- }
-
@Nonnull
private NodeState read() {
if (revision != root.revision) {
assert(!isRoot()); // root never gets here since revision == root.revision
- checkState(!parent.removed(name), "This node has already been removed");
+ checkState(!isRemoved(), "This node has already been removed");
parent.read();
// The builder could have been reset, need to re-get base state
@@ -232,7 +221,7 @@ public class MemoryNodeBuilder implements NodeBuilder {
private MutableNodeState write(long newRevision, boolean skipRemovedCheck) {
// make sure that all revision numbers up to the root gets updated
if (!isRoot()) {
- checkState(skipRemovedCheck || !parent.removed(name));
+ checkState(skipRemovedCheck || !isRemoved());
parent.write(newRevision, skipRemovedCheck);
}
@@ -244,7 +233,7 @@ public class MemoryNodeBuilder implements NodeBuilder {
writeState = parent.getWriteState(name);
if (writeState == null) {
- if (parent.removed(name)) {
+ if (isRemoved()) {
writeState = new MutableNodeState(null);
}
else {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-531_90c45a02.diff |
bugs-dot-jar_data_OAK-3510_01f5a26f | ---
BugID: OAK-3510
Summary: Troublesome ExternalIdentityRef.equals(Object) implementation
Description: |-
in the light of OAK-3508 i looked at the {{ExternalIdentifyRef}} class and found the following implementation of {{Object.equals(Object)}}:
{code}
public boolean equals(Object o) {
try {
// assuming that we never compare other types of classes
return this == o || string.equals(((ExternalIdentityRef) o).string);
} catch (Exception e) {
return false;
}
}
{code}
Since this class is public and exported as part of a public API, I don't think the assumption made in the code is justified. I would also argue that catching {{Exception}} is bad style, as is exception-driven development. In this particular case it was IMHO perfectly trivial to just get rid of the catch clause altogether.
diff --git a/oak-auth-external/src/main/java/org/apache/jackrabbit/oak/spi/security/authentication/external/ExternalIdentityRef.java b/oak-auth-external/src/main/java/org/apache/jackrabbit/oak/spi/security/authentication/external/ExternalIdentityRef.java
index ae67498..f3ffd6f 100644
--- a/oak-auth-external/src/main/java/org/apache/jackrabbit/oak/spi/security/authentication/external/ExternalIdentityRef.java
+++ b/oak-auth-external/src/main/java/org/apache/jackrabbit/oak/spi/security/authentication/external/ExternalIdentityRef.java
@@ -39,13 +39,13 @@ public class ExternalIdentityRef {
*/
public ExternalIdentityRef(@Nonnull String id, @CheckForNull String providerName) {
this.id = id;
- this.providerName = providerName;
+ this.providerName = (providerName == null || providerName.isEmpty()) ? null : providerName;
StringBuilder b = new StringBuilder();
escape(b, id);
- if (providerName != null && providerName.length() > 0) {
+ if (this.providerName != null) {
b.append(';');
- escape(b, providerName);
+ escape(b, this.providerName);
}
string = b.toString();
}
@@ -82,6 +82,7 @@ public class ExternalIdentityRef {
* @param str the string
* @return the reference
*/
+ @Nonnull
public static ExternalIdentityRef fromString(@Nonnull String str) {
int idx = str.indexOf(';');
if (idx < 0) {
@@ -99,7 +100,7 @@ public class ExternalIdentityRef {
* @param builder the builder
* @param str the string
*/
- private void escape(StringBuilder builder, CharSequence str) {
+ private static void escape(@Nonnull StringBuilder builder, @Nonnull CharSequence str) {
final int len = str.length();
for (int i=0; i<len; i++) {
char c = str.charAt(i);
@@ -119,16 +120,20 @@ public class ExternalIdentityRef {
}
/**
- * Tests if the given object is an external identity reference and if it's getString() is equal to this.
+ * Tests if the given object is an external identity reference and if it's
+ * getString() is equal to this. Note, that there is no need to
+ * include {@code id} and {@code provider} fields in the comparison as
+ * the string representation already incorporates both.
*/
@Override
public boolean equals(Object o) {
- try {
- // assuming that we never compare other types of classes
- return this == o || string.equals(((ExternalIdentityRef) o).string);
- } catch (Exception e) {
- return false;
+ if (this == o) {
+ return true;
+ }
+ if (o instanceof ExternalIdentityRef) {
+ return string.equals(((ExternalIdentityRef) o).string);
}
+ return false;
}
/**
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3510_01f5a26f.diff |
bugs-dot-jar_data_OAK-421_36e70bd7 | ---
BugID: OAK-421
Summary: NodeBuilder.reset might lead to inconsistent builder
Description: |-
The following test fails:
{code}
NodeBuilder root = new MemoryNodeBuilder(BASE);
NodeBuilder x = root.child("x");
NodeBuilder y = x.child("y");
root.reset(BASE);
assertTrue(root.hasChildNode("x"));
assertFalse(x.hasChildNode("y")); // fails
{code}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
index e96c9f3..1facf64 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
@@ -265,7 +265,7 @@ public class MemoryNodeBuilder implements NodeBuilder {
if (this == root) {
baseState = checkNotNull(newBase);
writeState = new MutableNodeState(baseState);
- revision++;
+ revision = 0;
} else {
throw new IllegalStateException("Cannot reset a non-root builder");
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-421_36e70bd7.diff |
bugs-dot-jar_data_OAK-1108_a8c925e0 | ---
BugID: OAK-1108
Summary: Query constraints marked as invalid in the case of an mvp
Description: |-
It seems that in the case of a query that has multiple constraints on the same property, like
bq. //*[(@prop = 'aaa' and @prop = 'bbb' and @prop = 'ccc')]
the filter is marked as invalid (_#isAlwaysFalse_) and the query returns no results.
This is incorrect and affects queries that search for multi-valued properties on nodes.
This comes from/affects OAK-1075.
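For illustration (not part of the original report): unlike in databases, a node with a multi-valued property can satisfy several equality constraints on the same property at once, e.g.:
{noformat}
/content/a  with  @prop = ['aaa', 'bbb', 'ccc']          (multi-valued)
//*[@prop = 'aaa' and @prop = 'bbb' and @prop = 'ccc']   -- should return /content/a
{noformat}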
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/index/FilterImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/index/FilterImpl.java
index 14d8639..6eb282e 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/index/FilterImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/index/FilterImpl.java
@@ -251,6 +251,13 @@ public class FilterImpl implements Filter {
PropertyValue oldLast = x.last;
switch (op) {
case EQUAL:
+ if (x.first != null && x.last == x.first && x.firstIncluding && x.lastIncluding) {
+ // there is already an equality condition on this property
+ // we will keep this, as it could be a multi-valued property
+ // (unlike in databases, "x = 1 and x = 2" can match a node
+ // if x is a multi-valued property with value "{1, 2}")
+ return;
+ }
x.first = maxValue(oldFirst, v);
x.firstIncluding = x.first == oldFirst ? x.firstIncluding : true;
x.last = minValue(oldLast, v);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1108_a8c925e0.diff |
bugs-dot-jar_data_OAK-1899_df59fb45 | ---
BugID: OAK-1899
Summary: Ordered index fails with old index content
Description: |-
With the latest changes, the ordered index no longer works with old index data. When the latest Oak 1.0.2 snapshot is run against an Oak 1.0.0 repository with an existing ordered index, the index fails with the exception below.
As a workaround, the ordered index can be manually re-built. Either the index re-build needs to be automatic, or the ordered index needs to work with the old index content.
{noformat}
java.lang.IndexOutOfBoundsException: index (3) must be less than size (1)
at com.google.common.base.Preconditions.checkElementIndex(Preconditions.java:306)
at com.google.common.base.Preconditions.checkElementIndex(Preconditions.java:285)
at org.apache.jackrabbit.oak.plugins.segment.SegmentPropertyState.getValue(SegmentPropertyState.java:157)
at org.apache.jackrabbit.oak.plugins.index.property.strategy.OrderedContentMirrorStoreStrategy.getPropertyNext(OrderedContentMirrorStoreStrategy.java:1024)
{noformat}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java
index 3f78b78..abb7e5a 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java
@@ -23,6 +23,7 @@ import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_CONTE
import java.util.Collections;
import java.util.Deque;
import java.util.Iterator;
+import java.util.List;
import java.util.NoSuchElementException;
import java.util.Random;
@@ -47,6 +48,7 @@ import org.slf4j.LoggerFactory;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
/**
* Same as for {@link ContentMirrorStoreStrategy} but the order of the keys is kept by using the
@@ -149,6 +151,7 @@ public class OrderedContentMirrorStoreStrategy extends ContentMirrorStoreStrateg
@Override
void prune(final NodeBuilder index, final Deque<NodeBuilder> builders, final String key) {
+ LOG.debug("prune() - deleting: {}", key);
for (NodeBuilder node : builders) {
if (node.hasProperty("match") || node.getChildNodeCount(1) > 0) {
return;
@@ -166,11 +169,25 @@ public class OrderedContentMirrorStoreStrategy extends ContentMirrorStoreStrateg
walkedLanes
);
lane0Next = getPropertyNext(walkedLanes[0]);
+ if (LOG.isDebugEnabled()) {
+ for (int i = 0; i < walkedLanes.length; i++) {
+ LOG.debug("prune() - walkedLanes[{}]: {}", i,
+ walkedLanes[i].getName());
+ }
+ }
for (int lane = walkedLanes.length - 1; lane >= 0; lane--) {
prevNext = getPropertyNext(walkedLanes[lane], lane);
if (key.equals(prevNext)) {
// if it's actually pointing to us let's deal with it
currNext = getPropertyNext(node, lane);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(
+ "prune() - setting next for '{}' on lane '{}' with '{}'",
+ new Object[] {
+ walkedLanes[lane].getName(),
+ lane,
+ currNext});
+ }
setPropertyNext(index.getChildNode(walkedLanes[lane].getName()),
currNext, lane);
}
@@ -997,7 +1014,23 @@ public class OrderedContentMirrorStoreStrategy extends ContentMirrorStoreStrateg
if (node != null && value != null && lane >= 0 && lane < OrderedIndex.LANES) {
PropertyState next = node.getProperty(NEXT);
if (next != null) {
- String[] values = Iterables.toArray(next.getValue(Type.STRINGS), String.class);
+ String[] values;
+ if (next.isArray()) {
+ values = Iterables.toArray(next.getValue(Type.STRINGS), String.class);
+ if (values.length < OrderedIndex.LANES) {
+ // it could be we increased the number of lanes and running on some existing
+ // content
+ LOG.debug("topping-up the number of lanes.");
+ List<String> vv = Lists.newArrayList(values);
+ for (int i = vv.size(); i <= OrderedIndex.LANES; i++) {
+ vv.add("");
+ }
+ values = vv.toArray(new String[0]);
+ }
+ } else {
+ values = Iterables.toArray(EMPTY_NEXT, String.class);
+ values[0] = next.getValue(Type.STRING);
+ }
values[lane] = value;
setPropertyNext(node, values);
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1899_df59fb45.diff |
bugs-dot-jar_data_OAK-1784_2426deae | ---
BugID: OAK-1784
Summary: Async index update persists conflict markers
Description: "A long running test I performed yesterday failed with a FileNotFoundException
in the lucene index. After analyzing the issue it turned out the async index update
persisted conflict markers introduced by a rebase call. So far I'm not able to
reproduce it with a simpler test setup and in a shorter time (the initial
test failed after 10 hours). Given the way the async index update works, there shouldn't
be any conflicts, because it's the only component writing into this location of
the repository. \n\nAs an immediate workaround, I'd like to add the AnnotatingConflictHandler
& ConflictValidator combo to the merge call to make sure a commit with conflict
markers does not get persisted."
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
index c7538f7..9ce60a3 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
@@ -41,10 +41,15 @@ import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.api.jmx.IndexStatsMBean;
+import org.apache.jackrabbit.oak.plugins.commit.AnnotatingConflictHandler;
+import org.apache.jackrabbit.oak.plugins.commit.ConflictHook;
+import org.apache.jackrabbit.oak.plugins.commit.ConflictValidatorProvider;
import org.apache.jackrabbit.oak.plugins.value.Conversions;
import org.apache.jackrabbit.oak.spi.commit.CommitHook;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.CompositeHook;
import org.apache.jackrabbit.oak.spi.commit.EditorDiff;
+import org.apache.jackrabbit.oak.spi.commit.EditorHook;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
@@ -226,7 +231,10 @@ public class AsyncIndexUpdate implements Runnable {
private static CommitHook newCommitHook(final String name,
final PropertyState state) throws CommitFailedException {
- return new CommitHook() {
+ return new CompositeHook(
+ new ConflictHook(new AnnotatingConflictHandler()),
+ new EditorHook(new ConflictValidatorProvider()),
+ new CommitHook() {
@Override
@Nonnull
public NodeState processCommit(NodeState before, NodeState after,
@@ -241,7 +249,7 @@ public class AsyncIndexUpdate implements Runnable {
throw CONCURRENT_UPDATE;
}
}
- };
+ });
}
private static void preAsyncRun(NodeStore store, String name) throws CommitFailedException {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1784_2426deae.diff |
bugs-dot-jar_data_OAK-1899_b6f89048 | ---
BugID: OAK-1899
Summary: Ordered index fails with old index content
Description: |-
With the latest changes, the ordered index no longer works with old index data. When the latest Oak 1.0.2 snapshot is run against an Oak 1.0.0 repository with an existing ordered index, the index fails with the exception below.
As a workaround, the ordered index can be manually re-built. Either the index re-build needs to be automatic, or the ordered index needs to work with the old index content.
{noformat}
java.lang.IndexOutOfBoundsException: index (3) must be less than size (1)
at com.google.common.base.Preconditions.checkElementIndex(Preconditions.java:306)
at com.google.common.base.Preconditions.checkElementIndex(Preconditions.java:285)
at org.apache.jackrabbit.oak.plugins.segment.SegmentPropertyState.getValue(SegmentPropertyState.java:157)
at org.apache.jackrabbit.oak.plugins.index.property.strategy.OrderedContentMirrorStoreStrategy.getPropertyNext(OrderedContentMirrorStoreStrategy.java:1024)
{noformat}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java
index 0fd121c..3f78b78 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/strategy/OrderedContentMirrorStoreStrategy.java
@@ -1021,8 +1021,11 @@ public class OrderedContentMirrorStoreStrategy extends ContentMirrorStoreStrateg
String next = "";
PropertyState ps = state.getProperty(NEXT);
if (ps != null) {
- next = (lane < OrderedIndex.LANES) ? ps.getValue(Type.STRING, lane)
- : "";
+ if (ps.isArray()) {
+ next = ps.getValue(Type.STRING, Math.min(ps.count() - 1, lane));
+ } else {
+ next = ps.getValue(Type.STRING);
+ }
}
return next;
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1899_b6f89048.diff |
bugs-dot-jar_data_OAK-1024_ecc5bdfd | ---
BugID: OAK-1024
Summary: Full-text search on the traversing index fails if the condition contains
a slash
Description: "A full-text search on the traversing index falls back to a sort of manual
evaluation of results. \nThis is handled by the _FullTextTerm_ class, and it appears
that it passes the constraint text through a cleanup process where it strips most
of the characters that are neither _Character.isLetterOrDigit(c)_ nor in the list
_+-:&_\n\nI'm not exactly sure where this list comes from, but I see the '/' character
is missing which causes a certain type of query to fail.\n\nExample:\n{code}\n//*[jcr:contains(.,
'text/plain')]\n{code}\n\n"
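For reference, this is how such a constraint reaches the query engine through the JCR API; the query string is the one from the description, the surrounding method is only illustrative:
{code}
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.query.Query;
import javax.jcr.query.QueryManager;
import javax.jcr.query.QueryResult;

public final class ContainsSlashExample {
    // Nodes containing the exact token 'text/plain' should be returned;
    // without the fix the '/' is stripped from the full-text term.
    public static QueryResult findPlainText(Session session) throws RepositoryException {
        QueryManager qm = session.getWorkspace().getQueryManager();
        Query query = qm.createQuery("//*[jcr:contains(., 'text/plain')]", Query.XPATH);
        return query.execute();
    }
}
{code}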
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/FullTextSearchImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/FullTextSearchImpl.java
index d22c83b..2cf9cef 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/FullTextSearchImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/FullTextSearchImpl.java
@@ -184,6 +184,10 @@ public class FullTextSearchImpl extends ConstraintImpl {
appendString(buff, p);
} else {
String path = selector.currentPath();
+ if (!PathUtils.denotesRoot(path)) {
+ appendString(buff,
+ PropertyValues.newString(PathUtils.getName(path)));
+ }
if (relativePath != null) {
path = PathUtils.concat(path, relativePath);
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1024_ecc5bdfd.diff |
bugs-dot-jar_data_OAK-1719_c3773d53 | ---
BugID: OAK-1719
Summary: Missing commit hooks in upgrade
Description: There's a TODO in the RepositoryUpgrade class about missing commit hooks.
For example the PermissionHook isn't currently run as a part of the upgrade, which
breaks permission evaluation even though the actual ACL nodes are present after
the upgrade.
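The general shape of such a hook composition is sketched below: editors are combined into a single {{EditorHook}} and passed to {{NodeStore#merge}}. This is only an illustration of the idea, not the exact set of hooks the upgrade needs, and the package names of the index providers are assumptions:
{code}
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateProvider;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.nodetype.TypeEditorProvider;
import org.apache.jackrabbit.oak.spi.commit.CommitHook;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.CompositeEditorProvider;
import org.apache.jackrabbit.oak.spi.commit.EditorHook;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeStore;

public final class UpgradeHookSketch {
    static void merge(NodeStore target, NodeBuilder builder) throws CommitFailedException {
        // Type validation plus index updates, applied through one editor-based hook.
        CommitHook hook = new EditorHook(new CompositeEditorProvider(
                new TypeEditorProvider(false),
                new IndexUpdateProvider(new PropertyIndexEditorProvider())));
        target.merge(builder, hook, CommitInfo.EMPTY);
    }
}
{code}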
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
index e47aba3..9c24468 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
@@ -22,7 +22,6 @@ import java.io.InputStream;
import java.util.List;
import java.util.Map;
import java.util.Properties;
-
import javax.jcr.NamespaceException;
import javax.jcr.RepositoryException;
import javax.jcr.security.Privilege;
@@ -47,7 +46,6 @@ import org.apache.jackrabbit.oak.plugins.name.NamespaceConstants;
import org.apache.jackrabbit.oak.plugins.name.Namespaces;
import org.apache.jackrabbit.oak.plugins.nodetype.TypeEditorProvider;
import org.apache.jackrabbit.oak.plugins.nodetype.write.InitialContent;
-import org.apache.jackrabbit.oak.security.authorization.AuthorizationConfigurationImpl;
import org.apache.jackrabbit.oak.spi.commit.CommitHook;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.CompositeEditorProvider;
@@ -71,7 +69,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.google.common.base.Preconditions.checkState;
-import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Lists.newArrayListWithCapacity;
import static com.google.common.collect.Maps.newHashMap;
import static java.util.Arrays.asList;
@@ -225,8 +222,7 @@ public class RepositoryUpgrade {
NodeState root = builder.getNodeState();
copyVersionStore(builder, root, uriToPrefix, idxToPrefix);
- String workspaceName =
- copyWorkspaces(builder, root, uriToPrefix, idxToPrefix);
+ copyWorkspaces(builder, root, uriToPrefix, idxToPrefix);
logger.info("Applying default commit hooks");
String groupsPath;
@@ -237,15 +233,14 @@ public class RepositoryUpgrade {
groupsPath = UserConstants.DEFAULT_GROUP_PATH;
}
// TODO: default hooks?
- List<CommitHook> hooks = newArrayList();
- hooks.add(new EditorHook(new CompositeEditorProvider(
- new GroupEditorProvider(groupsPath),
+ CommitHook hook = new CompositeHook(
+ new EditorHook(new GroupEditorProvider(groupsPath)),
+ new EditorHook(new CompositeEditorProvider(
new TypeEditorProvider(false),
new IndexUpdateProvider(new CompositeIndexEditorProvider(
new ReferenceEditorProvider(),
new PropertyIndexEditorProvider())))));
- hooks.addAll(new AuthorizationConfigurationImpl().getCommitHooks(workspaceName));
- target.merge(builder, CompositeHook.compose(hooks), CommitInfo.EMPTY);
+ target.merge(builder, hook, CommitInfo.EMPTY);
} catch (Exception e) {
throw new RepositoryException("Failed to copy content", e);
}
@@ -562,7 +557,7 @@ public class RepositoryUpgrade {
"/jcr:system/jcr:activities", copyBinariesByReference));
}
- private String copyWorkspaces(
+ private void copyWorkspaces(
NodeBuilder builder, NodeState root,
Map<String, String> uriToPrefix, Map<Integer, String> idxToPrefix)
throws RepositoryException, IOException {
@@ -587,7 +582,7 @@ public class RepositoryUpgrade {
}
}
- return name;
+ // TODO: Copy all the active open-scoped locks
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1719_c3773d53.diff |
bugs-dot-jar_data_OAK-2235_29d3d8f1 | ---
BugID: OAK-2235
Summary: Lucene index not created if no node is indexed
Description: |-
If a Lucene property index is defined for a property which is not present in any of the nodes, then {{LuceneIndexWriter}} would not create any Lucene index for it.
For example, if we have an index for {{foo}} and none of the nodes has the property {{foo}} set, then {{LuceneIndexWriter}} would not create an {{IndexWriter}} and hence no directory would be created. Later, when the system performs a query like {{select jcr:path from nt:base where foo = 'bar'}}, {{LucenePropertyIndex}} would not participate in the query as no Lucene index would be found, and the system would revert to traversal.
As a fix, the Lucene index should still be created even if it does not contain any documents.
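The effect of the fix can be pictured with plain Lucene: opening and closing an {{IndexWriter}} once, without adding any document, still persists an empty but valid index that later readers can find. The sketch below uses a Lucene 5-era API; other Lucene versions have slightly different constructors (older ones also take a {{Version}} argument):
{code}
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public final class EmptyIndexSketch {
    public static void main(String[] args) throws Exception {
        Directory directory = new RAMDirectory();
        try (IndexWriter writer =
                new IndexWriter(directory, new IndexWriterConfig(new StandardAnalyzer()))) {
            // no documents added on purpose
        }
        // An empty index now exists in the directory, so readers can still find it.
        System.out.println(DirectoryReader.indexExists(directory)); // true
    }
}
{code}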
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java
index 5277652..e13e7ef 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditor.java
@@ -38,6 +38,7 @@ import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.index.IndexEditor;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateCallback;
+import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState;
import org.apache.jackrabbit.oak.plugins.nodetype.TypePredicate;
import org.apache.jackrabbit.oak.spi.commit.Editor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
@@ -122,6 +123,9 @@ public class LuceneIndexEditor implements IndexEditor {
@Override
public void enter(NodeState before, NodeState after)
throws CommitFailedException {
+ if (EmptyNodeState.MISSING_NODE == before && parent == null){
+ context.enableReindexMode();
+ }
}
@Override
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorContext.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorContext.java
index 686db73..a25bc41 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorContext.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndexEditorContext.java
@@ -101,6 +101,8 @@ public class LuceneIndexEditorContext {
private final IndexUpdateCallback updateCallback;
+ private boolean reindex;
+
LuceneIndexEditorContext(NodeBuilder definition, Analyzer analyzer, IndexUpdateCallback updateCallback) {
this.definitionBuilder = definition;
this.definition = new IndexDefinition(definitionBuilder);
@@ -132,6 +134,14 @@ public class LuceneIndexEditorContext {
* close writer if it's not null
*/
void closeWriter() throws IOException {
+ //If reindex or fresh index and write is null on close
+ //it indicates that the index is empty. In such a case trigger
+ //creation of write such that an empty Lucene index state is persisted
+ //in directory
+ if (reindex && writer == null){
+ getWriter();
+ }
+
if (writer != null) {
writer.close();
@@ -144,6 +154,10 @@ public class LuceneIndexEditorContext {
}
}
+ public void enableReindexMode(){
+ reindex = true;
+ }
+
public long incIndexedNodes() {
indexedNodes++;
return indexedNodes;
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2235_29d3d8f1.diff |
bugs-dot-jar_data_OAK-1770_192ee9e4 | ---
BugID: OAK-1770
Summary: Document split suppressed with steady load on many cluster nodes
Description: Document split is suppressed when there is a steady write load on many
cluster nodes. The document grows bigger over time and leads to poor performance.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
index bb5984c..bd753f7 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
@@ -969,7 +969,8 @@ public final class NodeDocument extends Document implements CachedNodeDocument{
UpdateUtils.applyChanges(oldDoc, old, context.getRevisionComparator());
setSplitDocProps(this, oldDoc, old, high);
// only split if enough of the data can be moved to old document
- if (oldDoc.getMemory() > getMemory() * SPLIT_RATIO) {
+ if (oldDoc.getMemory() > getMemory() * SPLIT_RATIO
+ || numValues >= NUM_REVS_THRESHOLD) {
splitOps.add(old);
} else {
main = null;
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1770_192ee9e4.diff |
bugs-dot-jar_data_OAK-1624_6d8146f8 | ---
BugID: OAK-1624
Summary: Item names with trailing spaces should not be allowed
Description: |-
the following should fail:
{code}
Node hello = session.getRootNode().addNode("hello");
session.save();
Node illegal = hello.addNode("test "); <-- here
session.save();
assertEquals("/hello/test ", illegal.getPath()); <-- and here
Node other = session.getNode("/hello/test "); <-- and here
assertTrue(other.isSame(illegal));
assertTrue(session.nodeExists("/hello/test ")); <-- and here
{code}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/name/Namespaces.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/name/Namespaces.java
index 0ad6022..3b4219c 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/name/Namespaces.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/name/Namespaces.java
@@ -244,8 +244,12 @@ public class Namespaces implements NamespaceConstants {
for (int i = 0; i < local.length(); i++) {
char ch = local.charAt(i);
- if ("/:[]|*".indexOf(ch) != -1) { // TODO: XMLChar check
- return false;
+ if (i == 0 && Character.isWhitespace(ch)) {
+ return false; // leading whitespace
+ } else if (i == local.length() - 1 && Character.isWhitespace(ch)) {
+ return false; // trailing whitespace
+ } else if ("/:[]|*".indexOf(ch) != -1) { // TODO: XMLChar check
+ return false; // invalid name character
}
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1624_6d8146f8.diff |
bugs-dot-jar_data_OAK-1894_35562cce | ---
BugID: OAK-1894
Summary: PropertyIndex only considers the cost of a single indexed property
Description: |-
The existing PropertyIndex loops through the PropertyRestriction objects in the Filter and essentially only calculates the cost of the first indexed property it encounters. That is not necessarily the first property in the query, since Filter.propertyRestrictions is a HashMap.
More confusingly, the plan for a query with multiple indexed properties outputs *all* indexed properties, even though only the first one is used.
For queries with multiple indexed properties, the cheapest property index should be used in all three relevant places: when calculating the cost, when executing the query, and when producing the plan.
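One way to observe which index the cost calculation actually picked is an {{explain}} query over the {{plan}} column; the property names here are hypothetical and both are assumed to be covered by property indexes:
{code}
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.query.Query;
import javax.jcr.query.QueryManager;
import javax.jcr.query.RowIterator;

public final class ExplainPlanExample {
    static void printPlan(Session session) throws RepositoryException {
        QueryManager qm = session.getWorkspace().getQueryManager();
        Query query = qm.createQuery(
                "explain select * from [nt:base] where [rareProp] = 'x' and [commonProp] = 'y'",
                Query.JCR_SQL2);
        RowIterator rows = query.execute().getRows();
        while (rows.hasNext()) {
            // The plan should name the cheaper of the two property indexes.
            System.out.println(rows.nextRow().getValue("plan").getString());
        }
    }
}
{code}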
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/PropertyIndex.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/PropertyIndex.java
index 914881c..443f0cb 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/PropertyIndex.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/PropertyIndex.java
@@ -30,6 +30,8 @@ import org.apache.jackrabbit.oak.spi.query.Filter;
import org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction;
import org.apache.jackrabbit.oak.spi.query.QueryIndex;
import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.common.base.Charsets;
import com.google.common.collect.Iterables;
@@ -90,6 +92,8 @@ class PropertyIndex implements QueryIndex {
*/
private static final String EMPTY_TOKEN = ":";
+ private static final Logger LOG = LoggerFactory.getLogger(PropertyIndex.class);
+
static Set<String> encode(PropertyValue value) {
if (value == null) {
return null;
@@ -113,6 +117,37 @@ class PropertyIndex implements QueryIndex {
return values;
}
+ private Cheapest findCheapestProperty(Filter filter, PropertyIndexLookup lookup) {
+ Cheapest cost = new Cheapest();
+ for (PropertyRestriction pr : filter.getPropertyRestrictions()) {
+ String propertyName = PathUtils.getName(pr.propertyName);
+ double propertyCost = Double.POSITIVE_INFINITY;
+ // TODO support indexes on a path
+ // currently, only indexes on the root node are supported
+ if (lookup.isIndexed(propertyName, "/", filter)) {
+ if (pr.firstIncluding && pr.lastIncluding
+ && pr.first != null && pr.first.equals(pr.last)) {
+ // "[property] = $value"
+ propertyCost = lookup.getCost(filter, propertyName, pr.first);
+ } else if (pr.list != null) {
+ propertyCost = 0;
+ for (PropertyValue p : pr.list) {
+ propertyCost += lookup.getCost(filter, propertyName, p);
+ }
+ } else {
+ // processed as "[property] is not null"
+ propertyCost = lookup.getCost(filter, propertyName, null);
+ }
+ }
+ LOG.debug("property cost for {} is {}", propertyName, propertyCost);
+ if (propertyCost < cost.cost) {
+ cost.cost = propertyCost;
+ cost.propertyRestriction = pr;
+ }
+ }
+ return cost;
+ }
+
//--------------------------------------------------------< QueryIndex >--
@Override
@@ -142,29 +177,9 @@ class PropertyIndex implements QueryIndex {
}
PropertyIndexLookup lookup = getLookup(root);
- for (PropertyRestriction pr : filter.getPropertyRestrictions()) {
- String propertyName = PathUtils.getName(pr.propertyName);
- // TODO support indexes on a path
- // currently, only indexes on the root node are supported
- if (lookup.isIndexed(propertyName, "/", filter)) {
- if (pr.firstIncluding && pr.lastIncluding
- && pr.first != null && pr.first.equals(pr.last)) {
- // "[property] = $value"
- return lookup.getCost(filter, propertyName, pr.first);
- } else if (pr.list != null) {
- double cost = 0;
- for (PropertyValue p : pr.list) {
- cost += lookup.getCost(filter, propertyName, p);
- }
- return cost;
- } else {
- // processed as "[property] is not null"
- return lookup.getCost(filter, propertyName, null);
- }
- }
- }
- // not an appropriate index
- return Double.POSITIVE_INFINITY;
+ Cheapest cheapest = findCheapestProperty(filter, lookup);
+ LOG.debug("Cheapest property cost is {} for property {}", cheapest.cost, cheapest.propertyRestriction != null ? cheapest.propertyRestriction.propertyName : null);
+ return cheapest.cost;
}
@Override
@@ -173,7 +188,11 @@ class PropertyIndex implements QueryIndex {
PropertyIndexLookup lookup = getLookup(root);
int depth = 1;
- for (PropertyRestriction pr : filter.getPropertyRestrictions()) {
+
+ Cheapest cheapest = findCheapestProperty(filter, lookup);
+ PropertyRestriction pr = cheapest.propertyRestriction;
+
+ if (pr != null) {
String propertyName = PathUtils.getName(pr.propertyName);
depth = PathUtils.getDepth(pr.propertyName);
// TODO support indexes on a path
@@ -184,7 +203,6 @@ class PropertyIndex implements QueryIndex {
&& pr.first != null && pr.first.equals(pr.last)) {
// "[property] = $value"
paths = lookup.query(filter, propertyName, pr.first);
- break;
} else if (pr.list != null) {
for (PropertyValue pv : pr.list) {
Iterable<String> p = lookup.query(filter, propertyName, pv);
@@ -194,11 +212,9 @@ class PropertyIndex implements QueryIndex {
paths = Iterables.concat(paths, p);
}
}
- break;
} else {
// processed as "[property] is not null"
paths = lookup.query(filter, propertyName, null);
- break;
}
}
}
@@ -217,7 +233,10 @@ class PropertyIndex implements QueryIndex {
StringBuilder buff = new StringBuilder("property");
StringBuilder notIndexed = new StringBuilder();
PropertyIndexLookup lookup = getLookup(root);
- for (PropertyRestriction pr : filter.getPropertyRestrictions()) {
+ Cheapest cheapest = findCheapestProperty(filter, lookup);
+ PropertyRestriction pr = cheapest.propertyRestriction;
+
+ if (pr != null) {
String propertyName = PathUtils.getName(pr.propertyName);
// TODO support indexes on a path
// currently, only indexes on the root node are supported
@@ -251,4 +270,9 @@ class PropertyIndex implements QueryIndex {
return buff.toString();
}
+ private static class Cheapest {
+ private double cost = Double.POSITIVE_INFINITY;
+ private PropertyRestriction propertyRestriction;
+ }
+
}
\ No newline at end of file
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1894_35562cce.diff |
bugs-dot-jar_data_OAK-2929_a2950285 | ---
BugID: OAK-2929
Summary: Parent of unseen children must not be removable
Description: |-
With OAK-2673, it's now possible to have hidden intermediate nodes created concurrently.
So, a scenario like:
{noformat}
start -> /:hidden
N1 creates /:hiddent/parent/node1
N2 creates /:hidden/parent/node2
{noformat}
is allowed.
But if N2's creation of {{parent}} was persisted later than N1's, then N2 is currently able to delete {{parent}} even though {{node1}} exists.
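The patch below replaces a merge of two pre-sorted key sets with a single {{TreeSet}} ordered newest-first. The difference matters whenever the inputs are not both sorted by the comparator used for the merge, which a stand-alone sketch (integers standing in for revisions) makes visible:
{code}
import java.util.Comparator;
import java.util.List;
import java.util.TreeSet;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;

public final class NewestPickSketch {
    public static void main(String[] args) {
        Comparator<Integer> newestFirst = Comparator.<Integer>naturalOrder().reversed();
        List<Integer> revisions = ImmutableList.of(1, 5);   // ascending, not ordered by newestFirst
        List<Integer> commitRoots = ImmutableList.of(4, 2); // descending, ordered by newestFirst

        // mergeSorted assumes both inputs are already sorted by the given comparator,
        // so the first ("newest") element it yields can be wrong here: prints 4
        System.out.println(Iterables.mergeSorted(
                ImmutableList.of(revisions, commitRoots), newestFirst).iterator().next());

        // Collecting everything into one TreeSet ordered newest-first is robust: prints 5
        TreeSet<Integer> all = new TreeSet<>(newestFirst);
        all.addAll(revisions);
        all.addAll(commitRoots);
        System.out.println(all.first());
    }
}
{code}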
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
index 420ba29..021cfa2 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
@@ -37,7 +37,6 @@ import javax.annotation.Nullable;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.AbstractIterator;
-import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.google.common.collect.Queues;
import org.apache.jackrabbit.oak.cache.CacheValue;
@@ -58,6 +57,7 @@ import com.google.common.collect.Sets;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
+import static java.util.Collections.reverseOrder;
import static org.apache.jackrabbit.oak.plugins.document.Collection.NODES;
import static org.apache.jackrabbit.oak.plugins.document.StableRevisionComparator.REVERSE;
import static org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
@@ -740,11 +740,11 @@ public final class NodeDocument extends Document implements CachedNodeDocument{
Revision newestRev = null;
// check local commits first
- SortedMap<Revision, String> revisions = getLocalRevisions();
- SortedMap<Revision, String> commitRoots = getLocalCommitRoot();
- Iterator<Revision> it = filter(Iterables.mergeSorted(
- ImmutableList.of(revisions.keySet(), commitRoots.keySet()),
- revisions.comparator()), predicate).iterator();
+ Comparator<Revision> comp = reverseOrder(context.getRevisionComparator());
+ SortedSet<Revision> revisions = Sets.newTreeSet(comp);
+ revisions.addAll(getLocalRevisions().keySet());
+ revisions.addAll(getLocalCommitRoot().keySet());
+ Iterator<Revision> it = filter(revisions, predicate).iterator();
if (it.hasNext()) {
newestRev = it.next();
} else {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2929_a2950285.diff |
bugs-dot-jar_data_OAK-4387_ca05fd06 | ---
BugID: OAK-4387
Summary: 'XPath: querying for nodes named "text", "element", and "rep:excerpt" fails'
Description: |-
Queries that contain "text" or "element" as a node name currently fail, because the parser assumes "text()" / "element(...)". Example query that fails:
{noformat}
/jcr:root/content/text/jcr:content//element(*,nt:unstructured)
{noformat}
A workaround is to use the escape mechanism, that is:
{noformat}
/jcr:root/tmp/_x0074_ext/jcr:content//element(*,nt:unstructured)
{noformat}
It looks like '(' and ')' are valid characters in node names, but to query for those characters, they need to be escaped.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java
index d64d7cf..98bcc5e 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java
@@ -192,46 +192,57 @@ public class XPathToSQL2Converter {
currentSelector.path = "/";
}
}
- } else if (readIf("text")) {
- // "...text()"
- currentSelector.isChild = false;
- pathPattern += "jcr:xmltext";
- read("(");
- read(")");
- if (currentSelector.isDescendant) {
- currentSelector.nodeName = "jcr:xmltext";
- } else {
- currentSelector.path = PathUtils.concat(currentSelector.path, "jcr:xmltext");
- }
- } else if (readIf("element")) {
- // "...element(..."
- read("(");
- if (readIf(")")) {
- // any
- pathPattern += "%";
- } else {
- if (readIf("*")) {
- // any
- pathPattern += "%";
+ } else if (currentTokenType == IDENTIFIER) {
+ // probably a path restriction
+ // String name = readPathSegment();
+ String identifier = readIdentifier();
+ if (readIf("(")) {
+ if ("text".equals(identifier)) {
+ // "...text()"
+ currentSelector.isChild = false;
+ pathPattern += "jcr:xmltext";
+ read(")");
+ if (currentSelector.isDescendant) {
+ currentSelector.nodeName = "jcr:xmltext";
+ } else {
+ currentSelector.path = PathUtils.concat(currentSelector.path, "jcr:xmltext");
+ }
+ } else if ("element".equals(identifier)) {
+ // "...element(..."
+ if (readIf(")")) {
+ // any
+ pathPattern += "%";
+ } else {
+ if (readIf("*")) {
+ // any
+ pathPattern += "%";
+ } else {
+ String name = readPathSegment();
+ pathPattern += name;
+ appendNodeName(name);
+ }
+ if (readIf(",")) {
+ currentSelector.nodeType = readIdentifier();
+ }
+ read(")");
+ }
+ } else if ("rep:excerpt".equals(identifier)) {
+ readOpenDotClose(false);
+ rewindSelector();
+ Expression.Property p = new Expression.Property(currentSelector, "rep:excerpt", false);
+ statement.addSelectColumn(p);
} else {
- String name = readPathSegment();
- pathPattern += name;
- appendNodeName(name);
- }
- if (readIf(",")) {
- currentSelector.nodeType = readIdentifier();
+ throw getSyntaxError();
}
- read(")");
+ } else {
+ String name = ISO9075.decode(identifier);
+ pathPattern += name;
+ appendNodeName(name);
}
} else if (readIf("@")) {
rewindSelector();
Expression.Property p = readProperty();
statement.addSelectColumn(p);
- } else if (readIf("rep:excerpt")) {
- rewindSelector();
- readExcerpt();
- Expression.Property p = new Expression.Property(currentSelector, "rep:excerpt", false);
- statement.addSelectColumn(p);
} else if (readIf("(")) {
rewindSelector();
do {
@@ -239,7 +250,7 @@ public class XPathToSQL2Converter {
Expression.Property p = readProperty();
statement.addSelectColumn(p);
} else if (readIf("rep:excerpt")) {
- readExcerpt();
+ readOpenDotClose(true);
Expression.Property p = new Expression.Property(currentSelector, "rep:excerpt", false);
statement.addSelectColumn(p);
} else if (readIf("rep:spellcheck")) {
@@ -249,7 +260,7 @@ public class XPathToSQL2Converter {
Expression.Property p = new Expression.Property(currentSelector, "rep:spellcheck()", false);
statement.addSelectColumn(p);
} else if (readIf("rep:suggest")) {
- readExcerpt();
+ readOpenDotClose(true);
Expression.Property p = new Expression.Property(currentSelector, "rep:suggest()", false);
statement.addSelectColumn(p);
}
@@ -257,11 +268,6 @@ public class XPathToSQL2Converter {
if (!readIf(")")) {
return convertToUnion(query, statement, startParseIndex - 1);
}
- } else if (currentTokenType == IDENTIFIER) {
- // path restriction
- String name = readPathSegment();
- pathPattern += name;
- appendNodeName(name);
} else if (readIf(".")) {
// just "." this is simply ignored, so that
// "a/./b" is the same as "a/b"
@@ -717,13 +723,19 @@ public class XPathToSQL2Converter {
return new Expression.Property(currentSelector, readPathSegment(), false);
}
- private void readExcerpt() throws ParseException {
- read("(");
- if (!readIf(")")) {
- // only rep:excerpt(.) and rep:excerpt() are currently supported
- read(".");
- read(")");
+ /**
+ * Read open bracket (optional), and optional dot, and close bracket.
+ *
+ * @param readOpenBracket whether to read the open bracket (false if this
+ * was already read)
+ * @throws ParseException if close bracket or the dot were not read
+ */
+ private void readOpenDotClose(boolean readOpenBracket) throws ParseException {
+ if (readOpenBracket) {
+ read("(");
}
+ readIf(".");
+ read(")");
}
private String readPathSegment() throws ParseException {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-4387_ca05fd06.diff |
bugs-dot-jar_data_OAK-3733_a5ff019e | ---
BugID: OAK-3733
Summary: Sometimes hierarchy conflict between concurrent add/delete isn't detected
Description: |-
I'm not sure of the exact set of events that led to an incident on one of our test clusters. The cluster is running 3 AEM instances based on an Oak build at 1.3.10.r1713699, backed by a single mongo 3 instance.
Unfortunately, we found the issue too late and the logs had rolled over. Here's the exception that showed up over and over as workflow jobs were being processed (or trying to be):
{noformat}
....
at java.lang.Thread.run(Thread.java:745)
Caused by: javax.jcr.InvalidItemStateException: OakMerge0004: OakMerge0004: The node 8:/oak:index/event.job.topic/:index/com%2Fadobe%2Fgranite%2Fworkflow%2Ftransient%2Fjob%2Fetc%2Fworkflow%2Fmodels%2Fdam-xmp-writeback%2Fjcr_content%2Fmodel/var/eventing/jobs/assigned was already added in revision
r151233e54e1-0-4, before
r15166378b6a-0-2 (retries 5, 6830 ms)
at org.apache.jackrabbit.oak.api.CommitFailedException.asRepositoryException(CommitFailedException.java:239)
at org.apache.jackrabbit.oak.api.CommitFailedException.asRepositoryException(CommitFailedException.java:212)
at org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.newRepositoryException(SessionDelegate.java:669)
at org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.save(SessionDelegate.java:495)
at org.apache.jackrabbit.oak.jcr.session.SessionImpl$8.performVoid(SessionImpl.java:419)
at org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.performVoid(SessionDelegate.java:273)
at org.apache.jackrabbit.oak.jcr.session.SessionImpl.save(SessionImpl.java:416)
at org.apache.sling.jcr.resource.internal.helper.jcr.JcrResourceProvider.commit(JcrResourceProvider.java:634)
... 16 common frames omitted
Caused by: org.apache.jackrabbit.oak.api.CommitFailedException: OakMerge0004: OakMerge0004: The node 8:/oak:index/event.job.topic/:index/com%2Fadobe%2Fgranite%2Fworkflow%2Ftransient%2Fjob%2Fetc%2Fworkflow%2Fmodels%2Fdam-xmp-writeback%2Fjcr_content%2Fmodel/var/eventing/jobs/assigned was already added in revision
r151233e54e1-0-4, before
r15166378b6a-0-2 (retries 5, 6830 ms)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.merge0(DocumentNodeStoreBranch.java:200)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.merge(DocumentNodeStoreBranch.java:123)
at org.apache.jackrabbit.oak.plugins.document.DocumentRootBuilder.merge(DocumentRootBuilder.java:158)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.merge(DocumentNodeStore.java:1497)
at org.apache.jackrabbit.oak.core.MutableRoot.commit(MutableRoot.java:247)
at org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.commit(SessionDelegate.java:346)
at org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.save(SessionDelegate.java:493)
... 20 common frames omitted
Caused by: org.apache.jackrabbit.oak.plugins.document.ConflictException: The node 8:/oak:index/event.job.topic/:index/com%2Fadobe%2Fgranite%2Fworkflow%2Ftransient%2Fjob%2Fetc%2Fworkflow%2Fmodels%2Fdam-xmp-writeback%2Fjcr_content%2Fmodel/var/eventing/jobs/assigned was already added in revision
r151233e54e1-0-4, before
r15166378b6a-0-2
at org.apache.jackrabbit.oak.plugins.document.Commit.checkConflicts(Commit.java:582)
at org.apache.jackrabbit.oak.plugins.document.Commit.createOrUpdateNode(Commit.java:487)
at org.apache.jackrabbit.oak.plugins.document.Commit.applyToDocumentStore(Commit.java:371)
at org.apache.jackrabbit.oak.plugins.document.Commit.applyToDocumentStore(Commit.java:265)
at org.apache.jackrabbit.oak.plugins.document.Commit.applyInternal(Commit.java:234)
at org.apache.jackrabbit.oak.plugins.document.Commit.apply(Commit.java:219)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.persist(DocumentNodeStoreBranch.java:290)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.persist(DocumentNodeStoreBranch.java:260)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.access$300(DocumentNodeStoreBranch.java:54)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch$InMemory.merge(DocumentNodeStoreBranch.java:498)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.merge0(DocumentNodeStoreBranch.java:180)
... 26 common frames omitted
....
{noformat}
Doing the following removed the repository corruption and restored workflow processing:
{noformat}
oak.removeDescendantsAndSelf("/oak:index/event.job.topic/:index/com%2Fadobe%2Fgranite%2Fworkflow%2Ftransient%2Fjob%2Fetc%2Fworkflow%2Fmodels%2Fdam-xmp-writeback%2Fjcr_content%2Fmodel/var/eventing/jobs/assigned")
{noformat}
Attaching [mongoexport output|^mongoexport.zip] for {{/oak:index/event.job.topic/:index/com%2Fadobe%2Fgranite%2Fworkflow%2Ftransient%2Fjob%2Fetc%2Fworkflow%2Fmodels%2Fdam-xmp-writeback%2Fjcr_content%2Fmodel/var/eventing/jobs/assigned/6a389a6a-a8bf-4038-b57b-cb441c6ac557/com.adobe.granite.workflow.transient.job.etc.workflow.models.dam-xmp-writeback.jcr_content.model/2015/11/19/23/54/6a389a6a-a8bf-4038-b57b-cb441c6ac557_10}} (the hierarchy created at {{r151233e54e1-0-4}}). I've renamed a few path elements to make it more readable though (e.g. {{:index/com%2Fadobe%2Fgranite%2Fworkflow%2Ftransient%2Fjob%2Fetc%2Fworkflow%2Fmodels%2Fdam-xmp-writeback%2Fjcr_content%2Fmodel}} -> {{enc_value}}).
[~mreutegg], I'm assigning it to myself for now, but I think this would require your expertise all the way :).
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Commit.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Commit.java
index fc7cd5a..d24876f 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Commit.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Commit.java
@@ -523,7 +523,8 @@ public class Commit {
String conflictMessage = null;
Revision conflictRevision = newestRev;
if (newestRev == null) {
- if ((op.isDelete() || !op.isNew()) && isConflicting(before, op)) {
+ if ((op.isDelete() || !op.isNew())
+ && !allowConcurrentAddRemove(before, op)) {
conflictMessage = "The node " +
op.getId() + " does not exist or is already deleted";
if (before != null && !before.getLocalDeleted().isEmpty()) {
@@ -531,7 +532,7 @@ public class Commit {
}
}
} else {
- if (op.isNew() && isConflicting(before, op)) {
+ if (op.isNew() && !allowConcurrentAddRemove(before, op)) {
conflictMessage = "The node " +
op.getId() + " was already added in revision\n" +
formatConflictRevision(newestRev);
@@ -616,6 +617,25 @@ public class Commit {
}
/**
+ * Checks whether a concurrent add/remove operation is allowed with the
+ * given before document and update operation. This method will first check
+ * if the concurrent add/remove feature is enable and return {@code false}
+ * immediately if it is disabled. Only when enabled will this method check
+ * if there is a conflict based on the given document and update operation.
+ * See also {@link #isConflicting(NodeDocument, UpdateOp)}.
+ *
+ * @param before the contents of the document before the update.
+ * @param op the update to perform.
+ * @return {@code true} is a concurrent add/remove update is allowed;
+ * {@code false} otherwise.
+ */
+ private boolean allowConcurrentAddRemove(@Nullable NodeDocument before,
+ @Nonnull UpdateOp op) {
+ return nodeStore.getEnableConcurrentAddRemove()
+ && !isConflicting(before, op);
+ }
+
+ /**
* @return the branch if this is a branch commit, otherwise {@code null}.
*/
@CheckForNull
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3733_a5ff019e.diff |
bugs-dot-jar_data_OAK-1697_1552be04 | ---
BugID: OAK-1697
Summary: Unresolved conflicts in TokenProviderImpl#createToken()
Description: "In certain situations (e.g. heavy load) {{TokenProviderImpl#createToken()}}
might create some unresolved conflicts.\n\ne.g. \n\n{code}\norg.apache.jackrabbit.oak.api.CommitFailedException:
OakState0001: Unresolved conflicts in /home/users/..../..../.tokens/2014-04-07T11.55.58.167+02.00\n{code}\n\nand\n\n{code}\n01.04.2014
17:52:41.216 *WARN* [qtp218544742-286] org.apache.jackrabbit.oak.security.authentication.token.TokenProviderImpl
Failed to create login token.\n01.04.2014 17:52:41.218 *WARN* [qtp218544742-300]
org.eclipse.jetty.servlet.ServletHandler /projects.html\njava.lang.IllegalArgumentException:
Invalid token ''\n at org.apache.jackrabbit.api.security.authentication.token.TokenCredentials.<init>(TokenCredentials.java:42)\n{code}"
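The retry idea behind the fix can be sketched with plain JCR (this is not the Oak-internal code, which works on {{Root}}/{{NodeUtil}}): try the timestamp-based name first and fall back to a random name when the save conflicts with a concurrent token creation.
{code}
import java.util.UUID;
import javax.jcr.InvalidItemStateException;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;

public final class TokenNodeRetrySketch {
    static Node addTokenNode(Session session, Node parent, String timestampName)
            throws RepositoryException {
        try {
            Node token = parent.addNode(timestampName);
            session.save();
            return token;
        } catch (InvalidItemStateException conflict) {
            // another session created the same node concurrently: retry with a unique name
            session.refresh(false);
            Node token = parent.addNode(UUID.randomUUID().toString());
            session.save();
            return token;
        }
    }
}
{code}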
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImpl.java
index 2d4600f..a10b41d 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImpl.java
@@ -29,6 +29,7 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
+import java.util.UUID;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.jcr.AccessDeniedException;
@@ -210,11 +211,7 @@ class TokenProviderImpl implements TokenProvider {
if (tokenParent != null) {
try {
long creationTime = new Date().getTime();
- Calendar creation = GregorianCalendar.getInstance();
- creation.setTimeInMillis(creationTime);
- String tokenName = Text.replace(ISO8601.format(creation), ":", ".");
-
- NodeUtil tokenNode = tokenParent.addChild(tokenName, TOKEN_NT_NAME);
+ NodeUtil tokenNode = createTokenNode(tokenParent, creationTime);
tokenNode.setString(JcrConstants.JCR_UUID, IdentifierManager.generateUUID());
String key = generateKey(options.getConfigValue(PARAM_TOKEN_LENGTH, DEFAULT_KEY_SIZE));
@@ -397,6 +394,31 @@ class TokenProviderImpl implements TokenProvider {
return tokenParent;
}
+ /**
+ * Create a new token node below the specified {@code parent}.
+ *
+ * @param parent The parent node.
+ * @param creationTime The creation time that is used as name hint.
+ * @return The new token node
+ * @throws AccessDeniedException
+ */
+ private NodeUtil createTokenNode(@Nonnull NodeUtil parent, @Nonnull long creationTime) throws AccessDeniedException {
+ Calendar creation = GregorianCalendar.getInstance();
+ creation.setTimeInMillis(creationTime);
+ String tokenName = Text.replace(ISO8601.format(creation), ":", ".");
+ NodeUtil tokenNode;
+ try {
+ tokenNode = parent.addChild(tokenName, TOKEN_NT_NAME);
+ root.commit();
+ } catch (CommitFailedException e) {
+ // conflict while creating token node -> retry
+ log.debug("Failed to create token node " + tokenName + ". Using random name as fallback.");
+ root.refresh();
+ tokenNode = parent.addChild(UUID.randomUUID().toString(), TOKEN_NT_NAME);
+ }
+ return tokenNode;
+ }
+
//--------------------------------------------------------------------------
/**
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1697_1552be04.diff |
bugs-dot-jar_data_OAK-1729_7ba9dd66 | ---
BugID: OAK-1729
Summary: DocumentNodeStore revision GC removes intermediate docs
Description: The revision garbage collection in DocumentNodeStore removes intermediate
documents of the revision history of a node even if they are still in use.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
index 8f5e1ab..920cb9c 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
@@ -51,10 +51,12 @@ import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
+import static java.util.Collections.disjoint;
import static org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
import static org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation;
@@ -1339,7 +1341,7 @@ public final class NodeDocument extends Document implements CachedNodeDocument{
setSplitDocMaxRev(old, maxRev);
SplitDocType type = SplitDocType.DEFAULT;
- if(!mainDoc.hasChildren()){
+ if(!mainDoc.hasChildren() && !referencesOldDocAfterSplit(mainDoc, oldDoc)){
type = SplitDocType.DEFAULT_NO_CHILD;
} else if (oldDoc.getLocalRevisions().isEmpty()){
type = SplitDocType.PROP_COMMIT_ONLY;
@@ -1354,6 +1356,31 @@ public final class NodeDocument extends Document implements CachedNodeDocument{
}
/**
+ * Checks if the main document has changes referencing {@code oldDoc} after
+ * the split.
+ *
+ * @param mainDoc the main document before the split.
+ * @param oldDoc the old document created by the split.
+ * @return {@code true} if the main document contains references to the
+ * old document after the split; {@code false} otherwise.
+ */
+ private static boolean referencesOldDocAfterSplit(NodeDocument mainDoc,
+ NodeDocument oldDoc) {
+ Set<Revision> revs = oldDoc.getLocalRevisions().keySet();
+ for (String property : mainDoc.data.keySet()) {
+ if (IGNORE_ON_SPLIT.contains(property)) {
+ continue;
+ }
+ Set<Revision> changes = Sets.newHashSet(mainDoc.getLocalMap(property).keySet());
+ changes.removeAll(oldDoc.getLocalMap(property).keySet());
+ if (!disjoint(changes, revs)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
* Set various properties for intermediate split document
*
* @param intermediate updateOp of the intermediate doc getting created
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/VersionGarbageCollector.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/VersionGarbageCollector.java
index 320afb7..e671b66 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/VersionGarbageCollector.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/VersionGarbageCollector.java
@@ -46,8 +46,7 @@ public class VersionGarbageCollector {
*/
private static final Set<NodeDocument.SplitDocType> GC_TYPES = EnumSet.of(
NodeDocument.SplitDocType.DEFAULT_NO_CHILD,
- NodeDocument.SplitDocType.PROP_COMMIT_ONLY,
- NodeDocument.SplitDocType.INTERMEDIATE);
+ NodeDocument.SplitDocType.PROP_COMMIT_ONLY);
VersionGarbageCollector(DocumentNodeStore nodeStore) {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1729_7ba9dd66.diff |
bugs-dot-jar_data_OAK-1289_0c3e3d70 | ---
BugID: OAK-1289
Summary: Range check fails with IllegalArgumentException
Description: "{{Range.includes()}} fails with IllegalArgumentException when provided
revision is from another cluster node:\n\n{noformat}\njava.lang.IllegalArgumentException:
Trying to compare revisions of different cluster ids: r142f43d2f0f-0-2 and r142f43d46fb-0-1\n\tat
org.apache.jackrabbit.oak.plugins.mongomk.Revision.compareRevisionTime(Revision.java:84)\n\tat
org.apache.jackrabbit.oak.plugins.mongomk.Range.includes(Range.java:55)\n{noformat}\n\nThe
IllegalArgumentException was introduced with OAK-1274."
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/mongomk/Range.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/mongomk/Range.java
index 19ed201..52045fc 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/mongomk/Range.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/mongomk/Range.java
@@ -51,8 +51,9 @@ final class Range {
* @return <code>true</code> if within this range; <code>false</code>
* otherwise.
*/
- boolean includes(Revision r) {
- return high.compareRevisionTime(r) >= 0
+ boolean includes(@Nonnull Revision r) {
+ return high.getClusterId() == r.getClusterId()
+ && high.compareRevisionTime(r) >= 0
&& low.compareRevisionTime(r) <= 0;
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1289_0c3e3d70.diff |
bugs-dot-jar_data_OAK-47_b62f1c26 | ---
BugID: OAK-47
Summary: Wrong results and NPE with copy operation
Description: "The following code either results in an NPE or in a wrong result depending
on which Microkernel instance is used. \n\n{code}\n mk.commit(\"\", \"+\\\"/root\\\":{}\",
mk.getHeadRevision(), \"\");\n mk.commit(\"\", \"+\\\"/root/N0\\\":{}*\\\"/root/N0\\\":\\\"/root/N1\\\"+\\\"/root/N0/N4\\\":{}\",\n
\ mk.getHeadRevision(), \"\");\n{code}\n\nThe wrong result is \n{code}\n{\n
\ \":childNodeCount\": 2,\n \"N0\": {\n \":childNodeCount\": 1,\n \"N4\":
{\n \":childNodeCount\": 0\n }\n },\n \"N1\": {\n \":childNodeCount\":
1,\n \"N4\": {\n \":childNodeCount\": 0\n }\n }\n}\n{code}\n\nThe
expected result is\n{code}\n{\n \":childNodeCount\": 2,\n \"N0\": {\n \":childNodeCount\":
1,\n \"N4\": {\n \":childNodeCount\": 0\n }\n },\n \"N1\":
{\n \":childNodeCount\": 0\n }\n}\n{code}\n\nsimple:fs:target/temp: wrong
result\nfs:{homeDir}/target: NPE\nhttp-bridge:fs:{homeDir}/target: NPE\nsimple:
wrong result\n\n"
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/index/Indexer.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/index/Indexer.java
index 6b2a935..be48a28 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/index/Indexer.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/mk/index/Indexer.java
@@ -299,6 +299,7 @@ public class Indexer {
/**
* Update the index with the given changes.
*
+ * @param rootPath the root path
* @param t the changes
* @param lastRevision
*/
@@ -309,6 +310,7 @@ public class Indexer {
break;
}
String path = PathUtils.concat(rootPath, t.readString());
+ String target;
switch (r) {
case '+': {
t.read(':');
@@ -327,8 +329,16 @@ public class Indexer {
}
break;
}
+ case '*':
+ // TODO support and test copy operation ("*"),
+ // specially in combination with other operations
+ // possibly split up the commit in this case
+ t.read(':');
+ target = t.readString();
+ moveOrCopyNode(path, false, target, lastRevision);
+ break;
case '-':
- moveNode(path, null, lastRevision);
+ moveOrCopyNode(path, true, null, lastRevision);
break;
case '^': {
removeProperty(path, lastRevision);
@@ -342,9 +352,12 @@ public class Indexer {
break;
}
case '>':
+ // TODO does move work correctly
+ // in combination with other operations?
+ // possibly split up the commit in this case
t.read(':');
String name = PathUtils.getName(path);
- String target, position;
+ String position;
if (t.matches('{')) {
position = t.readString();
t.read(':');
@@ -364,7 +377,7 @@ public class Indexer {
} else {
throw ExceptionFactory.get("position: " + position);
}
- moveNode(path, target, lastRevision);
+ moveOrCopyNode(path, true, target, lastRevision);
break;
default:
throw new AssertionError("token: " + (char) t.getTokenType());
@@ -430,7 +443,7 @@ public class Indexer {
}
}
- private void moveNode(String sourcePath, String targetPath, String lastRevision) {
+ private void moveOrCopyNode(String sourcePath, boolean remove, String targetPath, String lastRevision) {
if (isInIndex(sourcePath)) {
// don't index the index
return;
@@ -444,7 +457,9 @@ public class Indexer {
NodeMap map = new NodeMap();
t.read('{');
NodeImpl n = NodeImpl.parse(map, t, 0, sourcePath);
- addOrRemoveRecursive(n, true, false);
+ if (remove) {
+ addOrRemoveRecursive(n, true, false);
+ }
if (targetPath != null) {
t = new JsopTokenizer(node);
map = new NodeMap();
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/simple/SimpleKernelImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/simple/SimpleKernelImpl.java
index 87c2a96..185731e 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/simple/SimpleKernelImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/mk/simple/SimpleKernelImpl.java
@@ -278,18 +278,20 @@ public class SimpleKernelImpl extends MicroKernelWrapperBase implements MicroKer
break;
}
case '*': {
- // TODO is it really required?
// TODO possibly support target position notation
- // TODO support copy in wrappers, index,...
t.read(':');
String target = t.readString();
- diff.tag('*').key(path).value(target);
if (!PathUtils.isAbsolute(target)) {
target = PathUtils.concat(rootPath, target);
}
- NodeImpl node = data.getNode(from);
+ diff.tag('*').key(path).value(target);
String to = PathUtils.relativize("/", target);
- data = data.cloneAndAddChildNode(to, false, null, node, rev);
+ NodeImpl node = data.getNode(from);
+ JsopStream json = new JsopStream();
+ node.append(json, Integer.MAX_VALUE, 0, Integer.MAX_VALUE, false);
+ json.read('{');
+ NodeImpl n2 = NodeImpl.parse(nodeMap, json, rev);
+ data = data.cloneAndAddChildNode(to, false, null, n2, rev);
break;
}
default:
@@ -555,6 +557,7 @@ public class SimpleKernelImpl extends MicroKernelWrapperBase implements MicroKer
}
}
+ @Override
public String toString() {
return "simple:" + name;
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-47_b62f1c26.diff |
bugs-dot-jar_data_OAK-3630_fcd64766 | ---
BugID: OAK-3630
Summary: Mixin based rules not working for relative properties
Description: |-
If an indexing rule is defined for a mixin, then it does not work as expected for relative properties.
The issue is that most of the logic in the Aggregate class (which is also used for relative property handling) relies on the node's primaryType and does not account for mixin types.
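The type check introduced by the patch boils down to matching the configured type against the primary type and every mixin of the node. A sketch of that check on a raw {{NodeState}} (method name is illustrative):
{code}
import java.util.Collections;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.spi.state.NodeState;

public final class TypeMatchSketch {
    static boolean matchesType(String ruleType, NodeState state) {
        PropertyState primary = state.getProperty(JcrConstants.JCR_PRIMARYTYPE);
        if (primary != null && ruleType.equals(primary.getValue(Type.NAME))) {
            return true;
        }
        // Mixins must be considered as well, otherwise mixin-based rules never match.
        PropertyState mixins = state.getProperty(JcrConstants.JCR_MIXINTYPES);
        Iterable<String> mixinNames = mixins == null
                ? Collections.<String>emptyList() : mixins.getValue(Type.NAMES);
        for (String mixin : mixinNames) {
            if (ruleType.equals(mixin)) {
                return true;
            }
        }
        return false;
    }
}
{code}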
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/Aggregate.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/Aggregate.java
index e9f7dd2..fa5728b 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/Aggregate.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/Aggregate.java
@@ -30,7 +30,6 @@ import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
-import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.index.lucene.util.ConfigUtil;
@@ -79,7 +78,7 @@ class Aggregate {
}
public void collectAggregates(NodeState root, ResultCollector collector) {
- if (nodeTypeName.equals(ConfigUtil.getPrimaryTypeName(root))) {
+ if (matchingType(nodeTypeName, root)) {
List<Matcher> matchers = createMatchers();
collectAggregates(root, matchers, collector);
}
@@ -111,6 +110,19 @@ class Aggregate {
return nodeTypeName;
}
+ private static boolean matchingType(String nodeTypeName, NodeState nodeState) {
+ if (nodeTypeName.equals(ConfigUtil.getPrimaryTypeName(nodeState))) {
+ return true;
+ }
+
+ for (String mixin : ConfigUtil.getMixinNames(nodeState)) {
+ if (nodeTypeName.equals(mixin)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
private static void collectAggregates(NodeState nodeState, List<Matcher> matchers,
ResultCollector collector) {
for (ChildNodeEntry cne : nodeState.getChildNodeEntries()) {
@@ -231,7 +243,7 @@ class Aggregate {
//last segment -> add to collector if node type matches
if (depth == maxDepth() - 1
&& primaryType != null
- && !primaryType.equals(ConfigUtil.getPrimaryTypeName(nodeState))) {
+ && !matchingType(primaryType, nodeState)) {
return false;
}
return super.match(name, nodeState, depth);
@@ -256,7 +268,19 @@ class Aggregate {
@Override
public Aggregate getAggregate(NodeState matchedNodeState) {
- return aggMapper.getAggregate(ConfigUtil.getPrimaryTypeName(matchedNodeState));
+ //Check agg defn for primaryType first
+ Aggregate agg = aggMapper.getAggregate(ConfigUtil.getPrimaryTypeName(matchedNodeState));
+
+ //If not found then look for defn for mixins
+ if (agg == null) {
+ for (String mixin : ConfigUtil.getMixinNames(matchedNodeState)) {
+ agg = aggMapper.getAggregate(mixin);
+ if (agg != null) {
+ break;
+ }
+ }
+ }
+ return agg;
}
@Override
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/ConfigUtil.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/ConfigUtil.java
index 476f7e1..73287a0 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/ConfigUtil.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/ConfigUtil.java
@@ -19,6 +19,8 @@
package org.apache.jackrabbit.oak.plugins.index.lucene.util;
+import java.util.Collections;
+
import com.google.common.primitives.Ints;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.api.Blob;
@@ -60,6 +62,11 @@ public class ConfigUtil {
return (ps == null) ? JcrConstants.NT_BASE : ps.getValue(Type.NAME);
}
+ public static Iterable<String> getMixinNames(NodeState nodeState) {
+ PropertyState ps = nodeState.getProperty(JcrConstants.JCR_MIXINTYPES);
+ return (ps == null) ? Collections.<String>emptyList() : ps.getValue(Type.NAMES);
+ }
+
/**
* Assumes that given state is of type nt:file and then reads
* the jcr:content/@jcr:data property to get the binary content
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/package-info.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/package-info.java
index 0761902..b1049b9 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/package-info.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/package-info.java
@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-@Version("1.1.0")
+@Version("1.2.0")
@Export(optional = "provide:=true")
package org.apache.jackrabbit.oak.plugins.index.lucene.util;
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3630_fcd64766.diff |
bugs-dot-jar_data_OAK-3099_25850476 | ---
BugID: OAK-3099
Summary: Revision GC fails when split documents with very long paths are present
Description: |-
My company is using the MongoDB microkernel with Oak, and we've noticed that the daily revision GC is failing with errors like this:
{code}
13.07.2015 13:06:16.261 *ERROR* [pool-7-thread-1-Maintenance Queue(com/adobe/granite/maintenance/job/RevisionCleanupTask)] org.apache.jackrabbit.oak.management.ManagementOperation Revision garbage collection failed
java.lang.IllegalArgumentException: 13:h113f9d0fe7ac0f87fa06397c37b9ffd4b372eeb1ec93e0818bb4024a32587820
at org.apache.jackrabbit.oak.plugins.document.Revision.fromString(Revision.java:236)
at org.apache.jackrabbit.oak.plugins.document.SplitDocumentCleanUp.disconnect(SplitDocumentCleanUp.java:84)
at org.apache.jackrabbit.oak.plugins.document.SplitDocumentCleanUp.disconnect(SplitDocumentCleanUp.java:56)
at org.apache.jackrabbit.oak.plugins.document.VersionGCSupport.deleteSplitDocuments(VersionGCSupport.java:53)
at org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.collectSplitDocuments(VersionGarbageCollector.java:117)
at org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.gc(VersionGarbageCollector.java:105)
at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreService$2.run(DocumentNodeStoreService.java:511)
at org.apache.jackrabbit.oak.spi.state.RevisionGC$1.call(RevisionGC.java:68)
at org.apache.jackrabbit.oak.spi.state.RevisionGC$1.call(RevisionGC.java:64)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
{code}
I've narrowed the issue down to the disconnect(NodeDocument) method of the [SplitDocumentCleanUp class|https://svn.apache.org/repos/asf/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitDocumentCleanUp.java]. The method always tries to extract the path of the node from its ID, but this won't work for documents whose path is very long because those documents will have the hash of their path in the ID.
I believe this code should fix the issue, but I haven't had a chance to actually try it:
{code}
private void disconnect(NodeDocument splitDoc) {
String mainId = Utils.getIdFromPath(splitDoc.getMainPath());
NodeDocument doc = store.find(NODES, mainId);
if (doc == null) {
LOG.warn("Main document {} already removed. Split document is {}",
mainId, splitId);
return;
}
String path = splitDoc.getPath();
int slashIdx = path.lastIndexOf('/');
int height = Integer.parseInt(path.substring(slashIdx + 1));
Revision rev = Revision.fromString(
path.substring(path.lastIndexOf('/', slashIdx - 1) + 1, slashIdx));
doc = doc.findPrevReferencingDoc(rev, height);
if (doc == null) {
LOG.warn("Split document {} not referenced anymore. Main document is {}",
splitId, mainId);
return;
}
// remove reference
if (doc.getSplitDocType() == INTERMEDIATE) {
disconnectFromIntermediate(doc, rev);
} else {
markStaleOnMain(doc, rev, height);
}
}
{code}
By using getPath(), the code should automatically use either the ID or the _path property, whichever is right for the document.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitDocumentCleanUp.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitDocumentCleanUp.java
index 081a422..7cdaebc 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitDocumentCleanUp.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitDocumentCleanUp.java
@@ -79,14 +79,16 @@ public class SplitDocumentCleanUp {
mainId, splitId);
return;
}
- int slashIdx = splitId.lastIndexOf('/');
- int height = Integer.parseInt(splitId.substring(slashIdx + 1));
+
+ String splitDocPath = splitDoc.getPath();
+ int slashIdx = splitDocPath.lastIndexOf('/');
+ int height = Integer.parseInt(splitDocPath.substring(slashIdx + 1));
Revision rev = Revision.fromString(
- splitId.substring(splitId.lastIndexOf('/', slashIdx - 1) + 1, slashIdx));
+ splitDocPath.substring(splitDocPath.lastIndexOf('/', slashIdx - 1) + 1, slashIdx));
doc = doc.findPrevReferencingDoc(rev, height);
if (doc == null) {
- LOG.warn("Split document {} not referenced anymore. Main document is {}",
- splitId, mainId);
+ LOG.warn("Split document {} for path {} not referenced anymore. Main document is {}",
+ splitId, splitDocPath, mainId);
return;
}
// remove reference
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3099_25850476.diff |
bugs-dot-jar_data_OAK-3318_e12e2052 | ---
BugID: OAK-3318
Summary: IndexRule not respecting inheritence based on mixins
Description: 'IndexRule are meant to be applied based on both primaryType and minin
type based inheritance. Currently it appears that only primaryType based inheritance
is working '
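A minimal JCR-level sketch of the scenario, assuming an index definition with an indexRule for {{mix:title}} is already in place (node, mixin and property names are illustrative only):
{code}
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;

public class MixinRuleRepro {
    // The index is assumed to declare an indexRule for mix:title.
    static void createTitledNode(Session session) throws RepositoryException {
        Node n = session.getRootNode().addNode("content", "nt:unstructured");
        n.addMixin("mix:title");            // the rule is declared for this mixin
        n.setProperty("jcr:title", "foo");
        session.save();
        // expected: the mix:title rule applies to this node; before the fix,
        // IndexRule.appliesTo() only compared the rule against the primary type
    }
}
{code}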
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java
index 465ff12..8642545 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexDefinition.java
@@ -748,6 +748,12 @@ class IndexDefinition implements Aggregate.AggregateMapper{
* <code>false</code> otherwise.
*/
public boolean appliesTo(Tree state) {
+ for (String mixinName : getMixinTypeNames(state)){
+ if (nodeTypeName.equals(mixinName)){
+ return true;
+ }
+ }
+
if (!nodeTypeName.equals(getPrimaryTypeName(state))) {
return false;
}
@@ -1242,7 +1248,7 @@ class IndexDefinition implements Aggregate.AggregateMapper{
}
private static Iterable<String> getMixinTypeNames(Tree tree) {
- PropertyState property = tree.getProperty(JcrConstants.JCR_MIMETYPE);
+ PropertyState property = tree.getProperty(JcrConstants.JCR_MIXINTYPES);
return property != null ? property.getValue(Type.NAMES) : Collections.<String>emptyList();
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3318_e12e2052.diff |
bugs-dot-jar_data_OAK-3903_690fb9f4 | ---
BugID: OAK-3903
Summary: Commit fails even though change made it to the DocumentStore
Description: |-
In some rare cases it may happen that the DocumentNodeStore considers a commit as failed even though the changes were applied entirely to the DocumentStore. The issue happens when the update of the commit root is applied to the storage of a DocumentStore, but shortly afterwards the communication between Oak and the storage system fails. On the Oak side the call will be considered as failed, but the change was actually applied.
The issue can be reproduced with the test attached to OAK-1641 and a replica-set with 3 nodes. Killing the primary node and restarting it after a while in a loop will eventually lead to a commit that conflicts with itself.
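The patch below adds a verification step after a failed commit-root update. A simplified sketch of that pattern, with the store access reduced to a supplier of committed revisions (this is not the actual DocumentStore API):
{code}
import java.util.Set;
import java.util.function.Supplier;

public class CommitRootVerifier {
    // Returns true if the commit revision is found on re-read, i.e. the update
    // was applied even though the original store call threw an exception.
    static boolean updateApplied(Supplier<Set<String>> readCommittedRevisions,
                                 String commitRev, int retries) {
        for (int i = 0; i < retries; i++) {
            try {
                return readCommittedRevisions.get().contains(commitRev);
            } catch (RuntimeException e) {
                // the re-read failed as well; retry a few times before giving up
            }
        }
        return false; // could not verify, so treat the commit as failed
    }
}
{code}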
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Commit.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Commit.java
index 3d854d9..9074e68 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Commit.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Commit.java
@@ -388,7 +388,7 @@ public class Commit {
// only set revision on commit root when there is
// no collision for this commit revision
commit.containsMapEntry(COLLISIONS, revision, false);
- NodeDocument before = nodeStore.updateCommitRoot(commit);
+ NodeDocument before = nodeStore.updateCommitRoot(commit, revision);
if (before == null) {
String msg = "Conflicting concurrent change. " +
"Update operation failed: " + commitRoot;
@@ -425,7 +425,13 @@ public class Commit {
if (success) {
LOG.error("Exception occurred after commit. Rollback will be suppressed.", e);
} else {
- rollback(newNodes, opLog, commitRoot);
+ try {
+ rollback(newNodes, opLog, commitRoot);
+ } catch (Exception ex) {
+ // catch any exception caused by the rollback, log it
+ // and throw the original exception
+ LOG.warn("Rollback failed", ex);
+ }
throw e;
}
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
index 14e608f..8a715ac 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
@@ -73,7 +73,6 @@ import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.cache.Cache;
import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.UncheckedExecutionException;
@@ -1164,12 +1163,14 @@ public final class DocumentNodeStore
* Updates a commit root document.
*
* @param commit the updates to apply on the commit root document.
+ * @param commitRev the commit revision.
* @return the document before the update was applied or <code>null</code>
* if the update failed because of a collision.
* @throws DocumentStoreException if the update fails with an error.
*/
@CheckForNull
- NodeDocument updateCommitRoot(UpdateOp commit) throws DocumentStoreException {
+ NodeDocument updateCommitRoot(UpdateOp commit, Revision commitRev)
+ throws DocumentStoreException {
// use batch commit when there are only revision and modified updates
boolean batch = true;
for (Map.Entry<Key, Operation> op : commit.getChanges().entrySet()) {
@@ -1181,11 +1182,63 @@ public final class DocumentNodeStore
batch = false;
break;
}
- if (batch) {
- return batchUpdateCommitRoot(commit);
- } else {
- return store.findAndUpdate(NODES, commit);
+ try {
+ if (batch) {
+ return batchUpdateCommitRoot(commit);
+ } else {
+ return store.findAndUpdate(NODES, commit);
+ }
+ } catch (DocumentStoreException e) {
+ return verifyCommitRootUpdateApplied(commit, commitRev, e);
+ }
+ }
+
+ /**
+ * Verifies if the {@code commit} update on the commit root was applied by
+ * reading the affected document and checks if the {@code commitRev} is
+ * set in the revisions map.
+ *
+ * @param commit the update operation on the commit root document.
+ * @param commitRev the commit revision.
+ * @param e the exception that will be thrown when this method determines
+ * that the update was not applied.
+ * @return the before document.
+ * @throws DocumentStoreException the exception passed to this document
+ * in case the commit update was not applied.
+ */
+ private NodeDocument verifyCommitRootUpdateApplied(UpdateOp commit,
+ Revision commitRev,
+ DocumentStoreException e)
+ throws DocumentStoreException {
+ LOG.info("Update of commit root failed with exception", e);
+ int numRetries = 10;
+ for (int i = 0; i < numRetries; i++) {
+ LOG.info("Checking if change made it to the DocumentStore anyway {}/{} ...",
+ i + 1, numRetries);
+ NodeDocument commitRootDoc;
+ try {
+ commitRootDoc = store.find(NODES, commit.getId(), 0);
+ } catch (Exception ex) {
+ LOG.info("Failed to read commit root document", ex);
+ continue;
+ }
+ if (commitRootDoc == null) {
+ LOG.info("Commit root document missing for {}", commit.getId());
+ break;
+ }
+ if (commitRootDoc.getLocalRevisions().containsKey(commitRev)) {
+ LOG.info("Update made it to the store even though the call " +
+ "failed with an exception. Previous exception will " +
+ "be suppressed. {}", commit);
+ NodeDocument before = NODES.newDocument(store);
+ commitRootDoc.deepCopy(before);
+ UpdateUtils.applyChanges(before, commit.getReverseOperation());
+ return before;
+ }
+ break;
}
+ LOG.info("Update didn't make it to the store. Re-throwing the exception");
+ throw e;
}
private NodeDocument batchUpdateCommitRoot(UpdateOp commit)
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3903_690fb9f4.diff |
bugs-dot-jar_data_OAK-1096_be44b816 | ---
BugID: OAK-1096
Summary: QueryManager does not have autorefresh
Description: |-
Having two sessions A and B.
A writes something, for example /content/page/text = "text".
Accessing B's QueryManager and executing a query for "text", nothing will be found.
Triggering an explicit refresh on B before the query, the hit is found.
I assume that the autorefresh is missing in that case.
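A JCR-level sketch of the two-session scenario (both sessions are assumed to be plain logins against the same repository; the path, property and query text are illustrative):
{code}
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.query.Query;
import javax.jcr.query.QueryResult;

public class AutoRefreshRepro {
    static QueryResult repro(Session sessionA, Session sessionB) throws RepositoryException {
        sessionA.getNode("/content/page").setProperty("text", "text");
        sessionA.save();

        Query q = sessionB.getWorkspace().getQueryManager().createQuery(
                "SELECT * FROM [nt:base] WHERE CONTAINS(*, 'text')", Query.JCR_SQL2);
        // expected: the change from session A is visible without an explicit
        // sessionB.refresh(false); before the fix it was not
        return q.execute();
    }
}
{code}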
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/query/QueryImpl.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/query/QueryImpl.java
index 0ee38df..acff663 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/query/QueryImpl.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/query/QueryImpl.java
@@ -34,9 +34,10 @@ import javax.jcr.version.VersionException;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.commons.PathUtils;
+import org.apache.jackrabbit.oak.jcr.delegate.NodeDelegate;
import org.apache.jackrabbit.oak.jcr.session.NodeImpl;
import org.apache.jackrabbit.oak.jcr.session.SessionContext;
-import org.apache.jackrabbit.oak.jcr.delegate.NodeDelegate;
+import org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation;
/**
* The implementation of the corresponding JCR interface.
@@ -86,7 +87,14 @@ public class QueryImpl implements Query {
@Override
public QueryResult execute() throws RepositoryException {
- return manager.executeQuery(statement, language, limit, offset, bindVariableMap);
+ return sessionContext.getSessionDelegate().perform(
+ new SessionOperation<QueryResult>() {
+ @Override
+ public QueryResult perform() throws RepositoryException {
+ return manager.executeQuery(statement, language, limit,
+ offset, bindVariableMap);
+ }
+ });
}
@Override
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1096_be44b816.diff |
bugs-dot-jar_data_OAK-1178_84fb6b29 | ---
BugID: OAK-1178
Summary: 'MutableTree#isNew: replace implementation by NodeBuilder#isNew '
Description: |-
Similar to the issue described in OAK-1177 we may consider replacing the implementation of MutableTree#isNew by the corresponding call on the NodeBuilder.
See also OAK-947.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
index c3b3885..00136aa 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
@@ -260,7 +260,7 @@ public class MemoryNodeBuilder implements NodeBuilder {
@Override
public boolean isNew() {
- return !isRoot() && !parent.base().hasChildNode(name) && parent.hasChildNode(name);
+ return exists() && !base.exists();
}
@Override
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1178_84fb6b29.diff |
bugs-dot-jar_data_OAK-3549_9772f5b2 | ---
BugID: OAK-3549
Summary: Initial read of _lastRev creates incorrect RevisionComparator
Description: "The logic in backgroundRead(false) orders the local lastRev \nbefore
external lastRev. Thus the last change done by the\nlocal cluster node will look
as if it happened before a potentially\nolder external change. "
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java
index 49496ee..9e3eac2 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java
@@ -127,7 +127,7 @@ public class DocumentMK {
}
void backgroundRead() {
- nodeStore.backgroundRead(true);
+ nodeStore.backgroundRead();
}
void backgroundWrite() {
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
index 061a364..347d694 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
@@ -18,6 +18,7 @@ package org.apache.jackrabbit.oak.plugins.document;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.toArray;
import static com.google.common.collect.Iterables.transform;
@@ -487,27 +488,29 @@ public final class DocumentNodeStore
setHeadRevision(commit.getRevision());
// make sure _lastRev is written back to store
backgroundWrite();
- } else {
- // initialize branchCommits
- branches.init(store, this);
- // initial reading of the revisions of other cluster nodes
- backgroundRead(false);
- if (headRevision == null) {
- // no revision read from other cluster nodes
- setHeadRevision(newRevision());
+ rootDoc = store.find(NODES, Utils.getIdFromPath("/"));
+ // at this point the root document must exist
+ if (rootDoc == null) {
+ throw new IllegalStateException("Root document does not exist");
}
+ } else {
+ checkLastRevRecovery();
+ initializeHeadRevision(rootDoc);
// check if _lastRev for our clusterId exists
if (!rootDoc.getLastRev().containsKey(clusterId)) {
unsavedLastRevisions.put("/", headRevision);
backgroundWrite();
}
}
- checkLastRevRecovery();
+
// Renew the lease because it may have been stale
renewClusterIdLease();
getRevisionComparator().add(headRevision, Revision.newRevision(0));
+ // initialize branchCommits
+ branches.init(store, this);
+
dispatcher = new ChangeDispatcher(getRoot());
commitQueue = new CommitQueue(this);
String threadNamePostfix = "(" + clusterId + ")";
@@ -1693,7 +1696,7 @@ public final class DocumentNodeStore
synchronized (backgroundReadMonitor) {
long start = clock.getTime();
// pull in changes from other cluster nodes
- BackgroundReadStats readStats = backgroundRead(true);
+ BackgroundReadStats readStats = backgroundRead();
long readTime = clock.getTime() - start;
String msg = "Background read operations stats (read:{} {})";
if (clock.getTime() - start > TimeUnit.SECONDS.toMillis(10)) {
@@ -1762,11 +1765,8 @@ public final class DocumentNodeStore
/**
* Perform a background read and make external changes visible.
- *
- * @param dispatchChange whether to dispatch external changes
- * to {@link #dispatcher}.
*/
- BackgroundReadStats backgroundRead(boolean dispatchChange) {
+ BackgroundReadStats backgroundRead() {
BackgroundReadStats stats = new BackgroundReadStats();
long time = clock.getTime();
String id = Utils.getIdFromPath("/");
@@ -1774,30 +1774,7 @@ public final class DocumentNodeStore
if (doc == null) {
return stats;
}
- Map<Integer, Revision> lastRevMap = doc.getLastRev();
- try {
- long externalTime = Utils.getMaxExternalTimestamp(lastRevMap.values(), clusterId);
- long localTime = clock.getTime();
- if (localTime < externalTime) {
- LOG.warn("Detected clock differences. Local time is '{}', " +
- "while most recent external time is '{}'. " +
- "Current _lastRev entries: {}",
- new Date(localTime), new Date(externalTime), lastRevMap.values());
- double delay = ((double) externalTime - localTime) / 1000d;
- String msg = String.format("Background read will be delayed by %.1f seconds. " +
- "Please check system time on cluster nodes.", delay);
- LOG.warn(msg);
- clock.waitUntil(externalTime + 1);
- } else if (localTime == externalTime) {
- // make sure local time is past external time
- // but only log at debug
- LOG.debug("Local and external time are equal. Waiting until local" +
- "time is more recent than external reported time.");
- clock.waitUntil(externalTime + 1);
- }
- } catch (InterruptedException e) {
- throw new RuntimeException("Background read interrupted", e);
- }
+ alignWithExternalRevisions(doc);
Revision.RevisionComparator revisionComparator = getRevisionComparator();
// the (old) head occurred first
@@ -1807,6 +1784,7 @@ public final class DocumentNodeStore
StringSort externalSort = JournalEntry.newSorter();
+ Map<Integer, Revision> lastRevMap = doc.getLastRev();
try {
Map<Revision, Revision> externalChanges = Maps.newHashMap();
for (Map.Entry<Integer, Revision> e : lastRevMap.entrySet()) {
@@ -1899,23 +1877,21 @@ public final class DocumentNodeStore
Revision oldHead = headRevision;
// the new head revision is after other revisions
setHeadRevision(newRevision());
- if (dispatchChange) {
- commitQueue.headRevisionChanged();
- time = clock.getTime();
- if (externalSort != null) {
- // then there were external changes and reading them
- // was successful -> apply them to the diff cache
- try {
- JournalEntry.applyTo(externalSort, diffCache, oldHead, headRevision);
- } catch (Exception e1) {
- LOG.error("backgroundRead: Exception while processing external changes from journal: {}", e1, e1);
- }
+ commitQueue.headRevisionChanged();
+ time = clock.getTime();
+ if (externalSort != null) {
+ // then there were external changes and reading them
+ // was successful -> apply them to the diff cache
+ try {
+ JournalEntry.applyTo(externalSort, diffCache, oldHead, headRevision);
+ } catch (Exception e1) {
+ LOG.error("backgroundRead: Exception while processing external changes from journal: {}", e1, e1);
}
- stats.populateDiffCache = clock.getTime() - time;
- time = clock.getTime();
-
- dispatcher.contentChanged(getRoot().fromExternalChange(), null);
}
+ stats.populateDiffCache = clock.getTime() - time;
+ time = clock.getTime();
+
+ dispatcher.contentChanged(getRoot().fromExternalChange(), null);
} finally {
backgroundOperationLock.writeLock().unlock();
}
@@ -2054,6 +2030,68 @@ public final class DocumentNodeStore
//-----------------------------< internal >---------------------------------
+ /**
+ * Performs an initial read of the _lastRevs on the root document,
+ * initializes the {@link #revisionComparator} and sets the head revision.
+ *
+ * @param rootDoc the current root document.
+ */
+ private void initializeHeadRevision(NodeDocument rootDoc) {
+ checkState(headRevision == null);
+
+ alignWithExternalRevisions(rootDoc);
+ Map<Integer, Revision> lastRevMap = rootDoc.getLastRev();
+ Revision seenAt = Revision.newRevision(0);
+ long purgeMillis = revisionPurgeMillis();
+ for (Map.Entry<Integer, Revision> entry : lastRevMap.entrySet()) {
+ Revision r = entry.getValue();
+ if (r.getTimestamp() > purgeMillis) {
+ revisionComparator.add(r, seenAt);
+ }
+ if (entry.getKey() == clusterId) {
+ continue;
+ }
+ lastKnownRevision.put(entry.getKey(), entry.getValue());
+ }
+ revisionComparator.purge(purgeMillis);
+ setHeadRevision(newRevision());
+ }
+
+ /**
+ * Makes sure the current time is after the most recent external revision
+ * timestamp in the _lastRev map of the given root document. If necessary
+ * the current thread waits until {@link #clock} is after the external
+ * revision timestamp.
+ *
+ * @param rootDoc the root document.
+ */
+ private void alignWithExternalRevisions(@Nonnull NodeDocument rootDoc) {
+ Map<Integer, Revision> lastRevMap = checkNotNull(rootDoc).getLastRev();
+ try {
+ long externalTime = Utils.getMaxExternalTimestamp(lastRevMap.values(), clusterId);
+ long localTime = clock.getTime();
+ if (localTime < externalTime) {
+ LOG.warn("Detected clock differences. Local time is '{}', " +
+ "while most recent external time is '{}'. " +
+ "Current _lastRev entries: {}",
+ new Date(localTime), new Date(externalTime), lastRevMap.values());
+ double delay = ((double) externalTime - localTime) / 1000d;
+ String msg = String.format("Background read will be delayed by %.1f seconds. " +
+ "Please check system time on cluster nodes.", delay);
+ LOG.warn(msg);
+ clock.waitUntil(externalTime + 1);
+ } else if (localTime == externalTime) {
+ // make sure local time is past external time
+ // but only log at debug
+ LOG.debug("Local and external time are equal. Waiting until local" +
+ "time is more recent than external reported time.");
+ clock.waitUntil(externalTime + 1);
+ }
+ } catch (InterruptedException e) {
+ throw new RuntimeException("Background read interrupted", e);
+ }
+ }
+
@Nonnull
private Commit newTrunkCommit(@Nonnull Revision base) {
checkArgument(!checkNotNull(base).isBranch(),
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3549_9772f5b2.diff |
bugs-dot-jar_data_OAK-3013_eabb4066 | ---
BugID: OAK-3013
Summary: SQL2 query with union, limit and offset can return invalid results
Description: |
When using order, limit and offset, a SQL2 query that contains a union of two subqueries with common results can return invalid results.
Example: assuming content tree /test/a/b/c/d/e exists
{code:sql}
SELECT [jcr:path] FROM [nt:base] AS a WHERE ISDESCENDANTNODE(a, '/test') UNION SELECT [jcr:path] FROM [nt:base] AS a WHERE ISDESCENDANTNODE(a, '/test') ORDER BY [jcr:path]
{code}
with limit=3 and offset 2 returns only one row ( instead of 3 )
the correct result set is
{noformat}
/test/a/b/c
/test/a/b/c/d
/test/a/b/c/d/e
{noformat}
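The fix below pushes {{limit + offset}} down to both subqueries instead of just {{limit}}. An illustrative version of that computation (not the actual {{QueryImpl.saturatedAdd}} implementation):
{code}
public class UnionLimit {
    // Each subquery must be able to produce limit + offset rows, otherwise the
    // union may run out of rows before the offset is skipped. Saturate on
    // overflow so that "unlimited" (Long.MAX_VALUE) stays unlimited.
    static long subqueryLimit(long limit, long offset) {
        long sum = limit + offset;
        return sum < 0 ? Long.MAX_VALUE : sum;
    }
    // e.g. limit = 3, offset = 2  ->  each subquery is given a limit of 5
}
{code}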
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/UnionQueryImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/UnionQueryImpl.java
index 90cb3ff..2f9bf8d 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/UnionQueryImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/UnionQueryImpl.java
@@ -91,13 +91,19 @@ public class UnionQueryImpl implements Query {
@Override
public void setLimit(long limit) {
this.limit = limit;
- left.setLimit(limit);
- right.setLimit(limit);
+ applyLimitOffset();
}
@Override
public void setOffset(long offset) {
this.offset = offset;
+ applyLimitOffset();
+ }
+
+ private void applyLimitOffset() {
+ long subqueryLimit = QueryImpl.saturatedAdd(limit, offset);
+ left.setLimit(subqueryLimit);
+ right.setLimit(subqueryLimit);
}
@Override
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3013_eabb4066.diff |
bugs-dot-jar_data_OAK-1122_5286861d | ---
BugID: OAK-1122
Summary: Empty branch commit returns head revision on trunk
Description: MicroKernelImpl returns the head revision on trunk when an empty commit
happens on a branch revision.
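A sketch of the expected contract, assuming the MicroKernel API as used by the patch below:
{code}
import org.apache.jackrabbit.mk.api.MicroKernel;

public class EmptyBranchCommit {
    static String emptyCommitOnBranch(MicroKernel mk) {
        String branchRev = mk.branch(null);                  // branch off the trunk head
        String result = mk.commit("/", "", branchRev, null); // empty jsonDiff
        // expected: result refers to the branch (the passed revision),
        // not to mk.getHeadRevision() on trunk
        return result;
    }
}
{code}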
diff --git a/oak-mk/src/main/java/org/apache/jackrabbit/mk/core/MicroKernelImpl.java b/oak-mk/src/main/java/org/apache/jackrabbit/mk/core/MicroKernelImpl.java
index 35126ae..05b6309 100644
--- a/oak-mk/src/main/java/org/apache/jackrabbit/mk/core/MicroKernelImpl.java
+++ b/oak-mk/src/main/java/org/apache/jackrabbit/mk/core/MicroKernelImpl.java
@@ -411,7 +411,7 @@ public class MicroKernelImpl implements MicroKernel {
throw new IllegalArgumentException("absolute path expected: " + path);
}
if (jsonDiff == null || jsonDiff.length() == 0) {
- return getHeadRevision();
+ return revisionId != null ? revisionId : getHeadRevision();
}
Id revId = revisionId == null ? getHeadRevisionId() : Id.fromString(revisionId);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1122_5286861d.diff |
bugs-dot-jar_data_OAK-2649_72d24f4b | ---
BugID: OAK-2649
Summary: IndexCopier might create empty files in case of error occuring while copying
Description: |-
On some of the setups, the following logs are seen:
{noformat}
error.log:12.03.2015 03:53:59.785 *WARN* [pool-5-thread-90] org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier Found local copy for _2uv.cfs in MMapDirectory@/mnt/installation/crx-quickstart/repository/index/e5a943cdec3000bd8ce54924fd2070ab5d1d35b9ecf530963a3583d43bf28293/1 lockFactory=NativeFSLockFactory@/mnt/installation/crx-quickstart/repository/index/e5a943cdec3000bd8ce54924fd2070ab5d1d35b9ecf530963a3583d43bf28293/1 but size of local 0 differs from remote 1070972. Content would be read from remote file only
error.log:12.03.2015 03:54:02.883 *WARN* [pool-5-thread-125] org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier Found local copy for _2rr.si in MMapDirectory@/mnt/installation/crx-quickstart/repository/index/43b36b107f8ce7e162c15b22508aa457ff6ae0083ed3e12d14a7dab67f886def/1 lockFactory=NativeFSLockFactory@/mnt/installation/crx-quickstart/repository/index/43b36b107f8ce7e162c15b22508aa457ff6ae0083ed3e12d14a7dab67f886def/1 but size of local 0 differs from remote 240. Content would be read from remote file only
error.log:12.03.2015 03:54:03.467 *WARN* [pool-5-thread-132] org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier Found local copy for _2ro_3.del in MMapDirectory@/mnt/installation/crx-quickstart/repository/index/43b36b107f8ce7e162c15b22508aa457ff6ae0083ed3e12d14a7dab67f886def/1 lockFactory=NativeFSLockFactory@/mnt/installation/crx-quickstart/repository/index/43b36b107f8ce7e162c15b22508aa457ff6ae0083ed3e12d14a7dab67f886def/1 but size of local 0 differs from remote 42. Content would be read from remote file only
error.log:12.03.2015 03:54:03.737 *WARN* [pool-5-thread-135] org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier Found local copy for _2rm_2.del in MMapDirectory@/mnt/installation/crx-quickstart/repository/index/43b36b107f8ce7e162c15b22508aa457ff6ae0083ed3e12d14a7dab67f886def/1 lockFactory=NativeFSLockFactory@/mnt/installation/crx-quickstart/repository/index/43b36b107f8ce7e162c15b22508aa457ff6ae0083ed3e12d14a7dab67f886def/1 but size of local 0 differs from remote 35. Content would be read from remote file only
{noformat}
They indicate that the copier has created files of size 0. Looking at the code flow, this can happen when an error occurs partway through the copy. {{org.apache.lucene.store.Directory#copy}} does take care of removing the file in case of error, but only for IOException and not for other cases.
As a fix, the logic should ensure that the local file gets deleted if the copy was not successful.
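A reduced sketch of the intended behaviour, using the same Lucene 4.x {{Directory}} calls that appear in the patch below (error handling simplified):
{code}
import java.io.IOException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;

public class SafeIndexFileCopy {
    static void copy(Directory remote, Directory local, String name) throws IOException {
        boolean success = false;
        try {
            remote.copy(local, name, name, IOContext.READ);
            success = true;
        } finally {
            if (!success && local.fileExists(name)) {
                // remove the partially written file so later runs do not find a
                // local copy whose size differs from the remote one
                local.deleteFile(name);
            }
        }
    }
}
{code}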
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java
index 6f67427..47e57d5 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexCopier.java
@@ -188,9 +188,12 @@ class IndexCopier implements CopyOnReadStatsMBean {
@Override
public void run() {
String name = reference.name;
+ boolean success = false;
+ boolean copyAttempted = false;
try {
if (!local.fileExists(name)) {
long start = System.currentTimeMillis();
+ copyAttempted = true;
remote.copy(local, name, name, IOContext.READ);
reference.markValid();
downloadTime.addAndGet(System.currentTimeMillis() - start);
@@ -210,11 +213,22 @@ class IndexCopier implements CopyOnReadStatsMBean {
reference.markValid();
}
}
+ success = true;
} catch (IOException e) {
//TODO In case of exception there would not be any other attempt
//to download the file. Look into support for retry
log.warn("Error occurred while copying file [{}] " +
"from {} to {}", name, remote, local, e);
+ } finally {
+ if (copyAttempted && !success){
+ try {
+ if (local.fileExists(name)) {
+ local.deleteFile(name);
+ }
+ } catch (IOException e) {
+ log.warn("Error occurred while deleting corrupted file [{}] from [{}]", name, local, e);
+ }
+ }
}
}
});
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2649_72d24f4b.diff |
bugs-dot-jar_data_OAK-546_428e32c6 | ---
BugID: OAK-546
Summary: 'Query: unexpected result on negative limit / offset'
Description: |-
Currently, running a query with a limit of -1 never returns any rows, the same as when using limit = 0.
Either the query engine should fail with a negative limit or offset (IllegalArgumentException), or it should ignore negative values (unlimited result rows for limit, probably no offset for offset = -1).
I would prefer IllegalArgumentException, but I can also live with -1 = unlimited, at least for "limit".
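A sketch of the resulting contract. The validation mirrors the check added to {{QueryEngineImpl}} below, and "unlimited" is expressed as {{Long.MAX_VALUE}}, as the {{XPathQueryBuilder}} change does:
{code}
public class LimitOffsetContract {
    static void checkLimitAndOffset(long limit, long offset) {
        if (limit < 0) {
            throw new IllegalArgumentException("Limit may not be negative, is: " + limit);
        }
        if (offset < 0) {
            throw new IllegalArgumentException("Offset may not be negative, is: " + offset);
        }
    }
    // callers that previously passed -1 for "no limit" should pass Long.MAX_VALUE instead
}
{code}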
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/api/QueryEngine.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/api/QueryEngine.java
index c43f58e..96b3ef8 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/api/QueryEngine.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/api/QueryEngine.java
@@ -52,8 +52,8 @@ public interface QueryEngine {
*
* @param statement the query statement
* @param language the language
- * @param limit the maximum result set size
- * @param offset the number of rows to skip
+ * @param limit the maximum result set size (may not be negative)
+ * @param offset the number of rows to skip (may not be negative)
* @param bindings the bind variable value bindings
* @param namePathMapper the name and path mapper to use
* @return the result
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/QueryEngineImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/QueryEngineImpl.java
index 4dcb86b..e5d4adb 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/QueryEngineImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/QueryEngineImpl.java
@@ -138,6 +138,12 @@ public abstract class QueryEngineImpl implements QueryEngine {
public Result executeQuery(String statement, String language, long limit,
long offset, Map<String, ? extends PropertyValue> bindings,
NamePathMapper namePathMapper) throws ParseException {
+ if (limit < 0) {
+ throw new IllegalArgumentException("Limit may not be negative, is: " + limit);
+ }
+ if (offset < 0) {
+ throw new IllegalArgumentException("Offset may not be negative, is: " + offset);
+ }
Query q = parseQuery(statement, language);
q.setRootTree(getRootTree());
q.setRootState(getRootState());
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/query/XPathQueryBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/query/XPathQueryBuilder.java
index 893738b..d1dd992 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/query/XPathQueryBuilder.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/query/XPathQueryBuilder.java
@@ -35,7 +35,7 @@ public class XPathQueryBuilder implements QueryBuilder<Condition> {
private boolean sortIgnoreCase;
private Value bound;
private long offset;
- private long maxCount = -1;
+ private long maxCount = Long.MAX_VALUE;
//-------------------------------------------------------< QueryBuilder >---
@Override
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-546_428e32c6.diff |
bugs-dot-jar_data_OAK-498_f2a2edec | ---
BugID: OAK-498
Summary: NamePathMapper should fail on absolute paths escaping root
Description: "The name path mapper should no accept invalid paths of type\n\n{code}\n/..\n{code}\n\nI.e.
paths which escape beyond the root of the hierarchy. "
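A self-contained illustration of the depth rule the patch enforces (simplified: plain '/'-separated input, no namespace handling):
{code}
public class PathDepthCheck {
    // An absolute path escapes the root as soon as ".." steps above depth 0.
    static boolean escapesRoot(String path) {
        boolean absolute = path.startsWith("/");
        int depth = 0;
        for (String element : path.split("/")) {
            if (element.isEmpty() || ".".equals(element)) {
                continue;
            }
            if ("..".equals(element)) {
                depth--;
                if (absolute && depth < 0) {
                    return true;   // e.g. "/.." or "/a/../.."
                }
            } else {
                depth++;
            }
        }
        return false;              // "/a/.." is still fine: it resolves to "/"
    }
}
{code}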
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/JcrPathParser.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/JcrPathParser.java
index 2dc5bee..2854048 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/JcrPathParser.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/JcrPathParser.java
@@ -261,6 +261,7 @@ public class JcrPathParser {
public static boolean validate(String jcrPath) {
Listener listener = new Listener() {
+ int depth;
boolean hasRoot;
@Override
public boolean root() {
@@ -280,7 +281,8 @@ public class JcrPathParser {
@Override
public boolean parent() {
- return true;
+ depth--;
+ return !hasRoot || depth >= 0;
}
@Override
@@ -289,6 +291,7 @@ public class JcrPathParser {
@Override
public boolean name(String name, int index) {
+ depth++;
return true;
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/NamePathMapperImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/NamePathMapperImpl.java
index 65639f1..c19578f 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/NamePathMapperImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/namepath/NamePathMapperImpl.java
@@ -21,6 +21,7 @@ import java.util.List;
import javax.annotation.Nonnull;
+import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.identifier.IdentifierManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -77,7 +78,7 @@ public class NamePathMapperImpl implements NamePathMapper {
@Override
@Nonnull
- public String getJcrPath(String oakPath) {
+ public String getJcrPath(final String oakPath) {
final List<String> elements = new ArrayList<String>();
if ("/".equals(oakPath)) {
@@ -103,11 +104,18 @@ public class NamePathMapperImpl implements NamePathMapper {
@Override
public boolean parent() {
- if (elements.isEmpty() || "..".equals(elements.get(elements.size() - 1))) {
+ int prevIdx = elements.size() - 1;
+ String prevElem = prevIdx >= 0 ? elements.get(prevIdx) : null;
+
+ if (prevElem == null || PathUtils.denotesParent(prevElem)) {
elements.add("..");
return true;
}
- elements.remove(elements.size() - 1);
+ if (prevElem.isEmpty()) {
+ throw new IllegalArgumentException("Absolute path escapes root: " + oakPath);
+ }
+
+ elements.remove(prevElem);
return true;
}
@@ -150,7 +158,7 @@ public class NamePathMapperImpl implements NamePathMapper {
return jcrPath.toString();
}
- private String getOakPath(String jcrPath, final boolean keepIndex) {
+ private String getOakPath(final String jcrPath, final boolean keepIndex) {
if ("/".equals(jcrPath)) {
// avoid the need to special case the root path later on
return "/";
@@ -237,11 +245,19 @@ public class NamePathMapperImpl implements NamePathMapper {
@Override
public boolean parent() {
- if (elements.isEmpty() || "..".equals(elements.get(elements.size() - 1))) {
+ int prevIdx = elements.size() - 1;
+ String prevElem = prevIdx >= 0 ? elements.get(prevIdx) : null;
+
+ if (prevElem == null || PathUtils.denotesParent(prevElem)) {
elements.add("..");
return true;
}
- elements.remove(elements.size() - 1);
+ if (prevElem.isEmpty()) {
+ parseErrors.append("Absolute path escapes root: ").append(jcrPath);
+ return false;
+ }
+
+ elements.remove(prevElem);
return true;
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-498_f2a2edec.diff |
bugs-dot-jar_data_OAK-2933_44585b0c | ---
BugID: OAK-2933
Summary: AccessDenied when modifying transiently moved item with too many ACEs
Description: |
If at least the following preconditions are fulfilled, saving a moved item fails with access denied:
1. there are more PermissionEntries in the PermissionEntryCache than the configured EagerCacheSize
2. an node is moved to a location where the user has write access through a group membership
3. a property is added to the transiently moved item
For example:
1. set the *eagerCacheSize* to '0'
2. create new group *testgroup* and user *testuser*
3. make *testuser* member of *testgroup*
4. create nodes {{/testroot/a}} and {{/testroot/a/b}} and {{/testroot/a/c}}
5. allow *testgroup* {{rep:write}} on {{/testroot/a}}
6. as *testuser* create {{/testroot/a/b/item}} (to verify that the user has write access)
7. as *testuser* move {{/testroot/a/b/item}} to {{/testroot/a/c/item}}
8. {{save()}} -> works
9. as *testuser* move {{/testroot/a/c/item}} back to {{/testroot/a/b/item}} AND add new property to the transient {{/testroot/a/b/item}}
10. {{save()}} -> access denied
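Steps 6-10 condensed into a JCR snippet, run as the test user (paths taken from the scenario above, the property name is illustrative):
{code}
import javax.jcr.RepositoryException;
import javax.jcr.Session;

public class MoveAndModifyRepro {
    static void repro(Session testUserSession) throws RepositoryException {
        Session s = testUserSession;
        s.getNode("/testroot/a/b").addNode("item");              // step 6
        s.save();
        s.move("/testroot/a/b/item", "/testroot/a/c/item");      // step 7
        s.save();                                                // step 8: works
        s.move("/testroot/a/c/item", "/testroot/a/b/item");      // step 9: move back ...
        s.getNode("/testroot/a/b/item").setProperty("p", "v");   // ... and add a property
        s.save();                                                // step 10: AccessDeniedException
    }
}
{code}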
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/MoveAwarePermissionValidator.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/MoveAwarePermissionValidator.java
index 3f2382a..cee534a 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/MoveAwarePermissionValidator.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/MoveAwarePermissionValidator.java
@@ -75,10 +75,11 @@ public class MoveAwarePermissionValidator extends PermissionValidator {
private Validator visibleValidator(@Nonnull Tree source,
@Nonnull Tree dest) {
// TODO improve: avoid calculating the 'before' permissions in case the current parent permissions already point to the correct tree.
- ImmutableTree parent = (ImmutableTree) moveCtx.rootBefore.getTree("/");
- TreePermission tp = getPermissionProvider().getTreePermission(parent, TreePermission.EMPTY);
+ ImmutableTree immutableTree = (ImmutableTree) moveCtx.rootBefore.getTree("/");
+ TreePermission tp = getPermissionProvider().getTreePermission(immutableTree, TreePermission.EMPTY);
for (String n : PathUtils.elements(source.getPath())) {
- tp = tp.getChildPermission(n, parent.getChild(n).getNodeState());
+ immutableTree = immutableTree.getChild(n);
+ tp = tp.getChildPermission(n, immutableTree.getNodeState());
}
Validator validator = createValidator(source, dest, tp, this);
return new VisibleValidator(validator, true, false);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2933_44585b0c.diff |
bugs-dot-jar_data_OAK-3156_786b3d76 | ---
BugID: OAK-3156
Summary: Lucene suggestions index definition can't be restricted to a specific type
of node
Description: "While performing a suggestor query like \n\n{code}\nSELECT [rep:suggest()]
as suggestion FROM [nt:unstructured] WHERE suggest('foo')\n{code}\n\nSuggestor
does not provide any result. In current implementation, [suggestions|http://jackrabbit.apache.org/oak/docs/query/lucene.html#Suggestions]
in Oak work only for index definitions for {{nt:base}} nodetype.\nSo, an index definition
like:\n{code:xml}\n <lucene-suggest\n jcr:primaryType=\"oak:QueryIndexDefinition\"\n
\ async=\"async\"\n compatVersion=\"{Long}2\"\n type=\"lucene\">\n
\ <indexRules jcr:primaryType=\"nt:unstructured\">\n <nt:base jcr:primaryType=\"nt:unstructured\">\n
\ <properties jcr:primaryType=\"nt:unstructured\">\n <description\n
\ jcr:primaryType=\"nt:unstructured\"\n analyzed=\"{Boolean}true\"\n
\ name=\"description\"\n propertyIndex=\"{Boolean}true\"\n
\ useInSuggest=\"{Boolean}true\"/>\n </properties>\n
\ </nt:base>\n </indexRules>\n </lucene-suggest>\n{code}\nworks,
but if we change nodetype to {{nt:unstructured}} like:\n{code:xml}\n <lucene-suggest\n
\ jcr:primaryType=\"oak:QueryIndexDefinition\"\n async=\"async\"\n
\ compatVersion=\"{Long}2\"\n type=\"lucene\">\n <indexRules
jcr:primaryType=\"nt:unstructured\">\n <nt:unstructured jcr:primaryType=\"nt:unstructured\">\n
\ <properties jcr:primaryType=\"nt:unstructured\">\n <description\n
\ jcr:primaryType=\"nt:unstructured\"\n analyzed=\"{Boolean}true\"\n
\ name=\"description\"\n propertyIndex=\"{Boolean}true\"\n
\ useInSuggest=\"{Boolean}true\"/>\n </properties>\n
\ </nt:base>\n </indexRules>\n </lucene-suggest>\n{code}\n,
it won't work.\n\nThe issue is that suggestor implementation essentially is passing
a pseudo row with path=/.:\n{code:title=LucenePropertyIndex.java}\n private boolean
loadDocs() {\n...\n queue.add(new LuceneResultRow(suggestedWords));\n...\n{code}\nand\n{code:title=LucenePropertyIndex.java}\n
\ LuceneResultRow(Iterable<String> suggestWords) {\n this.path
= \"/\";\n this.score = 1.0d;\n this.suggestWords = suggestWords;\n
\ }\n{code}\nDue to path being set to \"/\", {{SelectorImpl}} later filters
out the result as {{rep:root}} (primary type of \"/\") isn't a {{nt:unstructured}}."
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java
index 0a38732..a582292 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java
@@ -1038,14 +1038,17 @@ public class LuceneIndex implements AdvanceFulltextQueryIndex {
final String path;
final double score;
final Iterable<String> suggestWords;
+ final boolean isVirtual;
LuceneResultRow(String path, double score) {
+ this.isVirtual = false;
this.path = path;
this.score = score;
this.suggestWords = Collections.emptySet();
}
LuceneResultRow(Iterable<String> suggestWords) {
+ this.isVirtual = true;
this.path = "/";
this.score = 1.0d;
this.suggestWords = suggestWords;
@@ -1110,7 +1113,7 @@ public class LuceneIndex implements AdvanceFulltextQueryIndex {
@Override
public boolean isVirtualRow() {
- return getPath() == null;
+ return currentRow.isVirtual;
}
@Override
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndex.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndex.java
index 4095277..2b7e477 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndex.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndex.java
@@ -1280,14 +1280,17 @@ public class LucenePropertyIndex implements AdvancedQueryIndex, QueryIndex, Nati
final String path;
final double score;
final Iterable<String> suggestWords;
+ final boolean isVirutal;
LuceneResultRow(String path, double score) {
+ this.isVirutal = false;
this.path = path;
this.score = score;
this.suggestWords = Collections.emptySet();
}
LuceneResultRow(Iterable<String> suggestWords) {
+ this.isVirutal = true;
this.path = "/";
this.score = 1.0d;
this.suggestWords = suggestWords;
@@ -1354,7 +1357,7 @@ public class LucenePropertyIndex implements AdvancedQueryIndex, QueryIndex, Nati
@Override
public boolean isVirtualRow() {
- return getPath() == null;
+ return currentRow.isVirutal;
}
@Override
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3156_786b3d76.diff |
bugs-dot-jar_data_OAK-642_7a84b3a8 | ---
BugID: OAK-642
Summary: NPE trying to add a node to an nt:folder node
Description: "The following code throws a NPE:\n\n{code}\nSession s = getAdminSession();\ns.getRootNode().addNode(\"a\",
\"nt:folder\").addNode(\"b\");\ns.save(); \n{code}\n\nStack trace:\n{code}\njava.lang.NullPointerException\nat
com.google.common.base.Preconditions.checkNotNull(Preconditions.java:191)\nat org.apache.jackrabbit.oak.namepath.LocalNameMapper.getOakNameOrNull(LocalNameMapper.java:82)\nat
org.apache.jackrabbit.oak.namepath.GlobalNameMapper.getOakName(GlobalNameMapper.java:64)\nat
org.apache.jackrabbit.oak.namepath.NamePathMapperImpl.getOakName(NamePathMapperImpl.java:62)\nat
org.apache.jackrabbit.oak.plugins.nodetype.ReadOnlyNodeTypeManager.getOakName(ReadOnlyNodeTypeManager.java:92)\nat
org.apache.jackrabbit.oak.plugins.nodetype.ReadOnlyNodeTypeManager.getNodeType(ReadOnlyNodeTypeManager.java:186)\nat
org.apache.jackrabbit.oak.jcr.NodeImpl$5.perform(NodeImpl.java:265)\nat org.apache.jackrabbit.oak.jcr.NodeImpl$5.perform(NodeImpl.java:1)\nat
org.apache.jackrabbit.oak.jcr.SessionDelegate.perform(SessionDelegate.java:136)\nat
org.apache.jackrabbit.oak.jcr.NodeImpl.addNode(NodeImpl.java:219)\nat org.apache.jackrabbit.oak.jcr.NodeImpl.addNode(NodeImpl.java:210)\nat
org.apache.jackrabbit.oak.jcr.CRUDTest.nodeType(CRUDTest.java:122)\n{code}\n"
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java
index 2c8566b..7cb7e2c 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/NodeImpl.java
@@ -250,11 +250,10 @@ public class NodeImpl<T extends NodeDelegate> extends ItemImpl<T> implements Nod
String ntName = primaryNodeTypeName;
if (ntName == null) {
DefinitionProvider dp = sessionDelegate.getDefinitionProvider();
- try {
- String childName = sessionDelegate.getOakName(PathUtils.getName(relPath));
- NodeDefinition def = dp.getDefinition(new NodeImpl<NodeDelegate>(parent), childName);
- ntName = def.getDefaultPrimaryTypeName();
- } catch (RepositoryException e) {
+ String childName = sessionDelegate.getOakName(PathUtils.getName(relPath));
+ NodeDefinition def = dp.getDefinition(new NodeImpl<NodeDelegate>(parent), childName);
+ ntName = def.getDefaultPrimaryTypeName();
+ if (ntName == null) {
throw new ConstraintViolationException(
"no matching child node definition found for " + relPath);
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-642_7a84b3a8.diff |
bugs-dot-jar_data_OAK-1516_7c62bd81 | ---
BugID: OAK-1516
Summary: PhraseQuery fails due to missing position info in indexed fields
Description: "Following OAK-1487 I've introduced a regression in the indexing of fields
on the Lucene index.\nThere are some types of queries (the ones that use property
restrictions) that cannot run anymore.\n\nbq. /jcr:root/content/dam//*[jcr:contains(jcr:content/metadata/@dc:format,
'application/pdf')] \n\nbq. Caused by: java.lang.IllegalStateException: field \"dc:format\"
was indexed without position data; cannot run PhraseQuery (term=text)\n\nI could
not reproduce this in an unit test so far."
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java
index a16d437..467b64d 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java
@@ -707,7 +707,7 @@ public class LuceneIndex implements FulltextQueryIndex {
MultiPhraseQuery mpq = new MultiPhraseQuery();
for(String token: tokens){
if (hasFulltextToken(token)) {
- Term[] terms = extractMatchingTokens(reader, token);
+ Term[] terms = extractMatchingTokens(reader, fieldName, token);
if (terms != null && terms.length > 0) {
mpq.add(terms);
}
@@ -726,7 +726,7 @@ public class LuceneIndex implements FulltextQueryIndex {
}
}
- private static Term[] extractMatchingTokens(IndexReader reader, String token) {
+ private static Term[] extractMatchingTokens(IndexReader reader, String fieldName, String token) {
if (reader == null) {
// getPlan call
return null;
@@ -734,13 +734,14 @@ public class LuceneIndex implements FulltextQueryIndex {
try {
List<Term> terms = new ArrayList<Term>();
- Terms t = MultiFields.getTerms(reader, FieldNames.FULLTEXT);
- Automaton a = WildcardQuery.toAutomaton(newFulltextTerm(token));
+ Term onTerm = newFulltextTerm(token, fieldName);
+ Terms t = MultiFields.getTerms(reader, onTerm.field());
+ Automaton a = WildcardQuery.toAutomaton(onTerm);
CompiledAutomaton ca = new CompiledAutomaton(a);
TermsEnum te = ca.getTermsEnum(t);
BytesRef text;
while ((text = te.next()) != null) {
- terms.add(newFulltextTerm(text.utf8ToString()));
+ terms.add(newFulltextTerm(text.utf8ToString(), fieldName));
}
return terms.toArray(new Term[terms.size()]);
} catch (IOException e) {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1516_7c62bd81.diff |
bugs-dot-jar_data_OAK-3872_c13708e3 | ---
BugID: OAK-3872
Summary: "[RDB] Updated blob still deleted even if deletion interval lower"
Description: "If an existing blob is uploaded again, the timestamp of the existing
entry is updated in the meta table. Subsequently if a call to delete (RDBBlobStore#countDeleteChunks)
is made with a {{maxLastModifiedTime}} parameter less than the updated time above,
the entry in the meta table is not touched but the data table entry is wiped out.
\n\nRefer https://github.com/apache/jackrabbit-oak/blob/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStore.java#L510\n"
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStore.java
index 530c877..2ae668b 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStore.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStore.java
@@ -506,8 +506,10 @@ public class RDBBlobStore extends CachingBlobStore implements Closeable {
.append(inClause.getStatementComponent());
if (maxLastModifiedTime > 0) {
+ // delete only if the last modified is OLDER than x
metaStatement.append(" and LASTMOD <= ?");
- dataStatement.append(" and not exists(select * from " + this.tnMeta + " m where ID = m.ID and m.LASTMOD <= ?)");
+ // delete if there is NO entry where the last modified of the meta is YOUNGER than x
+ dataStatement.append(" and not exists(select * from " + this.tnMeta + " m where ID = m.ID and m.LASTMOD > ?)");
}
prepMeta = con.prepareStatement(metaStatement.toString());
@@ -522,8 +524,17 @@ public class RDBBlobStore extends CachingBlobStore implements Closeable {
prepData.setLong(dindex, maxLastModifiedTime);
}
- count += prepMeta.executeUpdate();
- prepData.execute();
+ int deletedMeta = prepMeta.executeUpdate();
+ int deletedData = prepData.executeUpdate();
+
+ if (deletedMeta != deletedData) {
+ String message = String.format(
+ "chunk deletion affected different numbers of DATA records (%s) and META records (%s)", deletedMeta,
+ deletedData);
+ LOG.info(message);
+ }
+
+ count += deletedMeta;
} finally {
closeStatement(prepMeta);
closeStatement(prepData);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3872_c13708e3.diff |
bugs-dot-jar_data_OAK-3517_24f7f60a | ---
BugID: OAK-3517
Summary: Node.addNode(String, String) may check nt-mgt-permission against the wrong
node
Description: |-
While I was troubleshooting an issue we're having in AEM 6.1, I've noticed an "impossible" access denied exception in the logs: the user had permission to add nodes under the node in question but still got an error.
Some testing narrowed the issue down to a difference in behavior between the following two invocations:
{{someNode.getNode("child").addNode("grandchild", "nt:unstructured");}}
{{someNode.addNode("child/grandchild", "nt:unstructured");}}
As far as I can tell, both should behave identically per the JCR spec, but the second one fails if the user doesn't have node type management permission to someNode, even if they have that permission to someNode/child.
I believe the issue is in line 283 of [NodeImpl|https://svn.apache.org/repos/asf/jackrabbit/oak/trunk/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/session/NodeImpl.java]: it is checking permissions against dlg.getTree(), but it should really check against parent.getTree(), or if possible, the path of the node that's about to be created (so glob restrictions can be evaluated).
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/session/NodeImpl.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/session/NodeImpl.java
index 07e0ae3..2e1733d 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/session/NodeImpl.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/session/NodeImpl.java
@@ -290,7 +290,7 @@ public class NodeImpl<T extends NodeDelegate> extends ItemImpl<T> implements Nod
// modification of that property in the PermissionValidator
if (oakTypeName != null) {
PropertyState prop = PropertyStates.createProperty(JCR_PRIMARYTYPE, oakTypeName, NAME);
- sessionContext.getAccessManager().checkPermissions(dlg.getTree(), prop, Permissions.NODE_TYPE_MANAGEMENT);
+ sessionContext.getAccessManager().checkPermissions(parent.getTree(), prop, Permissions.NODE_TYPE_MANAGEMENT);
}
NodeDelegate added = parent.addChild(oakName, oakTypeName);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-3517_24f7f60a.diff |
bugs-dot-jar_data_OAK-1674_073b814c | ---
BugID: OAK-1674
Summary: Node isNew() is false in case the node is removed and added in same commit
Description: |-
When you remove a Node /path/a transiently and add one at /path/a again,
the isNew() check on the transiently added Node will be false.
{code}
root.getNode(name).remove();
Node newNode = root.addNode(name);
newNode.isNew() => false
{code}
The API says
{quote}
Returns true if this is a new item, meaning that it exists only in transient storage on the Session and has not yet been saved. Within a transaction, isNew on an Item may return false (because the item has been saved) even if that Item is not in persistent storage (because the transaction has not yet been committed)....
{quote}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/tree/AbstractTree.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/tree/AbstractTree.java
index 51afd2e..30f70ad 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/tree/AbstractTree.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/tree/AbstractTree.java
@@ -204,7 +204,7 @@ public abstract class AbstractTree implements Tree {
@Override
public Status getStatus() {
- if (nodeBuilder.isNew()) {
+ if (nodeBuilder.isNew() || nodeBuilder.isReplaced()) {
return NEW;
} else if (nodeBuilder.isModified()) {
return MODIFIED;
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1674_073b814c.diff |
bugs-dot-jar_data_OAK-2062_5c4589bd | ---
BugID: OAK-2062
Summary: Range queries and relative properties resultset should be consistent with
JR2
Description: When running a range query like {{/jcr:root/content/nodes//*[(*/*/*/@prop
>= 9)]}} the resultset is not consistent for the same use-case when running in Jackrabbit
2.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
index 11427c3..44e31c0 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
@@ -58,6 +58,8 @@ import org.apache.jackrabbit.oak.spi.query.QueryIndex;
import org.apache.jackrabbit.oak.spi.query.QueryIndex.AdvancedQueryIndex;
import org.apache.jackrabbit.oak.spi.query.QueryIndex.IndexPlan;
import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
@@ -66,6 +68,7 @@ import com.google.common.collect.Iterables;
* A selector within a query.
*/
public class SelectorImpl extends SourceImpl {
+ private static final Logger LOG = LoggerFactory.getLogger(SelectorImpl.class);
// TODO possibly support using multiple indexes (using index intersection / index merge)
private SelectorExecutionPlan plan;
@@ -547,6 +550,10 @@ public class SelectorImpl extends SourceImpl {
boolean asterisk = oakPropertyName.indexOf('*') >= 0;
if (asterisk) {
Tree t = currentTree();
+ if (t != null) {
+ LOG.trace("currentOakProperty() - '*' case. looking for '{}' in '{}'",
+ oakPropertyName, t.getPath());
+ }
ArrayList<PropertyValue> list = new ArrayList<PropertyValue>();
readOakProperties(list, t, oakPropertyName, propertyType);
if (list.size() == 0) {
@@ -630,10 +637,13 @@ public class SelectorImpl extends SourceImpl {
private void readOakProperties(ArrayList<PropertyValue> target, Tree t, String oakPropertyName, Integer propertyType) {
boolean skipCurrentNode = false;
- while (true) {
+
+ while (!skipCurrentNode) {
if (t == null || !t.exists()) {
return;
}
+ LOG.trace("readOakProperties() - reading '{}' for '{}'", t.getPath(),
+ oakPropertyName);
int slash = oakPropertyName.indexOf('/');
if (slash < 0) {
break;
@@ -659,6 +669,7 @@ public class SelectorImpl extends SourceImpl {
if (!"*".equals(oakPropertyName)) {
PropertyValue value = currentOakProperty(t, oakPropertyName, propertyType);
if (value != null) {
+ LOG.trace("readOakProperties() - adding: '{}' from '{}'", value, t.getPath());
target.add(value);
}
return;
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2062_5c4589bd.diff |
bugs-dot-jar_data_OAK-448_999097e1 | ---
BugID: OAK-448
Summary: Node builder for existing node returns null for base state
Description: "{{MemoryNodeBuilder.getBaseState()}} returns null on builder for an
existing node."
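A sketch of the expected behaviour, using the NodeState/NodeBuilder API as it exists in later oak-core versions (the exact factory methods may differ for the revision this patch targets):
{code}
import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;

public class BaseStateCheck {
    static void check() {
        NodeBuilder setup = EmptyNodeState.EMPTY_NODE.builder();
        setup.child("existing");
        NodeState base = setup.getNodeState();       // base state containing "existing"

        NodeBuilder builder = base.builder();
        NodeState childBase = builder.child("existing").getBaseState();
        // expected: childBase is the existing child of the base state, not null
        // (it was null before the fix)
    }
}
{code}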
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
index ce13f08..6600cc7 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilder.java
@@ -24,7 +24,6 @@ import javax.annotation.Nonnull;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
-
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.spi.state.AbstractNodeState;
@@ -268,11 +267,12 @@ public class MemoryNodeBuilder implements NodeBuilder {
@Override
public boolean isModified() {
+ NodeState baseState = getBaseState();
if (writeState == null) {
return false;
}
else {
- NodeState baseState = getBaseState();
+ Map<String, MutableNodeState> nodes = writeState.nodes;
for (Entry<String, MutableNodeState> n : writeState.nodes.entrySet()) {
if (n.getValue() == null) {
return true;
@@ -308,6 +308,7 @@ public class MemoryNodeBuilder implements NodeBuilder {
@Override
public NodeState getBaseState() {
+ read();
return baseState;
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-448_999097e1.diff |
bugs-dot-jar_data_OAK-2389_7c320b1e | ---
BugID: OAK-2389
Summary: issues with JsopBuilder.encode and .escape
Description: |-
1) escape() escapes many characters that do not need to be escaped (>127)
2) encode() does not encode many control characters that would need to be escaped when read through a JSON parser.
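For illustration, assuming {{JsopBuilder.encode}} stays a static helper, the behaviour after the patch below would look like this (the input value is illustrative):
{code}
import org.apache.jackrabbit.oak.commons.json.JsopBuilder;

public class EncodeExample {
    public static void main(String[] args) {
        // control characters are escaped, characters above 127 are kept as-is
        String quoted = JsopBuilder.encode("a\u0001é");
        System.out.println(quoted);
        // expected: the quoted string contains a backslash-u0001 escape for the
        // control character, while 'é' is kept verbatim; before the fix 'é' was
        // also expanded to a backslash-u00e9 escape
    }
}
{code}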
diff --git a/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/json/JsopBuilder.java b/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/json/JsopBuilder.java
index b42beff..a9a8b9a 100644
--- a/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/json/JsopBuilder.java
+++ b/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/json/JsopBuilder.java
@@ -254,7 +254,7 @@ public class JsopBuilder implements JsopWriter {
}
for (int i = 0; i < length; i++) {
char c = s.charAt(i);
- if (c == '\"' || c == '\\' || c < ' ' || c >= 127) {
+ if (c == '\"' || c == '\\' || c < ' ') {
StringBuilder buff = new StringBuilder(length + 2 + length / 8);
buff.append('\"');
escape(s, length, buff);
@@ -269,13 +269,20 @@ public class JsopBuilder implements JsopWriter {
* Escape a string into the target buffer.
*
* @param s the string to escape
+ * @param buff the target buffer
+ */
+ public static void escape(String s, StringBuilder buff) {
+ escape(s, s.length(), buff);
+ }
+
+ /**
+ * Escape a string into the target buffer.
+ *
+ * @param s the string to escape
* @param length the number of characters.
* @param buff the target buffer
*/
- public static void escape(String s, int length, StringBuilder buff) {
- // TODO only backslashes, double quotes, and characters < 32 need to be
- // escaped - but currently all special characters are escaped, which
- // needs more time, memory, and storage space
+ private static void escape(String s, int length, StringBuilder buff) {
for (int i = 0; i < length; i++) {
char c = s.charAt(i);
switch (c) {
@@ -309,21 +316,10 @@ public class JsopBuilder implements JsopWriter {
break;
default:
if (c < ' ') {
- // guaranteed to be 1 or 2 hex digits only
buff.append("\\u00");
- String hex = Integer.toHexString(c);
- if (hex.length() == 1) {
- buff.append('0');
- }
- buff.append(hex);
- } else if (c >= 127) {
- // ascii only mode
- buff.append("\\u");
- String hex = Integer.toHexString(c);
- for (int len = hex.length(); len < 4; len++) {
- buff.append('0');
- }
- buff.append(hex);
+ // guaranteed to be 1 or 2 hex digits only
+ buff.append(Character.forDigit(c >>> 4, 16));
+ buff.append(Character.forDigit(c & 15, 16));
} else {
buff.append(c);
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2389_7c320b1e.diff |
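A small sketch of the encoding behaviour described above, assuming JsopBuilder's static encode(String) entry point; the expected outputs in the comments reflect the fixed behaviour (non-ASCII kept literal, control characters escaped):
{code:java}
import org.apache.jackrabbit.oak.commons.json.JsopBuilder;

public class EncodeSketch {

    public static void main(String[] args) {
        // Characters above 127 no longer force escaping.
        System.out.println(JsopBuilder.encode("café"));     // expected: "café" (quoted, é kept literal)

        // Control characters (here U+0001) are written as backslash-u escapes.
        System.out.println(JsopBuilder.encode("a\u0001b")); // expected: quoted, with the control char escaped
    }
}
{code}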
bugs-dot-jar_data_OAK-4291_cdb34ffc | ---
BugID: OAK-4291
Summary: FileStore.flush prone to races leading to corruption
Description: "There is a small window in {{FileStore.flush}} that could lead to data
corruption: if we crash right after setting the persisted head but before any delay-flushed
{{SegmentBufferWriter}} instance flushes (see {{SegmentBufferWriterPool.returnWriter()}})
then that data is lost although it might already be referenced from the persisted
head.\n\nWe need to come up with a test case for this. \n\nA possible fix would
be to return a future from {{SegmentWriter.flush}} and rely on a completion callback.
Such a change would most likely also be useful for OAK-3690. \n"
diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBufferWriterPool.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBufferWriterPool.java
index c035323..610b3ca 100644
--- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBufferWriterPool.java
+++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBufferWriterPool.java
@@ -34,6 +34,8 @@ import java.util.Set;
import javax.annotation.Nonnull;
import com.google.common.base.Supplier;
+import com.google.common.util.concurrent.Monitor;
+import com.google.common.util.concurrent.Monitor.Guard;
/**
* This {@link WriteOperationHandler} uses a pool of {@link SegmentBufferWriter}s,
@@ -43,8 +45,27 @@ import com.google.common.base.Supplier;
* {@link SegmentWriter}.
*/
public class SegmentBufferWriterPool implements WriteOperationHandler {
+
+ /**
+ * Monitor protecting the state of this pool. Neither of {@link #writers},
+ * {@link #borrowed} and {@link #disposed} must be modified without owning
+ * this monitor.
+ */
+ private final Monitor poolMonitor = new Monitor(true);
+
+ /**
+ * Pool of current writers that are not in use
+ */
private final Map<Object, SegmentBufferWriter> writers = newHashMap();
+
+ /**
+ * Writers that are currently in use
+ */
private final Set<SegmentBufferWriter> borrowed = newHashSet();
+
+ /**
+ * Retired writers that have not yet been flushed
+ */
private final Set<SegmentBufferWriter> disposed = newHashSet();
@Nonnull
@@ -95,38 +116,111 @@ public class SegmentBufferWriterPool implements WriteOperationHandler {
@Override
public void flush() throws IOException {
List<SegmentBufferWriter> toFlush = newArrayList();
- synchronized (this) {
+ List<SegmentBufferWriter> toReturn = newArrayList();
+
+ poolMonitor.enter();
+ try {
+ // Collect all writers that are not currently in use and clear
+ // the list so they won't get re-used anymore.
toFlush.addAll(writers.values());
- toFlush.addAll(disposed);
writers.clear();
- disposed.clear();
+
+ // Collect all borrowed writers, which we need to wait for.
+ // Clear the list so they will get disposed once returned.
+ toReturn.addAll(borrowed);
borrowed.clear();
+ } finally {
+ poolMonitor.leave();
+ }
+
+ // Wait for the return of the borrowed writers. This is the
+ // case once all of them appear in the disposed set.
+ if (safeEnterWhen(poolMonitor, allReturned(toReturn))) {
+ try {
+ // Collect all disposed writers and clear the list to mark them
+ // as flushed.
+ toFlush.addAll(toReturn);
+ disposed.removeAll(toReturn);
+ } finally {
+ poolMonitor.leave();
+ }
}
- // Call flush from outside a synchronized context to avoid
+
+ // Call flush from outside the pool monitor to avoid potential
// deadlocks of that method calling SegmentStore.writeSegment
for (SegmentBufferWriter writer : toFlush) {
writer.flush();
}
}
- private synchronized SegmentBufferWriter borrowWriter(Object key) {
- SegmentBufferWriter writer = writers.remove(key);
- if (writer == null) {
- writer = new SegmentBufferWriter(store, tracker, reader, version, getWriterId(wid), gcGeneration.get());
- } else if (writer.getGeneration() != gcGeneration.get()) {
- disposed.add(writer);
- writer = new SegmentBufferWriter(store, tracker, reader, version, getWriterId(wid), gcGeneration.get());
+ /**
+ * Create a {@code Guard} that is satisfied if and only if {@link #disposed}
+ * contains all items in {@code toReturn}
+ */
+ @Nonnull
+ private Guard allReturned(final List<SegmentBufferWriter> toReturn) {
+ return new Guard(poolMonitor) {
+
+ @Override
+ public boolean isSatisfied() {
+ return disposed.containsAll(toReturn);
+ }
+
+ };
+ }
+
+ /**
+ * Same as {@code monitor.enterWhen(guard)} but copes with that pesky {@code
+ * InterruptedException} by catching it and setting this thread's
+ * interrupted flag.
+ */
+ private static boolean safeEnterWhen(Monitor monitor, Guard guard) {
+ try {
+ monitor.enterWhen(guard);
+ return true;
+ } catch (InterruptedException ignore) {
+ Thread.currentThread().interrupt();
+ return false;
}
- borrowed.add(writer);
- return writer;
}
- private synchronized void returnWriter(Object key, SegmentBufferWriter writer) {
- if (borrowed.remove(writer)) {
- checkState(writers.put(key, writer) == null);
- } else {
- // Defer flush this writer as it was borrowed while flush() was called.
- disposed.add(writer);
+ /**
+ * Return a writer from the pool by its {@code key}. This method may return
+ * a fresh writer at any time. Callers need to return a writer before
+ * borrowing it again. Failing to do so leads to undefined behaviour.
+ */
+ private SegmentBufferWriter borrowWriter(Object key) {
+ poolMonitor.enter();
+ try {
+ SegmentBufferWriter writer = writers.remove(key);
+ if (writer == null) {
+ writer = new SegmentBufferWriter(store, tracker, reader, version, getWriterId(wid), gcGeneration.get());
+ } else if (writer.getGeneration() != gcGeneration.get()) {
+ disposed.add(writer);
+ writer = new SegmentBufferWriter(store, tracker, reader, version, getWriterId(wid), gcGeneration.get());
+ }
+ borrowed.add(writer);
+ return writer;
+ } finally {
+ poolMonitor.leave();
+ }
+ }
+
+ /**
+ * Return a writer to the pool using the {@code key} that was used to borrow
+ * it.
+ */
+ private void returnWriter(Object key, SegmentBufferWriter writer) {
+ poolMonitor.enter();
+ try {
+ if (borrowed.remove(writer)) {
+ checkState(writers.put(key, writer) == null);
+ } else {
+ // Defer flush this writer as it was borrowed while flush() was called.
+ disposed.add(writer);
+ }
+ } finally {
+ poolMonitor.leave();
}
}
diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/WriteOperationHandler.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/WriteOperationHandler.java
index 1a585c0..b122a0a 100644
--- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/WriteOperationHandler.java
+++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/WriteOperationHandler.java
@@ -58,9 +58,6 @@ interface WriteOperationHandler {
/**
* Flush any pending changes on any {@link SegmentBufferWriter} managed by this instance.
- * This method <em>does not block</em> to wait for concurrent write operations. However, if
- * a write operation is currently in progress a call to this method ensures the respective
- * changes are properly flushed at the end of that call.
* @throws IOException
*/
void flush() throws IOException;
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-4291_cdb34ffc.diff |
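The patch above coordinates flush() with in-flight writers through Guava's Monitor/Guard. A self-contained sketch of that wait-until-returned pattern, with strings standing in for the writers; this illustrates the synchronization idiom, not the pool itself:
{code:java}
import java.util.HashSet;
import java.util.Set;

import com.google.common.util.concurrent.Monitor;
import com.google.common.util.concurrent.Monitor.Guard;

public class MonitorGuardSketch {

    private final Monitor monitor = new Monitor(true); // fair, like the pool monitor
    private final Set<String> returned = new HashSet<String>();

    /** Blocks until every expected item has been returned, then consumes them. */
    void awaitReturned(final Set<String> expected) throws InterruptedException {
        Guard allBack = new Guard(monitor) {
            @Override
            public boolean isSatisfied() {
                return returned.containsAll(expected);
            }
        };
        monitor.enterWhen(allBack); // waits until the guard is satisfied
        try {
            returned.removeAll(expected);
        } finally {
            monitor.leave();
        }
    }

    void markReturned(String id) {
        monitor.enter();
        try {
            returned.add(id); // leaving the monitor re-evaluates waiting guards
        } finally {
            monitor.leave();
        }
    }
}
{code}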
bugs-dot-jar_data_OAK-1932_913c2f53 | ---
BugID: OAK-1932
Summary: TarMK compaction can create mixed segments
Description: As described in http://markmail.org/message/ujkqdlthudaortxf, commits
that occur while the compaction operation is running can make the compacted segments
contain references to older data segments, which prevents old data from being reclaimed
during cleanup.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/CompactionMap.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/CompactionMap.java
index a33cba5..bbf5d30 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/CompactionMap.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/CompactionMap.java
@@ -93,6 +93,19 @@ public class CompactionMap {
return after.equals(get(before));
}
+ /**
+ * Checks whether content in the segment with the given identifier was
+ * compacted to new segments.
+ *
+ * @param id segment identifier
+ * @return whether the identified segment was compacted
+ */
+ boolean wasCompacted(SegmentId id) {
+ long msb = id.getMostSignificantBits();
+ long lsb = id.getLeastSignificantBits();
+ return findEntry(msb, lsb) != -1;
+ }
+
public RecordId get(RecordId before) {
RecordId after = recent.get(before);
if (after != null) {
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Compactor.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Compactor.java
index 45bbbcc..14278ca 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Compactor.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Compactor.java
@@ -75,7 +75,8 @@ public class Compactor {
public Compactor(SegmentWriter writer) {
this.writer = writer;
- this.builder = writer.writeNode(EMPTY_NODE).builder();
+ this.builder =
+ new SegmentNodeBuilder(writer.writeNode(EMPTY_NODE), writer);
}
public SegmentNodeState compact(NodeState before, NodeState after) {
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Record.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Record.java
index f65a168..ec8fbfd 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Record.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Record.java
@@ -65,6 +65,15 @@ class Record {
}
/**
+ * Returns the tracker of the segment that contains this record.
+ *
+ * @return segment tracker
+ */
+ protected SegmentTracker getTracker() {
+ return segmentId.getTracker();
+ }
+
+ /**
* Returns the segment that contains this record.
*
* @return segment that contains this record
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentNodeBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentNodeBuilder.java
index 7fc474f..d532bdf 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentNodeBuilder.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentNodeBuilder.java
@@ -33,8 +33,12 @@ public class SegmentNodeBuilder extends MemoryNodeBuilder {
private long updateCount = 0;
SegmentNodeBuilder(SegmentNodeState base) {
+ this(base, base.getTracker().getWriter());
+ }
+
+ SegmentNodeBuilder(SegmentNodeState base, SegmentWriter writer) {
super(base);
- this.writer = base.getRecordId().getSegmentId().getTracker().getWriter();
+ this.writer = writer;
}
//-------------------------------------------------< MemoryNodeBuilder >--
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
index 41fc1ef..a11b931 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
@@ -940,10 +940,22 @@ public class SegmentWriter {
return id;
}
+ private SegmentNodeState uncompact(SegmentNodeState state) {
+ RecordId id = tracker.getCompactionMap().get(state.getRecordId());
+ if (id != null) {
+ return new SegmentNodeState(id);
+ } else {
+ return state;
+ }
+ }
+
public SegmentNodeState writeNode(NodeState state) {
- if (state instanceof SegmentNodeState
- && store.containsSegment(((SegmentNodeState) state).getRecordId().getSegmentId())) {
- return (SegmentNodeState) state;
+ if (state instanceof SegmentNodeState) {
+ SegmentNodeState sns = uncompact((SegmentNodeState) state);
+ if (sns != state || store.containsSegment(
+ sns.getRecordId().getSegmentId())) {
+ return sns;
+ }
}
SegmentNodeState before = null;
@@ -952,10 +964,13 @@ public class SegmentWriter {
if (state instanceof ModifiedNodeState) {
after = (ModifiedNodeState) state;
NodeState base = after.getBaseState();
- if (base instanceof SegmentNodeState
- && store.containsSegment(((SegmentNodeState) base).getRecordId().getSegmentId())) {
- before = (SegmentNodeState) base;
- beforeTemplate = before.getTemplate();
+ if (base instanceof SegmentNodeState) {
+ SegmentNodeState sns = uncompact((SegmentNodeState) base);
+ if (sns != base || store.containsSegment(
+ sns.getRecordId().getSegmentId())) {
+ before = sns;
+ beforeTemplate = before.getTemplate();
+ }
}
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/FileStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/FileStore.java
index 3243572..f378247 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/FileStore.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/FileStore.java
@@ -415,6 +415,7 @@ public class FileStore implements SegmentStore {
SegmentNodeState before = getHead();
SegmentNodeState after = compactor.compact(EMPTY_NODE, before);
+ writer.flush();
while (!setHead(before, after)) {
// Some other concurrent changes have been made.
// Rebase (and compact) those changes on top of the
@@ -422,9 +423,16 @@ public class FileStore implements SegmentStore {
SegmentNodeState head = getHead();
after = compactor.compact(before, head);
before = head;
+ writer.flush();
}
tracker.setCompactionMap(compactor.getCompactionMap());
+ // Drop the SegmentWriter caches and flush any existing state
+ // in an attempt to prevent new references to old pre-compacted
+ // content. TODO: There should be a cleaner way to do this.
+ tracker.getWriter().dropCache();
+ tracker.getWriter().flush();
+
log.info("TarMK compaction completed in {}ms", MILLISECONDS
.convert(System.nanoTime() - start, NANOSECONDS));
cleanupNeeded.set(true);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1932_913c2f53.diff |
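The key idea in the SegmentWriter change above is to route writes through the compaction map, so references to pre-compaction records are rewritten to their compacted equivalents before they can leak into new segments. A plain-Java stand-in for that lookup, with strings as hypothetical record ids:
{code:java}
import java.util.HashMap;
import java.util.Map;

public class UncompactSketch {

    // Hypothetical stand-in for the compaction map: pre-compaction id -> compacted id.
    private final Map<String, String> compactionMap = new HashMap<String, String>();

    /** Returns the compacted record id if one exists, otherwise the original id. */
    String uncompact(String recordId) {
        String after = compactionMap.get(recordId);
        return after != null ? after : recordId;
    }
}
{code}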
bugs-dot-jar_data_OAK-2049_4af0d4ee | ---
BugID: OAK-2049
Summary: ArrayIndexOutOfBoundsException in Segment.getRefId()
Description: |-
It looks like there is some SegmentMK bug that causes the {{Segment.getRefId()}} to throw an {{ArrayIndexOutOfBoundsException}} in some fairly rare corner cases.
The data was originally migrated into oak via the crx2oak tool mentioned here: http://docs.adobe.com/docs/en/aem/6-0/deploy/upgrade.html
That tool uses *oak-core-1.0.0* creating an oak instance.
Similar to OAK-1566 this system was using FileDataStore with SegmentNodeStore.
In this case the error is seen when running offline compaction using oak-run-1.1-SNAPSHOT.jar (latest).
{code:none}
> java -Xmx4096m -jar oak-run-1.1-SNAPSHOT.jar compact /oak/crx-quickstart/repository/segmentstore
Apache Jackrabbit Oak 1.1-SNAPSHOT
Compacting /wcm/cq-author/crx-quickstart/repository/segmentstore
before [data00055a.tar, data00064a.tar, data00045b.tar, data00005a.tar, data00018a.tar, data00022a.tar, data00047a.tar, data00037a.tar, data00049a.tar, data00014a.tar, data00066a.tar, data00020a.tar, data00058a.tar, data00065a.tar, data00069a.tar, data00012a.tar, data00009a.tar, data00060a.tar, data00041a.tar, data00016a.tar, data00072a.tar, data00048a.tar, data00061a.tar, data00053a.tar, data00038a.tar, data00001a.tar, data00034a.tar, data00003a.tar, data00052a.tar, data00006a.tar, data00027a.tar, data00031a.tar, data00056a.tar, data00035a.tar, data00063a.tar, data00068a.tar, data00008v.tar, data00010a.tar, data00043b.tar, data00021a.tar, data00017a.tar, data00024a.tar, data00054a.tar, data00051a.tar, data00057a.tar, data00059a.tar, data00036a.tar, data00033a.tar, data00019a.tar, data00046a.tar, data00067a.tar, data00004a.tar, data00044a.tar, data00013a.tar, data00070a.tar, data00026a.tar, data00002a.tar, data00011a.tar, journal.log, data00030a.tar, data00042a.tar, data00025a.tar, data00062a.tar, data00023a.tar, data00071a.tar, data00032b.tar, data00040a.tar, data00015a.tar, data00029a.tar, data00050a.tar, data00000a.tar, data00007a.tar, data00028a.tar, data00039a.tar]
-> compacting
Exception in thread "main" java.lang.ArrayIndexOutOfBoundsException: 206
at org.apache.jackrabbit.oak.plugins.segment.Segment.getRefId(Segment.java:191)
at org.apache.jackrabbit.oak.plugins.segment.Segment.internalReadRecordId(Segment.java:299)
at org.apache.jackrabbit.oak.plugins.segment.Segment.readRecordId(Segment.java:295)
at org.apache.jackrabbit.oak.plugins.segment.SegmentNodeState.getTemplateId(SegmentNodeState.java:69)
at org.apache.jackrabbit.oak.plugins.segment.SegmentNodeState.getTemplate(SegmentNodeState.java:78)
at org.apache.jackrabbit.oak.plugins.segment.SegmentNodeState.getProperties(SegmentNodeState.java:150)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:154)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.Compactor$CompactDiff.childNodeAdded(Compactor.java:124)
at org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.compareAgainstEmptyState(EmptyNodeState.java:160)
at org.apache.jackrabbit.oak.plugins.segment.SegmentNodeState.compareAgainstBaseState(SegmentNodeState.java:395)
at org.apache.jackrabbit.oak.plugins.segment.Compactor.process(Compactor.java:80)
at org.apache.jackrabbit.oak.plugins.segment.Compactor.compact(Compactor.java:85)
at org.apache.jackrabbit.oak.plugins.segment.file.FileStore.compact(FileStore.java:438)
at org.apache.jackrabbit.oak.run.Main.compact(Main.java:311)
at org.apache.jackrabbit.oak.run.Main.main(Main.java:133)
{code}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
index 84ac11a..62811a0 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
@@ -46,6 +46,7 @@ import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -191,6 +192,8 @@ public class SegmentWriter {
refcount * 16 + rootcount * 3 + blobrefcount * 2 + length,
16);
+ checkState(length <= buffer.length);
+
int pos = refcount * 16;
if (pos + length <= buffer.length) {
// the whole segment fits to the space *after* the referenced
@@ -291,14 +294,23 @@ public class SegmentWriter {
refcount -= idcount;
Set<SegmentId> segmentIds = newIdentityHashSet();
+
+ // The set of old record ids in this segment
+ // that were previously root record ids, but will no longer be,
+ // because the record to be written references them.
+ // This needs to be a set, because the list of ids can
+ // potentially reference the same record multiple times
+ Set<RecordId> notRoots = new HashSet<RecordId>();
for (RecordId recordId : ids) {
SegmentId segmentId = recordId.getSegmentId();
if (segmentId != segment.getSegmentId()) {
segmentIds.add(segmentId);
} else if (roots.containsKey(recordId)) {
- rootcount--;
+ notRoots.add(recordId);
}
}
+ rootcount -= notRoots.size();
+
if (!segmentIds.isEmpty()) {
for (int refid = 1; refid < refcount; refid++) {
segmentIds.remove(segment.getRefId(refid));
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2049_4af0d4ee.diff |
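The SegmentWriter hunk above fixes an over-count: rootcount was decremented once per reference, even when the same root record was referenced several times. A small stand-alone illustration of the difference, assuming for simplicity that every referenced id is currently a root:
{code:java}
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class RootCountSketch {

    public static void main(String[] args) {
        // Hypothetical record ids referenced by the record being written;
        // the same root record may be referenced more than once.
        List<String> ids = Arrays.asList("r1", "r2", "r1");
        int rootcount = 2; // r1 and r2 are currently roots

        // Buggy: decrement once per reference.
        int buggy = rootcount;
        for (String id : ids) {
            buggy--;
        }

        // Fixed: collect the ids in a set so each root is subtracted once.
        Set<String> notRoots = new HashSet<String>(ids);
        int fixed = rootcount - notRoots.size();

        System.out.println(buggy + " vs " + fixed); // -1 vs 0
    }
}
{code}
An off-by-N rootcount changes the size computed for the segment layout, which is consistent with the ArrayIndexOutOfBoundsException later reported from getRefId().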
bugs-dot-jar_data_OAK-644_55a4f738 | ---
BugID: OAK-644
Summary: Revisit PrivilegeDefinitionStore's use of null as a child name parameter
Description: |-
As discussed on OAK-635, I'm extracting the PrivilegeDefinitionStore code&patch into a dedicated issue.
Following the discussion on the dev list, I've filed it as a bug, as nulls are not considered valid input parameters.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/AccessControlConfigurationImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/AccessControlConfigurationImpl.java
index aa7b8d8..7531580 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/AccessControlConfigurationImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/AccessControlConfigurationImpl.java
@@ -70,7 +70,8 @@ public class AccessControlConfigurationImpl extends SecurityConfiguration.Defaul
return new CommitHookProvider() {
@Override
public CommitHook getCommitHook(String workspaceName) {
- return new CompositeHook(new PermissionHook(workspaceName), new VersionablePathHook(workspaceName));
+ //FIXME return new CompositeHook(new PermissionHook(workspaceName), new VersionablePathHook(workspaceName));
+ return new CompositeHook(new VersionablePathHook(workspaceName));
}
};
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/privilege/PrivilegeDefinitionStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/privilege/PrivilegeDefinitionStore.java
index 6ae510c..2fa44f7 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/privilege/PrivilegeDefinitionStore.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/privilege/PrivilegeDefinitionStore.java
@@ -38,6 +38,8 @@ import org.apache.jackrabbit.oak.util.TreeUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static com.google.common.base.Preconditions.checkNotNull;
+
/**
* Reads and writes privilege definitions from and to the repository content
* without applying any validation.
@@ -110,7 +112,7 @@ public class PrivilegeDefinitionStore implements PrivilegeConstants {
}
PrivilegeBits bits = PrivilegeBits.getInstance();
for (String privilegeName : privilegeNames) {
- Tree defTree = privilegesTree.getChild(privilegeName);
+ Tree defTree = privilegesTree.getChild(checkNotNull(privilegeName));
if (defTree != null) {
bits.add(PrivilegeBits.getInstance(defTree));
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-644_55a4f738.diff |
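A minimal sketch of the fail-fast pattern the patch introduces: Guava's checkNotNull rejects a null name at the call boundary instead of passing it further down. The map-based lookup below is a hypothetical stand-in for the privilege tree access:
{code:java}
import static com.google.common.base.Preconditions.checkNotNull;

import java.util.HashMap;
import java.util.Map;

public class CheckNotNullSketch {

    /** Throws NullPointerException immediately if privilegeName is null. */
    static String lookup(Map<String, String> definitions, String privilegeName) {
        return definitions.get(checkNotNull(privilegeName));
    }

    public static void main(String[] args) {
        Map<String, String> defs = new HashMap<String, String>();
        defs.put("jcr:read", "definition");
        System.out.println(lookup(defs, "jcr:read")); // definition
        // lookup(defs, null) would fail fast with a NullPointerException.
    }
}
{code}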
bugs-dot-jar_data_OAK-479_3270e761 | ---
BugID: OAK-479
Summary: Adding a node to a node that doesn't accept children doesn't fail with ConstraintViolationException
Description: "More node type fun!\n\nI ran into this via the tck test {{org.apache.jackrabbit.test.api.query.SaveTest#testConstraintViolationException}}.\n\nIt
seems adding a node to a node that doesn't accept children (like for example {{nt:query}})
fails with a {{RepositoryException}} that wraps a {{CommitFailedException}} with
a message along the lines of: {{Cannot add node 'q2' at /q1}}, further wrapping
a weird-looking {{RepositoryException: No matching node definition found for org.apache.jackrabbit.oak.plugins.nodetype.ValidatingNodeTypeManager@257f1b}}\n\nWhile
this seems ok enough, the tck test expects a {{ConstraintViolationException}}, so
that's why I created this bug.\n\n\nI'll attach a test case shortly.\n\nTrace \n{code}\njavax.jcr.RepositoryException\n\tat
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)\n\tat sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)\n\tat
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)\n\tat
java.lang.reflect.Constructor.newInstance(Constructor.java:513)\n\tat org.apache.jackrabbit.oak.api.CommitFailedException.throwRepositoryException(CommitFailedException.java:57)\n\tat
org.apache.jackrabbit.oak.jcr.SessionDelegate.save(SessionDelegate.java:244)\n\tat
org.apache.jackrabbit.oak.jcr.SessionImpl.save(SessionImpl.java:283)\n\tat org.apache.jackrabbit.oak.jcr.nodetype.NodeTypeTest.illegalAddNode(NodeTypeTest.java:39)\n\tat
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)\n\tat
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)\n\tat
java.lang.reflect.Method.invoke(Method.java:597)\n\tat org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:45)\n\tat
org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)\n\tat
org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:42)\n\tat
org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)\n\tat
org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)\n\tat
org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:30)\n\tat
org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:263)\n\tat org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:68)\n\tat
org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:47)\n\tat
org.junit.runners.ParentRunner$3.run(ParentRunner.java:231)\n\tat org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:60)\n\tat
org.junit.runners.ParentRunner.runChildren(ParentRunner.java:229)\n\tat org.junit.runners.ParentRunner.access$000(ParentRunner.java:50)\n\tat
org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:222)\n\tat org.junit.runners.ParentRunner.run(ParentRunner.java:300)\n\tat
org.eclipse.jdt.internal.junit4.runner.JUnit4TestReference.run(JUnit4TestReference.java:50)\n\tat
org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:38)\n\tat
org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:467)\n\tat
org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:683)\n\tat
org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:390)\n\tat
org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:197)\nCaused
by: org.apache.jackrabbit.oak.api.CommitFailedException: Cannot add node 'q2' at
/q1\n\tat org.apache.jackrabbit.oak.plugins.nodetype.TypeValidator.childNodeAdded(TypeValidator.java:134)\n\tat
org.apache.jackrabbit.oak.spi.commit.CompositeValidator.childNodeAdded(CompositeValidator.java:68)\n\tat
org.apache.jackrabbit.oak.spi.commit.ValidatingHook$ValidatorDiff.childNodeAdded(ValidatingHook.java:155)\n\tat
org.apache.jackrabbit.oak.spi.state.AbstractNodeState.compareAgainstBaseState(AbstractNodeState.java:157)\n\tat
org.apache.jackrabbit.oak.kernel.KernelNodeState.compareAgainstBaseState(KernelNodeState.java:243)\n\tat
org.apache.jackrabbit.oak.spi.commit.ValidatingHook$ValidatorDiff.validate(ValidatingHook.java:110)\n\tat
org.apache.jackrabbit.oak.spi.commit.ValidatingHook$ValidatorDiff.validate(ValidatingHook.java:101)\n\tat
org.apache.jackrabbit.oak.spi.commit.ValidatingHook$ValidatorDiff.childNodeAdded(ValidatingHook.java:157)\n\tat
org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState$3.childNodeAdded(ModifiedNodeState.java:292)\n\tat
org.apache.jackrabbit.oak.spi.state.AbstractNodeState.compareAgainstBaseState(AbstractNodeState.java:157)\n\tat
org.apache.jackrabbit.oak.kernel.KernelNodeState.compareAgainstBaseState(KernelNodeState.java:243)\n\tat
org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState.compareAgainstBaseState(ModifiedNodeState.java:269)\n\tat
org.apache.jackrabbit.oak.spi.commit.ValidatingHook$ValidatorDiff.validate(ValidatingHook.java:110)\n\tat
org.apache.jackrabbit.oak.spi.commit.ValidatingHook$ValidatorDiff.validate(ValidatingHook.java:101)\n\tat
org.apache.jackrabbit.oak.spi.commit.ValidatingHook.processCommit(ValidatingHook.java:73)\n\tat
org.apache.jackrabbit.oak.spi.commit.CompositeHook.processCommit(CompositeHook.java:59)\n\tat
org.apache.jackrabbit.oak.kernel.KernelNodeStoreBranch.merge(KernelNodeStoreBranch.java:127)\n\tat
org.apache.jackrabbit.oak.core.RootImpl$2.run(RootImpl.java:239)\n\tat org.apache.jackrabbit.oak.core.RootImpl$2.run(RootImpl.java:1)\n\tat
java.security.AccessController.doPrivileged(Native Method)\n\tat javax.security.auth.Subject.doAs(Subject.java:337)\n\tat
org.apache.jackrabbit.oak.core.RootImpl.commit(RootImpl.java:234)\n\tat org.apache.jackrabbit.oak.jcr.SessionDelegate.save(SessionDelegate.java:241)\n\t...
27 more\nCaused by: javax.jcr.RepositoryException: No matching node definition found
for org.apache.jackrabbit.oak.plugins.nodetype.ValidatingNodeTypeManager@257f1b\n\tat
org.apache.jackrabbit.oak.plugins.nodetype.ReadOnlyNodeTypeManager.getDefinition(ReadOnlyNodeTypeManager.java:406)\n\tat
org.apache.jackrabbit.oak.plugins.nodetype.TypeValidator$EffectiveNodeType.getDefinition(TypeValidator.java:302)\n\tat
org.apache.jackrabbit.oak.plugins.nodetype.TypeValidator$EffectiveNodeType.checkAddChildNode(TypeValidator.java:249)\n\tat
org.apache.jackrabbit.oak.plugins.nodetype.TypeValidator.childNodeAdded(TypeValidator.java:127)\n\t...
49 more\n{code}\n\n"
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/ItemImpl.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/ItemImpl.java
index a584c10..efde490 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/ItemImpl.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/ItemImpl.java
@@ -169,7 +169,14 @@ abstract class ItemImpl<T extends ItemDelegate> extends AbstractItem {
}
void checkProtected() throws RepositoryException {
- ItemDefinition definition = (isNode()) ? ((Node) this).getDefinition() : ((Property) this).getDefinition();
+ ItemDefinition definition;
+ try {
+ definition = (isNode()) ? ((Node) this).getDefinition() : ((Property) this).getDefinition();
+ }
+ catch (RepositoryException ignore) {
+ // No definition -> not protected but a different error which should be handled else where
+ return;
+ }
checkProtected(definition);
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-479_3270e761.diff |
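A hedged sketch of the TCK scenario described above, assuming an already-opened JCR session; node names are illustrative. Depending on when the implementation validates, the exception may surface on addNode() or on save(), but it should be a ConstraintViolationException rather than a generic RepositoryException:
{code:java}
import javax.jcr.Node;
import javax.jcr.Session;
import javax.jcr.nodetype.ConstraintViolationException;

public class IllegalAddNodeSketch {

    static void run(Session session) throws Exception {
        try {
            Node q1 = session.getRootNode().addNode("q1", "nt:query");
            q1.addNode("q2"); // nt:query defines no child node definitions
            session.save();
            System.out.println("unexpected: no exception");
        } catch (ConstraintViolationException expected) {
            // This is the outcome the TCK test expects.
        }
    }
}
{code}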
bugs-dot-jar_data_OAK-278_db19e70f | ---
BugID: OAK-278
Summary: Tree.getStatus() and Tree.getPropertyStatus() fail for items whose parent
has been removed
Description:
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/core/TreeImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/core/TreeImpl.java
index a3d3536..abc942a 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/core/TreeImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/core/TreeImpl.java
@@ -50,9 +50,12 @@ public class TreeImpl implements Tree, PurgeListener {
/** Underlying {@code Root} of this {@code Tree} instance */
private final RootImpl root;
- /** Parent of this tree. Null for the root and this for removed trees. */
+ /** Parent of this tree. Null for the root. */
private TreeImpl parent;
+ /** Marker for removed trees */
+ private boolean removed;
+
/** Name of this tree */
private String name;
@@ -254,7 +257,7 @@ public class TreeImpl implements Tree, PurgeListener {
NodeBuilder builder = parent.getNodeBuilder();
builder.removeNode(name);
parent.children.remove(name);
- parent = this;
+ removed = true;
root.purge();
return true;
} else {
@@ -390,6 +393,10 @@ public class TreeImpl implements Tree, PurgeListener {
}
private Status internalGetPropertyStatus(String name) {
+ if (isRemoved()) {
+ return Status.REMOVED;
+ }
+
NodeState baseState = getBaseState();
boolean exists = internalGetProperty(name) != null;
if (baseState == null) {
@@ -433,14 +440,10 @@ public class TreeImpl implements Tree, PurgeListener {
}
private boolean isRemoved() {
- return parent == this;
+ return removed || (parent != null && parent.isRemoved());
}
private void buildPath(StringBuilder sb) {
- if (isRemoved()) {
- throw new IllegalStateException("Cannot build the path of a removed tree");
- }
-
if (!isRoot()) {
parent.buildPath(sb);
sb.append('/').append(name);
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-278_db19e70f.diff |
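A short sketch of the behaviour the fix enables, assuming a Root with /a/b already present and the Tree API of that Oak version (which still exposed a REMOVED status); paths are illustrative:
{code:java}
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;

public class RemovedStatusSketch {

    static void run(Root root) {
        Tree b = root.getTree("/a/b");
        root.getTree("/a").remove();

        // Before the fix these calls failed while building the path of a
        // removed tree; afterwards they report the removed status.
        System.out.println(b.getStatus());
        System.out.println(b.getPropertyStatus("p")); // assumes /a/b has a property "p"
    }
}
{code}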
bugs-dot-jar_data_OAK-4376_037dea72 | ---
BugID: OAK-4376
Summary: 'XPath: queries starting with "//" are not always converted correctly'
Description: |-
XPath queries starting with "//" are not always converted to the expected SQL-2 query. Examples:
{noformat}
//element(*, oak:QueryIndexDefinition)/*
select [jcr:path], [jcr:score], * from [oak:QueryIndexDefinition] as a
//element(*, oak:QueryIndexDefinition)//*
select [jcr:path], [jcr:score], * from [oak:QueryIndexDefinition] as a
{noformat}
This is wrong. Instead, a join should be used.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java
index 9bfbd2d..d64d7cf 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/xpath/XPathToSQL2Converter.java
@@ -162,6 +162,10 @@ public class XPathToSQL2Converter {
} else if (readIf("/")) {
// "//" was read
pathPattern += "%";
+ if (currentSelector.isDescendant) {
+ // the query started with "//", and now "//" was read
+ nextSelector(true);
+ }
currentSelector.isDescendant = true;
} else {
// the token "/" was read
@@ -169,6 +173,10 @@ public class XPathToSQL2Converter {
if (startOfQuery) {
currentSelector.path = "/";
} else {
+ if (currentSelector.isDescendant) {
+ // the query started with "//", and now "/" was read
+ nextSelector(true);
+ }
currentSelector.isChild = true;
}
}
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-4376_037dea72.diff |
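A small way to observe the conversion, assuming the converter's public convert(String) entry point; after the fix the generated statement should contain a join with a descendant/child join condition instead of collapsing the second step:
{code:java}
import org.apache.jackrabbit.oak.query.xpath.XPathToSQL2Converter;

public class XPathConversionSketch {

    public static void main(String[] args) throws Exception {
        String xpath = "//element(*, oak:QueryIndexDefinition)//*";
        System.out.println(new XPathToSQL2Converter().convert(xpath));
    }
}
{code}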
bugs-dot-jar_data_OAK-2345_a0dc4c89 | ---
BugID: OAK-2345
Summary: Diff reads too many nodes
Description: "DocumentNodeStore.diffManyChildren() may read too many nodes when there
is an inactive cluster node with an old _lastRev on the root document. This is a
regression introduced with the fix for OAK-2232.\n\nThe fix assumes an inactive
cluster node does not have a revision range with an old revision seen at a current
timestamp. The DocumentNodeStore will in fact purge revisions from the range in
the RevisionComparator after an hour. But on startup the first background read may
populate the RevisionComparator with a revision, which is potentially very old (e.g.
if the clusterId is not used anymore). "
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
index 95a82d5..29a363e 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
@@ -1532,6 +1532,13 @@ public final class DocumentNodeStore
Revision last = lastKnownRevision.get(machineId);
if (last == null || r.compareRevisionTime(last) > 0) {
lastKnownRevision.put(machineId, r);
+ // OAK-2345
+ // only consider as external change if
+ // - the revision changed for the machineId
+ // or
+ // - the revision is within the time frame we remember revisions
+ if (last != null
+ || r.getTimestamp() > revisionPurgeMillis())
externalChanges.put(r, otherSeen);
}
}
@@ -1562,7 +1569,17 @@ public final class DocumentNodeStore
backgroundOperationLock.writeLock().unlock();
}
}
- revisionComparator.purge(Revision.getCurrentTimestamp() - REMEMBER_REVISION_ORDER_MILLIS);
+ revisionComparator.purge(revisionPurgeMillis());
+ }
+
+ /**
+ * Returns the time in milliseconds when revisions can be purged from the
+ * revision comparator.
+ *
+ * @return time in milliseconds.
+ */
+ private static long revisionPurgeMillis() {
+ return Revision.getCurrentTimestamp() - REMEMBER_REVISION_ORDER_MILLIS;
}
private void backgroundSplit() {
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2345_a0dc4c89.diff |
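A plain-Java sketch of the guard added in the patch: an external change is only recorded if the revision is new for that cluster node or still falls inside the window for which revision ordering is remembered. The constant and System.currentTimeMillis() are illustrative stand-ins:
{code:java}
import java.util.concurrent.TimeUnit;

public class PurgeWindowSketch {

    // Illustrative stand-in for REMEMBER_REVISION_ORDER_MILLIS.
    private static final long REMEMBER_MILLIS = TimeUnit.MINUTES.toMillis(60);

    private static long revisionPurgeMillis() {
        return System.currentTimeMillis() - REMEMBER_MILLIS;
    }

    /**
     * @param lastKnownTimestamp last revision timestamp seen for the cluster
     *                           node, or null if none was seen yet
     */
    static boolean isExternalChange(Long lastKnownTimestamp, long revisionTimestamp) {
        return lastKnownTimestamp != null
                || revisionTimestamp > revisionPurgeMillis();
    }
}
{code}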
bugs-dot-jar_data_OAK-579_7d72e6ed | ---
BugID: OAK-579
Summary: 'Query: for joins, sometimes no or the wrong index is used'
Description: "Currently, no index is used for the join condition. For example, the
query:\n\n{code}\nselect * from [nodeTypeA] as a \ninner join [nodeTypeB] as b\non
isdescendantnode(b, a) \nwhere lower(a.x) = 'y'\nand b.[property] is not null\n{code}\n\ncurrently
doesn't take into account that the path of the selector 'a' is known at the time
selector 'b' is accessed (given that selector 'a' is executed first). So in this
case, the query would use an index on the property b.[property], even if this index
has a very bad selectivity (many nodes with this problem), or the query would use
the node type index on [nodeTypeB], even if there are many nodes of this type.\n\nInstead,
most likely the query should do a traversal, using the isdescendantnode(b, a) join
condition."
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/Query.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/Query.java
index 57bf0d1..002bad8 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/Query.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/Query.java
@@ -308,7 +308,7 @@ public class Query {
prepare();
Iterator<ResultRowImpl> it;
if (explain) {
- String plan = source.getPlan(rootState);
+ String plan = getPlan();
columns = new ColumnImpl[] { new ColumnImpl("explain", "plan", "plan")};
ResultRowImpl r = new ResultRowImpl(this,
new String[0],
@@ -317,7 +317,7 @@ public class Query {
it = Arrays.asList(r).iterator();
} else {
if (LOG.isDebugEnabled()) {
- LOG.debug("plan: " + source.getPlan(rootState));
+ LOG.debug("plan: " + getPlan());
}
if (orderings == null) {
// can apply limit and offset directly
@@ -437,6 +437,15 @@ public class Query {
}
return comp;
}
+
+ /**
+ * Get the query plan. The query must already be prepared.
+ *
+ * @return the query plan
+ */
+ private String getPlan() {
+ return source.getPlan(rootState);
+ }
void prepare() {
if (prepared) {
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/QueryEngineImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/QueryEngineImpl.java
index a7569bc..2a79d67 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/QueryEngineImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/QueryEngineImpl.java
@@ -176,11 +176,14 @@ public abstract class QueryEngineImpl implements QueryEngine {
best = index;
}
}
- if (best == null) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("no indexes found - using TraversingIndex; indexProvider: " + indexProvider);
- }
- best = new TraversingIndex();
+ QueryIndex index = new TraversingIndex();
+ double cost = index.getCost(filter, rootState);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("cost for " + index.getIndexName() + " is " + cost);
+ }
+ if (cost < bestCost) {
+ bestCost = cost;
+ best = index;
}
return best;
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ChildNodeJoinConditionImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ChildNodeJoinConditionImpl.java
index 829196a..6213557 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ChildNodeJoinConditionImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ChildNodeJoinConditionImpl.java
@@ -66,12 +66,22 @@ public class ChildNodeJoinConditionImpl extends JoinConditionImpl {
public void restrict(FilterImpl f) {
if (f.getSelector() == parentSelector) {
String c = childSelector.currentPath();
+ if (c == null && f.isPreparing() && childSelector.isPrepared()) {
+ // during the prepare phase, if the selector is already
+ // prepared, then we would know the value
+ c = KNOWN_PATH;
+ }
if (c != null) {
f.restrictPath(PathUtils.getParentPath(c), Filter.PathRestriction.EXACT);
}
}
if (f.getSelector() == childSelector) {
String p = parentSelector.currentPath();
+ if (p == null && f.isPreparing() && parentSelector.isPrepared()) {
+ // during the prepare phase, if the selector is already
+ // prepared, then we would know the value
+ p = KNOWN_PATH;
+ }
if (p != null) {
f.restrictPath(p, Filter.PathRestriction.DIRECT_CHILDREN);
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/DescendantNodeJoinConditionImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/DescendantNodeJoinConditionImpl.java
index 1552155..8e5f945 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/DescendantNodeJoinConditionImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/DescendantNodeJoinConditionImpl.java
@@ -66,12 +66,22 @@ public class DescendantNodeJoinConditionImpl extends JoinConditionImpl {
public void restrict(FilterImpl f) {
if (f.getSelector() == ancestorSelector) {
String d = descendantSelector.currentPath();
+ if (d == null && f.isPreparing() && descendantSelector.isPrepared()) {
+ // during the prepare phase, if the selector is already
+ // prepared, then we would know the value
+ d = KNOWN_PATH;
+ }
if (d != null) {
f.restrictPath(PathUtils.getParentPath(d), Filter.PathRestriction.PARENT);
}
}
if (f.getSelector() == descendantSelector) {
String a = ancestorSelector.currentPath();
+ if (a == null && f.isPreparing() && ancestorSelector.isPrepared()) {
+ // during the prepare phase, if the selector is already
+ // prepared, then we would know the value
+ a = KNOWN_PATH;
+ }
if (a != null) {
f.restrictPath(a, Filter.PathRestriction.DIRECT_CHILDREN);
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/EquiJoinConditionImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/EquiJoinConditionImpl.java
index dae131d..01d1ce2 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/EquiJoinConditionImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/EquiJoinConditionImpl.java
@@ -97,21 +97,35 @@ public class EquiJoinConditionImpl extends JoinConditionImpl {
public void restrict(FilterImpl f) {
if (f.getSelector() == selector1) {
PropertyValue p2 = selector2.currentProperty(property2Name);
+ if (p2 == null && f.isPreparing() && selector2.isPrepared()) {
+ // during the prepare phase, if the selector is already
+ // prepared, then we would know the value
+ p2 = PropertyValues.newString(KNOWN_VALUE);
+ }
if (p2 != null) {
- if (!p2.isArray()) {
+ if (p2.isArray()) {
// TODO support join on multi-valued properties
- f.restrictProperty(property1Name, Operator.EQUAL, p2);
+ p2 = null;
}
}
+ // always set the condition, even if unkown ( -> is not null)
+ f.restrictProperty(property1Name, Operator.EQUAL, p2);
}
if (f.getSelector() == selector2) {
PropertyValue p1 = selector1.currentProperty(property1Name);
+ if (p1 == null && f.isPreparing() && selector1.isPrepared()) {
+ // during the prepare phase, if the selector is already
+ // prepared, then we would know the value
+ p1 = PropertyValues.newString(KNOWN_VALUE);
+ }
if (p1 != null) {
- if (!p1.isArray()) {
+ if (p1.isArray()) {
// TODO support join on multi-valued properties
- f.restrictProperty(property2Name, Operator.EQUAL, p1);
+ p1 = null;
}
}
+ // always set the condition, even if unkown ( -> is not null)
+ f.restrictProperty(property2Name, Operator.EQUAL, p1);
}
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/JoinConditionImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/JoinConditionImpl.java
index b3e2fd5..f5c8298 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/JoinConditionImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/JoinConditionImpl.java
@@ -19,11 +19,36 @@ import org.apache.jackrabbit.oak.query.index.FilterImpl;
* The base class for join conditions.
*/
public abstract class JoinConditionImpl extends AstElement {
+
+ /**
+ * A path with 6 elements, which is the expected average for a join.
+ */
+ protected static final String KNOWN_PATH = "/path/from/the/join/selector";
- public abstract boolean evaluate();
+ protected static final String KNOWN_VALUE = "valueFromTheJoinSelector";
+ /**
+ * Evaluate the result using the currently set values.
+ *
+ * @return true if the constraint matches
+ */
+ public abstract boolean evaluate();
+
+ /**
+ * Apply the condition to the filter, further restricting the filter if
+ * possible. This may also verify the data types are compatible, and that
+ * paths are valid.
+ *
+ * @param f the filter
+ */
public abstract void restrict(FilterImpl f);
- public abstract void restrictPushDown(SelectorImpl selectorImpl);
+ /**
+ * Push as much of the condition down to this selector, further restricting
+ * the selector condition if possible.
+ *
+ * @param s the selector
+ */
+ public abstract void restrictPushDown(SelectorImpl s);
}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/JoinImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/JoinImpl.java
index 6e4e408..a5ab71f 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/JoinImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/JoinImpl.java
@@ -60,8 +60,15 @@ public class JoinImpl extends SourceImpl {
@Override
public String getPlan(NodeState rootState) {
- return left.getPlan(rootState) + ' ' + joinType +
- " " + right.getPlan(rootState) + " on " + joinCondition;
+ StringBuilder buff = new StringBuilder();
+ buff.append(left.getPlan(rootState)).
+ append(' ').
+ append(joinType).
+ append(' ').
+ append(right.getPlan(rootState)).
+ append(" on ").
+ append(joinCondition);
+ return buff.toString();
}
@Override
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SameNodeJoinConditionImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SameNodeJoinConditionImpl.java
index 187a795..7ab0cbe 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SameNodeJoinConditionImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SameNodeJoinConditionImpl.java
@@ -81,6 +81,11 @@ public class SameNodeJoinConditionImpl extends JoinConditionImpl {
public void restrict(FilterImpl f) {
if (f.getSelector() == selector1) {
String p2 = selector2.currentPath();
+ if (p2 == null && f.isPreparing() && selector2.isPrepared()) {
+ // during the prepare phase, if the selector is already
+ // prepared, then we would know the value
+ p2 = KNOWN_PATH;
+ }
if (p2 != null) {
if (selector2Path.equals(".")) {
f.restrictPath(p2, Filter.PathRestriction.EXACT);
@@ -93,6 +98,11 @@ public class SameNodeJoinConditionImpl extends JoinConditionImpl {
}
if (f.getSelector() == selector2) {
String p1 = selector1.currentPath();
+ if (p1 == null && f.isPreparing() && selector1.isPrepared()) {
+ // during the prepare phase, if the selector is already
+ // prepared, then we would know the value
+ p1 = KNOWN_PATH;
+ }
if (p1 != null) {
if (selector2Path.equals(".")) {
f.restrictPath(p1, Filter.PathRestriction.EXACT);
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
index ecf1e5b..3e779ff 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
@@ -95,6 +95,9 @@ public class SelectorImpl extends SourceImpl {
return quote(nodeTypeName) + " as " + quote(selectorName);
}
+ public boolean isPrepared() {
+ return index != null;
+ }
@Override
public void prepare() {
@@ -106,19 +109,19 @@ public class SelectorImpl extends SourceImpl {
c.restrictPushDown(this);
}
}
- index = query.getBestIndex(createFilter());
+ index = query.getBestIndex(createFilter(true));
}
@Override
public void execute(NodeState rootState) {
- cursor = index.query(createFilter(), rootState);
+ cursor = index.query(createFilter(false), rootState);
}
@Override
public String getPlan(NodeState rootState) {
StringBuilder buff = new StringBuilder();
buff.append(toString());
- buff.append(" /* ").append(index.getPlan(createFilter(), rootState));
+ buff.append(" /* ").append(index.getPlan(createFilter(true), rootState));
if (selectorCondition != null) {
buff.append(" where ").append(selectorCondition);
}
@@ -126,8 +129,15 @@ public class SelectorImpl extends SourceImpl {
return buff.toString();
}
- private Filter createFilter() {
+ /**
+ * Create the filter condition for planning or execution.
+ *
+ * @param preparing whether a filter for the prepare phase should be made
+ * @return the filter
+ */
+ private Filter createFilter(boolean preparing) {
FilterImpl f = new FilterImpl(this, query.getStatement());
+ f.setPreparing(preparing);
validateNodeType(nodeTypeName);
f.setNodeType(nodeTypeName);
if (joinCondition != null) {
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SourceImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SourceImpl.java
index 9bf1ef2..dc1d8e9 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SourceImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SourceImpl.java
@@ -141,8 +141,8 @@ public abstract class SourceImpl extends AstElement {
public abstract String getPlan(NodeState rootState);
/**
- * Prepare executing the query. This method will decide which index to use.
- *
+ * Prepare executing the query (recursively). This method will decide which
+ * index to use.
*/
public abstract void prepare();
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/index/FilterImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/index/FilterImpl.java
index f540e18..767ea3e 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/index/FilterImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/index/FilterImpl.java
@@ -75,6 +75,11 @@ public class FilterImpl implements Filter {
* Only return distinct values.
*/
private boolean distinct;
+
+ /**
+ * Set during the prepare phase of a query.
+ */
+ private boolean preparing;
// TODO support "order by"
@@ -82,6 +87,14 @@ public class FilterImpl implements Filter {
this.selector = selector;
this.queryStatement = queryStatement;
}
+
+ public void setPreparing(boolean preparing) {
+ this.preparing = preparing;
+ }
+
+ public boolean isPreparing() {
+ return preparing;
+ }
/**
* Get the path.
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-579_7d72e6ed.diff |
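One part of the change above is in QueryEngineImpl: the traversal plan is no longer just a fallback when no index matches; its cost is compared like any other candidate. A stand-alone sketch of that selection rule, using a hypothetical Index interface:
{code:java}
import java.util.List;

public class IndexSelectionSketch {

    interface Index {
        double cost();
    }

    /** Picks the cheapest plan, always comparing against the traversal baseline. */
    static Index best(List<Index> candidates, Index traversing) {
        Index best = traversing;
        double bestCost = traversing.cost();
        for (Index index : candidates) {
            double cost = index.cost();
            if (cost < bestCost) {
                bestCost = cost;
                best = index;
            }
        }
        return best;
    }
}
{code}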
bugs-dot-jar_data_OAK-1739_8188ef54 | ---
BugID: OAK-1739
Summary: Incorrect handling of multivalued comparisons in queries
Description: |-
[Section 6.7.14|http://www.day.com/specs/jcr/2.0/6_Query.html#6.7.16 Comparison] of the JCR 2.0 spec says:
bq. ... operand1 may evaluate to an array of values (for example, the values of a multi-valued property), in which case the comparison is separately performed for each element of the array, and the Comparison constraint is satisfied as a whole if the comparison against any element of the array is satisfied.
This is currently not the case in Oak. Instead only the first value of the array is used in the comparison.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ComparisonImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ComparisonImpl.java
index a77a85c..585b44e 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ComparisonImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ComparisonImpl.java
@@ -24,8 +24,10 @@ import java.util.Set;
import javax.jcr.PropertyType;
+import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.PropertyValue;
import org.apache.jackrabbit.oak.api.Type;
+import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.query.fulltext.LikePattern;
import org.apache.jackrabbit.oak.query.index.FilterImpl;
import org.apache.jackrabbit.oak.spi.query.PropertyValues;
@@ -101,7 +103,22 @@ public class ComparisonImpl extends ConstraintImpl {
// unable to convert, just skip this node
return false;
}
- return evaluate(p1, p2);
+ if (p1.isArray()) {
+ // JCR 2.0 spec, 6.7.16 Comparison:
+ // "... constraint is satisfied as a whole if the comparison
+ // against any element of the array is satisfied."
+ Type<?> base = p1.getType().getBaseType();
+ for (int i = 0; i < p1.count(); i++) {
+ PropertyState value = PropertyStates.createProperty(
+ "value", p1.getValue(base, i), base);
+ if (evaluate(PropertyValues.create(value), p2)) {
+ return true;
+ }
+ }
+ return false;
+ } else {
+ return evaluate(p1, p2);
+ }
}
/**
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-1739_8188ef54.diff |
bugs-dot-jar_data_OAK-2465_60186813 | ---
BugID: OAK-2465
Summary: Long overflow in PermissionEntryProviderImpl
Description: |-
PermissionEntryProviderImpl#init can run into a long overflow if the underlying implementation does not know the exact number of children and the child node count is higher than maxSize.
I will attach a patch with a test case.
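To see why this matters, consider a standalone arithmetic sketch (not Oak code): getNumEntries may report Long.MAX_VALUE when the exact count is unknown, and adding anything to that silently wraps to a negative total, which then passes a cnt < maxSize check it should fail:
{code}
public class OverflowSketch {
    public static void main(String[] args) {
        long maxSize = 250;
        long unknown = Long.MAX_VALUE; // "exact count not known, more than maxSize"
        long cnt = unknown + 10;       // silently wraps around

        System.out.println(cnt);           // -9223372036854775799
        System.out.println(cnt < maxSize); // true -- looks "small", but is not
    }
}
{code}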
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntryProviderImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntryProviderImpl.java
index cce6a71..d77fe97 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntryProviderImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntryProviderImpl.java
@@ -24,14 +24,19 @@ import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
+
import javax.annotation.Nonnull;
import com.google.common.base.Strings;
import com.google.common.collect.Iterators;
+import com.google.common.math.LongMath;
+
import org.apache.jackrabbit.commons.iterator.AbstractLazyIterator;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
class PermissionEntryProviderImpl implements PermissionEntryProvider {
@@ -39,18 +44,31 @@ class PermissionEntryProviderImpl implements PermissionEntryProvider {
private static final long DEFAULT_SIZE = 250;
+ private static final Logger log = LoggerFactory.getLogger(PermissionEntryProviderImpl.class);
+
+ /**
+ * The set of principal names for which this {@code PermissionEntryProvider}
+ * has been created.
+ */
private final Set<String> principalNames;
+ /**
+ * The set of principal names for which the store contains any permission
+ * entries. This set is equal to or a subset of the {@code principalNames}
+ * defined above. The methods collecting the entries will shortcut in case
+ * this set is empty and thus no permission entries exist for the specified
+ * set of principals.
+ */
private final Set<String> existingNames = new HashSet<String>();
- private Map<String, Collection<PermissionEntry>> pathEntryMap;
-
private final PermissionStore store;
private final PermissionEntryCache cache;
private final long maxSize;
+ private Map<String, Collection<PermissionEntry>> pathEntryMap;
+
PermissionEntryProviderImpl(@Nonnull PermissionStore store, @Nonnull PermissionEntryCache cache,
@Nonnull Set<String> principalNames, @Nonnull ConfigurationParameters options) {
this.store = store;
@@ -63,17 +81,42 @@ class PermissionEntryProviderImpl implements PermissionEntryProvider {
private void init() {
long cnt = 0;
existingNames.clear();
- for (String name: principalNames) {
+ for (String name : principalNames) {
long n = cache.getNumEntries(store, name, maxSize);
- cnt+= n;
+ /*
+ if cache.getNumEntries (n) returns a number bigger than 0, we
+ remember this principal name in the 'existingNames' set
+ */
if (n > 0) {
existingNames.add(name);
}
+ /*
+ Calculate the total number of permission entries (cnt) defined for the
+ given set of principals in order to be able to determine if the cache
+ should be loaded upfront.
+ Note however that cache.getNumEntries (n) may return Long.MAX_VALUE
+ if the underlying implementation does not know the exact value, and
+ the child node count is higher than maxSize (see OAK-2465).
+ */
+ if (cnt < Long.MAX_VALUE) {
+ if (Long.MAX_VALUE == n) {
+ cnt = Long.MAX_VALUE;
+ } else {
+ try {
+ cnt = LongMath.checkedAdd(cnt, n);
+ } catch (ArithmeticException ae) {
+ log.warn("Long overflow while calculate the total number of permission entries");
+ cnt = Long.MAX_VALUE;
+ }
+ }
+ }
}
- if (cnt < maxSize) {
- // cache all entries of all principals
+
+ if (cnt > 0 && cnt < maxSize) {
+ // the total number of entries is smaller than maxSize, so we can
+ // cache all entries for all principals having any entries right away
pathEntryMap = new HashMap<String, Collection<PermissionEntry>>();
- for (String name: principalNames) {
+ for (String name : existingNames) {
cache.load(store, pathEntryMap, name);
}
} else {
@@ -124,7 +167,7 @@ class PermissionEntryProviderImpl implements PermissionEntryProvider {
@Nonnull
private Collection<PermissionEntry> loadEntries(@Nonnull String path) {
Collection<PermissionEntry> ret = new TreeSet<PermissionEntry>();
- for (String name: existingNames) {
+ for (String name : existingNames) {
cache.load(store, ret, name, path);
}
return ret;
| bugs-dot-jar/jackrabbit-oak_extracted_diff/developer-patch_bugs-dot-jar_OAK-2465_60186813.diff |
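The counting part of the fix amounts to a saturating sum: add the per-principal counts, but clamp at Long.MAX_VALUE instead of wrapping. A standalone sketch of that pattern (method name and sample values are made up; in the patch the counts come from cache.getNumEntries):
{code}
import com.google.common.math.LongMath;

public class ClampedSumSketch {

    // Sum entry counts per principal, clamping at Long.MAX_VALUE instead of
    // overflowing when one count is Long.MAX_VALUE (the "unknown" marker).
    static long totalEntries(long[] countsPerPrincipal) {
        long cnt = 0;
        for (long n : countsPerPrincipal) {
            if (cnt == Long.MAX_VALUE) {
                break; // already saturated
            }
            if (n == Long.MAX_VALUE) {
                cnt = Long.MAX_VALUE;
            } else {
                try {
                    cnt = LongMath.checkedAdd(cnt, n);
                } catch (ArithmeticException overflow) {
                    cnt = Long.MAX_VALUE;
                }
            }
        }
        return cnt;
    }

    public static void main(String[] args) {
        System.out.println(totalEntries(new long[] { 3, 7 }));              // 10
        System.out.println(totalEntries(new long[] { 3, Long.MAX_VALUE })); // 9223372036854775807
    }
}
{code}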