You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@jackrabbit.apache.org by tr...@apache.org on 2013/08/10 07:53:54 UTC
svn commit: r1512568 [11/39] - in /jackrabbit/commons/filevault/trunk: ./
parent/ vault-cli/ vault-cli/src/ vault-cli/src/main/
vault-cli/src/main/appassembler/ vault-cli/src/main/assembly/
vault-cli/src/main/java/ vault-cli/src/main/java/org/ vault-cl...
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/DepthItemFilter.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/DepthItemFilter.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/DepthItemFilter.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/DepthItemFilter.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.filter;
+
+import javax.jcr.Item;
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.vault.fs.api.DumpContext;
+import org.apache.jackrabbit.vault.fs.api.ItemFilter;
+
+/**
+ * Implements a filter that filters item according to their (passed) depth.
+ *
+ */
+public class DepthItemFilter implements ItemFilter {
+
+ /**
+ * The minimal depth
+ */
+ private int minDepth = 0;
+
+ /**
+ * The maximal depth
+ */
+ private int maxDepth = Integer.MAX_VALUE;
+
+ /**
+ * Default constructor.
+ */
+ public DepthItemFilter() {
+ }
+
+ /**
+ * Creates a new depth filter for the given depths.
+ * @param minDepth the minimal depth
+ * @param maxDepth the maximal depth
+ */
+ public DepthItemFilter(int minDepth, int maxDepth) {
+ this.minDepth = minDepth;
+ this.maxDepth = maxDepth;
+ }
+
+ /**
+ * Sets the minimal depth
+ * @param minDepth the minimal depth
+ */
+ public void setMinDepth(String minDepth) {
+ this.minDepth = Integer.decode(minDepth);
+ }
+
+ /**
+ * Sets the maximal depth
+ * @param maxDepth the maximal depth
+ */
+ public void setMaxDepth(String maxDepth) {
+ this.maxDepth = Integer.decode(maxDepth);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * Matches if the given depth is greater or equal the minimum depth and
+ * less or equal the maximum depth and if the call to {@link #matches(Item)}
+ * returns <code>true</code>.
+ */
+ public boolean matches(Item item, int depth) throws RepositoryException {
+ return depth >= minDepth && depth <= maxDepth && matches(item);
+ }
+
+ /**
+ * Returns <code>true</code>. Subclasses can override to implement something
+ * useful that is dependant of the depth.
+ *
+ * @param item the item to match
+ * @return <code>true</code> if the item matches; <code>false</code> otherwise.
+ * @throws RepositoryException if an error occurs.
+ */
+ public boolean matches(Item item) throws RepositoryException {
+ return true;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public void dump(DumpContext ctx, boolean isLast) {
+ ctx.printf(isLast, "%s:", getClass().getSimpleName());
+ ctx.indent(isLast);
+ ctx.printf(false, "minDepth: %d", minDepth);
+ ctx.printf(true, "maxDepth: %d", maxDepth);
+ ctx.outdent();
+ }
+}
\ No newline at end of file
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/FileFolderNodeFilter.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/FileFolderNodeFilter.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/FileFolderNodeFilter.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/FileFolderNodeFilter.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.filter;
+
+import javax.jcr.Item;
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.nodetype.NodeDefinition;
+
+import org.apache.jackrabbit.vault.util.JcrConstants;
+
+/**
+ * Defines an filter that matches file/folder like nodes. It matches
+ * all nt:hierarchyNode nodes that have or define a jcr:content
+ * child node.
+ */
+public class FileFolderNodeFilter extends DepthItemFilter {
+
+ /**
+ * {@inheritDoc}
+ *
+ * Returns <code>true</code> if the item is a node of type nt:hierarchyNode
+ * that has or defines a 'jcr:content' child node.
+ */
+ public boolean matches(Item item) throws RepositoryException {
+ if (item.isNode()) {
+ Node node = (Node) item;
+ if (node.isNodeType(JcrConstants.NT_HIERARCHYNODE)) {
+ if (node.hasNode(JcrConstants.JCR_CONTENT)) {
+ return true;
+ } else {
+ for (NodeDefinition pd: node.getPrimaryNodeType().getChildNodeDefinitions()) {
+ if (pd.getName().equals(JcrConstants.JCR_CONTENT)) {
+ return true;
+ }
+ }
+ }
+ }
+ }
+ return false;
+ }
+
+}
\ No newline at end of file
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/IsMandatoryFilter.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/IsMandatoryFilter.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/IsMandatoryFilter.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/IsMandatoryFilter.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.filter;
+
+import javax.jcr.Item;
+import javax.jcr.Node;
+import javax.jcr.Property;
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.vault.fs.api.DumpContext;
+
+/**
+ * <code>IsNodeFilter</code>...
+ *
+ */
/**
 * Item filter that matches items whose definition's mandatory flag equals
 * the configured condition (nodes and properties are both supported).
 */
public class IsMandatoryFilter extends DepthItemFilter {

    // expected value of the item definition's mandatory flag; defaults to true
    private boolean isMandatory = true;

    /**
     * Default constructor: matches mandatory items at any depth.
     */
    public IsMandatoryFilter() {
    }

    /**
     * Creates a new filter for the given condition and depth range.
     *
     * @param isMandatory expected mandatory flag
     * @param minDepth the minimal depth
     * @param maxDepth the maximal depth
     */
    public IsMandatoryFilter(boolean isMandatory, int minDepth, int maxDepth) {
        super(minDepth, maxDepth);
        this.isMandatory = isMandatory;
    }

    /**
     * Creates a new filter for the given condition, matching at any depth.
     *
     * @param isMandatory expected mandatory flag
     */
    public IsMandatoryFilter(boolean isMandatory) {
        this(isMandatory, 0, Integer.MAX_VALUE);
    }

    /**
     * Sets the expected mandatory flag from a string ("true"/"false").
     *
     * @param node the condition value
     */
    public void setCondition(String node) {
        isMandatory = Boolean.valueOf(node);
    }

    /**
     * Sets the expected mandatory flag from a string ("true"/"false").
     * Same effect as {@link #setCondition(String)}.
     *
     * @param node the condition value
     */
    public void setIsMandatory(String node) {
        isMandatory = Boolean.valueOf(node);
    }

    /**
     * Returns <code>true</code> if the mandatory flag of the item's
     * definition equals the configured condition.
     *
     * @param item the item to match
     * @return <code>true</code> if the item matches
     * @throws RepositoryException if an error occurs
     */
    public boolean matches(Item item) throws RepositoryException {
        if (item.isNode()) {
            return ((Node) item).getDefinition().isMandatory() == isMandatory;
        } else {
            return ((Property) item).getDefinition().isMandatory() == isMandatory;
        }
    }

    /**
     * {@inheritDoc}
     */
    public void dump(DumpContext ctx, boolean isLast) {
        super.dump(ctx, isLast);
        ctx.indent(isLast);
        ctx.printf(true, "isMandatory: %b", isMandatory);
        ctx.outdent();
    }

}
\ No newline at end of file
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/IsNodeFilter.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/IsNodeFilter.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/IsNodeFilter.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/IsNodeFilter.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.filter;
+
+import javax.jcr.Item;
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.vault.fs.api.DumpContext;
+
+/**
+ * Item filter that checks if an item is a node.
+ *
+ */
+public class IsNodeFilter extends DepthItemFilter {
+
+ /**
+ * Polarity of this filter
+ */
+ private boolean isNode = true;
+
+ /**
+ * Default constructor.
+ */
+ public IsNodeFilter() {
+ super(1, Integer.MAX_VALUE);
+ }
+
+ /**
+ * Creates a new node item filter.
+ *
+ * @param polarity the polarity of this filter. if <code>true</code> it matches
+ * nodes, if <code>false</code> it matches properties.
+ * @param minDepth the minimum depth
+ * @param maxDepth the maximum depth
+ *
+ * @see DepthItemFilter
+ */
+ public IsNodeFilter(boolean polarity, int minDepth, int maxDepth) {
+ super(minDepth, maxDepth);
+ isNode = polarity;
+ }
+
+ /**
+ * Creates a new node item filter
+ * @param polarity the polarity of this filter. if <code>true</code> it matches
+ * nodes, if <code>false</code> it matches properties.
+ */
+ public IsNodeFilter(boolean polarity) {
+ this(polarity, 1, Integer.MAX_VALUE);
+ }
+
+ /**
+ * Sets the polarity of this filter. If set to <code>true</code> this filter
+ * matches nodes otherwise properties.
+ *
+ * @param polarity the polarity
+ */
+ public void setPolarity(String polarity) {
+ setIsNode(polarity);
+ }
+
+ /**
+ * Sets the polarity of this filter. If set to <code>true</code> this filter
+ * matches nodes otherwise properties.
+ *
+ * @param polarity the polarity
+ */
+ public void setIsNode(String polarity) {
+ isNode = Boolean.valueOf(polarity);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * Returns <code>true</code> if the item is a node and the polarity is
+ * positive (true).
+ */
+ public boolean matches(Item item) throws RepositoryException {
+ return item.isNode() == isNode;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public void dump(DumpContext ctx, boolean isLast) {
+ super.dump(ctx, isLast);
+ ctx.indent(isLast);
+ ctx.printf(true, "isNode: %b", isNode);
+ ctx.outdent();
+ }
+
+}
\ No newline at end of file
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NameItemFilter.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NameItemFilter.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NameItemFilter.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NameItemFilter.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.filter;
+
+import javax.jcr.Item;
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.vault.fs.api.DumpContext;
+
+/**
+ * Filters items according to their names.
+ *
+ */
+public class NameItemFilter extends DepthItemFilter {
+
+ /**
+ * The name to filter on
+ */
+ private String name;
+
+ /**
+ * Default constructor
+ */
+ public NameItemFilter() {
+ }
+
+ /**
+ * Creates a new name filter with the given name and depths
+ * @param name the name to filter on
+ * @param minDepth the minimal depth
+ * @param maxDepth the maximal depth
+ */
+ public NameItemFilter(String name, int minDepth, int maxDepth) {
+ super(minDepth, maxDepth);
+ this.name = name;
+ }
+
+ /**
+ * Creates a new name filter with the given name.
+ * @param name the name to filter on
+ */
+ public NameItemFilter(String name) {
+ this(name, 0, Integer.MAX_VALUE);
+ }
+
+ /**
+ * Sets the name
+ * @param name the name
+ */
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * Returns <code>true</code> if the name of the given item is equal to
+ * the configured name.
+ */
+ public boolean matches(Item item) throws RepositoryException {
+ return item.getName().equals(name);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public void dump(DumpContext ctx, boolean isLast) {
+ super.dump(ctx, true);
+ ctx.indent(isLast);
+ ctx.printf(true, "name: %s", name);
+ ctx.outdent();
+ }
+
+}
\ No newline at end of file
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NodeTypeItemFilter.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NodeTypeItemFilter.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NodeTypeItemFilter.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NodeTypeItemFilter.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.filter;
+
+import javax.jcr.Item;
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.vault.fs.api.DumpContext;
+
+/**
+ * Filters on the node type of a node.
+ *
+ */
+public class NodeTypeItemFilter extends DepthItemFilter {
+
+ /**
+ * the node type to filter on
+ */
+ private String nodeType;
+
+ /**
+ * indicates if super types should be respected
+ */
+ private boolean respectSupertype;
+
+ /**
+ * Default constructor
+ */
+ public NodeTypeItemFilter() {
+ }
+
+ /**
+ * Creates a new node type filter.
+ * @param nodeType the node type to filter on
+ * @param respectSupertype indicates if super type should be respected
+ * @param minDepth the minimal depth
+ * @param maxDepth the maximal depth
+ */
+ public NodeTypeItemFilter(String nodeType, boolean respectSupertype,
+ int minDepth, int maxDepth) {
+ super(minDepth, maxDepth);
+ this.nodeType = nodeType;
+ this.respectSupertype = respectSupertype;
+ }
+
+ /**
+ * Creates a new node type filter.
+ * @param nodeType the node type to filter on
+ * @param respectSupertype indicates if super type should be respected
+ */
+ public NodeTypeItemFilter(String nodeType, boolean respectSupertype) {
+ this(nodeType, respectSupertype, 0, Integer.MAX_VALUE);
+ }
+
+ /**
+ * Sets the node type to filter on
+ * @param nodeType the node type
+ */
+ public void setNodeType(String nodeType) {
+ this.nodeType = nodeType;
+ }
+
+ /**
+ * Sets the flag that indicates if super type should be respected.
+ * @param respectSupertype if <code>true</code>, super types are respected.
+ */
+ public void setRespectSupertype(String respectSupertype) {
+ this.respectSupertype = Boolean.valueOf(respectSupertype);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * Returns <code>true</code> if the item is a node and if the configured
+ * node type is equal to the primary type of the node. if super types are
+ * respected it also returns <code>true</code> if the items node type
+ * extends from the configured node type (Node.isNodeType() check).
+ */
+ public boolean matches(Item item) throws RepositoryException {
+ if (item.isNode()) {
+ if (respectSupertype) {
+ try {
+ return ((Node) item).isNodeType(nodeType);
+ } catch (RepositoryException e) {
+ // ignore
+ return false;
+ }
+ } else {
+ return ((Node) item).getPrimaryNodeType().getName().equals(nodeType);
+ }
+ }
+ return false;
+
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public void dump(DumpContext ctx, boolean isLast) {
+ super.dump(ctx, isLast);
+ ctx.indent(isLast);
+ ctx.printf(false, "nodeType: %s", nodeType);
+ ctx.printf(true, "respectSupertype: %b", respectSupertype);
+ ctx.outdent();
+ }
+
+}
\ No newline at end of file
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NtFileItemFilter.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NtFileItemFilter.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NtFileItemFilter.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/filter/NtFileItemFilter.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.filter;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.jcr.Item;
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.vault.fs.api.DumpContext;
+import org.apache.jackrabbit.vault.fs.api.ItemFilter;
+import org.apache.jackrabbit.vault.util.JcrConstants;
+
+/**
+ * The nt file item filter matches all properties that are defined my the
+ * nt:file or nt:resource node type. the later only, if the respective nodes
+ * name is 'jcr:content'.
+ *
+ * Additionally the properties 'jcr:encoding' can be configured to be excluded.
+ *
+ */
/**
 * The nt file item filter matches the parts of an nt:file subtree that make
 * up the file content: the nt:file node itself (depth 0), its 'jcr:content'
 * child (depth 1), the standard file properties at depth 1, and the standard
 * resource properties (jcr:encoding, jcr:mimeType, jcr:data, ...) at depth 2
 * below a 'jcr:content' node. Everything else is rejected.
 */
public class NtFileItemFilter implements ItemFilter {

    // property names accepted directly on the nt:file node (depth 1)
    private static final Set<String> fileNames = new HashSet<String>();
    static {
        fileNames.add(JcrConstants.JCR_PRIMARYTYPE);
        fileNames.add(JcrConstants.JCR_MIXINTYPES);
        fileNames.add(JcrConstants.JCR_UUID);
        fileNames.add(JcrConstants.JCR_CREATED);
        fileNames.add(JcrConstants.JCR_CREATED_BY);
    }

    // property names accepted on the jcr:content resource node (depth 2)
    private static final Set<String> resNames = new HashSet<String>();
    static {
        resNames.add(JcrConstants.JCR_ENCODING);
        resNames.add(JcrConstants.JCR_MIMETYPE);
        resNames.add(JcrConstants.JCR_PRIMARYTYPE);
        resNames.add(JcrConstants.JCR_MIXINTYPES);
        resNames.add(JcrConstants.JCR_UUID);
        resNames.add(JcrConstants.JCR_LASTMODIFIED);
        resNames.add(JcrConstants.JCR_DATA);
    }

    /**
     * {@inheritDoc}
     *
     * @return <code>true</code> if the item is a nt:file node, its
     *         jcr:content child, or one of their standard properties
     */
    public boolean matches(Item item, int depth) throws RepositoryException {
        if (item.isNode()) {
            // include nt:file node
            Node node = (Node) item;
            if (depth == 0) {
                return node.isNodeType(JcrConstants.NT_FILE);
            } else if (depth == 1) {
                // include jcr:content
                return item.getName().equals(JcrConstants.JCR_CONTENT);
            } else {
                // deeper nodes are not part of the plain file serialization
                return false;
            }
        } else {
            if (depth == 1) {
                return fileNames.contains(item.getName());
            } else if (depth == 2 && item.getParent().getName().equals(JcrConstants.JCR_CONTENT)) {
                return resNames.contains(item.getName());
            } else {
                return false;
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    public void dump(DumpContext ctx, boolean isLast) {
        ctx.printf(isLast, "%s:", getClass().getSimpleName());
    }
}
\ No newline at end of file
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AbstractArtifact.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AbstractArtifact.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AbstractArtifact.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AbstractArtifact.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,212 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.impl;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.vault.fs.api.Artifact;
+import org.apache.jackrabbit.vault.fs.api.ArtifactType;
+import org.apache.jackrabbit.vault.fs.api.DumpContext;
+import org.apache.jackrabbit.vault.util.PathUtil;
+import org.apache.jackrabbit.vault.util.PlatformNameFormat;
+
+/**
+ * Implements a generic abstract artifact
+ *
+ */
+public abstract class AbstractArtifact implements Artifact {
+
+ /**
+ * the repository relative path of this artifact
+ */
+ private final String repoRelPath;
+
+ /**
+ * the platform relative path or <code>null</code> if not differs from
+ * {@link #repoRelPath}.
+ */
+ private String platRelPath;
+
+ /**
+ * the extension
+ */
+ private final String extension;
+
+ /**
+ * the artifact type
+ */
+ private final ArtifactType type;
+
+ /**
+ * the content type
+ */
+ private String contentType;
+
+ /**
+ * Creates a new abstract artifact with the given repository name,
+ * platform extension and and type. the parent artifact is only used for
+ * generating the paths and is not linked to this artifact.
+ *
+ * @param parent the parent artifact or <code>null</code>.
+ * @param repoRelPath the repository name
+ * @param extension the platform extension
+ * @param type type of the artifact
+ */
+ protected AbstractArtifact(Artifact parent, String repoRelPath,
+ String extension, ArtifactType type) {
+ this.type = type;
+ this.extension = extension;
+ if (parent == null) {
+ this.repoRelPath = repoRelPath;
+ this.platRelPath = PlatformNameFormat.getPlatformPath(repoRelPath + extension);
+ } else {
+ this.repoRelPath = PathUtil.append(parent.getRelativePath(), repoRelPath);
+ this.platRelPath = PathUtil.append(parent.getPlatformPath(), PlatformNameFormat.getPlatformPath(repoRelPath + extension));
+ }
+ if (this.platRelPath.equals(this.repoRelPath)) {
+ this.platRelPath = null;
+ }
+ }
+
+ /**
+ * Creates a new abstract artifact initialized with the values from the
+ * given one.
+ * @param base base artifact
+ * @param type the new type
+ */
+ protected AbstractArtifact(Artifact base, ArtifactType type) {
+ this.type = type;
+ this.extension = base.getExtension();
+ this.repoRelPath = base.getRelativePath();
+ this.platRelPath = base.getPlatformPath();
+ this.contentType = base.getContentType();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public String getRelativePath() {
+ return repoRelPath;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public String getPlatformPath() {
+ return platRelPath == null
+ ? repoRelPath
+ : platRelPath;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public String getExtension() {
+ return extension;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public ArtifactType getType() {
+ return type;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * Provides a generic spool mechanism from the {@link #getInputStream()}
+ * to the provided output stream.
+ */
+ public void spool(OutputStream out)
+ throws IOException, RepositoryException {
+ InputStream in = getInputStream();
+ byte[] buffer = new byte[8192];
+ int read;
+ while ((read = in.read(buffer)) >= 0) {
+ out.write(buffer, 0, read);
+ }
+ in.close();
+ out.close();
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @return the final name and the type
+ */
+ public String toString() {
+ StringBuffer buf = new StringBuffer(type.toString());
+ buf.append('(').append(repoRelPath).append(')');
+ return buf.toString();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public String getContentType() {
+ return contentType;
+ }
+
+ /**
+ * Sets the content type
+ * @param contentType the content type
+ */
+ protected void setContentType(String contentType) {
+ this.contentType = contentType;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public int hashCode() {
+ return repoRelPath.hashCode() + 37 * type.hashCode();
+ }
+
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ else if (obj instanceof Artifact) {
+ Artifact a = (Artifact) obj;
+ return getRelativePath().equals(a.getRelativePath()) && type == a.getType();
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public void dump(DumpContext ctx, boolean isLast) {
+ ctx.println(isLast, "Artifact");
+ ctx.indent(isLast);
+ ctx.printf(false, "rel path: %s", getRelativePath());
+ ctx.printf(false, "plt path: %s", getPlatformPath());
+ ctx.printf(false, "type: %s", getType());
+ ctx.printf(false, "serialization: %s", getSerializationType());
+ ctx.printf(false, "access type: %s", getPreferredAccess());
+ ctx.printf(false, "content type: %s", getContentType());
+ ctx.printf(true, "content length: %d", getContentLength());
+ ctx.outdent();
+ }
+
+}
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateBuilder.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateBuilder.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateBuilder.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateBuilder.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.impl;
+
+import java.io.IOException;
+
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.vault.fs.api.Aggregate;
+import org.apache.jackrabbit.vault.fs.api.Artifact;
+import org.apache.jackrabbit.vault.fs.api.ArtifactSet;
+import org.apache.jackrabbit.vault.fs.api.ImportInfo;
+
+/**
+ * Implements methods for creating a new {@link AggregateImpl}
+ *
+ */
+public class AggregateBuilder {
+
+    /**
+     * The aggregate this builder writes to, or the parent aggregate when a
+     * new child node is being created.
+     */
+    private final AggregateImpl aggregate;
+
+    /**
+     * Repository name of the child node to create, or <code>null</code> when
+     * writing to an existing node.
+     */
+    private final String reposName;
+
+    /**
+     * Artifacts accumulated by this builder. Set to <code>null</code> once
+     * {@link #close()} has been called; all mutators guard on this.
+     */
+    private ArtifactSetImpl artifactSet = new ArtifactSetImpl();
+
+    /**
+     * Creates a builder that writes to an existing node, seeded with the
+     * node's current artifacts and coverage.
+     * @param aggregate the aggregate of the node
+     * @param artifacts the artifacts of the node
+     */
+    AggregateBuilder(AggregateImpl aggregate, ArtifactSet artifacts) {
+        this.aggregate = aggregate;
+        this.reposName = null;
+        this.artifactSet.addAll(artifacts);
+        this.artifactSet.setCoverage(artifacts.getCoverage());
+    }
+
+    /**
+     * Creates a builder for a new child node below the given parent.
+     * @param parent the parent aggregate
+     * @param reposName the repository name for the new node
+     */
+    AggregateBuilder(AggregateImpl parent, String reposName) {
+        this.aggregate = parent;
+        this.reposName = reposName;
+    }
+
+    /**
+     * Returns the artifact set collected so far.
+     * @return the artifact set of this builder
+     * @throws IllegalStateException if this builder is already closed
+     */
+    public ArtifactSetImpl getArtifacts() {
+        checkNotClosed();
+        return artifactSet;
+    }
+
+    /**
+     * Adds a single artifact.
+     * @param artifact the artifact to add
+     * @throws IllegalStateException if this builder is already closed
+     */
+    public void addArtifact(Artifact artifact) {
+        checkNotClosed();
+        artifactSet.add(artifact);
+    }
+
+    /**
+     * Adds all artifacts of the given set.
+     * @param artifacts the artifact set
+     * @throws IllegalStateException if this builder is already closed
+     */
+    public void addArtifacts(ArtifactSetImpl artifacts) {
+        checkNotClosed();
+        artifactSet.addAll(artifacts);
+    }
+
+    /**
+     * Returns the aggregate this builder was created for.
+     * @return the aggregate
+     */
+    public Aggregate getAggregate() {
+        return aggregate;
+    }
+
+    /**
+     * Returns the repository name this builder was created for.
+     * @return the repository name, or <code>null</code> for an existing node
+     */
+    public String getReposName() {
+        return reposName;
+    }
+
+    /**
+     * Closes this builder and writes the collected artifacts back to the
+     * repository. The builder is unusable afterwards, even if writing fails.
+     *
+     * @return info about the modifications
+     * @throws RepositoryException if a repository error occurs.
+     * @throws IOException if an I/O error occurs.
+     */
+    public ImportInfo close() throws RepositoryException, IOException {
+        checkNotClosed();
+        try {
+            return aggregate.writeArtifacts(artifactSet, reposName);
+        } finally {
+            // mark closed regardless of the outcome
+            artifactSet = null;
+        }
+    }
+
+    /**
+     * Ensures this builder has not been closed yet.
+     * @throws IllegalStateException if this builder is closed.
+     */
+    private void checkNotClosed() {
+        if (artifactSet == null) {
+            throw new IllegalStateException("Output closed.");
+        }
+    }
+}
\ No newline at end of file
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateImpl.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateImpl.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateImpl.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,721 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.impl;
+
+import java.io.IOException;
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+
+import javax.jcr.Node;
+import javax.jcr.NodeIterator;
+import javax.jcr.Property;
+import javax.jcr.PropertyIterator;
+import javax.jcr.PropertyType;
+import javax.jcr.RepositoryException;
+import javax.jcr.Value;
+
+import org.apache.jackrabbit.vault.fs.api.Aggregate;
+import org.apache.jackrabbit.vault.fs.api.Aggregator;
+import org.apache.jackrabbit.vault.fs.api.Artifact;
+import org.apache.jackrabbit.vault.fs.api.ArtifactSet;
+import org.apache.jackrabbit.vault.fs.api.ArtifactType;
+import org.apache.jackrabbit.vault.fs.api.DumpContext;
+import org.apache.jackrabbit.vault.fs.api.ImportInfo;
+import org.apache.jackrabbit.vault.fs.api.PathFilterSet;
+import org.apache.jackrabbit.vault.fs.api.RepositoryAddress;
+import org.apache.jackrabbit.vault.fs.impl.io.AggregateWalkListener;
+import org.apache.jackrabbit.vault.util.NodeNameComparator;
+import org.apache.jackrabbit.vault.util.PathUtil;
+import org.apache.jackrabbit.vault.util.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Collects the items that form an aggregate. The aggregates form a tree on top
+ * of the repository one by invoking the respective aggregators. The aggregates
+ * are controlled via the {@link AggregateManagerImpl} and are loaded dynamically
+ * when traversing through the tree.
+ * <p/>
+ * The aggregates can then later be used by the aggregators to provide the
+ * artifacts of this aggregate.
+ *
+ */
+public class AggregateImpl implements Aggregate {
+
+    // Lifecycle states: INITIAL -> COLLECTED (leaves discovered via collect())
+    // -> PREPARED (items fully resolved via load()). An aggregate whose
+    // aggregator has full coverage is created directly in COLLECTED state.
+    private static final char STATE_INITIAL = 'i';
+    private static final char STATE_PREPARED = 'p';
+    private static final char STATE_COLLECTED = 'c';
+
+    /**
+     * The default logger
+     */
+    protected static final Logger log = LoggerFactory.getLogger(AggregateImpl.class);
+
+    /**
+     * parent aggregate or <code>null</code> for the root aggregate
+     */
+    private final AggregateImpl parent;
+
+    /**
+     * absolute repository path of this aggregate; "" denotes the root
+     */
+    private final String path;
+
+    /**
+     * lazily computed path relative to the parent aggregate
+     */
+    private String relPath;
+
+    /**
+     * the aggregator that controls the content of this aggregate
+     */
+    private final Aggregator aggregator;
+
+    /**
+     * the manager this aggregate belongs to
+     */
+    private final AggregateManagerImpl mgr;
+
+    /**
+     * lazily created artifact set of this aggregate
+     */
+    private ArtifactSetImpl artifacts;
+
+    /**
+     * rel paths of included items (including a leading slash)
+     */
+    private Set<String> includes;
+
+    /**
+     * included binary properties or <code>null</code> if none were included
+     */
+    private Collection<Property> binaries;
+
+    /**
+     * sub aggregates, loaded on demand
+     */
+    private List<AggregateImpl> leaves;
+
+    /**
+     * namespace prefixes used by the included items, loaded on demand
+     */
+    private String[] namespacePrefixes;
+
+    /**
+     * current lifecycle state, see the STATE_* constants
+     */
+    private char state = STATE_INITIAL;
+
+    /**
+     * weak reference to the JCR node; re-acquired from the session when
+     * garbage collected
+     */
+    private WeakReference<Node> nodeRef;
+
+    /**
+     * workaround to filter out non directory artifacts for relative
+     * path includes (ACL export case)
+     */
+    private boolean filterArtifacts;
+
+    /**
+     * Creates a new root aggregate
+     * @param mgr Aggregate manager
+     * @param path the path of the aggregate
+     * @param aggregator aggregator
+     * @throws RepositoryException if an error occurs
+     */
+    protected AggregateImpl(AggregateManagerImpl mgr, String path, Aggregator aggregator)
+            throws RepositoryException{
+        log.debug("Create Root Aggregate {}", path);
+        this.mgr = mgr;
+        this.parent = null;
+        // the root path is normalized to "" so rel paths can be appended directly
+        this.path = path.equals("/") ? "" : path;
+        this.aggregator = aggregator;
+    }
+
+    /**
+     * Creates a new sub aggregate
+     * @param parent parent aggregate
+     * @param path path of aggregate
+     * @param aggregator aggregator
+     * @throws RepositoryException if an error occurs
+     */
+    protected AggregateImpl(AggregateImpl parent, String path, Aggregator aggregator)
+            throws RepositoryException{
+        log.debug("Create Aggregate {}", path);
+        this.mgr = parent.mgr;
+        this.parent = parent;
+        this.path = path;
+        this.aggregator = aggregator;
+        // if we have a full coverage aggregator, consider this already collected
+        mgr.onAggregateCreated();
+        if (aggregator.hasFullCoverage()) {
+            this.state = STATE_COLLECTED;
+            mgr.onAggregateCollected();
+        }
+    }
+
+    /**
+     * Returns the JCR node of this aggregate. The node is cached via a weak
+     * reference and re-acquired from the session when the reference was
+     * collected.
+     * @return the node
+     * @throws RepositoryException if a repository error occurs
+     */
+    public Node getNode() throws RepositoryException {
+        if (path.length() == 0) {
+            // the empty path denotes the root aggregate
+            return mgr.getSession().getRootNode();
+        } else {
+            Node node = nodeRef == null ? null : nodeRef.get();
+            if (node == null) {
+                node = mgr.getSession().getNode(path);
+                nodeRef = new WeakReference<Node>(node);
+            }
+            return node;
+        }
+    }
+
+    /**
+     * Checks if the node of this aggregate exists, i.e. is still cached or
+     * exists in the session.
+     * @return <code>true</code> if the node exists
+     * @throws RepositoryException if a repository error occurs
+     */
+    public boolean hasNode() throws RepositoryException {
+        return nodeRef != null && nodeRef.get() != null
+                || path.length() == 0 || mgr.getSession().nodeExists(path);
+    }
+
+    /**
+     * Invalidates this aggregate by discarding all cached state and resetting
+     * it to the initial state.
+     */
+    public void invalidate() {
+        log.debug("invalidating aggregate {}", getPath());
+        artifacts = null;
+        includes = null;
+        binaries = null;
+        leaves = null;
+        namespacePrefixes = null;
+        nodeRef = null;
+        relPath = null;
+        state = STATE_INITIAL;
+    }
+
+    /**
+     * Returns the parent aggregate or <code>null</code> for the root.
+     */
+    public Aggregate getParent() {
+        return parent;
+    }
+
+    /**
+     * Returns the repository path of this aggregate ("" for the root).
+     */
+    public String getPath() {
+        return path;
+    }
+
+    /**
+     * Returns the repository address of this aggregate, resolved against the
+     * manager's mountpoint.
+     */
+    public RepositoryAddress getRepositoryAddress() throws RepositoryException {
+        //assertAttached();
+        return mgr.getMountpoint().resolve(getPath());
+    }
+
+    /**
+     * Checks if this aggregate can contain child aggregates, i.e. if its
+     * aggregator does not claim full coverage of the subtree.
+     */
+    public boolean allowsChildren() {
+        return aggregator == null || !aggregator.hasFullCoverage();
+    }
+
+    /**
+     * Returns the path of this aggregate relative to its parent aggregate
+     * (the last name segment for the root). Computed lazily.
+     */
+    public String getRelPath() {
+        if (relPath == null) {
+            relPath = parent == null
+                    ? path.substring(path.lastIndexOf('/') + 1)
+                    : path.substring(parent.getPath().length()+1);
+        }
+        return relPath;
+    }
+
+    /**
+     * Returns the last name segment of the relative path.
+     */
+    public String getName() {
+        return Text.getName(getRelPath());
+    }
+
+    /**
+     * Returns the sub aggregates (leaves) of this aggregate, loading this
+     * aggregate first if necessary. May return <code>null</code> if there
+     * are no leaves.
+     */
+    public List<? extends Aggregate> getLeaves() throws RepositoryException {
+        load();
+        return leaves;
+    }
+
+    /**
+     * Returns the aggregate addressed by the given relative path, or this
+     * aggregate if the path is empty.
+     */
+    public Aggregate getAggregate(String relPath) throws RepositoryException {
+        String[] pathElems = PathUtil.makePath((String[]) null, relPath);
+        if (pathElems == null) {
+            return this;
+        }
+        return getAggregate(pathElems, 0);
+    }
+
+    /**
+     * Recursively resolves the aggregate addressed by the path elements
+     * starting at <code>pos</code> by matching them against the (possibly
+     * multi-segment) rel paths of the leaves.
+     * @return the aggregate or <code>null</code> if not found
+     */
+    private Aggregate getAggregate(String[] pathElems, int pos)
+            throws RepositoryException {
+        if (pos < pathElems.length) {
+            String elem = pathElems[pos];
+            if (elem.equals("..")) {
+                return parent == null ? null : parent.getAggregate(pathElems, pos + 1);
+            }
+            // find suitable leaf
+            load();
+            if (leaves != null && !leaves.isEmpty()) {
+                for (AggregateImpl a: leaves) {
+                    String[] le = Text.explode(a.getRelPath(), '/');
+                    // match as many segments of the leaf's rel path as possible
+                    int i=0;
+                    while (i<le.length && i+pos < pathElems.length) {
+                        if (!le[i].equals(pathElems[i+pos])) {
+                            break;
+                        }
+                        i++;
+                    }
+                    if (i==le.length) {
+                        // leaf fully matched; continue resolution below it
+                        return a.getAggregate(pathElems, i+pos);
+                    }
+                }
+            }
+            return null;
+        }
+        return this;
+    }
+
+    /**
+     * Returns the artifacts of this aggregate, created lazily via the
+     * aggregator on first access.
+     * @return the artifact set
+     * @throws RepositoryException if an error occurs
+     */
+    public ArtifactSet getArtifacts() throws RepositoryException {
+        if (artifacts == null) {
+            assertAttached();
+            load();
+            artifacts = (ArtifactSetImpl) aggregator.createArtifacts(this);
+
+            if (filterArtifacts) {
+                // filter out all non-directory and non .content.xml artifacts
+                // (iterate over a copy since we remove from 'artifacts')
+                ArtifactSetImpl na = new ArtifactSetImpl();
+                na.addAll(artifacts);
+                for (Artifact a: na.values()) {
+                    if (a.getType() != ArtifactType.DIRECTORY) {
+                        if (!Text.getName(a.getPlatformPath()).equals(".content.xml")) {
+                            artifacts.remove(a);
+                        }
+                    }
+                }
+            }
+        }
+        return artifacts;
+    }
+
+    /**
+     * Returns an artifact output for this node that allows writing the artifacts.
+     *
+     * @return an artifact output.
+     * @throws RepositoryException if this file is not attached to the fs, yet.
+     */
+    public AggregateBuilder getBuilder() throws RepositoryException {
+        assertAttached();
+        return new AggregateBuilder(this, getArtifacts());
+    }
+
+    /**
+     * Creates a new child artifact node with the given name.
+     * Please note, that the returned node is not attached to the tree.
+     * <p/>
+     * If this artifact node does not allow children a RepositoryException is
+     * thrown.
+     *
+     * @param reposName the (repository) name for the new node
+     * @return a new child node.
+     * @throws RepositoryException if an error occurs.
+     */
+    public AggregateBuilder create(String reposName) throws RepositoryException {
+        assertAttached();
+        if (!allowsChildren()) {
+            throw new RepositoryException("Unable to create artifact node below a non-folder.");
+        }
+        return new AggregateBuilder(this, reposName);
+    }
+
+    /**
+     * Removes this artifact node from the tree. If this artifact node has
+     * directory and non-directory artifacts only the non-directory artifacts
+     * are removed unless <code>recursive</code> is specified.
+     *
+     * @param recursive specifies if directories are removed as well.
+     * @return infos about the modifications
+     * @throws RepositoryException if an error occurs.
+     */
+    public ImportInfo remove(boolean recursive) throws RepositoryException {
+        assertAttached();
+        Node node = getNode();
+        ImportInfo info = aggregator.remove(node, recursive, true);
+        if (parent != null) {
+            // the parent's cached leaves are stale now
+            parent.invalidate();
+        }
+        return info;
+    }
+
+    /**
+     * Returns the manager this aggregate belongs to.
+     */
+    public AggregateManagerImpl getManager() {
+        return mgr;
+    }
+
+    /**
+     * Writes the artifacts back to the repository.
+     *
+     * @param artifacts the artifacts to write
+     * @param reposName the name of a new child node or <code>null</code>
+     * @return infos about the modifications
+     * @throws RepositoryException if an error occurs.
+     * @throws IOException if an I/O error occurs.
+     */
+    ImportInfo writeArtifacts(ArtifactSetImpl artifacts, String reposName)
+            throws RepositoryException, IOException {
+        try {
+            return mgr.writeAggregate(this, reposName, artifacts);
+        } catch (RepositoryException e) {
+            // log with context, then rethrow unchanged for the caller
+            log.error("Error while writing artifacts of {}: {}", getPath(), e.toString());
+            throw e;
+        } catch (IOException e) {
+            log.error("Error while writing artifacts of {}: {}", getPath(), e.toString());
+            throw e;
+        }
+    }
+
+    /**
+     * Checks if this aggregate has an aggregator and its node exists.
+     * @throws RepositoryException if no aggregator is set.
+     */
+    private void assertAttached() throws RepositoryException {
+        if (aggregator == null || !hasNode()) {
+            throw new RepositoryException("aggregate not attached anymore");
+        }
+    }
+
+    /**
+     * Checks if this aggregate is still attached, i.e. has an aggregator and
+     * an existing node.
+     */
+    public boolean isAttached() throws RepositoryException {
+        return aggregator != null && hasNode();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void dump(DumpContext ctx, boolean isLast) {
+        ctx.println(isLast, "Aggregate");
+        ctx.indent(isLast);
+        ctx.printf(false, "path: %s", getPath());
+        ctx.printf(false, "name: %s", getName());
+        ctx.printf(false, "relPath: %s", getRelPath());
+        try {
+            getArtifacts().dump(ctx, false);
+        } catch (RepositoryException e) {
+            ctx.printf(false, "no artifacts: %s", e.toString());
+        }
+        ctx.println(false, "Namespaces");
+        ctx.indent(false);
+        for (String pfx: getNamespacePrefixes()) {
+            String uri = "invalid";
+            try {
+                uri = getNamespaceURI(pfx);
+            } catch (RepositoryException e) {
+                log.error("Error while resolving namespace uri", e);
+            }
+            ctx.printf(false, "%s = %s", pfx, uri);
+        }
+        ctx.outdent();
+        if (aggregator != null) {
+            aggregator.dump(ctx, true);
+        } else {
+            ctx.println(true, "no aggregator");
+        }
+        ctx.outdent();
+    }
+
+    /**
+     * Returns the namespace prefixes used by the items of this aggregate,
+     * loading them on first access.
+     */
+    public String[] getNamespacePrefixes() {
+        if (namespacePrefixes == null) {
+            loadNamespaces();
+        }
+        return namespacePrefixes;
+    }
+
+    /**
+     * Resolves the given prefix to a namespace URI via the manager.
+     */
+    public String getNamespaceURI(String prefix) throws RepositoryException {
+        return mgr.getNamespaceURI(prefix);
+    }
+
+    /**
+     * Returns the binary properties included in this aggregate, or
+     * <code>null</code> if none were recorded (yet).
+     */
+    public Collection<Property> getBinaries() {
+        return binaries;
+    }
+
+    /**
+     * Walks the node tree and invokes the callbacks in the listener according
+     * to the configured filters.
+     *
+     * For each tree there are the following events generated:
+     * events := OnWalkBegin { nodeEvent } OnWalkEnd;
+     * nodeEvent := OnNodeBegin { propEvent } OnChildBegin { nodeEvent } OnNodeEnd;
+     * propEvent := OnProperty;
+     *
+     * @param aggregateWalkListener the listener that receives the events
+     * @throws RepositoryException if a repository error occurs.
+     */
+    public void walk(AggregateWalkListener aggregateWalkListener)
+            throws RepositoryException {
+        Node node = getNode();
+        aggregateWalkListener.onWalkBegin(node);
+        walk(aggregateWalkListener, "", node, 0);
+        aggregateWalkListener.onWalkEnd(node);
+    }
+
+    /**
+     * Walks the tree.
+     *
+     * @param aggregateWalkListener the listener
+     * @param relPath rel path of node
+     * @param node the current node
+     * @param depth the depth of the node
+     * @throws RepositoryException if an error occurs.
+     */
+    private void walk(AggregateWalkListener aggregateWalkListener, String relPath, Node node, int depth)
+            throws RepositoryException {
+        if (node != null) {
+            boolean included = includes(relPath);
+            aggregateWalkListener.onNodeBegin(node, included, depth);
+            PropertyIterator piter = node.getProperties();
+            while (piter.hasNext()) {
+                Property prop = piter.nextProperty();
+                if (includes(relPath + "/" + prop.getName())) {
+                    aggregateWalkListener.onProperty(prop, depth + 1);
+                }
+            }
+            aggregateWalkListener.onChildren(node, depth);
+
+            // copy nodes to list
+            NodeIterator niter = node.getNodes();
+            long size = niter.getSize();
+            // getSize() may return -1 when unknown; fall back to a default capacity
+            List<Node> nodes = new ArrayList<Node>(size > 0 ? (int) size : 16);
+            while (niter.hasNext()) {
+                nodes.add(niter.nextNode());
+            }
+
+            // if node is not orderable, sort them alphabetically
+            boolean hasOrderableChildNodes = node.getPrimaryNodeType().hasOrderableChildNodes();
+            if (!hasOrderableChildNodes) {
+                Collections.sort(nodes, NodeNameComparator.INSTANCE);
+            }
+            for (Node child: nodes) {
+                String p = relPath + "/" + Text.getName(child.getPath());
+                if (includes(p)) {
+                    walk(aggregateWalkListener, p, child, depth + 1);
+                } else {
+                    // only inform if node is orderable
+                    if (hasOrderableChildNodes) {
+                        aggregateWalkListener.onNodeIgnored(child, depth+1);
+                    }
+                }
+            }
+            aggregateWalkListener.onNodeEnd(node, included, depth);
+        }
+    }
+
+    /**
+     * Checks if the item with the given rel path is included in this aggregate.
+     */
+    private boolean includes(String relPath) throws RepositoryException {
+        // if we have a full coverage aggregator, all items below our root are
+        // included.. for now just include all
+        return aggregator.hasFullCoverage() ||
+                includes != null && includes.contains(relPath);
+    }
+
+    /**
+     * Marks the given node as included and recursively ensures that all its
+     * ancestors up to the aggregate root are included as well.
+     * @param node the node to include
+     * @param nodePath the path of the node, or <code>null</code> to fetch it
+     */
+    private void include(Node node, String nodePath) throws RepositoryException {
+        if (nodePath == null) {
+            nodePath = node.getPath();
+        }
+
+        String relPath = nodePath.substring(path.length());
+        if (includes == null || !includes.contains(relPath)) {
+            if (log.isDebugEnabled()) {
+                log.debug("including {} -> {}", path, nodePath);
+            }
+            if (includes == null) {
+                includes = new HashSet<String>();
+            }
+            // cacheString presumably interns the path to save memory — see
+            // AggregateManagerImpl
+            includes.add(mgr.cacheString(relPath));
+            if (!node.isSame(getNode())) {
+                // ensure that parent nodes are included
+                include(node.getParent(), null);
+            }
+        }
+    }
+
+    /**
+     * Collects the namespace prefixes used by the given property, including
+     * prefixes that occur inside NAME and PATH values.
+     */
+    private void addNamespace(Set<String> prefixes, Property prop) throws RepositoryException {
+        String propName = prop.getName();
+        addNamespace(prefixes, propName);
+        switch (prop.getType()) {
+            case PropertyType.NAME:
+                // jcr:mixinTypes is always treated as multi-valued
+                if (propName.equals("jcr:mixinTypes") || prop.getDefinition().isMultiple()) {
+                    Value[] values = prop.getValues();
+                    for (Value value: values) {
+                        addNamespace(prefixes, value.getString());
+                    }
+                } else {
+                    addNamespace(prefixes, prop.getValue().getString());
+                }
+                break;
+            case PropertyType.PATH:
+                if (prop.getDefinition().isMultiple()) {
+                    Value[] values = prop.getValues();
+                    for (Value value: values) {
+                        addNamespacePath(prefixes, value.getString());
+                    }
+                } else {
+                    addNamespacePath(prefixes, prop.getValue().getString());
+                }
+                break;
+        }
+    }
+
+    /**
+     * Marks the given property as included, ensuring its parent node is
+     * included as well, and records binary properties for later retrieval
+     * via {@link #getBinaries()}.
+     */
+    private void include(Node parent, Property prop, String propPath)
+            throws RepositoryException {
+        String relPath = propPath.substring(path.length());
+        if (includes == null || !includes.contains(relPath)) {
+            if (log.isDebugEnabled()) {
+                log.debug("including {} -> {}", path, propPath);
+            }
+            // ensure that parent node is included as well (this also creates
+            // the includes set if necessary)
+            include(parent, null);
+            includes.add(mgr.cacheString(relPath));
+            if (prop.getType() == PropertyType.BINARY) {
+                if (binaries == null) {
+                    binaries = new LinkedList<Property>();
+                }
+                binaries.add(prop);
+            }
+        }
+    }
+
+    /**
+     * Adds the namespace prefix of the given (qualified) name, if any.
+     */
+    private void addNamespace(Set<String> prefixes, String name) throws RepositoryException {
+        int idx = name.indexOf(':');
+        if (idx > 0) {
+            String pfx = name.substring(0, idx);
+            if (!prefixes.contains(pfx)) {
+                prefixes.add(mgr.cacheString(pfx));
+            }
+        }
+    }
+
+    /**
+     * Adds the namespace prefixes of all name segments of the given path.
+     */
+    private void addNamespacePath(Set<String> prefixes, String path) throws RepositoryException {
+        String[] names = path.split("/");
+        for (String name: names) {
+            addNamespace(prefixes, name);
+        }
+    }
+
+    /**
+     * Loads the namespace prefixes used by this aggregate by traversing the
+     * included items.
+     */
+    private void loadNamespaces() {
+        if (namespacePrefixes == null) {
+            if (log.isDebugEnabled()) {
+                log.debug("loading namespaces of aggregate {}", path);
+            }
+            try {
+                load();
+                Set<String> prefixes = new HashSet<String>();
+                // need to traverse the nodes to get all namespaces
+                loadNamespaces(prefixes, "", getNode());
+                namespacePrefixes = prefixes.toArray(new String[prefixes.size()]);
+            } catch (RepositoryException e) {
+                throw new IllegalStateException("Internal error while loading namespaces", e);
+            }
+        }
+    }
+
+    /**
+     * Recursively collects the namespace prefixes of all included items
+     * below the given node.
+     */
+    private void loadNamespaces(Set<String> prefixes, String parentPath, Node node) throws RepositoryException {
+        String name = node.getName();
+        addNamespace(prefixes, name);
+        for (PropertyIterator iter = node.getProperties(); iter.hasNext();) {
+            Property p = iter.nextProperty();
+            String relPath = parentPath + "/" + p.getName();
+            if (includes(relPath)) {
+                addNamespace(prefixes, p);
+            }
+        }
+        for (NodeIterator iter = node.getNodes(); iter.hasNext();) {
+            Node c = iter.nextNode();
+            String relPath = parentPath + "/" + c.getName();
+            if (includes(relPath)) {
+                loadNamespaces(prefixes, relPath, c);
+            } else if (node.getPrimaryNodeType().hasOrderableChildNodes()) {
+                // excluded children of orderable nodes still contribute their
+                // name's prefix (their names appear in the serialization)
+                addNamespace(prefixes, c.getName());
+            }
+        }
+    }
+
+    /**
+     * Transitions this aggregate to the PREPARED state by collecting and
+     * preparing its items and sub aggregates as needed.
+     * @throws RepositoryException if an error occurs
+     */
+    private void load() throws RepositoryException {
+        long now = System.currentTimeMillis();
+        if (state == STATE_INITIAL) {
+            log.debug("Collect + Preparing {}", getPath());
+            prepare(getNode(), true);
+            state = STATE_PREPARED;
+            long end = System.currentTimeMillis();
+            log.debug("Collect + Preparing {} in {}ms", getPath(), (end-now));
+            mgr.onAggregateCollected();
+            mgr.onAggregatePrepared();
+        } else if (state == STATE_COLLECTED) {
+            log.debug("Preparing {}", getPath());
+            // in this state we were traversed once and all parent items were
+            // resolved. now we need to collect the items of our non-collected
+            // leaves
+            if (leaves != null && !leaves.isEmpty()) {
+                for (AggregateImpl leaf: leaves) {
+                    leaf.collect();
+                }
+            }
+            state = STATE_PREPARED;
+            long end = System.currentTimeMillis();
+            log.debug("Preparing {} in {}ms", getPath(), (end-now));
+            mgr.onAggregatePrepared();
+        }
+    }
+
+    /**
+     * Transitions this aggregate from INITIAL to COLLECTED without eagerly
+     * collecting its sub aggregates.
+     */
+    private void collect() throws RepositoryException {
+        if (state == STATE_INITIAL) {
+            long now = System.currentTimeMillis();
+            log.debug("Collecting {}", getPath());
+            prepare(getNode(), false);
+            state = STATE_COLLECTED;
+            long end = System.currentTimeMillis();
+            log.debug("Collecting {} in {}ms", getPath(), (end-now));
+            mgr.onAggregateCollected();
+        }
+    }
+
+    /**
+     * Includes the items covered by this aggregate's aggregator and creates
+     * sub aggregates for nodes that are handled by other aggregators.
+     * @param node the node to process
+     * @param descend if <code>true</code>, sub aggregates are collected eagerly
+     * @throws RepositoryException if an error occurs
+     */
+    private void prepare(Node node, boolean descend)
+            throws RepositoryException {
+        if (log.isDebugEnabled()) {
+            log.debug("descending into {} (descend={})", node.getPath(), descend);
+        }
+        // add "our" properties to the include set
+        PropertyIterator pIter = node.getProperties();
+        while (pIter.hasNext()) {
+            Property p = pIter.nextProperty();
+            String path = p.getPath();
+            if (aggregator.includes(getNode(), node, p, path)) {
+                include(node, p, path);
+            }
+        }
+        // include "our" nodes to the include set and delegate the others to the
+        // respective aggregator building sub aggregates
+        NodeIterator nIter = node.getNodes();
+        while (nIter.hasNext()) {
+            Node n = nIter.nextNode();
+            String path = n.getPath();
+            PathFilterSet coverSet = mgr.getWorkspaceFilter().getCoveringFilterSet(path);
+            boolean isAncestor = mgr.getWorkspaceFilter().isAncestor(path);
+            boolean isIncluded = mgr.getWorkspaceFilter().contains(path);
+            if (coverSet == null && !isAncestor) {
+                continue;
+            }
+            // check if another aggregator can handle this node
+            Aggregator a = mgr.getAggregator(n, path);
+            // - if the aggregator is null
+            // - or the aggregator is the same as ours or the default
+            // - and if we include the content as well
+            // - then don't use the matched aggregator
+            if ((a == null)
+                    || ((a == aggregator || a.isDefault())
+                    && (aggregator.includes(getNode(), n, path)))) {
+                // if workspace does not include this node, ignore it
+                if (!isIncluded && !isAncestor) {
+                    continue;
+                }
+                include(n, path);
+                prepare(n, true);
+            } else {
+                // otherwise create sub node and collect items if needed
+                // but only if the node is either an ancestor or is included
+                // or if the workspace filter set contains relative pattern (ACL export case).
+                boolean onlyRelativePatterns = coverSet != null && coverSet.hasOnlyRelativePatterns();
+                if (isAncestor || isIncluded || onlyRelativePatterns) {
+                    AggregateImpl sub = new AggregateImpl(this, path, a);
+                    sub.filterArtifacts = !isIncluded && onlyRelativePatterns;
+                    if (leaves == null) {
+                        leaves = new LinkedList<AggregateImpl>();
+                    }
+                    if (descend) {
+                        try {
+                            sub.collect();
+                        } catch (RepositoryException e) {
+                            // in some weird cases, the jcr2spi layer reports
+                            // wrong nodes. in this case, just remove it again
+                            // as a leaf
+                            log.warn("Alleged node is gone: {}", path);
+                            sub.invalidate();
+                            sub = null;
+                        }
+                    } else {
+                        log.debug("adding pending leaf {}", path);
+                    }
+                    if (sub != null) {
+                        leaves.add(sub);
+                    }
+                }
+            }
+        }
+    }
+
+}
\ No newline at end of file
Added: jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateManagerImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateManagerImpl.java?rev=1512568&view=auto
==============================================================================
--- jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateManagerImpl.java (added)
+++ jackrabbit/commons/filevault/trunk/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/AggregateManagerImpl.java Sat Aug 10 05:53:42 2013
@@ -0,0 +1,585 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.vault.fs.impl;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.jcr.Credentials;
+import javax.jcr.LoginException;
+import javax.jcr.Node;
+import javax.jcr.PropertyType;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.nodetype.NodeDefinition;
+import javax.jcr.nodetype.NodeType;
+import javax.jcr.nodetype.PropertyDefinition;
+
+import org.apache.commons.collections.map.ReferenceMap;
+import org.apache.commons.io.IOUtils;
+import org.apache.jackrabbit.vault.fs.api.AggregateManager;
+import org.apache.jackrabbit.vault.fs.api.Aggregator;
+import org.apache.jackrabbit.vault.fs.api.ArtifactHandler;
+import org.apache.jackrabbit.vault.fs.api.DumpContext;
+import org.apache.jackrabbit.vault.fs.api.ImportInfo;
+import org.apache.jackrabbit.vault.fs.api.ProgressTrackerListener;
+import org.apache.jackrabbit.vault.fs.api.RepositoryAddress;
+import org.apache.jackrabbit.vault.fs.api.VaultFsConfig;
+import org.apache.jackrabbit.vault.fs.api.WorkspaceFilter;
+import org.apache.jackrabbit.vault.fs.config.AbstractVaultFsConfig;
+import org.apache.jackrabbit.vault.fs.config.ConfigurationException;
+import org.apache.jackrabbit.vault.fs.config.DefaultWorkspaceFilter;
+import org.apache.jackrabbit.vault.fs.impl.aggregator.RootAggregator;
+import org.apache.jackrabbit.vault.fs.spi.CNDReader;
+import org.apache.jackrabbit.vault.fs.spi.NodeTypeInstaller;
+import org.apache.jackrabbit.vault.fs.spi.ServiceProviderFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The artifact manager exposes an artifact node tree using the configured
+ * aggregators and serializers.
+ */
+public class AggregateManagerImpl implements AggregateManager {
+
+ /**
+ * the name of the (internal) default config
+ */
+ private static final String DEFAULT_CONFIG =
+ "org/apache/jackrabbit/vault/fs/config/defaultConfig-1.1.xml";
+
+ /**
+ * the name of the (internal) default workspace filter
+ */
+ private static final String DEFAULT_WSP_FILTER = "" +
+ "org/apache/jackrabbit/vault/fs/config/defaultFilter-1.0.xml";
+
+ /**
+ * name of node types resource
+ */
+ private static final String DEFAULT_NODETYPES = "" +
+ "org/apache/jackrabbit/vault/fs/config/nodetypes.cnd";
+
+ /**
+ * the repository session for this manager; set to <code>null</code> in
+ * {@link #unmount()} which is what {@link #isMounted()} checks
+ */
+ private Session session;
+
+ /**
+ * indicates if this manager owns the session and is allowed to close
+ * it in {@link #unmount()}
+ */
+ private final boolean ownSession;
+
+ /**
+ * The repository address of the mountpoint;
+ */
+ private final RepositoryAddress mountpoint;
+
+ /**
+ * provider that selects the respective aggregator for a repository node
+ */
+ private final AggregatorProvider aggregatorProvider;
+
+ /**
+ * list of artifact handlers
+ */
+ private final List<ArtifactHandler> artifactHandlers;
+
+ /**
+ * filter to general includes/excludes
+ */
+ private final WorkspaceFilter workspaceFilter;
+
+ /**
+ * optional progress tracker; non-null only between
+ * {@link #startTracking} and {@link #stopTracking}
+ */
+ private AggregatorTracker tracker;
+
+ /**
+ * Set of node types used in the aggregates. this is cumulated when building
+ * the aggregates
+ */
+ private final Set<String> nodeTypes = new HashSet<String>();
+
+ /**
+ * weak-keyed/weak-valued interning cache for repository strings,
+ * see {@link #cacheString(String)}
+ */
+ private final Map<String, String> nameCache = new ReferenceMap(ReferenceMap.WEAK, ReferenceMap.WEAK);
+
+ /**
+ * config
+ */
+ private final VaultFsConfig config;
+
+ /**
+ * the root aggregate
+ */
+ private final AggregateImpl root;
+
+ /**
+ * Creates and mounts a new artifact manager rooted at the given mountpoint,
+ * using the provided (already established) repository session.
+ *
+ * @param config fs config; <code>null</code> selects the built-in default
+ * @param wspFilter the workspace filter; <code>null</code> selects the default
+ * @param mountpoint the address of the mountpoint
+ * @param session the repository session
+ * @return an artifact manager
+ * @throws RepositoryException if an error occurs.
+ */
+ public static AggregateManager mount(VaultFsConfig config,
+ WorkspaceFilter wspFilter,
+ RepositoryAddress mountpoint,
+ Session session)
+ throws RepositoryException {
+ assert mountpoint.getWorkspace().equals(session.getWorkspace().getName());
+ VaultFsConfig fsConfig = config == null ? getDefaultConfig() : config;
+ WorkspaceFilter filter = wspFilter == null ? getDefaultWorkspaceFilter() : wspFilter;
+ Node root = session.getNode(mountpoint.getPath());
+ // the caller keeps ownership of the session, so ownSession=false
+ return new AggregateManagerImpl(fsConfig, filter, mountpoint, root, false);
+ }
+
+ /**
+ * Creates a new artifact manager that is rooted at the given path using
+ * the provided repository, credentials and workspace to create the
+ * session. The manager owns the created session and will log it out
+ * in {@link #unmount()}.
+ *
+ * @param config fs config; <code>null</code> selects the built-in default
+ * @param wspFilter the workspace filter; <code>null</code> selects the default
+ * @param rep the jcr repository
+ * @param credentials the credentials
+ * @param mountpoint the address of the mountpoint
+ * @return an artifact manager
+ * @throws RepositoryException if an error occurs.
+ */
+ public static AggregateManager mount(VaultFsConfig config,
+ WorkspaceFilter wspFilter,
+ Repository rep,
+ Credentials credentials,
+ RepositoryAddress mountpoint)
+ throws RepositoryException {
+ if (config == null) {
+ config = getDefaultConfig();
+ }
+ if (wspFilter == null) {
+ wspFilter = getDefaultWorkspaceFilter();
+ }
+ String wspName = mountpoint.getWorkspace();
+ Session session;
+ try {
+ session = rep.login(credentials, wspName);
+ } catch (LoginException e) {
+ if (wspName == null) {
+ // try again with default workspace
+ // todo: make configurable
+ session = rep.login(credentials, "crx.default");
+ } else {
+ throw e;
+ }
+ }
+ Node rootNode;
+ try {
+ rootNode = session.getNode(mountpoint.getPath());
+ } catch (RepositoryException e) {
+ // don't leak the freshly created session if the mountpoint
+ // is missing or not accessible
+ session.logout();
+ throw e;
+ }
+ return new AggregateManagerImpl(config, wspFilter, mountpoint, rootNode, true);
+ }
+
+ /**
+ * Loads the built-in default vault fs configuration from the classpath.
+ *
+ * @return the default config
+ * @throws InternalError if the resource is not on the classpath
+ * @throws IllegalArgumentException if the resource cannot be parsed
+ */
+ public static VaultFsConfig getDefaultConfig() {
+ InputStream in = AggregateManagerImpl.class.getClassLoader()
+ .getResourceAsStream(DEFAULT_CONFIG);
+ if (in == null) {
+ throw new InternalError("Default config not in classpath: " + DEFAULT_CONFIG);
+ }
+ try {
+ return AbstractVaultFsConfig.load(in, DEFAULT_CONFIG);
+ } catch (ConfigurationException e) {
+ throw new IllegalArgumentException("Internal error while parsing config.", e);
+ } catch (IOException e) {
+ throw new IllegalArgumentException("Internal error while parsing config.", e);
+ }
+ }
+
+ /**
+ * Loads the built-in default workspace filter from the classpath.
+ *
+ * @return the default workspace filter
+ * @throws InternalError if the resource is not on the classpath
+ * @throws IllegalArgumentException if the resource cannot be parsed
+ */
+ public static DefaultWorkspaceFilter getDefaultWorkspaceFilter() {
+ InputStream in = AggregateManagerImpl.class.getClassLoader()
+ .getResourceAsStream(DEFAULT_WSP_FILTER);
+ if (in == null) {
+ throw new InternalError("Default filter not in classpath: " + DEFAULT_WSP_FILTER);
+ }
+ try {
+ DefaultWorkspaceFilter filter = new DefaultWorkspaceFilter();
+ filter.load(in);
+ return filter;
+ } catch (ConfigurationException e) {
+ throw new IllegalArgumentException("Internal error while parsing config.", e);
+ } catch (IOException e) {
+ throw new IllegalArgumentException("Internal error while parsing config.", e);
+ }
+ }
+
+
+ /**
+ * Unmounts this manager. Logs out the session only if this manager
+ * created (owns) it; afterwards {@link #isMounted()} returns false.
+ *
+ * @throws RepositoryException if already unmounted
+ */
+ public void unmount() throws RepositoryException {
+ assertMounted();
+ if (ownSession) {
+ session.logout();
+ }
+ session = null;
+ }
+
+ /**
+ * Returns the root aggregate of the mounted tree.
+ *
+ * @return the root aggregate
+ * @throws RepositoryException if not mounted anymore
+ */
+ public AggregateImpl getRoot() throws RepositoryException {
+ assertMounted();
+ return root;
+ }
+
+ /**
+ * Returns the repository address of the mountpoint.
+ *
+ * @return the mountpoint address
+ */
+ public RepositoryAddress getMountpoint() {
+ return mountpoint;
+ }
+
+ /**
+ * Constructs the artifact manager.
+ *
+ * @param config the configuration
+ * @param wspFilter the workspace filter
+ * @param mountpoint the repository address of the mountpoint
+ * @param rootNode the root node
+ * @param ownSession indicates if the session can be logged out in unmount.
+ * @throws RepositoryException if an error occurs.
+ */
+ private AggregateManagerImpl(VaultFsConfig config, WorkspaceFilter wspFilter,
+ RepositoryAddress mountpoint, Node rootNode,
+ boolean ownSession)
+ throws RepositoryException {
+ // the session is taken from the root node; whether it may be closed
+ // in unmount() is governed by the ownSession flag
+ session = rootNode.getSession();
+ this.mountpoint = mountpoint;
+ this.ownSession = ownSession;
+ this.config = config;
+ workspaceFilter = wspFilter;
+ aggregatorProvider = new AggregatorProvider(config.getAggregators());
+ artifactHandlers = Collections.unmodifiableList(config.getHandlers());
+
+ // init root node: the repository root gets the special RootAggregator,
+ // any other mountpoint gets whatever aggregator matches it
+ Aggregator rootAggregator = rootNode.getDepth() == 0
+ ? new RootAggregator()
+ : getAggregator(rootNode, null);
+ root = new AggregateImpl(this, rootNode.getPath(), rootAggregator);
+
+ // setup node types
+ initNodeTypes();
+ }
+
+ /**
+ * Returns the set of node type names used by the aggregates collected
+ * so far.
+ * NOTE(review): this exposes the live internal set, not a copy —
+ * callers could mutate it; confirm whether the interface allows that.
+ *
+ * @return the set of node type names
+ */
+ public Set<String> getNodeTypes() {
+ return nodeTypes;
+ }
+
+ /**
+ * Add the primary and mixin node types of that node to the internal set
+ * of used node types.
+ * @param node the node
+ * @throws RepositoryException if an error occurs
+ */
+ public void addNodeTypes(Node node) throws RepositoryException {
+ internalAddNodeType(node.getPrimaryNodeType());
+ for (NodeType nt: node.getMixinNodeTypes()) {
+ internalAddNodeType(nt);
+ }
+ }
+
+ /**
+ * Resolves a namespace prefix via the underlying session.
+ *
+ * @param prefix the namespace prefix
+ * @return the namespace URI
+ * @throws RepositoryException if the prefix is unknown
+ */
+ public String getNamespaceURI(String prefix) throws RepositoryException {
+ return session.getNamespaceURI(prefix);
+ }
+
+ /**
+ * Interns the given string in a weak-keyed/weak-valued cache so equal
+ * repository strings share one instance while remaining collectable.
+ * The explicit copy avoids retaining a potentially larger backing
+ * buffer of the original string.
+ * NOTE(review): the ReferenceMap is not synchronized — assumes this is
+ * only called from a single thread; confirm against callers.
+ *
+ * @param string the string to intern
+ * @return the cached (canonical) instance
+ */
+ public String cacheString(String string) {
+ String ret = nameCache.get(string);
+ if (ret == null) {
+ // create copy to keep retained size minimal
+ ret = new String(string);
+ nameCache.put(ret, ret);
+ }
+ return ret;
+ }
+
+ /**
+ * Starts progress tracking; subsequent aggregate events are counted
+ * and periodically reported to the given listener.
+ *
+ * @param pTracker the listener to report to (may be null)
+ */
+ public void startTracking(ProgressTrackerListener pTracker) {
+ tracker = new AggregatorTracker(pTracker);
+ }
+
+ /**
+ * Stops progress tracking, flushing a final status message first.
+ */
+ public void stopTracking() {
+ if (tracker != null) {
+ tracker.log(true);
+ tracker = null;
+ }
+ }
+
+ /**
+ * Notifies the tracker (if any) that an aggregate was created.
+ */
+ public void onAggregateCreated() {
+ if (tracker != null) {
+ tracker.onCreated();
+ }
+ }
+
+ /**
+ * Notifies the tracker (if any) that an aggregate was collected.
+ */
+ public void onAggregateCollected() {
+ if (tracker != null) {
+ tracker.onCollected();
+ }
+ }
+
+ /**
+ * Notifies the tracker (if any) that an aggregate was prepared.
+ */
+ public void onAggregatePrepared() {
+ if (tracker != null) {
+ tracker.onPrepared();
+ }
+ }
+
+ /**
+ * internally add the node type and all transitive ones to the set of
+ * used node types. The contains() check both deduplicates and breaks
+ * recursion cycles.
+ * NOTE(review): supertypes are added by name only — their own child
+ * node definitions and reference constraints are not recursed into;
+ * confirm whether that coverage is intentional.
+ * @param nodeType to add
+ */
+ private void internalAddNodeType(NodeType nodeType) {
+ if (nodeType != null && !nodeTypes.contains(nodeType.getName())) {
+ nodeTypes.add(nodeType.getName());
+ NodeType[] superTypes = nodeType.getSupertypes();
+ for (NodeType superType: superTypes) {
+ nodeTypes.add(superType.getName());
+ }
+ // pull in types reachable through child node definitions
+ NodeDefinition[] nodeDefs = nodeType.getChildNodeDefinitions();
+ if (nodeDefs != null) {
+ for (NodeDefinition nodeDef: nodeDefs) {
+ internalAddNodeType(nodeDef.getDefaultPrimaryType());
+ NodeType[] reqs = nodeDef.getRequiredPrimaryTypes();
+ if (reqs != null) {
+ for (NodeType req: reqs) {
+ internalAddNodeType(req);
+ }
+ }
+ }
+ }
+
+ // check reference constraints, too (bug #33367)
+ PropertyDefinition[] propDefs = nodeType.getPropertyDefinitions();
+ if (propDefs != null) {
+ for (PropertyDefinition propDef: propDefs) {
+ if (propDef.getRequiredType() == PropertyType.REFERENCE ||
+ propDef.getRequiredType() == PropertyType.WEAKREFERENCE) {
+ String[] vcs = propDef.getValueConstraints();
+ if (vcs != null) {
+ for (String vc: vcs) {
+ try {
+ internalAddNodeType(session.getWorkspace().getNodeTypeManager().getNodeType(vc));
+ } catch (RepositoryException e) {
+ // ignore: the constraint may name a type
+ // that is not registered in this workspace
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ }
+
+ /**
+ * Initializes vlt node types (might not be the correct location).
+ * If both vlt:HierarchyNode and vlt:FullCoverage are already registered
+ * this is a no-op, otherwise the bundled CND is installed.
+ *
+ * @throws RepositoryException if an error occurs
+ */
+ private void initNodeTypes() throws RepositoryException {
+ // check if node types are registered
+ try {
+ session.getWorkspace().getNodeTypeManager().getNodeType("vlt:HierarchyNode");
+ session.getWorkspace().getNodeTypeManager().getNodeType("vlt:FullCoverage");
+ return;
+ } catch (RepositoryException e) {
+ // ignore: at least one type is missing, install them below
+ }
+ InputStream in = getClass().getClassLoader()
+ .getResourceAsStream(DEFAULT_NODETYPES);
+ // fail with a clear message instead of an NPE in the reader below
+ if (in == null) {
+ throw new RepositoryException("Node types resource not in classpath: " + DEFAULT_NODETYPES);
+ }
+ try {
+ NodeTypeInstaller installer = ServiceProviderFactory.getProvider().getDefaultNodeTypeInstaller(session);
+ CNDReader types = ServiceProviderFactory.getProvider().getCNDReader();
+ types.read(new InputStreamReader(in, "utf8"), DEFAULT_NODETYPES, null);
+ installer.install(null, types);
+ } catch (Exception e) {
+ throw new RepositoryException("Error while importing nodetypes.", e);
+ } finally {
+ IOUtils.closeQuietly(in);
+ }
+ }
+
+ /**
+ * Returns the aggregator responsible for the given node, selected by
+ * the configured aggregator provider.
+ *
+ * @param node the repository node
+ * @param path the path of the node or <code>null</code>
+ * @return the matching aggregator or <code>null</code>
+ * @throws RepositoryException if an error occurs
+ */
+ public Aggregator getAggregator(Node node, String path) throws RepositoryException {
+ return aggregatorProvider.getAggregator(node, path);
+ }
+
+ /**
+ * Returns the workspace filter holding the general includes/excludes.
+ *
+ * @return the workspace filter
+ */
+ public WorkspaceFilter getWorkspaceFilter() {
+ return workspaceFilter;
+ }
+
+ /**
+ * Writes the artifact set back to the repository. The first handler
+ * that accepts the artifacts wins; the aggregate is invalidated so it
+ * reflects the repository changes on next access.
+ *
+ * @param node the artifact node to write
+ * @param reposName the name of the new node or <code>null</code>
+ * @param artifacts the artifact to write
+ * @return infos about the modifications
+ * @throws RepositoryException if an error occurs.
+ * @throws IOException if an I/O error occurs.
+ * @throws IllegalStateException if no handler accepts the artifacts
+ */
+ public ImportInfo writeAggregate(AggregateImpl node, String reposName,
+ ArtifactSetImpl artifacts)
+ throws RepositoryException, IOException {
+ assertMounted();
+ for (ArtifactHandler artifactHandler : artifactHandlers) {
+ // the two accept variants only differ in the optional name
+ // for the new node
+ ImportInfo info = reposName == null
+ ? artifactHandler.accept(session, node, artifacts)
+ : artifactHandler.accept(session, node, reposName, artifacts);
+ if (info != null) {
+ node.invalidate();
+ return info;
+ }
+ }
+ throw new IllegalStateException("No handler accepted artifacts " + artifacts);
+ }
+
+ /**
+ * Checks if this tree is still mounted and if the attached session
+ * is still live.
+ *
+ * @throws RepositoryException if not mounted or not live.
+ */
+ private void assertMounted() throws RepositoryException {
+ if (!isMounted()) {
+ throw new RepositoryException("JcrFS is not mounted anymore.");
+ }
+ }
+
+ /**
+ * Checks if this manager is mounted: the session exists and is live.
+ *
+ * @return true if mounted
+ */
+ public boolean isMounted() {
+ return session != null && session.isLive();
+ }
+
+ /**
+ * Returns the user id of the attached session.
+ *
+ * @return the user id
+ * @throws RepositoryException if not mounted anymore
+ */
+ public String getUserId() throws RepositoryException {
+ assertMounted();
+ return session.getUserID();
+ }
+
+ /**
+ * Returns the name of the attached workspace.
+ *
+ * @return the workspace name
+ * @throws RepositoryException if not mounted anymore
+ */
+ public String getWorkspace() throws RepositoryException {
+ assertMounted();
+ return session.getWorkspace().getName();
+ }
+
+ /**
+ * Returns the underlying repository session (may be null after unmount).
+ *
+ * @return the session
+ */
+ public Session getSession() {
+ return session;
+ }
+
+
+ public void dumpConfig(PrintWriter out) throws IOException {
+ DumpContext ctx = new DumpContext(out);
+ ctx.println(false, "workspace filter");
+ ctx.indent(false);
+ workspaceFilter.dump(ctx, true);
+ ctx.outdent();
+ aggregatorProvider.dump(ctx, false);
+ ctx.println(true, "handlers");
+ ctx.indent(true);
+ for (Iterator<ArtifactHandler> iter = artifactHandlers.iterator(); iter.hasNext();) {
+ ArtifactHandler h = iter.next();
+ h.dump(ctx, !iter.hasNext());
+ }
+ ctx.outdent();
+
+ ctx.flush();
+ }
+
+ /**
+ * Returns the vault fs configuration this manager was mounted with.
+ *
+ * @return the config
+ */
+ public VaultFsConfig getConfig() {
+ return config;
+ }
+
+ /**
+ * Internal progress tracker: counts aggregate created/collected/prepared
+ * events and emits a throttled status message (at most every 5 seconds,
+ * or when flushed) to the logger and the optional listener.
+ */
+ private static class AggregatorTracker {
+
+ /**
+ * default logger
+ */
+ private static final Logger log = LoggerFactory.getLogger(AggregatorTracker.class);
+
+ /**
+ * optional listener receiving the status messages (may be null)
+ */
+ private ProgressTrackerListener tracker;
+
+ // number of aggregates created so far
+ int numCreated;
+
+ // number of aggregates collected so far
+ int numCollected;
+
+ // number of aggregates prepared so far
+ int numPrepared;
+
+ // timestamp of the last emitted status message (0 = none yet)
+ long lastLogged;
+
+ private AggregatorTracker(ProgressTrackerListener tracker) {
+ this.tracker = tracker;
+ }
+
+ public void onCreated() {
+ numCreated++;
+ log(false);
+ }
+
+ public void onCollected() {
+ numCollected++;
+ log(false);
+ }
+
+ public void onPrepared() {
+ numPrepared++;
+ log(false);
+ }
+
+ /**
+ * Emits a status message if due (5s since the last one) or forced.
+ * @param flush forces the message regardless of the interval
+ */
+ public void log(boolean flush) {
+ // nothing would be visible: no listener attached and the logger
+ // is disabled at the level used below. (was isInfoEnabled, but
+ // the message is emitted via log.debug)
+ if (tracker == null && !log.isDebugEnabled()) {
+ return;
+ }
+ long now = System.currentTimeMillis();
+ if (lastLogged == 0) {
+ lastLogged = now;
+
+ // updated each 5 seconds
+ } else if (now-lastLogged > 5000 || flush) {
+ lastLogged = now;
+ String str = new StringBuilder("Aggregation status: ")
+ .append(numPrepared).append(" of ")
+ .append(numCreated).append(" prepared, ")
+ .append(numCollected).append(" collected").toString();
+ log.debug("- {}", str);
+ if (tracker != null) {
+ tracker.onMessage(ProgressTrackerListener.Mode.TEXT, "-", str);
+ }
+ }
+ }
+ }
+}
\ No newline at end of file